├── .github ├── FUNDING.yml └── workflows │ └── ci.yml ├── .gitignore ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── benches └── bench-libproc-macro │ ├── Cargo.toml │ ├── README.md │ ├── lib.rs │ └── main.rs ├── build.rs ├── fuzz ├── .gitignore ├── Cargo.toml └── fuzz_targets │ └── parse_token_stream.rs ├── rust-toolchain.toml ├── src ├── detection.rs ├── extra.rs ├── fallback.rs ├── lib.rs ├── location.rs ├── marker.rs ├── num.rs ├── parse.rs ├── probe.rs ├── probe │ ├── proc_macro_span.rs │ ├── proc_macro_span_file.rs │ └── proc_macro_span_location.rs ├── rcvec.rs ├── rustc_literal_escaper.rs └── wrapper.rs └── tests ├── comments.rs ├── features.rs ├── marker.rs ├── test.rs ├── test_fmt.rs ├── test_size.rs └── ui ├── Cargo.toml ├── compiletest.rs ├── test-not-send.rs └── test-not-send.stderr /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: dtolnay 2 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | pull_request: 6 | workflow_dispatch: 7 | schedule: [cron: "40 1 * * *"] 8 | 9 | permissions: 10 | contents: read 11 | 12 | env: 13 | RUSTFLAGS: -Dwarnings 14 | 15 | jobs: 16 | pre_ci: 17 | uses: dtolnay/.github/.github/workflows/pre_ci.yml@master 18 | 19 | test: 20 | name: Rust ${{matrix.rust}} 21 | needs: pre_ci 22 | if: needs.pre_ci.outputs.continue 23 | runs-on: ubuntu-latest 24 | strategy: 25 | fail-fast: false 26 | matrix: 27 | rust: [1.80.0, stable, beta] 28 | timeout-minutes: 45 29 | steps: 30 | - uses: actions/checkout@v5 31 | - uses: dtolnay/rust-toolchain@master 32 | with: 33 | toolchain: ${{matrix.rust}} 34 | components: rust-src 35 | - run: cargo test 36 | - run: cargo test --no-default-features 37 | - run: cargo test --features span-locations 38 | - name: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test 39 | run: cargo test 40 | env: 41 | RUSTFLAGS: --cfg procmacro2_semver_exempt ${{env.RUSTFLAGS}} 42 | - name: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test --no-default-features 43 | run: cargo test --no-default-features 44 | env: 45 | RUSTFLAGS: --cfg procmacro2_semver_exempt ${{env.RUSTFLAGS}} 46 | 47 | nightly: 48 | name: Rust nightly 49 | needs: pre_ci 50 | if: needs.pre_ci.outputs.continue 51 | runs-on: ubuntu-latest 52 | timeout-minutes: 45 53 | steps: 54 | - uses: actions/checkout@v5 55 | - uses: dtolnay/rust-toolchain@nightly 56 | with: 57 | components: rust-src 58 | - name: Enable type layout randomization 59 | run: echo RUSTFLAGS=${RUSTFLAGS}\ -Zrandomize-layout\ --cfg=randomize_layout >> $GITHUB_ENV 60 | - run: cargo check 61 | env: 62 | RUSTFLAGS: --cfg procmacro2_nightly_testing ${{env.RUSTFLAGS}} 63 | - run: cargo test 64 | - run: cargo test --no-default-features 65 | - run: cargo test --no-default-features --test features -- --ignored make_sure_no_proc_macro # run the ignored test to make sure the `proc-macro` feature is disabled 66 | - run: cargo test --features span-locations 67 | - run: cargo test --manifest-path tests/ui/Cargo.toml 68 | - name: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test 69 | run: cargo test 70 | env: 71 | RUSTFLAGS: --cfg procmacro2_semver_exempt ${{env.RUSTFLAGS}} 72 | - name: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test --no-default-features 73 | run: cargo test --no-default-features 74 | env: 75 | RUSTFLAGS: --cfg procmacro2_semver_exempt 
${{env.RUSTFLAGS}} 76 | - name: RUSTFLAGS='-Z allow-features=' cargo test 77 | run: cargo test 78 | env: 79 | RUSTFLAGS: -Z allow-features= --cfg procmacro2_backtrace ${{env.RUSTFLAGS}} 80 | - uses: actions/upload-artifact@v4 81 | if: always() 82 | with: 83 | name: Cargo.lock 84 | path: Cargo.lock 85 | continue-on-error: true 86 | 87 | layout: 88 | name: Layout 89 | needs: pre_ci 90 | if: needs.pre_ci.outputs.continue 91 | runs-on: ubuntu-latest 92 | timeout-minutes: 45 93 | steps: 94 | - uses: actions/checkout@v5 95 | - uses: dtolnay/rust-toolchain@nightly 96 | with: 97 | components: rust-src 98 | - run: cargo test --test test_size 99 | - run: cargo test --test test_size --features span-locations 100 | - run: cargo test --test test_size --no-default-features 101 | - run: cargo test --test test_size --no-default-features --features span-locations 102 | 103 | msrv: 104 | name: Rust 1.60.0 105 | needs: pre_ci 106 | if: needs.pre_ci.outputs.continue 107 | runs-on: ubuntu-latest 108 | timeout-minutes: 45 109 | steps: 110 | - uses: actions/checkout@v5 111 | - uses: dtolnay/rust-toolchain@1.60.0 112 | with: 113 | components: rust-src 114 | - run: cargo check 115 | - run: cargo check --no-default-features 116 | - run: cargo check --features span-locations 117 | - name: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo check 118 | run: cargo check 119 | env: 120 | RUSTFLAGS: --cfg procmacro2_semver_exempt ${{env.RUSTFLAGS}} 121 | - name: RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo check --no-default-features 122 | run: cargo check --no-default-features 123 | env: 124 | RUSTFLAGS: --cfg procmacro2_semver_exempt ${{env.RUSTFLAGS}} 125 | 126 | minimal: 127 | name: Minimal versions 128 | needs: pre_ci 129 | if: needs.pre_ci.outputs.continue 130 | runs-on: ubuntu-latest 131 | timeout-minutes: 45 132 | steps: 133 | - uses: actions/checkout@v5 134 | - uses: dtolnay/rust-toolchain@nightly 135 | - run: cargo generate-lockfile -Z minimal-versions 136 | - run: cargo check --locked 137 | 138 | webassembly: 139 | name: WebAssembly 140 | needs: pre_ci 141 | if: needs.pre_ci.outputs.continue 142 | runs-on: ubuntu-latest 143 | timeout-minutes: 45 144 | steps: 145 | - uses: actions/checkout@v5 146 | - uses: dtolnay/rust-toolchain@nightly 147 | with: 148 | target: wasm32-unknown-unknown 149 | components: rust-src 150 | - name: Ignore WebAssembly linker warning 151 | run: echo RUSTFLAGS=${RUSTFLAGS}\ -Alinker_messages >> $GITHUB_ENV 152 | - run: cargo test --target wasm32-unknown-unknown --no-run 153 | 154 | fuzz: 155 | name: Fuzz 156 | needs: pre_ci 157 | if: needs.pre_ci.outputs.continue 158 | runs-on: ubuntu-latest 159 | timeout-minutes: 45 160 | steps: 161 | - uses: actions/checkout@v5 162 | - uses: dtolnay/rust-toolchain@nightly 163 | with: 164 | components: rust-src 165 | - uses: dtolnay/install@cargo-fuzz 166 | - run: cargo fuzz check 167 | - run: cargo check --no-default-features --features afl 168 | working-directory: fuzz 169 | - uses: dtolnay/install@honggfuzz 170 | - name: Run apt install binutils-dev libunwind-dev 171 | run: | 172 | sudo sed -i 's/^update_initramfs=yes$/update_initramfs=no/' /etc/initramfs-tools/update-initramfs.conf 173 | sudo rm -f /var/lib/man-db/auto-update 174 | sudo apt-get update 175 | sudo apt-get install binutils-dev libunwind-dev 176 | - run: cargo hfuzz build --no-default-features --features honggfuzz 177 | working-directory: fuzz 178 | 179 | doc: 180 | name: Documentation 181 | needs: pre_ci 182 | if: needs.pre_ci.outputs.continue 183 | runs-on: ubuntu-latest 184 | 
timeout-minutes: 45 185 | env: 186 | RUSTDOCFLAGS: -Dwarnings 187 | steps: 188 | - uses: actions/checkout@v5 189 | - uses: dtolnay/rust-toolchain@nightly 190 | with: 191 | components: rust-src 192 | - uses: dtolnay/install@cargo-docs-rs 193 | - run: cargo docs-rs 194 | 195 | clippy: 196 | name: Clippy 197 | runs-on: ubuntu-latest 198 | if: github.event_name != 'pull_request' 199 | timeout-minutes: 45 200 | steps: 201 | - uses: actions/checkout@v5 202 | - uses: dtolnay/rust-toolchain@nightly 203 | with: 204 | components: clippy, rust-src 205 | - run: cargo clippy --tests -- -Dclippy::all -Dclippy::pedantic 206 | - run: cargo clippy --tests --all-features -- -Dclippy::all -Dclippy::pedantic 207 | 208 | miri: 209 | name: Miri 210 | needs: pre_ci 211 | if: needs.pre_ci.outputs.continue 212 | runs-on: ubuntu-latest 213 | timeout-minutes: 45 214 | steps: 215 | - uses: actions/checkout@v5 216 | - uses: dtolnay/rust-toolchain@miri 217 | - run: cargo miri setup 218 | - run: cargo miri test 219 | env: 220 | MIRIFLAGS: -Zmiri-strict-provenance 221 | 222 | outdated: 223 | name: Outdated 224 | runs-on: ubuntu-latest 225 | if: github.event_name != 'pull_request' 226 | timeout-minutes: 45 227 | steps: 228 | - uses: actions/checkout@v5 229 | - uses: dtolnay/rust-toolchain@stable 230 | - uses: dtolnay/install@cargo-outdated 231 | - run: cargo outdated --workspace --exit-code 1 232 | - run: cargo outdated --manifest-path fuzz/Cargo.toml --exit-code 1 233 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target/ 2 | /Cargo.lock 3 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "proc-macro2" 3 | version = "1.0.103" 4 | authors = ["David Tolnay <dtolnay@gmail.com>", "Alex Crichton <alex@alexcrichton.com>"] 5 | autobenches = false 6 | categories = ["development-tools::procedural-macro-helpers"] 7 | description = "A substitute implementation of the compiler's `proc_macro` API to decouple token-based libraries from the procedural macro use case." 8 | documentation = "https://docs.rs/proc-macro2" 9 | edition = "2021" 10 | keywords = ["macros", "syn"] 11 | license = "MIT OR Apache-2.0" 12 | repository = "https://github.com/dtolnay/proc-macro2" 13 | rust-version = "1.60" 14 | 15 | [package.metadata.docs.rs] 16 | rustc-args = ["--cfg=procmacro2_semver_exempt"] 17 | targets = ["x86_64-unknown-linux-gnu"] 18 | rustdoc-args = [ 19 | "--cfg=procmacro2_semver_exempt", 20 | "--generate-link-to-definition", 21 | "--generate-macro-expansion", 22 | "--extern-html-root-url=core=https://doc.rust-lang.org", 23 | "--extern-html-root-url=alloc=https://doc.rust-lang.org", 24 | "--extern-html-root-url=std=https://doc.rust-lang.org", 25 | "--extern-html-root-url=proc_macro=https://doc.rust-lang.org", 26 | ] 27 | 28 | [package.metadata.playground] 29 | features = ["span-locations"] 30 | 31 | [dependencies] 32 | unicode-ident = "1.0" 33 | 34 | [dev-dependencies] 35 | flate2 = "1.0" 36 | quote = { version = "1.0", default-features = false } 37 | rayon = "1.0" 38 | rustversion = "1" 39 | tar = "0.4" 40 | 41 | [features] 42 | proc-macro = [] 43 | default = ["proc-macro"] 44 | 45 | # Expose methods Span::start and Span::end which give the line/column location 46 | # of a token. 47 | span-locations = [] 48 | 49 | # This feature no longer means anything.
50 | nightly = [] 51 | 52 | [workspace] 53 | members = ["benches/bench-libproc-macro", "tests/ui"] 54 | 55 | [patch.crates-io] 56 | # Our doc tests depend on quote which depends on proc-macro2. Without this line, 57 | # the proc-macro2 dependency of quote would be the released version of 58 | # proc-macro2. Quote would implement its traits for types from that proc-macro2, 59 | # meaning impls would be missing when tested against types from the local 60 | # proc-macro2. 61 | # 62 | # GitHub Actions builds that are in progress at the time that you publish may 63 | # spuriously fail. This is because they'll be building a local proc-macro2 which 64 | # carries the second-most-recent version number, pulling in quote which resolves 65 | # to a dependency on the just-published most recent version number. Thus the 66 | # patch will fail to apply because the version numbers are different. 67 | proc-macro2 = { path = "." } 68 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Permission is hereby granted, free of charge, to any 2 | person obtaining a copy of this software and associated 3 | documentation files (the "Software"), to deal in the 4 | Software without restriction, including without 5 | limitation the rights to use, copy, modify, merge, 6 | publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software 8 | is furnished to do so, subject to the following 9 | conditions: 10 | 11 | The above copyright notice and this permission notice 12 | shall be included in all copies or substantial portions 13 | of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 16 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 17 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 18 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 19 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 20 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 22 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 23 | DEALINGS IN THE SOFTWARE. 
24 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # proc-macro2 2 | 3 | [github](https://github.com/dtolnay/proc-macro2) 4 | [crates.io](https://crates.io/crates/proc-macro2) 5 | [docs.rs](https://docs.rs/proc-macro2) 6 | [build status](https://github.com/dtolnay/proc-macro2/actions?query=branch%3Amaster) 7 | 8 | A wrapper around the procedural macro API of the compiler's `proc_macro` crate. 9 | This library serves two purposes: 10 | 11 | - **Bring proc-macro-like functionality to other contexts like build.rs and 12 | main.rs.** Types from `proc_macro` are entirely specific to procedural macros 13 | and cannot ever exist in code outside of a procedural macro. Meanwhile 14 | `proc_macro2` types may exist anywhere including non-macro code. By developing 15 | foundational libraries like [syn] and [quote] against `proc_macro2` rather 16 | than `proc_macro`, the procedural macro ecosystem becomes easily applicable to 17 | many other use cases and we avoid reimplementing non-macro equivalents of 18 | those libraries. 19 | 20 | - **Make procedural macros unit testable.** As a consequence of being specific 21 | to procedural macros, nothing that uses `proc_macro` can be executed from a 22 | unit test. In order for helper libraries or components of a macro to be 23 | testable in isolation, they must be implemented using `proc_macro2`. 24 | 25 | [syn]: https://github.com/dtolnay/syn 26 | [quote]: https://github.com/dtolnay/quote 27 | 28 | ## Usage 29 | 30 | ```toml 31 | [dependencies] 32 | proc-macro2 = "1.0" 33 | ``` 34 | 35 | The skeleton of a typical procedural macro typically looks like this: 36 | 37 | ```rust 38 | extern crate proc_macro; 39 | 40 | #[proc_macro_derive(MyDerive)] 41 | pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream { 42 | let input = proc_macro2::TokenStream::from(input); 43 | 44 | let output: proc_macro2::TokenStream = { 45 | /* transform input */ 46 | }; 47 | 48 | proc_macro::TokenStream::from(output) 49 | } 50 | ``` 51 | 52 | If parsing with [Syn], you'll use [`parse_macro_input!`] instead to propagate 53 | parse errors correctly back to the compiler when parsing fails. 54 | 55 | [`parse_macro_input!`]: https://docs.rs/syn/2.0/syn/macro.parse_macro_input.html 56 | 57 | ## Unstable features 58 | 59 | The default feature set of proc-macro2 tracks the most recent stable compiler 60 | API. Functionality in `proc_macro` that is not yet stable is not exposed by 61 | proc-macro2 by default. 62 | 63 | To opt into the additional APIs available in the most recent nightly compiler, 64 | the `procmacro2_semver_exempt` config flag must be passed to rustc. We will 65 | polyfill those nightly-only APIs back to Rust 1.60.0. As these are unstable APIs 66 | that track the nightly compiler, minor versions of proc-macro2 may make breaking 67 | changes to them at any time. 68 | 69 | ``` 70 | RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build 71 | ``` 72 | 73 | Note that this must not only be done for your crate, but for any crate that 74 | depends on your crate. This infectious nature is intentional, as it serves as a 75 | reminder that you are outside of the normal semver guarantees. 76 | 77 | Semver exempt methods are marked as such in the proc-macro2 documentation. 78 | 79 |
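As a quick illustration of the non-macro and unit test use cases described above, the following sketch (not part of this repository; the test name and tokens are made up for illustration) exercises `proc_macro2::TokenStream` from an ordinary `#[test]` function, something that would panic if attempted with the compiler's own `proc_macro` types:

```rust
use proc_macro2::TokenStream;
use std::str::FromStr;

#[test]
fn tokenizes_outside_of_a_procedural_macro() {
    // proc_macro2 types work in ordinary code, so this runs under `cargo test`.
    let tokens = TokenStream::from_str("fn answer() -> u8 { 42 }").unwrap();

    // Round-trip through the Display impl and parse the result again.
    let reparsed: TokenStream = tokens.to_string().parse().unwrap();
    assert_eq!(tokens.to_string(), reparsed.to_string());
}
```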
80 | 81 | #### License 82 | 83 | 84 | Licensed under either of Apache License, Version 85 | 2.0 or MIT license at your option. 86 | 87 | 88 |
89 | 90 | 91 | Unless you explicitly state otherwise, any contribution intentionally submitted 92 | for inclusion in this crate by you, as defined in the Apache-2.0 license, shall 93 | be dual licensed as above, without any additional terms or conditions. 94 | 95 | -------------------------------------------------------------------------------- /benches/bench-libproc-macro/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "bench-libproc-macro" 3 | version = "0.0.0" 4 | authors = ["David Tolnay <dtolnay@gmail.com>"] 5 | edition = "2018" 6 | publish = false 7 | 8 | [lib] 9 | path = "lib.rs" 10 | proc-macro = true 11 | 12 | [[bin]] 13 | name = "bench-libproc-macro" 14 | path = "main.rs" 15 | -------------------------------------------------------------------------------- /benches/bench-libproc-macro/README.md: -------------------------------------------------------------------------------- 1 | Example output: 2 | 3 | ```console 4 | $ cargo check --release 5 | 6 | Compiling bench-libproc-macro v0.0.0 7 | STRING: 37 millis 8 | TOKENSTREAM: 276 millis 9 | Finished release [optimized] target(s) in 1.16s 10 | ``` 11 | -------------------------------------------------------------------------------- /benches/bench-libproc-macro/lib.rs: -------------------------------------------------------------------------------- 1 | extern crate proc_macro; 2 | 3 | use proc_macro::{Ident, Punct, Spacing, Span, TokenStream, TokenTree}; 4 | use std::iter::once; 5 | use std::time::Instant; 6 | 7 | const N: u32 = 20000; 8 | 9 | #[proc_macro] 10 | pub fn bench(_input: TokenStream) -> TokenStream { 11 | let start = Instant::now(); 12 | let mut string = String::new(); 13 | for _ in 0..N { 14 | string += "core"; 15 | string += ":"; 16 | string += ":"; 17 | string += "option"; 18 | string += ":"; 19 | string += ":"; 20 | string += "Option"; 21 | string += ":"; 22 | string += ":"; 23 | string += "None"; 24 | string += ","; 25 | } 26 | string.parse::<TokenStream>().unwrap(); 27 | eprintln!("STRING: {} millis", start.elapsed().as_millis()); 28 | 29 | let start = Instant::now(); 30 | let span = Span::call_site(); 31 | let mut tokens = TokenStream::new(); 32 | for _ in 0..N { 33 | // Similar to what is emitted by quote.
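// Each `::` below is assembled from two ':' `Punct` tokens: the first with
// `Spacing::Joint` to glue it to the ':' that follows, and the second with
// `Spacing::Alone` to terminate the path separator, mirroring the string
// built in the loop above.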
34 | tokens.extend(once(TokenTree::Ident(Ident::new("core", span)))); 35 | tokens.extend(once(TokenTree::Punct(Punct::new(':', Spacing::Joint)))); 36 | tokens.extend(once(TokenTree::Punct(Punct::new(':', Spacing::Alone)))); 37 | tokens.extend(once(TokenTree::Ident(Ident::new("option", span)))); 38 | tokens.extend(once(TokenTree::Punct(Punct::new(':', Spacing::Joint)))); 39 | tokens.extend(once(TokenTree::Punct(Punct::new(':', Spacing::Alone)))); 40 | tokens.extend(once(TokenTree::Ident(Ident::new("Option", span)))); 41 | tokens.extend(once(TokenTree::Punct(Punct::new(':', Spacing::Joint)))); 42 | tokens.extend(once(TokenTree::Punct(Punct::new(':', Spacing::Alone)))); 43 | tokens.extend(once(TokenTree::Ident(Ident::new("None", span)))); 44 | tokens.extend(once(TokenTree::Punct(Punct::new(',', Spacing::Joint)))); 45 | } 46 | eprintln!("TOKENSTREAM: {} millis", start.elapsed().as_millis()); 47 | 48 | TokenStream::new() 49 | } 50 | -------------------------------------------------------------------------------- /benches/bench-libproc-macro/main.rs: -------------------------------------------------------------------------------- 1 | bench_libproc_macro::bench!(); 2 | 3 | fn main() {} 4 | -------------------------------------------------------------------------------- /build.rs: -------------------------------------------------------------------------------- 1 | #![allow(unknown_lints)] 2 | #![allow(unexpected_cfgs)] 3 | #![allow(clippy::uninlined_format_args)] 4 | 5 | use std::env; 6 | use std::ffi::OsString; 7 | use std::fs; 8 | use std::io::ErrorKind; 9 | use std::iter; 10 | use std::path::Path; 11 | use std::process::{self, Command, Stdio}; 12 | use std::str; 13 | 14 | fn main() { 15 | let rustc = rustc_minor_version().unwrap_or(u32::MAX); 16 | 17 | if rustc >= 80 { 18 | println!("cargo:rustc-check-cfg=cfg(fuzzing)"); 19 | println!("cargo:rustc-check-cfg=cfg(no_is_available)"); 20 | println!("cargo:rustc-check-cfg=cfg(no_literal_byte_character)"); 21 | println!("cargo:rustc-check-cfg=cfg(no_literal_c_string)"); 22 | println!("cargo:rustc-check-cfg=cfg(no_source_text)"); 23 | println!("cargo:rustc-check-cfg=cfg(proc_macro_span)"); 24 | println!("cargo:rustc-check-cfg=cfg(proc_macro_span_file)"); 25 | println!("cargo:rustc-check-cfg=cfg(proc_macro_span_location)"); 26 | println!("cargo:rustc-check-cfg=cfg(procmacro2_backtrace)"); 27 | println!("cargo:rustc-check-cfg=cfg(procmacro2_build_probe)"); 28 | println!("cargo:rustc-check-cfg=cfg(procmacro2_nightly_testing)"); 29 | println!("cargo:rustc-check-cfg=cfg(procmacro2_semver_exempt)"); 30 | println!("cargo:rustc-check-cfg=cfg(randomize_layout)"); 31 | println!("cargo:rustc-check-cfg=cfg(span_locations)"); 32 | println!("cargo:rustc-check-cfg=cfg(super_unstable)"); 33 | println!("cargo:rustc-check-cfg=cfg(wrap_proc_macro)"); 34 | } 35 | 36 | let semver_exempt = cfg!(procmacro2_semver_exempt); 37 | if semver_exempt { 38 | // https://github.com/dtolnay/proc-macro2/issues/147 39 | println!("cargo:rustc-cfg=procmacro2_semver_exempt"); 40 | } 41 | 42 | if semver_exempt || cfg!(feature = "span-locations") { 43 | // Provide methods Span::start and Span::end which give the line/column 44 | // location of a token. This is behind a cfg because tracking location 45 | // inside spans is a performance hit. 46 | println!("cargo:rustc-cfg=span_locations"); 47 | } 48 | 49 | if rustc < 57 { 50 | // Do not use proc_macro::is_available() to detect whether the proc 51 | // macro API is available vs needs to be polyfilled. 
Instead, use the 52 | // proc macro API unconditionally and catch the panic that occurs if it 53 | // isn't available. 54 | println!("cargo:rustc-cfg=no_is_available"); 55 | } 56 | 57 | if rustc < 66 { 58 | // Do not call libproc_macro's Span::source_text. Always return None. 59 | println!("cargo:rustc-cfg=no_source_text"); 60 | } 61 | 62 | if rustc < 79 { 63 | // Do not call Literal::byte_character nor Literal::c_string. They can 64 | // be emulated by way of Literal::from_str. 65 | println!("cargo:rustc-cfg=no_literal_byte_character"); 66 | println!("cargo:rustc-cfg=no_literal_c_string"); 67 | } 68 | 69 | if !cfg!(feature = "proc-macro") { 70 | println!("cargo:rerun-if-changed=build.rs"); 71 | return; 72 | } 73 | 74 | let proc_macro_span; 75 | let consider_rustc_bootstrap; 76 | if compile_probe_unstable("proc_macro_span", false) { 77 | // This is a nightly or dev compiler, so it supports unstable features 78 | // regardless of RUSTC_BOOTSTRAP. No need to rerun build script if 79 | // RUSTC_BOOTSTRAP is changed. 80 | proc_macro_span = true; 81 | consider_rustc_bootstrap = false; 82 | } else if let Some(rustc_bootstrap) = env::var_os("RUSTC_BOOTSTRAP") { 83 | if compile_probe_unstable("proc_macro_span", true) { 84 | // This is a stable or beta compiler for which the user has set 85 | // RUSTC_BOOTSTRAP to turn on unstable features. Rerun build script 86 | // if they change it. 87 | proc_macro_span = true; 88 | consider_rustc_bootstrap = true; 89 | } else if rustc_bootstrap == "1" { 90 | // This compiler does not support the proc macro Span API in the 91 | // form that proc-macro2 expects. No need to pay attention to 92 | // RUSTC_BOOTSTRAP. 93 | proc_macro_span = false; 94 | consider_rustc_bootstrap = false; 95 | } else { 96 | // This is a stable or beta compiler for which RUSTC_BOOTSTRAP is 97 | // set to restrict the use of unstable features by this crate. 98 | proc_macro_span = false; 99 | consider_rustc_bootstrap = true; 100 | } 101 | } else { 102 | // Without RUSTC_BOOTSTRAP, this compiler does not support the proc 103 | // macro Span API in the form that proc-macro2 expects, but try again if 104 | // the user turns on unstable features. 105 | proc_macro_span = false; 106 | consider_rustc_bootstrap = true; 107 | } 108 | 109 | if proc_macro_span || !semver_exempt { 110 | // Wrap types from libproc_macro rather than polyfilling the whole API. 111 | // Enabled as long as procmacro2_semver_exempt is not set, because we 112 | // can't emulate the unstable API without emulating everything else. 113 | // Also enabled unconditionally on nightly, in which case the 114 | // procmacro2_semver_exempt surface area is implemented by using the 115 | // nightly-only proc_macro API. 116 | println!("cargo:rustc-cfg=wrap_proc_macro"); 117 | } 118 | 119 | if proc_macro_span { 120 | // Enable non-dummy behavior of Span::byte_range and Span::join methods 121 | // which requires an unstable compiler feature. Enabled when building 122 | // with nightly, unless `-Z allow-feature` in RUSTFLAGS disallows 123 | // unstable features. 124 | println!("cargo:rustc-cfg=proc_macro_span"); 125 | } 126 | 127 | if proc_macro_span || (rustc >= 88 && compile_probe_stable("proc_macro_span_location")) { 128 | // Enable non-dummy behavior of Span::start and Span::end methods on 129 | // Rust 1.88+. 
130 | println!("cargo:rustc-cfg=proc_macro_span_location"); 131 | } 132 | 133 | if proc_macro_span || (rustc >= 88 && compile_probe_stable("proc_macro_span_file")) { 134 | // Enable non-dummy behavior of Span::file and Span::local_file methods 135 | // on Rust 1.88+. 136 | println!("cargo:rustc-cfg=proc_macro_span_file"); 137 | } 138 | 139 | if semver_exempt && proc_macro_span { 140 | // Implement the semver exempt API in terms of the nightly-only 141 | // proc_macro API. 142 | println!("cargo:rustc-cfg=super_unstable"); 143 | } 144 | 145 | if consider_rustc_bootstrap { 146 | println!("cargo:rerun-if-env-changed=RUSTC_BOOTSTRAP"); 147 | } 148 | } 149 | 150 | fn compile_probe_unstable(feature: &str, rustc_bootstrap: bool) -> bool { 151 | // RUSTC_STAGE indicates that this crate is being compiled as a dependency 152 | // of a multistage rustc bootstrap. This environment uses Cargo in a highly 153 | // non-standard way with issues such as: 154 | // 155 | // https://github.com/rust-lang/cargo/issues/11138 156 | // https://github.com/rust-lang/rust/issues/114839 157 | // 158 | env::var_os("RUSTC_STAGE").is_none() && do_compile_probe(feature, rustc_bootstrap) 159 | } 160 | 161 | fn compile_probe_stable(feature: &str) -> bool { 162 | env::var_os("RUSTC_STAGE").is_some() || do_compile_probe(feature, true) 163 | } 164 | 165 | fn do_compile_probe(feature: &str, rustc_bootstrap: bool) -> bool { 166 | println!("cargo:rerun-if-changed=src/probe/{}.rs", feature); 167 | 168 | let rustc = cargo_env_var("RUSTC"); 169 | let out_dir = cargo_env_var("OUT_DIR"); 170 | let out_subdir = Path::new(&out_dir).join("probe"); 171 | let probefile = Path::new("src") 172 | .join("probe") 173 | .join(feature) 174 | .with_extension("rs"); 175 | 176 | if let Err(err) = fs::create_dir(&out_subdir) { 177 | if err.kind() != ErrorKind::AlreadyExists { 178 | eprintln!("Failed to create {}: {}", out_subdir.display(), err); 179 | process::exit(1); 180 | } 181 | } 182 | 183 | let rustc_wrapper = env::var_os("RUSTC_WRAPPER").filter(|wrapper| !wrapper.is_empty()); 184 | let rustc_workspace_wrapper = 185 | env::var_os("RUSTC_WORKSPACE_WRAPPER").filter(|wrapper| !wrapper.is_empty()); 186 | let mut rustc = rustc_wrapper 187 | .into_iter() 188 | .chain(rustc_workspace_wrapper) 189 | .chain(iter::once(rustc)); 190 | let mut cmd = Command::new(rustc.next().unwrap()); 191 | cmd.args(rustc); 192 | 193 | if !rustc_bootstrap { 194 | cmd.env_remove("RUSTC_BOOTSTRAP"); 195 | } 196 | 197 | cmd.stderr(Stdio::null()) 198 | .arg("--cfg=procmacro2_build_probe") 199 | .arg("--edition=2021") 200 | .arg("--crate-name=proc_macro2") 201 | .arg("--crate-type=lib") 202 | .arg("--cap-lints=allow") 203 | .arg("--emit=dep-info,metadata") 204 | .arg("--out-dir") 205 | .arg(&out_subdir) 206 | .arg(probefile); 207 | 208 | if let Some(target) = env::var_os("TARGET") { 209 | cmd.arg("--target").arg(target); 210 | } 211 | 212 | // If Cargo wants to set RUSTFLAGS, use that. 213 | if let Ok(rustflags) = env::var("CARGO_ENCODED_RUSTFLAGS") { 214 | if !rustflags.is_empty() { 215 | for arg in rustflags.split('\x1f') { 216 | cmd.arg(arg); 217 | } 218 | } 219 | } 220 | 221 | let success = match cmd.status() { 222 | Ok(status) => status.success(), 223 | Err(_) => false, 224 | }; 225 | 226 | // Clean up to avoid leaving nondeterministic absolute paths in the dep-info 227 | // file in OUT_DIR, which causes nonreproducible builds in build systems 228 | // that treat the entire OUT_DIR as an artifact. 
229 | if let Err(err) = fs::remove_dir_all(&out_subdir) { 230 | // libc::ENOTEMPTY 231 | // Some filesystems (NFSv3) have timing issues under load where '.nfs*' 232 | // dummy files can continue to get created for a short period after the 233 | // probe command completes, breaking remove_dir_all. 234 | // To be replaced with ErrorKind::DirectoryNotEmpty (Rust 1.83+). 235 | const ENOTEMPTY: i32 = 39; 236 | 237 | if !(err.kind() == ErrorKind::NotFound 238 | || (cfg!(target_os = "linux") && err.raw_os_error() == Some(ENOTEMPTY))) 239 | { 240 | eprintln!("Failed to clean up {}: {}", out_subdir.display(), err); 241 | process::exit(1); 242 | } 243 | } 244 | 245 | success 246 | } 247 | 248 | fn rustc_minor_version() -> Option<u32> { 249 | let rustc = cargo_env_var("RUSTC"); 250 | let output = Command::new(rustc).arg("--version").output().ok()?; 251 | let version = str::from_utf8(&output.stdout).ok()?; 252 | let mut pieces = version.split('.'); 253 | if pieces.next() != Some("rustc 1") { 254 | return None; 255 | } 256 | pieces.next()?.parse().ok() 257 | } 258 | 259 | fn cargo_env_var(key: &str) -> OsString { 260 | env::var_os(key).unwrap_or_else(|| { 261 | eprintln!( 262 | "Environment variable ${} is not set during execution of build script", 263 | key, 264 | ); 265 | process::exit(1); 266 | }) 267 | } 268 | -------------------------------------------------------------------------------- /fuzz/.gitignore: -------------------------------------------------------------------------------- 1 | /artifacts/ 2 | /corpus/ 3 | /coverage/ 4 | /hfuzz_target/ 5 | /hfuzz_workspace/ 6 | /in/ 7 | /out/ 8 | /target/ 9 | /Cargo.lock 10 | -------------------------------------------------------------------------------- /fuzz/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "proc-macro2-fuzz" 3 | version = "0.0.0" 4 | authors = ["David Tolnay <dtolnay@gmail.com>"] 5 | edition = "2021" 6 | publish = false 7 | 8 | [package.metadata] 9 | cargo-fuzz = true 10 | 11 | [dependencies] 12 | afl = { version = "0.16", optional = true } 13 | honggfuzz = { version = "0.5", optional = true } 14 | libfuzzer-sys = { version = "0.4.7", optional = true } 15 | proc-macro2 = { path = "..", default-features = false } 16 | 17 | [features] 18 | default = ["libfuzzer"] 19 | afl = ["dep:afl"] 20 | honggfuzz = ["dep:honggfuzz"] 21 | libfuzzer = ["dep:libfuzzer-sys"] 22 | span-locations = ["proc-macro2/span-locations"] 23 | 24 | [[bin]] 25 | name = "parse_token_stream" 26 | path = "fuzz_targets/parse_token_stream.rs" 27 | test = false 28 | doc = false 29 | 30 | [workspace] 31 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/parse_token_stream.rs: -------------------------------------------------------------------------------- 1 | // libfuzzer: 2 | // 3 | // cargo install cargo-fuzz 4 | // cargo fuzz run parse_token_stream -j $(nproc) -- -max_len=200 -timeout=1 5 | // 6 | // afl++: 7 | // 8 | // cargo install cargo-afl 9 | // cargo afl build --no-default-features --features afl --release 10 | // cargo afl fuzz -i in -o out target/release/parse_token_stream 11 | // 12 | // honggfuzz: 13 | // 14 | // cargo install honggfuzz 15 | // cargo hfuzz build --no-default-features --features honggfuzz 16 | // HFUZZ_RUN_ARGS="--threads $(nproc) --max_file_size 200 --timeout 1" cargo hfuzz run parse_token_stream 17 | 18 | #![cfg_attr(feature = "libfuzzer", no_main)] 19 | 20 | use std::str; 21 | 22 | #[cfg(not(any( 23 | all( 24 | feature = "libfuzzer", 25 |
not(feature = "afl"), 26 | not(feature = "honggfuzz") 27 | ), 28 | all( 29 | not(feature = "libfuzzer"), 30 | feature = "afl", 31 | not(feature = "honggfuzz") 32 | ), 33 | all( 34 | not(feature = "libfuzzer"), 35 | not(feature = "afl"), 36 | feature = "honggfuzz" 37 | ), 38 | )))] 39 | fn main() { 40 | compile_error! { 41 | r#"exactly one of feature="libfuzzer" or feature="afl" or feature="honggfuzz" must be enabled"# 42 | } 43 | } 44 | 45 | #[cfg(feature = "libfuzzer")] 46 | libfuzzer_sys::fuzz_target!(|bytes: &[u8]| do_fuzz(bytes)); 47 | 48 | #[cfg(feature = "afl")] 49 | fn main() { 50 | let hook = true; // turn panic into crashes 51 | afl::fuzz(hook, do_fuzz); 52 | } 53 | 54 | #[cfg(feature = "honggfuzz")] 55 | fn main() { 56 | loop { 57 | honggfuzz::fuzz(do_fuzz); 58 | } 59 | } 60 | 61 | fn do_fuzz(bytes: &[u8]) { 62 | let ..=199 = bytes.len() else { return }; 63 | let Ok(string) = str::from_utf8(bytes) else { 64 | return; 65 | }; 66 | let _ = string.parse::<proc_macro2::TokenStream>(); 67 | } 68 | -------------------------------------------------------------------------------- /rust-toolchain.toml: -------------------------------------------------------------------------------- 1 | [toolchain] 2 | components = ["rust-src"] 3 | -------------------------------------------------------------------------------- /src/detection.rs: -------------------------------------------------------------------------------- 1 | use core::sync::atomic::{AtomicUsize, Ordering}; 2 | use std::sync::Once; 3 | 4 | static WORKS: AtomicUsize = AtomicUsize::new(0); 5 | static INIT: Once = Once::new(); 6 | 7 | pub(crate) fn inside_proc_macro() -> bool { 8 | match WORKS.load(Ordering::Relaxed) { 9 | 1 => return false, 10 | 2 => return true, 11 | _ => {} 12 | } 13 | 14 | INIT.call_once(initialize); 15 | inside_proc_macro() 16 | } 17 | 18 | pub(crate) fn force_fallback() { 19 | WORKS.store(1, Ordering::Relaxed); 20 | } 21 | 22 | pub(crate) fn unforce_fallback() { 23 | initialize(); 24 | } 25 | 26 | #[cfg(not(no_is_available))] 27 | fn initialize() { 28 | let available = proc_macro::is_available(); 29 | WORKS.store(available as usize + 1, Ordering::Relaxed); 30 | } 31 | 32 | // Swap in a null panic hook to avoid printing "thread panicked" to stderr, 33 | // then use catch_unwind to determine whether the compiler's proc_macro is 34 | // working. When proc-macro2 is used from outside of a procedural macro all 35 | // of the proc_macro crate's APIs currently panic. 36 | // 37 | // The Once is to prevent the possibility of this ordering: 38 | // 39 | // thread 1 calls take_hook, gets the user's original hook 40 | // thread 1 calls set_hook with the null hook 41 | // thread 2 calls take_hook, thinks null hook is the original hook 42 | // thread 2 calls set_hook with the null hook 43 | // thread 1 calls set_hook with the actual original hook 44 | // thread 2 calls set_hook with what it thinks is the original hook 45 | // 46 | // in which the user's hook has been lost. 47 | // 48 | // There is still a race condition where a panic in a different thread can 49 | // happen during the interval that the user's original panic hook is 50 | // unregistered such that their hook is incorrectly not called. This is 51 | // sufficiently unlikely and less bad than printing panic messages to stderr 52 | // on correct use of this crate. Maybe there is a libstd feature request 53 | // here. For now, if a user needs to guarantee that this failure mode does 54 | // not occur, they need to call e.g.
`proc_macro2::Span::call_site()` from 55 | // the main thread before launching any other threads. 56 | #[cfg(no_is_available)] 57 | fn initialize() { 58 | use std::panic::{self, PanicInfo}; 59 | 60 | type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static; 61 | 62 | let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ }); 63 | let sanity_check = &*null_hook as *const PanicHook; 64 | let original_hook = panic::take_hook(); 65 | panic::set_hook(null_hook); 66 | 67 | let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok(); 68 | WORKS.store(works as usize + 1, Ordering::Relaxed); 69 | 70 | let hopefully_null_hook = panic::take_hook(); 71 | panic::set_hook(original_hook); 72 | if sanity_check != &*hopefully_null_hook { 73 | panic!("observed race condition in proc_macro2::inside_proc_macro"); 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /src/extra.rs: -------------------------------------------------------------------------------- 1 | //! Items which do not have a correspondence to any API in the proc_macro crate, 2 | //! but are necessary to include in proc-macro2. 3 | 4 | use crate::fallback; 5 | use crate::imp; 6 | use crate::marker::{ProcMacroAutoTraits, MARKER}; 7 | use crate::Span; 8 | use core::fmt::{self, Debug}; 9 | 10 | /// Invalidate any `proc_macro2::Span` that exists on the current thread. 11 | /// 12 | /// The implementation of `Span` uses thread-local data structures and this 13 | /// function clears them. Calling any method on a `Span` on the current thread 14 | /// created prior to the invalidation will return incorrect values or crash. 15 | /// 16 | /// This function is useful for programs that process more than 2<sup>32</sup> 17 | /// bytes of Rust source code on the same thread. Just like rustc, proc-macro2 18 | /// uses 32-bit source locations, and these wrap around when the total source 19 | /// code processed by the same thread exceeds 2<sup>32</sup> bytes (4 20 | /// gigabytes). After a wraparound, `Span` methods such as `source_text()` can 21 | /// return wrong data. 22 | /// 23 | /// # Example 24 | /// 25 | /// As of late 2023, there is 200 GB of Rust code published on crates.io. 26 | /// Looking at just the newest version of every crate, it is 16 GB of code. So a 27 | /// workload that involves parsing it all would overflow a 32-bit source 28 | /// location unless spans are being invalidated. 29 | /// 30 | /// ``` 31 | /// use flate2::read::GzDecoder; 32 | /// use std::ffi::OsStr; 33 | /// use std::io::{BufReader, Read}; 34 | /// use std::str::FromStr; 35 | /// use tar::Archive; 36 | /// 37 | /// rayon::scope(|s| { 38 | /// for krate in every_version_of_every_crate() { 39 | /// s.spawn(move |_| { 40 | /// proc_macro2::extra::invalidate_current_thread_spans(); 41 | /// 42 | /// let reader = BufReader::new(krate); 43 | /// let tar = GzDecoder::new(reader); 44 | /// let mut archive = Archive::new(tar); 45 | /// for entry in archive.entries().unwrap() { 46 | /// let mut entry = entry.unwrap(); 47 | /// let path = entry.path().unwrap(); 48 | /// if path.extension() != Some(OsStr::new("rs")) { 49 | /// continue; 50 | /// } 51 | /// let mut content = String::new(); 52 | /// entry.read_to_string(&mut content).unwrap(); 53 | /// match proc_macro2::TokenStream::from_str(&content) { 54 | /// Ok(tokens) => {/* ...
*/}, 55 | /// Err(_) => continue, 56 | /// } 57 | /// } 58 | /// }); 59 | /// } 60 | /// }); 61 | /// # 62 | /// # fn every_version_of_every_crate() -> Vec { 63 | /// # Vec::new() 64 | /// # } 65 | /// ``` 66 | /// 67 | /// # Panics 68 | /// 69 | /// This function is not applicable to and will panic if called from a 70 | /// procedural macro. 71 | #[cfg(span_locations)] 72 | #[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))] 73 | pub fn invalidate_current_thread_spans() { 74 | crate::imp::invalidate_current_thread_spans(); 75 | } 76 | 77 | /// An object that holds a [`Group`]'s `span_open()` and `span_close()` together 78 | /// in a more compact representation than holding those 2 spans individually. 79 | /// 80 | /// [`Group`]: crate::Group 81 | #[derive(Copy, Clone)] 82 | pub struct DelimSpan { 83 | inner: DelimSpanEnum, 84 | _marker: ProcMacroAutoTraits, 85 | } 86 | 87 | #[derive(Copy, Clone)] 88 | enum DelimSpanEnum { 89 | #[cfg(wrap_proc_macro)] 90 | Compiler { 91 | join: proc_macro::Span, 92 | open: proc_macro::Span, 93 | close: proc_macro::Span, 94 | }, 95 | Fallback(fallback::Span), 96 | } 97 | 98 | impl DelimSpan { 99 | pub(crate) fn new(group: &imp::Group) -> Self { 100 | #[cfg(wrap_proc_macro)] 101 | let inner = match group { 102 | imp::Group::Compiler(group) => DelimSpanEnum::Compiler { 103 | join: group.span(), 104 | open: group.span_open(), 105 | close: group.span_close(), 106 | }, 107 | imp::Group::Fallback(group) => DelimSpanEnum::Fallback(group.span()), 108 | }; 109 | 110 | #[cfg(not(wrap_proc_macro))] 111 | let inner = DelimSpanEnum::Fallback(group.span()); 112 | 113 | DelimSpan { 114 | inner, 115 | _marker: MARKER, 116 | } 117 | } 118 | 119 | /// Returns a span covering the entire delimited group. 120 | pub fn join(&self) -> Span { 121 | match &self.inner { 122 | #[cfg(wrap_proc_macro)] 123 | DelimSpanEnum::Compiler { join, .. } => Span::_new(imp::Span::Compiler(*join)), 124 | DelimSpanEnum::Fallback(span) => Span::_new_fallback(*span), 125 | } 126 | } 127 | 128 | /// Returns a span for the opening punctuation of the group only. 129 | pub fn open(&self) -> Span { 130 | match &self.inner { 131 | #[cfg(wrap_proc_macro)] 132 | DelimSpanEnum::Compiler { open, .. } => Span::_new(imp::Span::Compiler(*open)), 133 | DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.first_byte()), 134 | } 135 | } 136 | 137 | /// Returns a span for the closing punctuation of the group only. 138 | pub fn close(&self) -> Span { 139 | match &self.inner { 140 | #[cfg(wrap_proc_macro)] 141 | DelimSpanEnum::Compiler { close, .. } => Span::_new(imp::Span::Compiler(*close)), 142 | DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.last_byte()), 143 | } 144 | } 145 | } 146 | 147 | impl Debug for DelimSpan { 148 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 149 | Debug::fmt(&self.join(), f) 150 | } 151 | } 152 | -------------------------------------------------------------------------------- /src/location.rs: -------------------------------------------------------------------------------- 1 | use core::cmp::Ordering; 2 | 3 | /// A line-column pair representing the start or end of a `Span`. 4 | /// 5 | /// This type is semver exempt and not exposed by default. 6 | #[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))] 7 | #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] 8 | pub struct LineColumn { 9 | /// The 1-indexed line in the source file on which the span starts or ends 10 | /// (inclusive). 
11 | pub line: usize, 12 | /// The 0-indexed column (in UTF-8 characters) in the source file on which 13 | /// the span starts or ends (inclusive). 14 | pub column: usize, 15 | } 16 | 17 | impl Ord for LineColumn { 18 | fn cmp(&self, other: &Self) -> Ordering { 19 | self.line 20 | .cmp(&other.line) 21 | .then(self.column.cmp(&other.column)) 22 | } 23 | } 24 | 25 | impl PartialOrd for LineColumn { 26 | fn partial_cmp(&self, other: &Self) -> Option<Ordering> { 27 | Some(self.cmp(other)) 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /src/marker.rs: -------------------------------------------------------------------------------- 1 | use alloc::rc::Rc; 2 | use core::marker::PhantomData; 3 | use core::panic::{RefUnwindSafe, UnwindSafe}; 4 | 5 | // Zero sized marker with the correct set of autotrait impls we want all proc 6 | // macro types to have. 7 | #[derive(Copy, Clone)] 8 | #[cfg_attr( 9 | all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)), 10 | derive(PartialEq, Eq) 11 | )] 12 | pub(crate) struct ProcMacroAutoTraits(PhantomData<Rc<()>>); 13 | 14 | pub(crate) const MARKER: ProcMacroAutoTraits = ProcMacroAutoTraits(PhantomData); 15 | 16 | impl UnwindSafe for ProcMacroAutoTraits {} 17 | impl RefUnwindSafe for ProcMacroAutoTraits {} 18 | -------------------------------------------------------------------------------- /src/num.rs: -------------------------------------------------------------------------------- 1 | // TODO: use NonZero in Rust 1.89+ 2 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] 3 | pub struct NonZeroChar(char); 4 | 5 | impl NonZeroChar { 6 | pub fn new(ch: char) -> Option<Self> { 7 | if ch == '\0' { 8 | None 9 | } else { 10 | Some(NonZeroChar(ch)) 11 | } 12 | } 13 | 14 | pub fn get(self) -> char { 15 | self.0 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /src/parse.rs: -------------------------------------------------------------------------------- 1 | use crate::fallback::{ 2 | self, is_ident_continue, is_ident_start, Group, Ident, LexError, Literal, Span, TokenStream, 3 | TokenStreamBuilder, 4 | }; 5 | use crate::{Delimiter, Punct, Spacing, TokenTree}; 6 | use core::char; 7 | use core::str::{Bytes, CharIndices, Chars}; 8 | 9 | #[derive(Copy, Clone, Eq, PartialEq)] 10 | pub(crate) struct Cursor<'a> { 11 | pub(crate) rest: &'a str, 12 | #[cfg(span_locations)] 13 | pub(crate) off: u32, 14 | } 15 | 16 | impl<'a> Cursor<'a> { 17 | pub(crate) fn advance(&self, bytes: usize) -> Cursor<'a> { 18 | let (_front, rest) = self.rest.split_at(bytes); 19 | Cursor { 20 | rest, 21 | #[cfg(span_locations)] 22 | off: self.off + _front.chars().count() as u32, 23 | } 24 | } 25 | 26 | pub(crate) fn starts_with(&self, s: &str) -> bool { 27 | self.rest.starts_with(s) 28 | } 29 | 30 | pub(crate) fn starts_with_char(&self, ch: char) -> bool { 31 | self.rest.starts_with(ch) 32 | } 33 | 34 | pub(crate) fn starts_with_fn<Pattern>(&self, f: Pattern) -> bool 35 | where 36 | Pattern: FnMut(char) -> bool, 37 | { 38 | self.rest.starts_with(f) 39 | } 40 | 41 | pub(crate) fn is_empty(&self) -> bool { 42 | self.rest.is_empty() 43 | } 44 | 45 | fn len(&self) -> usize { 46 | self.rest.len() 47 | } 48 | 49 | fn as_bytes(&self) -> &'a [u8] { 50 | self.rest.as_bytes() 51 | } 52 | 53 | fn bytes(&self) -> Bytes<'a> { 54 | self.rest.bytes() 55 | } 56 | 57 | fn chars(&self) -> Chars<'a> { 58 | self.rest.chars() 59 | } 60 | 61 | fn char_indices(&self) -> CharIndices<'a> { 62 | self.rest.char_indices() 63 | } 64 | 65 | fn
parse(&self, tag: &str) -> Result<Cursor<'a>, Reject> { 66 | if self.starts_with(tag) { 67 | Ok(self.advance(tag.len())) 68 | } else { 69 | Err(Reject) 70 | } 71 | } 72 | } 73 | 74 | pub(crate) struct Reject; 75 | type PResult<'a, O> = Result<(Cursor<'a>, O), Reject>; 76 | 77 | fn skip_whitespace(input: Cursor) -> Cursor { 78 | let mut s = input; 79 | 80 | while !s.is_empty() { 81 | let byte = s.as_bytes()[0]; 82 | if byte == b'/' { 83 | if s.starts_with("//") 84 | && (!s.starts_with("///") || s.starts_with("////")) 85 | && !s.starts_with("//!") 86 | { 87 | let (cursor, _) = take_until_newline_or_eof(s); 88 | s = cursor; 89 | continue; 90 | } else if s.starts_with("/**/") { 91 | s = s.advance(4); 92 | continue; 93 | } else if s.starts_with("/*") 94 | && (!s.starts_with("/**") || s.starts_with("/***")) 95 | && !s.starts_with("/*!") 96 | { 97 | match block_comment(s) { 98 | Ok((rest, _)) => { 99 | s = rest; 100 | continue; 101 | } 102 | Err(Reject) => return s, 103 | } 104 | } 105 | } 106 | match byte { 107 | b' ' | 0x09..=0x0d => { 108 | s = s.advance(1); 109 | continue; 110 | } 111 | b if b.is_ascii() => {} 112 | _ => { 113 | let ch = s.chars().next().unwrap(); 114 | if is_whitespace(ch) { 115 | s = s.advance(ch.len_utf8()); 116 | continue; 117 | } 118 | } 119 | } 120 | return s; 121 | } 122 | s 123 | } 124 | 125 | fn block_comment(input: Cursor) -> PResult<&str> { 126 | if !input.starts_with("/*") { 127 | return Err(Reject); 128 | } 129 | 130 | let mut depth = 0usize; 131 | let bytes = input.as_bytes(); 132 | let mut i = 0usize; 133 | let upper = bytes.len() - 1; 134 | 135 | while i < upper { 136 | if bytes[i] == b'/' && bytes[i + 1] == b'*' { 137 | depth += 1; 138 | i += 1; // eat '*' 139 | } else if bytes[i] == b'*' && bytes[i + 1] == b'/' { 140 | depth -= 1; 141 | if depth == 0 { 142 | return Ok((input.advance(i + 2), &input.rest[..i + 2])); 143 | } 144 | i += 1; // eat '/' 145 | } 146 | i += 1; 147 | } 148 | 149 | Err(Reject) 150 | } 151 | 152 | fn is_whitespace(ch: char) -> bool { 153 | // Rust treats left-to-right mark and right-to-left mark as whitespace 154 | ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}' 155 | } 156 | 157 | fn word_break(input: Cursor) -> Result<Cursor, Reject> { 158 | match input.chars().next() { 159 | Some(ch) if is_ident_continue(ch) => Err(Reject), 160 | Some(_) | None => Ok(input), 161 | } 162 | } 163 | 164 | // Rustc's representation of a macro expansion error in expression position or 165 | // type position.
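// The tokenizer below treats this exact placeholder specially: `token_stream`
// does not open a parenthesized group for it, and `leaf_token` passes the
// whole placeholder through as a single opaque Literal token.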
166 | const ERROR: &str = "(/*ERROR*/)"; 167 | 168 | pub(crate) fn token_stream(mut input: Cursor) -> Result { 169 | let mut trees = TokenStreamBuilder::new(); 170 | let mut stack = Vec::new(); 171 | 172 | loop { 173 | input = skip_whitespace(input); 174 | 175 | if let Ok((rest, ())) = doc_comment(input, &mut trees) { 176 | input = rest; 177 | continue; 178 | } 179 | 180 | #[cfg(span_locations)] 181 | let lo = input.off; 182 | 183 | let first = match input.bytes().next() { 184 | Some(first) => first, 185 | None => match stack.last() { 186 | None => return Ok(trees.build()), 187 | #[cfg(span_locations)] 188 | Some((lo, _frame)) => { 189 | return Err(LexError { 190 | span: Span { lo: *lo, hi: *lo }, 191 | }) 192 | } 193 | #[cfg(not(span_locations))] 194 | Some(_frame) => return Err(LexError { span: Span {} }), 195 | }, 196 | }; 197 | 198 | if let Some(open_delimiter) = match first { 199 | b'(' if !input.starts_with(ERROR) => Some(Delimiter::Parenthesis), 200 | b'[' => Some(Delimiter::Bracket), 201 | b'{' => Some(Delimiter::Brace), 202 | _ => None, 203 | } { 204 | input = input.advance(1); 205 | let frame = (open_delimiter, trees); 206 | #[cfg(span_locations)] 207 | let frame = (lo, frame); 208 | stack.push(frame); 209 | trees = TokenStreamBuilder::new(); 210 | } else if let Some(close_delimiter) = match first { 211 | b')' => Some(Delimiter::Parenthesis), 212 | b']' => Some(Delimiter::Bracket), 213 | b'}' => Some(Delimiter::Brace), 214 | _ => None, 215 | } { 216 | let frame = match stack.pop() { 217 | Some(frame) => frame, 218 | None => return Err(lex_error(input)), 219 | }; 220 | #[cfg(span_locations)] 221 | let (lo, frame) = frame; 222 | let (open_delimiter, outer) = frame; 223 | if open_delimiter != close_delimiter { 224 | return Err(lex_error(input)); 225 | } 226 | input = input.advance(1); 227 | let mut g = Group::new(open_delimiter, trees.build()); 228 | g.set_span(Span { 229 | #[cfg(span_locations)] 230 | lo, 231 | #[cfg(span_locations)] 232 | hi: input.off, 233 | }); 234 | trees = outer; 235 | trees.push_token_from_parser(TokenTree::Group(crate::Group::_new_fallback(g))); 236 | } else { 237 | let (rest, mut tt) = match leaf_token(input) { 238 | Ok((rest, tt)) => (rest, tt), 239 | Err(Reject) => return Err(lex_error(input)), 240 | }; 241 | tt.set_span(crate::Span::_new_fallback(Span { 242 | #[cfg(span_locations)] 243 | lo, 244 | #[cfg(span_locations)] 245 | hi: rest.off, 246 | })); 247 | trees.push_token_from_parser(tt); 248 | input = rest; 249 | } 250 | } 251 | } 252 | 253 | fn lex_error(cursor: Cursor) -> LexError { 254 | #[cfg(not(span_locations))] 255 | let _ = cursor; 256 | LexError { 257 | span: Span { 258 | #[cfg(span_locations)] 259 | lo: cursor.off, 260 | #[cfg(span_locations)] 261 | hi: cursor.off, 262 | }, 263 | } 264 | } 265 | 266 | fn leaf_token(input: Cursor) -> PResult { 267 | if let Ok((input, l)) = literal(input) { 268 | // must be parsed before ident 269 | Ok((input, TokenTree::Literal(crate::Literal::_new_fallback(l)))) 270 | } else if let Ok((input, p)) = punct(input) { 271 | Ok((input, TokenTree::Punct(p))) 272 | } else if let Ok((input, i)) = ident(input) { 273 | Ok((input, TokenTree::Ident(i))) 274 | } else if input.starts_with(ERROR) { 275 | let rest = input.advance(ERROR.len()); 276 | let repr = crate::Literal::_new_fallback(Literal::_new(ERROR.to_owned())); 277 | Ok((rest, TokenTree::Literal(repr))) 278 | } else { 279 | Err(Reject) 280 | } 281 | } 282 | 283 | fn ident(input: Cursor) -> PResult { 284 | if [ 285 | "r\"", "r#\"", "r##", "b\"", "b\'", "br\"", 
"br#", "c\"", "cr\"", "cr#", 286 | ] 287 | .iter() 288 | .any(|prefix| input.starts_with(prefix)) 289 | { 290 | Err(Reject) 291 | } else { 292 | ident_any(input) 293 | } 294 | } 295 | 296 | fn ident_any(input: Cursor) -> PResult { 297 | let raw = input.starts_with("r#"); 298 | let rest = input.advance((raw as usize) << 1); 299 | 300 | let (rest, sym) = ident_not_raw(rest)?; 301 | 302 | if !raw { 303 | let ident = 304 | crate::Ident::_new_fallback(Ident::new_unchecked(sym, fallback::Span::call_site())); 305 | return Ok((rest, ident)); 306 | } 307 | 308 | match sym { 309 | "_" | "super" | "self" | "Self" | "crate" => return Err(Reject), 310 | _ => {} 311 | } 312 | 313 | let ident = 314 | crate::Ident::_new_fallback(Ident::new_raw_unchecked(sym, fallback::Span::call_site())); 315 | Ok((rest, ident)) 316 | } 317 | 318 | fn ident_not_raw(input: Cursor) -> PResult<&str> { 319 | let mut chars = input.char_indices(); 320 | 321 | match chars.next() { 322 | Some((_, ch)) if is_ident_start(ch) => {} 323 | _ => return Err(Reject), 324 | } 325 | 326 | let mut end = input.len(); 327 | for (i, ch) in chars { 328 | if !is_ident_continue(ch) { 329 | end = i; 330 | break; 331 | } 332 | } 333 | 334 | Ok((input.advance(end), &input.rest[..end])) 335 | } 336 | 337 | pub(crate) fn literal(input: Cursor) -> PResult { 338 | let rest = literal_nocapture(input)?; 339 | let end = input.len() - rest.len(); 340 | Ok((rest, Literal::_new(input.rest[..end].to_string()))) 341 | } 342 | 343 | fn literal_nocapture(input: Cursor) -> Result { 344 | if let Ok(ok) = string(input) { 345 | Ok(ok) 346 | } else if let Ok(ok) = byte_string(input) { 347 | Ok(ok) 348 | } else if let Ok(ok) = c_string(input) { 349 | Ok(ok) 350 | } else if let Ok(ok) = byte(input) { 351 | Ok(ok) 352 | } else if let Ok(ok) = character(input) { 353 | Ok(ok) 354 | } else if let Ok(ok) = float(input) { 355 | Ok(ok) 356 | } else if let Ok(ok) = int(input) { 357 | Ok(ok) 358 | } else { 359 | Err(Reject) 360 | } 361 | } 362 | 363 | fn literal_suffix(input: Cursor) -> Cursor { 364 | match ident_not_raw(input) { 365 | Ok((input, _)) => input, 366 | Err(Reject) => input, 367 | } 368 | } 369 | 370 | fn string(input: Cursor) -> Result { 371 | if let Ok(input) = input.parse("\"") { 372 | cooked_string(input) 373 | } else if let Ok(input) = input.parse("r") { 374 | raw_string(input) 375 | } else { 376 | Err(Reject) 377 | } 378 | } 379 | 380 | fn cooked_string(mut input: Cursor) -> Result { 381 | let mut chars = input.char_indices(); 382 | 383 | while let Some((i, ch)) = chars.next() { 384 | match ch { 385 | '"' => { 386 | let input = input.advance(i + 1); 387 | return Ok(literal_suffix(input)); 388 | } 389 | '\r' => match chars.next() { 390 | Some((_, '\n')) => {} 391 | _ => break, 392 | }, 393 | '\\' => match chars.next() { 394 | Some((_, 'x')) => { 395 | backslash_x_char(&mut chars)?; 396 | } 397 | Some((_, 'n' | 'r' | 't' | '\\' | '\'' | '"' | '0')) => {} 398 | Some((_, 'u')) => { 399 | backslash_u(&mut chars)?; 400 | } 401 | Some((newline, ch @ ('\n' | '\r'))) => { 402 | input = input.advance(newline + 1); 403 | trailing_backslash(&mut input, ch as u8)?; 404 | chars = input.char_indices(); 405 | } 406 | _ => break, 407 | }, 408 | _ch => {} 409 | } 410 | } 411 | Err(Reject) 412 | } 413 | 414 | fn raw_string(input: Cursor) -> Result { 415 | let (input, delimiter) = delimiter_of_raw_string(input)?; 416 | let mut bytes = input.bytes().enumerate(); 417 | while let Some((i, byte)) = bytes.next() { 418 | match byte { 419 | b'"' if input.rest[i + 
1..].starts_with(delimiter) => { 420 | let rest = input.advance(i + 1 + delimiter.len()); 421 | return Ok(literal_suffix(rest)); 422 | } 423 | b'\r' => match bytes.next() { 424 | Some((_, b'\n')) => {} 425 | _ => break, 426 | }, 427 | _ => {} 428 | } 429 | } 430 | Err(Reject) 431 | } 432 | 433 | fn byte_string(input: Cursor) -> Result { 434 | if let Ok(input) = input.parse("b\"") { 435 | cooked_byte_string(input) 436 | } else if let Ok(input) = input.parse("br") { 437 | raw_byte_string(input) 438 | } else { 439 | Err(Reject) 440 | } 441 | } 442 | 443 | fn cooked_byte_string(mut input: Cursor) -> Result { 444 | let mut bytes = input.bytes().enumerate(); 445 | while let Some((offset, b)) = bytes.next() { 446 | match b { 447 | b'"' => { 448 | let input = input.advance(offset + 1); 449 | return Ok(literal_suffix(input)); 450 | } 451 | b'\r' => match bytes.next() { 452 | Some((_, b'\n')) => {} 453 | _ => break, 454 | }, 455 | b'\\' => match bytes.next() { 456 | Some((_, b'x')) => { 457 | backslash_x_byte(&mut bytes)?; 458 | } 459 | Some((_, b'n' | b'r' | b't' | b'\\' | b'0' | b'\'' | b'"')) => {} 460 | Some((newline, b @ (b'\n' | b'\r'))) => { 461 | input = input.advance(newline + 1); 462 | trailing_backslash(&mut input, b)?; 463 | bytes = input.bytes().enumerate(); 464 | } 465 | _ => break, 466 | }, 467 | b if b.is_ascii() => {} 468 | _ => break, 469 | } 470 | } 471 | Err(Reject) 472 | } 473 | 474 | fn delimiter_of_raw_string(input: Cursor) -> PResult<&str> { 475 | for (i, byte) in input.bytes().enumerate() { 476 | match byte { 477 | b'"' => { 478 | if i > 255 { 479 | // https://github.com/rust-lang/rust/pull/95251 480 | return Err(Reject); 481 | } 482 | return Ok((input.advance(i + 1), &input.rest[..i])); 483 | } 484 | b'#' => {} 485 | _ => break, 486 | } 487 | } 488 | Err(Reject) 489 | } 490 | 491 | fn raw_byte_string(input: Cursor) -> Result { 492 | let (input, delimiter) = delimiter_of_raw_string(input)?; 493 | let mut bytes = input.bytes().enumerate(); 494 | while let Some((i, byte)) = bytes.next() { 495 | match byte { 496 | b'"' if input.rest[i + 1..].starts_with(delimiter) => { 497 | let rest = input.advance(i + 1 + delimiter.len()); 498 | return Ok(literal_suffix(rest)); 499 | } 500 | b'\r' => match bytes.next() { 501 | Some((_, b'\n')) => {} 502 | _ => break, 503 | }, 504 | other => { 505 | if !other.is_ascii() { 506 | break; 507 | } 508 | } 509 | } 510 | } 511 | Err(Reject) 512 | } 513 | 514 | fn c_string(input: Cursor) -> Result { 515 | if let Ok(input) = input.parse("c\"") { 516 | cooked_c_string(input) 517 | } else if let Ok(input) = input.parse("cr") { 518 | raw_c_string(input) 519 | } else { 520 | Err(Reject) 521 | } 522 | } 523 | 524 | fn raw_c_string(input: Cursor) -> Result { 525 | let (input, delimiter) = delimiter_of_raw_string(input)?; 526 | let mut bytes = input.bytes().enumerate(); 527 | while let Some((i, byte)) = bytes.next() { 528 | match byte { 529 | b'"' if input.rest[i + 1..].starts_with(delimiter) => { 530 | let rest = input.advance(i + 1 + delimiter.len()); 531 | return Ok(literal_suffix(rest)); 532 | } 533 | b'\r' => match bytes.next() { 534 | Some((_, b'\n')) => {} 535 | _ => break, 536 | }, 537 | b'\0' => break, 538 | _ => {} 539 | } 540 | } 541 | Err(Reject) 542 | } 543 | 544 | fn cooked_c_string(mut input: Cursor) -> Result { 545 | let mut chars = input.char_indices(); 546 | 547 | while let Some((i, ch)) = chars.next() { 548 | match ch { 549 | '"' => { 550 | let input = input.advance(i + 1); 551 | return Ok(literal_suffix(input)); 552 | } 553 | '\r' => match 
chars.next() { 554 | Some((_, '\n')) => {} 555 | _ => break, 556 | }, 557 | '\\' => match chars.next() { 558 | Some((_, 'x')) => { 559 | backslash_x_nonzero(&mut chars)?; 560 | } 561 | Some((_, 'n' | 'r' | 't' | '\\' | '\'' | '"')) => {} 562 | Some((_, 'u')) => { 563 | if backslash_u(&mut chars)? == '\0' { 564 | break; 565 | } 566 | } 567 | Some((newline, ch @ ('\n' | '\r'))) => { 568 | input = input.advance(newline + 1); 569 | trailing_backslash(&mut input, ch as u8)?; 570 | chars = input.char_indices(); 571 | } 572 | _ => break, 573 | }, 574 | '\0' => break, 575 | _ch => {} 576 | } 577 | } 578 | Err(Reject) 579 | } 580 | 581 | fn byte(input: Cursor) -> Result { 582 | let input = input.parse("b'")?; 583 | let mut bytes = input.bytes().enumerate(); 584 | let ok = match bytes.next().map(|(_, b)| b) { 585 | Some(b'\\') => match bytes.next().map(|(_, b)| b) { 586 | Some(b'x') => backslash_x_byte(&mut bytes).is_ok(), 587 | Some(b'n' | b'r' | b't' | b'\\' | b'0' | b'\'' | b'"') => true, 588 | _ => false, 589 | }, 590 | b => b.is_some(), 591 | }; 592 | if !ok { 593 | return Err(Reject); 594 | } 595 | let (offset, _) = bytes.next().ok_or(Reject)?; 596 | if !input.chars().as_str().is_char_boundary(offset) { 597 | return Err(Reject); 598 | } 599 | let input = input.advance(offset).parse("'")?; 600 | Ok(literal_suffix(input)) 601 | } 602 | 603 | fn character(input: Cursor) -> Result { 604 | let input = input.parse("'")?; 605 | let mut chars = input.char_indices(); 606 | let ok = match chars.next().map(|(_, ch)| ch) { 607 | Some('\\') => match chars.next().map(|(_, ch)| ch) { 608 | Some('x') => backslash_x_char(&mut chars).is_ok(), 609 | Some('u') => backslash_u(&mut chars).is_ok(), 610 | Some('n' | 'r' | 't' | '\\' | '0' | '\'' | '"') => true, 611 | _ => false, 612 | }, 613 | ch => ch.is_some(), 614 | }; 615 | if !ok { 616 | return Err(Reject); 617 | } 618 | let (idx, _) = chars.next().ok_or(Reject)?; 619 | let input = input.advance(idx).parse("'")?; 620 | Ok(literal_suffix(input)) 621 | } 622 | 623 | macro_rules! 
next_ch { 624 | ($chars:ident @ $pat:pat) => { 625 | match $chars.next() { 626 | Some((_, ch)) => match ch { 627 | $pat => ch, 628 | _ => return Err(Reject), 629 | }, 630 | None => return Err(Reject), 631 | } 632 | }; 633 | } 634 | 635 | fn backslash_x_char(chars: &mut I) -> Result<(), Reject> 636 | where 637 | I: Iterator, 638 | { 639 | next_ch!(chars @ '0'..='7'); 640 | next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F'); 641 | Ok(()) 642 | } 643 | 644 | fn backslash_x_byte(chars: &mut I) -> Result<(), Reject> 645 | where 646 | I: Iterator, 647 | { 648 | next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F'); 649 | next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F'); 650 | Ok(()) 651 | } 652 | 653 | fn backslash_x_nonzero(chars: &mut I) -> Result<(), Reject> 654 | where 655 | I: Iterator, 656 | { 657 | let first = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F'); 658 | let second = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F'); 659 | if first == '0' && second == '0' { 660 | Err(Reject) 661 | } else { 662 | Ok(()) 663 | } 664 | } 665 | 666 | fn backslash_u(chars: &mut I) -> Result 667 | where 668 | I: Iterator, 669 | { 670 | next_ch!(chars @ '{'); 671 | let mut value = 0; 672 | let mut len = 0; 673 | for (_, ch) in chars { 674 | let digit = match ch { 675 | '0'..='9' => ch as u8 - b'0', 676 | 'a'..='f' => 10 + ch as u8 - b'a', 677 | 'A'..='F' => 10 + ch as u8 - b'A', 678 | '_' if len > 0 => continue, 679 | '}' if len > 0 => return char::from_u32(value).ok_or(Reject), 680 | _ => break, 681 | }; 682 | if len == 6 { 683 | break; 684 | } 685 | value *= 0x10; 686 | value += u32::from(digit); 687 | len += 1; 688 | } 689 | Err(Reject) 690 | } 691 | 692 | fn trailing_backslash(input: &mut Cursor, mut last: u8) -> Result<(), Reject> { 693 | let mut whitespace = input.bytes().enumerate(); 694 | loop { 695 | if last == b'\r' && whitespace.next().map_or(true, |(_, b)| b != b'\n') { 696 | return Err(Reject); 697 | } 698 | match whitespace.next() { 699 | Some((_, b @ (b' ' | b'\t' | b'\n' | b'\r'))) => { 700 | last = b; 701 | } 702 | Some((offset, _)) => { 703 | *input = input.advance(offset); 704 | return Ok(()); 705 | } 706 | None => return Err(Reject), 707 | } 708 | } 709 | } 710 | 711 | fn float(input: Cursor) -> Result { 712 | let mut rest = float_digits(input)?; 713 | if let Some(ch) = rest.chars().next() { 714 | if is_ident_start(ch) { 715 | rest = ident_not_raw(rest)?.0; 716 | } 717 | } 718 | word_break(rest) 719 | } 720 | 721 | fn float_digits(input: Cursor) -> Result { 722 | let mut chars = input.chars().peekable(); 723 | match chars.next() { 724 | Some(ch) if '0' <= ch && ch <= '9' => {} 725 | _ => return Err(Reject), 726 | } 727 | 728 | let mut len = 1; 729 | let mut has_dot = false; 730 | let mut has_exp = false; 731 | while let Some(&ch) = chars.peek() { 732 | match ch { 733 | '0'..='9' | '_' => { 734 | chars.next(); 735 | len += 1; 736 | } 737 | '.' => { 738 | if has_dot { 739 | break; 740 | } 741 | chars.next(); 742 | if chars 743 | .peek() 744 | .map_or(false, |&ch| ch == '.' 
|| is_ident_start(ch)) 745 | { 746 | return Err(Reject); 747 | } 748 | len += 1; 749 | has_dot = true; 750 | } 751 | 'e' | 'E' => { 752 | chars.next(); 753 | len += 1; 754 | has_exp = true; 755 | break; 756 | } 757 | _ => break, 758 | } 759 | } 760 | 761 | if !(has_dot || has_exp) { 762 | return Err(Reject); 763 | } 764 | 765 | if has_exp { 766 | let token_before_exp = if has_dot { 767 | Ok(input.advance(len - 1)) 768 | } else { 769 | Err(Reject) 770 | }; 771 | let mut has_sign = false; 772 | let mut has_exp_value = false; 773 | while let Some(&ch) = chars.peek() { 774 | match ch { 775 | '+' | '-' => { 776 | if has_exp_value { 777 | break; 778 | } 779 | if has_sign { 780 | return token_before_exp; 781 | } 782 | chars.next(); 783 | len += 1; 784 | has_sign = true; 785 | } 786 | '0'..='9' => { 787 | chars.next(); 788 | len += 1; 789 | has_exp_value = true; 790 | } 791 | '_' => { 792 | chars.next(); 793 | len += 1; 794 | } 795 | _ => break, 796 | } 797 | } 798 | if !has_exp_value { 799 | return token_before_exp; 800 | } 801 | } 802 | 803 | Ok(input.advance(len)) 804 | } 805 | 806 | fn int(input: Cursor) -> Result { 807 | let mut rest = digits(input)?; 808 | if let Some(ch) = rest.chars().next() { 809 | if is_ident_start(ch) { 810 | rest = ident_not_raw(rest)?.0; 811 | } 812 | } 813 | word_break(rest) 814 | } 815 | 816 | fn digits(mut input: Cursor) -> Result { 817 | let base = if input.starts_with("0x") { 818 | input = input.advance(2); 819 | 16 820 | } else if input.starts_with("0o") { 821 | input = input.advance(2); 822 | 8 823 | } else if input.starts_with("0b") { 824 | input = input.advance(2); 825 | 2 826 | } else { 827 | 10 828 | }; 829 | 830 | let mut len = 0; 831 | let mut empty = true; 832 | for b in input.bytes() { 833 | match b { 834 | b'0'..=b'9' => { 835 | let digit = (b - b'0') as u64; 836 | if digit >= base { 837 | return Err(Reject); 838 | } 839 | } 840 | b'a'..=b'f' => { 841 | let digit = 10 + (b - b'a') as u64; 842 | if digit >= base { 843 | break; 844 | } 845 | } 846 | b'A'..=b'F' => { 847 | let digit = 10 + (b - b'A') as u64; 848 | if digit >= base { 849 | break; 850 | } 851 | } 852 | b'_' => { 853 | if empty && base == 10 { 854 | return Err(Reject); 855 | } 856 | len += 1; 857 | continue; 858 | } 859 | _ => break, 860 | } 861 | len += 1; 862 | empty = false; 863 | } 864 | if empty { 865 | Err(Reject) 866 | } else { 867 | Ok(input.advance(len)) 868 | } 869 | } 870 | 871 | fn punct(input: Cursor) -> PResult { 872 | let (rest, ch) = punct_char(input)?; 873 | if ch == '\'' { 874 | let (after_lifetime, _ident) = ident_any(rest)?; 875 | if after_lifetime.starts_with_char('\'') 876 | || (after_lifetime.starts_with_char('#') && !rest.starts_with("r#")) 877 | { 878 | Err(Reject) 879 | } else { 880 | Ok((rest, Punct::new('\'', Spacing::Joint))) 881 | } 882 | } else { 883 | let kind = match punct_char(rest) { 884 | Ok(_) => Spacing::Joint, 885 | Err(Reject) => Spacing::Alone, 886 | }; 887 | Ok((rest, Punct::new(ch, kind))) 888 | } 889 | } 890 | 891 | fn punct_char(input: Cursor) -> PResult { 892 | if input.starts_with("//") || input.starts_with("/*") { 893 | // Do not accept `/` of a comment as a punct. 
894 | return Err(Reject); 895 | } 896 | 897 | let mut chars = input.chars(); 898 | let first = match chars.next() { 899 | Some(ch) => ch, 900 | None => { 901 | return Err(Reject); 902 | } 903 | }; 904 | let recognized = "~!@#$%^&*-=+|;:,<.>/?'"; 905 | if recognized.contains(first) { 906 | Ok((input.advance(first.len_utf8()), first)) 907 | } else { 908 | Err(Reject) 909 | } 910 | } 911 | 912 | fn doc_comment<'a>(input: Cursor<'a>, trees: &mut TokenStreamBuilder) -> PResult<'a, ()> { 913 | #[cfg(span_locations)] 914 | let lo = input.off; 915 | let (rest, (comment, inner)) = doc_comment_contents(input)?; 916 | let fallback_span = Span { 917 | #[cfg(span_locations)] 918 | lo, 919 | #[cfg(span_locations)] 920 | hi: rest.off, 921 | }; 922 | let span = crate::Span::_new_fallback(fallback_span); 923 | 924 | let mut scan_for_bare_cr = comment; 925 | while let Some(cr) = scan_for_bare_cr.find('\r') { 926 | let rest = &scan_for_bare_cr[cr + 1..]; 927 | if !rest.starts_with('\n') { 928 | return Err(Reject); 929 | } 930 | scan_for_bare_cr = rest; 931 | } 932 | 933 | let mut pound = Punct::new('#', Spacing::Alone); 934 | pound.set_span(span); 935 | trees.push_token_from_parser(TokenTree::Punct(pound)); 936 | 937 | if inner { 938 | let mut bang = Punct::new('!', Spacing::Alone); 939 | bang.set_span(span); 940 | trees.push_token_from_parser(TokenTree::Punct(bang)); 941 | } 942 | 943 | let doc_ident = crate::Ident::_new_fallback(Ident::new_unchecked("doc", fallback_span)); 944 | let mut equal = Punct::new('=', Spacing::Alone); 945 | equal.set_span(span); 946 | let mut literal = crate::Literal::_new_fallback(Literal::string(comment)); 947 | literal.set_span(span); 948 | let mut bracketed = TokenStreamBuilder::with_capacity(3); 949 | bracketed.push_token_from_parser(TokenTree::Ident(doc_ident)); 950 | bracketed.push_token_from_parser(TokenTree::Punct(equal)); 951 | bracketed.push_token_from_parser(TokenTree::Literal(literal)); 952 | let group = Group::new(Delimiter::Bracket, bracketed.build()); 953 | let mut group = crate::Group::_new_fallback(group); 954 | group.set_span(span); 955 | trees.push_token_from_parser(TokenTree::Group(group)); 956 | 957 | Ok((rest, ())) 958 | } 959 | 960 | fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> { 961 | if input.starts_with("//!") { 962 | let input = input.advance(3); 963 | let (input, s) = take_until_newline_or_eof(input); 964 | Ok((input, (s, true))) 965 | } else if input.starts_with("/*!") { 966 | let (input, s) = block_comment(input)?; 967 | Ok((input, (&s[3..s.len() - 2], true))) 968 | } else if input.starts_with("///") { 969 | let input = input.advance(3); 970 | if input.starts_with_char('/') { 971 | return Err(Reject); 972 | } 973 | let (input, s) = take_until_newline_or_eof(input); 974 | Ok((input, (s, false))) 975 | } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') { 976 | let (input, s) = block_comment(input)?; 977 | Ok((input, (&s[3..s.len() - 2], false))) 978 | } else { 979 | Err(Reject) 980 | } 981 | } 982 | 983 | fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) { 984 | let chars = input.char_indices(); 985 | 986 | for (i, ch) in chars { 987 | if ch == '\n' { 988 | return (input.advance(i), &input.rest[..i]); 989 | } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') { 990 | return (input.advance(i + 1), &input.rest[..i]); 991 | } 992 | } 993 | 994 | (input.advance(input.len()), input.rest) 995 | } 996 | -------------------------------------------------------------------------------- /src/probe.rs: 
-------------------------------------------------------------------------------- 1 | #![allow(dead_code)] 2 | 3 | #[cfg(proc_macro_span)] 4 | pub(crate) mod proc_macro_span; 5 | 6 | #[cfg(proc_macro_span_file)] 7 | pub(crate) mod proc_macro_span_file; 8 | 9 | #[cfg(proc_macro_span_location)] 10 | pub(crate) mod proc_macro_span_location; 11 | -------------------------------------------------------------------------------- /src/probe/proc_macro_span.rs: -------------------------------------------------------------------------------- 1 | // This code exercises the surface area that we expect of Span's unstable API. 2 | // If the current toolchain is able to compile it, then proc-macro2 is able to 3 | // offer these APIs too. 4 | 5 | #![cfg_attr(procmacro2_build_probe, feature(proc_macro_span))] 6 | 7 | extern crate proc_macro; 8 | 9 | use core::ops::{Range, RangeBounds}; 10 | use proc_macro::{Literal, Span}; 11 | use std::path::PathBuf; 12 | 13 | pub fn byte_range(this: &Span) -> Range { 14 | this.byte_range() 15 | } 16 | 17 | pub fn start(this: &Span) -> Span { 18 | this.start() 19 | } 20 | 21 | pub fn end(this: &Span) -> Span { 22 | this.end() 23 | } 24 | 25 | pub fn line(this: &Span) -> usize { 26 | this.line() 27 | } 28 | 29 | pub fn column(this: &Span) -> usize { 30 | this.column() 31 | } 32 | 33 | pub fn file(this: &Span) -> String { 34 | this.file() 35 | } 36 | 37 | pub fn local_file(this: &Span) -> Option { 38 | this.local_file() 39 | } 40 | 41 | pub fn join(this: &Span, other: Span) -> Option { 42 | this.join(other) 43 | } 44 | 45 | pub fn subspan>(this: &Literal, range: R) -> Option { 46 | this.subspan(range) 47 | } 48 | 49 | // Include in sccache cache key. 50 | #[cfg(procmacro2_build_probe)] 51 | const _: Option<&str> = option_env!("RUSTC_BOOTSTRAP"); 52 | -------------------------------------------------------------------------------- /src/probe/proc_macro_span_file.rs: -------------------------------------------------------------------------------- 1 | // The subset of Span's API stabilized in Rust 1.88. 2 | 3 | extern crate proc_macro; 4 | 5 | use proc_macro::Span; 6 | use std::path::PathBuf; 7 | 8 | pub fn file(this: &Span) -> String { 9 | this.file() 10 | } 11 | 12 | pub fn local_file(this: &Span) -> Option { 13 | this.local_file() 14 | } 15 | -------------------------------------------------------------------------------- /src/probe/proc_macro_span_location.rs: -------------------------------------------------------------------------------- 1 | // The subset of Span's API stabilized in Rust 1.88. 
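// (Editor's note, hedged: like its siblings in src/probe/, this file appears
// to be compiled only as a build-time probe; if the active toolchain accepts
// these Span::start/end/line/column calls, the proc_macro_span_location cfg
// is enabled and wrapper.rs forwards location queries to the compiler's Span
// instead of the fallback implementation.)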
2 | 3 | extern crate proc_macro; 4 | 5 | use proc_macro::Span; 6 | 7 | pub fn start(this: &Span) -> Span { 8 | this.start() 9 | } 10 | 11 | pub fn end(this: &Span) -> Span { 12 | this.end() 13 | } 14 | 15 | pub fn line(this: &Span) -> usize { 16 | this.line() 17 | } 18 | 19 | pub fn column(this: &Span) -> usize { 20 | this.column() 21 | } 22 | -------------------------------------------------------------------------------- /src/rcvec.rs: -------------------------------------------------------------------------------- 1 | use alloc::rc::Rc; 2 | use alloc::vec; 3 | use core::mem; 4 | use core::panic::RefUnwindSafe; 5 | use core::slice; 6 | 7 | pub(crate) struct RcVec { 8 | inner: Rc>, 9 | } 10 | 11 | pub(crate) struct RcVecBuilder { 12 | inner: Vec, 13 | } 14 | 15 | pub(crate) struct RcVecMut<'a, T> { 16 | inner: &'a mut Vec, 17 | } 18 | 19 | #[derive(Clone)] 20 | pub(crate) struct RcVecIntoIter { 21 | inner: vec::IntoIter, 22 | } 23 | 24 | impl RcVec { 25 | pub(crate) fn is_empty(&self) -> bool { 26 | self.inner.is_empty() 27 | } 28 | 29 | pub(crate) fn len(&self) -> usize { 30 | self.inner.len() 31 | } 32 | 33 | pub(crate) fn iter(&self) -> slice::Iter { 34 | self.inner.iter() 35 | } 36 | 37 | pub(crate) fn make_mut(&mut self) -> RcVecMut 38 | where 39 | T: Clone, 40 | { 41 | RcVecMut { 42 | inner: Rc::make_mut(&mut self.inner), 43 | } 44 | } 45 | 46 | pub(crate) fn get_mut(&mut self) -> Option> { 47 | let inner = Rc::get_mut(&mut self.inner)?; 48 | Some(RcVecMut { inner }) 49 | } 50 | 51 | pub(crate) fn make_owned(mut self) -> RcVecBuilder 52 | where 53 | T: Clone, 54 | { 55 | let vec = if let Some(owned) = Rc::get_mut(&mut self.inner) { 56 | mem::take(owned) 57 | } else { 58 | Vec::clone(&self.inner) 59 | }; 60 | RcVecBuilder { inner: vec } 61 | } 62 | } 63 | 64 | impl RcVecBuilder { 65 | pub(crate) fn new() -> Self { 66 | RcVecBuilder { inner: Vec::new() } 67 | } 68 | 69 | pub(crate) fn with_capacity(cap: usize) -> Self { 70 | RcVecBuilder { 71 | inner: Vec::with_capacity(cap), 72 | } 73 | } 74 | 75 | pub(crate) fn push(&mut self, element: T) { 76 | self.inner.push(element); 77 | } 78 | 79 | pub(crate) fn extend(&mut self, iter: impl IntoIterator) { 80 | self.inner.extend(iter); 81 | } 82 | 83 | pub(crate) fn as_mut(&mut self) -> RcVecMut { 84 | RcVecMut { 85 | inner: &mut self.inner, 86 | } 87 | } 88 | 89 | pub(crate) fn build(self) -> RcVec { 90 | RcVec { 91 | inner: Rc::new(self.inner), 92 | } 93 | } 94 | } 95 | 96 | impl<'a, T> RcVecMut<'a, T> { 97 | pub(crate) fn push(&mut self, element: T) { 98 | self.inner.push(element); 99 | } 100 | 101 | pub(crate) fn extend(&mut self, iter: impl IntoIterator) { 102 | self.inner.extend(iter); 103 | } 104 | 105 | pub(crate) fn as_mut(&mut self) -> RcVecMut { 106 | RcVecMut { inner: self.inner } 107 | } 108 | 109 | pub(crate) fn take(self) -> RcVecBuilder { 110 | let vec = mem::take(self.inner); 111 | RcVecBuilder { inner: vec } 112 | } 113 | } 114 | 115 | impl Clone for RcVec { 116 | fn clone(&self) -> Self { 117 | RcVec { 118 | inner: Rc::clone(&self.inner), 119 | } 120 | } 121 | } 122 | 123 | impl IntoIterator for RcVecBuilder { 124 | type Item = T; 125 | type IntoIter = RcVecIntoIter; 126 | 127 | fn into_iter(self) -> Self::IntoIter { 128 | RcVecIntoIter { 129 | inner: self.inner.into_iter(), 130 | } 131 | } 132 | } 133 | 134 | impl Iterator for RcVecIntoIter { 135 | type Item = T; 136 | 137 | fn next(&mut self) -> Option { 138 | self.inner.next() 139 | } 140 | 141 | fn size_hint(&self) -> (usize, Option) { 142 | self.inner.size_hint() 143 | 
} 144 | } 145 | 146 | impl RefUnwindSafe for RcVec where T: RefUnwindSafe {} 147 | -------------------------------------------------------------------------------- /src/rustc_literal_escaper.rs: -------------------------------------------------------------------------------- 1 | // Vendored from rustc-literal-escaper v0.0.5 2 | // https://github.com/rust-lang/literal-escaper/tree/v0.0.5 3 | 4 | //! Utilities for validating (raw) string, char, and byte literals and 5 | //! turning escape sequences into the values they represent. 6 | 7 | use crate::num::NonZeroChar; 8 | use std::ffi::CStr; 9 | use std::num::NonZeroU8; 10 | use std::ops::Range; 11 | use std::str::Chars; 12 | 13 | /// Errors and warnings that can occur during string, char, and byte unescaping. 14 | /// 15 | /// Mostly relating to malformed escape sequences, but also a few other problems. 16 | #[derive(Debug, PartialEq, Eq)] 17 | pub enum EscapeError { 18 | /// Expected 1 char, but 0 were found. 19 | ZeroChars, 20 | /// Expected 1 char, but more than 1 were found. 21 | MoreThanOneChar, 22 | 23 | /// Escaped '\' character without continuation. 24 | LoneSlash, 25 | /// Invalid escape character (e.g. '\z'). 26 | InvalidEscape, 27 | /// Raw '\r' encountered. 28 | BareCarriageReturn, 29 | /// Raw '\r' encountered in raw string. 30 | BareCarriageReturnInRawString, 31 | /// Unescaped character that was expected to be escaped (e.g. raw '\t'). 32 | EscapeOnlyChar, 33 | 34 | /// Numeric character escape is too short (e.g. '\x1'). 35 | TooShortHexEscape, 36 | /// Invalid character in numeric escape (e.g. '\xz') 37 | InvalidCharInHexEscape, 38 | /// Character code in numeric escape is non-ascii (e.g. '\xFF'). 39 | OutOfRangeHexEscape, 40 | 41 | /// '\u' not followed by '{'. 42 | NoBraceInUnicodeEscape, 43 | /// Non-hexadecimal value in '\u{..}'. 44 | InvalidCharInUnicodeEscape, 45 | /// '\u{}' 46 | EmptyUnicodeEscape, 47 | /// No closing brace in '\u{..}', e.g. '\u{12'. 48 | UnclosedUnicodeEscape, 49 | /// '\u{_12}' 50 | LeadingUnderscoreUnicodeEscape, 51 | /// More than 6 characters in '\u{..}', e.g. '\u{10FFFF_FF}' 52 | OverlongUnicodeEscape, 53 | /// Invalid in-bound unicode character code, e.g. '\u{DFFF}'. 54 | LoneSurrogateUnicodeEscape, 55 | /// Out of bounds unicode character code, e.g. '\u{FFFFFF}'. 56 | OutOfRangeUnicodeEscape, 57 | 58 | /// Unicode escape code in byte literal. 59 | UnicodeEscapeInByte, 60 | /// Non-ascii character in byte literal, byte string literal, or raw byte string literal. 61 | NonAsciiCharInByte, 62 | 63 | /// `\0` in a C string literal. 64 | NulInCStr, 65 | 66 | /// After a line ending with '\', the next line contains whitespace 67 | /// characters that are not skipped. 68 | UnskippedWhitespaceWarning, 69 | 70 | /// After a line ending with '\', multiple lines are skipped. 71 | MultipleSkippedLinesWarning, 72 | } 73 | 74 | impl EscapeError { 75 | /// Returns true for actual errors, as opposed to warnings. 76 | pub fn is_fatal(&self) -> bool { 77 | !matches!( 78 | self, 79 | EscapeError::UnskippedWhitespaceWarning | EscapeError::MultipleSkippedLinesWarning 80 | ) 81 | } 82 | } 83 | 84 | /// Check a raw string literal for validity 85 | /// 86 | /// Takes the contents of a raw string literal (without quotes) 87 | /// and produces a sequence of characters or errors, 88 | /// which are returned by invoking `callback`. 89 | /// NOTE: Does no escaping, but produces errors for bare carriage return ('\r'). 
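// (Editor's sketch; hypothetical usage, not part of the vendored source: only
// the byte range and a per-character result reach the callback.)
//
//     let mut errors = Vec::new();
//     check_raw_str("a\rb", |range, result| {
//         if let Err(err) = result {
//             errors.push((range, err));
//         }
//     });
//     assert_eq!(
//         errors,
//         vec![(1..2, EscapeError::BareCarriageReturnInRawString)]
//     );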
90 | pub fn check_raw_str(src: &str, callback: impl FnMut(Range, Result)) { 91 | str::check_raw(src, callback); 92 | } 93 | 94 | /// Check a raw byte string literal for validity 95 | /// 96 | /// Takes the contents of a raw byte string literal (without quotes) 97 | /// and produces a sequence of bytes or errors, 98 | /// which are returned by invoking `callback`. 99 | /// NOTE: Does no escaping, but produces errors for bare carriage return ('\r'). 100 | pub fn check_raw_byte_str(src: &str, callback: impl FnMut(Range, Result)) { 101 | <[u8]>::check_raw(src, callback); 102 | } 103 | 104 | /// Check a raw C string literal for validity 105 | /// 106 | /// Takes the contents of a raw C string literal (without quotes) 107 | /// and produces a sequence of characters or errors, 108 | /// which are returned by invoking `callback`. 109 | /// NOTE: Does no escaping, but produces errors for bare carriage return ('\r'). 110 | pub fn check_raw_c_str( 111 | src: &str, 112 | callback: impl FnMut(Range, Result), 113 | ) { 114 | CStr::check_raw(src, callback); 115 | } 116 | 117 | /// Trait for checking raw string literals for validity 118 | trait CheckRaw { 119 | /// Unit type of the implementing string type (`char` for string, `u8` for byte string) 120 | type RawUnit; 121 | 122 | /// Converts chars to the unit type of the literal type 123 | fn char2raw_unit(c: char) -> Result; 124 | 125 | /// Takes the contents of a raw literal (without quotes) 126 | /// and produces a sequence of `Result` 127 | /// which are returned via `callback`. 128 | /// 129 | /// NOTE: Does no escaping, but produces errors for bare carriage return ('\r'). 130 | fn check_raw( 131 | src: &str, 132 | mut callback: impl FnMut(Range, Result), 133 | ) { 134 | let mut chars = src.chars(); 135 | while let Some(c) = chars.next() { 136 | let start = src.len() - chars.as_str().len() - c.len_utf8(); 137 | let res = match c { 138 | '\r' => Err(EscapeError::BareCarriageReturnInRawString), 139 | _ => Self::char2raw_unit(c), 140 | }; 141 | let end = src.len() - chars.as_str().len(); 142 | callback(start..end, res); 143 | } 144 | 145 | // Unfortunately, it is a bit unclear whether the following equivalent code is slower or faster: bug 141855 146 | // src.char_indices().for_each(|(pos, c)| { 147 | // callback( 148 | // pos..pos + c.len_utf8(), 149 | // if c == '\r' { 150 | // Err(EscapeError::BareCarriageReturnInRawString) 151 | // } else { 152 | // Self::char2raw_unit(c) 153 | // }, 154 | // ); 155 | // }); 156 | } 157 | } 158 | 159 | impl CheckRaw for str { 160 | type RawUnit = char; 161 | 162 | #[inline] 163 | fn char2raw_unit(c: char) -> Result { 164 | Ok(c) 165 | } 166 | } 167 | 168 | impl CheckRaw for [u8] { 169 | type RawUnit = u8; 170 | 171 | #[inline] 172 | fn char2raw_unit(c: char) -> Result { 173 | char2byte(c) 174 | } 175 | } 176 | 177 | /// Turn an ascii char into a byte 178 | #[inline] 179 | fn char2byte(c: char) -> Result { 180 | // do NOT do: c.try_into().ok_or(EscapeError::NonAsciiCharInByte) 181 | if c.is_ascii() { 182 | Ok(c as u8) 183 | } else { 184 | Err(EscapeError::NonAsciiCharInByte) 185 | } 186 | } 187 | 188 | impl CheckRaw for CStr { 189 | type RawUnit = NonZeroChar; 190 | 191 | #[inline] 192 | fn char2raw_unit(c: char) -> Result { 193 | NonZeroChar::new(c).ok_or(EscapeError::NulInCStr) 194 | } 195 | } 196 | 197 | /// Unescape a char literal 198 | /// 199 | /// Takes the contents of a char literal (without quotes), 200 | /// and returns an unescaped char or an error. 
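// (Editor's sketch; hypothetical usage: the argument is the literal's contents
// with the surrounding quotes already stripped.)
//
//     assert_eq!(unescape_char("\\n"), Ok('\n'));
//     assert_eq!(unescape_char("ab"), Err(EscapeError::MoreThanOneChar));
//     assert_eq!(unescape_byte("\\xff"), Ok(0xff));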
201 | #[inline] 202 | pub fn unescape_char(src: &str) -> Result { 203 | str::unescape_single(&mut src.chars()) 204 | } 205 | 206 | /// Unescape a byte literal 207 | /// 208 | /// Takes the contents of a byte literal (without quotes), 209 | /// and returns an unescaped byte or an error. 210 | #[inline] 211 | pub fn unescape_byte(src: &str) -> Result { 212 | <[u8]>::unescape_single(&mut src.chars()) 213 | } 214 | 215 | /// Unescape a string literal 216 | /// 217 | /// Takes the contents of a string literal (without quotes) 218 | /// and produces a sequence of escaped characters or errors, 219 | /// which are returned by invoking `callback`. 220 | pub fn unescape_str(src: &str, callback: impl FnMut(Range, Result)) { 221 | str::unescape(src, callback) 222 | } 223 | 224 | /// Unescape a byte string literal 225 | /// 226 | /// Takes the contents of a byte string literal (without quotes) 227 | /// and produces a sequence of escaped bytes or errors, 228 | /// which are returned by invoking `callback`. 229 | pub fn unescape_byte_str(src: &str, callback: impl FnMut(Range, Result)) { 230 | <[u8]>::unescape(src, callback) 231 | } 232 | 233 | /// Unescape a C string literal 234 | /// 235 | /// Takes the contents of a C string literal (without quotes) 236 | /// and produces a sequence of escaped MixedUnits or errors, 237 | /// which are returned by invoking `callback`. 238 | pub fn unescape_c_str( 239 | src: &str, 240 | callback: impl FnMut(Range, Result), 241 | ) { 242 | CStr::unescape(src, callback) 243 | } 244 | 245 | /// Enum representing either a char or a byte 246 | /// 247 | /// Used for mixed utf8 string literals, i.e. those that allow both unicode 248 | /// chars and high bytes. 249 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] 250 | pub enum MixedUnit { 251 | /// Used for ASCII chars (written directly or via `\x00`..`\x7f` escapes) 252 | /// and Unicode chars (written directly or via `\u` escapes). 253 | /// 254 | /// For example, if '¥' appears in a string it is represented here as 255 | /// `MixedUnit::Char('¥')`, and it will be appended to the relevant byte 256 | /// string as the two-byte UTF-8 sequence `[0xc2, 0xa5]` 257 | Char(NonZeroChar), 258 | 259 | /// Used for high bytes (`\x80`..`\xff`). 260 | /// 261 | /// For example, if `\xa5` appears in a string it is represented here as 262 | /// `MixedUnit::HighByte(0xa5)`, and it will be appended to the relevant 263 | /// byte string as the single byte `0xa5`. 
264 | HighByte(NonZeroU8), 265 | } 266 | 267 | impl From for MixedUnit { 268 | #[inline] 269 | fn from(c: NonZeroChar) -> Self { 270 | MixedUnit::Char(c) 271 | } 272 | } 273 | 274 | impl From for MixedUnit { 275 | #[inline] 276 | fn from(byte: NonZeroU8) -> Self { 277 | if byte.get().is_ascii() { 278 | MixedUnit::Char(NonZeroChar::new(byte.get() as char).unwrap()) 279 | } else { 280 | MixedUnit::HighByte(byte) 281 | } 282 | } 283 | } 284 | 285 | impl TryFrom for MixedUnit { 286 | type Error = EscapeError; 287 | 288 | #[inline] 289 | fn try_from(c: char) -> Result { 290 | NonZeroChar::new(c) 291 | .map(MixedUnit::Char) 292 | .ok_or(EscapeError::NulInCStr) 293 | } 294 | } 295 | 296 | impl TryFrom for MixedUnit { 297 | type Error = EscapeError; 298 | 299 | #[inline] 300 | fn try_from(byte: u8) -> Result { 301 | NonZeroU8::new(byte) 302 | .map(From::from) 303 | .ok_or(EscapeError::NulInCStr) 304 | } 305 | } 306 | 307 | /// Trait for unescaping escape sequences in strings 308 | trait Unescape { 309 | /// Unit type of the implementing string type (`char` for string, `u8` for byte string) 310 | type Unit; 311 | 312 | /// Result of unescaping the zero char ('\0') 313 | const ZERO_RESULT: Result; 314 | 315 | /// Converts non-zero bytes to the unit type 316 | fn nonzero_byte2unit(b: NonZeroU8) -> Self::Unit; 317 | 318 | /// Converts chars to the unit type 319 | fn char2unit(c: char) -> Result; 320 | 321 | /// Converts the byte of a hex escape to the unit type 322 | fn hex2unit(b: u8) -> Result; 323 | 324 | /// Converts the result of a unicode escape to the unit type 325 | fn unicode2unit(r: Result) -> Result; 326 | 327 | /// Unescape a single unit (single quote syntax) 328 | fn unescape_single(chars: &mut Chars<'_>) -> Result { 329 | let res = match chars.next().ok_or(EscapeError::ZeroChars)? { 330 | '\\' => Self::unescape_1(chars), 331 | '\n' | '\t' | '\'' => Err(EscapeError::EscapeOnlyChar), 332 | '\r' => Err(EscapeError::BareCarriageReturn), 333 | c => Self::char2unit(c), 334 | }?; 335 | if chars.next().is_some() { 336 | return Err(EscapeError::MoreThanOneChar); 337 | } 338 | Ok(res) 339 | } 340 | 341 | /// Unescape the first unit of a string (double quoted syntax) 342 | fn unescape_1(chars: &mut Chars<'_>) -> Result { 343 | // Previous character was '\\', unescape what follows. 344 | let c = chars.next().ok_or(EscapeError::LoneSlash)?; 345 | if c == '0' { 346 | Self::ZERO_RESULT 347 | } else { 348 | simple_escape(c) 349 | .map(|b| Self::nonzero_byte2unit(b)) 350 | .or_else(|c| match c { 351 | 'x' => Self::hex2unit(hex_escape(chars)?), 352 | 'u' => Self::unicode2unit({ 353 | let value = unicode_escape(chars)?; 354 | if value > char::MAX as u32 { 355 | Err(EscapeError::OutOfRangeUnicodeEscape) 356 | } else { 357 | char::from_u32(value).ok_or(EscapeError::LoneSurrogateUnicodeEscape) 358 | } 359 | }), 360 | _ => Err(EscapeError::InvalidEscape), 361 | }) 362 | } 363 | } 364 | 365 | /// Unescape a string literal 366 | /// 367 | /// Takes the contents of a raw string literal (without quotes) 368 | /// and produces a sequence of `Result` 369 | /// which are returned via `callback`. 
370 | fn unescape( 371 | src: &str, 372 | mut callback: impl FnMut(Range, Result), 373 | ) { 374 | let mut chars = src.chars(); 375 | while let Some(c) = chars.next() { 376 | let start = src.len() - chars.as_str().len() - c.len_utf8(); 377 | let res = match c { 378 | '\\' => { 379 | if let Some(b'\n') = chars.as_str().as_bytes().first() { 380 | let _ = chars.next(); 381 | // skip whitespace for backslash newline, see [Rust language reference] 382 | // (https://doc.rust-lang.org/reference/tokens.html#string-literals). 383 | let callback_err = |range, err| callback(range, Err(err)); 384 | skip_ascii_whitespace(&mut chars, start, callback_err); 385 | continue; 386 | } else { 387 | Self::unescape_1(&mut chars) 388 | } 389 | } 390 | '"' => Err(EscapeError::EscapeOnlyChar), 391 | '\r' => Err(EscapeError::BareCarriageReturn), 392 | c => Self::char2unit(c), 393 | }; 394 | let end = src.len() - chars.as_str().len(); 395 | callback(start..end, res); 396 | } 397 | } 398 | } 399 | 400 | /// Interpret a non-nul ASCII escape 401 | /// 402 | /// Parses the character of an ASCII escape (except nul) without the leading backslash. 403 | #[inline] // single use in Unescape::unescape_1 404 | fn simple_escape(c: char) -> Result { 405 | // Previous character was '\\', unescape what follows. 406 | Ok(NonZeroU8::new(match c { 407 | '"' => b'"', 408 | 'n' => b'\n', 409 | 'r' => b'\r', 410 | 't' => b'\t', 411 | '\\' => b'\\', 412 | '\'' => b'\'', 413 | _ => Err(c)?, 414 | }) 415 | .unwrap()) 416 | } 417 | 418 | /// Interpret a hexadecimal escape 419 | /// 420 | /// Parses the two hexadecimal characters of a hexadecimal escape without the leading r"\x". 421 | #[inline] // single use in Unescape::unescape_1 422 | fn hex_escape(chars: &mut impl Iterator) -> Result { 423 | let hi = chars.next().ok_or(EscapeError::TooShortHexEscape)?; 424 | let hi = hi.to_digit(16).ok_or(EscapeError::InvalidCharInHexEscape)?; 425 | 426 | let lo = chars.next().ok_or(EscapeError::TooShortHexEscape)?; 427 | let lo = lo.to_digit(16).ok_or(EscapeError::InvalidCharInHexEscape)?; 428 | 429 | Ok((hi * 16 + lo) as u8) 430 | } 431 | 432 | /// Interpret a unicode escape 433 | /// 434 | /// Parse the braces with hexadecimal characters (and underscores) part of a unicode escape. 435 | /// This r"{...}" normally comes after r"\u" and cannot start with an underscore. 436 | #[inline] // single use in Unescape::unescape_1 437 | fn unicode_escape(chars: &mut impl Iterator) -> Result { 438 | if chars.next() != Some('{') { 439 | return Err(EscapeError::NoBraceInUnicodeEscape); 440 | } 441 | 442 | // First character must be a hexadecimal digit. 443 | let mut value: u32 = match chars.next().ok_or(EscapeError::UnclosedUnicodeEscape)? { 444 | '_' => return Err(EscapeError::LeadingUnderscoreUnicodeEscape), 445 | '}' => return Err(EscapeError::EmptyUnicodeEscape), 446 | c => c 447 | .to_digit(16) 448 | .ok_or(EscapeError::InvalidCharInUnicodeEscape)?, 449 | }; 450 | 451 | // First character is valid, now parse the rest of the number 452 | // and closing brace. 453 | let mut n_digits = 1; 454 | loop { 455 | match chars.next() { 456 | None => return Err(EscapeError::UnclosedUnicodeEscape), 457 | Some('_') => continue, 458 | Some('}') => { 459 | // Incorrect syntax has higher priority for error reporting 460 | // than unallowed value for a literal. 
461 | return if n_digits > 6 { 462 | Err(EscapeError::OverlongUnicodeEscape) 463 | } else { 464 | Ok(value) 465 | }; 466 | } 467 | Some(c) => { 468 | let digit: u32 = c 469 | .to_digit(16) 470 | .ok_or(EscapeError::InvalidCharInUnicodeEscape)?; 471 | n_digits += 1; 472 | if n_digits > 6 { 473 | // Stop updating value since we're sure that it's incorrect already. 474 | continue; 475 | } 476 | value = value * 16 + digit; 477 | } 478 | }; 479 | } 480 | } 481 | 482 | /// Interpret a string continuation escape (https://doc.rust-lang.org/reference/expressions/literal-expr.html#string-continuation-escapes) 483 | /// 484 | /// Skip ASCII whitespace, except for the formfeed character 485 | /// (see [this issue](https://github.com/rust-lang/rust/issues/136600)). 486 | /// Warns on unescaped newline and following non-ASCII whitespace. 487 | #[inline] // single use in Unescape::unescape 488 | fn skip_ascii_whitespace( 489 | chars: &mut Chars<'_>, 490 | start: usize, 491 | mut callback: impl FnMut(Range, EscapeError), 492 | ) { 493 | let rest = chars.as_str(); 494 | let first_non_space = rest 495 | .bytes() 496 | .position(|b| b != b' ' && b != b'\t' && b != b'\n' && b != b'\r') 497 | .unwrap_or(rest.len()); 498 | let (space, rest) = rest.split_at(first_non_space); 499 | // backslash newline adds 2 bytes 500 | let end = start + 2 + first_non_space; 501 | if space.contains('\n') { 502 | callback(start..end, EscapeError::MultipleSkippedLinesWarning); 503 | } 504 | *chars = rest.chars(); 505 | if let Some(c) = chars.clone().next() { 506 | if c.is_whitespace() { 507 | // for error reporting, include the character that was not skipped in the span 508 | callback( 509 | start..end + c.len_utf8(), 510 | EscapeError::UnskippedWhitespaceWarning, 511 | ); 512 | } 513 | } 514 | } 515 | 516 | impl Unescape for str { 517 | type Unit = char; 518 | 519 | const ZERO_RESULT: Result = Ok('\0'); 520 | 521 | #[inline] 522 | fn nonzero_byte2unit(b: NonZeroU8) -> Self::Unit { 523 | b.get().into() 524 | } 525 | 526 | #[inline] 527 | fn char2unit(c: char) -> Result { 528 | Ok(c) 529 | } 530 | 531 | #[inline] 532 | fn hex2unit(b: u8) -> Result { 533 | if b.is_ascii() { 534 | Ok(b as char) 535 | } else { 536 | Err(EscapeError::OutOfRangeHexEscape) 537 | } 538 | } 539 | 540 | #[inline] 541 | fn unicode2unit(r: Result) -> Result { 542 | r 543 | } 544 | } 545 | 546 | impl Unescape for [u8] { 547 | type Unit = u8; 548 | 549 | const ZERO_RESULT: Result = Ok(b'\0'); 550 | 551 | #[inline] 552 | fn nonzero_byte2unit(b: NonZeroU8) -> Self::Unit { 553 | b.get() 554 | } 555 | 556 | #[inline] 557 | fn char2unit(c: char) -> Result { 558 | char2byte(c) 559 | } 560 | 561 | #[inline] 562 | fn hex2unit(b: u8) -> Result { 563 | Ok(b) 564 | } 565 | 566 | #[inline] 567 | fn unicode2unit(_r: Result) -> Result { 568 | Err(EscapeError::UnicodeEscapeInByte) 569 | } 570 | } 571 | 572 | impl Unescape for CStr { 573 | type Unit = MixedUnit; 574 | 575 | const ZERO_RESULT: Result = Err(EscapeError::NulInCStr); 576 | 577 | #[inline] 578 | fn nonzero_byte2unit(b: NonZeroU8) -> Self::Unit { 579 | b.into() 580 | } 581 | 582 | #[inline] 583 | fn char2unit(c: char) -> Result { 584 | c.try_into() 585 | } 586 | 587 | #[inline] 588 | fn hex2unit(byte: u8) -> Result { 589 | byte.try_into() 590 | } 591 | 592 | #[inline] 593 | fn unicode2unit(r: Result) -> Result { 594 | Self::char2unit(r?) 
595 | } 596 | } 597 | 598 | /// Enum of the different kinds of literal 599 | #[derive(Debug, Clone, Copy, PartialEq)] 600 | pub enum Mode { 601 | /// `'a'` 602 | Char, 603 | 604 | /// `b'a'` 605 | Byte, 606 | 607 | /// `"hello"` 608 | Str, 609 | /// `r"hello"` 610 | RawStr, 611 | 612 | /// `b"hello"` 613 | ByteStr, 614 | /// `br"hello"` 615 | RawByteStr, 616 | 617 | /// `c"hello"` 618 | CStr, 619 | /// `cr"hello"` 620 | RawCStr, 621 | } 622 | 623 | impl Mode { 624 | pub fn in_double_quotes(self) -> bool { 625 | match self { 626 | Mode::Str 627 | | Mode::RawStr 628 | | Mode::ByteStr 629 | | Mode::RawByteStr 630 | | Mode::CStr 631 | | Mode::RawCStr => true, 632 | Mode::Char | Mode::Byte => false, 633 | } 634 | } 635 | 636 | pub fn prefix_noraw(self) -> &'static str { 637 | match self { 638 | Mode::Char | Mode::Str | Mode::RawStr => "", 639 | Mode::Byte | Mode::ByteStr | Mode::RawByteStr => "b", 640 | Mode::CStr | Mode::RawCStr => "c", 641 | } 642 | } 643 | } 644 | 645 | /// Check a literal only for errors 646 | /// 647 | /// Takes the contents of a literal (without quotes) 648 | /// and produces a sequence of only errors, 649 | /// which are returned by invoking `error_callback`. 650 | /// 651 | /// NB Does not produce any output other than errors 652 | pub fn check_for_errors( 653 | src: &str, 654 | mode: Mode, 655 | mut error_callback: impl FnMut(Range, EscapeError), 656 | ) { 657 | match mode { 658 | Mode::Char => { 659 | let mut chars = src.chars(); 660 | if let Err(e) = str::unescape_single(&mut chars) { 661 | error_callback(0..(src.len() - chars.as_str().len()), e); 662 | } 663 | } 664 | Mode::Byte => { 665 | let mut chars = src.chars(); 666 | if let Err(e) = <[u8]>::unescape_single(&mut chars) { 667 | error_callback(0..(src.len() - chars.as_str().len()), e); 668 | } 669 | } 670 | Mode::Str => unescape_str(src, |range, res| { 671 | if let Err(e) = res { 672 | error_callback(range, e); 673 | } 674 | }), 675 | Mode::ByteStr => unescape_byte_str(src, |range, res| { 676 | if let Err(e) = res { 677 | error_callback(range, e); 678 | } 679 | }), 680 | Mode::CStr => unescape_c_str(src, |range, res| { 681 | if let Err(e) = res { 682 | error_callback(range, e); 683 | } 684 | }), 685 | Mode::RawStr => check_raw_str(src, |range, res| { 686 | if let Err(e) = res { 687 | error_callback(range, e); 688 | } 689 | }), 690 | Mode::RawByteStr => check_raw_byte_str(src, |range, res| { 691 | if let Err(e) = res { 692 | error_callback(range, e); 693 | } 694 | }), 695 | Mode::RawCStr => check_raw_c_str(src, |range, res| { 696 | if let Err(e) = res { 697 | error_callback(range, e); 698 | } 699 | }), 700 | } 701 | } 702 | -------------------------------------------------------------------------------- /src/wrapper.rs: -------------------------------------------------------------------------------- 1 | use crate::detection::inside_proc_macro; 2 | use crate::fallback::{self, FromStr2 as _}; 3 | #[cfg(span_locations)] 4 | use crate::location::LineColumn; 5 | #[cfg(proc_macro_span)] 6 | use crate::probe::proc_macro_span; 7 | #[cfg(all(span_locations, proc_macro_span_file))] 8 | use crate::probe::proc_macro_span_file; 9 | #[cfg(all(span_locations, proc_macro_span_location))] 10 | use crate::probe::proc_macro_span_location; 11 | use crate::{Delimiter, Punct, Spacing, TokenTree}; 12 | use core::fmt::{self, Debug, Display}; 13 | #[cfg(span_locations)] 14 | use core::ops::Range; 15 | use core::ops::RangeBounds; 16 | use std::ffi::CStr; 17 | #[cfg(span_locations)] 18 | use std::path::PathBuf; 19 | 20 | #[derive(Clone)] 
21 | pub(crate) enum TokenStream { 22 | Compiler(DeferredTokenStream), 23 | Fallback(fallback::TokenStream), 24 | } 25 | 26 | // Work around https://github.com/rust-lang/rust/issues/65080. 27 | // In `impl Extend for TokenStream` which is used heavily by quote, 28 | // we hold on to the appended tokens and do proc_macro::TokenStream::extend as 29 | // late as possible to batch together consecutive uses of the Extend impl. 30 | #[derive(Clone)] 31 | pub(crate) struct DeferredTokenStream { 32 | stream: proc_macro::TokenStream, 33 | extra: Vec, 34 | } 35 | 36 | pub(crate) enum LexError { 37 | Compiler(proc_macro::LexError), 38 | Fallback(fallback::LexError), 39 | 40 | // Rustc was supposed to return a LexError, but it panicked instead. 41 | // https://github.com/rust-lang/rust/issues/58736 42 | CompilerPanic, 43 | } 44 | 45 | #[cold] 46 | fn mismatch(line: u32) -> ! { 47 | #[cfg(procmacro2_backtrace)] 48 | { 49 | let backtrace = std::backtrace::Backtrace::force_capture(); 50 | panic!("compiler/fallback mismatch L{}\n\n{}", line, backtrace) 51 | } 52 | #[cfg(not(procmacro2_backtrace))] 53 | { 54 | panic!("compiler/fallback mismatch L{}", line) 55 | } 56 | } 57 | 58 | impl DeferredTokenStream { 59 | fn new(stream: proc_macro::TokenStream) -> Self { 60 | DeferredTokenStream { 61 | stream, 62 | extra: Vec::new(), 63 | } 64 | } 65 | 66 | fn is_empty(&self) -> bool { 67 | self.stream.is_empty() && self.extra.is_empty() 68 | } 69 | 70 | fn evaluate_now(&mut self) { 71 | // If-check provides a fast short circuit for the common case of `extra` 72 | // being empty, which saves a round trip over the proc macro bridge. 73 | // Improves macro expansion time in winrt by 6% in debug mode. 74 | if !self.extra.is_empty() { 75 | self.stream.extend(self.extra.drain(..)); 76 | } 77 | } 78 | 79 | fn into_token_stream(mut self) -> proc_macro::TokenStream { 80 | self.evaluate_now(); 81 | self.stream 82 | } 83 | } 84 | 85 | impl TokenStream { 86 | pub(crate) fn new() -> Self { 87 | if inside_proc_macro() { 88 | TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new())) 89 | } else { 90 | TokenStream::Fallback(fallback::TokenStream::new()) 91 | } 92 | } 93 | 94 | pub(crate) fn from_str_checked(src: &str) -> Result { 95 | if inside_proc_macro() { 96 | Ok(TokenStream::Compiler(DeferredTokenStream::new( 97 | proc_macro::TokenStream::from_str_checked(src)?, 98 | ))) 99 | } else { 100 | Ok(TokenStream::Fallback( 101 | fallback::TokenStream::from_str_checked(src)?, 102 | )) 103 | } 104 | } 105 | 106 | pub(crate) fn is_empty(&self) -> bool { 107 | match self { 108 | TokenStream::Compiler(tts) => tts.is_empty(), 109 | TokenStream::Fallback(tts) => tts.is_empty(), 110 | } 111 | } 112 | 113 | fn unwrap_nightly(self) -> proc_macro::TokenStream { 114 | match self { 115 | TokenStream::Compiler(s) => s.into_token_stream(), 116 | TokenStream::Fallback(_) => mismatch(line!()), 117 | } 118 | } 119 | 120 | fn unwrap_stable(self) -> fallback::TokenStream { 121 | match self { 122 | TokenStream::Compiler(_) => mismatch(line!()), 123 | TokenStream::Fallback(s) => s, 124 | } 125 | } 126 | } 127 | 128 | impl Display for TokenStream { 129 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 130 | match self { 131 | TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f), 132 | TokenStream::Fallback(tts) => Display::fmt(tts, f), 133 | } 134 | } 135 | } 136 | 137 | impl From for TokenStream { 138 | fn from(inner: proc_macro::TokenStream) -> Self { 139 | 
TokenStream::Compiler(DeferredTokenStream::new(inner)) 140 | } 141 | } 142 | 143 | impl From for proc_macro::TokenStream { 144 | fn from(inner: TokenStream) -> Self { 145 | match inner { 146 | TokenStream::Compiler(inner) => inner.into_token_stream(), 147 | TokenStream::Fallback(inner) => { 148 | proc_macro::TokenStream::from_str_unchecked(&inner.to_string()) 149 | } 150 | } 151 | } 152 | } 153 | 154 | impl From for TokenStream { 155 | fn from(inner: fallback::TokenStream) -> Self { 156 | TokenStream::Fallback(inner) 157 | } 158 | } 159 | 160 | // Assumes inside_proc_macro(). 161 | fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree { 162 | match token { 163 | TokenTree::Group(tt) => proc_macro::TokenTree::Group(tt.inner.unwrap_nightly()), 164 | TokenTree::Punct(tt) => { 165 | let spacing = match tt.spacing() { 166 | Spacing::Joint => proc_macro::Spacing::Joint, 167 | Spacing::Alone => proc_macro::Spacing::Alone, 168 | }; 169 | let mut punct = proc_macro::Punct::new(tt.as_char(), spacing); 170 | punct.set_span(tt.span().inner.unwrap_nightly()); 171 | proc_macro::TokenTree::Punct(punct) 172 | } 173 | TokenTree::Ident(tt) => proc_macro::TokenTree::Ident(tt.inner.unwrap_nightly()), 174 | TokenTree::Literal(tt) => proc_macro::TokenTree::Literal(tt.inner.unwrap_nightly()), 175 | } 176 | } 177 | 178 | impl From for TokenStream { 179 | fn from(token: TokenTree) -> Self { 180 | if inside_proc_macro() { 181 | TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::from( 182 | into_compiler_token(token), 183 | ))) 184 | } else { 185 | TokenStream::Fallback(fallback::TokenStream::from(token)) 186 | } 187 | } 188 | } 189 | 190 | impl FromIterator for TokenStream { 191 | fn from_iter>(trees: I) -> Self { 192 | if inside_proc_macro() { 193 | TokenStream::Compiler(DeferredTokenStream::new( 194 | trees.into_iter().map(into_compiler_token).collect(), 195 | )) 196 | } else { 197 | TokenStream::Fallback(trees.into_iter().collect()) 198 | } 199 | } 200 | } 201 | 202 | impl FromIterator for TokenStream { 203 | fn from_iter>(streams: I) -> Self { 204 | let mut streams = streams.into_iter(); 205 | match streams.next() { 206 | Some(TokenStream::Compiler(mut first)) => { 207 | first.evaluate_now(); 208 | first.stream.extend(streams.map(|s| match s { 209 | TokenStream::Compiler(s) => s.into_token_stream(), 210 | TokenStream::Fallback(_) => mismatch(line!()), 211 | })); 212 | TokenStream::Compiler(first) 213 | } 214 | Some(TokenStream::Fallback(mut first)) => { 215 | first.extend(streams.map(|s| match s { 216 | TokenStream::Fallback(s) => s, 217 | TokenStream::Compiler(_) => mismatch(line!()), 218 | })); 219 | TokenStream::Fallback(first) 220 | } 221 | None => TokenStream::new(), 222 | } 223 | } 224 | } 225 | 226 | impl Extend for TokenStream { 227 | fn extend>(&mut self, stream: I) { 228 | match self { 229 | TokenStream::Compiler(tts) => { 230 | // Here is the reason for DeferredTokenStream. 
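// (Editor's note: each token is buffered in `extra` here and only merged into
// the underlying proc_macro::TokenStream later, in `evaluate_now`, so that a
// run of consecutive `extend` calls costs one trip over the proc macro bridge
// rather than one per call.)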
231 | for token in stream { 232 | tts.extra.push(into_compiler_token(token)); 233 | } 234 | } 235 | TokenStream::Fallback(tts) => tts.extend(stream), 236 | } 237 | } 238 | } 239 | 240 | impl Extend for TokenStream { 241 | fn extend>(&mut self, streams: I) { 242 | match self { 243 | TokenStream::Compiler(tts) => { 244 | tts.evaluate_now(); 245 | tts.stream 246 | .extend(streams.into_iter().map(TokenStream::unwrap_nightly)); 247 | } 248 | TokenStream::Fallback(tts) => { 249 | tts.extend(streams.into_iter().map(TokenStream::unwrap_stable)); 250 | } 251 | } 252 | } 253 | } 254 | 255 | impl Debug for TokenStream { 256 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 257 | match self { 258 | TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f), 259 | TokenStream::Fallback(tts) => Debug::fmt(tts, f), 260 | } 261 | } 262 | } 263 | 264 | impl LexError { 265 | pub(crate) fn span(&self) -> Span { 266 | match self { 267 | LexError::Compiler(_) | LexError::CompilerPanic => Span::call_site(), 268 | LexError::Fallback(e) => Span::Fallback(e.span()), 269 | } 270 | } 271 | } 272 | 273 | impl From for LexError { 274 | fn from(e: proc_macro::LexError) -> Self { 275 | LexError::Compiler(e) 276 | } 277 | } 278 | 279 | impl From for LexError { 280 | fn from(e: fallback::LexError) -> Self { 281 | LexError::Fallback(e) 282 | } 283 | } 284 | 285 | impl Debug for LexError { 286 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 287 | match self { 288 | LexError::Compiler(e) => Debug::fmt(e, f), 289 | LexError::Fallback(e) => Debug::fmt(e, f), 290 | LexError::CompilerPanic => { 291 | let fallback = fallback::LexError::call_site(); 292 | Debug::fmt(&fallback, f) 293 | } 294 | } 295 | } 296 | } 297 | 298 | impl Display for LexError { 299 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 300 | match self { 301 | LexError::Compiler(e) => Display::fmt(e, f), 302 | LexError::Fallback(e) => Display::fmt(e, f), 303 | LexError::CompilerPanic => { 304 | let fallback = fallback::LexError::call_site(); 305 | Display::fmt(&fallback, f) 306 | } 307 | } 308 | } 309 | } 310 | 311 | #[derive(Clone)] 312 | pub(crate) enum TokenTreeIter { 313 | Compiler(proc_macro::token_stream::IntoIter), 314 | Fallback(fallback::TokenTreeIter), 315 | } 316 | 317 | impl IntoIterator for TokenStream { 318 | type Item = TokenTree; 319 | type IntoIter = TokenTreeIter; 320 | 321 | fn into_iter(self) -> TokenTreeIter { 322 | match self { 323 | TokenStream::Compiler(tts) => { 324 | TokenTreeIter::Compiler(tts.into_token_stream().into_iter()) 325 | } 326 | TokenStream::Fallback(tts) => TokenTreeIter::Fallback(tts.into_iter()), 327 | } 328 | } 329 | } 330 | 331 | impl Iterator for TokenTreeIter { 332 | type Item = TokenTree; 333 | 334 | fn next(&mut self) -> Option { 335 | let token = match self { 336 | TokenTreeIter::Compiler(iter) => iter.next()?, 337 | TokenTreeIter::Fallback(iter) => return iter.next(), 338 | }; 339 | Some(match token { 340 | proc_macro::TokenTree::Group(tt) => { 341 | TokenTree::Group(crate::Group::_new(Group::Compiler(tt))) 342 | } 343 | proc_macro::TokenTree::Punct(tt) => { 344 | let spacing = match tt.spacing() { 345 | proc_macro::Spacing::Joint => Spacing::Joint, 346 | proc_macro::Spacing::Alone => Spacing::Alone, 347 | }; 348 | let mut o = Punct::new(tt.as_char(), spacing); 349 | o.set_span(crate::Span::_new(Span::Compiler(tt.span()))); 350 | TokenTree::Punct(o) 351 | } 352 | proc_macro::TokenTree::Ident(s) => { 353 | TokenTree::Ident(crate::Ident::_new(Ident::Compiler(s))) 354 | } 355 
| proc_macro::TokenTree::Literal(l) => { 356 | TokenTree::Literal(crate::Literal::_new(Literal::Compiler(l))) 357 | } 358 | }) 359 | } 360 | 361 | fn size_hint(&self) -> (usize, Option) { 362 | match self { 363 | TokenTreeIter::Compiler(tts) => tts.size_hint(), 364 | TokenTreeIter::Fallback(tts) => tts.size_hint(), 365 | } 366 | } 367 | } 368 | 369 | #[derive(Copy, Clone)] 370 | pub(crate) enum Span { 371 | Compiler(proc_macro::Span), 372 | Fallback(fallback::Span), 373 | } 374 | 375 | impl Span { 376 | pub(crate) fn call_site() -> Self { 377 | if inside_proc_macro() { 378 | Span::Compiler(proc_macro::Span::call_site()) 379 | } else { 380 | Span::Fallback(fallback::Span::call_site()) 381 | } 382 | } 383 | 384 | pub(crate) fn mixed_site() -> Self { 385 | if inside_proc_macro() { 386 | Span::Compiler(proc_macro::Span::mixed_site()) 387 | } else { 388 | Span::Fallback(fallback::Span::mixed_site()) 389 | } 390 | } 391 | 392 | #[cfg(super_unstable)] 393 | pub(crate) fn def_site() -> Self { 394 | if inside_proc_macro() { 395 | Span::Compiler(proc_macro::Span::def_site()) 396 | } else { 397 | Span::Fallback(fallback::Span::def_site()) 398 | } 399 | } 400 | 401 | pub(crate) fn resolved_at(&self, other: Span) -> Span { 402 | match (self, other) { 403 | (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)), 404 | (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)), 405 | (Span::Compiler(_), Span::Fallback(_)) => mismatch(line!()), 406 | (Span::Fallback(_), Span::Compiler(_)) => mismatch(line!()), 407 | } 408 | } 409 | 410 | pub(crate) fn located_at(&self, other: Span) -> Span { 411 | match (self, other) { 412 | (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)), 413 | (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)), 414 | (Span::Compiler(_), Span::Fallback(_)) => mismatch(line!()), 415 | (Span::Fallback(_), Span::Compiler(_)) => mismatch(line!()), 416 | } 417 | } 418 | 419 | pub(crate) fn unwrap(self) -> proc_macro::Span { 420 | match self { 421 | Span::Compiler(s) => s, 422 | Span::Fallback(_) => panic!("proc_macro::Span is only available in procedural macros"), 423 | } 424 | } 425 | 426 | #[cfg(span_locations)] 427 | pub(crate) fn byte_range(&self) -> Range { 428 | match self { 429 | #[cfg(proc_macro_span)] 430 | Span::Compiler(s) => proc_macro_span::byte_range(s), 431 | #[cfg(not(proc_macro_span))] 432 | Span::Compiler(_) => 0..0, 433 | Span::Fallback(s) => s.byte_range(), 434 | } 435 | } 436 | 437 | #[cfg(span_locations)] 438 | pub(crate) fn start(&self) -> LineColumn { 439 | match self { 440 | #[cfg(proc_macro_span_location)] 441 | Span::Compiler(s) => LineColumn { 442 | line: proc_macro_span_location::line(s), 443 | column: proc_macro_span_location::column(s).saturating_sub(1), 444 | }, 445 | #[cfg(not(proc_macro_span_location))] 446 | Span::Compiler(_) => LineColumn { line: 0, column: 0 }, 447 | Span::Fallback(s) => s.start(), 448 | } 449 | } 450 | 451 | #[cfg(span_locations)] 452 | pub(crate) fn end(&self) -> LineColumn { 453 | match self { 454 | #[cfg(proc_macro_span_location)] 455 | Span::Compiler(s) => { 456 | let end = proc_macro_span_location::end(s); 457 | LineColumn { 458 | line: proc_macro_span_location::line(&end), 459 | column: proc_macro_span_location::column(&end).saturating_sub(1), 460 | } 461 | } 462 | #[cfg(not(proc_macro_span_location))] 463 | Span::Compiler(_) => LineColumn { line: 0, column: 0 }, 464 | Span::Fallback(s) => s.end(), 465 | } 466 | } 467 | 468 | #[cfg(span_locations)] 
469 | pub(crate) fn file(&self) -> String { 470 | match self { 471 | #[cfg(proc_macro_span_file)] 472 | Span::Compiler(s) => proc_macro_span_file::file(s), 473 | #[cfg(not(proc_macro_span_file))] 474 | Span::Compiler(_) => "".to_owned(), 475 | Span::Fallback(s) => s.file(), 476 | } 477 | } 478 | 479 | #[cfg(span_locations)] 480 | pub(crate) fn local_file(&self) -> Option { 481 | match self { 482 | #[cfg(proc_macro_span_file)] 483 | Span::Compiler(s) => proc_macro_span_file::local_file(s), 484 | #[cfg(not(proc_macro_span_file))] 485 | Span::Compiler(_) => None, 486 | Span::Fallback(s) => s.local_file(), 487 | } 488 | } 489 | 490 | pub(crate) fn join(&self, other: Span) -> Option { 491 | let ret = match (self, other) { 492 | #[cfg(proc_macro_span)] 493 | (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(proc_macro_span::join(a, b)?), 494 | (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.join(b)?), 495 | _ => return None, 496 | }; 497 | Some(ret) 498 | } 499 | 500 | #[cfg(super_unstable)] 501 | pub(crate) fn eq(&self, other: &Span) -> bool { 502 | match (self, other) { 503 | (Span::Compiler(a), Span::Compiler(b)) => a.eq(b), 504 | (Span::Fallback(a), Span::Fallback(b)) => a.eq(b), 505 | _ => false, 506 | } 507 | } 508 | 509 | pub(crate) fn source_text(&self) -> Option { 510 | match self { 511 | #[cfg(not(no_source_text))] 512 | Span::Compiler(s) => s.source_text(), 513 | #[cfg(no_source_text)] 514 | Span::Compiler(_) => None, 515 | Span::Fallback(s) => s.source_text(), 516 | } 517 | } 518 | 519 | fn unwrap_nightly(self) -> proc_macro::Span { 520 | match self { 521 | Span::Compiler(s) => s, 522 | Span::Fallback(_) => mismatch(line!()), 523 | } 524 | } 525 | } 526 | 527 | impl From for crate::Span { 528 | fn from(proc_span: proc_macro::Span) -> Self { 529 | crate::Span::_new(Span::Compiler(proc_span)) 530 | } 531 | } 532 | 533 | impl From for Span { 534 | fn from(inner: fallback::Span) -> Self { 535 | Span::Fallback(inner) 536 | } 537 | } 538 | 539 | impl Debug for Span { 540 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 541 | match self { 542 | Span::Compiler(s) => Debug::fmt(s, f), 543 | Span::Fallback(s) => Debug::fmt(s, f), 544 | } 545 | } 546 | } 547 | 548 | pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) { 549 | match span { 550 | Span::Compiler(s) => { 551 | debug.field("span", &s); 552 | } 553 | Span::Fallback(s) => fallback::debug_span_field_if_nontrivial(debug, s), 554 | } 555 | } 556 | 557 | #[derive(Clone)] 558 | pub(crate) enum Group { 559 | Compiler(proc_macro::Group), 560 | Fallback(fallback::Group), 561 | } 562 | 563 | impl Group { 564 | pub(crate) fn new(delimiter: Delimiter, stream: TokenStream) -> Self { 565 | match stream { 566 | TokenStream::Compiler(tts) => { 567 | let delimiter = match delimiter { 568 | Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis, 569 | Delimiter::Bracket => proc_macro::Delimiter::Bracket, 570 | Delimiter::Brace => proc_macro::Delimiter::Brace, 571 | Delimiter::None => proc_macro::Delimiter::None, 572 | }; 573 | Group::Compiler(proc_macro::Group::new(delimiter, tts.into_token_stream())) 574 | } 575 | TokenStream::Fallback(stream) => { 576 | Group::Fallback(fallback::Group::new(delimiter, stream)) 577 | } 578 | } 579 | } 580 | 581 | pub(crate) fn delimiter(&self) -> Delimiter { 582 | match self { 583 | Group::Compiler(g) => match g.delimiter() { 584 | proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis, 585 | proc_macro::Delimiter::Bracket => Delimiter::Bracket, 
586 | proc_macro::Delimiter::Brace => Delimiter::Brace, 587 | proc_macro::Delimiter::None => Delimiter::None, 588 | }, 589 | Group::Fallback(g) => g.delimiter(), 590 | } 591 | } 592 | 593 | pub(crate) fn stream(&self) -> TokenStream { 594 | match self { 595 | Group::Compiler(g) => TokenStream::Compiler(DeferredTokenStream::new(g.stream())), 596 | Group::Fallback(g) => TokenStream::Fallback(g.stream()), 597 | } 598 | } 599 | 600 | pub(crate) fn span(&self) -> Span { 601 | match self { 602 | Group::Compiler(g) => Span::Compiler(g.span()), 603 | Group::Fallback(g) => Span::Fallback(g.span()), 604 | } 605 | } 606 | 607 | pub(crate) fn span_open(&self) -> Span { 608 | match self { 609 | Group::Compiler(g) => Span::Compiler(g.span_open()), 610 | Group::Fallback(g) => Span::Fallback(g.span_open()), 611 | } 612 | } 613 | 614 | pub(crate) fn span_close(&self) -> Span { 615 | match self { 616 | Group::Compiler(g) => Span::Compiler(g.span_close()), 617 | Group::Fallback(g) => Span::Fallback(g.span_close()), 618 | } 619 | } 620 | 621 | pub(crate) fn set_span(&mut self, span: Span) { 622 | match (self, span) { 623 | (Group::Compiler(g), Span::Compiler(s)) => g.set_span(s), 624 | (Group::Fallback(g), Span::Fallback(s)) => g.set_span(s), 625 | (Group::Compiler(_), Span::Fallback(_)) => mismatch(line!()), 626 | (Group::Fallback(_), Span::Compiler(_)) => mismatch(line!()), 627 | } 628 | } 629 | 630 | fn unwrap_nightly(self) -> proc_macro::Group { 631 | match self { 632 | Group::Compiler(g) => g, 633 | Group::Fallback(_) => mismatch(line!()), 634 | } 635 | } 636 | } 637 | 638 | impl From for Group { 639 | fn from(g: fallback::Group) -> Self { 640 | Group::Fallback(g) 641 | } 642 | } 643 | 644 | impl Display for Group { 645 | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { 646 | match self { 647 | Group::Compiler(group) => Display::fmt(group, formatter), 648 | Group::Fallback(group) => Display::fmt(group, formatter), 649 | } 650 | } 651 | } 652 | 653 | impl Debug for Group { 654 | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { 655 | match self { 656 | Group::Compiler(group) => Debug::fmt(group, formatter), 657 | Group::Fallback(group) => Debug::fmt(group, formatter), 658 | } 659 | } 660 | } 661 | 662 | #[derive(Clone)] 663 | pub(crate) enum Ident { 664 | Compiler(proc_macro::Ident), 665 | Fallback(fallback::Ident), 666 | } 667 | 668 | impl Ident { 669 | #[track_caller] 670 | pub(crate) fn new_checked(string: &str, span: Span) -> Self { 671 | match span { 672 | Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new(string, s)), 673 | Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_checked(string, s)), 674 | } 675 | } 676 | 677 | #[track_caller] 678 | pub(crate) fn new_raw_checked(string: &str, span: Span) -> Self { 679 | match span { 680 | Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new_raw(string, s)), 681 | Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_raw_checked(string, s)), 682 | } 683 | } 684 | 685 | pub(crate) fn span(&self) -> Span { 686 | match self { 687 | Ident::Compiler(t) => Span::Compiler(t.span()), 688 | Ident::Fallback(t) => Span::Fallback(t.span()), 689 | } 690 | } 691 | 692 | pub(crate) fn set_span(&mut self, span: Span) { 693 | match (self, span) { 694 | (Ident::Compiler(t), Span::Compiler(s)) => t.set_span(s), 695 | (Ident::Fallback(t), Span::Fallback(s)) => t.set_span(s), 696 | (Ident::Compiler(_), Span::Fallback(_)) => mismatch(line!()), 697 | (Ident::Fallback(_), Span::Compiler(_)) => mismatch(line!()), 698 | } 699 | 
} 700 | 701 | fn unwrap_nightly(self) -> proc_macro::Ident { 702 | match self { 703 | Ident::Compiler(s) => s, 704 | Ident::Fallback(_) => mismatch(line!()), 705 | } 706 | } 707 | } 708 | 709 | impl From for Ident { 710 | fn from(inner: fallback::Ident) -> Self { 711 | Ident::Fallback(inner) 712 | } 713 | } 714 | 715 | impl PartialEq for Ident { 716 | fn eq(&self, other: &Ident) -> bool { 717 | match (self, other) { 718 | (Ident::Compiler(t), Ident::Compiler(o)) => t.to_string() == o.to_string(), 719 | (Ident::Fallback(t), Ident::Fallback(o)) => t == o, 720 | (Ident::Compiler(_), Ident::Fallback(_)) => mismatch(line!()), 721 | (Ident::Fallback(_), Ident::Compiler(_)) => mismatch(line!()), 722 | } 723 | } 724 | } 725 | 726 | impl PartialEq for Ident 727 | where 728 | T: ?Sized + AsRef, 729 | { 730 | fn eq(&self, other: &T) -> bool { 731 | let other = other.as_ref(); 732 | match self { 733 | Ident::Compiler(t) => t.to_string() == other, 734 | Ident::Fallback(t) => t == other, 735 | } 736 | } 737 | } 738 | 739 | impl Display for Ident { 740 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 741 | match self { 742 | Ident::Compiler(t) => Display::fmt(t, f), 743 | Ident::Fallback(t) => Display::fmt(t, f), 744 | } 745 | } 746 | } 747 | 748 | impl Debug for Ident { 749 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 750 | match self { 751 | Ident::Compiler(t) => Debug::fmt(t, f), 752 | Ident::Fallback(t) => Debug::fmt(t, f), 753 | } 754 | } 755 | } 756 | 757 | #[derive(Clone)] 758 | pub(crate) enum Literal { 759 | Compiler(proc_macro::Literal), 760 | Fallback(fallback::Literal), 761 | } 762 | 763 | macro_rules! suffixed_numbers { 764 | ($($name:ident => $kind:ident,)*) => ($( 765 | pub(crate) fn $name(n: $kind) -> Literal { 766 | if inside_proc_macro() { 767 | Literal::Compiler(proc_macro::Literal::$name(n)) 768 | } else { 769 | Literal::Fallback(fallback::Literal::$name(n)) 770 | } 771 | } 772 | )*) 773 | } 774 | 775 | macro_rules! unsuffixed_integers { 776 | ($($name:ident => $kind:ident,)*) => ($( 777 | pub(crate) fn $name(n: $kind) -> Literal { 778 | if inside_proc_macro() { 779 | Literal::Compiler(proc_macro::Literal::$name(n)) 780 | } else { 781 | Literal::Fallback(fallback::Literal::$name(n)) 782 | } 783 | } 784 | )*) 785 | } 786 | 787 | impl Literal { 788 | pub(crate) fn from_str_checked(repr: &str) -> Result { 789 | if inside_proc_macro() { 790 | let literal = proc_macro::Literal::from_str_checked(repr)?; 791 | Ok(Literal::Compiler(literal)) 792 | } else { 793 | let literal = fallback::Literal::from_str_checked(repr)?; 794 | Ok(Literal::Fallback(literal)) 795 | } 796 | } 797 | 798 | pub(crate) unsafe fn from_str_unchecked(repr: &str) -> Self { 799 | if inside_proc_macro() { 800 | Literal::Compiler(proc_macro::Literal::from_str_unchecked(repr)) 801 | } else { 802 | Literal::Fallback(unsafe { fallback::Literal::from_str_unchecked(repr) }) 803 | } 804 | } 805 | 806 | suffixed_numbers! { 807 | u8_suffixed => u8, 808 | u16_suffixed => u16, 809 | u32_suffixed => u32, 810 | u64_suffixed => u64, 811 | u128_suffixed => u128, 812 | usize_suffixed => usize, 813 | i8_suffixed => i8, 814 | i16_suffixed => i16, 815 | i32_suffixed => i32, 816 | i64_suffixed => i64, 817 | i128_suffixed => i128, 818 | isize_suffixed => isize, 819 | 820 | f32_suffixed => f32, 821 | f64_suffixed => f64, 822 | } 823 | 824 | unsuffixed_integers! 
{ 825 | u8_unsuffixed => u8, 826 | u16_unsuffixed => u16, 827 | u32_unsuffixed => u32, 828 | u64_unsuffixed => u64, 829 | u128_unsuffixed => u128, 830 | usize_unsuffixed => usize, 831 | i8_unsuffixed => i8, 832 | i16_unsuffixed => i16, 833 | i32_unsuffixed => i32, 834 | i64_unsuffixed => i64, 835 | i128_unsuffixed => i128, 836 | isize_unsuffixed => isize, 837 | } 838 | 839 | pub(crate) fn f32_unsuffixed(f: f32) -> Literal { 840 | if inside_proc_macro() { 841 | Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f)) 842 | } else { 843 | Literal::Fallback(fallback::Literal::f32_unsuffixed(f)) 844 | } 845 | } 846 | 847 | pub(crate) fn f64_unsuffixed(f: f64) -> Literal { 848 | if inside_proc_macro() { 849 | Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f)) 850 | } else { 851 | Literal::Fallback(fallback::Literal::f64_unsuffixed(f)) 852 | } 853 | } 854 | 855 | pub(crate) fn string(string: &str) -> Literal { 856 | if inside_proc_macro() { 857 | Literal::Compiler(proc_macro::Literal::string(string)) 858 | } else { 859 | Literal::Fallback(fallback::Literal::string(string)) 860 | } 861 | } 862 | 863 | pub(crate) fn character(ch: char) -> Literal { 864 | if inside_proc_macro() { 865 | Literal::Compiler(proc_macro::Literal::character(ch)) 866 | } else { 867 | Literal::Fallback(fallback::Literal::character(ch)) 868 | } 869 | } 870 | 871 | pub(crate) fn byte_character(byte: u8) -> Literal { 872 | if inside_proc_macro() { 873 | Literal::Compiler({ 874 | #[cfg(not(no_literal_byte_character))] 875 | { 876 | proc_macro::Literal::byte_character(byte) 877 | } 878 | 879 | #[cfg(no_literal_byte_character)] 880 | { 881 | let fallback = fallback::Literal::byte_character(byte); 882 | proc_macro::Literal::from_str_unchecked(&fallback.repr) 883 | } 884 | }) 885 | } else { 886 | Literal::Fallback(fallback::Literal::byte_character(byte)) 887 | } 888 | } 889 | 890 | pub(crate) fn byte_string(bytes: &[u8]) -> Literal { 891 | if inside_proc_macro() { 892 | Literal::Compiler(proc_macro::Literal::byte_string(bytes)) 893 | } else { 894 | Literal::Fallback(fallback::Literal::byte_string(bytes)) 895 | } 896 | } 897 | 898 | pub(crate) fn c_string(string: &CStr) -> Literal { 899 | if inside_proc_macro() { 900 | Literal::Compiler({ 901 | #[cfg(not(no_literal_c_string))] 902 | { 903 | proc_macro::Literal::c_string(string) 904 | } 905 | 906 | #[cfg(no_literal_c_string)] 907 | { 908 | let fallback = fallback::Literal::c_string(string); 909 | proc_macro::Literal::from_str_unchecked(&fallback.repr) 910 | } 911 | }) 912 | } else { 913 | Literal::Fallback(fallback::Literal::c_string(string)) 914 | } 915 | } 916 | 917 | pub(crate) fn span(&self) -> Span { 918 | match self { 919 | Literal::Compiler(lit) => Span::Compiler(lit.span()), 920 | Literal::Fallback(lit) => Span::Fallback(lit.span()), 921 | } 922 | } 923 | 924 | pub(crate) fn set_span(&mut self, span: Span) { 925 | match (self, span) { 926 | (Literal::Compiler(lit), Span::Compiler(s)) => lit.set_span(s), 927 | (Literal::Fallback(lit), Span::Fallback(s)) => lit.set_span(s), 928 | (Literal::Compiler(_), Span::Fallback(_)) => mismatch(line!()), 929 | (Literal::Fallback(_), Span::Compiler(_)) => mismatch(line!()), 930 | } 931 | } 932 | 933 | pub(crate) fn subspan>(&self, range: R) -> Option { 934 | match self { 935 | #[cfg(proc_macro_span)] 936 | Literal::Compiler(lit) => proc_macro_span::subspan(lit, range).map(Span::Compiler), 937 | #[cfg(not(proc_macro_span))] 938 | Literal::Compiler(_lit) => None, 939 | Literal::Fallback(lit) => lit.subspan(range).map(Span::Fallback), 
940 | } 941 | } 942 | 943 | fn unwrap_nightly(self) -> proc_macro::Literal { 944 | match self { 945 | Literal::Compiler(s) => s, 946 | Literal::Fallback(_) => mismatch(line!()), 947 | } 948 | } 949 | } 950 | 951 | impl From for Literal { 952 | fn from(s: fallback::Literal) -> Self { 953 | Literal::Fallback(s) 954 | } 955 | } 956 | 957 | impl Display for Literal { 958 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 959 | match self { 960 | Literal::Compiler(t) => Display::fmt(t, f), 961 | Literal::Fallback(t) => Display::fmt(t, f), 962 | } 963 | } 964 | } 965 | 966 | impl Debug for Literal { 967 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 968 | match self { 969 | Literal::Compiler(t) => Debug::fmt(t, f), 970 | Literal::Fallback(t) => Debug::fmt(t, f), 971 | } 972 | } 973 | } 974 | 975 | #[cfg(span_locations)] 976 | pub(crate) fn invalidate_current_thread_spans() { 977 | if inside_proc_macro() { 978 | panic!( 979 | "proc_macro2::extra::invalidate_current_thread_spans is not available in procedural macros" 980 | ); 981 | } else { 982 | crate::fallback::invalidate_current_thread_spans(); 983 | } 984 | } 985 | -------------------------------------------------------------------------------- /tests/comments.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::assertions_on_result_states, clippy::uninlined_format_args)] 2 | 3 | use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree}; 4 | 5 | // #[doc = "..."] -> "..." 6 | fn lit_of_outer_doc_comment(tokens: &TokenStream) -> Literal { 7 | lit_of_doc_comment(tokens, false) 8 | } 9 | 10 | // #![doc = "..."] -> "..." 11 | fn lit_of_inner_doc_comment(tokens: &TokenStream) -> Literal { 12 | lit_of_doc_comment(tokens, true) 13 | } 14 | 15 | fn lit_of_doc_comment(tokens: &TokenStream, inner: bool) -> Literal { 16 | let mut iter = tokens.clone().into_iter(); 17 | match iter.next().unwrap() { 18 | TokenTree::Punct(punct) => { 19 | assert_eq!(punct.as_char(), '#'); 20 | assert_eq!(punct.spacing(), Spacing::Alone); 21 | } 22 | _ => panic!("wrong token {:?}", tokens), 23 | } 24 | if inner { 25 | match iter.next().unwrap() { 26 | TokenTree::Punct(punct) => { 27 | assert_eq!(punct.as_char(), '!'); 28 | assert_eq!(punct.spacing(), Spacing::Alone); 29 | } 30 | _ => panic!("wrong token {:?}", tokens), 31 | } 32 | } 33 | iter = match iter.next().unwrap() { 34 | TokenTree::Group(group) => { 35 | assert_eq!(group.delimiter(), Delimiter::Bracket); 36 | assert!(iter.next().is_none(), "unexpected token {:?}", tokens); 37 | group.stream().into_iter() 38 | } 39 | _ => panic!("wrong token {:?}", tokens), 40 | }; 41 | match iter.next().unwrap() { 42 | TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"), 43 | _ => panic!("wrong token {:?}", tokens), 44 | } 45 | match iter.next().unwrap() { 46 | TokenTree::Punct(punct) => { 47 | assert_eq!(punct.as_char(), '='); 48 | assert_eq!(punct.spacing(), Spacing::Alone); 49 | } 50 | _ => panic!("wrong token {:?}", tokens), 51 | } 52 | match iter.next().unwrap() { 53 | TokenTree::Literal(literal) => { 54 | assert!(iter.next().is_none(), "unexpected token {:?}", tokens); 55 | literal 56 | } 57 | _ => panic!("wrong token {:?}", tokens), 58 | } 59 | } 60 | 61 | #[test] 62 | fn closed_immediately() { 63 | let stream = "/**/".parse::().unwrap(); 64 | let tokens = stream.into_iter().collect::>(); 65 | assert!(tokens.is_empty(), "not empty -- {:?}", tokens); 66 | } 67 | 68 | #[test] 69 | fn incomplete() { 70 | assert!("/*/".parse::().is_err()); 
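        // `/*/` opens a block comment but never closes it (the trailing `/`
        // is not a `*/`), so parsing this input must fail.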
71 | } 72 | 73 | #[test] 74 | fn lit() { 75 | let stream = "/// doc".parse::().unwrap(); 76 | let lit = lit_of_outer_doc_comment(&stream); 77 | assert_eq!(lit.to_string(), "\" doc\""); 78 | 79 | let stream = "//! doc".parse::().unwrap(); 80 | let lit = lit_of_inner_doc_comment(&stream); 81 | assert_eq!(lit.to_string(), "\" doc\""); 82 | 83 | let stream = "/** doc */".parse::().unwrap(); 84 | let lit = lit_of_outer_doc_comment(&stream); 85 | assert_eq!(lit.to_string(), "\" doc \""); 86 | 87 | let stream = "/*! doc */".parse::().unwrap(); 88 | let lit = lit_of_inner_doc_comment(&stream); 89 | assert_eq!(lit.to_string(), "\" doc \""); 90 | } 91 | 92 | #[test] 93 | fn carriage_return() { 94 | let stream = "///\r\n".parse::().unwrap(); 95 | let lit = lit_of_outer_doc_comment(&stream); 96 | assert_eq!(lit.to_string(), "\"\""); 97 | 98 | let stream = "/**\r\n*/".parse::().unwrap(); 99 | let lit = lit_of_outer_doc_comment(&stream); 100 | assert_eq!(lit.to_string(), "\"\\r\\n\""); 101 | 102 | "///\r".parse::().unwrap_err(); 103 | "///\r \n".parse::().unwrap_err(); 104 | "/**\r \n*/".parse::().unwrap_err(); 105 | } 106 | -------------------------------------------------------------------------------- /tests/features.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::assertions_on_constants, clippy::ignore_without_reason)] 2 | 3 | #[test] 4 | #[ignore] 5 | fn make_sure_no_proc_macro() { 6 | assert!( 7 | !cfg!(feature = "proc-macro"), 8 | "still compiled with proc_macro?" 9 | ); 10 | } 11 | -------------------------------------------------------------------------------- /tests/marker.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::extra_unused_type_parameters)] 2 | 3 | use proc_macro2::{ 4 | Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree, 5 | }; 6 | 7 | macro_rules! assert_impl { 8 | ($ty:ident is $($marker:ident) and +) => { 9 | #[test] 10 | #[allow(non_snake_case)] 11 | fn $ty() { 12 | fn assert_implemented() {} 13 | assert_implemented::<$ty>(); 14 | } 15 | }; 16 | 17 | ($ty:ident is not $($marker:ident) or +) => { 18 | #[test] 19 | #[allow(non_snake_case)] 20 | fn $ty() { 21 | $( 22 | { 23 | // Implemented for types that implement $marker. 24 | #[allow(dead_code)] 25 | trait IsNotImplemented { 26 | fn assert_not_implemented() {} 27 | } 28 | impl IsNotImplemented for T {} 29 | 30 | // Implemented for the type being tested. 31 | trait IsImplemented { 32 | fn assert_not_implemented() {} 33 | } 34 | impl IsImplemented for $ty {} 35 | 36 | // If $ty does not implement $marker, there is no ambiguity 37 | // in the following trait method call. 
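                    // Conversely, if $ty did implement $marker, the blanket
                    // IsNotImplemented impl and the IsImplemented impl would
                    // both supply assert_not_implemented, and this call would
                    // fail to compile as ambiguous, which is exactly the
                    // compile-time check that $ty does not implement $marker.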
38 | <$ty>::assert_not_implemented(); 39 | } 40 | )+ 41 | } 42 | }; 43 | } 44 | 45 | assert_impl!(Delimiter is Send and Sync); 46 | assert_impl!(Spacing is Send and Sync); 47 | 48 | assert_impl!(Group is not Send or Sync); 49 | assert_impl!(Ident is not Send or Sync); 50 | assert_impl!(LexError is not Send or Sync); 51 | assert_impl!(Literal is not Send or Sync); 52 | assert_impl!(Punct is not Send or Sync); 53 | assert_impl!(Span is not Send or Sync); 54 | assert_impl!(TokenStream is not Send or Sync); 55 | assert_impl!(TokenTree is not Send or Sync); 56 | 57 | #[cfg(procmacro2_semver_exempt)] 58 | mod semver_exempt { 59 | use proc_macro2::LineColumn; 60 | 61 | assert_impl!(LineColumn is Send and Sync); 62 | } 63 | 64 | mod unwind_safe { 65 | #[cfg(procmacro2_semver_exempt)] 66 | use proc_macro2::LineColumn; 67 | use proc_macro2::{ 68 | Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree, 69 | }; 70 | use std::panic::{RefUnwindSafe, UnwindSafe}; 71 | 72 | macro_rules! assert_unwind_safe { 73 | ($($types:ident)*) => { 74 | $( 75 | assert_impl!($types is UnwindSafe and RefUnwindSafe); 76 | )* 77 | }; 78 | } 79 | 80 | assert_unwind_safe! { 81 | Delimiter 82 | Group 83 | Ident 84 | LexError 85 | Literal 86 | Punct 87 | Spacing 88 | Span 89 | TokenStream 90 | TokenTree 91 | } 92 | 93 | #[cfg(procmacro2_semver_exempt)] 94 | assert_unwind_safe! { 95 | LineColumn 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /tests/test.rs: -------------------------------------------------------------------------------- 1 | #![allow( 2 | clippy::assertions_on_result_states, 3 | clippy::items_after_statements, 4 | clippy::needless_pass_by_value, 5 | clippy::needless_raw_string_hashes, 6 | clippy::non_ascii_literal, 7 | clippy::octal_escapes, 8 | clippy::uninlined_format_args 9 | )] 10 | 11 | use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; 12 | use std::ffi::CStr; 13 | use std::iter; 14 | use std::str::{self, FromStr}; 15 | 16 | #[test] 17 | fn idents() { 18 | assert_eq!( 19 | Ident::new("String", Span::call_site()).to_string(), 20 | "String" 21 | ); 22 | assert_eq!(Ident::new("fn", Span::call_site()).to_string(), "fn"); 23 | assert_eq!(Ident::new("_", Span::call_site()).to_string(), "_"); 24 | } 25 | 26 | #[test] 27 | fn raw_idents() { 28 | assert_eq!( 29 | Ident::new_raw("String", Span::call_site()).to_string(), 30 | "r#String" 31 | ); 32 | assert_eq!(Ident::new_raw("fn", Span::call_site()).to_string(), "r#fn"); 33 | } 34 | 35 | #[test] 36 | #[should_panic(expected = "`r#_` cannot be a raw identifier")] 37 | fn ident_raw_underscore() { 38 | Ident::new_raw("_", Span::call_site()); 39 | } 40 | 41 | #[test] 42 | #[should_panic(expected = "`r#super` cannot be a raw identifier")] 43 | fn ident_raw_reserved() { 44 | Ident::new_raw("super", Span::call_site()); 45 | } 46 | 47 | #[test] 48 | #[should_panic(expected = "Ident is not allowed to be empty; use Option")] 49 | fn ident_empty() { 50 | Ident::new("", Span::call_site()); 51 | } 52 | 53 | #[test] 54 | #[should_panic(expected = "Ident cannot be a number; use Literal instead")] 55 | fn ident_number() { 56 | Ident::new("255", Span::call_site()); 57 | } 58 | 59 | #[test] 60 | #[should_panic(expected = "\"a#\" is not a valid Ident")] 61 | fn ident_invalid() { 62 | Ident::new("a#", Span::call_site()); 63 | } 64 | 65 | #[test] 66 | #[should_panic(expected = "not a valid Ident")] 67 | fn raw_ident_empty() { 68 | Ident::new("r#", Span::call_site()); 69 
| } 70 | 71 | #[test] 72 | #[should_panic(expected = "not a valid Ident")] 73 | fn raw_ident_number() { 74 | Ident::new("r#255", Span::call_site()); 75 | } 76 | 77 | #[test] 78 | #[should_panic(expected = "\"r#a#\" is not a valid Ident")] 79 | fn raw_ident_invalid() { 80 | Ident::new("r#a#", Span::call_site()); 81 | } 82 | 83 | #[test] 84 | #[should_panic(expected = "not a valid Ident")] 85 | fn lifetime_empty() { 86 | Ident::new("'", Span::call_site()); 87 | } 88 | 89 | #[test] 90 | #[should_panic(expected = "not a valid Ident")] 91 | fn lifetime_number() { 92 | Ident::new("'255", Span::call_site()); 93 | } 94 | 95 | #[test] 96 | #[should_panic(expected = r#""'a#" is not a valid Ident"#)] 97 | fn lifetime_invalid() { 98 | Ident::new("'a#", Span::call_site()); 99 | } 100 | 101 | #[test] 102 | fn literal_string() { 103 | #[track_caller] 104 | fn assert(literal: Literal, expected: &str) { 105 | assert_eq!(literal.to_string(), expected.trim()); 106 | } 107 | 108 | assert(Literal::string(""), r#" "" "#); 109 | assert(Literal::string("aA"), r#" "aA" "#); 110 | assert(Literal::string("\t"), r#" "\t" "#); 111 | assert(Literal::string("❤"), r#" "❤" "#); 112 | assert(Literal::string("'"), r#" "'" "#); 113 | assert(Literal::string("\""), r#" "\"" "#); 114 | assert(Literal::string("\0"), r#" "\0" "#); 115 | assert(Literal::string("\u{1}"), r#" "\u{1}" "#); 116 | assert( 117 | Literal::string("a\00b\07c\08d\0e\0"), 118 | r#" "a\x000b\x007c\08d\0e\0" "#, 119 | ); 120 | 121 | "\"\\\r\n x\"".parse::().unwrap(); 122 | "\"\\\r\n \rx\"".parse::().unwrap_err(); 123 | } 124 | 125 | #[test] 126 | fn literal_raw_string() { 127 | "r\"\r\n\"".parse::().unwrap(); 128 | 129 | fn raw_string_literal_with_hashes(n: usize) -> String { 130 | let mut literal = String::new(); 131 | literal.push('r'); 132 | literal.extend(iter::repeat('#').take(n)); 133 | literal.push('"'); 134 | literal.push('"'); 135 | literal.extend(iter::repeat('#').take(n)); 136 | literal 137 | } 138 | 139 | raw_string_literal_with_hashes(255) 140 | .parse::() 141 | .unwrap(); 142 | 143 | // https://github.com/rust-lang/rust/pull/95251 144 | raw_string_literal_with_hashes(256) 145 | .parse::() 146 | .unwrap_err(); 147 | } 148 | 149 | #[cfg(procmacro2_semver_exempt)] 150 | #[test] 151 | fn literal_string_value() { 152 | for string in ["", "...", "...\t...", "...\\...", "...\0...", "...\u{1}..."] { 153 | assert_eq!(string, Literal::string(string).str_value().unwrap()); 154 | assert_eq!( 155 | string, 156 | format!("r\"{string}\"") 157 | .parse::() 158 | .unwrap() 159 | .str_value() 160 | .unwrap(), 161 | ); 162 | assert_eq!( 163 | string, 164 | format!("r##\"{string}\"##") 165 | .parse::() 166 | .unwrap() 167 | .str_value() 168 | .unwrap(), 169 | ); 170 | } 171 | } 172 | 173 | #[test] 174 | fn literal_byte_character() { 175 | #[track_caller] 176 | fn assert(literal: Literal, expected: &str) { 177 | assert_eq!(literal.to_string(), expected.trim()); 178 | } 179 | 180 | assert(Literal::byte_character(b'a'), r#" b'a' "#); 181 | assert(Literal::byte_character(b'\0'), r#" b'\0' "#); 182 | assert(Literal::byte_character(b'\t'), r#" b'\t' "#); 183 | assert(Literal::byte_character(b'\n'), r#" b'\n' "#); 184 | assert(Literal::byte_character(b'\r'), r#" b'\r' "#); 185 | assert(Literal::byte_character(b'\''), r#" b'\'' "#); 186 | assert(Literal::byte_character(b'\\'), r#" b'\\' "#); 187 | assert(Literal::byte_character(b'\x1f'), r#" b'\x1F' "#); 188 | assert(Literal::byte_character(b'"'), r#" b'"' "#); 189 | } 190 | 191 | #[test] 192 | fn literal_byte_string() { 
193 | #[track_caller] 194 | fn assert(literal: Literal, expected: &str) { 195 | assert_eq!(literal.to_string(), expected.trim()); 196 | } 197 | 198 | assert(Literal::byte_string(b""), r#" b"" "#); 199 | assert(Literal::byte_string(b"\0"), r#" b"\0" "#); 200 | assert(Literal::byte_string(b"\t"), r#" b"\t" "#); 201 | assert(Literal::byte_string(b"\n"), r#" b"\n" "#); 202 | assert(Literal::byte_string(b"\r"), r#" b"\r" "#); 203 | assert(Literal::byte_string(b"\""), r#" b"\"" "#); 204 | assert(Literal::byte_string(b"\\"), r#" b"\\" "#); 205 | assert(Literal::byte_string(b"\x1f"), r#" b"\x1F" "#); 206 | assert(Literal::byte_string(b"'"), r#" b"'" "#); 207 | assert( 208 | Literal::byte_string(b"a\00b\07c\08d\0e\0"), 209 | r#" b"a\x000b\x007c\08d\0e\0" "#, 210 | ); 211 | 212 | "b\"\\\r\n x\"".parse::().unwrap(); 213 | "b\"\\\r\n \rx\"".parse::().unwrap_err(); 214 | "b\"\\\r\n \u{a0}x\"".parse::().unwrap_err(); 215 | "br\"\u{a0}\"".parse::().unwrap_err(); 216 | } 217 | 218 | #[cfg(procmacro2_semver_exempt)] 219 | #[test] 220 | fn literal_byte_string_value() { 221 | for bytestr in [ 222 | &b""[..], 223 | b"...", 224 | b"...\t...", 225 | b"...\\...", 226 | b"...\0...", 227 | b"...\xF0...", 228 | ] { 229 | assert_eq!( 230 | bytestr, 231 | Literal::byte_string(bytestr).byte_str_value().unwrap(), 232 | ); 233 | if let Ok(string) = str::from_utf8(bytestr) { 234 | assert_eq!( 235 | bytestr, 236 | format!("br\"{string}\"") 237 | .parse::() 238 | .unwrap() 239 | .byte_str_value() 240 | .unwrap(), 241 | ); 242 | assert_eq!( 243 | bytestr, 244 | format!("br##\"{string}\"##") 245 | .parse::() 246 | .unwrap() 247 | .byte_str_value() 248 | .unwrap(), 249 | ); 250 | } 251 | } 252 | } 253 | 254 | #[test] 255 | fn literal_c_string() { 256 | #[track_caller] 257 | fn assert(literal: Literal, expected: &str) { 258 | assert_eq!(literal.to_string(), expected.trim()); 259 | } 260 | 261 | assert(Literal::c_string(<&CStr>::default()), r#" c"" "#); 262 | assert( 263 | Literal::c_string(CStr::from_bytes_with_nul(b"aA\0").unwrap()), 264 | r#" c"aA" "#, 265 | ); 266 | assert( 267 | Literal::c_string(CStr::from_bytes_with_nul(b"aA\0").unwrap()), 268 | r#" c"aA" "#, 269 | ); 270 | assert( 271 | Literal::c_string(CStr::from_bytes_with_nul(b"\t\0").unwrap()), 272 | r#" c"\t" "#, 273 | ); 274 | assert( 275 | Literal::c_string(CStr::from_bytes_with_nul(b"\xE2\x9D\xA4\0").unwrap()), 276 | r#" c"❤" "#, 277 | ); 278 | assert( 279 | Literal::c_string(CStr::from_bytes_with_nul(b"'\0").unwrap()), 280 | r#" c"'" "#, 281 | ); 282 | assert( 283 | Literal::c_string(CStr::from_bytes_with_nul(b"\"\0").unwrap()), 284 | r#" c"\"" "#, 285 | ); 286 | assert( 287 | Literal::c_string(CStr::from_bytes_with_nul(b"\x7F\xFF\xFE\xCC\xB3\0").unwrap()), 288 | r#" c"\u{7f}\xFF\xFE\u{333}" "#, 289 | ); 290 | 291 | let strings = r###" 292 | c"hello\x80我叫\u{1F980}" // from the RFC 293 | cr"\" 294 | cr##"Hello "world"!"## 295 | c"\t\n\r\"\\" 296 | "###; 297 | 298 | let mut tokens = strings.parse::().unwrap().into_iter(); 299 | 300 | for expected in &[ 301 | r#"c"hello\x80我叫\u{1F980}""#, 302 | r#"cr"\""#, 303 | r###"cr##"Hello "world"!"##"###, 304 | r#"c"\t\n\r\"\\""#, 305 | ] { 306 | match tokens.next().unwrap() { 307 | TokenTree::Literal(literal) => { 308 | assert_eq!(literal.to_string(), *expected); 309 | } 310 | unexpected => panic!("unexpected token: {:?}", unexpected), 311 | } 312 | } 313 | 314 | if let Some(unexpected) = tokens.next() { 315 | panic!("unexpected token: {:?}", unexpected); 316 | } 317 | 318 | for invalid in &[r#"c"\0""#, r#"c"\x00""#, 
r#"c"\u{0}""#, "c\"\0\""] { 319 | if let Ok(unexpected) = invalid.parse::() { 320 | panic!("unexpected token: {:?}", unexpected); 321 | } 322 | } 323 | } 324 | 325 | #[cfg(procmacro2_semver_exempt)] 326 | #[test] 327 | fn literal_c_string_value() { 328 | for cstr in [ 329 | c"", 330 | c"...", 331 | c"...\t...", 332 | c"...\\...", 333 | c"...\u{1}...", 334 | c"...\xF0...", 335 | ] { 336 | assert_eq!( 337 | cstr.to_bytes_with_nul(), 338 | Literal::c_string(cstr).cstr_value().unwrap(), 339 | ); 340 | if let Ok(string) = cstr.to_str() { 341 | assert_eq!( 342 | cstr.to_bytes_with_nul(), 343 | format!("cr\"{string}\"") 344 | .parse::() 345 | .unwrap() 346 | .cstr_value() 347 | .unwrap(), 348 | ); 349 | assert_eq!( 350 | cstr.to_bytes_with_nul(), 351 | format!("cr##\"{string}\"##") 352 | .parse::() 353 | .unwrap() 354 | .cstr_value() 355 | .unwrap(), 356 | ); 357 | } 358 | } 359 | } 360 | 361 | #[test] 362 | fn literal_character() { 363 | #[track_caller] 364 | fn assert(literal: Literal, expected: &str) { 365 | assert_eq!(literal.to_string(), expected.trim()); 366 | } 367 | 368 | assert(Literal::character('a'), r#" 'a' "#); 369 | assert(Literal::character('\t'), r#" '\t' "#); 370 | assert(Literal::character('❤'), r#" '❤' "#); 371 | assert(Literal::character('\''), r#" '\'' "#); 372 | assert(Literal::character('"'), r#" '"' "#); 373 | assert(Literal::character('\0'), r#" '\0' "#); 374 | assert(Literal::character('\u{1}'), r#" '\u{1}' "#); 375 | } 376 | 377 | #[test] 378 | fn literal_integer() { 379 | #[track_caller] 380 | fn assert(literal: Literal, expected: &str) { 381 | assert_eq!(literal.to_string(), expected); 382 | } 383 | 384 | assert(Literal::u8_suffixed(10), "10u8"); 385 | assert(Literal::u16_suffixed(10), "10u16"); 386 | assert(Literal::u32_suffixed(10), "10u32"); 387 | assert(Literal::u64_suffixed(10), "10u64"); 388 | assert(Literal::u128_suffixed(10), "10u128"); 389 | assert(Literal::usize_suffixed(10), "10usize"); 390 | 391 | assert(Literal::i8_suffixed(10), "10i8"); 392 | assert(Literal::i16_suffixed(10), "10i16"); 393 | assert(Literal::i32_suffixed(10), "10i32"); 394 | assert(Literal::i64_suffixed(10), "10i64"); 395 | assert(Literal::i128_suffixed(10), "10i128"); 396 | assert(Literal::isize_suffixed(10), "10isize"); 397 | 398 | assert(Literal::u8_unsuffixed(10), "10"); 399 | assert(Literal::u16_unsuffixed(10), "10"); 400 | assert(Literal::u32_unsuffixed(10), "10"); 401 | assert(Literal::u64_unsuffixed(10), "10"); 402 | assert(Literal::u128_unsuffixed(10), "10"); 403 | assert(Literal::usize_unsuffixed(10), "10"); 404 | 405 | assert(Literal::i8_unsuffixed(10), "10"); 406 | assert(Literal::i16_unsuffixed(10), "10"); 407 | assert(Literal::i32_unsuffixed(10), "10"); 408 | assert(Literal::i64_unsuffixed(10), "10"); 409 | assert(Literal::i128_unsuffixed(10), "10"); 410 | assert(Literal::isize_unsuffixed(10), "10"); 411 | 412 | assert(Literal::i32_suffixed(-10), "-10i32"); 413 | assert(Literal::i32_unsuffixed(-10), "-10"); 414 | } 415 | 416 | #[test] 417 | fn literal_float() { 418 | #[track_caller] 419 | fn assert(literal: Literal, expected: &str) { 420 | assert_eq!(literal.to_string(), expected); 421 | } 422 | 423 | assert(Literal::f32_suffixed(10.0), "10f32"); 424 | assert(Literal::f32_suffixed(-10.0), "-10f32"); 425 | assert(Literal::f64_suffixed(10.0), "10f64"); 426 | assert(Literal::f64_suffixed(-10.0), "-10f64"); 427 | 428 | assert(Literal::f32_unsuffixed(10.0), "10.0"); 429 | assert(Literal::f32_unsuffixed(-10.0), "-10.0"); 430 | assert(Literal::f64_unsuffixed(10.0), "10.0"); 431 | 
assert(Literal::f64_unsuffixed(-10.0), "-10.0"); 432 | 433 | assert( 434 | Literal::f64_unsuffixed(1e100), 435 | "10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000.0", 436 | ); 437 | } 438 | 439 | #[test] 440 | fn literal_suffix() { 441 | fn token_count(p: &str) -> usize { 442 | p.parse::().unwrap().into_iter().count() 443 | } 444 | 445 | assert_eq!(token_count("999u256"), 1); 446 | assert_eq!(token_count("999r#u256"), 3); 447 | assert_eq!(token_count("1."), 1); 448 | assert_eq!(token_count("1.f32"), 3); 449 | assert_eq!(token_count("1.0_0"), 1); 450 | assert_eq!(token_count("1._0"), 3); 451 | assert_eq!(token_count("1._m"), 3); 452 | assert_eq!(token_count("\"\"s"), 1); 453 | assert_eq!(token_count("r\"\"r"), 1); 454 | assert_eq!(token_count("r#\"\"#r"), 1); 455 | assert_eq!(token_count("b\"\"b"), 1); 456 | assert_eq!(token_count("br\"\"br"), 1); 457 | assert_eq!(token_count("br#\"\"#br"), 1); 458 | assert_eq!(token_count("c\"\"c"), 1); 459 | assert_eq!(token_count("cr\"\"cr"), 1); 460 | assert_eq!(token_count("cr#\"\"#cr"), 1); 461 | assert_eq!(token_count("'c'c"), 1); 462 | assert_eq!(token_count("b'b'b"), 1); 463 | assert_eq!(token_count("0E"), 1); 464 | assert_eq!(token_count("0o0A"), 1); 465 | assert_eq!(token_count("0E--0"), 4); 466 | assert_eq!(token_count("0.0ECMA"), 1); 467 | } 468 | 469 | #[test] 470 | fn literal_iter_negative() { 471 | let negative_literal = Literal::i32_suffixed(-3); 472 | let tokens = TokenStream::from(TokenTree::Literal(negative_literal)); 473 | let mut iter = tokens.into_iter(); 474 | match iter.next().unwrap() { 475 | TokenTree::Punct(punct) => { 476 | assert_eq!(punct.as_char(), '-'); 477 | assert_eq!(punct.spacing(), Spacing::Alone); 478 | } 479 | unexpected => panic!("unexpected token {:?}", unexpected), 480 | } 481 | match iter.next().unwrap() { 482 | TokenTree::Literal(literal) => { 483 | assert_eq!(literal.to_string(), "3i32"); 484 | } 485 | unexpected => panic!("unexpected token {:?}", unexpected), 486 | } 487 | assert!(iter.next().is_none()); 488 | } 489 | 490 | #[test] 491 | fn literal_parse() { 492 | assert!("1".parse::().is_ok()); 493 | assert!("-1".parse::().is_ok()); 494 | assert!("-1u12".parse::().is_ok()); 495 | assert!("1.0".parse::().is_ok()); 496 | assert!("-1.0".parse::().is_ok()); 497 | assert!("-1.0f12".parse::().is_ok()); 498 | assert!("'a'".parse::().is_ok()); 499 | assert!("\"\n\"".parse::().is_ok()); 500 | assert!("0 1".parse::().is_err()); 501 | assert!(" 0".parse::().is_err()); 502 | assert!("0 ".parse::().is_err()); 503 | assert!("/* comment */0".parse::().is_err()); 504 | assert!("0/* comment */".parse::().is_err()); 505 | assert!("0// comment".parse::().is_err()); 506 | assert!("- 1".parse::().is_err()); 507 | assert!("- 1.0".parse::().is_err()); 508 | assert!("-\"\"".parse::().is_err()); 509 | } 510 | 511 | #[test] 512 | fn literal_span() { 513 | let positive = "0.1".parse::().unwrap(); 514 | let negative = "-0.1".parse::().unwrap(); 515 | let subspan = positive.subspan(1..2); 516 | 517 | #[cfg(not(span_locations))] 518 | { 519 | let _ = negative; 520 | assert!(subspan.is_none()); 521 | } 522 | 523 | #[cfg(span_locations)] 524 | { 525 | assert_eq!(positive.span().start().column, 0); 526 | assert_eq!(positive.span().end().column, 3); 527 | assert_eq!(negative.span().start().column, 0); 528 | assert_eq!(negative.span().end().column, 4); 529 | assert_eq!(subspan.unwrap().source_text().unwrap(), "."); 530 | } 531 | 532 | assert!(positive.subspan(1..4).is_none()); 533 | } 534 | 
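The `literal_span` test above is only meaningful with the optional `span-locations` feature. As a minimal illustration of the same API outside this test suite (a sketch assuming a consumer crate that depends on proc-macro2 with `features = ["span-locations"]`; not part of the repository), token positions can be read back after parsing ordinary source text:

use proc_macro2::TokenStream;

fn main() {
    let tokens: TokenStream = "let answer = 42;".parse().unwrap();
    for token in tokens {
        // With `span-locations` enabled, start()/end() return LineColumn,
        // where `line` is 1-based and `column` is 0-based, matching the
        // assertions in the tests above.
        let start = token.span().start();
        let end = token.span().end();
        println!("{token}: {}:{} .. {}:{}", start.line, start.column, end.line, end.column);
    }
}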
535 | #[cfg(span_locations)] 536 | #[test] 537 | fn source_text() { 538 | let input = " 𓀕 a z "; 539 | let mut tokens = input 540 | .parse::() 541 | .unwrap() 542 | .into_iter(); 543 | 544 | let first = tokens.next().unwrap(); 545 | assert_eq!("𓀕", first.span().source_text().unwrap()); 546 | 547 | let second = tokens.next().unwrap(); 548 | let third = tokens.next().unwrap(); 549 | assert_eq!("z", third.span().source_text().unwrap()); 550 | assert_eq!("a", second.span().source_text().unwrap()); 551 | } 552 | 553 | #[test] 554 | fn lifetimes() { 555 | let mut tokens = "'a 'static 'struct 'r#gen 'r#prefix#lifetime" 556 | .parse::() 557 | .unwrap() 558 | .into_iter(); 559 | assert!(match tokens.next() { 560 | Some(TokenTree::Punct(punct)) => { 561 | punct.as_char() == '\'' && punct.spacing() == Spacing::Joint 562 | } 563 | _ => false, 564 | }); 565 | assert!(match tokens.next() { 566 | Some(TokenTree::Ident(ident)) => ident == "a", 567 | _ => false, 568 | }); 569 | assert!(match tokens.next() { 570 | Some(TokenTree::Punct(punct)) => { 571 | punct.as_char() == '\'' && punct.spacing() == Spacing::Joint 572 | } 573 | _ => false, 574 | }); 575 | assert!(match tokens.next() { 576 | Some(TokenTree::Ident(ident)) => ident == "static", 577 | _ => false, 578 | }); 579 | assert!(match tokens.next() { 580 | Some(TokenTree::Punct(punct)) => { 581 | punct.as_char() == '\'' && punct.spacing() == Spacing::Joint 582 | } 583 | _ => false, 584 | }); 585 | assert!(match tokens.next() { 586 | Some(TokenTree::Ident(ident)) => ident == "struct", 587 | _ => false, 588 | }); 589 | assert!(match tokens.next() { 590 | Some(TokenTree::Punct(punct)) => { 591 | punct.as_char() == '\'' && punct.spacing() == Spacing::Joint 592 | } 593 | _ => false, 594 | }); 595 | assert!(match tokens.next() { 596 | Some(TokenTree::Ident(ident)) => ident == "r#gen", 597 | _ => false, 598 | }); 599 | assert!(match tokens.next() { 600 | Some(TokenTree::Punct(punct)) => { 601 | punct.as_char() == '\'' && punct.spacing() == Spacing::Joint 602 | } 603 | _ => false, 604 | }); 605 | assert!(match tokens.next() { 606 | Some(TokenTree::Ident(ident)) => ident == "r#prefix", 607 | _ => false, 608 | }); 609 | assert!(match tokens.next() { 610 | Some(TokenTree::Punct(punct)) => { 611 | punct.as_char() == '#' && punct.spacing() == Spacing::Alone 612 | } 613 | _ => false, 614 | }); 615 | assert!(match tokens.next() { 616 | Some(TokenTree::Ident(ident)) => ident == "lifetime", 617 | _ => false, 618 | }); 619 | 620 | "' a".parse::().unwrap_err(); 621 | "' r#gen".parse::().unwrap_err(); 622 | "' prefix#lifetime".parse::().unwrap_err(); 623 | "'prefix#lifetime".parse::().unwrap_err(); 624 | "'aa'bb".parse::().unwrap_err(); 625 | "'r#gen'a".parse::().unwrap_err(); 626 | } 627 | 628 | #[test] 629 | fn roundtrip() { 630 | fn roundtrip(p: &str) { 631 | println!("parse: {}", p); 632 | let s = p.parse::().unwrap().to_string(); 633 | println!("first: {}", s); 634 | let s2 = s.parse::().unwrap().to_string(); 635 | assert_eq!(s, s2); 636 | } 637 | roundtrip("a"); 638 | roundtrip("<<"); 639 | roundtrip("<<="); 640 | roundtrip( 641 | " 642 | 1 643 | 1.0 644 | 1f32 645 | 2f64 646 | 1usize 647 | 4isize 648 | 4e10 649 | 1_000 650 | 1_0i32 651 | 8u8 652 | 9 653 | 0 654 | 0xffffffffffffffffffffffffffffffff 655 | 1x 656 | 1u80 657 | 1f320 658 | ", 659 | ); 660 | roundtrip("'a"); 661 | roundtrip("'_"); 662 | roundtrip("'static"); 663 | roundtrip(r"'\u{10__FFFF}'"); 664 | roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\""); 665 | } 666 | 667 | #[test] 668 | fn fail() { 669 | fn 
fail(p: &str) { 670 | if let Ok(s) = p.parse::() { 671 | panic!("should have failed to parse: {}\n{:#?}", p, s); 672 | } 673 | } 674 | fail("' static"); 675 | fail("r#1"); 676 | fail("r#_"); 677 | fail("\"\\u{0000000}\""); // overlong unicode escape (rust allows at most 6 hex digits) 678 | fail("\"\\u{999999}\""); // outside of valid range of char 679 | fail("\"\\u{_0}\""); // leading underscore 680 | fail("\"\\u{}\""); // empty 681 | fail("b\"\r\""); // bare carriage return in byte string 682 | fail("r\"\r\""); // bare carriage return in raw string 683 | fail("\"\\\r \""); // backslash carriage return 684 | fail("'aa'aa"); 685 | fail("br##\"\"#"); 686 | fail("cr##\"\"#"); 687 | fail("\"\\\n\u{85}\r\""); 688 | } 689 | 690 | #[cfg(span_locations)] 691 | #[test] 692 | fn span_test() { 693 | check_spans( 694 | "\ 695 | /// This is a document comment 696 | testing 123 697 | { 698 | testing 234 699 | }", 700 | &[ 701 | (1, 0, 1, 30), // # 702 | (1, 0, 1, 30), // [ ... ] 703 | (1, 0, 1, 30), // doc 704 | (1, 0, 1, 30), // = 705 | (1, 0, 1, 30), // "This is..." 706 | (2, 0, 2, 7), // testing 707 | (2, 8, 2, 11), // 123 708 | (3, 0, 5, 1), // { ... } 709 | (4, 2, 4, 9), // testing 710 | (4, 10, 4, 13), // 234 711 | ], 712 | ); 713 | } 714 | 715 | #[cfg(procmacro2_semver_exempt)] 716 | #[test] 717 | fn default_span() { 718 | let start = Span::call_site().start(); 719 | assert_eq!(start.line, 1); 720 | assert_eq!(start.column, 0); 721 | let end = Span::call_site().end(); 722 | assert_eq!(end.line, 1); 723 | assert_eq!(end.column, 0); 724 | assert_eq!(Span::call_site().file(), ""); 725 | assert!(Span::call_site().local_file().is_none()); 726 | } 727 | 728 | #[cfg(procmacro2_semver_exempt)] 729 | #[test] 730 | fn span_join() { 731 | let source1 = "aaa\nbbb" 732 | .parse::() 733 | .unwrap() 734 | .into_iter() 735 | .collect::>(); 736 | let source2 = "ccc\nddd" 737 | .parse::() 738 | .unwrap() 739 | .into_iter() 740 | .collect::>(); 741 | 742 | assert!(source1[0].span().file() != source2[0].span().file()); 743 | assert_eq!(source1[0].span().file(), source1[1].span().file()); 744 | 745 | let joined1 = source1[0].span().join(source1[1].span()); 746 | let joined2 = source1[0].span().join(source2[0].span()); 747 | assert!(joined1.is_some()); 748 | assert!(joined2.is_none()); 749 | 750 | let start = joined1.unwrap().start(); 751 | let end = joined1.unwrap().end(); 752 | assert_eq!(start.line, 1); 753 | assert_eq!(start.column, 0); 754 | assert_eq!(end.line, 2); 755 | assert_eq!(end.column, 3); 756 | 757 | assert_eq!(joined1.unwrap().file(), source1[0].span().file()); 758 | } 759 | 760 | #[test] 761 | fn no_panic() { 762 | let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap(); 763 | assert!(s.parse::().is_err()); 764 | } 765 | 766 | #[test] 767 | fn punct_before_comment() { 768 | let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter(); 769 | match tts.next().unwrap() { 770 | TokenTree::Punct(tt) => { 771 | assert_eq!(tt.as_char(), '~'); 772 | assert_eq!(tt.spacing(), Spacing::Alone); 773 | } 774 | wrong => panic!("wrong token {:?}", wrong), 775 | } 776 | } 777 | 778 | #[test] 779 | fn joint_last_token() { 780 | // This test verifies that we match the behavior of libproc_macro *not* in 781 | // the range nightly-2020-09-06 through nightly-2020-09-10, in which this 782 | // behavior was temporarily broken. 
783 | // See https://github.com/rust-lang/rust/issues/76399 784 | 785 | let joint_punct = Punct::new(':', Spacing::Joint); 786 | let stream = TokenStream::from(TokenTree::Punct(joint_punct)); 787 | let punct = match stream.into_iter().next().unwrap() { 788 | TokenTree::Punct(punct) => punct, 789 | _ => unreachable!(), 790 | }; 791 | assert_eq!(punct.spacing(), Spacing::Joint); 792 | } 793 | 794 | #[test] 795 | fn raw_identifier() { 796 | let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter(); 797 | match tts.next().unwrap() { 798 | TokenTree::Ident(raw) => assert_eq!("r#dyn", raw.to_string()), 799 | wrong => panic!("wrong token {:?}", wrong), 800 | } 801 | assert!(tts.next().is_none()); 802 | } 803 | 804 | #[test] 805 | fn test_display_ident() { 806 | let ident = Ident::new("proc_macro", Span::call_site()); 807 | assert_eq!(format!("{ident}"), "proc_macro"); 808 | assert_eq!(format!("{ident:-^14}"), "proc_macro"); 809 | 810 | let ident = Ident::new_raw("proc_macro", Span::call_site()); 811 | assert_eq!(format!("{ident}"), "r#proc_macro"); 812 | assert_eq!(format!("{ident:-^14}"), "r#proc_macro"); 813 | } 814 | 815 | #[test] 816 | fn test_debug_ident() { 817 | let ident = Ident::new("proc_macro", Span::call_site()); 818 | let expected = if cfg!(span_locations) { 819 | "Ident { sym: proc_macro }" 820 | } else { 821 | "Ident(proc_macro)" 822 | }; 823 | assert_eq!(expected, format!("{:?}", ident)); 824 | 825 | let ident = Ident::new_raw("proc_macro", Span::call_site()); 826 | let expected = if cfg!(span_locations) { 827 | "Ident { sym: r#proc_macro }" 828 | } else { 829 | "Ident(r#proc_macro)" 830 | }; 831 | assert_eq!(expected, format!("{:?}", ident)); 832 | } 833 | 834 | #[test] 835 | fn test_display_tokenstream() { 836 | let tts = TokenStream::from_str("[a + 1]").unwrap(); 837 | assert_eq!(format!("{tts}"), "[a + 1]"); 838 | assert_eq!(format!("{tts:-^5}"), "[a + 1]"); 839 | } 840 | 841 | #[test] 842 | fn test_debug_tokenstream() { 843 | let tts = TokenStream::from_str("[a + 1]").unwrap(); 844 | 845 | #[cfg(not(span_locations))] 846 | let expected = "\ 847 | TokenStream [ 848 | Group { 849 | delimiter: Bracket, 850 | stream: TokenStream [ 851 | Ident { 852 | sym: a, 853 | }, 854 | Punct { 855 | char: '+', 856 | spacing: Alone, 857 | }, 858 | Literal { 859 | lit: 1, 860 | }, 861 | ], 862 | }, 863 | ]\ 864 | "; 865 | 866 | #[cfg(not(span_locations))] 867 | let expected_before_trailing_commas = "\ 868 | TokenStream [ 869 | Group { 870 | delimiter: Bracket, 871 | stream: TokenStream [ 872 | Ident { 873 | sym: a 874 | }, 875 | Punct { 876 | char: '+', 877 | spacing: Alone 878 | }, 879 | Literal { 880 | lit: 1 881 | } 882 | ] 883 | } 884 | ]\ 885 | "; 886 | 887 | #[cfg(span_locations)] 888 | let expected = "\ 889 | TokenStream [ 890 | Group { 891 | delimiter: Bracket, 892 | stream: TokenStream [ 893 | Ident { 894 | sym: a, 895 | span: bytes(2..3), 896 | }, 897 | Punct { 898 | char: '+', 899 | spacing: Alone, 900 | span: bytes(4..5), 901 | }, 902 | Literal { 903 | lit: 1, 904 | span: bytes(6..7), 905 | }, 906 | ], 907 | span: bytes(1..8), 908 | }, 909 | ]\ 910 | "; 911 | 912 | #[cfg(span_locations)] 913 | let expected_before_trailing_commas = "\ 914 | TokenStream [ 915 | Group { 916 | delimiter: Bracket, 917 | stream: TokenStream [ 918 | Ident { 919 | sym: a, 920 | span: bytes(2..3) 921 | }, 922 | Punct { 923 | char: '+', 924 | spacing: Alone, 925 | span: bytes(4..5) 926 | }, 927 | Literal { 928 | lit: 1, 929 | span: bytes(6..7) 930 | } 931 | ], 932 | span: bytes(1..8) 933 | } 934 | ]\ 
935 | "; 936 | 937 | let actual = format!("{:#?}", tts); 938 | if actual.ends_with(",\n]") { 939 | assert_eq!(expected, actual); 940 | } else { 941 | assert_eq!(expected_before_trailing_commas, actual); 942 | } 943 | } 944 | 945 | #[test] 946 | fn default_tokenstream_is_empty() { 947 | let default_token_stream = ::default(); 948 | 949 | assert!(default_token_stream.is_empty()); 950 | } 951 | 952 | #[test] 953 | fn tokenstream_size_hint() { 954 | let tokens = "a b (c d) e".parse::().unwrap(); 955 | 956 | assert_eq!(tokens.into_iter().size_hint(), (4, Some(4))); 957 | } 958 | 959 | #[test] 960 | fn tuple_indexing() { 961 | // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322 962 | let mut tokens = "tuple.0.0".parse::().unwrap().into_iter(); 963 | assert_eq!("tuple", tokens.next().unwrap().to_string()); 964 | assert_eq!(".", tokens.next().unwrap().to_string()); 965 | assert_eq!("0.0", tokens.next().unwrap().to_string()); 966 | assert!(tokens.next().is_none()); 967 | } 968 | 969 | #[cfg(span_locations)] 970 | #[test] 971 | fn non_ascii_tokens() { 972 | check_spans("// abc", &[]); 973 | check_spans("// ábc", &[]); 974 | check_spans("// abc x", &[]); 975 | check_spans("// ábc x", &[]); 976 | check_spans("/* abc */ x", &[(1, 10, 1, 11)]); 977 | check_spans("/* ábc */ x", &[(1, 10, 1, 11)]); 978 | check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]); 979 | check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]); 980 | check_spans("/*** abc */ x", &[(1, 12, 1, 13)]); 981 | check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]); 982 | check_spans(r#""abc""#, &[(1, 0, 1, 5)]); 983 | check_spans(r#""ábc""#, &[(1, 0, 1, 5)]); 984 | check_spans(r##"r#"abc"#"##, &[(1, 0, 1, 8)]); 985 | check_spans(r##"r#"ábc"#"##, &[(1, 0, 1, 8)]); 986 | check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]); 987 | check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]); 988 | check_spans("'a'", &[(1, 0, 1, 3)]); 989 | check_spans("'á'", &[(1, 0, 1, 3)]); 990 | check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]); 991 | check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]); 992 | check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]); 993 | check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]); 994 | check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]); 995 | check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]); 996 | check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]); 997 | check_spans("/*! 
á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]); 998 | check_spans("abc", &[(1, 0, 1, 3)]); 999 | check_spans("ábc", &[(1, 0, 1, 3)]); 1000 | check_spans("ábć", &[(1, 0, 1, 3)]); 1001 | check_spans("abc// foo", &[(1, 0, 1, 3)]); 1002 | check_spans("ábc// foo", &[(1, 0, 1, 3)]); 1003 | check_spans("ábć// foo", &[(1, 0, 1, 3)]); 1004 | check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]); 1005 | } 1006 | 1007 | #[cfg(span_locations)] 1008 | fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) { 1009 | let ts = p.parse::().unwrap(); 1010 | check_spans_internal(ts, &mut lines); 1011 | assert!(lines.is_empty(), "leftover ranges: {:?}", lines); 1012 | } 1013 | 1014 | #[cfg(span_locations)] 1015 | fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) { 1016 | for i in ts { 1017 | if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() { 1018 | *lines = rest; 1019 | 1020 | let start = i.span().start(); 1021 | assert_eq!(start.line, sline, "sline did not match for {}", i); 1022 | assert_eq!(start.column, scol, "scol did not match for {}", i); 1023 | 1024 | let end = i.span().end(); 1025 | assert_eq!(end.line, eline, "eline did not match for {}", i); 1026 | assert_eq!(end.column, ecol, "ecol did not match for {}", i); 1027 | 1028 | if let TokenTree::Group(g) = i { 1029 | check_spans_internal(g.stream().clone(), lines); 1030 | } 1031 | } 1032 | } 1033 | } 1034 | 1035 | #[test] 1036 | fn whitespace() { 1037 | // space, horizontal tab, vertical tab, form feed, carriage return, line 1038 | // feed, non-breaking space, left-to-right mark, right-to-left mark 1039 | let various_spaces = " \t\u{b}\u{c}\r\n\u{a0}\u{200e}\u{200f}"; 1040 | let tokens = various_spaces.parse::().unwrap(); 1041 | assert_eq!(tokens.into_iter().count(), 0); 1042 | 1043 | let lone_carriage_returns = " \r \r\r\n "; 1044 | lone_carriage_returns.parse::().unwrap(); 1045 | } 1046 | 1047 | #[test] 1048 | fn byte_order_mark() { 1049 | let string = "\u{feff}foo"; 1050 | let tokens = string.parse::().unwrap(); 1051 | match tokens.into_iter().next().unwrap() { 1052 | TokenTree::Ident(ident) => assert_eq!(ident, "foo"), 1053 | _ => unreachable!(), 1054 | } 1055 | 1056 | let string = "foo\u{feff}"; 1057 | string.parse::().unwrap_err(); 1058 | } 1059 | 1060 | #[cfg(span_locations)] 1061 | fn create_span() -> proc_macro2::Span { 1062 | let tts: TokenStream = "1".parse().unwrap(); 1063 | match tts.into_iter().next().unwrap() { 1064 | TokenTree::Literal(literal) => literal.span(), 1065 | _ => unreachable!(), 1066 | } 1067 | } 1068 | 1069 | #[cfg(span_locations)] 1070 | #[test] 1071 | fn test_invalidate_current_thread_spans() { 1072 | let actual = format!("{:#?}", create_span()); 1073 | assert_eq!(actual, "bytes(1..2)"); 1074 | let actual = format!("{:#?}", create_span()); 1075 | assert_eq!(actual, "bytes(3..4)"); 1076 | 1077 | proc_macro2::extra::invalidate_current_thread_spans(); 1078 | 1079 | let actual = format!("{:#?}", create_span()); 1080 | // Test that span offsets have been reset after the call 1081 | // to invalidate_current_thread_spans() 1082 | assert_eq!(actual, "bytes(1..2)"); 1083 | } 1084 | 1085 | #[cfg(span_locations)] 1086 | #[test] 1087 | #[should_panic(expected = "Invalid span with no related FileInfo!")] 1088 | fn test_use_span_after_invalidation() { 1089 | let span = create_span(); 1090 | 1091 | proc_macro2::extra::invalidate_current_thread_spans(); 1092 | 1093 | span.source_text(); 1094 | } 1095 | 
-------------------------------------------------------------------------------- /tests/test_fmt.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::from_iter_instead_of_collect)] 2 | 3 | use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree}; 4 | use std::iter; 5 | 6 | #[test] 7 | fn test_fmt_group() { 8 | let ident = Ident::new("x", Span::call_site()); 9 | let inner = TokenStream::from_iter(iter::once(TokenTree::Ident(ident))); 10 | let parens_empty = Group::new(Delimiter::Parenthesis, TokenStream::new()); 11 | let parens_nonempty = Group::new(Delimiter::Parenthesis, inner.clone()); 12 | let brackets_empty = Group::new(Delimiter::Bracket, TokenStream::new()); 13 | let brackets_nonempty = Group::new(Delimiter::Bracket, inner.clone()); 14 | let braces_empty = Group::new(Delimiter::Brace, TokenStream::new()); 15 | let braces_nonempty = Group::new(Delimiter::Brace, inner.clone()); 16 | let none_empty = Group::new(Delimiter::None, TokenStream::new()); 17 | let none_nonempty = Group::new(Delimiter::None, inner); 18 | 19 | // Matches libproc_macro. 20 | assert_eq!("()", parens_empty.to_string()); 21 | assert_eq!("(x)", parens_nonempty.to_string()); 22 | assert_eq!("[]", brackets_empty.to_string()); 23 | assert_eq!("[x]", brackets_nonempty.to_string()); 24 | assert_eq!("{ }", braces_empty.to_string()); 25 | assert_eq!("{ x }", braces_nonempty.to_string()); 26 | assert_eq!("", none_empty.to_string()); 27 | assert_eq!("x", none_nonempty.to_string()); 28 | } 29 | -------------------------------------------------------------------------------- /tests/test_size.rs: -------------------------------------------------------------------------------- 1 | #![allow(unused_attributes)] 2 | 3 | extern crate proc_macro; 4 | 5 | use std::mem; 6 | 7 | #[rustversion::attr(before(1.64), ignore = "requires Rust 1.64+")] 8 | #[cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")] 9 | #[cfg_attr(randomize_layout, ignore = "disabled due to randomized layout")] 10 | #[test] 11 | fn test_proc_macro_size() { 12 | assert_eq!(mem::size_of::<proc_macro::Span>(), 4); 13 | assert_eq!(mem::size_of::<Option<proc_macro::Span>>(), 4); 14 | assert_eq!(mem::size_of::<proc_macro::Group>(), 20); 15 | assert_eq!(mem::size_of::<proc_macro::Ident>(), 12); 16 | assert_eq!(mem::size_of::<proc_macro::Punct>(), 8); 17 | assert_eq!(mem::size_of::<proc_macro::Literal>(), 16); 18 | assert_eq!(mem::size_of::<proc_macro::TokenStream>(), 4); 19 | } 20 | 21 | #[cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")] 22 | #[cfg_attr(randomize_layout, ignore = "disabled due to randomized layout")] 23 | #[cfg_attr(wrap_proc_macro, ignore = "wrapper mode")] 24 | #[cfg_attr(span_locations, ignore = "span locations are on")] 25 | #[test] 26 | fn test_proc_macro2_fallback_size_without_locations() { 27 | assert_eq!(mem::size_of::<proc_macro2::Span>(), 0); 28 | assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 1); 29 | assert_eq!(mem::size_of::<proc_macro2::Group>(), 16); 30 | assert_eq!(mem::size_of::<proc_macro2::Ident>(), 24); 31 | assert_eq!(mem::size_of::<proc_macro2::Punct>(), 8); 32 | assert_eq!(mem::size_of::<proc_macro2::Literal>(), 24); 33 | assert_eq!(mem::size_of::<proc_macro2::TokenStream>(), 8); 34 | } 35 | 36 | #[cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")] 37 | #[cfg_attr(randomize_layout, ignore = "disabled due to randomized layout")] 38 | #[cfg_attr(wrap_proc_macro, ignore = "wrapper mode")] 39 | #[cfg_attr(not(span_locations), ignore = "span locations are off")] 40 | #[test] 41 | fn test_proc_macro2_fallback_size_with_locations() { 42 | assert_eq!(mem::size_of::<proc_macro2::Span>(), 8); 43 | assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 12); 44 | assert_eq!(mem::size_of::<proc_macro2::Group>(), 24); 45 | 
assert_eq!(mem::size_of::<proc_macro2::Ident>(), 32); 46 | assert_eq!(mem::size_of::<proc_macro2::Punct>(), 16); 47 | assert_eq!(mem::size_of::<proc_macro2::Literal>(), 32); 48 | assert_eq!(mem::size_of::<proc_macro2::TokenStream>(), 8); 49 | } 50 | 51 | #[rustversion::attr(before(1.71), ignore = "requires Rust 1.71+")] 52 | #[cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")] 53 | #[cfg_attr(randomize_layout, ignore = "disabled due to randomized layout")] 54 | #[cfg_attr(not(wrap_proc_macro), ignore = "fallback mode")] 55 | #[cfg_attr(span_locations, ignore = "span locations are on")] 56 | #[test] 57 | fn test_proc_macro2_wrapper_size_without_locations() { 58 | assert_eq!(mem::size_of::<proc_macro2::Span>(), 4); 59 | assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 8); 60 | assert_eq!(mem::size_of::<proc_macro2::Group>(), 24); 61 | assert_eq!(mem::size_of::<proc_macro2::Ident>(), 24); 62 | assert_eq!(mem::size_of::<proc_macro2::Punct>(), 12); 63 | assert_eq!(mem::size_of::<proc_macro2::Literal>(), 24); 64 | assert_eq!(mem::size_of::<proc_macro2::TokenStream>(), 32); 65 | } 66 | 67 | #[rustversion::attr(before(1.65), ignore = "requires Rust 1.65+")] 68 | #[cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")] 69 | #[cfg_attr(randomize_layout, ignore = "disabled due to randomized layout")] 70 | #[cfg_attr(not(wrap_proc_macro), ignore = "fallback mode")] 71 | #[cfg_attr(not(span_locations), ignore = "span locations are off")] 72 | #[test] 73 | fn test_proc_macro2_wrapper_size_with_locations() { 74 | assert_eq!(mem::size_of::<proc_macro2::Span>(), 12); 75 | assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 12); 76 | assert_eq!(mem::size_of::<proc_macro2::Group>(), 32); 77 | assert_eq!(mem::size_of::<proc_macro2::Ident>(), 32); 78 | assert_eq!(mem::size_of::<proc_macro2::Punct>(), 20); 79 | assert_eq!(mem::size_of::<proc_macro2::Literal>(), 32); 80 | assert_eq!(mem::size_of::<proc_macro2::TokenStream>(), 32); 81 | } 82 | -------------------------------------------------------------------------------- /tests/ui/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "proc-macro2-ui-test" 3 | version = "0.0.0" 4 | authors = ["David Tolnay <dtolnay@gmail.com>"] 5 | edition = "2018" 6 | publish = false 7 | 8 | [[test]] 9 | name = "compiletest" 10 | path = "compiletest.rs" 11 | 12 | [dev-dependencies] 13 | proc-macro2 = { path = "../.." 
} 14 | rustversion = "1.0" 15 | trybuild = { version = "1.0.108", features = ["diff"] } 16 | -------------------------------------------------------------------------------- /tests/ui/compiletest.rs: -------------------------------------------------------------------------------- 1 | #[rustversion::attr(not(nightly), ignore = "requires nightly")] 2 | #[rustversion::attr(nightly, cfg_attr(miri, ignore = "incompatible with miri"))] 3 | #[test] 4 | fn ui() { 5 | let t = trybuild::TestCases::new(); 6 | t.compile_fail("test-*.rs"); 7 | } 8 | -------------------------------------------------------------------------------- /tests/ui/test-not-send.rs: -------------------------------------------------------------------------------- 1 | use proc_macro2::Span; 2 | 3 | fn main() { 4 | fn requires_send<T: Send>() {} 5 | requires_send::<Span>(); 6 | } 7 | -------------------------------------------------------------------------------- /tests/ui/test-not-send.stderr: -------------------------------------------------------------------------------- 1 | error[E0277]: `proc_macro::Span` cannot be sent between threads safely 2 | --> test-not-send.rs:5:21 3 | | 4 | 5 | requires_send::<Span>(); 5 | | ^^^^ `proc_macro::Span` cannot be sent between threads safely 6 | | 7 | = help: within `Span`, the trait `Send` is not implemented for `proc_macro::Span` 8 | note: required because it appears within the type `proc_macro2::imp::Span` 9 | --> $WORKSPACE/src/wrapper.rs 10 | | 11 | | pub(crate) enum Span { 12 | | ^^^^ 13 | note: required because it appears within the type `Span` 14 | --> $WORKSPACE/src/lib.rs 15 | | 16 | | pub struct Span { 17 | | ^^^^ 18 | note: required by a bound in `requires_send` 19 | --> test-not-send.rs:4:25 20 | | 21 | 4 | fn requires_send<T: Send>() {} 22 | | ^^^^ required by this bound in `requires_send` 23 | 24 | error[E0277]: `Rc<()>` cannot be sent between threads safely 25 | --> test-not-send.rs:5:21 26 | | 27 | 5 | requires_send::<Span>(); 28 | | ^^^^ `Rc<()>` cannot be sent between threads safely 29 | | 30 | = help: within `Span`, the trait `Send` is not implemented for `Rc<()>` 31 | note: required because it appears within the type `PhantomData<Rc<()>>` 32 | --> $RUST/core/src/marker.rs 33 | | 34 | | pub struct PhantomData<T: ?Sized>; 35 | | ^^^^^^^^^^^ 36 | note: required because it appears within the type `proc_macro2::marker::ProcMacroAutoTraits` 37 | --> $WORKSPACE/src/marker.rs 38 | | 39 | | pub(crate) struct ProcMacroAutoTraits(PhantomData<Rc<()>>); 40 | | ^^^^^^^^^^^^^^^^^^^ 41 | note: required because it appears within the type `Span` 42 | --> $WORKSPACE/src/lib.rs 43 | | 44 | | pub struct Span { 45 | | ^^^^ 46 | note: required by a bound in `requires_send` 47 | --> test-not-send.rs:4:25 48 | | 49 | 4 | fn requires_send<T: Send>() {} 50 | | ^^^^ required by this bound in `requires_send` 51 | --------------------------------------------------------------------------------