├── .gitignore ├── rust-toolchain ├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE │ ├── config.yml │ ├── custom_issue.md │ ├── feature_request.md │ └── bug_report.md └── PULL_REQUEST_TEMPLATE.md ├── loess-rust-lex ├── src │ ├── lib.rs │ ├── lex.rs │ ├── lex │ │ ├── token │ │ │ ├── delim.rs │ │ │ ├── life.rs │ │ │ ├── literal.rs │ │ │ └── punct.rs │ │ ├── token.rs │ │ └── keywords.rs │ └── ident.rs └── Cargo.toml ├── loess-rust ├── src │ ├── paths.rs │ ├── macro.rs │ ├── lex.rs │ ├── statement.rs │ ├── attributes.rs │ ├── vis.rs │ ├── paths │ │ └── simple.rs │ ├── statement │ │ └── let.rs │ ├── lib.rs │ ├── items │ │ ├── extern_crate.rs │ │ └── mod.rs │ ├── expr │ │ ├── block.rs │ │ └── literal.rs │ ├── items.rs │ ├── macro │ │ └── invocation.rs │ └── expr.rs ├── CHANGELOG.md ├── Cargo.toml └── README.md ├── loess-rust-opaque ├── CHANGELOG.md ├── README.md ├── Cargo.toml └── src │ └── lib.rs ├── .markdownlint.jsonc ├── .editorconfig ├── .vscode ├── extensions.json ├── settings.json ├── documentation.code-snippets └── Loess.code-snippets ├── rustfmt.toml ├── supply-chain ├── audits.toml ├── config.toml └── imports.lock ├── CODEOWNERS ├── .gitattributes ├── CODE_OF_CONDUCT.md ├── Cargo.toml ├── loess-rust-fragments ├── Cargo.toml └── src │ └── lib.rs ├── loess ├── src │ ├── macros.rs │ ├── scaffold │ │ └── groups.rs │ ├── proc_macro2_impls.rs │ ├── macros │ │ ├── punctuation.rs │ │ ├── words.rs │ │ ├── lifetimes.rs │ │ └── grammar.rs │ ├── stateful.rs │ └── scaffold.rs ├── Cargo.toml ├── CHANGELOG.md └── tests │ └── quote_expansions.rs ├── COPYRIGHT.md ├── Cargo.lock ├── CONTRIBUTING.md ├── SECURITY.md ├── README.md ├── deny.toml └── labels.json /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | -------------------------------------------------------------------------------- /rust-toolchain: -------------------------------------------------------------------------------- 1 | stable -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: Tamschi 2 | -------------------------------------------------------------------------------- /loess-rust-lex/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod ident; 2 | pub mod lex; 3 | -------------------------------------------------------------------------------- /loess-rust/src/paths.rs: -------------------------------------------------------------------------------- 1 | //! TODO 2 | 3 | pub mod simple; 4 | -------------------------------------------------------------------------------- /loess-rust/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Loess-Rust Changelog 2 | 3 | ## 0.1.0 4 | 5 | 2025-05-02 6 | 7 | Initial release. 8 | -------------------------------------------------------------------------------- /loess-rust-opaque/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Loess-Rust-Opaque Changelog 2 | 3 | ## 0.1.0 4 | 5 | 2025-05-02 6 | 7 | Initial release. 8 | -------------------------------------------------------------------------------- /loess-rust/src/macro.rs: -------------------------------------------------------------------------------- 1 | //! 
[macro](https://doc.rust-lang.org/reference/macros.html#r-macro): Macros 2 | 3 | pub mod invocation; -------------------------------------------------------------------------------- /.markdownlint.jsonc: -------------------------------------------------------------------------------- 1 | { 2 | "line-length": false, 3 | "no-duplicate-heading": false, 4 | "no-inline-html": false 5 | } -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | end_of_line = lf 5 | indent_style = tab 6 | 7 | [*.{md,yml}] 8 | indent_style = space 9 | indent_size = 2 10 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": [ 3 | "rust-lang.rust-analyzer", 4 | ], 5 | "unwantedRecommendations": [ 6 | "rust-lang.rust" 7 | ] 8 | } -------------------------------------------------------------------------------- /loess-rust/src/lex.rs: -------------------------------------------------------------------------------- 1 | //! lex: 2 | //! [Lexical structure](https://doc.rust-lang.org/stable/reference/lexical-structure.html) 3 | 4 | pub mod keywords; 5 | pub mod token; 6 | -------------------------------------------------------------------------------- /loess-rust-lex/src/lex.rs: -------------------------------------------------------------------------------- 1 | //! lex: 2 | //! [Lexical structure](https://doc.rust-lang.org/stable/reference/lexical-structure.html) 3 | 4 | pub mod keywords; 5 | pub mod token; 6 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | hard_tabs = true 2 | newline_style = "Unix" 3 | use_field_init_shorthand = true 4 | use_try_shorthand = true 5 | 6 | unstable_features = true 7 | imports_granularity = "Crate" 8 | -------------------------------------------------------------------------------- /supply-chain/audits.toml: -------------------------------------------------------------------------------- 1 | 2 | # cargo-vet audits file 3 | 4 | [[audits.proc-macro2]] 5 | who = "Tamme Schichler " 6 | criteria = "safe-to-deploy" 7 | delta = "1.0.94 -> 1.0.95" 8 | -------------------------------------------------------------------------------- /loess-rust-lex/src/lex/token/delim.rs: -------------------------------------------------------------------------------- 1 | //! [lex.token.delim](https://doc.rust-lang.org/stable/reference/tokens.html#r-lex.token.delim): Delimiters 2 | 3 | pub use loess::scaffold::{CurlyBraces, Parentheses, SquareBrackets}; 4 | -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | # See 2 | # or . 
3 | 4 | * @Tamschi 5 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * -text 2 | 3 | /.git* text eol=lf 4 | 5 | *.json text eol=lf 6 | *.md text eol=lf 7 | *.rs text eol=lf 8 | *.toml text eol=lf 9 | *.txt text eol=lf 10 | *.yml text eol=lf 11 | 12 | .editorconfig text eol=lf 13 | Cargo.lock text eol=lf 14 | -------------------------------------------------------------------------------- /loess-rust-lex/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "loess-rust-lex" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [dependencies] 7 | loess = { version = "0.2.3", path = "../loess" } 8 | proc-macro2 = { version = "1.0.95", default-features = false } 9 | 10 | [package.metadata.cargo-semver-checks.lints] 11 | workspace = true 12 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | This repository follows [the same code of conduct as the Rust project](https://www.rust-lang.org/policies/code-of-conduct). 4 | However, since this project isn't directly part of the latter, you should contact me at [tamme@schichler.dev](mailto:tamme@schichler.dev) (email or XMPP) in case there's an issue in this regard. 5 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | resolver = "3" 3 | members = [ 4 | "loess", 5 | "loess-rust", 6 | "loess-rust-fragments", 7 | "loess-rust-lex", 8 | "loess-rust-opaque", 9 | ] 10 | 11 | [workspace.metadata.cargo-semver-checks.lints] 12 | function_marked_deprecated = "warn" 13 | type_marked_deprecated = "warn" 14 | type_method_marked_deprecated = "warn" 15 | -------------------------------------------------------------------------------- /loess-rust-fragments/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "loess-rust-fragments" 3 | version = "0.1.0" 4 | edition = "2024" 5 | 6 | [dependencies] 7 | loess = { version = "0.2.3", path = "../loess" } 8 | loess-rust-lex = { version = "0.1.0", path = "../loess-rust-lex" } 9 | proc-macro2 = { version = "1.0.95", default-features = false } 10 | 11 | [package.metadata.cargo-semver-checks.lints] 12 | workspace = true 13 | -------------------------------------------------------------------------------- /supply-chain/config.toml: -------------------------------------------------------------------------------- 1 | 2 | # cargo-vet config file 3 | 4 | [cargo-vet] 5 | version = "0.10" 6 | 7 | [imports.google] 8 | url = "https://raw.githubusercontent.com/google/supply-chain/main/audits.toml" 9 | 10 | [policy.loess] 11 | audit-as-crates-io = true 12 | 13 | [[exemptions.loess]] 14 | version = "0.2.2" 15 | criteria = "safe-to-deploy" 16 | 17 | [[exemptions.syn]] 18 | version = "2.0.100" 19 | criteria = "safe-to-deploy" 20 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | # See . 
2 | blank_issues_enabled: false 3 | contact_links: 4 | - name: Discuss anything else 5 | url: https://github.com/Tamschi/Asteracea/discussions 6 | about: For questions, ideas or general discussion, please use the Discussions tab linked here. 7 | - name: Show me your project 8 | url: https://github.com/Tamschi/Asteracea/discussions/categories/show-and-tell 9 | about: I'd love to hear about your projects using this crate here! 10 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/custom_issue.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Custom Issue 3 | about: Open an issue not covered by the other templates 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 18 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "rust-analyzer.procMacro.enable": true, 3 | "todo-tree.general.tags": [ 4 | "BUG", 5 | "HACK", 6 | "FIXME", 7 | "TODO", 8 | "XXX", 9 | "todo!" 10 | ], 11 | "todo-tree.regex.regex": "(($TAGS)|^\\s*- \\[ \\])", 12 | "rust-analyzer.cargo.allFeatures": true, 13 | "rust-analyzer.cargo.buildScripts.enable": true, 14 | "cSpell.words": [ 15 | "adler", 16 | "ansi", 17 | "ascii", 18 | "asteracea", 19 | "binstall", 20 | "bitbucket", 21 | "github", 22 | "gitlab", 23 | "preprocess", 24 | "punct", 25 | "Punct", 26 | "RUSTSEC", 27 | "Schichler", 28 | "Tamme" 29 | ], 30 | "cSpell.language": "en,en-GB" 31 | } -------------------------------------------------------------------------------- /loess/src/macros.rs: -------------------------------------------------------------------------------- 1 | mod grammar; 2 | mod lifetimes; 3 | mod punctuation; 4 | mod quotes; 5 | mod words; 6 | 7 | #[doc(hidden)] 8 | pub mod __ { 9 | #![allow(missing_docs)] // Internal. 10 | 11 | pub use core::{ 12 | clone::Clone, compile_error, concat, convert::From, debug_assert, iter::Extend, 13 | primitive::bool, result::Result, stringify, 14 | }; 15 | 16 | pub use std::string::ToString; 17 | 18 | pub use proc_macro2::{ 19 | Delimiter::{Brace, Bracket, Parenthesis}, 20 | Ident, Punct, Spacing, Span, TokenStream, TokenTree, 21 | }; 22 | 23 | pub use super::quotes::{ 24 | Paste, block_directive, grouped, quote_one2, raw, rust_statement_directive, strip_dot, tt, 25 | }; 26 | } 27 | -------------------------------------------------------------------------------- /COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | # Copyright 2 | 3 | Loess and its associated crates in this repository are licensed under either of 4 | 5 | * Apache License, Version 2.0 6 | ([LICENSE-APACHE](LICENSE-APACHE) or ) 7 | * MIT license 8 | ([LICENSE-MIT](LICENSE-MIT) or ) 9 | 10 | at your option. 11 | 12 | Copyrights in the library/the libraries are retained by their contributors. 13 | No copyright assignment is required to contribute to the project. 14 | 15 | Unless you explicitly state otherwise, any contribution intentionally submitted 16 | for inclusion in the work by you, as defined in the Apache-2.0 license, shall be 17 | dual licensed as above, without any additional terms or conditions. 
18 | -------------------------------------------------------------------------------- /loess-rust/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "loess-rust" 3 | version = "0.1.0+0.2" 4 | authors = ["Tamme Schichler "] 5 | edition = "2024" 6 | rust-version = "1.86" 7 | description = "Loess-based Rust grammar (as needed)." 8 | readme = true 9 | repository = "https://github.com/Tamschi/loess" 10 | license = "MIT OR Apache-2.0" 11 | keywords = ["proc-macro", "grammar", "parser"] 12 | categories = ["development-tools::procedural-macro-helpers"] 13 | 14 | publish = false 15 | 16 | [dependencies] 17 | loess-rust-lex = { version = "0.1.0", path = "../loess-rust-lex" } 18 | loess = { version = "0.2.3", path = "../loess" } 19 | proc-macro2 = { version = "1.0.95", default-features = false } 20 | 21 | [package.metadata.cargo-semver-checks.lints] 22 | workspace = true 23 | -------------------------------------------------------------------------------- /loess-rust/src/statement.rs: -------------------------------------------------------------------------------- 1 | //! [statement](https://doc.rust-lang.org/reference/statements.html#r-statement): Statements 2 | 3 | use loess::{grammar, scaffold::Greedy}; 4 | 5 | use crate::{attributes::OuterAttribute, items::Item, lex::token::punct::Semi, r#macro::invocation::MacroInvocationSemi, statement::r#let::LetStatement}; 6 | 7 | pub mod r#let; 8 | 9 | grammar! { 10 | #[derive(Clone)] 11 | #[non_exhaustive] 12 | /// [Statement](https://doc.rust-lang.org/reference/statements.html?highlight=statement#r-statement.syntax) 13 | pub enum Statement: PeekFrom, PopFrom, IntoTokens { 14 | Semi(Semi), 15 | Item(Item), 16 | LetStatement(LetStatement), 17 | OuterAttributesMacroInvocationSemi(Greedy>, MacroInvocationSemi), 18 | } else "Expected statement."; 19 | } 20 | -------------------------------------------------------------------------------- /loess/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "loess" 3 | version = "0.2.5" 4 | authors = ["Tamme Schichler "] 5 | edition = "2024" 6 | rust-version = "1.86" 7 | description = "Small grammar- and parser-generator for proc macros that provides great UX." 8 | # documentation intentionally defaulted 9 | readme = "../README.md" 10 | # homepage 11 | repository = "https://github.com/Tamschi/loess" 12 | license = "MIT OR Apache-2.0" 13 | keywords = ["proc-macro", "grammar", "parser", "generator", "dsl"] 14 | categories = ["development-tools::procedural-macro-helpers"] 15 | 16 | publish = false 17 | 18 | [features] 19 | 20 | [dependencies] 21 | proc-macro2 = { version = "1.0.34", features = ["span-locations"] } 22 | 23 | [package.metadata.cargo-semver-checks.lints] 24 | workspace = true 25 | -------------------------------------------------------------------------------- /loess-rust-opaque/README.md: -------------------------------------------------------------------------------- 1 | # Loess-Rust-Opaque 2 | 3 | `loess-rust-opaque` is a companion crate to [`loess-rust`](https://crates.io/crates/loess-rust) that contains additional *opaque* grammar types not yet implemented natively for [Loess](https://crates.io/crates/loess). In practice, here that usually means Syn-wrappers. 4 | 5 | Note that this crate will **accidentally** accept unstable grammar. Narrowing that towards stable Rust grammar only is not considered a breaking change. 
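For illustration only, the general shape of such a Syn-backed opaque wrapper looks roughly like the sketch below; the `OpaqueType` name and its methods are hypothetical, and the real types in this crate plug into Loess's `PeekFrom`/`PopFrom`/`IntoTokens` traits rather than exposing free-standing methods.

```rust
// Hypothetical sketch of the "opaque wrapper" idea: parsing is deferred to
// Syn, and the parsed node is then carried around as a single, unexamined unit.
use proc_macro2::TokenStream;
use quote::ToTokens;

/// Wraps a Syn-parsed type so a grammar can carry it without modelling it.
pub struct OpaqueType(pub syn::Type);

impl OpaqueType {
	/// Defers parsing entirely to Syn.
	pub fn parse(tokens: TokenStream) -> syn::Result<Self> {
		syn::parse2(tokens).map(Self)
	}

	/// Re-emits the wrapped node unchanged.
	pub fn extend_tokens(&self, output: &mut TokenStream) {
		self.0.to_tokens(output);
	}
}
```

Because parsing is deferred to Syn, whatever Syn accepts (including not-yet-stable grammar) is accepted here too, which is where the caveat above comes from.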
6 | 7 | When a part of the grammar becomes available through `loess-rust`, the version available through `loess-rust-opaque` is marked deprecated. Where compatible, which should usually be the case, the latter may also be switched over to that implementation as a non-breaking change. 8 | 9 | How to read the version number: 10 | 11 | After the "+", the most-major version of the (most directly) compatible Loess releases is listed. 12 | -------------------------------------------------------------------------------- /loess-rust/src/attributes.rs: -------------------------------------------------------------------------------- 1 | //! [attributes](https://doc.rust-lang.org/stable/reference/attributes.html#r-attributes): Attributes 2 | 3 | use loess::{grammar, scaffold::SquareBrackets}; 4 | 5 | use crate::lex::token::punct::{Not, Pound}; 6 | 7 | grammar! { 8 | #[derive(Clone)] 9 | #[non_exhaustive] 10 | /// [InnerAttribute](https://doc.rust-lang.org/reference/attributes.html#grammar-InnerAttribute) 11 | pub struct InnerAttribute: PeekFrom, PopFrom, IntoTokens { 12 | pound: Pound, 13 | not: Not, 14 | /// Continue inside with [`Attr`]. 15 | brackets: SquareBrackets, 16 | } 17 | 18 | #[derive(Clone)] 19 | #[non_exhaustive] 20 | /// [OuterAttribute](https://doc.rust-lang.org/reference/attributes.html#grammar-OuterAttribute) 21 | pub struct OuterAttribute: PeekFrom, PopFrom, IntoTokens { 22 | pound: Pound, 23 | /// Continue inside with [`Attr`]. 24 | brackets: SquareBrackets, 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /loess-rust-opaque/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "loess-rust-opaque" 3 | version = "0.1.0+0.2" 4 | authors = ["Tamme Schichler "] 5 | edition = "2024" 6 | rust-version = "1.86" 7 | description = "Loess-compatible opaque wrappers for Syn-parsed tokens (as needed)." 8 | readme = true 9 | repository = "https://github.com/Tamschi/loess" 10 | license = "MIT OR Apache-2.0" 11 | keywords = ["proc-macro", "grammar", "parser"] 12 | categories = ["development-tools::procedural-macro-helpers"] 13 | 14 | publish = false 15 | 16 | [dependencies] 17 | loess = { version = "0.2.3", path = "../loess" } 18 | proc-macro2 = { version = "1.0.95", default-features = false } 19 | quote = { version = "1.0.40", default-features = false } 20 | syn = { version = "2.0.101", default-features = false, features = ["clone-impls", "full", "parsing", "printing"] } 21 | 22 | [package.metadata.cargo-semver-checks.lints] 23 | workspace = true 24 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature Request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: 'type: feature' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 16 | 17 | **Is your feature request related to a problem? Please describe.** 18 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 19 | 20 | **Describe the solution you'd like** 21 | A clear and concise description of what you want to happen. 22 | 23 | **Describe alternatives you've considered** 24 | A clear and concise description of any alternative solutions or features you've considered. 25 | 26 | **Additional context** 27 | Add any other context or screenshots about the feature request here. 
28 | 
--------------------------------------------------------------------------------
/loess-rust/src/vis.rs:
--------------------------------------------------------------------------------
 1 | //! [vis](https://doc.rust-lang.org/stable/reference/visibility-and-privacy.html#r-vis): Visibility and Privacy
 2 | 
 3 | use loess::{grammar, scaffold::Parentheses};
 4 | 
 5 | use crate::{
 6 | 	lex::keywords::{Crate, In, Pub, SelfLowercase, Super},
 7 | 	paths::simple::SimplePath,
 8 | };
 9 | 
10 | grammar! {
11 | 	#[derive(Clone)]
12 | 	#[non_exhaustive]
13 | 	/// [Visibility](https://doc.rust-lang.org/reference/visibility-and-privacy.html#grammar-Visibility)
14 | 	pub struct Visibility: PeekFrom, PopFrom, IntoTokens {
15 | 		pub r#pub: Pub,
16 | 		/// Continue inside with [`VisibilityContent`].
17 | 		pub parentheses: Option<Parentheses>,
18 | 	}
19 | 
20 | 	#[derive(Clone)]
21 | 	#[non_exhaustive]
22 | 	/// Inside [`Visibility::parentheses`].
23 | 	pub enum VisibilityContent: PeekFrom, PopFrom, IntoTokens {
24 | 		Crate(Crate),
25 | 		SelfLowercase(SelfLowercase),
26 | 		Super(Super),
27 | 		InSimplePath(In, SimplePath),
28 | 
29 | 	} else "Expected VisibilityContent";
30 | }
31 | 
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
 1 | 
 2 | 
 3 | Thank you for your contribution to Loess!
 4 | 
 5 | Before submitting this pull request, please make sure:
 6 | 
 7 | * [ ] You've read `CONTRIBUTING.md`.
 8 | 
 9 | * [ ] The `version` value in `Cargo.toml` is unchanged.
10 | 
11 | * [ ] Your code builds cleanly without any errors or warnings and passes all tests.
12 | 
13 | * [ ] You have added documentation with examples and integration tests (in the `tests/` folder) where applicable.
14 | 
15 | Once this all is taken care of, please erase this default text and briefly describe your changes.
16 | If your pull request adds or changes features (outside of documentation), please also add the motivation for these changes or link the matching issue.
17 | 
18 | **Drafts welcome!** If you're unsure whether a feature is in scope or how to implement it, feel free to post incomplete work here.
19 | I'll try to get back to you quickly, but depending on how busy I am, I might need up to a week for the initial response.
20 | 
--------------------------------------------------------------------------------
/loess-rust/src/paths/simple.rs:
--------------------------------------------------------------------------------
 1 | //! [paths.simple](https://doc.rust-lang.org/reference/paths.html#r-paths.simple): Simple Paths
 2 | 
 3 | use loess::{grammar, scaffold::Greedy};
 4 | 
 5 | use crate::{
 6 | 	ident::Identifier,
 7 | 	lex::{
 8 | 		keywords::{Crate, SelfUppercase, Super},
 9 | 		token::punct::{Dollar, PathSep},
10 | 	},
11 | };
12 | 
13 | grammar!
{ 14 | #[non_exhaustive] 15 | /// [SimplePath](https://doc.rust-lang.org/reference/paths.html#grammar-SimplePath) 16 | pub struct SimplePath: PeekFrom, PopFrom, IntoTokens { 17 | path_sep: Option, 18 | simple_path_segment: SimplePathSegment, 19 | path_sep_simple_path_segments: Greedy>, 20 | } 21 | 22 | #[non_exhaustive] 23 | /// [SimplePathSegment](https://doc.rust-lang.org/reference/paths.html#grammar-SimplePathSegment) 24 | pub enum SimplePathSegment: PeekFrom, PopFrom, IntoTokens { 25 | Identifier(Identifier), 26 | Super(Super), 27 | SelfUppercase(SelfUppercase), 28 | Crate(Crate), 29 | DollarCrate(Dollar, Crate), 30 | } else "Expected SimplePathSegment."; 31 | } 32 | -------------------------------------------------------------------------------- /loess-rust-lex/src/lex/token.rs: -------------------------------------------------------------------------------- 1 | //! [lex.token](https://doc.rust-lang.org/stable/reference/tokens.html#r-lex.token>) 2 | 3 | use loess::grammar; 4 | 5 | use self::literal::{RawStringLiteral, StringLiteral}; 6 | 7 | pub mod delim; 8 | pub mod life; 9 | pub mod literal; 10 | pub mod punct; 11 | 12 | grammar! { 13 | #[derive(Clone)] 14 | #[non_exhaustive] 15 | /// [Token](https://doc.rust-lang.org/reference/tokens.html#grammar-Token) 16 | pub enum Token: doc, PeekFrom, PopFrom, IntoTokens { 17 | // IdentifierOrKeyword(IdentifierOrKeyword), 18 | // RawIdentifier(RawIdentifier), 19 | // CharLiteral(CharLiteral), 20 | StringLiteral(StringLiteral), 21 | RawStringLiteral(RawStringLiteral), 22 | // ByteLiteral(ByteLiteral), 23 | // ByteStringLiteral(ByteStringLiteral), 24 | // RawByteStringLiteral(RawByteStringLiteral), 25 | // CStringLiteral(CStringLiteral), 26 | // RawCStringLiteral(RawCStringLiteral), 27 | // IntegerLiteral(IntegerLiteral), 28 | // FloatLiteral(FloatLiteral), 29 | // LifetimeToken(LifetimeToken), 30 | // Punctuation(Punctuation), 31 | // ReservedToken(ReservedToken), 32 | } else "Expected Token."; 33 | } 34 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug Report 3 | about: Create a report to help this project improve 4 | title: '' 5 | labels: 'type: bug' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 17 | 18 | **Describe the bug** 19 | A clear and concise description of what the bug is. 20 | 21 | **To Reproduce** 22 | Steps to reproduce the behavior: 23 | 24 | 1. Go to '...' 25 | 2. Click on '....' 26 | 3. Scroll down to '....' 27 | 4. See error 28 | 29 | **Expected behavior** 30 | A clear and concise description of what you expected to happen. 31 | 32 | **Screenshots** 33 | If applicable, add screenshots to help explain your problem. 34 | 35 | **please complete the following information:** 36 | 37 | - `rustc --version`: [e.g. 1.58.0] 38 | - Crate name (if applicable): [e.g. `loess`] 39 | - Crate version (if applicable): [e.g. 0.0.0] 40 | 41 | **Additional context** 42 | Add any other context about the problem here. 43 | -------------------------------------------------------------------------------- /loess-rust/src/statement/let.rs: -------------------------------------------------------------------------------- 1 | //! 
[statement.let](https://doc.rust-lang.org/reference/statements.html#r-statement.let): `let`` statements 2 | 3 | use loess::{grammar, scaffold::Greedy}; 4 | 5 | use crate::{ 6 | attributes::OuterAttribute, 7 | expr::{Expression, block::BlockExpression}, 8 | lex::{ 9 | keywords::{Else, Let}, 10 | token::punct::{Colon, Eq, Semi}, 11 | }, 12 | }; 13 | 14 | grammar! { 15 | #[derive(Clone)] 16 | #[non_exhaustive] 17 | /// [LetStatement](https://doc.rust-lang.org/reference/statements.html#grammar-LetStatement) 18 | pub struct LetStatement: PeekFrom, PopFrom, IntoTokens { 19 | outer_attributes: Greedy>, 20 | r#let: Let, 21 | pattern_no_top_alt: PatternNoTopAlt, 22 | colon_type: Option<(Colon, Type)>, 23 | variant: LetStatementVariant, 24 | semi: Semi, 25 | } 26 | 27 | #[derive(Clone)] 28 | #[non_exhaustive] 29 | /// [`LetStatement::variant`] 30 | pub enum LetStatementVariant: PeekFrom, PopFrom, IntoTokens { 31 | EqExpression(Eq, Expression), 32 | ///TODO: Special parsing rule. 33 | EqExpressionElseBlockExpression(Eq, Expression, Else, BlockExpression), 34 | } else "Expected LetStatementVariant"; 35 | } 36 | -------------------------------------------------------------------------------- /loess-rust/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! **Shallow** [Loess](`loess`) grammar representing the stable Rust programming language, 2 | //! closely following [The Rust Reference](https://doc.rust-lang.org/stable/reference/). 3 | //! 4 | //! Corrections in that regard are not automatically considered breaking changes, 5 | //! unless they became necessary due to a change in Rust **and** reduce what is considered valid. 6 | //! 7 | //! Breaking changes to the API are considered breaking as normal. 8 | //! 9 | //! This crate is focused on parsing, so while the types implement [`loess::IntoTokens`], 10 | //! essentially anything slightly complex is tagged with `#[non_exhaustive]` and constructing 11 | //! those types may be difficult. Consider emitting through [`loess::quote_into_mixed_site!`] instead. 12 | //! 13 | //! *Note that unstable grammar may be accidentally accepted in some cases.* 14 | //! ***Ceasing to accept unstable grammar is not by itself considered a breaking change for Loess.*** 15 | 16 | //TODO: Make another crate that provides quote-like e.g. `reparse_mixed_site` macros? 17 | 18 | pub use loess_rust_lex::{ident, lex}; 19 | 20 | pub mod attributes; 21 | pub mod expr; 22 | #[path = "items.rs"] 23 | pub mod items; 24 | pub mod r#macro; 25 | pub mod paths; 26 | pub mod statement; 27 | pub mod vis; 28 | -------------------------------------------------------------------------------- /loess-rust/src/items/extern_crate.rs: -------------------------------------------------------------------------------- 1 | //! [items.extern-crate](https://doc.rust-lang.org/reference/items/extern-crates.html#r-items.extern-crate): Extern crate declarations 2 | 3 | use loess::grammar; 4 | 5 | use crate::{ 6 | ident::Identifier, 7 | lex::{ 8 | keywords::{As, Crate, Extern, SelfLowercase}, 9 | token::punct::{Semi, Underscore}, 10 | }, 11 | }; 12 | 13 | grammar! 
{ 14 | #[derive(Clone)] 15 | #[non_exhaustive] 16 | /// [ExternCrate](https://doc.rust-lang.org/reference/items/extern-crates.html#grammar-ExternCrate) 17 | pub struct ExternCrate: PeekFrom, PopFrom, IntoTokens { 18 | r#extern: Extern, 19 | crate_: Crate, 20 | crate_ref: CrateRef, 21 | as_clause: Option, 22 | semi: Semi, 23 | } 24 | 25 | #[derive(Clone)] 26 | #[non_exhaustive] 27 | /// [CrateRef](https://doc.rust-lang.org/reference/items/extern-crates.html#grammar-CrateRef) 28 | pub enum CrateRef: PeekFrom, PopFrom, IntoTokens { 29 | Identifier(Identifier), 30 | SelfLowercase(SelfLowercase), 31 | } else "Expected CrateRef."; 32 | 33 | #[derive(Clone)] 34 | #[non_exhaustive] 35 | /// [AsClause](https://doc.rust-lang.org/reference/items/extern-crates.html#grammar-AsClause) 36 | pub struct AsClause: PeekFrom, PopFrom, IntoTokens { 37 | r#as: As, 38 | variant: AsClauseVariant, 39 | } 40 | 41 | #[derive(Clone)] 42 | #[non_exhaustive] 43 | /// [`AsClause::variant`] 44 | pub enum AsClauseVariant: PeekFrom, PopFrom, IntoTokens { 45 | Identifier(Identifier), 46 | Underscore(Underscore), 47 | } else "Expected AsClauseVariant."; 48 | } 49 | -------------------------------------------------------------------------------- /.vscode/documentation.code-snippets: -------------------------------------------------------------------------------- 1 | { 2 | // Place your Asteracea workspace snippets here. Each snippet is defined under a snippet name and has a scope, prefix, body and 3 | // description. Add comma separated ids of the languages where the snippet is applicable in the scope field. If scope 4 | // is left empty or omitted, the snippet gets applied to all languages. The prefix is what is 5 | // used to trigger the snippet and the body will be expanded and inserted. Possible variables are: 6 | // $1, $2 for tab stops, $0 for the final cursor position, and ${1:label}, ${2:another} for placeholders. 7 | // Placeholders with the same ids are connected. 8 | // Example: 9 | // "Print to console": { 10 | // "scope": "javascript,typescript", 11 | // "prefix": "log", 12 | // "body": [ 13 | // "console.log('$1');", 14 | // "$2" 15 | // ], 16 | // "description": "Log output to console" 17 | // } 18 | "Small-caps": { 19 | "scope": "markdown,rust", 20 | "prefix": "smallcaps", 21 | "body": [ 22 | "$1$0" 23 | ], 24 | "description": "In documentation, renders text as small-caps." 25 | }, 26 | "": { 27 | "scope": "markdown,rust", 28 | "prefix": "code", 29 | "body": [ 30 | "$1$0" 31 | ], 32 | "description": "In documentation, renders as code." 33 | }, 34 | "": { 35 | "scope": "markdown,rust", 36 | "prefix": "sup", 37 | "body": [ 38 | "$1$0" 39 | ], 40 | "description": "In documentation, renders as superscript." 41 | }, 42 | "": { 43 | "scope": "markdown,rust", 44 | "prefix": "sub", 45 | "body": [ 46 | "$1$0" 47 | ], 48 | "description": "In documentation, renders as subscript." 49 | }, 50 | } -------------------------------------------------------------------------------- /loess-rust/src/expr/block.rs: -------------------------------------------------------------------------------- 1 | use loess::{ 2 | grammar, 3 | scaffold::{CurlyBraces, Greedy, RepeatCount}, 4 | }; 5 | 6 | use crate::{attributes::InnerAttribute, expr::ExpressionWithoutBlock, statement::Statement}; 7 | 8 | grammar! 
{ 9 | #[derive(Clone)] 10 | #[non_exhaustive] 11 | /// [BlockExpression](https://doc.rust-lang.org/stable/reference/expressions/block-expr.html?highlight=BlockExpression#r-expr.block.syntax) 12 | pub struct BlockExpression: PeekFrom, PopFrom, IntoTokens { 13 | /// Continue inside with [`BlockExpressionContent`] or [`BlockExpressionContentFlattened`]. 14 | pub braces: CurlyBraces, 15 | } 16 | 17 | #[derive(Clone)] 18 | #[non_exhaustive] 19 | /// Content of [`BlockExpression::braces`] 20 | pub struct BlockExpressionContent: PeekFrom, PopFrom, IntoTokens { 21 | inner_attributes: Greedy>, 22 | statements: Option, 23 | } 24 | 25 | #[derive(Clone)] 26 | #[non_exhaustive] 27 | /// [BlockExpression](https://doc.rust-lang.org/stable/reference/expressions/block-expr.html?highlight=Statements#r-expr.block.syntax) 28 | pub enum Statements: PeekFrom, PopFrom, IntoTokens { 29 | Statements(RepeatCount, 1, { usize::MAX }>), 30 | StatementsExpressionWithoutBlock(RepeatCount, 1, { usize::MAX }>, ExpressionWithoutBlock), 31 | ExpressionWithoutBlock(ExpressionWithoutBlock), 32 | } else "Expected statements with optional trailing expression without block."; 33 | 34 | #[derive(Clone)] 35 | #[non_exhaustive] 36 | /// [BlockExpression](https://doc.rust-lang.org/stable/reference/expressions/block-expr.html?highlight=Statements#r-expr.block.syntax) 37 | pub struct BlockExpressionContentFlattened: PeekFrom, PopFrom, IntoTokens { 38 | inner_attributes: Greedy>, 39 | statements: Greedy>, 40 | expression: Option, 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 
3 | version = 4 4 | 5 | [[package]] 6 | name = "loess" 7 | version = "0.2.5" 8 | dependencies = [ 9 | "proc-macro2", 10 | ] 11 | 12 | [[package]] 13 | name = "loess-rust" 14 | version = "0.1.0+0.2" 15 | dependencies = [ 16 | "loess", 17 | "loess-rust-lex", 18 | "proc-macro2", 19 | ] 20 | 21 | [[package]] 22 | name = "loess-rust-fragments" 23 | version = "0.1.0" 24 | dependencies = [ 25 | "loess", 26 | "loess-rust-lex", 27 | "proc-macro2", 28 | ] 29 | 30 | [[package]] 31 | name = "loess-rust-lex" 32 | version = "0.1.0" 33 | dependencies = [ 34 | "loess", 35 | "proc-macro2", 36 | ] 37 | 38 | [[package]] 39 | name = "loess-rust-opaque" 40 | version = "0.1.0+0.2" 41 | dependencies = [ 42 | "loess", 43 | "proc-macro2", 44 | "quote", 45 | "syn", 46 | ] 47 | 48 | [[package]] 49 | name = "proc-macro2" 50 | version = "1.0.95" 51 | source = "registry+https://github.com/rust-lang/crates.io-index" 52 | checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" 53 | dependencies = [ 54 | "unicode-ident", 55 | ] 56 | 57 | [[package]] 58 | name = "quote" 59 | version = "1.0.40" 60 | source = "registry+https://github.com/rust-lang/crates.io-index" 61 | checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" 62 | dependencies = [ 63 | "proc-macro2", 64 | ] 65 | 66 | [[package]] 67 | name = "syn" 68 | version = "2.0.101" 69 | source = "registry+https://github.com/rust-lang/crates.io-index" 70 | checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf" 71 | dependencies = [ 72 | "proc-macro2", 73 | "quote", 74 | "unicode-ident", 75 | ] 76 | 77 | [[package]] 78 | name = "unicode-ident" 79 | version = "1.0.18" 80 | source = "registry+https://github.com/rust-lang/crates.io-index" 81 | checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" 82 | -------------------------------------------------------------------------------- /loess-rust/README.md: -------------------------------------------------------------------------------- 1 | # Loess-Rust 2 | 3 | `loess-rust` is a [Loess](https://crates.io/crates/loess)-grammar that tracks Rust's stable grammar by closely following [The Rust Reference](https://doc.rust-lang.org/stable/reference/). 4 | 5 | Here's what to expect: 6 | 7 | - Shallow parsing (by default). For tokens with delimited groups, like `Visibility`, you can opt into deeper (or customised!) parsing via generics. 8 | 9 | - Public fields and one-time validation. The parser checks token specifics once when processing input, but trusts you otherwise. 10 | 11 | - Some bugs. For example, none-delimited groups aren't handled yet, which can cause issues when generating macro input with a `macro_rules!` macro. 12 | 13 | Here's what not to expect: 14 | 15 | - Complete coverage of Rust's grammar. In fact, `loess-rust` really makes no attempt at all in this regard, since I only implement what I need. In particular, unstable grammar is generally out of scope. (Loess can help you supply it yourself!) 16 | 17 | Temporary opaque implementations of additional grammar tokens are available in the [`loess-rust-opaque`](https://crates.io/crates/loess-rust-opaque) crate. 18 | 19 | - `Debug`-implementations. They aren't that useful here in my experience, but they would increase compile-times. 20 | 21 | - Absence of major version bumps. Rust's grammar is a moving target and Loess's grammar tokens aren't marked `#[non_exhaustive]` for ease of use. 
22 | 23 | However, shallow parsing should make upgrades fairly painless and errors should alert you specifically to grammar changes that are relevant to you. 24 | 25 | I should also be able to use the semver trick each time (reexport compatible new-major-version types in the older version) to keep incompatibilities and overall compile time to a minimum. 26 | 27 | How to read the version number: 28 | 29 | After the "+", the most-major version of the (most directly) compatible Loess releases is listed. 30 | -------------------------------------------------------------------------------- /loess-rust-lex/src/lex/token/life.rs: -------------------------------------------------------------------------------- 1 | //! [lex.token.life](https://doc.rust-lang.org/stable/reference/tokens.html#r-lex.token.life): Lifetimes and loop labels 2 | 3 | use loess::{Error, ErrorPriority, Errors, Input, PeekFrom, PopFrom, PopParsedFrom, lifetimes}; 4 | use proc_macro2::{Ident, TokenTree}; 5 | 6 | lifetimes! { 7 | /// [LIFETIME_TOKEN](https://doc.rust-lang.org/stable/reference/tokens.html#r-lex.token.life.syntax) 8 | #[derive(Clone)] pub _ as pub Lifetime: IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 9 | } 10 | 11 | impl PeekFrom for Lifetime { 12 | fn peek_from(input: &Input) -> bool { 13 | matches!(input.front(), Some(TokenTree::Ident(ident)) if is_lifetime(ident)) 14 | } 15 | } 16 | 17 | /// See as of 2025-12-04. 18 | impl PopParsedFrom for Lifetime { 19 | type Parsed = Self; 20 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result { 21 | let ident = Ident::peek_pop_from(input, errors)?; 22 | 23 | match ident { 24 | Some(ident) if is_lifetime(&ident) => Ok(Self(ident)), 25 | ident => Err(if let Some(ident) = ident { 26 | errors.push(Error::new( 27 | ErrorPriority::GRAMMAR, 28 | if ident.to_string().starts_with("r#") { 29 | format!( 30 | "Expected Lifetime. (`{}` cannot be a raw identifier.)", 31 | &ident.to_string()[2..] 32 | ) 33 | } else { 34 | format!("Expected Identifier. (`{ident}` is a keyword.)") 35 | }, 36 | [ident.span()], 37 | )); 38 | 39 | input.push_front(TokenTree::Ident(ident)); 40 | } else { 41 | errors.push(Error::new( 42 | ErrorPriority::GRAMMAR, 43 | "Expected Identifier.", 44 | [input.front_span()], 45 | )); 46 | }), 47 | } 48 | } 49 | } 50 | 51 | pub fn is_lifetime(ident: &Ident) -> bool { 52 | !["'r#crate", "'r#self", "'r#super", "'r#Self"] 53 | .into_iter() 54 | .any(|s| ident == s) 55 | && ident.to_string().starts_with('\'') 56 | } 57 | -------------------------------------------------------------------------------- /loess-rust-lex/src/ident.rs: -------------------------------------------------------------------------------- 1 | //! [ident](https://doc.rust-lang.org/stable/reference/identifiers.html#r-ident): 2 | //! Identifiers (not [keywords](`crate::lex::keywords`) or [lifetimes](`crate::lex::token::life`)). 3 | 4 | use loess::{Error, ErrorPriority, Errors, Input, PeekFrom, PopFrom as _, PopParsedFrom}; 5 | use proc_macro2::{Ident, TokenTree}; 6 | 7 | use crate::lex::keywords::{is_reserved_keyword, is_strict_keyword}; 8 | 9 | pub use crate::lex::keywords::words_impl::Identifier; 10 | 11 | impl PeekFrom for Identifier { 12 | fn peek_from(input: &Input) -> bool { 13 | matches!(input.front(), Some(TokenTree::Ident(ident)) if is_identifier(ident)) 14 | } 15 | } 16 | 17 | /// See as of 2025-04-13. 
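// Pops a single `Ident` and accepts it only if it is not a keyword, a reserved
// `r#…` form, or a lifetime-like token; otherwise the token is pushed back
// unconsumed and a GRAMMAR-priority error is recorded instead.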
18 | impl PopParsedFrom for Identifier { 19 | type Parsed = Self; 20 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result { 21 | let ident = Ident::peek_pop_from(input, errors)?; 22 | 23 | match ident { 24 | Some(ident) if is_identifier(&ident) => Ok(Self(ident)), 25 | ident => Err(if let Some(ident) = ident { 26 | errors.push(Error::new( 27 | ErrorPriority::GRAMMAR, 28 | if ident.to_string().starts_with("r#") { 29 | format!( 30 | "Expected Identifier. (`{}` cannot be a raw identifier.)", 31 | &ident.to_string()[2..] 32 | ) 33 | } else { 34 | format!("Expected Identifier. (`{ident}` is a keyword.)") 35 | }, 36 | [ident.span()], 37 | )); 38 | 39 | input.push_front(TokenTree::Ident(ident)); 40 | } else { 41 | errors.push(Error::new( 42 | ErrorPriority::GRAMMAR, 43 | "Expected Identifier.", 44 | [input.front_span()], 45 | )); 46 | }), 47 | } 48 | } 49 | } 50 | 51 | pub fn is_identifier(ident: &Ident) -> bool { 52 | !(["r#crate", "r#self", "r#super", "r#Self"] 53 | .into_iter() 54 | .any(|s| ident == s) 55 | || is_strict_keyword(&ident) 56 | || is_reserved_keyword(&ident) 57 | || ident.to_string().starts_with('\'')) 58 | } 59 | -------------------------------------------------------------------------------- /loess-rust/src/items.rs: -------------------------------------------------------------------------------- 1 | //! [items](https://doc.rust-lang.org/reference/items.html#r-items): Items 2 | 3 | use loess::{grammar, scaffold::Greedy}; 4 | 5 | use crate::{ 6 | attributes::OuterAttribute, items::extern_crate::ExternCrate, 7 | r#macro::invocation::MacroInvocationSemi, vis::Visibility, 8 | }; 9 | 10 | #[path = "items/extern_crate.rs"] 11 | pub mod extern_crate; 12 | #[path = "items/mod.rs"] 13 | pub mod r#mod; 14 | 15 | grammar! { 16 | #[derive(Clone)] 17 | #[non_exhaustive] 18 | /// [Item](https://doc.rust-lang.org/reference/items.html#grammar-Item) 19 | pub struct Item: PeekFrom, PopFrom, IntoTokens { 20 | outer_attributes: Greedy>, 21 | variant: ItemVariant, 22 | } 23 | 24 | #[derive(Clone)] 25 | #[non_exhaustive] 26 | /// [`Item::variant`] 27 | pub enum ItemVariant: PeekFrom, PopFrom, IntoTokens { 28 | VisItem(VisItem), 29 | MacroItem(MacroItem), 30 | } else "Expected VisItem or MacroItem."; 31 | 32 | #[derive(Clone)] 33 | #[non_exhaustive] 34 | /// [VisItem](https://doc.rust-lang.org/reference/items.html#grammar-VisItem) 35 | pub struct VisItem: PeekFrom, PopFrom, IntoTokens { 36 | visibility: Option, 37 | variant: VisItemVariant, 38 | } 39 | 40 | #[derive(Clone)] 41 | #[non_exhaustive] 42 | /// [`VisItem::variant`] 43 | pub enum VisItemVariant: PeekFrom, PopFrom, IntoTokens { 44 | Module(Module), 45 | ExternCrate(ExternCrate), 46 | UseDeclaration(UseDeclaration), 47 | Function(Function), 48 | TypeAlias(TypeAlias), 49 | Struct(Struct), 50 | Enumeration(Enumeration), 51 | Union(Union), 52 | ConstantItem(ConstantItem), 53 | StaticItem(StaticItem), 54 | Trait(Trait), 55 | Implementation(Implementation), 56 | ExternBlock(ExternBlock), 57 | } else "Expected VisItem or MacroItem."; 58 | 59 | #[derive(Clone)] 60 | #[non_exhaustive] 61 | /// [MacroItem](https://doc.rust-lang.org/reference/items.html#grammar-MacroItem) 62 | pub enum MacroItem: PeekFrom, PopFrom, IntoTokens { 63 | MacroInvocationSemi(MacroInvocationSemi), 64 | MacroRulesDefinition(MacroRulesDefinition), 65 | } else "Expected VisItem or MacroItem."; 66 | } 67 | -------------------------------------------------------------------------------- /loess-rust/src/macro/invocation.rs: 
-------------------------------------------------------------------------------- 1 | //! [macro.invocation](https://doc.rust-lang.org/reference/macros.html#r-macro.invocation) 2 | 3 | use loess::{ 4 | grammar, 5 | scaffold::{CurlyBraces, Parentheses, SquareBrackets}, 6 | }; 7 | 8 | use crate::{lex::token::{ 9 | Token, 10 | punct::{Not, Semi}, 11 | }, paths::simple::SimplePath}; 12 | 13 | grammar! { 14 | #[derive(Clone)] 15 | #[non_exhaustive] 16 | /// [MacroInvocation](https://doc.rust-lang.org/reference/macros.html#grammar-MacroInvocation) 17 | pub struct MacroInvocation: PeekFrom, PopFrom, IntoTokens { 18 | simple_path: SimplePath, 19 | not: Not, 20 | delim_token_tree: DelimTokenTree, 21 | } 22 | 23 | #[derive(Clone)] 24 | #[non_exhaustive] 25 | /// [DelimTokenTree](https://doc.rust-lang.org/reference/macros.html#grammar-DelimTokenTree) 26 | pub enum DelimTokenTree: PeekFrom, PopFrom, IntoTokens { 27 | /// Continue inside with [`Vec`]<[`TokenTree`]>. 28 | Parentheses(Parentheses), 29 | /// Continue inside with [`Vec`]<[`TokenTree`]>. 30 | SquareBrackets(SquareBrackets), 31 | /// Continue inside with [`Vec`]<[`TokenTree`]>. 32 | CurlyBraces(CurlyBraces), 33 | } else "Expected delimited token tree."; 34 | 35 | #[derive(Clone)] 36 | #[non_exhaustive] 37 | /// [TokenTree](https://doc.rust-lang.org/reference/macros.html#grammar-TokenTree) 38 | pub enum TokenTree: PeekFrom, PopFrom, IntoTokens { 39 | DelimTokenTree(DelimTokenTree), 40 | TokenExceptDelimiters(Token), 41 | } else "Expected delimited token tree."; 42 | 43 | #[derive(Clone)] 44 | #[non_exhaustive] 45 | /// [MacroInvocationSemi](https://doc.rust-lang.org/reference/macros.html#grammar-MacroInvocationSemi) 46 | pub enum MacroInvocationSemi: PeekFrom, PopFrom, IntoTokens { 47 | /// Continue inside with [`Vec`]<[`TokenTree`]>. 48 | WithParentheses(SimplePath, Not, Parentheses, Semi), 49 | /// Continue inside with [`Vec`]<[`TokenTree`]>. 50 | WithSquareBrackets(SimplePath, Not, SquareBrackets, Semi), 51 | /// Continue inside with [`Vec`]<[`TokenTree`]>. 52 | WithCurlyBraces(SimplePath, Not, CurlyBraces), 53 | } else "Expected delimited token tree."; 54 | } 55 | -------------------------------------------------------------------------------- /loess-rust/src/expr/literal.rs: -------------------------------------------------------------------------------- 1 | //! [expr.literal](https://doc.rust-lang.org/stable/reference/expressions/literal-expr.html#r-expr.literal): Literal expressions 2 | 3 | use loess::grammar; 4 | 5 | use crate::lex::{ 6 | keywords::{False, True}, 7 | token::literal::{AnyBoolLiteral, AnyStringLiteral, RawStringLiteral, StringLiteral}, 8 | }; 9 | 10 | grammar! { 11 | /// 12 | /// See . 13 | #[derive(Clone)] 14 | #[non_exhaustive] 15 | pub enum LiteralExpression: doc, PeekFrom, PopFrom, IntoTokens { 16 | // Char 17 | StringLiteral(StringLiteral), 18 | RawStringLiteral(RawStringLiteral), 19 | // Byte 20 | // ByteString 21 | // RawByteString 22 | // CString 23 | // RawCString 24 | // Integer 25 | // Float 26 | True(True), 27 | False(False), 28 | } else "Expected literal expression."; 29 | 30 | /// 31 | /// Simplified [`LiteralExpression`]. 
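/// The [`From`] implementations below convert losslessly between this and [`LiteralExpression`] in both directions.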
32 | 	#[derive(Clone)]
33 | 	#[non_exhaustive]
34 | 	pub enum LiteralExpressionByType: doc, PeekFrom, PopFrom, IntoTokens {
35 | 		// AnyChar
36 | 		AnyString(AnyStringLiteral),
37 | 		// AnyByte
38 | 		// AnyByteString
39 | 		// RawByteString
40 | 		// AnyCString
41 | 		// RawCString
42 | 		// AnyInteger
43 | 		// AnyFloat
44 | 		AnyBool(AnyBoolLiteral),
45 | 	} else "Expected literal expression.";
46 | }
47 | 
48 | impl From<LiteralExpression> for LiteralExpressionByType {
49 | 	fn from(value: LiteralExpression) -> Self {
50 | 		use LiteralExpression::*;
51 | 		use LiteralExpressionByType::*;
52 | 		match value {
53 | 			StringLiteral(s) => AnyString(AnyStringLiteral::Plain(s)),
54 | 			RawStringLiteral(r) => AnyString(AnyStringLiteral::Raw(r)),
55 | 			True(t) => AnyBool(AnyBoolLiteral::True(t)),
56 | 			False(f) => AnyBool(AnyBoolLiteral::False(f)),
57 | 		}
58 | 	}
59 | }
60 | 
61 | impl From<LiteralExpressionByType> for LiteralExpression {
62 | 	fn from(value: LiteralExpressionByType) -> Self {
63 | 		use LiteralExpression::*;
64 | 		use LiteralExpressionByType::*;
65 | 		match value {
66 | 			AnyString(s) => match s {
67 | 				AnyStringLiteral::Plain(s) => StringLiteral(s),
68 | 				AnyStringLiteral::Raw(r) => RawStringLiteral(r),
69 | 			},
70 | 			AnyBool(b) => match b {
71 | 				AnyBoolLiteral::True(t) => True(t),
72 | 				AnyBoolLiteral::False(f) => False(f),
73 | 			},
74 | 		}
75 | 	}
76 | }
77 | 
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
 1 | # Contributing to Loess
 2 | 
 3 | Please create a GitHub issue for your idea before starting work on a merge-request.
 4 | Doing so makes it easier to see if the change is likely to be merged once complete.
 5 | 
 6 | When you start work on your change, please create a draft PR for ongoing discussion.
 7 | 
 8 | ## Guidelines
 9 | 
10 | Don't use procedural macros in Loess and (in most cases) don't add dependencies on other crates to it.
11 | 
12 | The *loess-rust* crate is organised roughly like [The Rust Reference](https://doc.rust-lang.org/stable/reference/), with opaque implementations split into a separate subfolder. Type names should correspond directly to named grammar symbols or other applicable names in The Rust Reference.
13 | 
14 | Grammar symbols should be documented to show their immediate pattern in the module overview.
15 | (See *loess-rust* for examples. `grammar!` can generate this documentation for you in some cases.) This is not required for temporary opaque implementations, which instead should identify themselves as such.
16 | 
17 | **I'm open to adding additional feature-gated grammars** for common text file formats,
18 | as long as they can be parsed accurately on stable Rust. (That means no YAML, but JSON would be okay.)
19 | Type names may be reused between distinct grammars, but must not be reused within the same grammar. Where a name collides with a Rust keyword, the respective raw identifier (`r#…`) should be used if available.
20 | 
21 | You may introduce new `macro_rules!` macros to avoid large amounts of repetitive code. Please keep them reasonably simple (but try to match my standards, which mainly means optional trailing repeat-separators should be supported in the input).
22 | 
23 | Try to focus on what you need as a consumer and don't aim for completeness for completeness's sake.
That saves work for us both 🙂 24 | 25 | ## Formatting 26 | 27 | Please format with: 28 | 29 | ```sh 30 | cargo +nightly fmt 31 | ``` 32 | 33 | ## Testing 34 | 35 | Please test with: 36 | 37 | ```sh 38 | cargo test 39 | ``` 40 | 41 | ## Publishing 42 | 43 | Check whether a version change is necessary with: 44 | 45 | ```sh 46 | # cargo binstall cargo-semver-checks 47 | # OR 48 | # cargo install --locked cargo-semver-checks 49 | cargo semver-checks 50 | ``` 51 | 52 | Publish with: 53 | 54 | ```sh 55 | cargo publish --dry-run --locked 56 | cargo publish --locked 57 | ``` 58 | -------------------------------------------------------------------------------- /loess-rust-fragments/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! **Inaccurate** but lightweight [Metavariable](https://doc.rust-lang.org/reference/macros-by-example.html#r-macro.decl.meta) match parsers. 2 | //! 3 | //! Generally, these will only work properly iff the input respects the follow set limitations explained in [macro.decl.follow-set](https://doc.rust-lang.org/reference/macros-by-example.html#r-macro.decl.follow-set). 4 | 5 | use loess::{ 6 | Errors, Input, PopParsedFrom, grammar, 7 | scaffold::{CurlyBraces, MetaGroup, Parentheses}, 8 | words, 9 | }; 10 | use loess_rust_lex::lex::token::punct::Lt; 11 | use proc_macro2::{Delimiter, Group, Span, TokenStream, TokenTree, extra::DelimSpan}; 12 | 13 | words! { 14 | #[derive(Clone)] pub(self) pub as Pub: PeekFrom, PopFrom, IntoTokens; 15 | } 16 | 17 | grammar! { 18 | // pub struct Block: PeekFrom, PopFrom, IntoTokens { } 19 | 20 | // pub struct Expr: PeekFrom, PopFrom, IntoTokens { } 21 | 22 | // pub struct Expr2021: PeekFrom, PopFrom, IntoTokens { } 23 | 24 | // pub struct Ident: PeekFrom, PopFrom, IntoTokens { } 25 | 26 | // pub struct Item: PeekFrom, PopFrom, IntoTokens { } 27 | 28 | // pub struct Lifetime: PeekFrom, PopFrom, IntoTokens { } 29 | 30 | // pub struct Literal: PeekFrom, PopFrom, IntoTokens { } 31 | 32 | // pub struct Meta: PeekFrom, PopFrom, IntoTokens { } 33 | 34 | // pub struct Pat: PeekFrom, PopFrom, IntoTokens { } 35 | 36 | // pub struct PatParam: PeekFrom, PopFrom, IntoTokens { } 37 | 38 | // pub struct Path: PeekFrom, PopFrom, IntoTokens { } 39 | 40 | // pub struct Stmt: PeekFrom, PopFrom, IntoTokens { } 41 | 42 | /// This groups operators! 43 | // pub struct TT: PeekFrom, PopFrom, IntoTokens { } 44 | 45 | pub struct Ty: IntoTokens { 46 | inner: Group, 47 | } 48 | 49 | //TODO: Check if not examining the parentheses content matches rustc! 50 | 51 | pub struct Vis: PopFrom, IntoTokens { 52 | inner: Option<(Pub, Option)>, 53 | } 54 | } 55 | 56 | impl PopParsedFrom for Ty { 57 | type Parsed = Self; 58 | 59 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result { 60 | let mut depth = 0_usize; 61 | let mut consumed = TokenStream::new(); 62 | while match depth { 63 | 0 => if let Some(lt) = Lt::peek_pop_from(input, errors) {}, 64 | 1.. 
=> input 65 | .pop_or_replace(|[tt], _rest| match tt { 66 | TokenTree::Punct(punct) if punct.as_char() == '<' => { 67 | depth += 1; 68 | Ok(punct.into()) 69 | } 70 | TokenTree::Punct(punct) if punct.as_char() == '>' => { 71 | depth -= 1; 72 | Ok(punct.into()) 73 | } 74 | tt => Ok(tt), 75 | }) 76 | .ok(), 77 | } 78 | .map_or(false, |tt| { 79 | consumed.extend([tt]); 80 | true 81 | }) {} 82 | let mut group = Group::new(Delimiter::None, consumed); 83 | group.set_span(group.span().resolved_at(Span::mixed_site())); 84 | Ok(Self { inner: group }) 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions (by Crate Name) 4 | 5 | ### `loess` 6 | 7 | | Version | Supported | ***Initial*** Reason for Removal | 8 | | ----------- | ------------------ | -------------------------------- | 9 | | 0.1.0 to .1 | :white_check_mark: | | 10 | | 0.2.1 to .2 | :white_check_mark: | | 11 | 12 | Faulty versions are [yanked](https://doc.rust-lang.org/cargo/commands/cargo-yank.html), where possible after a Semver-compatible update is made available, and added to the table above as unsupported. 13 | They are also marked with an additional `v….….…-yanked` tag in Git to make them easily recognisable, but original release tags are not removed. 14 | 15 | Security advisories are published through [the respective section on this repository here](https://github.com/Tamschi/loess/security/advisories) and [RustSec/advisory-db](https://github.com/RustSec/advisory-db). 16 | 17 | ## Reporting a Vulnerability 18 | 19 | If you find a security issue, please contact me privately first, so that I can publish a fix before the announcement! 20 | 21 | You can reach me via XMPP or email at [tamme@schichler.dev](mailto:tamme@schichler.dev). 22 | Prefer XMPP and mention "vulnerability" if you'd like an immediate response, though I can't always guarantee this of course. 23 | 24 | ## Notes 25 | 26 | As `0.0.z` versions cannot be upgraded in a Semver-compatible way, these can be yanked without an automatically resolved alternative becoming available. 27 | Should it become impossible to fix a vulnerability in an API-compatible way, an `x.….…` or `0.y.…` version can be yanked entirely, too. 28 | 29 | Yanked versions are still available for download, so your builds will not break with an existing `Cargo.lock` file. 30 | Please include it in your version control (and source release packages for executables). Cargo does this by default. 31 | 32 | While there will be advisories about any security issues and undefined behaviour, other bugfix releases are more quiet. 33 | To be notified automatically, either subscribe to releases through the repository watch feature on GitHub or use for example [Dependabot] with [`package-ecosystem: cargo`](https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/configuration-options-for-dependency-updates#package-ecosystem). 34 | To check only for vulnerabilities, use [cargo-audit](https://github.com/RustSec/rustsec/tree/main/cargo-audit#readme) instead, which is available as a GitHub Action as [actions-rs/audit-check](https://github.com/actions-rs/audit-check#readme). 35 | 36 | Once a version becomes yanked/unsupported, please update or upgrade to a supported version in a timely manner. 
37 | I'll try to make this as painless as possible where manual changes are required, but a simple [`cargo update -p asteracea`](https://doc.rust-lang.org/cargo/commands/cargo-update.html) should do the trick in most cases. 38 | 39 | [Dependabot]: https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically 40 | -------------------------------------------------------------------------------- /loess-rust/src/items/mod.rs: -------------------------------------------------------------------------------- 1 | //! [items.mod](https://doc.rust-lang.org/reference/items/modules.html#r-items.mod): Modules 2 | 3 | use loess::{ 4 | Input, PeekFrom, grammar, 5 | scaffold::{CurlyBraces, Greedy}, 6 | }; 7 | use proc_macro2::TokenTree; 8 | 9 | use crate::{ 10 | attributes::InnerAttribute, 11 | ident::Identifier, 12 | items::Item, 13 | lex::{ 14 | keywords::{Mod, Unsafe}, 15 | token::punct::Semi, 16 | }, 17 | }; 18 | 19 | grammar! { 20 | #[derive(Clone)] 21 | #[non_exhaustive] 22 | /// [Module](https://doc.rust-lang.org/reference/items/modules.html#grammar-Module) 23 | pub enum Module: PeekFrom via ModuleFlattened, PopFrom via ModuleFlattened, IntoTokens { 24 | UnsafeModIdentifierSemi(Option, Mod, Identifier, Semi), 25 | /// Continue inside via [`ModuleContent`]. 26 | UnsafeModIdentifierCurlyBraces(Option, Mod, Identifier, CurlyBraces), 27 | } else "Expected Module."; 28 | 29 | #[derive(Clone)] 30 | #[non_exhaustive] 31 | /// Flattened [`Module`]. 32 | pub struct ModuleFlattened: PopFrom, IntoTokens { 33 | r#unsafe: Option, 34 | r#mod: Mod, 35 | identifier: Identifier, 36 | variant: ModuleFlattenedVariant, 37 | } 38 | 39 | #[derive(Clone)] 40 | #[non_exhaustive] 41 | /// [`ModuleFlattened::variant`] 42 | pub enum ModuleFlattenedVariant: PeekFrom, PopFrom, IntoTokens { 43 | Semi(Semi), 44 | /// Continue inside via [`ModuleContent`]. 45 | CurlyBraces(CurlyBraces), 46 | } else "Expected ModuleFlattenedVariant."; 47 | 48 | #[derive(Clone)] 49 | #[non_exhaustive] 50 | /// Inside [`Module::UnsafeModIdentifierCurlyBraces`] or [`ModuleFlattenedVariant::CurlyBraces`]. 
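// Note (clarifying the doc comments above): `ModuleContent` is not embedded in
// `Module` itself; it is meant to be parsed separately from the token stream
// carried inside the curly-braces group.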
51 | pub struct ModuleContent: PeekFrom, PopFrom, IntoTokens { 52 | inner_attributes: Greedy>, 53 | items: Vec, 54 | } 55 | } 56 | 57 | /// `mod` or `unsafe mod` 58 | impl PeekFrom for ModuleFlattened { 59 | fn peek_from(input: &Input) -> bool { 60 | input.peek(|[tt], mut rest| match tt { 61 | TokenTree::Ident(ident) => { 62 | ident == "mod" 63 | || ident == "unsafe" 64 | && matches!(rest.next(), Some(TokenTree::Ident(ident)) if ident == "mod") 65 | } 66 | TokenTree::Group(_) | TokenTree::Punct(_) | TokenTree::Literal(_) => false, 67 | }) 68 | } 69 | } 70 | 71 | impl From for ModuleFlattened { 72 | fn from(value: Module) -> Self { 73 | use Module::*; 74 | use ModuleFlattenedVariant::*; 75 | match value { 76 | UnsafeModIdentifierSemi(r#unsafe, r#mod, identifier, semi) => Self { 77 | r#unsafe, 78 | r#mod, 79 | identifier, 80 | variant: Semi(semi), 81 | }, 82 | UnsafeModIdentifierCurlyBraces(r#unsafe, r#mod, identifier, curly_braces) => Self { 83 | r#unsafe, 84 | r#mod, 85 | identifier, 86 | variant: CurlyBraces(curly_braces), 87 | }, 88 | } 89 | } 90 | } 91 | 92 | impl From for Module { 93 | fn from(value: ModuleFlattened) -> Self { 94 | use Module::*; 95 | use ModuleFlattenedVariant::*; 96 | let ModuleFlattened { 97 | r#unsafe, 98 | r#mod, 99 | identifier, 100 | variant, 101 | } = value; 102 | match variant { 103 | Semi(semi) => UnsafeModIdentifierSemi(r#unsafe, r#mod, identifier, semi), 104 | CurlyBraces(curly_braces) => { 105 | UnsafeModIdentifierCurlyBraces(r#unsafe, r#mod, identifier, curly_braces) 106 | } 107 | } 108 | } 109 | } 110 | -------------------------------------------------------------------------------- /loess-rust-lex/src/lex/token/literal.rs: -------------------------------------------------------------------------------- 1 | //! [lex.token.literal](https://doc.rust-lang.org/stable/reference/tokens.html#r-lex.token.literal): Literals 2 | 3 | use loess::{Error, ErrorPriority, Errors, Input, IntoTokens, PeekFrom, PopParsedFrom, grammar}; 4 | use proc_macro2::{Literal, TokenTree}; 5 | 6 | use crate::lex::keywords::{False, True}; 7 | 8 | grammar! { 9 | /// [STRING_LITERAL](https://doc.rust-lang.org/stable/reference/tokens.html#r-lex.token.literal.str.syntax): 10 | /// `"`[…]`"` 11 | /// 12 | /// […]: https://doc.rust-lang.org/stable/reference/tokens.html#string-literals 13 | #[derive(Clone)] 14 | pub struct StringLiteral(pub Literal); 15 | 16 | /// [RAW_STRING_LITERAL](https://doc.rust-lang.org/stable/reference/tokens.html#r-lex.token.literal.str-raw.syntax): 17 | /// `r`​`#`n ≥ 0`"`[…]`"`​`#`n 18 | /// 19 | /// […]: https://doc.rust-lang.org/stable/reference/tokens.html#raw-string-literals 20 | #[derive(Clone)] 21 | pub struct RawStringLiteral(pub Literal); 22 | 23 | #[derive(Clone)] 24 | #[non_exhaustive] 25 | pub enum AnyStringLiteral: doc, PeekFrom, PopFrom, IntoTokens { 26 | /// `"…"` 27 | Plain(StringLiteral), 28 | /// `r#"…"#` and similar. 
29 | Raw(RawStringLiteral), 30 | } else "Expected &str literal."; 31 | 32 | #[derive(Clone)] 33 | #[non_exhaustive] 34 | pub enum AnyBoolLiteral: doc, PeekFrom, PopFrom, IntoTokens { 35 | /// `true` 36 | True(True), 37 | /// `false` 38 | False(False), 39 | } else "Expected bool literal."; 40 | } 41 | 42 | impl PeekFrom for StringLiteral { 43 | fn peek_from(input: &Input) -> bool { 44 | matches!(input.front(), Some(TokenTree::Literal(literal)) if literal.to_string().starts_with('"')) 45 | } 46 | } 47 | 48 | impl PopParsedFrom for StringLiteral { 49 | type Parsed = Self; 50 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result { 51 | input 52 | .pop_or_replace(|t, _| match t { 53 | [TokenTree::Literal(literal)] if literal.to_string().starts_with('"') => { 54 | Ok(Self(literal)) 55 | } 56 | other => Err(other), 57 | }) 58 | .map_err(|spans| { 59 | errors.push(Error::new(ErrorPriority::GRAMMAR, "Expected `\"`.", spans)) 60 | }) 61 | } 62 | } 63 | 64 | impl IntoTokens for StringLiteral { 65 | fn into_tokens( 66 | self, 67 | root: &proc_macro2::TokenStream, 68 | tokens: &mut impl Extend, 69 | ) { 70 | self.0.into_tokens(root, tokens); 71 | } 72 | } 73 | 74 | impl PeekFrom for RawStringLiteral { 75 | fn peek_from(input: &Input) -> bool { 76 | //TODO: This might not be entirely accurate. 77 | matches!(input.front(), Some(TokenTree::Literal(literal)) if literal.to_string().starts_with('r')) 78 | } 79 | } 80 | 81 | impl PopParsedFrom for RawStringLiteral { 82 | type Parsed = Self; 83 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result { 84 | input 85 | .pop_or_replace(|t, _| match t { 86 | [TokenTree::Literal(literal)] if literal.to_string().starts_with('r') => { 87 | Ok(Self(literal)) 88 | } 89 | other => Err(other), 90 | }) 91 | .map_err(|spans| { 92 | errors.push(Error::new( 93 | ErrorPriority::GRAMMAR, 94 | "Expected raw string literal.", 95 | spans, 96 | )) 97 | }) 98 | } 99 | } 100 | 101 | impl IntoTokens for RawStringLiteral { 102 | fn into_tokens( 103 | self, 104 | root: &proc_macro2::TokenStream, 105 | tokens: &mut impl Extend, 106 | ) { 107 | self.0.into_tokens(root, tokens); 108 | } 109 | } 110 | -------------------------------------------------------------------------------- /loess-rust/src/expr.rs: -------------------------------------------------------------------------------- 1 | //! [expr](https://doc.rust-lang.org/stable/reference/expressions.html#r-expr): Expressions 2 | 3 | use loess::{grammar, scaffold::Greedy}; 4 | 5 | use crate::{ 6 | attributes::OuterAttribute, 7 | expr::{block::BlockExpression, literal::LiteralExpression}, 8 | }; 9 | 10 | pub mod block; 11 | pub mod literal; 12 | 13 | grammar! 
{ 14 | /// [Expression](https://doc.rust-lang.org/reference/expressions.html#grammar-Expression) 15 | #[derive(Clone)] 16 | #[non_exhaustive] 17 | pub enum Expression: PeekFrom, PopFrom, IntoTokens { 18 | ExpressionWithoutBlock(ExpressionWithoutBlock), 19 | ExpressionWithBlock(ExpressionWithBlock), 20 | } else "Expected expression."; 21 | 22 | /// [ExpressionWithoutBlock](https://doc.rust-lang.org/reference/expressions.html#grammar-ExpressionWithoutBlock) 23 | #[derive(Clone)] 24 | #[non_exhaustive] 25 | pub struct ExpressionWithoutBlock: PeekFrom, PopFrom, IntoTokens { 26 | pub outer_attributes: Greedy>, 27 | pub variant: ExpressionWithoutBlockVariant, 28 | } 29 | 30 | #[derive(Clone)] 31 | #[non_exhaustive] 32 | /// [`ExpressionWithoutBlock::variant`] 33 | pub enum ExpressionWithoutBlockVariant: PeekFrom, PopFrom, IntoTokens { 34 | LiteralExpression(LiteralExpression), 35 | } else "Expected expression without block variant."; 36 | 37 | /// [ExpressionWithBlock](https://doc.rust-lang.org/reference/expressions.html#grammar-ExpressionWithBlock) 38 | #[derive(Clone)] 39 | #[non_exhaustive] 40 | pub struct ExpressionWithBlock: PeekFrom, PopFrom, IntoTokens { 41 | pub outer_attributes: Greedy>, 42 | pub variant: ExpressionWithBlockVariant, 43 | } 44 | 45 | /// [`ExpressionWithBlock::variant`] 46 | #[derive(Clone)] 47 | #[non_exhaustive] 48 | pub enum ExpressionWithBlockVariant: PeekFrom, PopFrom, IntoTokens { 49 | BlockExpression(BlockExpression), 50 | } else "Expected expression with block variant."; 51 | 52 | /// Flattened [`Expression`]. 53 | #[derive(Clone)] 54 | #[non_exhaustive] 55 | pub struct AnyExpression: PeekFrom, PopFrom, IntoTokens { 56 | pub outer_attributes: Greedy>, 57 | pub variant: AnyExpressionVariant, 58 | } 59 | 60 | #[derive(Clone)] 61 | #[non_exhaustive] 62 | pub enum AnyExpressionVariant: doc, PeekFrom, PopFrom, IntoTokens { 63 | LiteralExpression(LiteralExpression), 64 | // 65 | BlockExpression(BlockExpression), 66 | } else "Expected expression without block variant."; 67 | } 68 | 69 | impl From for AnyExpression { 70 | fn from(value: Expression) -> Self { 71 | match value { 72 | Expression::ExpressionWithoutBlock(ExpressionWithoutBlock { 73 | outer_attributes, 74 | variant, 75 | }) => Self { 76 | outer_attributes, 77 | variant: match variant { 78 | ExpressionWithoutBlockVariant::LiteralExpression(l) => { 79 | AnyExpressionVariant::LiteralExpression(l) 80 | } 81 | }, 82 | }, 83 | Expression::ExpressionWithBlock(ExpressionWithBlock { 84 | outer_attributes, 85 | variant, 86 | }) => Self { 87 | outer_attributes, 88 | variant: match variant { 89 | ExpressionWithBlockVariant::BlockExpression(b) => { 90 | AnyExpressionVariant::BlockExpression(b) 91 | } 92 | }, 93 | }, 94 | } 95 | } 96 | } 97 | 98 | impl From for Expression { 99 | fn from(value: AnyExpression) -> Self { 100 | let AnyExpression { 101 | outer_attributes, 102 | variant, 103 | } = value; 104 | match variant { 105 | AnyExpressionVariant::LiteralExpression(l) => { 106 | Self::ExpressionWithoutBlock(ExpressionWithoutBlock { 107 | outer_attributes, 108 | variant: ExpressionWithoutBlockVariant::LiteralExpression(l), 109 | }) 110 | } 111 | AnyExpressionVariant::BlockExpression(b) => { 112 | Self::ExpressionWithBlock(ExpressionWithBlock { 113 | outer_attributes, 114 | variant: ExpressionWithBlockVariant::BlockExpression(b), 115 | }) 116 | } 117 | } 118 | } 119 | } 120 | -------------------------------------------------------------------------------- /README.md: 
-------------------------------------------------------------------------------- 1 | # Loess 2 | 3 | Loess is a parser library and parser generator for proc macros. 4 | 5 | For a simple but representative **example** of using Loess, see the [*inline-json5*](https://crates.io/crates/inline-json5) crate. 6 | 7 | Premade Rust grammar implementations can be found in [*loess-rust*](https://crates.io/crates/loess-rust), with additional temporary wrappers available in [*loess-rust-opaque*](https://crates.io/crates/loess-rust-opaque). 8 | 9 | Here's what to expect: 10 | 11 | - **Fast builds.** Loess's core is compact and language agnostic. Direct grammar implementations like *loess-rust* should compile fairly quickly too. 12 | 13 | - **A simple, flexible API.** Loess is relatively unopinionated about how or what you parse, and you can construct (and destructure) `Input` at any time. 14 | 15 | - ***Really* good error reporting** from proc macros implemented with Loess, *by default*. 16 | 17 | Many parsing errors are easily or automatically recoverable, which means multiple errors can be reported at once while preserving as much regular output as possible (which means no or much fewer cascading errors!). 18 | 19 | Panics inside your parser can also be caught and reported as located errors with the original panic message. 20 | 21 | - **A reasonably powerful parser-generator**. 22 | 23 | `grammar!` can emit documentation (for enums) and `PeekFrom`, `PopFrom` and `IntoTokens` implementations on grammar types in general. 24 | 25 | - **Powerful `quote_into` macros** that expand efficiently and are language-agnostic. 26 | 27 | You can cleanly loop and/or branch within the template. 28 | 29 | - Low-allocation workflow. 30 | 31 | Loess can (usually) move tokens from input to output without cloning them. (You can still clone all included grammar types explicitly, including when pasting in quotes.) 32 | 33 | Here's what not to expect: 34 | 35 | - A general Syn-replacement (at least not soon). 36 | 37 | Loess is mainly aimed at implementing domain-specific languages that may cite fragments of Rust verbatim in their output. There is currently no focus on parsing Rust code or transforming it *in-depth*. 38 | 39 | ## Using `$crate` for full caller independence 40 | 41 | `loess::IntoTokens`-methods take an (optionally empty) `root: &TokenStream` parameter, 42 | which all emitted fully qualified paths should be prefixed with. 43 | 44 | In combination with a wrapper crate: This achieves full isolation regarding caller dependencies: 45 | 46 |
(click to expand code blocks) 47 | 48 | 50 | 51 | ```rust ,ignore 52 | // wrapper crate 53 | 54 | #[macro_export] 55 | macro_rules! my_macro { 56 | ($($tt:tt)*) => ( $crate::__::my_macro!([$crate] $($tt)*) ); 57 | } 58 | 59 | #[doc(hidden)] 60 | pub mod __ { 61 | pub use core; // Expected by `Errors`. 62 | pub use my_macro_impl::my_macro; 63 | } 64 | ``` 65 | 66 | ```rust 67 | // my_macro_impl (proc macro) 68 | 69 | use loess::{ 70 | grammar, parse_once, parse_all, 71 | Errors, Input, IntoTokens, 72 | scaffold::SquareBrackets, 73 | }; 74 | use proc_macro2::{Span, TokenStream, TokenTree}; 75 | 76 | // […] 77 | 78 | fn macro_impl(input: TokenStream) -> TokenStream { 79 | let mut input = Input { 80 | tokens: input.into_iter().collect(), 81 | end: Span::call_site(), 82 | }; 83 | let mut errors = Errors::new(); 84 | 85 | // `root` is implicitly a `TokenStream`. 86 | let Ok(SquareBrackets { contents: root, .. }) = parse_once( 87 | &mut input, 88 | &mut errors, 89 | ) else { return errors.collect_tokens(&TokenStream::new()) }; 90 | 91 | grammar! { 92 | /// This represents your complete input grammar. 93 | /// This here is a placeholder, so it's empty. 94 | struct Grammar: PopFrom {} 95 | } 96 | 97 | // Checks for exhaustiveness. 98 | let parsed = parse_all(&mut input, &mut errors).next(); 99 | let mut output = errors.collect_tokens(&root); 100 | 101 | if let Some(Grammar {}) = parsed { 102 | // Emit your output here. 103 | } 104 | 105 | output 106 | } 107 | ``` 108 | 109 |
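As a rough sketch of the other side of this contract, here is how a manual `IntoTokens` implementation can consume `root` when emitting a fully qualified path. `MyMarker` and the emitted `__::core::marker::PhantomData` path are placeholders for illustration only, not part of Loess:

```rust ,ignore
use loess::IntoTokens;
use proc_macro2::{Ident, Punct, Spacing, Span, TokenStream, TokenTree};

struct MyMarker;

impl IntoTokens for MyMarker {
	fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>) {
		// Start with the caller-provided root (e.g. `$crate` forwarded by the
		// wrapper above), so that the path resolves against the *caller's*
		// dependencies: `$crate::__::core::marker::PhantomData`.
		tokens.extend(root.clone());
		for segment in ["__", "core", "marker", "PhantomData"] {
			// `::` is two `:` puncts, the first joined to the second.
			tokens.extend([
				TokenTree::Punct(Punct::new(':', Spacing::Joint)),
				TokenTree::Punct(Punct::new(':', Spacing::Alone)),
				TokenTree::Ident(Ident::new(segment, Span::mixed_site())),
			]);
		}
	}
}
```

(In practice, output is more commonly assembled with the `quote_into…` macros, which also take `root`; the manual version above just makes the role of `root` explicit.)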
110 | -------------------------------------------------------------------------------- /deny.toml: -------------------------------------------------------------------------------- 1 | # Note that all fields that take a lint level have these possible values: 2 | # * deny - An error will be produced and the check will fail 3 | # * warn - A warning will be produced, but the check will not fail 4 | # * allow - No warning or error will be produced, though in some cases a note 5 | # will be 6 | 7 | [graph] 8 | all-features = true 9 | no-default-features = false 10 | 11 | [output] 12 | # When outputting inclusion graphs in diagnostics that include features, this 13 | # option can be used to specify the depth at which feature edges will be added. 14 | # This option is included since the graphs can be quite large and the addition 15 | # of features from the crate(s) to all of the graph roots can be far too verbose. 16 | # This option can be overridden via `--feature-depth` on the cmd line 17 | feature-depth = 1 18 | 19 | [advisories] 20 | ignore = [ 21 | #"RUSTSEC-0000-0000", 22 | #{ id = "RUSTSEC-0000-0000", reason = "you can specify a reason the advisory is ignored" }, 23 | #"a-crate-that-is-yanked@0.1.1", # you can also ignore yanked crate versions if you wish 24 | #{ crate = "a-crate-that-is-yanked@0.1.1", reason = "you can specify why you are ignoring the yanked crate" }, 25 | ] 26 | 27 | # This section is considered when running `cargo deny check licenses` 28 | # More documentation for the licenses section can be found here: 29 | # https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html 30 | [licenses] 31 | allow = [ 32 | "MIT", 33 | "Apache-2.0", 34 | "Unicode-3.0", 35 | ] 36 | confidence-threshold = 1.0 37 | exceptions = [ 38 | #{ allow = ["Zlib"], crate = "adler32" }, 39 | ] 40 | 41 | #[[licenses.clarify]] 42 | # The package spec the clarification applies to 43 | #crate = "ring" 44 | # The SPDX expression for the license requirements of the crate 45 | #expression = "MIT AND ISC AND OpenSSL" 46 | # One or more files in the crate's source used as the "source of truth" for 47 | # the license expression. If the contents match, the clarification will be used 48 | # when running the license check, otherwise the clarification will be ignored 49 | # and the crate will be checked normally, which may produce warnings or errors 50 | # depending on the rest of your configuration 51 | #license-files = [ 52 | # Each entry is a crate relative path, and the (opaque) hash of its contents 53 | #{ path = "LICENSE", hash = 0xbd0eed23 } 54 | #] 55 | 56 | # This section is considered when running `cargo deny check bans`. 57 | # More documentation about the 'bans' section can be found here: 58 | # https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html 59 | [bans] 60 | multiple-versions = "warn" 61 | wildcards = "deny" 62 | # List of crates that are allowed. Use with care! 63 | allow = [ 64 | #"ansi_term@0.11.0", 65 | #{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is allowed" }, 66 | ] 67 | # List of crates to deny 68 | deny = [ 69 | #"ansi_term@0.11.0", 70 | #{ crate = "ansi_term@0.11.0", reason = "you can specify a reason it is banned" }, 71 | # Wrapper crates can optionally be specified to allow the crate when it 72 | # is a direct dependency of the otherwise banned crate 73 | #{ crate = "ansi_term@0.11.0", wrappers = ["this-crate-directly-depends-on-ansi_term"] }, 74 | ] 75 | # Certain crates/versions that will be skipped when doing duplicate detection. 
76 | skip = [ 77 | #"ansi_term@0.11.0", 78 | #{ crate = "ansi_term@0.11.0", reason = "you can specify a reason why it can't be updated/removed" }, 79 | ] 80 | # Similarly to `skip` allows you to skip certain crates during duplicate 81 | # detection. Unlike skip, it also includes the entire tree of transitive 82 | # dependencies starting at the specified crate, up to a certain depth, which is 83 | # by default infinite. 84 | skip-tree = [ 85 | #"ansi_term@0.11.0", # will be skipped along with _all_ of its direct and transitive dependencies 86 | #{ crate = "ansi_term@0.11.0", depth = 20 }, 87 | ] 88 | 89 | # This section is considered when running `cargo deny check sources`. 90 | # More documentation about the 'sources' section can be found here: 91 | # https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html 92 | [sources] 93 | unknown-registry = "deny" 94 | unknown-git = "deny" 95 | allow-registry = ["https://github.com/rust-lang/crates.io-index"] 96 | allow-git = [] 97 | 98 | [sources.allow-org] 99 | # github.com organizations to allow git sources for 100 | github = [] 101 | # gitlab.com organizations to allow git sources for 102 | gitlab = [] 103 | # bitbucket.org organizations to allow git sources for 104 | bitbucket = [] 105 | -------------------------------------------------------------------------------- /loess/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Loess Changelog 2 | 3 | ## 0.2.5 4 | 5 | 2025-09-15 6 | 7 | ### Features 8 | 9 | - Added `impl IntoTokens for &T`. 10 | You can now prefix pasted values with `&` to automatically clone them. 11 | 12 | ### Revisions 13 | 14 | - Updated docs wording ("token" vs. "symbol"). 15 | 16 | ## 0.2.4 17 | 18 | 2025-07-16 19 | 20 | ### Features 21 | 22 | - Added `Errors::into_of_highest_priority(self) -> impl Iterator`. 23 | - Added `Error::message(&self) -> &str` and `Error::span(&self) -> Option`. 24 | 25 | ### Revisions 26 | 27 | - Updated repository link in meta data. 28 | - Deprecated the [`rust_grammar`](https://docs.rs/loess/0.2.3/loess/rust_grammar/index.html) module. 29 | This has been separated-out into additional crates to avoid compilation bottlenecks for slim DSLs. 30 | 31 | ## 0.2.3 32 | 33 | 2025-04-26 34 | 35 | ### Features 36 | 37 | - Added revised syntax with cleaner directives to `quote_into…`. 38 | To use this new syntax, enclose the template parameter in curly braces (`{}`) instead of rectangular brackets (`[]`). 39 | 40 | ### Revisions 41 | 42 | - Deprecated the old `quote_into…` directive syntax. 43 | Its documentation can be found in older versions of this crate. 44 | 45 | ## 0.2.2 46 | 47 | 2025-04-22 48 | 49 | ### Revisions 50 | 51 | - Fixed the first [`grammar!`]-mention in the docs to be a link. 52 | - Terminology: "printer-generator" -> "serialiser-generator". 53 | - Removed unnecessary semicolon when emitting [`Error`](https://docs.rs/loess/0.2/loess/struct.Error.html). 54 | 55 | ## 0.2.1 56 | 57 | 2025-04-21 58 | 59 | ### Breaking changes 60 | 61 | - Signature change: [`Input::peek`](https://docs.rs/loess/0.2/loess/struct.Input.html#method.peek) 62 | 63 | The callback now is given a `vec_deque::Iter` as second argument to examine further tokens if needed. 64 | 65 | - Signature change: [`Input::pop_or_replace`](https://docs.rs/loess/0.2/loess/struct.Input.html#method.peek) 66 | 67 | The callback now is given the `&mut Input` as second parameter to examine or consume further tokens if needed. 
68 | 69 | - Removed: `impl ToTokens for Identifier` 70 | 71 | I had forgotten to remove this before publishing the first version. 72 | 73 | `quote` is now an optional dependency with `default-features = false`, only required by the `"opaque_rust_grammar"` feature. 74 | 75 | ### Revisions 76 | 77 | - [`rust_grammar::DotDot`](https://docs.rs/loess/0.2/loess/rust_grammar/struct.DotDot.html) is now parsed more accurately: It must either be spaced or not followed by `.` or `=`. 78 | 79 | - Small documentation revision. 80 | 81 | ## 0.1.1 82 | 83 | 2025-04-21 84 | 85 | ### Features 86 | 87 | - Added: [`Input::peek(&self, f)`](https://docs.rs/loess/0.1/loess/struct.Input.html#method.peek) 88 | 89 | - Added: [`impl PeekFrom for RArrow`](https://docs.rs/loess/0.1/loess/rust_grammar/struct.RArrow.html#impl-PeekFrom-for-RArrow) (`->`) 90 | 91 | - Added: [`Eager(pub T)`](https://docs.rs/loess/0.1/loess/struct.Eager.html) 92 | 93 | This struct can be wrapped around `T` that are `IntoIterator` and also `FromIterator` regarding that same item type. It parses repeated values eagerly but stops without error when it detects that the value doesn't repeat. 94 | 95 | (Note that delimited groups still independently raise errors for unconsumed tokens when parsed directly.) 96 | 97 | - Added: [`quote_into_mixed_site!`](https://docs.rs/loess/0.1/loess/macro.quote_into_mixed_site.html) (recommended), [`quote_into_with_exact_span!`](https://docs.rs/loess/0.1/loess/macro.quote_into_with_exact_span.html) and [`quote_into_call_site!`](https://docs.rs/loess/0.1/loess/macro.quote_into_call_site.html) 98 | 99 | These statement macros take `span`, `root`, `tokens` and a bracketed `[…]` group as input, separated by commas. 100 | 101 | Inside the bracketed group, most tokens are translated directly into the output, but you can directives that paste [`IntoTokens`](https://docs.rs/loess/0.1/loess/trait.IntoTokens.html) values into the output or expand to control flow statements. You can find more information in the [`quote_into_mixed_site!`](https://docs.rs/loess/0.1/loess/macro.quote_into_mixed_site.html) documentation. 102 | 103 | - Added: [`raw_quote_into_mixed_site!`](https://docs.rs/loess/0.1/loess/macro.raw_quote_into_mixed_site.html) (recommended), [`raw_quote_into_with_exact_span!`](https://docs.rs/loess/0.1/loess/macro.raw_quote_into_with_exact_span.html) and [`raw_quote_into_call_site!`](https://docs.rs/loess/0.1/loess/macro.raw_quote_into_call_site.html) 104 | 105 | These statement macros quote tokens without processing directives, and as such don't accept a `root` parameter. Use them to efficiently emit static code. (Note that the `{#raw … }` directive has the same effect within other `quote_into…` macros.) 106 | 107 | ### Revisions 108 | 109 | Various small documentation additions. 110 | 111 | ## 0.1.0 112 | 113 | 2025-04-16 114 | 115 | Initial release. 116 | -------------------------------------------------------------------------------- /loess/src/scaffold/groups.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | collections::VecDeque, 3 | panic::{AssertUnwindSafe, catch_unwind, resume_unwind}, 4 | }; 5 | 6 | use crate::{ 7 | Error, ErrorPriority, Errors, HandledPanic, Input, IntoTokens, PeekFrom, PopParsedFrom, 8 | Remnant, error_priorities::UNCONSUMED_IN_DELIMITER, scaffold::Exhaustive, 9 | }; 10 | use proc_macro2::{Delimiter, Group, TokenStream, TokenTree, extra::DelimSpan}; 11 | 12 | macro_rules! 
delimiter_struct { 13 | ($name:ident, $delimiter:expr, $opening:literal $closing:literal) => { 14 | #[doc = concat!($opening, " [`T`](`TokenStream`) ", $closing)] 15 | #[derive(Clone, Debug)] 16 | pub struct $name { 17 | #[allow(missing_docs)] 18 | pub span: DelimSpan, 19 | #[allow(missing_docs)] 20 | pub contents: T, 21 | } 22 | 23 | /// Checks for the delimiters **and contents**. 24 | impl PeekFrom for $name { 25 | fn peek_from(input: &Input) -> bool { 26 | match input.front() { 27 | Some(TokenTree::Group(group)) if group.delimiter() == $delimiter => { 28 | T::peek_from(&Input { 29 | tokens: group.stream().into_iter().collect(), 30 | end: group.span_close(), 31 | }) 32 | } 33 | _ => false, 34 | } 35 | } 36 | } 37 | 38 | impl $name { 39 | #[doc = concat!("Maps self.[contents](`", stringify!($name), "::contents`) using `f`.")] 40 | pub fn map(self, f: impl FnOnce(T) -> U) -> $name { 41 | let Self { span, contents } = self; 42 | $name { 43 | span, 44 | contents: f(contents), 45 | } 46 | } 47 | 48 | #[doc = concat!("Tries to map self.[contents](`", stringify!($name), "::contents`) using `f`.")] 49 | pub fn try_map( 50 | self, 51 | f: impl FnOnce(T) -> Result, 52 | ) -> Result<$name, $name> { 53 | let Self { span, contents } = self; 54 | $name { 55 | span, 56 | contents: f(contents), 57 | } 58 | .transpose() 59 | } 60 | } 61 | 62 | impl $name> { 63 | #[doc = concat!("Lifts an inner [`Result`] out of `self`. (The [`", stringify!($name), "`] \"sinks\" into the variants.)")] 64 | pub fn transpose(self) -> Result<$name, $name> { 65 | let Self { span, contents } = self; 66 | match contents { 67 | Ok(contents) => Ok($name { span, contents }), 68 | Err(contents) => Err($name { span, contents }), 69 | } 70 | } 71 | } 72 | 73 | impl PopParsedFrom for $name { 74 | type Parsed = $name; 75 | type Remnant = <>::Mapped<$name> as Remnant<$name>>::Option; 76 | 77 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result<::Parsed, Self::Remnant> { 78 | let (span, mut contents) = input 79 | .pop_or_replace(|tts, _| match tts { 80 | [TokenTree::Group(group)] if group.delimiter() == $delimiter => Ok(( 81 | group.delim_span(), 82 | Input { 83 | tokens: group.stream().into_iter().collect::>(), 84 | end: group.span_close(), 85 | }, 86 | )), 87 | other => Err(other), 88 | }) 89 | .map_err(|spans| { 90 | errors.push(Error::new( 91 | ErrorPriority::TOKEN, 92 | concat!("Expected ", $opening, "."), 93 | spans, 94 | )) 95 | })?; 96 | 97 | match catch_unwind(AssertUnwindSafe(|| { 98 | Ok(Self::Parsed { 99 | span, 100 | contents: Exhaustive::::pop_parsed_from( 101 | &mut contents, 102 | errors, 103 | )?, 104 | }) 105 | })) { 106 | Ok(result) => result, 107 | Err(panic) => { 108 | errors.push(Error::new( 109 | ErrorPriority::PANIC, 110 | &format!( 111 | "proc macro panicked: {:?}", 112 | if panic.as_ref().is::() { 113 | resume_unwind(panic) 114 | } else if let Some(message) = 115 | panic.as_ref().downcast_ref::() 116 | { 117 | message.clone() 118 | } else if let Some(message) = 119 | panic.as_ref().downcast_ref::<&'static str>() 120 | { 121 | message.to_string() 122 | } else { 123 | // Unhandled panic type. 
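// The payload can't be formatted into the message, so record a located
// error here and then re-raise the original panic below.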
124 | errors.push(Error::new( 125 | ErrorPriority::PANIC, 126 | "proc macro panicked (trace of unhandled panic type)", 127 | [contents.front_span()], 128 | )); 129 | resume_unwind(panic) 130 | } 131 | ), 132 | [contents.front_span()], 133 | )); 134 | resume_unwind(Box::new(HandledPanic)); 135 | } 136 | } 137 | } 138 | } 139 | 140 | impl IntoTokens for $name { 141 | fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend) { 142 | let mut group = Group::new($delimiter, self.contents.collect_tokens(root)); 143 | group.set_span(self.span.join()); 144 | tokens.extend([TokenTree::Group(group)]); 145 | } 146 | } 147 | }; 148 | } 149 | 150 | delimiter_struct!(CurlyBraces, Delimiter::Brace, "`{`" "`}`"); 151 | delimiter_struct!(SquareBrackets, Delimiter::Bracket, "`[`" "`]`"); 152 | delimiter_struct!(Parentheses, Delimiter::Parenthesis, "`(`" "`)`"); 153 | delimiter_struct!(MetaGroup, Delimiter::None, "*meta-group-start*" "*meta-group-end*"); 154 | -------------------------------------------------------------------------------- /loess/src/proc_macro2_impls.rs: -------------------------------------------------------------------------------- 1 | use crate::{ErrorPriority, Errors, Input, IntoTokens, PeekFrom, PopParsedFrom, SimpleSpanned}; 2 | 3 | use super::Error; 4 | use proc_macro2::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree}; 5 | 6 | impl PeekFrom for Group { 7 | fn peek_from(input: &Input) -> bool { 8 | matches!(input.front(), Some(TokenTree::Group(_))) 9 | } 10 | } 11 | 12 | impl PeekFrom for Ident { 13 | fn peek_from(input: &Input) -> bool { 14 | matches!(input.front(), Some(TokenTree::Ident(_))) 15 | } 16 | } 17 | 18 | impl PeekFrom for Punct { 19 | fn peek_from(input: &Input) -> bool { 20 | matches!(input.front(), Some(TokenTree::Punct(_))) 21 | } 22 | } 23 | 24 | impl PeekFrom for Literal { 25 | fn peek_from(input: &Input) -> bool { 26 | matches!(input.front(), Some(TokenTree::Literal(_))) 27 | } 28 | } 29 | 30 | impl PeekFrom for TokenTree { 31 | fn peek_from(input: &Input) -> bool { 32 | !input.is_empty() 33 | } 34 | } 35 | 36 | /// **Always** succeeds. 
37 | impl PeekFrom for TokenStream { 38 | fn peek_from(_input: &Input) -> bool { 39 | true 40 | } 41 | } 42 | 43 | impl PopParsedFrom for Group { 44 | type Parsed = Self; 45 | type Remnant = (); 46 | 47 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result 48 | where 49 | Self: Sized, 50 | { 51 | input 52 | .pop_or_replace(|t, _| match t { 53 | [TokenTree::Group(group)] => Ok(group), 54 | t => Err(t), 55 | }) 56 | .map_err(|spans| { 57 | errors.push(Error::new(ErrorPriority::TOKEN, "Expected Group.", spans)) 58 | }) 59 | } 60 | } 61 | 62 | impl PopParsedFrom for Ident { 63 | type Parsed = Self; 64 | type Remnant = (); 65 | 66 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result 67 | where 68 | Self: Sized, 69 | { 70 | input 71 | .pop_or_replace(|t, _| match t { 72 | [TokenTree::Ident(ident)] => Ok(ident), 73 | t => Err(t), 74 | }) 75 | .map_err(|spans| { 76 | errors.push(Error::new(ErrorPriority::TOKEN, "Expected Ident.", spans)) 77 | }) 78 | } 79 | } 80 | 81 | impl PopParsedFrom for Punct { 82 | type Parsed = Self; 83 | type Remnant = (); 84 | 85 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result 86 | where 87 | Self: Sized, 88 | { 89 | input 90 | .pop_or_replace(|t, _| match t { 91 | [TokenTree::Punct(punct)] => Ok(punct), 92 | t => Err(t), 93 | }) 94 | .map_err(|spans| { 95 | errors.push(Error::new(ErrorPriority::TOKEN, "Expected Punct.", spans)) 96 | }) 97 | } 98 | } 99 | 100 | impl PopParsedFrom for Literal { 101 | type Parsed = Self; 102 | type Remnant = (); 103 | 104 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result 105 | where 106 | Self: Sized, 107 | { 108 | input 109 | .pop_or_replace(|t, _| match t { 110 | [TokenTree::Literal(literal)] => Ok(literal), 111 | t => Err(t), 112 | }) 113 | .map_err(|spans| { 114 | errors.push(Error::new(ErrorPriority::TOKEN, "Expected Literal.", spans)) 115 | }) 116 | } 117 | } 118 | 119 | impl PopParsedFrom for TokenTree { 120 | type Parsed = Self; 121 | type Remnant = (); 122 | 123 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result 124 | where 125 | Self: Sized, 126 | { 127 | input.pop_or_replace(|[t], _| Ok(t)).map_err(|spans| { 128 | errors.push(Error::new(ErrorPriority::TOKEN, "Expected token.", spans)) 129 | }) 130 | } 131 | } 132 | 133 | /// Exhaustive, infallible. 
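// Parsing a `TokenStream` simply drains all remaining input, so it cannot
// fail and never leaves tokens behind.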
134 | impl PopParsedFrom for TokenStream { 135 | type Parsed = Self; 136 | type Remnant = (); 137 | 138 | fn pop_parsed_from(input: &mut Input, _errors: &mut Errors) -> Result { 139 | Ok(input.tokens.drain(..).collect()) 140 | } 141 | } 142 | 143 | impl SimpleSpanned for Ident { 144 | fn span(&self) -> Span { 145 | self.span() 146 | } 147 | 148 | fn set_span(&mut self, span: Span) { 149 | self.set_span(span) 150 | } 151 | } 152 | 153 | impl SimpleSpanned for Punct { 154 | fn span(&self) -> Span { 155 | self.span() 156 | } 157 | 158 | fn set_span(&mut self, span: Span) { 159 | self.set_span(span) 160 | } 161 | } 162 | 163 | impl SimpleSpanned for Literal { 164 | fn span(&self) -> Span { 165 | self.span() 166 | } 167 | 168 | fn set_span(&mut self, span: Span) { 169 | self.set_span(span) 170 | } 171 | } 172 | 173 | impl IntoTokens for Group { 174 | fn into_tokens(self, _root: &TokenStream, tokens: &mut impl Extend) { 175 | tokens.extend([TokenTree::Group(self)]) 176 | } 177 | } 178 | 179 | impl IntoTokens for Ident { 180 | fn into_tokens(self, _root: &TokenStream, tokens: &mut impl Extend) { 181 | tokens.extend([TokenTree::Ident(self)]) 182 | } 183 | } 184 | 185 | impl IntoTokens for Punct { 186 | fn into_tokens(self, _root: &TokenStream, tokens: &mut impl Extend) { 187 | tokens.extend([TokenTree::Punct(self)]) 188 | } 189 | } 190 | 191 | impl IntoTokens for Literal { 192 | fn into_tokens(self, _root: &TokenStream, tokens: &mut impl Extend) { 193 | tokens.extend([TokenTree::Literal(self)]) 194 | } 195 | } 196 | 197 | impl IntoTokens for TokenTree { 198 | fn into_tokens(self, _root: &TokenStream, tokens: &mut impl Extend) { 199 | tokens.extend([self]) 200 | } 201 | } 202 | 203 | impl IntoTokens for TokenStream { 204 | fn into_tokens(self, _root: &TokenStream, tokens: &mut impl Extend) { 205 | tokens.extend(self); 206 | } 207 | } 208 | -------------------------------------------------------------------------------- /labels.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "breaking", 4 | "color": "d73a4a", 5 | "description": "Introduces or requests a breaking change." 6 | }, 7 | { 8 | "name": "good first issue", 9 | "color": "7057ff", 10 | "description": "Good for newcomers" 11 | }, 12 | { 13 | "name": "help wanted", 14 | "color": "008672", 15 | "description": "Extra attention is needed" 16 | }, 17 | { 18 | "name": "effort: 1", 19 | "color": "91ca55", 20 | "description": "Relative effort required. There's no specific unit of measurement." 21 | }, 22 | { 23 | "name": "effort: 2", 24 | "color": "c2e2a2", 25 | "description": "Relative effort required. There's no specific unit of measurement." 26 | }, 27 | { 28 | "name": "effort: 3", 29 | "color": "e9f4dc", 30 | "description": "Relative effort required. There's no specific unit of measurement." 31 | }, 32 | { 33 | "name": "effort: 5", 34 | "color": "fef6d7", 35 | "description": "Relative effort required. There's no specific unit of measurement." 36 | }, 37 | { 38 | "name": "effort: 8", 39 | "color": "fef2c0", 40 | "description": "Relative effort required. There's no specific unit of measurement." 41 | }, 42 | { 43 | "name": "effort: 13", 44 | "color": "fbca04", 45 | "description": "Relative effort required. There's no specific unit of measurement." 46 | }, 47 | { 48 | "name": "priority: first", 49 | "color": "d73a4a", 50 | "description": "I'll try to take care of this soon-ish if nobody else does." 
51 | }, 52 | { 53 | "name": "priority: next", 54 | "color": "ffb8c6", 55 | "description": "I'll probably get to it, eventually." 56 | }, 57 | { 58 | "name": "priority: someday", 59 | "color": "000000", 60 | "description": "If you need this, please let me know!" 61 | }, 62 | { 63 | "name": "state: approved", 64 | "color": "91ca55", 65 | "description": "Approved to proceed." 66 | }, 67 | { 68 | "name": "state: blocked", 69 | "color": "ffffff", 70 | "description": "Something is blocking action (aside from pure proceedings or missing information).", 71 | "aliases": [ 72 | "wontfix" 73 | ] 74 | }, 75 | { 76 | "name": "state: pending", 77 | "color": "fbca04", 78 | "description": "Waiting on other proceedings (aside from information)" 79 | }, 80 | { 81 | "name": "state: inactive", 82 | "color": "000000", 83 | "description": "No action needed or possible. The issue is either fixed or addressed better by other issues.", 84 | "aliases": [ 85 | "duplicate" 86 | ] 87 | }, 88 | { 89 | "name": "state: question", 90 | "color": "d876e3", 91 | "description": "Further information is requested", 92 | "aliases": [ 93 | "question" 94 | ] 95 | }, 96 | { 97 | "name": "state: denied", 98 | "color": "e4e669", 99 | "description": "It could just be out of scope, but either way it won't be implemented here.", 100 | "aliases": [ 101 | "invalid" 102 | ] 103 | }, 104 | { 105 | "name": "type: bug", 106 | "color": "d73a4a", 107 | "description": "Something isn't working as expected", 108 | "aliases": [ 109 | "bug" 110 | ] 111 | }, 112 | { 113 | "name": "type: upkeep", 114 | "color": "0366d6", 115 | "description": "Converting measurements, reorganizing folder structure, and other necessary tasks.", 116 | "aliases": [ 117 | "dependencies" 118 | ] 119 | }, 120 | { 121 | "name": "type: documentation", 122 | "color": "fef2c0", 123 | "description": "Related to documentation and information.", 124 | "aliases": [ 125 | "documentation" 126 | ] 127 | }, 128 | { 129 | "name": "type: feature", 130 | "color": "5ebeff", 131 | "description": "Brand new functionality, features, pages, workflows, endpoints, etc.", 132 | "aliases": [ 133 | "enhancement" 134 | ] 135 | }, 136 | { 137 | "name": "type: fix", 138 | "color": "91ca55", 139 | "description": "Iterations on existing features or infrastructure" 140 | }, 141 | { 142 | "name": "type: security", 143 | "color": "d73a4a", 144 | "description": "Something is vulnerable or not secure" 145 | }, 146 | { 147 | "name": "type: testing", 148 | "color": "fbca04", 149 | "description": "Related to testing" 150 | }, 151 | { 152 | "name": "work: novel", 153 | "color": "fbca04", 154 | "description": "There's a problem, and both goal and solution are unclear." 155 | }, 156 | { 157 | "name": "work: emergent", 158 | "color": "d4c5f9", 159 | "description": "The goal isn't known, but a plan exists." 160 | }, 161 | { 162 | "name": "work: complicated", 163 | "color": "ffb8c6", 164 | "description": "A specific goal already exists, but the path there isn't certain." 165 | }, 166 | { 167 | "name": "work: clear", 168 | "color": "91ca55", 169 | "description": "A known solution is (to be) implemented." 
170 | }, 171 | { 172 | "name": "domain: Rust", 173 | "color": "000000", 174 | "description": "Involves Rust code.", 175 | "aliases": [ 176 | "rust" 177 | ] 178 | }, 179 | { 180 | "name": "domain: GitHub Actions", 181 | "color": "000000", 182 | "description": "Involves GitHub Actions.", 183 | "aliases": [ 184 | "github_actions" 185 | ] 186 | }, 187 | { 188 | "name": "Hacktoberfest", 189 | "color": "91A88C", 190 | "description": "A good-for-Hacktoberfest issue. This label is only maintained in October." 191 | }, 192 | { 193 | "name": "hacktoberfest-accepted", 194 | "color": "A88771", 195 | "description": "Thank you for your contribution!" 196 | }, 197 | { 198 | "name": "spam", 199 | "color": "F74700", 200 | "description": "Measure of last resort for Hacktoberfest purposes. Don't use this otherwise." 201 | } 202 | ] -------------------------------------------------------------------------------- /loess-rust-opaque/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! Additional grammar tokens representing the stable Rust programming language, 2 | //! closely following [The Rust Reference](https://doc.rust-lang.org/stable/reference/). 3 | //! 4 | //! Corrections in that regard are not automatically considered breaking changes, 5 | //! unless they became necessary due to a change in Rust **and** reduce what is considered valid. 6 | //! 7 | //! Breaking changes to the API are considered breaking as normal. 8 | //! 9 | //! *Note that unstable grammar **is** accidentally accepted in some cases.* 10 | //! ***Ceasing to accept unstable grammar is not by itself considered a breaking change for Loess.*** 11 | 12 | use loess::{Error, ErrorPriority, Errors, Input, IntoTokens, PopParsedFrom}; 13 | use proc_macro2::{TokenStream, TokenTree}; 14 | use quote::ToTokens; 15 | use syn::{ 16 | Expr, Pat, Path, Stmt, 17 | parse::{Parse, ParseStream, Parser}, 18 | }; 19 | 20 | fn error_reporter(errors: &mut Errors) -> impl '_ + FnOnce(syn::Error) { 21 | move |error| { 22 | errors.push(Error::new( 23 | ErrorPriority::GRAMMAR, 24 | error.to_string(), 25 | [error.span()], 26 | )) 27 | } 28 | } 29 | 30 | macro_rules! wrappers { 31 | ($( 32 | $(#[$($attr:tt)*])* 33 | $name:ident($wrapped:ty)$(: $( 34 | // $(PeekFrom $(@ $PeekFrom:tt)?)? 35 | $(PopFrom $(@ $PopFrom:tt)?)? 36 | $(IntoTokens $(@ $IntoTokens:tt)?)? 37 | ),*$(,)?)?; 38 | )*) => {$( 39 | $(#[$($attr)*])* 40 | #[derive(Clone)] 41 | pub struct $name($wrapped); 42 | 43 | $($( 44 | // $( 45 | // $(@ $PeekFrom)? 46 | // impl PeekFrom for $name { 47 | // fn peek_from(input: &Input) -> bool { 48 | // fn peek(input: ParseStream) -> syn::Result { 49 | // let result = input.peek(<$wrapped>::default()); 50 | // let _ = TokenStream::parse(input).expect("infallible"); // Discard cloned input. 51 | // Ok(result) 52 | // } 53 | 54 | // let input = input.tokens.iter().cloned().collect::(); 55 | // peek.parse2(input).expect("infallible") 56 | // } 57 | // } 58 | // )? 59 | 60 | $( 61 | $(@ $PopFrom)? 62 | impl PopParsedFrom for $name { 63 | type Parsed = Self; 64 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result { 65 | fn parse(input: ParseStream) -> syn::Result<($wrapped, TokenStream)> { 66 | Ok((<$wrapped>::parse(input)?, TokenStream::parse(input)?)) 67 | } 68 | 69 | let tokens: TokenStream = input.tokens.drain(..).collect(); 70 | let (parsed, rest) = parse.parse2(tokens).map_err(error_reporter(errors))?; 71 | input.prepend(rest.into_iter().collect::>()); 72 | Ok(Self(parsed)) 73 | } 74 | } 75 | )? 
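// The `$(@ $IntoTokens)?` marker below normally expands to nothing; it only
// references the capture so that this optional block is emitted exactly when
// `IntoTokens` was listed for the wrapper.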
76 | 77 | $( 78 | $(@ $IntoTokens)? 79 | impl IntoTokens for $name { 80 | fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend) { 81 | self.0.into_token_stream().into_tokens(root, tokens); 82 | } 83 | } 84 | )? 85 | )*)? 86 | )*}; 87 | } 88 | 89 | wrappers! { 90 | /// [*Expression*](https://doc.rust-lang.org/stable/reference/expressions.html#r-expr.syntax) 91 | Expression(Expr): PopFrom, IntoTokens; 92 | 93 | /// [*Expression*](https://doc.rust-lang.org/stable/reference/expressions.html#r-expr.syntax) 94 | /// except [*StructExpression*](https://doc.rust-lang.org/stable/reference/expressions/struct-expr.html#r-expr.struct.syntax) 95 | ExpressionExceptStructExpression(Expr): IntoTokens; 96 | 97 | /// [*Pattern*](https://doc.rust-lang.org/stable/reference/patterns.html#r-patterns.syntax) 98 | Pattern(Pat): IntoTokens; 99 | 100 | /// [*SimplePath*](https://doc.rust-lang.org/stable/reference/paths.html#r-paths.simple.syntax) 101 | SimplePath(Path): IntoTokens; 102 | 103 | /// [*Statement*](https://doc.rust-lang.org/stable/reference/statements.html#r-statement.syntax) 104 | Statement(Stmt): PopFrom, IntoTokens; 105 | } 106 | 107 | impl PopParsedFrom for ExpressionExceptStructExpression { 108 | type Parsed = Self; 109 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result { 110 | fn parse( 111 | input: ParseStream, 112 | ) -> syn::Result<(ExpressionExceptStructExpression, TokenStream)> { 113 | Ok(( 114 | ExpressionExceptStructExpression(Expr::parse_without_eager_brace(input)?), 115 | TokenStream::parse(input)?, 116 | )) 117 | } 118 | 119 | let tokens = input.tokens.drain(..).collect::().into(); 120 | let (this, rest) = parse.parse2(tokens).map_err(error_reporter(errors))?; 121 | 122 | input.prepend(rest.into_iter().collect::>()); 123 | Ok(this) 124 | } 125 | } 126 | 127 | impl PopParsedFrom for Pattern { 128 | type Parsed = Self; 129 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result { 130 | fn parse(input: ParseStream) -> syn::Result<(Pattern, TokenStream)> { 131 | Ok(( 132 | Pattern(Pat::parse_multi_with_leading_vert(input)?), 133 | TokenStream::parse(input)?, 134 | )) 135 | } 136 | 137 | let tokens = input.tokens.drain(..).collect::().into(); 138 | let (this, rest) = parse.parse2(tokens).map_err(error_reporter(errors))?; 139 | 140 | input.prepend(rest.into_iter().collect::>()); 141 | Ok(this) 142 | } 143 | } 144 | 145 | impl PopParsedFrom for SimplePath { 146 | type Parsed = Self; 147 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result { 148 | fn parse(input: ParseStream) -> syn::Result<(SimplePath, TokenStream)> { 149 | Ok(( 150 | SimplePath(Path::parse_mod_style(input)?), 151 | TokenStream::parse(input)?, 152 | )) 153 | } 154 | 155 | let tokens = input.tokens.drain(..).collect::().into(); 156 | let (this, rest) = parse.parse2(tokens).map_err(error_reporter(errors))?; 157 | 158 | input.prepend(rest.into_iter().collect::>()); 159 | Ok(this) 160 | } 161 | } 162 | -------------------------------------------------------------------------------- /loess/src/macros/punctuation.rs: -------------------------------------------------------------------------------- 1 | #[macro_export] 2 | macro_rules! punctuation { 3 | { 4 | $(#[$($attr:tt)*])* 5 | ($($punct:tt)+) $(not before [$($not:tt)*])? 6 | as $vis:vis $name:ident$(: $( 7 | $(doc $(@ $doc:tt)?)? 8 | $(Default $(@ $Default:tt)?)? 9 | $(PeekFrom $(@ $PeekFrom:tt)?)? 10 | $(PopFrom $(@ $PopFrom:tt)?)? 11 | $(IntoTokens $(@ $IntoTokens:tt)?)? 
12 | $(SimpleSpanned $(@ $SimpleSpanned:tt)?)? 13 | $(LocatedAt $(@ $LocatedAt:tt)?)? 14 | $(ResolvedAt $(@ $ResolvedAt:tt)?)? 15 | ),*)? { 16 | $( 17 | $(#[$($field_attr:tt)*])* 18 | $punct_vis:vis $punct_name:ident 19 | ),+$(,)? 20 | }$(;)? 21 | 22 | $($rest:tt)* 23 | } => { 24 | $crate::__validate_punctuation!($($punct)*); 25 | 26 | #[cfg_attr( 27 | any($($($(all(), $(@ $doc)?)?)?)*), 28 | doc = $crate::__::concat!( 29 | '`', $crate::__::stringify!($($punct)+), '`', 30 | $(" not before [", $(" `", $crate::__::stringify!($not), "`",)* "]",)? 31 | ), 32 | )] 33 | $(#[$($attr)*])* 34 | $vis struct $name { 35 | $( 36 | $(#[$($field_attr)*])* 37 | $punct_vis $punct_name: $crate::__::Punct, 38 | )+ 39 | } 40 | 41 | // Implementations. 42 | const _: () = { 43 | const OP: &str = $crate::__::stringify!($($punct)+); 44 | const NOT: &str = $crate::__::concat!($($crate::__::stringify!($($not)+))?); 45 | $($( 46 | $( 47 | $(@ $PeekFrom)? 48 | $crate::__impl_punctuation!(PeekFrom for $name, OP, NOT); 49 | )? 50 | //TODO 51 | )*)? 52 | }; 53 | 54 | $crate::punctuation!($($rest)*); 55 | }; 56 | 57 | { 58 | $(#[$($attr:tt)*])* 59 | ($($punct:tt)+) $(not before [$($not:tt)*])? 60 | as $vis:vis $name:ident$(: $( 61 | $(doc $(@ $doc:tt)?)? 62 | $(Default $(@ $Default:tt)?)? 63 | $(PeekFrom $(@ $PeekFrom:tt)?)? 64 | $(PopFrom $(@ $PopFrom:tt)?)? 65 | $(IntoTokens $(@ $IntoTokens:tt)?)? 66 | $(SimpleSpanned $(@ $SimpleSpanned:tt)?)? 67 | $(LocatedAt $(@ $LocatedAt:tt)?)? 68 | $(ResolvedAt $(@ $ResolvedAt:tt)?)? 69 | ),*)?( 70 | $(#[$($field_attr:tt)*])* 71 | $($punct_vis:vis),+ 72 | ); 73 | 74 | $($rest:tt)* 75 | } => { 76 | $crate::__validate_punctuation!($($punct)*); 77 | 78 | #[cfg_attr( 79 | any($($($(all(), $(@ $doc)?)?)?)*), 80 | doc = $crate::__::concat!( 81 | '`', $crate::__::stringify!($($punct)+), '`', 82 | $(" not before [", $(" `", $crate::__::stringify!($not), "`",)* "]",)? 83 | ), 84 | )] 85 | $(#[$($attr)*])* 86 | $vis struct $name( 87 | $($punct_vis $crate::__::Punct),+ 88 | ); 89 | 90 | // Implementations. 91 | const _: () = { 92 | const OP: &str = $crate::__::stringify!($($punct)+); 93 | const NOT: &str = $crate::__::concat!($($crate::__::stringify!($($not)+))?); 94 | $($( 95 | $( 96 | $(@ $PeekFrom)? 97 | $crate::__impl_punctuation!(PeekFrom for $name, OP, NOT); 98 | )? 99 | //TODO 100 | )*)? 101 | }; 102 | 103 | $crate::punctuation!($($rest)*); 104 | }; 105 | 106 | { 107 | $(#[$($attr:tt)*])* 108 | ($($punct:tt)+) $(not before [$($not:tt)*])? 109 | as $vis:vis $name:ident$(: $( 110 | $(doc $(@ $doc:tt)?)? 111 | $(Default $(@ $Default:tt)?)? 112 | $(PeekFrom $(@ $PeekFrom:tt)?)? 113 | $(PopFrom $(@ $PopFrom:tt)?)? 114 | $(IntoTokens $(@ $IntoTokens:tt)?)? 115 | $(SimpleSpanned $(@ $SimpleSpanned:tt)?)? 116 | $(LocatedAt $(@ $LocatedAt:tt)?)? 117 | $(ResolvedAt $(@ $ResolvedAt:tt)?)? 118 | ),*)?; 119 | 120 | $($rest:tt)* 121 | } => { 122 | $crate::__validate_punctuation!($($punct)*); 123 | 124 | #[cfg_attr( 125 | any($($($(all(), $(@ $doc)?)?)?)*), 126 | doc = $crate::__::concat!( 127 | '`', $crate::__::stringify!($($punct)+), '`', 128 | $(" not before [", $(" `", $crate::__::stringify!($not), "`",)* "]",)? 129 | ), 130 | )] 131 | $(#[$($attr)*])* 132 | $vis struct $name; 133 | 134 | // Implementations. 135 | const _: () = { 136 | const OP: &str = $crate::__::stringify!($($punct)+); 137 | const NOT: &str = $crate::__::concat!($($crate::__::stringify!($($not)+))?); 138 | $($( 139 | $( 140 | $(@ $PeekFrom)? 141 | $crate::__impl_punctuation!(PeekFrom for $name, OP, NOT); 142 | )? 
143 | //TODO 144 | )*)? 145 | }; 146 | 147 | $crate::punctuation!($($rest)*); 148 | }; 149 | 150 | // End. 151 | {} => {}; 152 | } 153 | 154 | #[doc(hidden)] 155 | #[macro_export] 156 | macro_rules! __validate_punctuation { 157 | ($other:ident $($rest:tt)*) => { 158 | $crate::__::compile_error!($crate::__::concat!("Expected punct, but found: ", $crate::__::stringify!($other))); 159 | $crate::__validate_punctuation!($($rest)*); 160 | }; 161 | 162 | ($other:block $($rest:tt)*) => { 163 | $crate::__::compile_error!($crate::__::concat!("Expected punct, but found: ", $crate::__::stringify!($other))); 164 | $crate::__validate_punctuation!($($rest)*); 165 | }; 166 | 167 | ($other:lifetime $($rest:tt)*) => { 168 | $crate::__::compile_error!($crate::__::concat!("Expected punct, but found: ", $crate::__::stringify!($other))); 169 | $crate::__validate_punctuation!($($rest)*); 170 | }; 171 | 172 | // The `literal` fragment doesn't fall back if it encounters `-`. 173 | (- $($rest:tt)*) => { 174 | $crate::__validate_punctuation!($($rest)*); 175 | }; 176 | ($other:literal $($rest:tt)*) => { 177 | $crate::__::compile_error!($crate::__::concat!("Expected punct, but found: ", $crate::__::stringify!($other))); 178 | $crate::__validate_punctuation!($($rest)*); 179 | }; 180 | 181 | (($($other:tt)*) $($rest:tt)*) => { 182 | $crate::__::compile_error!($crate::__::concat!("Expected punct, but found: (", $crate::__::stringify!($($other),*), ")")); 183 | $crate::__validate_punctuation!($($rest)*); 184 | }; 185 | 186 | ($tt:tt $($rest:tt)*) => ( $crate::__validate_punctuation!($($rest)*); ); 187 | 188 | // End. 189 | () => (); 190 | } 191 | 192 | #[doc(hidden)] 193 | #[macro_export] 194 | macro_rules! __impl_punctuation { 195 | (PeekFrom for $name:ident, $OP:expr, $NOT:expr) => { 196 | impl $crate::PeekFrom for $name { 197 | fn peek_from(input: &$crate::Input) -> bool { 198 | const LEN: usize = $OP.len(); 199 | 200 | //FIXME: Should be a constant asset once possible. 201 | $crate::__::debug_assert!( 202 | !$OP.contains(' '), 203 | "Unexpected space in punctuation definition `{}`.", 204 | $OP, 205 | ); 206 | 207 | //TODO: Assert length. 
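// Match each character of `$OP` against consecutive `Punct` tokens: every
// character but the last must be `Spacing::Joint`, and the last must either
// stand alone or not be directly followed by a punctuation character from
// `$NOT` (the "not before" follow set).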
208 | 209 | input.peek(|tts: [&$crate::__::TokenTree; LEN], mut rest| { 210 | tts.into_iter().enumerate().all(|(i, tt)| match tt { 211 | $crate::__::TokenTree::Punct(punct) 212 | if punct.as_char() == $OP.chars().nth(i).expect("") => 213 | { 214 | if i < const { LEN - 1 } { 215 | punct.spacing() == $crate::__::Spacing::Joint 216 | } else { 217 | punct.spacing() == $crate::__::Spacing::Alone || { 218 | if let Some($crate::__::TokenTree::Punct(next)) = rest.next() { 219 | !$NOT.contains(next.as_char()) 220 | } else { 221 | true 222 | } 223 | } 224 | } 225 | } 226 | _ => false, 227 | }) 228 | }) 229 | } 230 | } 231 | }; 232 | } 233 | -------------------------------------------------------------------------------- /loess/src/stateful.rs: -------------------------------------------------------------------------------- 1 | use std::{any::type_name, collections::VecDeque, marker::PhantomData}; 2 | 3 | use crate::{Error, ErrorPriority, Errors, Input, PeekFrom, PopParsedFrom}; 4 | 5 | pub trait Stepper: Default { 6 | type Item; 7 | 8 | fn pop_next_from( 9 | &mut self, 10 | input: &mut Input, 11 | errors: &mut Errors, 12 | ) -> Result, ()>; 13 | 14 | fn peek_pop_next_from( 15 | &mut self, 16 | input: &mut Input, 17 | errors: &mut Errors, 18 | ) -> Result, ()> 19 | where 20 | Self: PeekNextFrom, 21 | { 22 | self.peek_next_from(input) 23 | .then_some(self.pop_next_from(input, errors)) 24 | .transpose() 25 | .map(Option::flatten) 26 | } 27 | } 28 | 29 | pub trait PeekNextFrom { 30 | /// # Returns 31 | /// 32 | /// ## [`true`] 33 | /// 34 | /// [`StatefulPopParsedFrom::pop_parsed_from`] may still fail and/or push to [`Errors`]. 35 | /// 36 | /// ## [`false`] 37 | /// 38 | /// [`StatefulPopParsedFrom::pop_parsed_from`] should fail **and** push to [`Errors`]. 39 | fn peek_next_from(&self, input: &Input) -> bool; 40 | } 41 | 42 | pub struct SimpleStepper { 43 | _phantom: PhantomData, 44 | } 45 | 46 | impl Default for SimpleStepper { 47 | fn default() -> Self { 48 | Self { 49 | _phantom: PhantomData, 50 | } 51 | } 52 | } 53 | 54 | impl Stepper for SimpleStepper { 55 | type Item = T::Parsed; 56 | 57 | fn pop_next_from( 58 | &mut self, 59 | input: &mut Input, 60 | errors: &mut Errors, 61 | ) -> Result, ()> { 62 | T::pop_parsed_from(input, errors).map(Some) 63 | } 64 | } 65 | 66 | impl PeekNextFrom for SimpleStepper { 67 | fn peek_next_from(&self, input: &Input) -> bool { 68 | T::peek_from(input) 69 | } 70 | } 71 | 72 | pub struct RepeatCountStepper { 73 | inner: S, 74 | buffer: VecDeque, 75 | counter: usize, 76 | } 77 | 78 | impl Default for RepeatCountStepper { 79 | fn default() -> Self { 80 | Self { 81 | inner: S::default(), 82 | buffer: VecDeque::new(), 83 | counter: 0, 84 | } 85 | } 86 | } 87 | 88 | impl Stepper for RepeatCountStepper { 89 | type Item = S::Item; 90 | 91 | fn pop_next_from( 92 | &mut self, 93 | input: &mut Input, 94 | errors: &mut Errors, 95 | ) -> Result, ()> { 96 | const { 97 | assert!(MIN <= MAX); 98 | }; 99 | 100 | fn pop_next_from( 101 | inner: &mut S, 102 | buffer: &mut VecDeque, 103 | counter: &mut usize, 104 | input: &mut Input, 105 | errors: &mut Errors, 106 | min: usize, 107 | max: usize, 108 | ) -> Result, ()> { 109 | if *counter == 0 { 110 | buffer.reserve_exact(min); 111 | 112 | while *counter < min { 113 | if let Some(item) = inner.pop_next_from(input, errors)? 
{ 114 | *counter += 1; 115 | buffer.push_back(item) 116 | } else { 117 | todo!("Report error and return.") 118 | } 119 | } 120 | } 121 | 122 | if let Some(item) = buffer.pop_front() { 123 | Ok(Some(item)) 124 | } else if *counter < max 125 | && let Some(item) = inner.pop_next_from(input, errors)? 126 | { 127 | *counter += 1; 128 | Ok(Some(item)) 129 | } else { 130 | Ok(None) 131 | } 132 | } 133 | 134 | pop_next_from( 135 | &mut self.inner, 136 | &mut self.buffer, 137 | &mut self.counter, 138 | input, 139 | errors, 140 | MIN, 141 | MAX, 142 | ) 143 | } 144 | } 145 | 146 | impl PeekNextFrom 147 | for RepeatCountStepper 148 | where 149 | S: PeekNextFrom, 150 | { 151 | fn peek_next_from(&self, input: &Input) -> bool { 152 | if self.counter < MIN { 153 | true 154 | } else if self.counter < MAX { 155 | self.inner.peek_next_from(input) 156 | } else { 157 | false 158 | } 159 | } 160 | } 161 | 162 | pub struct SeparatedStepper { 163 | stop: bool, 164 | _phantom: PhantomData<(T, S)>, 165 | } 166 | 167 | impl Default for SeparatedStepper { 168 | fn default() -> Self { 169 | Self { 170 | stop: false, 171 | _phantom: PhantomData, 172 | } 173 | } 174 | } 175 | 176 | impl Stepper for SeparatedStepper { 177 | type Item = (T::Parsed, Option); 178 | 179 | fn pop_next_from( 180 | &mut self, 181 | input: &mut Input, 182 | errors: &mut Errors, 183 | ) -> Result, ()> { 184 | let len_before = input.len(); 185 | let item = match T::pop_parsed_from(input, errors) { 186 | //TODO: Slide separator! 187 | Ok(trailing) => match S::peek_pop_parsed_from(input, errors) { 188 | Ok(delimiter) => (trailing, delimiter), 189 | Err(()) => todo!("Recovery."), 190 | }, 191 | Err(()) => todo!("Recovery."), 192 | }; 193 | if input.len() == len_before { 194 | errors.push(Error::new( 195 | ErrorPriority::UNCONSUMED_INPUT, 196 | format!( 197 | "{} looped without consuming input. (This likely implies a faulty grammar.)", 198 | type_name::<(T, Option)>() 199 | ), 200 | input.drain_spans(..), 201 | )); 202 | self.stop = true; 203 | } 204 | Ok(Some(item)) 205 | } 206 | } 207 | 208 | impl PeekNextFrom for SeparatedStepper { 209 | fn peek_next_from(&self, input: &Input) -> bool { 210 | !self.stop && T::peek_from(input) 211 | } 212 | } 213 | 214 | pub struct DelimitedStepper { 215 | stop: bool, 216 | _phantom: PhantomData<(T, D)>, 217 | } 218 | 219 | impl Default for DelimitedStepper { 220 | fn default() -> Self { 221 | Self { 222 | stop: false, 223 | _phantom: PhantomData, 224 | } 225 | } 226 | } 227 | 228 | impl Stepper for DelimitedStepper { 229 | type Item = (T::Parsed, Option); 230 | 231 | fn pop_next_from( 232 | &mut self, 233 | input: &mut Input, 234 | errors: &mut Errors, 235 | ) -> Result, ()> { 236 | let len_before = input.len(); 237 | let item = match T::pop_parsed_from(input, errors) { 238 | //TODO: Slide separator! 239 | Ok(trailing) => match D::peek_pop_parsed_from(input, errors) { 240 | Ok(delimiter) => (trailing, delimiter), 241 | Err(()) => todo!("Recovery."), 242 | }, 243 | Err(()) => todo!("Recovery."), 244 | }; 245 | if input.len() == len_before { 246 | errors.push(Error::new( 247 | ErrorPriority::UNCONSUMED_INPUT, 248 | format!( 249 | "{} looped without consuming input. 
(This likely implies a faulty grammar.)", 250 | type_name::<(T, Option)>() 251 | ), 252 | input.drain_spans(..), 253 | )); 254 | self.stop = true; 255 | } 256 | Ok(Some(item)) 257 | } 258 | } 259 | 260 | impl PeekNextFrom for DelimitedStepper { 261 | fn peek_next_from(&self, input: &Input) -> bool { 262 | !self.stop && T::peek_from(input) 263 | } 264 | } 265 | -------------------------------------------------------------------------------- /loess-rust-lex/src/lex/keywords.rs: -------------------------------------------------------------------------------- 1 | //! [lex.keywords](https://doc.rust-lang.org/stable/reference/keywords.html#r-lex.keywords) 2 | //! 3 | //! Keywords are implemented as tuple structs with single public [`Ident`]. 4 | 5 | use loess::{lifetimes, words}; 6 | use proc_macro2::Ident; 7 | 8 | pub(crate) mod words_impl { 9 | use loess::words; 10 | 11 | words! { 12 | // Strict keywords. 13 | // See as of 2025-04-13. 14 | #[derive(Clone)] pub as as pub As: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 15 | #[derive(Clone)] pub box as pub Box: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 16 | #[derive(Clone)] pub break as pub Break: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 17 | #[derive(Clone)] pub const as pub Const: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 18 | #[derive(Clone)] pub continue as pub Continue: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 19 | #[derive(Clone)] pub crate as pub Crate: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 20 | #[derive(Clone)] pub else as pub Else: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 21 | #[derive(Clone)] pub enum as pub Enum: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 22 | #[derive(Clone)] pub extern as pub Extern: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 23 | #[derive(Clone)] pub false as pub False: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 24 | #[derive(Clone)] pub fn as pub Fn: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 25 | #[derive(Clone)] pub for as pub For: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 26 | #[derive(Clone)] pub if as pub If: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 27 | #[derive(Clone)] pub impl as pub Impl: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 28 | #[derive(Clone)] pub in as pub In: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 29 | #[derive(Clone)] pub let as pub Let: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 30 | #[derive(Clone)] pub loop as pub Loop: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 31 | #[derive(Clone)] pub match as pub Match: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 32 | #[derive(Clone)] pub mod as pub Mod: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 33 | #[derive(Clone)] pub move as pub Move: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 34 | #[derive(Clone)] pub mut as pub Mut: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 35 | #[derive(Clone)] pub pub as pub Pub: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 36 | 
#[derive(Clone)] pub ref as pub Ref: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 37 | #[derive(Clone)] pub return as pub Return: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 38 | #[derive(Clone)] pub self as pub SelfLowercase: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 39 | #[derive(Clone)] pub Self as pub SelfUppercase: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 40 | #[derive(Clone)] pub static as pub Static: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 41 | #[derive(Clone)] pub struct as pub Struct: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 42 | #[derive(Clone)] pub super as pub Super: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 43 | #[derive(Clone)] pub trait as pub Trait: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 44 | #[derive(Clone)] pub true as pub True: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 45 | #[derive(Clone)] pub type as pub Type: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 46 | #[derive(Clone)] pub unsafe as pub Unsafe: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 47 | #[derive(Clone)] pub use as pub Use: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 48 | #[derive(Clone)] pub where as pub Where: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 49 | #[derive(Clone)] pub while as pub While: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 50 | 51 | // 2018 edition 52 | #[derive(Clone)] pub async as pub Async: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 53 | #[derive(Clone)] pub await as pub Await: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 54 | #[derive(Clone)] pub dyn as pub Dyn: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 55 | 56 | // Reserved keywords. 57 | // See as of 2025-04-13. 58 | abstract as _; 59 | become as _; 60 | box as _; 61 | do as _; 62 | final as _; 63 | macro as _; 64 | override as _; 65 | priv as _; 66 | typeof as _; 67 | unsized as _; 68 | virtual as _; 69 | yield as _; 70 | 71 | // 2018+ 72 | try as _; 73 | 74 | // 2024+ 75 | gen as _; 76 | 77 | //TODO: Move this elsewhere in the API? 78 | /// [IDENTIFIER](https://doc.rust-lang.org/reference/identifiers.html#grammar-IDENTIFIER) 79 | #[derive(Clone)] pub _ as pub Identifier: IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 80 | } 81 | } 82 | pub use words_impl::{ 83 | As, Async, Await, Box, Break, Const, Continue, Crate, Dyn, Else, Enum, Extern, False, Fn, For, 84 | If, Impl, In, Let, Loop, Match, Mod, Move, Mut, Pub, Ref, Return, SelfLowercase, SelfUppercase, 85 | Static, Struct, Super, Trait, True, Type, Unsafe, Use, Where, While, 86 | }; 87 | 88 | // Weak keywords. 89 | // See . 90 | lifetimes! { 91 | /// [(weak)](https://doc.rust-lang.org/stable/reference/keywords.html#r-lex.keywords.weak.lifetime-static) 92 | #[derive(Clone)] pub ('static) as pub LifetimeStatic: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 93 | } 94 | words! 
{ 95 | 96 | /// [(weak)](https://doc.rust-lang.org/stable/reference/keywords.html#r-lex.keywords.weak.macro_rules) 97 | #[derive(Clone)] pub macro_rules as pub MacroRules: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 98 | 99 | /// [(weak)](https://doc.rust-lang.org/stable/reference/keywords.html#r-lex.keywords.weak.raw) 100 | #[derive(Clone)] pub raw as pub Raw: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 101 | 102 | /// [(weak)](https://doc.rust-lang.org/stable/reference/keywords.html#r-lex.keywords.weak.safe) 103 | #[derive(Clone)] pub safe as pub Safe: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 104 | 105 | /// [(weak)](https://doc.rust-lang.org/stable/reference/keywords.html#r-lex.keywords.weak.union) 106 | #[derive(Clone)] pub union as pub Union: doc, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt; 107 | } 108 | 109 | /// See as of 2025-04-13. 110 | pub fn is_strict_keyword(ident: &Ident) -> bool { 111 | [ 112 | "as", "break", "const", "continue", "crate", "else", "enum", "extern", "false", "fn", 113 | "for", "if", "impl", "in", "let", "loop", "match", "mod", "move", "mut", "pub", "ref", 114 | "return", "self", "Self", "static", "struct", "super", "trait", "true", "type", "unsafe", 115 | "use", "where", "while", // 116 | // 2018 edition 117 | "async", "await", "dyn", 118 | ] 119 | .iter() 120 | .any(|s| ident == s) 121 | } 122 | 123 | /// See as of 2025-04-13. 124 | pub fn is_reserved_keyword(ident: &Ident) -> bool { 125 | [ 126 | "abstract", "become", "box", "do", "final", "macro", "override", "priv", "typeof", 127 | "unsized", "virtual", "yield", // 128 | // 2018+ 129 | "try", // 130 | // 2024+ 131 | "gen", 132 | ] 133 | .iter() 134 | .any(|s| ident == s) 135 | } 136 | -------------------------------------------------------------------------------- /loess/src/macros/words.rs: -------------------------------------------------------------------------------- 1 | /// Defines keywords and identifiers (excluding lifetimes). 2 | /// 3 | /// # Examples 4 | /// 5 | /// ```rust 6 | /// loess::words! { 7 | /// // Defines a keyword. 8 | /// keyword as Keyword; 9 | /// 10 | /// // Defines a catch-other identifier. Must be last. 11 | /// _ as Identifier; 12 | /// } 13 | /// ``` 14 | /// 15 | /// ```rust 16 | /// loess::words! { 17 | /// #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] 18 | /// pub keyword as pub Keyword: doc, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt; 19 | /// 20 | /// #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] 21 | /// pub _ as pub Identifier: PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt; 22 | /// } 23 | /// ``` 24 | /// 25 | /// ## Exclude keywords without declaration 26 | /// 27 | /// ```rust 28 | /// loess::words! { 29 | /// // Exclude keyword for catch-other identifier. 30 | /// // Must not have otherwise optional elements. 31 | /// keyword as _; 32 | /// 33 | /// // Matches any except `keyword`. 34 | /// pub _ as pub Identifier; 35 | /// } 36 | /// ``` 37 | /// 38 | /// ```rust,compile_fail 39 | /// loess::words! { 40 | /// keyword as _; //Error: Unused keyword exclusion: keyword 41 | /// } 42 | /// ``` 43 | /// 44 | /// ### For macros 45 | /// 46 | /// ```rust 47 | /// loess::words! { 48 | /// keyword as _; 49 | /// allow_prior_unused!; 50 | /// } 51 | /// ``` 52 | /// 53 | /// ```rust,compile_fail 54 | /// loess::words! 
{ 55 | /// allow_prior_unused!; 56 | /// keyword as _; 57 | /// } 58 | /// ``` 59 | #[macro_export] 60 | macro_rules! words { 61 | ($($input:tt)*) => { 62 | $crate::__words_muncher!([] [] $($input)*); 63 | } 64 | } 65 | 66 | #[doc(hidden)] 67 | #[macro_export] 68 | macro_rules! __words_muncher { 69 | // Keyword. 70 | ( 71 | [$($kws:tt)*] [$($excluded_kws:tt)*] 72 | 73 | $(#[$($attr:tt)*])* 74 | $ident_vis:vis $kw:ident as $vis:vis $name:ident 75 | $(: $( 76 | $(doc $(@ $doc:tt)?)? 77 | $(PeekFrom $(@ $PeekFrom:tt)?)? 78 | $(PopFrom $(@ $PopFrom:tt)?)? 79 | $(IntoTokens $(@ $IntoTokens:tt)?)? 80 | $(SimpleSpanned $(@ $SimpleSpanned:tt)?)? 81 | $(LocatedAt $(@ $LocatedAt:tt)?)? 82 | $(ResolvedAt $(@ $ResolvedAt:tt)?)? 83 | ),*$(,)?)? 84 | ; 85 | 86 | $($rest:tt)* 87 | ) => { 88 | $($($( 89 | $(@ $doc)? 90 | #[doc = $crate::__::concat!('`', stringify!($kw), '`')] 91 | )?)*)? 92 | $(#[$($attr)*])* 93 | $vis struct $name($ident_vis $crate::__::Ident); 94 | 95 | $($( 96 | $( 97 | $(@ $PeekFrom)? 98 | $crate::__impl_word!(PeekFrom for $name, ident => ident == stringify!($kw)); 99 | )? 100 | $( 101 | $(@ $PopFrom)? 102 | $crate::__impl_word!( 103 | PopFrom for $name, 104 | ident => ident == stringify!($kw), 105 | $crate::__::concat!("Expected `", stringify!($kw), "`."), 106 | ); 107 | )? 108 | $( 109 | $(@ $IntoTokens)? 110 | $crate::__impl_word!(IntoTokens for $name); 111 | )? 112 | $( 113 | $(@ $SimpleSpanned)? 114 | $crate::__impl_word!(SimpleSpanned for $name); 115 | )? 116 | $( 117 | $(@ $LocatedAt)? 118 | $crate::__impl_word!(LocatedAt for $name); 119 | )? 120 | $( 121 | $(@ $ResolvedAt)? 122 | $crate::__impl_word!(ResolvedAt for $name); 123 | )? 124 | )*)? 125 | 126 | $crate::__words_muncher! { 127 | [$($kws)* $kw] [$($excluded_kws)*] 128 | $($rest)* 129 | } 130 | }; 131 | 132 | // Exclude keyword. 133 | ( 134 | [$($kws:tt)*] [$($excluded_kws:tt)*] 135 | $kw:ident as _; 136 | $($rest:tt)* 137 | ) => { 138 | $crate::__words_muncher! { 139 | [$($kws)*] [$($excluded_kws)* $kw] 140 | $($rest)* 141 | } 142 | }; 143 | 144 | // Other identifier. Final. 145 | ( 146 | [$($kws:tt)*] [$($excluded_kws:tt)*] 147 | 148 | $(#[$($attr:tt)*])* 149 | $ident_vis:vis _ as $vis:vis $name:ident 150 | $(: $( 151 | $(PeekFrom $(@ $PeekFrom:tt)?)? 152 | $(PopFrom $(@ $PopFrom:tt)?)? 153 | $(IntoTokens $(@ $IntoTokens:tt)?)? 154 | $(SimpleSpanned $(@ $SimpleSpanned:tt)?)? 155 | $(LocatedAt $(@ $LocatedAt:tt)?)? 156 | $(ResolvedAt $(@ $ResolvedAt:tt)?)? 157 | ),*$(,)?)? 158 | ; 159 | 160 | $($($rest:tt)+)? 161 | ) => { 162 | $(#[$($attr)*])* 163 | $vis struct $name($ident_vis $crate::__::Ident); 164 | 165 | const _: () = { 166 | #[allow(non_upper_case_globals)] 167 | const __LOESS__WORDS_EXCLUSIONS: &[&str] = &[ 168 | $(stringify!($kws),)* 169 | $(stringify!($excluded_kws),)* 170 | ]; 171 | 172 | $($( 173 | $( 174 | $(@ $PeekFrom)? 175 | $crate::__impl_word!( 176 | PeekFrom for $name, 177 | ident => __LOESS__WORDS_EXCLUSIONS.into_iter().copied().all(|kw| ident != kw), 178 | ); 179 | )? 180 | $( 181 | $(@ $PopFrom)? 182 | $crate::__impl_word!( 183 | PopFrom for $name, 184 | ident => __LOESS__WORDS_EXCLUSIONS.into_iter().copied().all(|kw| ident != kw), 185 | $crate::__::concat!("Expected ", stringify!($name), "."), 186 | ); 187 | )? 188 | $( 189 | $(@ $IntoTokens)? 190 | $crate::__impl_word!(IntoTokens for $name); 191 | )? 192 | $( 193 | $(@ $SimpleSpanned)? 194 | $crate::__impl_word!(SimpleSpanned for $name); 195 | )? 196 | $( 197 | $(@ $LocatedAt)? 198 | $crate::__impl_word!(LocatedAt for $name); 199 | )? 
200 | $( 201 | $(@ $ResolvedAt)? 202 | $crate::__impl_word!(ResolvedAt for $name); 203 | )? 204 | )*)? 205 | }; 206 | 207 | $($crate::__::compile_error!($crate::__::concat!("Catch-other identifier must be last, but was followed by: ", $crate::__::stringify!($($rest)+)));)? 208 | }; 209 | 210 | // For macro authors. 211 | ( 212 | [$($kws:tt)*] [$($excluded_kws:tt)*] 213 | allow_prior_unused!; 214 | $($rest:tt)* 215 | ) => { 216 | $crate::__words_muncher! { 217 | [$($kws)* $($excluded_kws)*] [] 218 | $($rest)* 219 | } 220 | }; 221 | 222 | // Other end. 223 | ([$($kws:tt)*] [$($excluded_kws:tt)*]) => { 224 | $( 225 | $crate::__::compile_error!($crate::__::concat!("Unused keyword exclusion: ", $crate::__::stringify!($excluded_kws))); 226 | )* 227 | } 228 | } 229 | 230 | #[macro_export] 231 | #[doc(hidden)] 232 | macro_rules! __impl_word { 233 | (PeekFrom for $name:ty, $ident:ident => $condition:expr$(,)?) => { 234 | impl $crate::PeekFrom for $name { 235 | fn peek_from(input: &$crate::Input) -> bool { 236 | input.peek(|tts, _| matches!(tts, [$crate::__::TokenTree::Ident($ident)] if $condition && !<$crate::__::Ident as $crate::__::ToString>::to_string($ident).as_str().starts_with('\''))) 237 | } 238 | } 239 | }; 240 | 241 | (PopFrom for $name:ty, $ident:ident => $condition:expr, $message:expr$(,)?) => { 242 | impl $crate::PopParsedFrom for $name { 243 | type Parsed = Self; 244 | 245 | fn pop_parsed_from( 246 | input: &mut $crate::Input, 247 | errors: &mut $crate::Errors, 248 | ) -> Result { 249 | input 250 | .pop_or_replace(|tts, _| match tts { 251 | [$crate::__::TokenTree::Ident($ident)] if $condition && !<$crate::__::Ident as $crate::__::ToString>::to_string(&$ident).as_str().starts_with('\'') => Ok(Self($ident)), 252 | tts => Err(tts), 253 | }) 254 | .map_err(|spans| { 255 | errors.push($crate::Error::new( 256 | $crate::ErrorPriority::TOKEN, 257 | $message, 258 | spans, 259 | )) 260 | }) 261 | } 262 | } 263 | }; 264 | 265 | (IntoTokens for $name:ty$(,)?) => { 266 | impl $crate::IntoTokens for $name { 267 | fn into_tokens(self, root: &$crate::__::TokenStream, tokens: &mut impl $crate::__::Extend<$crate::__::TokenTree>) { 268 | self.0.into_tokens(root, tokens) 269 | } 270 | } 271 | }; 272 | 273 | (SimpleSpanned for $name:ty$(,)?) => { 274 | impl $crate::SimpleSpanned for $name { 275 | fn span(&self) -> $crate::__::Span { 276 | self.0.span() 277 | } 278 | 279 | fn set_span(&mut self, span: $crate::__::Span) { 280 | self.0.set_span(span) 281 | } 282 | } 283 | }; 284 | 285 | (LocatedAt for $name:ty$(,)?) => { 286 | impl $crate::LocatedAt for $name { 287 | fn located_at(mut self, span: $crate::__::Span) -> Self { 288 | self.0.set_span(self.0.span().located_at(span)); 289 | self 290 | } 291 | } 292 | }; 293 | 294 | (ResolvedAt for $name:ty$(,)?) => { 295 | impl $crate::ResolvedAt for $name { 296 | fn resolved_at(mut self, span: $crate::__::Span) -> Self { 297 | self.0.set_span(self.0.span().resolved_at(span)); 298 | self 299 | } 300 | } 301 | }; 302 | } 303 | -------------------------------------------------------------------------------- /loess/src/macros/lifetimes.rs: -------------------------------------------------------------------------------- 1 | /// Defines key and catch-other lifetimes. 2 | /// 3 | /// # Examples 4 | /// 5 | /// ```rust 6 | /// loess::lifetimes! { 7 | /// // Defines a key lifetime. 8 | /// ('key) as LifetimeKey; 9 | /// 10 | /// // Defines a catch-other lifetime. Must be last. 
11 | /// _ as Lifetime; 12 | /// } 13 | /// ``` 14 | /// 15 | /// ```rust 16 | /// loess::lifetimes! { 17 | /// #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] 18 | /// pub ('key) as pub LifetimeKey: doc, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt; 19 | /// 20 | /// #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] 21 | /// pub _ as pub Lifetime: PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt; 22 | /// } 23 | /// ``` 24 | /// 25 | /// ## Exclude key lifetimes without declaration 26 | /// 27 | /// ```rust 28 | /// loess::lifetimes! { 29 | /// // Exclude 'key for catch-other lifetime. 30 | /// // Must not have otherwise optional elements. 31 | /// ('key) as _; 32 | /// 33 | /// // Matches any except `'key`. 34 | /// pub _ as pub Lifetime; 35 | /// } 36 | /// ``` 37 | /// 38 | /// ```rust,compile_fail 39 | /// loess::lifetimes! { 40 | /// ('key) as _; //Error: Unused key lifetime exclusion: 'key 41 | /// } 42 | /// ``` 43 | /// 44 | /// ### For macros 45 | /// 46 | /// ```rust 47 | /// loess::lifetimes! { 48 | /// ('key) as _; 49 | /// allow_prior_unused!; 50 | /// } 51 | /// ``` 52 | /// 53 | /// ```rust,compile_fail 54 | /// loess::lifetimes! { 55 | /// allow_prior_unused!; 56 | /// ('key) as _; 57 | /// } 58 | /// ``` 59 | #[macro_export] 60 | macro_rules! lifetimes { 61 | ($($input:tt)*) => { 62 | $crate::__lifetimes_muncher!([] [] $($input)*); 63 | } 64 | } 65 | 66 | #[doc(hidden)] 67 | #[macro_export] 68 | macro_rules! __lifetimes_muncher { 69 | // Keylifetime. 70 | ( 71 | [$($kws:tt)*] [$($excluded_kws:tt)*] 72 | 73 | $(#[$($attr:tt)*])* 74 | $ident_vis:vis ($kw:lifetime) as $vis:vis $name:ident 75 | $(: $( 76 | $(doc $(@ $doc:tt)?)? 77 | $(PeekFrom $(@ $PeekFrom:tt)?)? 78 | $(PopFrom $(@ $PopFrom:tt)?)? 79 | $(IntoTokens $(@ $IntoTokens:tt)?)? 80 | $(SimpleSpanned $(@ $SimpleSpanned:tt)?)? 81 | $(LocatedAt $(@ $LocatedAt:tt)?)? 82 | $(ResolvedAt $(@ $ResolvedAt:tt)?)? 83 | ),*$(,)?)? 84 | ; 85 | 86 | $($rest:tt)* 87 | ) => { 88 | $($($( 89 | $(@ $doc)? 90 | #[doc = $crate::__::concat!('`', stringify!($kw), '`')] 91 | )?)*)? 92 | $(#[$($attr)*])* 93 | $vis struct $name($ident_vis $crate::__::Ident); 94 | 95 | $($( 96 | $( 97 | $(@ $PeekFrom)? 98 | $crate::__impl_lifetime!(PeekFrom for $name, ident => ident == stringify!($kw)); 99 | )? 100 | $( 101 | $(@ $PopFrom)? 102 | $crate::__impl_lifetime!( 103 | PopFrom for $name, 104 | ident => ident == stringify!($kw), 105 | $crate::__::concat!("Expected `", stringify!($kw), "`."), 106 | ); 107 | )? 108 | $( 109 | $(@ $IntoTokens)? 110 | $crate::__impl_lifetime!(IntoTokens for $name); 111 | )? 112 | $( 113 | $(@ $SimpleSpanned)? 114 | $crate::__impl_lifetime!(SimpleSpanned for $name); 115 | )? 116 | $( 117 | $(@ $LocatedAt)? 118 | $crate::__impl_lifetime!(LocatedAt for $name); 119 | )? 120 | $( 121 | $(@ $ResolvedAt)? 122 | $crate::__impl_lifetime!(ResolvedAt for $name); 123 | )? 124 | )*)? 125 | 126 | $crate::__lifetimes_muncher! { 127 | [$($kws)* $kw] [$($excluded_kws)*] 128 | $($rest)* 129 | } 130 | }; 131 | 132 | // Exclude key lifetime. 133 | ( 134 | [$($kws:tt)*] [$($excluded_kws:tt)*] 135 | ($kw:lifetime) as _; 136 | $($rest:tt)* 137 | ) => { 138 | $crate::__lifetimes_muncher! { 139 | [$($kws)*] [$($excluded_kws)* $kw] 140 | $($rest)* 141 | } 142 | }; 143 | 144 | // Other lifetime. Final. 145 | ( 146 | [$($kws:tt)*] [$($excluded_kws:tt)*] 147 | 148 | $(#[$($attr:tt)*])* 149 | $ident_vis:vis _ as $vis:vis $name:ident 150 | $(: $( 151 | $(PeekFrom $(@ $PeekFrom:tt)?)? 
152 | $(PopFrom $(@ $PopFrom:tt)?)? 153 | $(IntoTokens $(@ $IntoTokens:tt)?)? 154 | $(SimpleSpanned $(@ $SimpleSpanned:tt)?)? 155 | $(LocatedAt $(@ $LocatedAt:tt)?)? 156 | $(ResolvedAt $(@ $ResolvedAt:tt)?)? 157 | ),*$(,)?)? 158 | ; 159 | 160 | $($($rest:tt)+)? 161 | ) => { 162 | $(#[$($attr)*])* 163 | $vis struct $name($ident_vis $crate::__::Ident); 164 | 165 | const _: () = { 166 | #[allow(non_upper_case_globals)] 167 | const __LOESS__WORDS_EXCLUSIONS: &[&str] = &[ 168 | $(stringify!($kws),)* 169 | $(stringify!($excluded_kws),)* 170 | ]; 171 | 172 | $($( 173 | $( 174 | $(@ $PeekFrom)? 175 | $crate::__impl_lifetime!( 176 | PeekFrom for $name, 177 | ident => __LOESS__WORDS_EXCLUSIONS.into_iter().copied().all(|kw| ident != kw), 178 | ); 179 | )? 180 | $( 181 | $(@ $PopFrom)? 182 | $crate::__impl_lifetime!( 183 | PopFrom for $name, 184 | ident => __LOESS__WORDS_EXCLUSIONS.into_iter().copied().all(|kw| ident != kw), 185 | $crate::__::concat!("Expected ", stringify!($name), "."), 186 | ); 187 | )? 188 | $( 189 | $(@ $IntoTokens)? 190 | $crate::__impl_lifetime!(IntoTokens for $name); 191 | )? 192 | $( 193 | $(@ $SimpleSpanned)? 194 | $crate::__impl_lifetime!(SimpleSpanned for $name); 195 | )? 196 | $( 197 | $(@ $LocatedAt)? 198 | $crate::__impl_lifetime!(LocatedAt for $name); 199 | )? 200 | $( 201 | $(@ $ResolvedAt)? 202 | $crate::__impl_lifetime!(ResolvedAt for $name); 203 | )? 204 | )*)? 205 | }; 206 | 207 | $($crate::__::compile_error!($crate::__::concat!("Catch-other lifetime must be last, but was followed by: ", $crate::__::stringify!($($rest)+)));)? 208 | }; 209 | 210 | // For macro authors. 211 | ( 212 | [$($kws:tt)*] [$($excluded_kws:tt)*] 213 | allow_prior_unused!; 214 | $($rest:tt)* 215 | ) => { 216 | $crate::__lifetimes_muncher! { 217 | [$($kws)* $($excluded_kws)*] [] 218 | $($rest)* 219 | } 220 | }; 221 | 222 | // Other end. 223 | ([$($kws:tt)*] [$($excluded_kws:tt)*]) => { 224 | $( 225 | $crate::__::compile_error!($crate::__::concat!("Unused key lifetime exclusion: ", $crate::__::stringify!($excluded_kws))); 226 | )* 227 | } 228 | } 229 | 230 | #[macro_export] 231 | #[doc(hidden)] 232 | macro_rules! __impl_lifetime { 233 | (PeekFrom for $name:ty, $ident:ident => $condition:expr$(,)?) => { 234 | impl $crate::PeekFrom for $name { 235 | fn peek_from(input: &$crate::Input) -> bool { 236 | input.peek(|tts, _| matches!(tts, [$crate::__::TokenTree::Ident($ident)] if $condition && <$crate::__::Ident as $crate::__::ToString>::to_string($ident).as_str().starts_with('\''))) 237 | } 238 | } 239 | }; 240 | 241 | (PopFrom for $name:ty, $ident:ident => $condition:expr, $message:expr$(,)?) => { 242 | impl $crate::PopParsedFrom for $name { 243 | type Parsed = Self; 244 | 245 | fn pop_parsed_from( 246 | input: &mut $crate::Input, 247 | errors: &mut $crate::Errors, 248 | ) -> Result { 249 | input 250 | .pop_or_replace(|tts, _| match tts { 251 | [$crate::__::TokenTree::Ident($ident)] if $condition && <$crate::__::Ident as $crate::__::ToString>::to_string(&$ident).as_str().starts_with('\'') => Ok(Self($ident)), 252 | tts => Err(tts), 253 | }) 254 | .map_err(|spans| { 255 | errors.push($crate::Error::new( 256 | $crate::ErrorPriority::TOKEN, 257 | $message, 258 | spans, 259 | )) 260 | }) 261 | } 262 | } 263 | }; 264 | 265 | (IntoTokens for $name:ty$(,)?) 
=> { 266 | impl $crate::IntoTokens for $name { 267 | fn into_tokens(self, root: &$crate::__::TokenStream, tokens: &mut impl $crate::__::Extend<$crate::__::TokenTree>) { 268 | self.0.into_tokens(root, tokens) 269 | } 270 | } 271 | }; 272 | 273 | (SimpleSpanned for $name:ty$(,)?) => { 274 | impl $crate::SimpleSpanned for $name { 275 | fn span(&self) -> $crate::__::Span { 276 | self.0.span() 277 | } 278 | 279 | fn set_span(&mut self, span: $crate::__::Span) { 280 | self.0.set_span(span) 281 | } 282 | } 283 | }; 284 | 285 | (LocatedAt for $name:ty$(,)?) => { 286 | impl $crate::LocatedAt for $name { 287 | fn located_at(mut self, span: $crate::__::Span) -> Self { 288 | self.0.set_span(self.0.span().located_at(span)); 289 | self 290 | } 291 | } 292 | }; 293 | 294 | (ResolvedAt for $name:ty$(,)?) => { 295 | impl $crate::ResolvedAt for $name { 296 | fn resolved_at(mut self, span: $crate::__::Span) -> Self { 297 | self.0.set_span(self.0.span().resolved_at(span)); 298 | self 299 | } 300 | } 301 | }; 302 | } 303 | -------------------------------------------------------------------------------- /loess/src/macros/grammar.rs: -------------------------------------------------------------------------------- 1 | /// Parser- and serialiser-generator macro. 2 | /// 3 | /// # Example 4 | /// 5 | /// ``` 6 | /// use loess::{ 7 | /// grammar, words, 8 | /// scaffold::{Parentheses, SquareBrackets}, 9 | /// }; 10 | /// use proc_macro2::{Ident, TokenTree, Punct}; 11 | /// 12 | /// words! { 13 | /// #[derive(Clone)] 14 | /// pub let as Let: doc, PeekFrom, PopFrom, IntoTokens; 15 | /// 16 | /// #[derive(Clone)] 17 | /// pub pub as Pub: doc, PeekFrom, PopFrom, IntoTokens; 18 | /// 19 | /// #[derive(Clone)] 20 | /// pub _ as Identifier: PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt; 21 | /// } 22 | /// 23 | /// grammar! { 24 | /// #[derive(Clone)] 25 | /// pub struct Visibility: PeekFrom, PopFrom, IntoTokens { 26 | /// pub r#pub: Pub, 27 | /// parens: Option, 28 | /// } 29 | /// 30 | /// /// 31 | /// /// Has auto-documented grammar. 32 | /// #[derive(Clone)] 33 | /// pub enum Alternatives: doc, PeekFrom, PopFrom, IntoTokens { 34 | /// Identifier(Identifier), 35 | /// Paren(Parentheses), 36 | /// Bracket(SquareBrackets>), 37 | /// Vis(Visibility), 38 | /// } else "Expected Alternative."; 39 | /// 40 | /// #[derive(Clone)] 41 | /// /// `visibility` can't be first, as `Option` isn't `PeekFrom`. 42 | /// /// However, `Visibility` itself is `PeekFrom` (checking for `pub`). 43 | /// /// 44 | /// /// Fields are parsed and emitted in order. 45 | /// pub struct StructuredSequence: PeekFrom, PopFrom, IntoTokens { 46 | /// pub r#let: Let, 47 | /// pub visibility: Option, 48 | /// pub paren_ident: Parentheses, 49 | /// pub vec_punct: Vec, 50 | /// } 51 | /// 52 | /// #[derive(Clone)] 53 | /// /// Generated implementations for tuple structs are currently the most limited. 54 | /// pub struct TupleSequence: PeekFrom, PopFrom ( 55 | /// pub Let, 56 | /// pub Option, 57 | /// pub Parentheses, 58 | /// pub Vec, 59 | /// ); 60 | /// } 61 | /// ``` 62 | #[macro_export] 63 | macro_rules! grammar { 64 | //TODO: Change impl separator to `+`? 65 | { 66 | $(#[$($attr:tt)*])* 67 | $vis:vis enum $name:ident$(: $( 68 | $(doc $(@ $doc:tt)?)? 69 | $(PeekFrom $(@ $PeekFrom:tt)? $(via $PeekFromViaType:ident)?)? 70 | $(PopFrom $(@ $PopFrom:tt)? $(via $PopFromViaType:ident)?)? 71 | $(IntoTokens $(@ $IntoTokens:tt)?)? 72 | ),*)? {$( 73 | $(#[$($variant_attr:tt)*])* 74 | $variant:ident($($type:ty),*$(,)?) 
75 | ),*$(,)?} else $error:expr; 76 | 77 | $($tt:tt)* 78 | } => { 79 | #[cfg_attr(any($($($(all(), $(@ $doc)?)?)?)*), doc = $crate::grammar!(@enum_doc [$([$($type,)*])*]))] 80 | $(#[$($attr)*])* 81 | $vis enum $name {$( 82 | $(#[$($variant_attr)*])* 83 | $variant($(<$type as $crate::PopParsedFrom>::Parsed),*), 84 | )*} 85 | 86 | #[cfg(any($($($(all(), $(@ $PeekFrom)?)?)?)*))] 87 | $crate::grammar!(@PeekFrom for enum $name $($($($(via $PeekFromViaType)?)?)*)?, [$([$($type),*]),*]); 88 | 89 | #[cfg(any($($($(all(), $(@ $PopFrom)?)?)?)*))] 90 | $crate::grammar!(@PopFrom for enum $name $($($($(via $PeekFromViaType)?)?)*)?, [$($variant[$($type),*]),*], $error); 91 | 92 | #[cfg(any($($($(all(), $(@ $IntoTokens)?)?)?)*))] 93 | impl $crate::IntoTokens for $name { 94 | fn into_tokens(self, root: &$crate::__::TokenStream, tokens: &mut impl $crate::__::Extend<$crate::__::TokenTree>) { 95 | match self { 96 | $(Self::$variant(value) => $crate::IntoTokens::into_tokens(value, root, tokens),)* 97 | } 98 | } 99 | } 100 | 101 | $crate::grammar!($($tt)*); 102 | }; 103 | { 104 | $(#[$($attr:tt)*])* 105 | $vis:vis struct $name:ident$(: $( 106 | $(PeekFrom $(@ $PeekFrom:tt)? $(via $PeekFromViaType:ident)?)? 107 | $(PopFrom $(@ $PopFrom:tt)?)? 108 | $(IntoTokens $(@ $IntoTokens:tt)?)? 109 | ),*)? {$( 110 | $(#[$($field_attr:tt)*])* 111 | $field_vis:vis $field:ident: $type:ty 112 | ),*$(,)?} 113 | 114 | $($tt:tt)* 115 | } => { 116 | $(#[$($attr)*])* 117 | $vis struct $name {$( 118 | $(#[$($field_attr)*])* 119 | $field_vis $field: <$type as $crate::PopParsedFrom>::Parsed, 120 | )*} 121 | 122 | #[cfg(any($($($(all(), $(@ $PeekFrom)?)?)?)*))] 123 | $crate::grammar!(@PeekFrom for struct $name $($($($(via $PeekFromViaType)?)?)*)?, $($type),*); 124 | 125 | #[cfg(any($($($(all(), $(@ $PopFrom)?)?)?)*))] 126 | impl $crate::PopParsedFrom for $name { 127 | type Parsed = Self; 128 | fn pop_parsed_from(input: &mut $crate::Input, errors: &mut $crate::Errors) -> $crate::__::Result { 129 | $crate::__::Result::Ok(Self { 130 | $($field: <$type as $crate::PopParsedFrom>::pop_parsed_from(input, errors)?,)* 131 | }) 132 | } 133 | } 134 | 135 | #[cfg(any($($($(all(), $(@ $IntoTokens)?)?)?)*))] 136 | impl $crate::IntoTokens for $name { 137 | fn into_tokens(self, root: &$crate::__::TokenStream, tokens: &mut impl $crate::__::Extend<$crate::__::TokenTree>) { 138 | let Self { 139 | $($field,)* 140 | } = self; 141 | $($crate::IntoTokens::into_tokens($field, root, tokens);)* 142 | } 143 | } 144 | 145 | $crate::grammar!($($tt)*); 146 | }; 147 | { 148 | $(#[$($attr:tt)*])* 149 | $vis:vis struct $name:ident$(: $( 150 | $(PeekFrom $(@ $PeekFrom:tt)? $(via $PeekFromViaType:ident)?)? 151 | $(PopFrom $(@ $PopFrom:tt)?)? 152 | ),*)? 
($( 153 | $(#[$($field_attr:tt)*])* 154 | $field_vis:vis $type:ty 155 | ),*$(,)?); 156 | 157 | $($tt:tt)* 158 | } => { 159 | $(#[$($attr)*])* 160 | $vis struct $name ($( 161 | $(#[$($field_attr)*])* 162 | $field_vis <$type as $crate::PopParsedFrom>::Parsed, 163 | )*); 164 | 165 | #[cfg(any($($($(all(), $(@ $PeekFrom)?)?)?)*))] 166 | $crate::grammar!(@PeekFrom for struct $name $($($($(via $PeekFromViaType)?)?)*)?, $($type),*); 167 | 168 | #[cfg(any($($($(all(), $(@ $PopFrom)?)?)?)*))] 169 | impl $crate::PopParsedFrom for $name { 170 | type Parsed = Self; 171 | fn pop_parsed_from(input: &mut $crate::Input, errors: &mut $crate::Errors) -> $crate::__::Result { 172 | $crate::__::Result::Ok(Self ( 173 | $(<$type as $crate::PopParsedFrom>::pop_parsed_from(input, errors)?,)* 174 | )) 175 | } 176 | } 177 | 178 | $crate::grammar!($($tt)*); 179 | }; 180 | 181 | (@PeekFrom for enum $name:ident, [$([$($type:ty),*$(,)?]),*$(,)?]$(,)?) => { 182 | impl $crate::PeekFrom for $name { 183 | fn peek_from(input: &$crate::Input) -> $crate::__::bool { 184 | false 185 | $(|| $crate::grammar!(@peek_first $name input $($type,)*))* 186 | } 187 | } 188 | }; 189 | (@PeekFrom for struct $name:ident, $($type:ty),*$(,)?) => { 190 | impl $crate::PeekFrom for $name { 191 | fn peek_from(input: &$crate::Input) -> $crate::__::bool { 192 | $crate::grammar!(@peek_first $name input $($type,)*) 193 | } 194 | } 195 | }; 196 | (@PeekFrom for $_either:tt $name:ident via $PeekFromViaType:ident, $($_ignored:tt)*) => { 197 | impl $crate::PeekFrom for $name { 198 | fn peek_from(input: &$crate::Input) -> $crate::__::bool { 199 | <$PeekFromViaType as $crate::PeekFrom>::peek_from(input) 200 | } 201 | } 202 | }; 203 | 204 | (@PopFrom for enum $name:ident, [$($variant:ident[$($type:ty),*$(,)?]),*$(,)?], $error:expr$(,)?) => { 205 | impl $crate::PopParsedFrom for $name { 206 | type Parsed = Self; 207 | fn pop_parsed_from(input: &mut $crate::Input, errors: &mut $crate::Errors) -> $crate::__::Result { 208 | $crate::__::Result::Ok($(if let Some(values) = ($(<$type as $crate::PopParsedFrom>::peek_pop_parsed_from(input, errors)?),*) { 209 | Self::$variant(values) 210 | } else)* { 211 | return $crate::__::Result::Err(errors.push($crate::Error::new( 212 | $crate::ErrorPriority::GRAMMAR, 213 | $error, 214 | [input.front_span()], 215 | ))); 216 | }) 217 | } 218 | } 219 | }; 220 | //TODO 221 | (@PopFrom for struct $name:ident, $($type:ty),*$(,)?) => { 222 | impl $crate::PeekFrom for $name { 223 | fn peek_from(input: &$crate::Input) -> $crate::__::bool { 224 | $crate::grammar!(@peek_first $name input $($type,)*) 225 | } 226 | } 227 | }; 228 | (@PopFrom for $_either:tt $name:ident via $PopFromViaType:ident, $($_ignored:tt)*) => { 229 | impl $crate::PopParsedFrom for $name { 230 | type Parsed = Self; 231 | fn pop_parsed_from(input: &mut $crate::Input, errors: &mut $crate::Errors) -> $crate::__::Result { 232 | $crate::__::Result::Ok( 233 | ::Parsed>>::from( 234 | <$PopFromViaType as $crate::PopParsedFrom>::pop_parsed_from(input, errors)?, 235 | ), 236 | ) 237 | } 238 | } 239 | }; 240 | 241 | (@peek_first $name:ident $input:ident $type:ty, $($rest:ty,)*) => ( 242 | <$type as $crate::PeekFrom>::peek_from($input) 243 | ); 244 | (@peek_first $name:ident $input:ident) => ( 245 | ::core::compile_error!($crate::__::concat!("To implement `PeekFrom` for `", $crate::__::stringify!($name), "`, at least one field is necessary.")) 246 | ); 247 | (@enum_doc []) => ( 248 | // Empty. 
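// (The accumulating arms below build the variant-list doc line from the variant types:
// e.g. variants `A(X)` and `B(Y, Z)` would yield the doc text "[`X`] | [`Y`] [`Z`] ".)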
249 | "" 250 | ); 251 | (@enum_doc [[$($type0:ty,)*] $([$($type:ty,)*])*]) => ( 252 | // Start. 253 | $crate::grammar!(@enum_doc [$([$($type,)*])*] [$("[`", $crate::__::stringify!($type0), "`] ", )*]) 254 | ); 255 | (@enum_doc [[$($type0:ty,)*] $([$($type:ty,)*])*] [$($output:tt)*]) => ( 256 | // Continue. 257 | $crate::grammar!(@enum_doc [$([$($type,)*])*] [$($output)* "| ", $("[`", $crate::__::stringify!($type0), "`] ", )*]) 258 | ); 259 | (@enum_doc [] [$($output:tt)*]) => ( 260 | // End. 261 | $crate::__::concat!($($output)*) 262 | ); 263 | {$t:tt $($tt:tt)*} => { 264 | // Error 265 | ::core::compile_error!($crate::__::concat!("Unexpected grammar input: ", $crate::__::stringify!($t $($tt)*))); 266 | }; 267 | {} => {}; // Stop. 268 | } 269 | -------------------------------------------------------------------------------- /supply-chain/imports.lock: -------------------------------------------------------------------------------- 1 | 2 | # cargo-vet imports lock 3 | 4 | [[unpublished.loess]] 5 | version = "0.2.3" 6 | audited_as = "0.2.2" 7 | 8 | [[audits.google.audits.proc-macro2]] 9 | who = "Lukasz Anforowicz " 10 | criteria = "safe-to-deploy" 11 | version = "1.0.78" 12 | notes = """ 13 | Grepped for \"crypt\", \"cipher\", \"fs\", \"net\" - there were no hits 14 | (except for a benign \"fs\" hit in a doc comment) 15 | 16 | Notes from the `unsafe` review can be found in https://crrev.com/c/5385745. 17 | """ 18 | aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT" 19 | 20 | [[audits.google.audits.proc-macro2]] 21 | who = "Adrian Taylor " 22 | criteria = "safe-to-deploy" 23 | delta = "1.0.78 -> 1.0.79" 24 | aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT" 25 | 26 | [[audits.google.audits.proc-macro2]] 27 | who = "Adrian Taylor " 28 | criteria = "safe-to-deploy" 29 | delta = "1.0.79 -> 1.0.80" 30 | aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT" 31 | 32 | [[audits.google.audits.proc-macro2]] 33 | who = "Dustin J. Mitchell " 34 | criteria = "safe-to-deploy" 35 | delta = "1.0.80 -> 1.0.81" 36 | notes = "Comment changes only" 37 | aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT" 38 | 39 | [[audits.google.audits.proc-macro2]] 40 | who = "danakj " 41 | criteria = "safe-to-deploy" 42 | delta = "1.0.81 -> 1.0.82" 43 | aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT" 44 | 45 | [[audits.google.audits.proc-macro2]] 46 | who = "Dustin J. Mitchell " 47 | criteria = "safe-to-deploy" 48 | delta = "1.0.82 -> 1.0.83" 49 | notes = "Substantive change is replacing String with Box, saving memory." 50 | aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT" 51 | 52 | [[audits.google.audits.proc-macro2]] 53 | who = "Lukasz Anforowicz " 54 | criteria = "safe-to-deploy" 55 | delta = "1.0.83 -> 1.0.84" 56 | notes = "Only doc comment changes in `src/lib.rs`." 
57 | aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT" 58 | 59 | [[audits.google.audits.proc-macro2]] 60 | who = "danakj@chromium.org" 61 | criteria = "safe-to-deploy" 62 | delta = "1.0.84 -> 1.0.85" 63 | notes = "Test-only changes." 64 | aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT" 65 | 66 | [[audits.google.audits.proc-macro2]] 67 | who = "Lukasz Anforowicz " 68 | criteria = "safe-to-deploy" 69 | delta = "1.0.85 -> 1.0.86" 70 | notes = """ 71 | Comment-only changes in `build.rs`. 72 | Reordering of `Cargo.toml` entries. 73 | Just bumping up the version number in `lib.rs`. 74 | Config-related changes in `test_size.rs`. 75 | """ 76 | aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT" 77 | 78 | [[audits.google.audits.proc-macro2]] 79 | who = "danakj " 80 | criteria = "safe-to-deploy" 81 | delta = "1.0.86 -> 1.0.87" 82 | notes = "No new unsafe interactions." 83 | aggregated-from = "https://chromium.googlesource.com/chromium/src/+/main/third_party/rust/chromium_crates_io/supply-chain/audits.toml?format=TEXT" 84 | 85 | [[audits.google.audits.proc-macro2]] 86 | who = "Liza Burakova { ${3:tokens} }", 247 | "\t$4", 248 | "}}", 249 | "$0" 250 | ], 251 | "description": "In Loess quotes, expands to a `match` statement." 252 | }, 253 | "Loess quote #match branch": { 254 | "scope": "rust", 255 | "prefix": "#branch", 256 | "body": [ 257 | "${1:pattern} => { ${2:tokens} }", 258 | "$0" 259 | ], 260 | "description": "A Loess quote `#match` branch." 261 | }, 262 | "Loess quote #'label:": { 263 | "scope": "rust", 264 | "prefix": "#'label:", 265 | "body": [ 266 | "{#${1:'label}: {", 267 | "\t${2:tokens}", 268 | "}}", 269 | "$0" 270 | ], 271 | "description": "In Loess quotes, expands to a block with a label." 272 | }, 273 | "Loess quote #loop": { 274 | "scope": "rust", 275 | "prefix": "#loop", 276 | "body": [ 277 | "{#loop {", 278 | "\t${1:tokens}", 279 | "}}", 280 | "$0" 281 | ], 282 | "description": "In Loess quotes, expands to a `loop`-statement." 283 | }, 284 | "Loess quote #'label: loop": { 285 | "scope": "rust", 286 | "prefix": "#'label: loop", 287 | "body": [ 288 | "{#${1:'label}: loop {", 289 | "\t${2:tokens}", 290 | "}}", 291 | "$0" 292 | ], 293 | "description": "In Loess quotes, expands to a `loop`-statement with a label." 294 | }, 295 | "Loess quote #for": { 296 | "scope": "rust", 297 | "prefix": "#for", 298 | "body": [ 299 | "{#for ${1:pattern} in ${2:expression} {", 300 | "\t${3:tokens}", 301 | "}}", 302 | "$0" 303 | ], 304 | "description": "In Loess quotes, expands to a `for`-loop." 305 | }, 306 | "Loess quote #'label: for": { 307 | "scope": "rust", 308 | "prefix": "#'label: for", 309 | "body": [ 310 | "{#${1:'label}: for ${2:pattern} in ${3:expression} {", 311 | "\t${4:tokens}", 312 | "}}", 313 | "$0" 314 | ], 315 | "description": "In Loess quotes, expands to a `for`-loop with a label." 316 | }, 317 | "Loess quote #while": { 318 | "scope": "rust", 319 | "prefix": "#while", 320 | "body": [ 321 | "{#while ${1:expression} {", 322 | "\t${2:tokens}", 323 | "}}", 324 | "$0" 325 | ], 326 | "description": "In Loess quotes, expands to a `while`-loop." 
327 | }, 328 | "Loess quote #'label: while": { 329 | "scope": "rust", 330 | "prefix": "#'label: while", 331 | "body": [ 332 | "{#${1:'label}: while ${2:expression} {", 333 | "\t${3:tokens}", 334 | "}}", 335 | "$0" 336 | ], 337 | "description": "In Loess quotes, expands to a `while`-loop with a label." 338 | }, 339 | "Loess quote #while let": { 340 | "scope": "rust", 341 | "prefix": "#while let", 342 | "body": [ 343 | "{#while let ${1:pattern} = ${2:expression} {", 344 | "\t${3:tokens}", 345 | "}}", 346 | "$0" 347 | ], 348 | "description": "In Loess quotes, expands to a `while let`-loop." 349 | }, 350 | "Loess quote #'label: while let": { 351 | "scope": "rust", 352 | "prefix": "#'label: while let", 353 | "body": [ 354 | "{#${1:'label}: while let ${2:pattern} = ${3:expression} {", 355 | "\t${4:tokens}", 356 | "}}", 357 | "$0" 358 | ], 359 | "description": "In Loess quotes, expands to a `while let`-loop with a label." 360 | } 361 | } -------------------------------------------------------------------------------- /loess/src/scaffold.rs: -------------------------------------------------------------------------------- 1 | //! Modular helper types mainly for use within [`grammar!`](`crate::grammar!`). 2 | //! 3 | //! The enums in this module are vacant, as they parse their wrapped types' projections. 4 | //! 5 | //! See also [`PopParsedFrom`#foreign-impls] for additional, mostly lower-level building blocks. 6 | 7 | use std::{any::type_name, collections::VecDeque, convert::Infallible, iter, marker::PhantomData}; 8 | 9 | use proc_macro2::{TokenStream, TokenTree}; 10 | 11 | use crate::{ 12 | ConstErrorPriority, Error, ErrorPriority, Errors, Input, PeekFrom, PopParsedFrom, 13 | error_priorities::UNCONSUMED_AFTER_REPEATS, 14 | stateful::{ 15 | DelimitedStepper, PeekNextFrom, RepeatCountStepper, SeparatedStepper, SimpleStepper, 16 | Stepper, 17 | }, 18 | }; 19 | 20 | mod groups; 21 | pub use groups::{CurlyBraces, MetaGroup, Parentheses, SquareBrackets}; 22 | 23 | /// Doesn't fail to parse but emits an [`Error`] with the given [`ConstErrorPriority`] for any unconsumed tokens in [`Input`] after `T`. 24 | pub(crate) enum Exhaustive { 25 | _Vacant(PhantomData<(T, P)>, Infallible), 26 | } 27 | 28 | impl PopParsedFrom for Exhaustive { 29 | type Parsed = T::Parsed; 30 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result { 31 | let value = T::pop_parsed_from(input, errors); 32 | EndOfInput::
<P>
::pop_parsed_from(input, errors).ok(); 33 | Ok(value?) 34 | } 35 | } 36 | 37 | //TODO: Maybe replace on input with some into_unconsumed_tokens_error. 38 | /// Fails to parse and emits an [`Error`] with the given [`ConstErrorPriority`] for any unconsumed tokens in [`Input`]. 39 | #[derive(Clone)] 40 | pub(crate) struct EndOfInput(PhantomData
<P>
); 41 | 42 | /// Fails iff the [`Input`] isn't empty. 43 | impl PopParsedFrom for EndOfInput
<P>
{ 44 | type Parsed = Self; 45 | type Remnant = (); 46 | 47 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result { 48 | input 49 | .is_empty() 50 | .then_some(Self(PhantomData)) 51 | .ok_or_else(|| { 52 | let rest = input.tokens.iter().cloned().collect::(); 53 | errors.push(Error::new( 54 | P::PRIORITY, 55 | format!("Unconsumed tokens: `{rest}`"), 56 | rest.into_iter().map(|t| t.span()), 57 | )); 58 | }) 59 | } 60 | } 61 | 62 | /// Exhaustive parsing of C: [`Repeats`]. 63 | /// Often implicit via impl [`PopParsedFrom`]. 64 | pub enum ToEnd { 65 | #[expect(missing_docs)] 66 | _Vacant(PhantomData, Infallible), 67 | } 68 | 69 | /// Greedy parsing of C: [`Repeats`]. 70 | pub enum Greedy { 71 | #[expect(missing_docs)] 72 | _Vacant(PhantomData, Infallible), 73 | } 74 | 75 | //TODO: Use this also for Separated and Delimited. 76 | /// Flexible [`Stepper`]-based parsing of items used by [`Greedy`] and [`ToEnd`]. 77 | /// 78 | /// You can usually copy-paste the following: 79 | /// 80 | /// ```rust,ignore 81 | /// fn collect_repeats( 82 | /// input: &mut Input, 83 | /// errors: &mut Errors, 84 | /// f: &mut dyn FnMut( 85 | /// &mut Input, 86 | /// &mut Errors, 87 | /// ) -> Result::Item>, ()>, 88 | /// ) -> Result { 89 | /// iter::from_fn(move || f(input, errors).transpose()).collect() 90 | /// } 91 | /// ``` 92 | pub trait Repeats { 93 | type Projected; 94 | type Stepper: Stepper; 95 | 96 | fn collect_repeats( 97 | input: &mut Input, 98 | errors: &mut Errors, 99 | f: &mut dyn FnMut( 100 | &mut Input, 101 | &mut Errors, 102 | ) -> Result::Item>, ()>, 103 | ) -> Result; 104 | } 105 | 106 | impl PopParsedFrom for ToEnd { 107 | type Parsed = C::Projected; 108 | 109 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result { 110 | let mut stepper = C::Stepper::default(); 111 | let mut stop = false; 112 | 113 | //TODO: Revise error emission wrt constraint errors! 114 | C::collect_repeats(input, errors, &mut |input, errors| { 115 | if stop || input.is_empty() { 116 | return Ok(None); 117 | } 118 | let len_before = input.len(); 119 | let Some(item) = stepper.pop_next_from(input, errors)? else { 120 | EndOfInput::::pop_parsed_from(input, errors).ok(); 121 | stop = true; 122 | return Ok(None); 123 | }; 124 | 125 | if input.len() == len_before { 126 | errors.push(Error::new( 127 | ErrorPriority::UNCONSUMED_INPUT, 128 | format!( 129 | "{} looped without consuming input. (This likely implies a faulty grammar.)", 130 | type_name::() 131 | ), 132 | input.drain_spans(..), 133 | )); 134 | stop = true; 135 | } 136 | 137 | Ok(Some(item)) 138 | }) 139 | } 140 | } 141 | 142 | impl PopParsedFrom for Greedy 143 | where 144 | C::Stepper: PeekNextFrom, 145 | { 146 | type Parsed = C::Projected; 147 | 148 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result { 149 | let mut stepper = C::Stepper::default(); 150 | let mut stop = false; 151 | 152 | //TODO: Revise error emission wrt constraint errors! 153 | C::collect_repeats(input, errors, &mut |input, errors| { 154 | if stop || input.is_empty() { 155 | return Ok(None); 156 | } 157 | let len_before = input.len(); 158 | let Some(item) = stepper.peek_pop_next_from(input, errors)? else { 159 | stop = true; 160 | return Ok(None); 161 | }; 162 | 163 | if input.len() == len_before { 164 | errors.push(Error::new( 165 | ErrorPriority::UNCONSUMED_INPUT, 166 | format!( 167 | "{} looped without consuming input. 
(This likely implies a faulty grammar.)", 168 | type_name::() 169 | ), 170 | input.drain_spans(..), 171 | )); 172 | stop = true; 173 | } 174 | 175 | Ok(Some(item)) 176 | }) 177 | } 178 | } 179 | 180 | impl PeekFrom for Greedy { 181 | fn peek_from(input: &Input) -> bool { 182 | C::peek_from(input) 183 | } 184 | } 185 | 186 | impl PeekFrom for ToEnd { 187 | fn peek_from(input: &Input) -> bool { 188 | C::peek_from(input) 189 | } 190 | } 191 | 192 | impl Repeats for Vec { 193 | type Projected = Vec; 194 | type Stepper = SimpleStepper; 195 | 196 | fn collect_repeats( 197 | input: &mut Input, 198 | errors: &mut Errors, 199 | f: &mut dyn FnMut( 200 | &mut Input, 201 | &mut Errors, 202 | ) -> Result::Item>, ()>, 203 | ) -> Result { 204 | iter::from_fn(move || f(input, errors).transpose()).collect() 205 | } 206 | } 207 | 208 | /// Implicit [`ToEnd`]. 209 | impl PopParsedFrom for Vec { 210 | type Parsed = as PopParsedFrom>::Parsed; 211 | 212 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result { 213 | ToEnd::::pop_parsed_from(input, errors) 214 | } 215 | } 216 | 217 | impl Repeats for VecDeque { 218 | type Projected = VecDeque; 219 | type Stepper = SimpleStepper; 220 | 221 | fn collect_repeats( 222 | input: &mut Input, 223 | errors: &mut Errors, 224 | f: &mut dyn FnMut( 225 | &mut Input, 226 | &mut Errors, 227 | ) -> Result::Item>, ()>, 228 | ) -> Result { 229 | iter::from_fn(move || f(input, errors).transpose()).collect() 230 | } 231 | } 232 | 233 | /// Implicit [`ToEnd`]. 234 | impl PopParsedFrom for VecDeque { 235 | type Parsed = as PopParsedFrom>::Parsed; 236 | 237 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result { 238 | ToEnd::::pop_parsed_from(input, errors) 239 | } 240 | } 241 | 242 | impl Repeats for TokenStream { 243 | type Projected = TokenStream; 244 | type Stepper = SimpleStepper; 245 | 246 | fn collect_repeats( 247 | input: &mut Input, 248 | errors: &mut Errors, 249 | f: &mut dyn FnMut( 250 | &mut Input, 251 | &mut Errors, 252 | ) -> Result::Item>, ()>, 253 | ) -> Result { 254 | iter::from_fn(move || f(input, errors).transpose()).collect() 255 | } 256 | } 257 | 258 | /// A series of alternating `T` and `S` where either can be last. 259 | /// 260 | /// # Recovery 261 | /// 262 | /// Recovers towards `S`, preserving it if a `T` led the current repeat. 263 | /// 264 | /// # Errors 265 | /// 266 | /// Emits an error iff a repetition does not consume any input, consuming all remaining input. 267 | /// 268 | /// This is a symptom of faulty grammar definitions. 269 | /// 270 | /// # Returns 271 | /// 272 | /// [`Ok`] once all input is consumed. (Never [`Err`].) 273 | /// 274 | /// Can be wrapped in [`Greedy`] to preserve remaining input after [`T::peek_from`](`PeekFrom::peek_from`) 275 | /// returns [`false`] at the start of an iteration. 276 | pub struct Separated { 277 | #[allow(missing_docs)] 278 | pub delimited: Vec<(T, S)>, 279 | #[allow(missing_docs)] 280 | pub trailing: Option, 281 | } 282 | 283 | impl Repeats for Separated 284 | where 285 | T: PopParsedFrom, 286 | S: PopParsedFrom + PeekFrom, 287 | { 288 | type Projected = Separated; 289 | 290 | type Stepper = SeparatedStepper; 291 | 292 | fn collect_repeats( 293 | input: &mut Input, 294 | errors: &mut Errors, 295 | f: &mut dyn FnMut( 296 | &mut Input, 297 | &mut Errors, 298 | ) -> Result::Item>, ()>, 299 | ) -> Result { 300 | todo!() 301 | } 302 | } 303 | 304 | /// Implicit [`ToEnd`]. 
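// A rough, illustrative sketch of how `Separated` might be used from `grammar!`;
// the types `Arg` and `Comma` are hypothetical stand-ins (they would need to
// implement `PeekFrom`/`PopParsedFrom` in the caller's crate), so this is not
// part of this module's API:
//
//     grammar! {
//         pub struct Args: PopFrom {
//             pub parens: Parentheses<Greedy<Separated<Arg, Comma>>>,
//         }
//     }
//
// (`Delimited`, just below, is documented as the analogue with the delimiter
// preceding each item.)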
305 | impl PopParsedFrom for Separated { 306 | type Parsed = > as PopParsedFrom>::Parsed; 307 | 308 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result { 309 | > as PopParsedFrom>::pop_parsed_from(input, errors) 310 | } 311 | } 312 | 313 | /// A series of alternating `T` and **precedent** `D` where either can be last. 314 | /// 315 | /// # Recovery 316 | /// 317 | /// Recovers towards `D`, preserving it if a `T` led the current repeat. 318 | /// 319 | /// # Errors 320 | /// 321 | /// Emits an error iff a repetition does not consume any input, consuming all remaining input. 322 | /// 323 | /// This is a symptom of faulty grammar definitions. 324 | /// 325 | /// # Returns 326 | /// 327 | /// [`Ok`] once all input is consumed. (Never [`Err`].) 328 | /// 329 | /// Can be wrapped in [`Greedy`] to preserve remaining input after [`T::peek_from`](`PeekFrom::peek_from`) 330 | /// returns [`false`] at the start of an iteration. 331 | pub struct Delimited { 332 | #[allow(missing_docs)] 333 | pub delimited: Vec<(T, D)>, 334 | #[allow(missing_docs)] 335 | pub trailing: Option, 336 | } 337 | 338 | impl Repeats for Delimited 339 | where 340 | T: PopParsedFrom, 341 | D: PopParsedFrom + PeekFrom, 342 | { 343 | type Projected = Delimited; 344 | 345 | type Stepper = DelimitedStepper; 346 | 347 | fn collect_repeats( 348 | input: &mut Input, 349 | errors: &mut Errors, 350 | f: &mut dyn FnMut( 351 | &mut Input, 352 | &mut Errors, 353 | ) -> Result::Item>, ()>, 354 | ) -> Result { 355 | todo!() 356 | } 357 | } 358 | 359 | /// Implicit [`ToEnd`]. 360 | impl PopParsedFrom for Delimited { 361 | type Parsed = > as PopParsedFrom>::Parsed; 362 | 363 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result { 364 | > as PopParsedFrom>::pop_parsed_from(input, errors) 365 | } 366 | } 367 | 368 | /// Wraps around other C: [`Repeats`] to constrain item count. 369 | pub enum RepeatCount { 370 | #[expect(missing_docs)] 371 | _Vacant(PhantomData, Infallible), 372 | } 373 | 374 | impl Repeats for RepeatCount { 375 | type Projected = C::Projected; 376 | 377 | type Stepper = RepeatCountStepper; 378 | 379 | fn collect_repeats( 380 | input: &mut Input, 381 | errors: &mut Errors, 382 | f: &mut dyn FnMut( 383 | &mut Input, 384 | &mut Errors, 385 | ) -> Result::Item>, ()>, 386 | ) -> Result { 387 | //TODO: Constrain here too/only? 388 | C::collect_repeats(input, errors, f) 389 | } 390 | } 391 | 392 | /// Implicit [`ToEnd`]. 393 | impl PopParsedFrom for RepeatCount { 394 | type Parsed = as PopParsedFrom>::Parsed; 395 | 396 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result { 397 | ToEnd::::pop_parsed_from(input, errors) 398 | } 399 | } 400 | 401 | /// Waiting on feature [`generic_const_exprs`](https://github.com/rust-lang/rust/issues/76560) for expansion. 402 | impl PeekFrom for RepeatCount 403 | where 404 | C::Stepper: PeekNextFrom, 405 | { 406 | fn peek_from(input: &Input) -> bool { 407 | C::Stepper::default().peek_next_from(input) 408 | } 409 | } 410 | 411 | // /// Iff `T` fails to parse immediately, slides along the input by peeking until `T` peeks successfully and then retries once. 412 | // /// 413 | // /// Transparent to errors only during the initial attempt. 
414 | // pub enum Slide { 415 | // #[expect(missing_docs)] 416 | // _Vacant(PhantomData, Infallible), 417 | // } 418 | 419 | // impl PopParsedFrom for Slide { 420 | // type Parsed = T::Parsed; 421 | 422 | // fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result { 423 | // match T::pop_parsed_from(input, errors) { 424 | // Ok(parsed) => Ok(parsed), 425 | // Err(()) => { 426 | // while !input.is_empty() && !T::peek_from(input) { 427 | // input.tokens.pop_front(); 428 | // } 429 | // T::pop_parsed_from(input, &mut Errors::new()) // Silent. 430 | // } 431 | // } 432 | // } 433 | // } 434 | -------------------------------------------------------------------------------- /loess/tests/quote_expansions.rs: -------------------------------------------------------------------------------- 1 | #![deny(unused_variables)] // At least for now, this is used to detect missing expansions. 2 | 3 | use loess::{ 4 | Error, ErrorPriority, quote_into_call_site, quote_into_mixed_site, quote_into_with_exact_span, 5 | raw_quote_into_call_site, raw_quote_into_mixed_site, raw_quote_into_with_exact_span, 6 | }; 7 | use proc_macro2::{Span, TokenStream, TokenTree}; 8 | 9 | macro_rules! test { 10 | (let ($span:ident, $root:ident, $tokens:ident), $test:expr, $expected:expr$(,)?) => {{ 11 | let $span = Span::call_site(); 12 | let $root: &TokenStream = &TokenStream::new(); 13 | let mut tokens = TokenStream::new(); 14 | let $tokens = &mut tokens; 15 | let result = $test; 16 | assert_eq!(tokens.to_string(), $expected); 17 | result 18 | }}; 19 | } 20 | 21 | #[test] 22 | pub fn mixed_site() { 23 | test!(let (span, root, tokens), quote_into_mixed_site!(span, root, tokens, {....... .....}), "....... ....."); 24 | test!(let (span, root, tokens), quote_into_mixed_site!(span, root, tokens, {},), ""); // Trailing comma! 25 | } 26 | 27 | #[test] 28 | pub fn same_site() { 29 | test!(let (span, root, tokens), quote_into_with_exact_span!(span, root, tokens, {....... .....}), "....... ....."); 30 | test!(let (span, root, tokens), quote_into_with_exact_span!(span, root, tokens, {},), ""); // Trailing comma! 31 | } 32 | 33 | #[test] 34 | pub fn call_site() { 35 | test!(let (span, root, tokens), quote_into_call_site!(span, root, tokens, {....... .....}), "....... ....."); 36 | test!(let (span, root, tokens), quote_into_call_site!(span, root, tokens, {},), ""); // Trailing comma! 37 | } 38 | 39 | #[test] 40 | pub fn mixed_site_raw() { 41 | test!(let (span, _root, tokens), raw_quote_into_mixed_site!(span, tokens, {....... .....}), "....... ....."); 42 | test!(let (span, _root, tokens), raw_quote_into_mixed_site!(span, tokens, {},), ""); // Trailing comma! 43 | } 44 | 45 | #[test] 46 | pub fn same_site_raw() { 47 | test!(let (span, _root, tokens), raw_quote_into_with_exact_span!(span, tokens, {....... .....}), "....... ....."); 48 | test!(let (span, _root, tokens), raw_quote_into_with_exact_span!(span, tokens, {},), ""); // Trailing comma! 49 | } 50 | 51 | #[test] 52 | pub fn call_site_raw() { 53 | test!(let (span, _root, tokens), raw_quote_into_call_site!(span, tokens, {....... .....}), "....... ....."); 54 | test!(let (span, _root, tokens), raw_quote_into_call_site!(span, tokens, {},), ""); // Trailing comma! 55 | } 56 | 57 | #[test] 58 | pub fn long_punctuation() { 59 | test!(let (span, root, tokens), quote_into_mixed_site!(span, root, tokens, {............... .....}), "............... 
....."); 60 | } 61 | 62 | #[test] 63 | pub fn paste() { 64 | let mut custom_root = TokenStream::new(); 65 | raw_quote_into_with_exact_span!(Span::call_site(), &mut custom_root, [::custom::root]); 66 | let error = Error::new( 67 | ErrorPriority::GRAMMAR, 68 | "This is an error message.", 69 | [Span::mixed_site()], 70 | ); 71 | test!( 72 | let (span, _root, tokens), 73 | quote_into_mixed_site!(span, &custom_root, tokens, { {#()} }), 74 | "", 75 | ); 76 | test!( 77 | let (span, _root, tokens), 78 | quote_into_mixed_site!(span, &custom_root, tokens, { {#(error.clone())} }), 79 | ":: custom :: root :: core :: compile_error ! (\"This is an error message.\") ;", 80 | ); 81 | test!( 82 | let (span, _root, tokens), 83 | //TODO: Ensure order is eval, eval, into_tokens, into_tokens. 84 | quote_into_mixed_site!(span, &custom_root, tokens, { {#(error.clone(), error)} }), 85 | ":: custom :: root :: core :: compile_error ! (\"This is an error message.\") ; :: custom :: root :: core :: compile_error ! (\"This is an error message.\") ;", 86 | ); 87 | } 88 | 89 | #[test] 90 | pub fn raw() { 91 | test!( 92 | let (span, root, tokens), 93 | quote_into_mixed_site!(span, root, tokens, { {#raw { {#raw } }} }), 94 | "{ # raw }", 95 | ); 96 | } 97 | 98 | #[test] 99 | pub fn error() { 100 | let mut custom_root = TokenStream::new(); 101 | raw_quote_into_with_exact_span!(Span::call_site(), &mut custom_root, { ::custom::root }); 102 | test!( 103 | let (span, _root, tokens), 104 | quote_into_mixed_site!(span, &custom_root, tokens, {{#error { "This is an error message." }}}), 105 | ":: custom :: root :: core :: compile_error ! (\"This is an error message.\")", 106 | ); 107 | } 108 | 109 | #[test] 110 | pub fn root() { 111 | let mut custom_root = TokenStream::new(); 112 | raw_quote_into_with_exact_span!(Span::call_site(), &mut custom_root, { ::custom::root }); 113 | test!(let (span, _root, tokens), quote_into_mixed_site!(span, &custom_root, tokens, {{#root}}), ":: custom :: root"); 114 | } 115 | 116 | #[test] 117 | pub fn let_and_span_directives() { 118 | test!(let (span, root, tokens), quote_into_mixed_site!(span, root, tokens, { 119 | {#let a = Span::mixed_site();} 120 | {#let b = Span::call_site();} 121 | {#let Some(c) = Some(Span::mixed_site()) else { unreachable!() };} 122 | {#mixed_site { mx }} 123 | {#call_site { cs }} 124 | {#located_at(a) { a_ }} 125 | {#resolved_at(b) { b_ }} 126 | {#with_exact_span(c) { c_ }} 127 | }), "mx cs a_ b_ c_"); 128 | } 129 | 130 | #[test] 131 | pub fn block() { 132 | test!(let (span, root, tokens), quote_into_mixed_site!(span, root, tokens, { 133 | {#let b = TokenStream::new();} 134 | {#{ 135 | {#let b = TokenStream::new();} 136 | {#(b)} 137 | }} 138 | {#(b)} 139 | }), ""); 140 | } 141 | 142 | #[test] 143 | pub fn r#return() { 144 | fn r#return( 145 | span: Span, 146 | root: &TokenStream, 147 | tokens: &mut impl Extend, 148 | condition: bool, 149 | ) -> bool { 150 | quote_into_mixed_site!(span, root, tokens, { 151 | {#if condition { 152 | {#return !condition;} 153 | }} 154 | not condition 155 | }); 156 | true 157 | } 158 | 159 | assert_eq!( 160 | test!(let (span, root, tokens), r#return(span, root, tokens, true), ""), 161 | false 162 | ); 163 | assert_eq!( 164 | test!(let (span, root, tokens), r#return(span, root, tokens, false), "not condition"), 165 | true 166 | ); 167 | } 168 | 169 | #[test] 170 | pub fn if_else_chain() { 171 | fn if_else_chain( 172 | span: Span, 173 | root: &TokenStream, 174 | tokens: &mut impl Extend, 175 | condition: Option, 176 | ) { 177 | 
quote_into_mixed_site!(span, root, tokens, { 178 | {#if condition == Some(true) { 179 | + 180 | } else if condition == Some(false) { 181 | - 182 | } else { 183 | ~ 184 | }} 185 | }); 186 | } 187 | 188 | test!(let (span, root, tokens), if_else_chain(span, root, tokens, Some(true)), "+"); 189 | test!(let (span, root, tokens), if_else_chain(span, root, tokens, Some(false)), "-"); 190 | test!(let (span, root, tokens), if_else_chain(span, root, tokens, None), "~"); 191 | } 192 | 193 | #[test] 194 | pub fn if_let_else_chain() { 195 | fn if_let_else_chain( 196 | span: Span, 197 | root: &TokenStream, 198 | tokens: &mut impl Extend, 199 | condition: Option, 200 | ) { 201 | quote_into_mixed_site!(span, root, tokens, { 202 | {#if let Some(true) = condition { 203 | + 204 | } else if let Some(false) = condition { 205 | - 206 | } else { 207 | ~ 208 | }} 209 | }); 210 | } 211 | 212 | test!(let (span, root, tokens), if_let_else_chain(span, root, tokens, Some(true)), "+"); 213 | test!(let (span, root, tokens), if_let_else_chain(span, root, tokens, Some(false)), "-"); 214 | test!(let (span, root, tokens), if_let_else_chain(span, root, tokens, None), "~"); 215 | } 216 | 217 | #[test] 218 | pub fn r#match() { 219 | fn r#match( 220 | span: Span, 221 | root: &TokenStream, 222 | tokens: &mut impl Extend, 223 | condition: Option, 224 | ) { 225 | quote_into_mixed_site!(span, root, tokens, { 226 | {#match condition { 227 | Some(true) => {+} 228 | Some(false) => {-} 229 | None => {~} 230 | }} 231 | }); 232 | } 233 | 234 | test!(let (span, root, tokens), r#match(span, root, tokens, Some(true)), "+"); 235 | test!(let (span, root, tokens), r#match(span, root, tokens, Some(false)), "-"); 236 | test!(let (span, root, tokens), r#match(span, root, tokens, None), "~"); 237 | } 238 | 239 | #[test] 240 | pub fn break_from_block() { 241 | fn break_from_block(span: Span, root: &TokenStream, tokens: &mut impl Extend) { 242 | #![allow(unreachable_code)] 243 | quote_into_mixed_site!(span, root, tokens, { 244 | {#'my_label: { 245 | always 246 | {#break 'my_label;} 247 | never 248 | }} 249 | }); 250 | } 251 | 252 | test!(let (span, root, tokens), break_from_block(span, root, tokens), "always"); 253 | } 254 | 255 | #[test] 256 | pub fn break_from_loop() { 257 | fn break_from_loop(span: Span, root: &TokenStream, tokens: &mut impl Extend) { 258 | quote_into_mixed_site!(span, root, tokens, { 259 | {#loop { 260 | once 261 | {#break;} 262 | }} 263 | }); 264 | } 265 | 266 | test!(let (span, root, tokens), break_from_loop(span, root, tokens), "once"); 267 | } 268 | 269 | #[test] 270 | pub fn break_from_loop_with_label() { 271 | fn break_from_loop_with_label( 272 | span: Span, 273 | root: &TokenStream, 274 | tokens: &mut impl Extend, 275 | ) { 276 | quote_into_mixed_site!(span, root, tokens, { 277 | {#'my_label: loop { 278 | once 279 | {#break 'my_label;} 280 | }} 281 | }); 282 | } 283 | 284 | test!(let (span, root, tokens), break_from_loop_with_label(span, root, tokens), "once"); 285 | } 286 | 287 | #[test] 288 | pub fn break_from_for() { 289 | fn break_from_for(span: Span, root: &TokenStream, tokens: &mut impl Extend) { 290 | quote_into_mixed_site!(span, root, tokens, { 291 | {#for _ in 0..2 { 292 | once 293 | {#break;} 294 | }} 295 | }); 296 | } 297 | 298 | test!(let (span, root, tokens), break_from_for(span, root, tokens), "once"); 299 | } 300 | 301 | #[test] 302 | pub fn break_from_for_with_label() { 303 | fn break_from_for_with_label( 304 | span: Span, 305 | root: &TokenStream, 306 | tokens: &mut impl Extend, 307 | ) { 308 | 
quote_into_mixed_site!(span, root, tokens, { 309 | {#'my_label: for _ in 0..2 { 310 | once 311 | {#break 'my_label;} 312 | }} 313 | }); 314 | } 315 | 316 | test!(let (span, root, tokens), break_from_for_with_label(span, root, tokens), "once"); 317 | } 318 | 319 | #[test] 320 | pub fn continue_in_for() { 321 | fn continue_in_for(span: Span, root: &TokenStream, tokens: &mut impl Extend) { 322 | quote_into_mixed_site!(span, root, tokens, { 323 | {#for _ in 0..2 { 324 | twice 325 | {#if true { {#continue;} }} 326 | never 327 | }} 328 | }); 329 | } 330 | 331 | test!(let (span, root, tokens), continue_in_for(span, root, tokens), "twice twice"); 332 | } 333 | 334 | #[test] 335 | pub fn continue_in_for_with_label() { 336 | fn continue_in_for_with_label( 337 | span: Span, 338 | root: &TokenStream, 339 | tokens: &mut impl Extend, 340 | ) { 341 | quote_into_mixed_site!(span, root, tokens, { 342 | {#'my_label: for _ in 0..2 { 343 | twice 344 | {#if true { {#continue 'my_label;} }} 345 | never 346 | }} 347 | }); 348 | } 349 | 350 | test!(let (span, root, tokens), continue_in_for_with_label(span, root, tokens), "twice twice"); 351 | } 352 | 353 | #[test] 354 | pub fn for_else() { 355 | fn for_else(span: Span, root: &TokenStream, tokens: &mut impl Extend) { 356 | quote_into_mixed_site!(span, root, tokens, { 357 | {#for _ in 0..0 { 358 | never 359 | } else { 360 | once 361 | }} 362 | }); 363 | } 364 | 365 | test!(let (span, root, tokens), for_else(span, root, tokens), "once"); 366 | } 367 | 368 | #[test] 369 | pub fn for_else_with_label() { 370 | fn for_else_with_label(span: Span, root: &TokenStream, tokens: &mut impl Extend) { 371 | quote_into_mixed_site!(span, root, tokens, { 372 | {#'label: for _ in 0..5 { 373 | once 374 | {#break 'label;} 375 | } else { 376 | never 377 | }} 378 | }); 379 | } 380 | 381 | test!(let (span, root, tokens), for_else_with_label(span, root, tokens), "once"); 382 | } 383 | 384 | #[test] 385 | pub fn for_not_else() { 386 | fn for_not_else(span: Span, root: &TokenStream, tokens: &mut impl Extend) { 387 | quote_into_mixed_site!(span, root, tokens, { 388 | {#for _ in 0..2 { 389 | twice 390 | } else { 391 | never 392 | }} 393 | }); 394 | } 395 | 396 | test!(let (span, root, tokens), for_not_else(span, root, tokens), "twice twice"); 397 | } 398 | 399 | #[test] 400 | pub fn while_continue_with_label() { 401 | fn while_continue_with_label( 402 | span: Span, 403 | root: &TokenStream, 404 | tokens: &mut impl Extend, 405 | ) { 406 | let mut i = 0; 407 | quote_into_mixed_site!(span, root, tokens, { 408 | {#'my_label: while i < 2 { 409 | twice 410 | {#let _ = i += 1;} // Not recommended, obviously. 
411 | {#if true { {#continue 'my_label;} }} 412 | never 413 | }} 414 | }); 415 | } 416 | 417 | test!(let (span, root, tokens), while_continue_with_label(span, root, tokens), "twice twice"); 418 | } 419 | 420 | #[test] 421 | pub fn while_else() { 422 | fn while_else(span: Span, root: &TokenStream, tokens: &mut impl Extend) { 423 | quote_into_mixed_site!(span, root, tokens, { 424 | {#while false { 425 | never 426 | } else { 427 | once 428 | }} 429 | }); 430 | } 431 | 432 | test!(let (span, root, tokens), while_else(span, root, tokens), "once"); 433 | } 434 | 435 | #[test] 436 | pub fn while_else_with_label() { 437 | fn while_else_with_label(span: Span, root: &TokenStream, tokens: &mut impl Extend) { 438 | quote_into_mixed_site!(span, root, tokens, { 439 | {#'label: while true { 440 | once 441 | {#break 'label;} 442 | } else { 443 | never 444 | }} 445 | }); 446 | } 447 | 448 | test!(let (span, root, tokens), while_else_with_label(span, root, tokens), "once"); 449 | } 450 | 451 | #[test] 452 | pub fn while_not_else() { 453 | fn while_not_else(span: Span, root: &TokenStream, tokens: &mut impl Extend) { 454 | quote_into_mixed_site!(span, root, tokens, { 455 | {#if false { }} 456 | {#while true { 457 | once 458 | {#break;} 459 | } else { 460 | never 461 | }} 462 | }); 463 | } 464 | 465 | test!(let (span, root, tokens), while_not_else(span, root, tokens), "once"); 466 | } 467 | 468 | #[test] 469 | pub fn while_let() { 470 | fn while_let(span: Span, root: &TokenStream, tokens: &mut impl Extend) { 471 | let mut condition = Some(true); 472 | quote_into_mixed_site!(span, root, tokens, { 473 | {#while let Some(_) = condition { 474 | once 475 | {#let _ = condition = None;} 476 | }} 477 | }); 478 | } 479 | 480 | test!(let (span, root, tokens), while_let(span, root, tokens), "once"); 481 | } 482 | 483 | #[test] 484 | fn braced() { 485 | test!(let (span, root, tokens), quote_into_mixed_site!(span, root, tokens, {{braced tokens}}), "{ braced tokens }"); 486 | test!(let (span, root, tokens), quote_into_mixed_site!(span, root, tokens, {{{double braced tokens}}}), "{ { double braced tokens } }"); 487 | } 488 | 489 | #[test] 490 | fn bracketed() { 491 | test!(let (span, root, tokens), quote_into_mixed_site!(span, root, tokens, {[bracketed tokens]}), "[bracketed tokens]"); 492 | test!(let (span, root, tokens), quote_into_mixed_site!(span, root, tokens, {[[double bracketed tokens]]}), "[[double bracketed tokens]]"); 493 | } 494 | 495 | #[test] 496 | fn parenthesized() { 497 | test!(let (span, root, tokens), quote_into_mixed_site!(span, root, tokens, {(parenthesized tokens)}), "(parenthesized tokens)"); 498 | test!(let (span, root, tokens), quote_into_mixed_site!(span, root, tokens, {((double parenthesized tokens))}), "((double parenthesized tokens))"); 499 | } 500 | -------------------------------------------------------------------------------- /loess-rust-lex/src/lex/token/punct.rs: -------------------------------------------------------------------------------- 1 | //! [lex.token.punct](https://doc.rust-lang.org/stable/reference/tokens.html#r-lex.token.punct): Punctuation 2 | //! 3 | //! Punctuation is implemented as structs with named [`Punct`] fields. 4 | //! Where collisions would happen, they have a 0-based suffix. 5 | 6 | use loess::{ 7 | Error, ErrorPriority, Errors, Input, IntoTokens, PopParsedFrom, SimpleSpanned, punctuation, 8 | }; 9 | use proc_macro2::{Punct, Spacing, Span, TokenStream, TokenTree}; 10 | 11 | // See as of 2025-12-03. 12 | punctuation! 
{ 13 | #[derive(Clone)] (+) not before [=] as pub Plus: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub plus } 14 | #[derive(Clone)] (-) not before [= >] as pub Minus: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub minus } 15 | #[derive(Clone)] (*) not before [=] as pub Star: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub star } 16 | #[derive(Clone)] (/) not before [=] as pub Slash: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub slash } 17 | #[derive(Clone)] (%) not before [=] as pub Percent: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub percent } 18 | #[derive(Clone)] (^) not before [=] as pub Caret: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub caret } 19 | #[derive(Clone)] (!) not before [=] as pub Not: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub not } 20 | #[derive(Clone)] (&) not before [& =] as pub And: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub and } 21 | #[derive(Clone)] (|) not before [| =] as pub Or: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub or } 22 | #[derive(Clone)] (&&) as pub AndAnd: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub and0, pub and1 } 23 | #[derive(Clone)] (||) as pub OrOr: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub or0, pub or1 } 24 | #[derive(Clone)] (<<) not before [=] as pub Shl: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub lt0, pub lt1 } 25 | #[derive(Clone)] (>>) not before [=] as pub Shr: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub gt0, pub gt1 } 26 | #[derive(Clone)] (+=) as pub PlusEq: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub plus, pub eq } 27 | #[derive(Clone)] (-=) as pub MinusEq: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub minus, pub eq } 28 | #[derive(Clone)] (*=) as pub StarEq: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub star, pub eq } 29 | #[derive(Clone)] (/=) as pub SlashEq: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub slash, pub eq } 30 | #[derive(Clone)] (%=) as pub PercentEq: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub percent, pub eq } 31 | #[derive(Clone)] (^=) as pub CaretEq: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub caret, pub eq } 32 | #[derive(Clone)] (!=) as pub NotEq: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub not, pub eq } 33 | #[derive(Clone)] (&=) as pub AndEq: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub and, pub eq } 34 | #[derive(Clone)] (|=) as pub OrEq: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub or, pub eq } 35 | #[derive(Clone)] (<<=) as pub ShlEq: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub lt0, pub lt1, pub eq } 36 | #[derive(Clone)] (>>=) as pub ShrEq: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub gt0, pub gt1, pub eq } 37 | #[derive(Clone)] (=) not before [=] as pub Eq: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub eq } 38 | #[derive(Clone)] (==) as pub EqEq: doc, Default, 
PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub eq0, pub eq1 } 39 | #[derive(Clone)] (!=) as pub Ne: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub not, pub eq } 40 | #[derive(Clone)] (>) not before [> =] as pub Gt: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub gt } 41 | #[derive(Clone)] (<) not before [< = -] as pub Lt: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub lt } 42 | #[derive(Clone)] (>=) as pub Ge: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub gt, pub eq } 43 | #[derive(Clone)] (<=) as pub Le: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub lt, pub eq } 44 | #[derive(Clone)] (@) as pub At: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub at } 45 | #[derive(Clone)] (_) as pub Underscore: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub underscore } 46 | #[derive(Clone)] (.) not before [.] as pub Dot: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub dot } 47 | #[derive(Clone)] (..) not before [. =] as pub DotDot: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub dot0, pub dot1 } 48 | #[derive(Clone)] (...) as pub DotDotDot: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub dot0, pub dot1, pub dot2 } 49 | #[derive(Clone)] (..=) as pub DotDotEq: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub dot0, pub dot1, pub eq } 50 | #[derive(Clone)] (,) as pub Comma: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub comma } 51 | #[derive(Clone)] (;) as pub Semi: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub semi } 52 | #[derive(Clone)] (:) not before [:] as pub Colon: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub colon } 53 | #[derive(Clone)] (::) as pub PathSep: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub colon0, pub colon1 } 54 | #[derive(Clone)] (->) as pub RArrow: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub minus, pub gt } 55 | #[derive(Clone)] (=>) as pub FatArrow: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub eq, pub gt } 56 | 57 | /// 58 | /// Unused since before Rust 1.0, but still treated as single token. 59 | #[derive(Clone)] (<-) as pub LArrow: doc, Default, PeekFrom, PopFrom, IntoTokens, LocatedAt, ResolvedAt { pub lt, pub minus } 60 | 61 | #[derive(Clone)] (#) as pub Pound: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub pound } 62 | #[derive(Clone)] ($) as pub Dollar: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub dollar } 63 | #[derive(Clone)] (?) 
as pub Question: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub question } 64 | #[derive(Clone)] (~) as pub Tilde: doc, Default, PeekFrom, PopFrom, IntoTokens, SimpleSpanned, LocatedAt, ResolvedAt { pub tilde } 65 | } 66 | 67 | // `!` 68 | impl Default for Not { 69 | fn default() -> Self { 70 | Self { 71 | not: Punct::new('!', Spacing::Alone).with_span(Span::mixed_site()), 72 | } 73 | } 74 | } 75 | 76 | impl PopParsedFrom for Not { 77 | type Parsed = Self; 78 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result<Self::Parsed, ()> { 79 | input 80 | .pop_or_replace(|tts, _| match tts { 81 | [TokenTree::Punct(not)] 82 | if not.as_char() == '!' && not.spacing() == Spacing::Alone => 83 | { 84 | Ok(Self { not }) 85 | } 86 | other => Err(other), 87 | }) 88 | .map_err(|spans| { 89 | errors.push(Error::new(ErrorPriority::GRAMMAR, "Expected `!`.", spans)) 90 | }) 91 | } 92 | } 93 | 94 | impl IntoTokens for Not { 95 | fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>) { 96 | self.not.into_tokens(root, tokens) 97 | } 98 | } 99 | 100 | // `|` 101 | impl Default for Or { 102 | fn default() -> Self { 103 | Self { 104 | or: Punct::new('|', Spacing::Alone).with_span(Span::mixed_site()), 105 | } 106 | } 107 | } 108 | 109 | impl PopParsedFrom for Or { 110 | type Parsed = Self; 111 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result<Self::Parsed, ()> { 112 | input 113 | .pop_or_replace(|tts, _| match tts { 114 | [TokenTree::Punct(or)] if or.as_char() == '|' && or.spacing() == Spacing::Alone => { 115 | Ok(Self { or }) 116 | } 117 | other => Err(other), 118 | }) 119 | .map_err(|spans| { 120 | errors.push(Error::new(ErrorPriority::GRAMMAR, "Expected `|`.", spans)) 121 | }) 122 | } 123 | } 124 | 125 | impl IntoTokens for Or { 126 | fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>) { 127 | self.or.into_tokens(root, tokens) 128 | } 129 | } 130 | 131 | // `_` 132 | impl Default for Underscore { 133 | fn default() -> Self { 134 | Self { 135 | underscore: Punct::new('_', Spacing::Alone).with_span(Span::mixed_site()), 136 | } 137 | } 138 | } 139 | 140 | impl PopParsedFrom for Underscore { 141 | type Parsed = Self; 142 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result<Self::Parsed, ()> { 143 | input 144 | .pop_or_replace(|tts, _| match tts { 145 | [TokenTree::Punct(underscore)] if underscore.as_char() == '_' => { 146 | Ok(Self { underscore }) 147 | } 148 | other => Err(other), 149 | }) 150 | .map_err(|spans| { 151 | errors.push(Error::new(ErrorPriority::GRAMMAR, "Expected `_`.", spans)) 152 | }) 153 | } 154 | } 155 | 156 | impl IntoTokens for Underscore { 157 | fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>) { 158 | self.underscore.into_tokens(root, tokens) 159 | } 160 | } 161 | 162 | // `.` 163 | impl Default for Dot { 164 | fn default() -> Self { 165 | Self { 166 | dot: Punct::new('.', Spacing::Alone).with_span(Span::mixed_site()), 167 | } 168 | } 169 | } 170 | 171 | impl PopParsedFrom for Dot { 172 | type Parsed = Self; 173 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result<Self::Parsed, ()> { 174 | input 175 | .pop_or_replace(|tts, _| match tts { 176 | [TokenTree::Punct(dot)] 177 | if dot.as_char() == '.' 
&& dot.spacing() == Spacing::Alone => 178 | { 179 | Ok(Self { dot }) 180 | } 181 | other => Err(other), 182 | }) 183 | .map_err(|spans| { 184 | errors.push(Error::new(ErrorPriority::GRAMMAR, "Expected `.`.", spans)) 185 | }) 186 | } 187 | } 188 | 189 | impl IntoTokens for Dot { 190 | fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>) { 191 | self.dot.into_tokens(root, tokens) 192 | } 193 | } 194 | 195 | // `..` 196 | impl Default for DotDot { 197 | fn default() -> Self { 198 | Self { 199 | dot0: Punct::new('.', Spacing::Joint).with_span(Span::mixed_site()), 200 | dot1: Punct::new('.', Spacing::Alone).with_span(Span::mixed_site()), 201 | } 202 | } 203 | } 204 | 205 | impl PopParsedFrom for DotDot { 206 | type Parsed = Self; 207 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result<Self::Parsed, ()> { 208 | input 209 | .pop_or_replace(|tts, rest| match tts { 210 | [TokenTree::Punct(dot0), TokenTree::Punct(dot1)] 211 | if dot0.as_char() == '.' 212 | && dot0.spacing() == Spacing::Joint 213 | && dot1.as_char() == '.' 214 | && (dot1.spacing() == Spacing::Alone || !matches!(rest.front(), Some(TokenTree::Punct(next_punct)) if matches!(next_punct.as_char(), '.' | '='))) 215 | => { Ok(Self{dot0, dot1}) } 216 | other => Err(other), 217 | }) 218 | .map_err(|spans| { 219 | errors.push(Error::new(ErrorPriority::GRAMMAR, "Expected `..`.", spans)) 220 | }) 221 | } 222 | } 223 | 224 | impl IntoTokens for DotDot { 225 | fn into_tokens(self, _root: &TokenStream, tokens: &mut impl Extend<TokenTree>) { 226 | let Self { dot0, dot1 } = self; 227 | tokens.extend([dot0.into(), dot1.into()]) 228 | } 229 | } 230 | 231 | // `,` 232 | impl Default for Comma { 233 | fn default() -> Self { 234 | Self { 235 | comma: Punct::new(',', Spacing::Alone).with_span(Span::mixed_site()), 236 | } 237 | } 238 | } 239 | 240 | impl PopParsedFrom for Comma { 241 | type Parsed = Self; 242 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result<Self::Parsed, ()> { 243 | input 244 | .pop_or_replace(|tts, _| match tts { 245 | [TokenTree::Punct(comma)] if comma.as_char() == ',' => Ok(Self { comma }), 246 | other => Err(other), 247 | }) 248 | .map_err(|spans| { 249 | errors.push(Error::new(ErrorPriority::GRAMMAR, "Expected `,`.", spans)) 250 | }) 251 | } 252 | } 253 | 254 | impl IntoTokens for Comma { 255 | fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>) { 256 | self.comma.into_tokens(root, tokens) 257 | } 258 | } 259 | 260 | // `;` 261 | impl Default for Semi { 262 | fn default() -> Self { 263 | Self { 264 | semi: Punct::new(';', Spacing::Alone).with_span(Span::mixed_site()), 265 | } 266 | } 267 | } 268 | 269 | impl PopParsedFrom for Semi { 270 | type Parsed = Self; 271 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result<Self::Parsed, ()> { 272 | input 273 | .pop_or_replace(|tts, _| match tts { 274 | [TokenTree::Punct(semi)] if semi.as_char() == ';' => Ok(Self { semi }), 275 | other => Err(other), 276 | }) 277 | .map_err(|spans| { 278 | errors.push(Error::new(ErrorPriority::GRAMMAR, "Expected `;`.", spans)) 279 | }) 280 | } 281 | } 282 | 283 | impl IntoTokens for Semi { 284 | fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>) { 285 | self.semi.into_tokens(root, tokens) 286 | } 287 | } 288 | 289 | // `:` 290 | impl Default for Colon { 291 | fn default() -> Self { 292 | Self { 293 | colon: Punct::new(':', Spacing::Alone).with_span(Span::mixed_site()), 294 | } 295 | } 296 | } 297 | 298 | impl PopParsedFrom for Colon { 299 | type Parsed = Self; 300 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) 
-> Result<Self::Parsed, ()> { 301 | input 302 | .pop_or_replace(|tts, _| match tts { 303 | [TokenTree::Punct(colon)] 304 | if colon.as_char() == ':' && colon.spacing() == Spacing::Alone => 305 | { 306 | Ok(Self { colon }) 307 | } 308 | other => Err(other), 309 | }) 310 | .map_err(|spans| { 311 | errors.push(Error::new(ErrorPriority::GRAMMAR, "Expected `:`.", spans)) 312 | }) 313 | } 314 | } 315 | 316 | impl IntoTokens for Colon { 317 | fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>) { 318 | self.colon.into_tokens(root, tokens) 319 | } 320 | } 321 | 322 | // `::` 323 | impl Default for PathSep { 324 | fn default() -> Self { 325 | Self { 326 | colon0: Punct::new(':', Spacing::Joint).with_span(Span::mixed_site()), 327 | colon1: Punct::new(':', Spacing::Alone).with_span(Span::mixed_site()), 328 | } 329 | } 330 | } 331 | 332 | impl PopParsedFrom for PathSep { 333 | type Parsed = Self; 334 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result<Self::Parsed, ()> { 335 | input 336 | .pop_or_replace(|tts, _| match tts { 337 | [TokenTree::Punct(colon0), TokenTree::Punct(colon1)] 338 | if colon0.as_char() == ':' 339 | && colon0.spacing() == Spacing::Joint 340 | && colon1.as_char() == ':' 341 | && colon1.spacing() == Spacing::Alone => 342 | { 343 | Ok(Self { colon0, colon1 }) 344 | } 345 | other => Err(other), 346 | }) 347 | .map_err(|spans| { 348 | errors.push(Error::new(ErrorPriority::GRAMMAR, "Expected `::`.", spans)) 349 | }) 350 | } 351 | } 352 | 353 | impl IntoTokens for PathSep { 354 | fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>) { 355 | self.colon0.into_tokens(root, tokens); 356 | self.colon1.into_tokens(root, tokens); 357 | } 358 | } 359 | 360 | // `->` 361 | impl Default for RArrow { 362 | fn default() -> Self { 363 | Self { 364 | minus: Punct::new('-', Spacing::Joint).with_span(Span::mixed_site()), 365 | gt: Punct::new('>', Spacing::Alone).with_span(Span::mixed_site()), 366 | } 367 | } 368 | } 369 | 370 | impl PopParsedFrom for RArrow { 371 | type Parsed = Self; 372 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result<Self::Parsed, ()> { 373 | input 374 | .pop_or_replace(|tts, _| match tts { 375 | [TokenTree::Punct(minus), TokenTree::Punct(gt)] 376 | if minus.as_char() == '-' 377 | && minus.spacing() == Spacing::Joint 378 | && gt.as_char() == '>' => 379 | { 380 | Ok(Self { minus, gt }) 381 | } 382 | other => Err(other), 383 | }) 384 | .map_err(|spans| { 385 | errors.push(Error::new(ErrorPriority::GRAMMAR, "Expected `->`.", spans)) 386 | }) 387 | } 388 | } 389 | 390 | impl IntoTokens for RArrow { 391 | fn into_tokens(self, _root: &TokenStream, tokens: &mut impl Extend<TokenTree>) { 392 | let Self { minus, gt } = self; 393 | tokens.extend([minus.into(), gt.into()]) 394 | } 395 | } 396 | 397 | // `#` 398 | impl Default for Pound { 399 | fn default() -> Self { 400 | Self { 401 | pound: Punct::new('#', Spacing::Alone).with_span(Span::mixed_site()), 402 | } 403 | } 404 | } 405 | 406 | impl PopParsedFrom for Pound { 407 | type Parsed = Self; 408 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result<Self::Parsed, ()> { 409 | input 410 | .pop_or_replace(|tts, _| match tts { 411 | [TokenTree::Punct(pound)] if pound.as_char() == '#' => Ok(Self { pound }), 412 | other => Err(other), 413 | }) 414 | .map_err(|spans| { 415 | errors.push(Error::new(ErrorPriority::GRAMMAR, "Expected `#`.", spans)) 416 | }) 417 | } 418 | } 419 | 420 | impl IntoTokens for Pound { 421 | fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>) { 422 | self.pound.into_tokens(root, tokens) 423 | } 424 | } 425 | 
426 | // `$` 427 | impl Default for Dollar { 428 | fn default() -> Self { 429 | Self { 430 | dollar: Punct::new('$', Spacing::Alone).with_span(Span::mixed_site()), 431 | } 432 | } 433 | } 434 | 435 | impl PopParsedFrom for Dollar { 436 | type Parsed = Self; 437 | fn pop_parsed_from(input: &mut Input, errors: &mut Errors) -> Result<Self::Parsed, ()> { 438 | input 439 | .pop_or_replace(|tts, _| match tts { 440 | [TokenTree::Punct(dollar)] 441 | if dollar.as_char() == '$' && dollar.spacing() == Spacing::Alone => 442 | { 443 | Ok(Self { dollar }) 444 | } 445 | other => Err(other), 446 | }) 447 | .map_err(|spans| { 448 | errors.push(Error::new(ErrorPriority::GRAMMAR, "Expected `$`.", spans)) 449 | }) 450 | } 451 | } 452 | 453 | impl IntoTokens for Dollar { 454 | fn into_tokens(self, root: &TokenStream, tokens: &mut impl Extend<TokenTree>) { 455 | self.dollar.into_tokens(root, tokens) 456 | } 457 | } 458 | --------------------------------------------------------------------------------
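The punctuation types generated by the `punctuation!` table above, together with the hand-written `PopParsedFrom` and `IntoTokens` impls, are driven through the trait methods visible in this file: `PeekFrom::peek_from(&Input) -> bool`, `PopParsedFrom::pop_parsed_from(&mut Input, &mut Errors)`, and `IntoTokens::into_tokens(self, root, tokens)`. Below is a minimal sketch of that flow for `Comma`, under stated assumptions: the `input_from` helper is hypothetical (how an `Input` is built from a `TokenStream` is not shown here), and the `loess_rust_lex::lex::token::punct` import path and a crate-root `PeekFrom` export are likewise assumed rather than confirmed by this file.

```rust
// Illustrative sketch only; see the assumptions noted above.
use loess::{Errors, Input, IntoTokens, PeekFrom, PopParsedFrom};
use loess_rust_lex::lex::token::punct::Comma; // Assumed module path.
use proc_macro2::{TokenStream, TokenTree};

/// Hypothetical stand-in: the real `Input` constructor is not shown in this file.
fn input_from(stream: TokenStream) -> Input {
    unimplemented!("replace with loess's actual `Input` construction")
}

/// Consumes one leading `,` (if present) and re-emits it, collecting the output
/// through `IntoTokens` the same way the impls above do.
fn echo_leading_comma(stream: TokenStream, root: &TokenStream) -> TokenStream {
    let mut input = input_from(stream);
    let mut errors = Errors::new();
    let mut out = Vec::<TokenTree>::new();

    // Peek first so that no grammar error is recorded when there is no comma at all.
    if Comma::peek_from(&input) {
        if let Ok(comma) = Comma::pop_parsed_from(&mut input, &mut errors) {
            // `Vec<TokenTree>` satisfies the `impl Extend<TokenTree>` parameter.
            comma.into_tokens(root, &mut out);
        }
    }

    out.into_iter().collect()
}
```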