├── Cargo.toml ├── flexgen ├── examples │ └── basic │ │ ├── flexgen.toml │ │ ├── fib.rs │ │ └── main.rs ├── Cargo.toml └── src │ ├── var.rs │ ├── lib.rs │ └── config.rs ├── .gitignore ├── use_builder ├── Cargo.toml ├── README.md └── src │ └── lib.rs ├── doc_test ├── Cargo.toml ├── README.md └── src │ └── lib.rs ├── rust_format ├── Cargo.toml ├── README.md └── src │ ├── lib.rs │ └── replace.rs ├── README.md └── LICENSE /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = ["doc_test", "flexgen", "rust_format", "use_builder"] 3 | -------------------------------------------------------------------------------- /flexgen/examples/basic/flexgen.toml: -------------------------------------------------------------------------------- 1 | [fragment_lists] 2 | fib = [ "function", "main" ] 3 | 4 | [files.fib] 5 | path = "fib.rs" 6 | fragment_list = "fib" 7 | 8 | [files.fib.vars] 9 | fib = "$ident$fibonacci" 10 | one = "$int_lit$1" 11 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | target/ 4 | 5 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 6 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 7 | Cargo.lock 8 | 9 | # These are backup files generated by rustfmt 10 | **/*.rs.bk 11 | 12 | 13 | # Added by cargo 14 | # 15 | # already existing elements were commented out 16 | 17 | /target 18 | #Cargo.lock 19 | .idea/ 20 | tarpaulin-report.html 21 | -------------------------------------------------------------------------------- /use_builder/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "use-builder" 3 | version = "0.1.0" 4 | authors = ["Scott Meeuwsen "] 5 | 
license = "MIT OR Apache-2.0" 6 | description = "A crate to build source code use sections by combining multiple (possibly duplicate) use section inputs" 7 | repository = "https://github.com/nu11ptr/flexgen/tree/master/use_builder" 8 | documentation = "https://docs.rs/use-builder" 9 | keywords = ["quote", "syn", "use"] 10 | categories = ["development-tools", "value-formatting"] 11 | readme = "README.md" 12 | edition = "2021" 13 | 14 | [dependencies] 15 | indexmap = "1.8" 16 | quote = { version = "1.0", default-features = false } 17 | syn = { version = "1.0", default-features = false, features = [ "clone-impls", "extra-traits", "full", "parsing", "printing" ] } 18 | 19 | [dev-dependencies] 20 | assert_unordered = "0.3.3" 21 | -------------------------------------------------------------------------------- /doc_test/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "quote-doctest" 3 | version = "0.3.2" 4 | authors = ["Scott Meeuwsen "] 5 | license = "MIT OR Apache-2.0" 6 | description = "A simple doctest generator for quote" 7 | repository = "https://github.com/nu11ptr/flexgen/tree/master/doc_test" 8 | documentation = "https://docs.rs/quote-doctest" 9 | keywords = ["syn", "macros", "quote", "doctest"] 10 | categories = ["development-tools::procedural-macro-helpers"] 11 | readme = "README.md" 12 | edition = "2021" 13 | 14 | [package.metadata.docs.rs] 15 | all-features = true 16 | rustdoc-args = ["--cfg", "docsrs"] 17 | 18 | [features] 19 | default = ["pretty_please"] 20 | pretty_please = ["rust-format/pretty_please"] 21 | 22 | [dependencies] 23 | proc-macro2 = "1.0" 24 | quote = "1.0" 25 | rust-format = { version = "0.3.4", features = ["post_process", "token_stream"] } 26 | 27 | [dev-dependencies] 28 | pretty_assertions = "1.2" 29 | -------------------------------------------------------------------------------- /flexgen/Cargo.toml: 
-------------------------------------------------------------------------------- 1 | [package] 2 | name = "flexgen" 3 | version = "0.4.5" 4 | authors = ["Scott Meeuwsen "] 5 | license = "MIT OR Apache-2.0" 6 | description = "A flexible, yet simple quote-based code generator for creating beautiful Rust code" 7 | repository = "https://github.com/nu11ptr/flexgen" 8 | documentation = "https://docs.rs/flexgen" 9 | keywords = ["syn", "quote", "doctest", "generate"] 10 | categories = ["development-tools"] 11 | readme = "../README.md" 12 | edition = "2021" 13 | 14 | [dependencies] 15 | flexstr = { version = "0.9.2", features = ["serde"] } 16 | heck = "0.4" 17 | proc-macro2 = "1.0" 18 | quote = "1.0" 19 | rayon = "1.5" 20 | rust-format = { version = "0.3.4", features = ["post_process", "pretty_please", "token_stream"] } 21 | serde = { version = "1", features = ["derive"] } 22 | syn = { version = "1.0", default-features = false, features = ["clone-impls", "extra-traits", "full", "parsing", "printing"] } 23 | thiserror = "1.0" 24 | toml = "0.5" 25 | use-builder = "0.1" 26 | 27 | [dev-dependencies] 28 | pretty_assertions = "1" 29 | quote-doctest = "0.3.2" 30 | -------------------------------------------------------------------------------- /rust_format/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "rust-format" 3 | version = "0.3.4" 4 | authors = ["Scott Meeuwsen "] 5 | license = "MIT OR Apache-2.0" 6 | description = "A Rust source code formatting crate with a unified interface for string, file, and TokenStream input" 7 | repository = "https://github.com/nu11ptr/flexgen/tree/master/rust_format" 8 | documentation = "https://docs.rs/rust-format" 9 | keywords = ["rustfmt", "prettyplease", "formatter", "quote"] 10 | categories = ["development-tools", "value-formatting"] 11 | readme = "README.md" 12 | edition = "2021" 13 | 14 | [package.metadata.docs.rs] 15 | all-features = true 16 | rustdoc-args = ["--cfg", 
"docsrs"] 17 | 18 | [features] 19 | post_process = ["syn/parsing"] 20 | pretty_please = ["prettyplease", "syn/full", "syn/parsing", "token_stream"] 21 | token_stream = ["proc-macro2"] 22 | 23 | [dependencies] 24 | prettyplease = { version = "0.1", optional = true } 25 | proc-macro2 = { version = "1.0", optional = true } 26 | syn = { version = "1.0", default-features = false, optional = true } 27 | 28 | [dev-dependencies] 29 | pretty_assertions = "1.2" 30 | quote = "1.0" 31 | temp-env = "0.2" 32 | tempfile = "3.3" 33 | -------------------------------------------------------------------------------- /flexgen/examples/basic/fib.rs: -------------------------------------------------------------------------------- 1 | // +-------------------------------------------------------------------------------------------------+ 2 | // | WARNING: This file has been auto-generated using FlexGen (https://github.com/nu11ptr/flexgen). | 3 | // | Any manual modifications to this file will be overwritten the next time this file is generated. 
| 4 | // +-------------------------------------------------------------------------------------------------+ 5 | 6 | use std::error::Error as StdError; 7 | use std::io::stdin; 8 | 9 | /// This will run a compare between fib inputs and the outputs 10 | /// ``` 11 | /// assert_eq!(fibonacci(10), 55); 12 | /// assert_eq!(fibonacci(1), 1); 13 | /// ``` 14 | #[inline] 15 | fn fibonacci(n: u64) -> u64 { 16 | match n { 17 | 0 => 0, 18 | 1 => 1, 19 | n => fibonacci(n - 1) + fibonacci(n - 2), 20 | } 21 | } 22 | 23 | /// This is the main function 24 | fn main() -> Result<(), Box> { 25 | println!("Enter a number:"); 26 | let mut line = String::new(); 27 | stdin().read_line(&mut line)?; 28 | let num: u64 = line.trim_end().parse()?; 29 | 30 | // 31 | // Calculate fibonacci for user input 32 | // 33 | let answer = fibonacci(num); 34 | println!("The number '{num}' in the fibonacci sequence is: {answer}"); 35 | 36 | Ok(()) 37 | } 38 | -------------------------------------------------------------------------------- /use_builder/README.md: -------------------------------------------------------------------------------- 1 | # use-builder 2 | 3 | [![Crate](https://img.shields.io/crates/v/use-builder)](https://crates.io/crates/use-builder) 4 | [![Docs](https://docs.rs/use-builder/badge.svg)](https://docs.rs/use-builder) 5 | 6 | A crate to build source code use sections by combining multiple (possibly duplicate) 7 | use section inputs. 8 | 9 | NOTE: This is a fairly specialized crate. The only likely use case is really that 10 | of compiling source code snippets into files, like flexgen does. 11 | 12 | ## Usage 13 | 14 | ```toml 15 | [dependencies] 16 | use-builder = "0.1" 17 | ``` 18 | 19 | ## Example 20 | 21 | ```rust 22 | use assert_unordered::assert_eq_unordered; 23 | use quote::quote; 24 | use use_builder::{UseBuilder, UseItems}; 25 | 26 | fn main() { 27 | // #1 - Build a two or more use trees and convert into `UseItems` (wrapped `Vec`) 28 | 29 | let use1 = quote! 
{ 30 | use crate::Test; 31 | use std::error::{Error as StdError}; 32 | use std::fmt::Debug; 33 | }; 34 | 35 | let use2 = quote! { 36 | use syn::ItemUse; 37 | use std::fmt::Display; 38 | use crate::*; 39 | }; 40 | 41 | let items1: UseItems = syn::parse2(use1).unwrap(); 42 | let items2: UseItems = syn::parse2(use2).unwrap(); 43 | 44 | // #2 - Parse, process, and extract into sections 45 | 46 | let builder = UseBuilder::from_uses(vec![items1, items2]); 47 | let (std_use, ext_use, crate_use) = builder.into_items_sections().unwrap(); 48 | 49 | // #3 - Validate our response matches expectation 50 | 51 | let std_expected = quote! { 52 | use std::error::Error as StdError; 53 | use std::fmt::{Debug, Display}; 54 | }; 55 | let std_expected = syn::parse2::(std_expected).unwrap().into_inner(); 56 | 57 | let ext_expected = quote! { 58 | use syn::ItemUse; 59 | }; 60 | let ext_expected = syn::parse2::(ext_expected).unwrap().into_inner(); 61 | 62 | let crate_expected = quote! { 63 | use crate::*; 64 | }; 65 | let crate_expected = syn::parse2::(crate_expected).unwrap().into_inner(); 66 | 67 | assert_eq_unordered!(std_expected, std_use); 68 | assert_eq_unordered!(ext_expected, ext_use); 69 | assert_eq_unordered!(crate_expected, crate_use); 70 | } 71 | ``` 72 | 73 | ## License 74 | 75 | This project is licensed optionally under either: 76 | 77 | * Apache License, Version 2.0, (LICENSE-APACHE 78 | or https://www.apache.org/licenses/LICENSE-2.0) 79 | * MIT license (LICENSE-MIT or https://opensource.org/licenses/MIT) 80 | -------------------------------------------------------------------------------- /flexgen/examples/basic/main.rs: -------------------------------------------------------------------------------- 1 | use flexgen::config::Config; 2 | use flexgen::var::TokenVars; 3 | use flexgen::{import_vars, register_fragments, CodeFragment, CodeGenerator, Error}; 4 | use proc_macro2::TokenStream; 5 | use quote::quote; 6 | use quote_doctest::doc_test; 7 | 8 | struct DocTest; 9 | 10 | 
impl CodeFragment for DocTest { 11 | fn generate(&self, vars: &TokenVars) -> Result { 12 | import_vars! { vars => fib, one }; 13 | 14 | let test = quote! { 15 | assert_eq!(#fib(10), 55); 16 | assert_eq!(#fib(#one), #one); 17 | }; 18 | 19 | Ok(doc_test!(test)?) 20 | } 21 | } 22 | 23 | struct Function; 24 | 25 | impl CodeFragment for Function { 26 | fn generate(&self, vars: &TokenVars) -> Result { 27 | import_vars! { vars => fib, one }; 28 | 29 | let doc_test = DocTest.generate(vars)?; 30 | 31 | Ok(quote! { 32 | /// This will run a compare between fib inputs and the outputs 33 | #doc_test 34 | #[inline] 35 | fn #fib(n: u64) -> u64 { 36 | match n { 37 | 0 => 0, 38 | #one => #one, 39 | n => #fib(n - 1) + #fib(n - 2), 40 | } 41 | } 42 | }) 43 | } 44 | } 45 | 46 | struct Main; 47 | 48 | impl CodeFragment for Main { 49 | fn uses(&self, _vars: &TokenVars) -> Result { 50 | Ok(quote! { 51 | use std::error::{Error as StdError}; 52 | use std::io::stdin; 53 | }) 54 | } 55 | 56 | fn generate(&self, vars: &TokenVars) -> Result { 57 | import_vars! { vars => fib }; 58 | 59 | Ok(quote! 
{ 60 | /// This is the main function 61 | fn main() -> Result<(), Box> { 62 | println!("Enter a number:"); 63 | let mut line = String::new(); 64 | stdin().read_line(&mut line)?; 65 | let num: u64 = line.trim_end().parse()?; 66 | _blank_!(); 67 | 68 | _comment_!("\nCalculate fibonacci for user input\n\n"); 69 | let answer = #fib(num); 70 | println!("The number '{num}' in the fibonacci sequence is: {answer}"); 71 | _blank_!(); 72 | 73 | Ok(()) 74 | } 75 | }) 76 | } 77 | } 78 | 79 | fn main() -> Result<(), Error> { 80 | let fragments = register_fragments!(Function, Main); 81 | let config = Config::from_default_toml_file()?; 82 | let gen = CodeGenerator::new(fragments, config)?; 83 | gen.generate_files() 84 | } 85 | -------------------------------------------------------------------------------- /doc_test/README.md: -------------------------------------------------------------------------------- 1 | # quote-doctest 2 | 3 | [![Crate](https://img.shields.io/crates/v/quote-doctest)](https://crates.io/crates/quote-doctest) 4 | [![Docs](https://docs.rs/quote-doctest/badge.svg)](https://docs.rs/quote-doctest) 5 | 6 | A simple doctest and doc comment generator for [quote](https://crates.io/crates/quote) 7 | 8 | ## Overview 9 | 10 | Currently, quote 11 | [does not support](https://docs.rs/quote/latest/quote/macro.quote.html#interpolating-text-inside-of-doc-comments) 12 | interpolation inside of comments, which means no customized doctests. This 13 | crate provides a simple mechanism to generate doctests and doc comments for 14 | inclusion in generated code. 15 | 16 | ```toml 17 | [dependencies] 18 | quote-doctest = "0.3" 19 | ``` 20 | 21 | ## Example 22 | 23 | Using the `doc_test` macro, we can take any `TokenStream` and turn it into 24 | a doctest `TokenStream` that can be interpolated in any `quote` macro 25 | invocation. 26 | 27 | The `doc_comment` function takes any string and turns it into one or more 28 | comments inside a `TokenStream`. 
29 | 30 | ```rust 31 | use quote::quote; 32 | use quote_doctest::{doc_comment, doc_test, FormatDocTest}; 33 | 34 | fn main() { 35 | // Takes any `TokenStream` as input (but typically `quote` would be used) 36 | let test = doc_test!(quote! { 37 | _comment_!("Calling fibonacci with 10 returns 55"); 38 | assert_eq!(fibonacci(10), 55); 39 | 40 | _blank_!(); 41 | _comment_!("Calling fibonacci with 1 simply returns 1"); 42 | assert_eq!(fibonacci(1), 1); 43 | }).unwrap(); 44 | 45 | let comment = doc_comment("This compares fib inputs and outputs:\n\n"); 46 | 47 | // Interpolates into a regular `quote` invocation 48 | let actual = quote! { 49 | #comment 50 | #test 51 | fn fibonacci(n: u64) -> u64 { 52 | match n { 53 | 0 => 1, 54 | 1 => 1, 55 | n => fibonacci(n - 1) + fibonacci(n - 2), 56 | } 57 | } 58 | }; 59 | 60 | // This is what is generated: 61 | let expected = quote! { 62 | /// This compares fib inputs and outputs: 63 | /// 64 | /// ``` 65 | /// // Calling fibonacci with 10 returns 55 66 | /// assert_eq!(fibonacci(10), 55); 67 | /// 68 | /// // Calling fibonacci with 1 simply returns 1 69 | /// assert_eq!(fibonacci(1), 1); 70 | /// ``` 71 | fn fibonacci(n: u64) -> u64 { 72 | match n { 73 | 0 => 1, 74 | 1 => 1, 75 | n => fibonacci(n - 1) + fibonacci(n - 2), 76 | } 77 | } 78 | }; 79 | 80 | assert_eq!(expected.format_tokens().unwrap(), actual.format_tokens().unwrap()); 81 | } 82 | ``` 83 | 84 | ## Notes 85 | - It leverages the [rust-format](https://crates.io/crates/rust-format) crate which can 86 | use both [prettyplease](https://crates.io/crates/prettyplease) 87 | (default) or the system `rustfmt` for formatting the doctests 88 | - When using `rustfmt`, it honors the `RUSTFMT` environment variable if set 89 | - Since comments and blank lines are whitespace to the parser, marker macros 90 | are used to map out where the comments and blank lines should appear. 
91 | These will be replaced by comments and blank lines respectively in the 92 | doctest (as shown in the example above) 93 | 94 | ## License 95 | 96 | This project is licensed optionally under either: 97 | 98 | * Apache License, Version 2.0, (LICENSE-APACHE 99 | or https://www.apache.org/licenses/LICENSE-2.0) 100 | * MIT license (LICENSE-MIT or https://opensource.org/licenses/MIT) 101 | -------------------------------------------------------------------------------- /rust_format/README.md: -------------------------------------------------------------------------------- 1 | # rust-format 2 | 3 | [![Crate](https://img.shields.io/crates/v/rust-format)](https://crates.io/crates/rust-format) 4 | [![Docs](https://docs.rs/rust-format/badge.svg)](https://docs.rs/rust-format) 5 | 6 | A Rust source code formatting crate with a unified interface for string, file, and 7 | [TokenStream](https://docs.rs/proc-macro2/latest/proc_macro2/struct.TokenStream.html) 8 | input. It currently supports [rustfmt](https://crates.io/crates/rustfmt-nightly) 9 | and [prettyplease](https://crates.io/crates/prettyplease). 10 | 11 | It optionally supports post-processing replacement of special blank/comment markers for 12 | inserting blank lines and comments in `TokenStream` generated source code 13 | respectively (as used by [quote-doctest](https://crates.io/crates/quote-doctest) 14 | for inserting blanks/comments in generated doctests). It additionally supports 15 | converting doc blocks (`#[doc =""]`) into doc comments (`///`). 16 | 17 | NOTE: This is primarily to support `rustfmt` as `prettyplease` automatically 18 | converts doc blocks into doc comments (but for `rustfmt` it requires nightly and 19 | a configuration option). 20 | 21 | ## Usage 22 | 23 | ```toml 24 | [dependencies] 25 | rust-format = "0.3" 26 | ``` 27 | 28 | ### Optional Features 29 | 30 | * `post_process` - enables support for post-process conversion of special 31 | "marker macros" into blank lines/comments. 
It additionally supports converting 32 | doc blocks (`#[doc]`) into doc comments (`///`) 33 | * `pretty_please` - enables [prettyplease](https://crates.io/crates/prettyplease) 34 | formatting support 35 | * `token_stream` - enables formatting from 36 | [TokenStream](https://docs.rs/proc-macro2/latest/proc_macro2/struct.TokenStream.html) 37 | input 38 | 39 | ## Examples 40 | 41 | Simple example using default options of `RustFmt`: 42 | 43 | ```rust 44 | use rust_format::{Formatter, RustFmt}; 45 | 46 | fn main() { 47 | let source = r#"fn main() { println!("Hello World!"); }"#; 48 | 49 | let actual = RustFmt::default().format_str(source).unwrap(); 50 | let expected = r#"fn main() { 51 | println!("Hello World!"); 52 | } 53 | "#; 54 | 55 | assert_eq!(expected, actual); 56 | } 57 | ``` 58 | 59 | Using a custom configuration: 60 | 61 | ```rust 62 | use rust_format::{Config, Edition, Formatter, RustFmt}; 63 | 64 | fn main() { 65 | let source = r#"use std::marker; use std::io; mod test; mod impls;"#; 66 | 67 | let mut config = Config::new_str() 68 | .edition(Edition::Rust2018) 69 | .option("reorder_imports", "false") 70 | .option("reorder_modules", "false"); 71 | let rustfmt = RustFmt::from_config(config); 72 | 73 | let actual = rustfmt.format_str(source).unwrap(); 74 | let expected = r#"use std::marker; 75 | use std::io; 76 | mod test; 77 | mod impls; 78 | "#; 79 | 80 | assert_eq!(expected, actual); 81 | } 82 | ``` 83 | 84 | `RustFmt` with post-processing: 85 | 86 | ```rust 87 | use quote::quote; 88 | use rust_format::{Config, Formatter, PostProcess, RustFmt}; 89 | 90 | fn main() { 91 | let source = quote! 
{ 92 | #[doc = " This is main"] 93 | fn main() { 94 | _blank_!(); 95 | _comment_!("\nThis prints hello world\n\n"); 96 | println!("Hello World!"); 97 | } 98 | }; 99 | 100 | let mut config = Config::new_str() 101 | .post_proc(PostProcess::ReplaceMarkersAndDocBlocks); 102 | let actual = RustFmt::from_config(config).format_tokens(source).unwrap(); 103 | let expected = r#"/// This is main 104 | fn main() { 105 | 106 | // 107 | // This prints hello world 108 | // 109 | println!("Hello World!"); 110 | } 111 | "#; 112 | 113 | assert_eq!(expected, actual); 114 | } 115 | ``` 116 | 117 | ## License 118 | 119 | This project is licensed optionally under either: 120 | 121 | * Apache License, Version 2.0, (LICENSE-APACHE 122 | or https://www.apache.org/licenses/LICENSE-2.0) 123 | * MIT license (LICENSE-MIT or https://opensource.org/licenses/MIT) 124 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # flexgen 2 | 3 | [![Crate](https://img.shields.io/crates/v/flexgen)](https://crates.io/crates/flexgen) 4 | [![Docs](https://docs.rs/flexgen/badge.svg)](https://docs.rs/flexgen) 5 | 6 | A flexible, yet simple quote-based code generator for creating beautiful Rust code 7 | 8 | ## Why? 9 | 10 | Rust has two types of macros, and they are both very popular, however, they 11 | are not always the optimal choice. They can impact build performance and make 12 | the source code more obfuscated to read and study. Regular macros make it difficult 13 | to do much more than simple variable substitution and using `quote` via proc-macro 14 | doesn't allow variable interpolation in doc blocks (see 15 | [quote-doctest](https://crates.io/crates/quote-doctest) for a solution). 16 | 17 | Code generation isn't perfect either. It creates excess code which is 18 | likely to be highly duplicated and thus create "noise". 
However, it can 19 | also be nice to have a complete set of source code available and easily 20 | reachable via the docs. Since we can generate it ahead of time, its impact 21 | on performance is the same as regular Rust code. 22 | 23 | The right solution likely depends on the use case. I personally think macros 24 | tend to be better for writing either very simple duplication or very fancy 25 | things that are hard or impossible without them. Code generation is more niche 26 | but works well for generating bulk wrapper code esp. for code that is slightly 27 | different per type and requires more logic to handle (esp. in doctests). 28 | 29 | ## Example 30 | 31 | It is probably easiest to look at the "fibonacci" example: 32 | [directory](https://github.com/nu11ptr/flexgen/tree/master/flexgen/examples/basic) 33 | 34 | * `fib.rs` - the generated file 35 | * `flexgen.toml` - the configuration file 36 | * `main.rs` - the source file that generates `fib.rs` 37 | 38 | To run yourself: 39 | 40 | 1. Change into the `examples/basic` directory 41 | 2. Delete the existing `fib.rs` file 42 | 3. Run: `cargo run --example basic` 43 | 4. Compile the new fib.rs file: `rustc fib.rs -C opt-level=3` 44 | 5. Run it: `./fib` 45 | 46 | ## Usage 47 | 48 | 1. Create a new binary crate (`flexgen` is a library, not a binary crate) 49 | 50 | 2. Edit `Cargo.toml` with any needed dependencies (at minimum, `flexgen`, but 51 | you will likely want `quote` and possibly `quote-doctest` as well) 52 | 53 | ```toml 54 | [dependencies] 55 | flexgen = "0.4" 56 | ``` 57 | 58 | 3. Edit your `main.rs` and add in one or more code fragments implementing 59 | `CodeFragment`. How much code a fragment contains is a process of trial and error, 60 | but typically it would be "one thing" (ie. one function). See the example above 61 | for more details. 
62 | 63 | ```rust 64 | // main.rs 65 | 66 | use flexgen::var::TokenVars; 67 | use flexgen::{import_vars, CodeFragment, Error}; 68 | use quote::quote; 69 | 70 | struct HelloWorld; 71 | 72 | impl CodeFragment for HelloWorld { 73 | fn generate(&self, vars: &TokenVars) -> Result { 74 | import_vars! { vars => hello }; 75 | 76 | Ok(quote! { 77 | fn main() { 78 | println!("{hello} world!"); 79 | } 80 | }) 81 | } 82 | } 83 | ``` 84 | 85 | 4. Create and edit `flexgen.toml` 86 | 87 | NOTE: All the possible options can be found in the test code 88 | [here](https://github.com/nu11ptr/flexgen/blob/68de04679ce568981c72fdde1db8f8987332964f/flexgen/src/config.rs#L316) 89 | 90 | ```toml 91 | # flexgen.toml 92 | 93 | [fragment_lists] 94 | hello = [ "hello_world" ] 95 | 96 | [files.hello] 97 | path = "hello.rs" 98 | fragment_list = "hello" 99 | 100 | [files.hello.vars] 101 | hello = "Hello" 102 | ``` 103 | 104 | 5. Add a `main` function to your `main.rs` file 105 | 106 | ```rust 107 | // main.rs 108 | 109 | use flexgen::config::Config; 110 | use flexgen::{register_fragments, Error, CodeGenerator}; 111 | 112 | fn main() -> Result<(), Error> { 113 | // Register all your code fragments 114 | let fragments = register_fragments!(HelloWorld); 115 | // Read in the configuration from our flexgen.toml file 116 | let config = Config::from_default_toml_file()?; 117 | // Create a new code generator from our fragments and config 118 | let gen = CodeGenerator::new(fragments, config)?; 119 | // Generate our 'hello.rs' file 120 | gen.generate_files() 121 | } 122 | ``` 123 | 124 | 6. 
Execute your binary to generate the code 125 | 126 | ``` 127 | cargo run 128 | ``` 129 | 130 | ## License 131 | 132 | This project is licensed optionally under either: 133 | 134 | * Apache License, Version 2.0, (LICENSE-APACHE 135 | or https://www.apache.org/licenses/LICENSE-2.0) 136 | * MIT license (LICENSE-MIT or https://opensource.org/licenses/MIT) 137 | -------------------------------------------------------------------------------- /flexgen/src/var.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::fmt; 3 | use std::str::FromStr; 4 | 5 | use flexstr::{shared_str, SharedStr, ToSharedStr}; 6 | use proc_macro2::TokenStream; 7 | use quote::ToTokens; 8 | 9 | use crate::Error; 10 | 11 | const IDENT: &str = "$ident$"; 12 | const INT_LIT: &str = "$int_lit$"; 13 | const TYPE: &str = "$type$"; 14 | 15 | /// A hashmap of variables for interpolation into [CodeFragments] 16 | pub(crate) type Vars = HashMap; 17 | 18 | /// Represents a map of variables ready for interpolation 19 | pub type TokenVars = HashMap; 20 | 21 | // *** Expand Vars *** 22 | 23 | #[doc(hidden)] 24 | #[inline] 25 | pub fn import_var<'vars>( 26 | vars: &'vars TokenVars, 27 | var: &'static str, 28 | ) -> Result<&'vars TokenValue, Error> { 29 | let var = shared_str!(var); 30 | let value = vars.get(&var).ok_or(Error::MissingVar(var))?; 31 | 32 | match value { 33 | TokenItem::Single(value) => Ok(value), 34 | TokenItem::List(_) => Err(Error::WrongItem), 35 | } 36 | } 37 | 38 | /// Import the variables from the [Config](crate::config::Config) into local variables that can be interpolated with `quote` 39 | #[macro_export] 40 | macro_rules! 
import_vars { 41 | // Allow trailing comma 42 | ($vars:ident => $($var:ident,)+) => { $crate::var::import_vars!($vars, $($var),+) }; 43 | ($vars:ident => $($var:ident),+) => { 44 | $( 45 | let $var = $crate::var::import_var($vars, stringify!($var))?; 46 | )+ 47 | }; 48 | } 49 | 50 | #[doc(hidden)] 51 | #[inline] 52 | pub fn import_list<'vars>( 53 | vars: &'vars TokenVars, 54 | var: &'static str, 55 | ) -> Result<&'vars [TokenValue], Error> { 56 | let var = shared_str!(var); 57 | let value = vars.get(&var).ok_or(Error::MissingVar(var))?; 58 | 59 | match value { 60 | TokenItem::List(value) => Ok(value), 61 | TokenItem::Single(_) => Err(Error::WrongItem), 62 | } 63 | } 64 | 65 | /// Import the list of variables from the [Config](crate::config::Config) into local bindings that can be interpolated with `quote` 66 | #[macro_export] 67 | macro_rules! import_lists { 68 | // Allow trailing comma 69 | ($vars:ident => $($var:ident,)+) => { $crate::var::import_lists!($vars, $($var),+) }; 70 | ($vars:ident => $($var:ident),+) => { 71 | $( 72 | let $var = $crate::var::import_list($vars, stringify!($var))?; 73 | )+ 74 | }; 75 | } 76 | 77 | // *** CodeValue *** 78 | 79 | #[inline] 80 | fn strip_prefix(s: &str, prefix: &str) -> Option { 81 | if matches!(s.find(prefix), Some(idx) if idx == 0) { 82 | Some(s[prefix.len()..].to_shared_str()) 83 | } else { 84 | None 85 | } 86 | } 87 | 88 | #[derive(Clone, Debug, PartialEq)] 89 | pub(crate) enum CodeValue { 90 | Ident(SharedStr), 91 | IntLit(SharedStr), 92 | Type(SharedStr), 93 | } 94 | 95 | impl FromStr for CodeValue { 96 | type Err = Error; 97 | 98 | #[inline] 99 | fn from_str(s: &str) -> Result { 100 | if let Some(s) = strip_prefix(s, IDENT) { 101 | Ok(CodeValue::Ident(s)) 102 | } else if let Some(s) = strip_prefix(s, INT_LIT) { 103 | Ok(CodeValue::IntLit(s)) 104 | } else if let Some(s) = strip_prefix(s, TYPE) { 105 | Ok(CodeValue::Type(s)) 106 | } else { 107 | Err(Error::NotCodeItem(s.to_shared_str())) 108 | } 109 | } 110 | } 111 | 
112 | struct SynItemVisitor; 113 | 114 | impl<'de> serde::de::Visitor<'de> for SynItemVisitor { 115 | type Value = CodeValue; 116 | 117 | #[inline] 118 | fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { 119 | formatter.write_str("a string with a special prefix") 120 | } 121 | 122 | #[inline] 123 | fn visit_str(self, v: &str) -> Result 124 | where 125 | E: serde::de::Error, 126 | { 127 | v.parse() 128 | .map_err(|_| serde::de::Error::custom("Error deserializing 'str'")) 129 | } 130 | 131 | #[inline] 132 | fn visit_string(self, v: String) -> Result 133 | where 134 | E: serde::de::Error, 135 | { 136 | v.parse() 137 | .map_err(|_| serde::de::Error::custom("Error deserializing 'String'")) 138 | } 139 | } 140 | 141 | impl<'de> serde::de::Deserialize<'de> for CodeValue { 142 | #[inline] 143 | fn deserialize>(deserializer: D) -> Result { 144 | deserializer.deserialize_str(SynItemVisitor) 145 | } 146 | } 147 | 148 | // *** CodeTokenValue *** 149 | 150 | /// A single code-related token variable from the [Config](crate::config::Config) 151 | #[derive(Clone, Debug, PartialEq)] 152 | pub enum CodeTokenValue { 153 | /// An identifier 154 | Ident(syn::Ident), 155 | /// An integer literal 156 | IntLit(syn::LitInt), 157 | /// A type 158 | Type(Box), 159 | } 160 | 161 | impl CodeTokenValue { 162 | #[inline] 163 | pub(crate) fn new(item: &CodeValue) -> Result { 164 | match item { 165 | CodeValue::Ident(i) => Ok(CodeTokenValue::Ident(syn::parse_str::(i)?)), 166 | CodeValue::IntLit(i) => Ok(CodeTokenValue::IntLit(syn::parse_str::(i)?)), 167 | CodeValue::Type(t) => Ok(CodeTokenValue::Type(Box::new(syn::parse_str::( 168 | t, 169 | )?))), 170 | } 171 | } 172 | } 173 | 174 | impl fmt::Display for CodeTokenValue { 175 | #[inline] 176 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 177 | match self { 178 | CodeTokenValue::Ident(i) => ::fmt(i, f), 179 | CodeTokenValue::IntLit(i) => ::fmt(i, f), 180 | CodeTokenValue::Type(t) => { 181 | // Doesn't implement 
Display, so we get creative 182 | let type_str = t.to_token_stream().to_string().replace(" ", ""); 183 | write!(f, "{type_str}") 184 | } 185 | } 186 | } 187 | } 188 | 189 | impl ToTokens for CodeTokenValue { 190 | #[inline] 191 | fn to_tokens(&self, tokens: &mut TokenStream) { 192 | match self { 193 | CodeTokenValue::Ident(ident) => ident.to_tokens(tokens), 194 | CodeTokenValue::IntLit(lit) => lit.to_tokens(tokens), 195 | CodeTokenValue::Type(t) => t.to_tokens(tokens), 196 | } 197 | } 198 | } 199 | 200 | // *** VarItem *** 201 | 202 | #[derive(Clone, Debug, serde::Deserialize, PartialEq)] 203 | #[serde(untagged)] 204 | pub(crate) enum VarItem { 205 | List(Vec), 206 | Single(VarValue), 207 | } 208 | 209 | impl VarItem { 210 | #[inline] 211 | pub fn to_token_item(&self) -> Result { 212 | match self { 213 | VarItem::List(l) => { 214 | let items: Vec<_> = l 215 | .iter() 216 | .map(|item| item.to_token_value()) 217 | .collect::, Error>>()?; 218 | Ok(TokenItem::List(items)) 219 | } 220 | VarItem::Single(s) => Ok(TokenItem::Single(s.to_token_value()?)), 221 | } 222 | } 223 | } 224 | 225 | // *** VarValue *** 226 | 227 | #[derive(Clone, Debug, serde::Deserialize, PartialEq)] 228 | #[serde(untagged)] 229 | pub(crate) enum VarValue { 230 | Number(i64), 231 | Bool(bool), 232 | CodeValue(CodeValue), 233 | String(SharedStr), 234 | } 235 | 236 | impl VarValue { 237 | #[inline] 238 | fn to_token_value(&self) -> Result { 239 | Ok(match self { 240 | VarValue::Number(n) => TokenValue::Number(*n), 241 | VarValue::Bool(b) => TokenValue::Bool(*b), 242 | VarValue::CodeValue(c) => TokenValue::CodeValue(CodeTokenValue::new(c)?), 243 | VarValue::String(s) => TokenValue::String(s.clone()), 244 | }) 245 | } 246 | } 247 | 248 | // *** TokenItem *** 249 | 250 | /// Represents either a list of variables or a single variable from the [Config](crate::config::Config) 251 | #[derive(Clone, Debug, PartialEq)] 252 | pub enum TokenItem { 253 | /// A list of values 254 | List(Vec), 255 | /// A single 
value 256 | Single(TokenValue), 257 | } 258 | 259 | // *** TokenValue *** 260 | 261 | /// A single variable from the [Config](crate::config::Config) 262 | #[derive(Clone, Debug, PartialEq)] 263 | pub enum TokenValue { 264 | /// A numeric value 265 | Number(i64), 266 | /// A boolean value 267 | Bool(bool), 268 | /// A code token value 269 | CodeValue(CodeTokenValue), 270 | /// A string value 271 | String(SharedStr), 272 | } 273 | 274 | impl fmt::Display for TokenValue { 275 | #[inline] 276 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 277 | match self { 278 | TokenValue::Number(n) => ::fmt(n, f), 279 | TokenValue::Bool(b) => ::fmt(b, f), 280 | TokenValue::CodeValue(c) => ::fmt(c, f), 281 | TokenValue::String(s) => ::fmt(s, f), 282 | } 283 | } 284 | } 285 | 286 | impl ToTokens for TokenValue { 287 | #[inline] 288 | fn to_tokens(&self, tokens: &mut TokenStream) { 289 | match self { 290 | TokenValue::CodeValue(c) => c.to_tokens(tokens), 291 | TokenValue::String(s) => s.to_tokens(tokens), 292 | TokenValue::Number(n) => n.to_tokens(tokens), 293 | TokenValue::Bool(b) => b.to_tokens(tokens), 294 | } 295 | } 296 | } 297 | 298 | #[cfg(test)] 299 | mod tests { 300 | use crate::var::{CodeTokenValue, CodeValue}; 301 | use flexstr::shared_str; 302 | use std::str::FromStr; 303 | 304 | #[test] 305 | fn code_value_from_str() { 306 | assert_eq!( 307 | CodeValue::from_str("$type$str").unwrap(), 308 | CodeValue::Type(shared_str!("str")) 309 | ); 310 | assert_eq!( 311 | CodeValue::from_str("$ident$str").unwrap(), 312 | CodeValue::Ident(shared_str!("str")) 313 | ); 314 | assert_eq!( 315 | CodeValue::from_str("$int_lit$123").unwrap(), 316 | CodeValue::IntLit(shared_str!("123")) 317 | ); 318 | } 319 | 320 | #[test] 321 | fn code_token_value_display() { 322 | let s = "alloc::sync::Arc"; 323 | let value = CodeTokenValue::Type(syn::parse_str(s).unwrap()); 324 | assert_eq!(s, value.to_string()); 325 | } 326 | } 327 | 
-------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /flexgen/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! 
A flexible, yet simple quote-based code generator for creating beautiful Rust code 2 | 3 | #![warn(missing_docs)] 4 | 5 | /// Configuration related items 6 | pub mod config; 7 | /// Configuration variable related items 8 | pub mod var; 9 | 10 | use std::collections::HashMap; 11 | use std::io::Write; 12 | use std::{fs, io}; 13 | 14 | use flexstr::SharedStr; 15 | use heck::ToSnakeCase; 16 | use proc_macro2::TokenStream; 17 | use quote::quote; 18 | use rayon::prelude::{IntoParallelRefIterator, ParallelIterator}; 19 | use rust_format::{Formatter, PostProcess, PrettyPlease}; 20 | use use_builder::{UseBuilder, UseItems}; 21 | 22 | use crate::config::{Config, FragmentItem}; 23 | use crate::var::TokenVars; 24 | 25 | #[doc(hidden)] 26 | #[inline] 27 | pub fn make_key(s: &'static str) -> SharedStr { 28 | SharedStr::from_ref(&s.to_snake_case()) 29 | } 30 | 31 | /// Register code fragments in preparation for code generation 32 | #[macro_export] 33 | macro_rules! register_fragments { 34 | (%item%, $v:ident) => { () }; 35 | (%count%, $($v:ident),+) => { [$($crate::register_fragments!(%item%, $v)),+].len() }; 36 | // Allow trailing comma 37 | ($($fragment:ident,)+) => { $crate::register_fragments!($($fragment),+) }; 38 | ($($fragment:ident),+) => { 39 | { 40 | let cap = $crate::register_fragments!(%count%, $($fragment),+); 41 | let mut map = $crate::CodeFragments::with_capacity(cap); 42 | 43 | $( 44 | map.insert($crate::make_key(stringify!($fragment)), &$fragment); 45 | )+ 46 | map 47 | } 48 | }; 49 | } 50 | 51 | // *** Error *** 52 | 53 | /// This error will be returned if any issues arise during the code generation process 54 | #[derive(Debug, thiserror::Error)] 55 | pub enum Error { 56 | /// A variable specified in [import_vars] could not be found 57 | #[error("The specified variable '{0}' was missing.")] 58 | MissingVar(SharedStr), 59 | 60 | /// A fragment specified by the [CodeFragments] could not be found 61 | #[error("These code fragments from the configuration are 
missing: {0:?}")] 62 | MissingFragments(Vec), 63 | 64 | /// The fragment list specified in the file section doesn't exist 65 | #[error("The fragment list '{0}' referenced by file '{1}' doesn't exist")] 66 | MissingFragmentList(SharedStr, SharedStr), 67 | 68 | /// The fragment list exceptions specified by the file don't exist 69 | #[error("These fragment list exceptions referenced by file '{1}' don't exist: {0:?}")] 70 | MissingFragmentListExceptions(Vec, SharedStr), 71 | 72 | /// The file section requested from the [Config](config::Config) doesn't exist 73 | #[error("The configuration file item '{0}' doesn't exist")] 74 | FileNotFound(SharedStr), 75 | 76 | /// The fragment list requested from the [Config](config::Config) doesn't exist 77 | #[error("The configuration fragment list item '{0}' doesn't exist")] 78 | FragmentListNotFound(SharedStr), 79 | 80 | /// A nested list of execution errors occurred while trying to generate source code 81 | #[error("Errors occurred during execution: {0:?}")] 82 | ExecutionErrors(Vec), 83 | 84 | /// The item imported was of the wrong type (either single when a list was needed or vice versa) 85 | #[error("The specified item was a 'list' instead of a 'single' item (or vice versa)")] 86 | WrongItem, 87 | 88 | /// Unable to parse source code value from variable 89 | #[error("The code item could not be parsed: {0}")] 90 | UnrecognizedCodeItem(#[from] syn::Error), 91 | 92 | /// The code item variable data was in an unknown format 93 | #[error("The item did not match any known code item prefix: {0}")] 94 | NotCodeItem(SharedStr), 95 | 96 | /// An error occurred while deserializing the [Config](config::Config) 97 | #[error("There was an error while deserializing: {0}")] 98 | DeserializeError(String), 99 | 100 | /// An error occurred while formatting 101 | #[error(transparent)] 102 | FormatError(#[from] rust_format::Error), 103 | 104 | /// A general I/O error occurred 105 | #[error(transparent)] 106 | IOError(#[from] io::Error), 107 | 108 | 
/// A TOML syntax error occurred 109 | #[error(transparent)] 110 | TOMLError(#[from] toml::de::Error), 111 | 112 | /// An error occurred while parsing use sections 113 | #[error(transparent)] 114 | UseBuilderError(#[from] use_builder::Error), 115 | } 116 | 117 | // *** Execute *** 118 | 119 | struct FileGenerator<'exec> { 120 | name: &'exec SharedStr, 121 | vars: TokenVars, 122 | fragments: &'exec CodeFragments, 123 | config: &'exec Config, 124 | } 125 | 126 | impl<'exec> FileGenerator<'exec> { 127 | fn new( 128 | name: &'exec SharedStr, 129 | fragments: &'exec CodeFragments, 130 | config: &'exec Config, 131 | ) -> Result { 132 | // Get merged vars 133 | let vars = config.vars(name)?; 134 | 135 | Ok(Self { 136 | name, 137 | vars, 138 | fragments, 139 | config, 140 | }) 141 | } 142 | 143 | fn assemble_source( 144 | &self, 145 | results: Vec, 146 | top_results: Vec, 147 | uses: Vec, 148 | ) -> Result { 149 | // Would be nice to make this a constant, but _comment_! marker needs a literal 150 | let comment = quote! { 151 | _comment_!("+-------------------------------------------------------------------------------------------------+"); 152 | _comment_!("| WARNING: This file has been auto-generated using FlexGen (https://github.com/nu11ptr/flexgen). |"); 153 | _comment_!("| Any manual modifications to this file will be overwritten the next time this file is generated. |"); 154 | _comment_!("+-------------------------------------------------------------------------------------------------+"); 155 | }; 156 | 157 | let builder = UseBuilder::from_uses(uses); 158 | let (std_uses, ext_uses, crate_uses) = builder.into_items_sections()?; 159 | 160 | let tokens = quote! 
{ 161 | #comment 162 | _blank_!(); 163 | 164 | #( #top_results )* 165 | 166 | #( #std_uses )* 167 | _blank_!(); 168 | #( #ext_uses )* 169 | _blank_!(); 170 | #( #crate_uses )* 171 | _blank_!(); 172 | 173 | #( #results )* 174 | }; 175 | 176 | let config = rust_format::Config::new_str().post_proc(PostProcess::ReplaceMarkers); 177 | let formatter = PrettyPlease::from_config(config); 178 | let source = formatter.format_tokens(tokens)?; 179 | 180 | // Either return after PrettyPlease format or do one last final RustFmt run 181 | Ok(match self.config.build_rust_fmt() { 182 | Some(rust_fmt) => rust_fmt.format_str(source)?, 183 | None => source, 184 | }) 185 | } 186 | 187 | fn build_source( 188 | &self, 189 | fragments: &[FragmentItem], 190 | exceptions: &[SharedStr], 191 | results: &mut Vec, 192 | top_results: &mut Vec, 193 | use_trees: &mut Vec, 194 | ) -> Result<(), Error> { 195 | for (idx, fragment) in fragments.iter().enumerate() { 196 | match fragment { 197 | FragmentItem::FragmentListRef(name) => { 198 | if exceptions.contains(name) { 199 | continue; 200 | } 201 | 202 | let fragments = self.config.fragment_list(name)?; 203 | return self.build_source( 204 | fragments, 205 | exceptions, 206 | results, 207 | top_results, 208 | use_trees, 209 | ); 210 | } 211 | FragmentItem::Fragment(name) => { 212 | if exceptions.contains(name) { 213 | continue; 214 | } 215 | 216 | // Panic safety: This was pre-validated 217 | let fragment = self.fragments[name]; 218 | let tokens = fragment.generate(&self.vars)?; 219 | if !tokens.is_empty() { 220 | results.push(tokens); 221 | } 222 | 223 | let top_tokens = fragment.generate_top(&self.vars)?; 224 | if !top_tokens.is_empty() { 225 | top_results.push(top_tokens); 226 | } 227 | 228 | // Store the use tree, if we had one 229 | let use_tokens = fragment.uses(&self.vars)?; 230 | if !use_tokens.is_empty() { 231 | use_trees.push(syn::parse2(use_tokens)?) 
232 | } 233 | 234 | // Push a blank line on all but the last fragment in the list 235 | if idx < fragments.len() - 1 { 236 | results.push(quote! { _blank_!(); }) 237 | } 238 | } 239 | } 240 | } 241 | 242 | Ok(()) 243 | } 244 | 245 | fn generate_string(&self) -> Result<(SharedStr, String), Error> { 246 | // TODO: Combine into one call? 247 | let fragments = self.config.file_fragment_list(self.name)?; 248 | let exceptions = self.config.file_fragment_exceptions(self.name)?; 249 | 250 | // TODO: What capacity? (we could have nested lists, etc.) 251 | let mut results = Vec::with_capacity(self.fragments.len() * 2); 252 | let mut top_results = Vec::with_capacity(3); 253 | // Random choice based on a typical file 254 | let mut uses = Vec::with_capacity(10); 255 | 256 | self.build_source( 257 | fragments, 258 | exceptions, 259 | &mut results, 260 | &mut top_results, 261 | &mut uses, 262 | )?; 263 | let source = self.assemble_source(results, top_results, uses)?; 264 | 265 | Ok((self.name.clone(), source)) 266 | } 267 | 268 | fn generate_file(&self) -> Result<(), Error> { 269 | let (_, source) = self.generate_string()?; 270 | 271 | let mut file = fs::File::create(self.config.file_path(self.name)?)?; 272 | file.write_all(source.as_bytes())?; 273 | Ok(()) 274 | } 275 | } 276 | 277 | /// The actual code generator 278 | pub struct CodeGenerator { 279 | code: CodeFragments, 280 | config: Config, 281 | } 282 | 283 | impl CodeGenerator { 284 | /// Create a new instance of the `CodeGenerator`. 
It will validate the [Config] and return 285 | /// an [Error] if there are any issues 286 | #[inline] 287 | pub fn new(code: CodeFragments, mut config: Config) -> Result { 288 | config.build_and_validate(&code)?; 289 | Ok(Self { code, config }) 290 | } 291 | 292 | fn parse_results(results: Vec>) -> Result, Error> { 293 | let mut errors = Vec::with_capacity(results.len()); 294 | let mut source = Vec::with_capacity(results.len()); 295 | 296 | for result in results { 297 | match result { 298 | Ok(result) => source.push(result), 299 | Err(err) => errors.push(err), 300 | } 301 | } 302 | 303 | if errors.is_empty() { 304 | Ok(source) 305 | } else { 306 | Err(Error::ExecutionErrors(errors)) 307 | } 308 | } 309 | 310 | fn generate(&self, to_file: bool) -> Result, Error> { 311 | let names = self.config.file_names(); 312 | 313 | Ok(if to_file { 314 | let results: Vec> = names 315 | .par_iter() 316 | .map(|&name| FileGenerator::new(name, &self.code, &self.config)?.generate_file()) 317 | .collect(); 318 | 319 | Self::parse_results(results)?; 320 | HashMap::new() 321 | } else { 322 | let results: Vec> = names 323 | .par_iter() 324 | .map(|&name| FileGenerator::new(name, &self.code, &self.config)?.generate_string()) 325 | .collect(); 326 | let results: HashMap<_, _> = Self::parse_results(results)?.into_iter().collect(); 327 | results 328 | }) 329 | } 330 | 331 | /// Generate the files listed in the [Config], but return them as a map of strings instead of 332 | /// actually writing them to he filesystem 333 | #[inline] 334 | pub fn generate_strings(&self) -> Result, Error> { 335 | self.generate(false) 336 | } 337 | 338 | /// Generate the files listed in the [Config] 339 | #[inline] 340 | pub fn generate_files(&self) -> Result<(), Error> { 341 | self.generate(true).map(|_| ()) 342 | } 343 | } 344 | 345 | // *** Misc. Types *** 346 | 347 | /// A map of all registered code fragments. 
Is is returned by a call to [register_fragments] 348 | pub type CodeFragments = HashMap; 349 | 350 | /// A single code fragment - the smallest unit of work 351 | #[allow(unused_variables)] 352 | pub trait CodeFragment { 353 | /// Generate the `use` sections of the file, if any. The returned `TokenStream` can ONLY 354 | /// be varius `use` items. They will be deduplicated and grouped before inclusion in the file. 355 | #[inline] 356 | fn uses(&self, vars: &TokenVars) -> Result { 357 | Ok(quote! {}) 358 | } 359 | 360 | /// Generate any portion of the source file that must be on the top (such as `#![]` style attributes). 361 | /// Each snippet will be collected in order and combined into the whole 362 | #[inline] 363 | fn generate_top(&self, vars: &TokenVars) -> Result { 364 | Ok(quote! {}) 365 | } 366 | 367 | /// Generate the general body of the source file. Each snippet will be collected in order and 368 | /// combined into the whole 369 | #[inline] 370 | fn generate(&self, vars: &TokenVars) -> Result { 371 | Ok(quote! 
{}) 372 | } 373 | } 374 | -------------------------------------------------------------------------------- /flexgen/src/config.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::io::Read; 3 | use std::path::{Path, PathBuf}; 4 | use std::{fs, io}; 5 | 6 | use flexstr::SharedStr; 7 | 8 | use crate::var::Vars; 9 | use crate::{CodeFragments, Error, TokenVars}; 10 | 11 | const BUF_SIZE: usize = u16::MAX as usize; 12 | 13 | const DEFAULT_FILENAME: &str = "flexgen.toml"; 14 | 15 | // *** FragmentItem *** 16 | 17 | /// An enum that is either a reference to a code fragment or a fragment list 18 | #[derive(Clone, Debug, serde::Deserialize, PartialEq)] 19 | #[serde(untagged)] 20 | pub enum FragmentItem { 21 | // Must be first so Serde uses this one always 22 | /// A single code fragment 23 | Fragment(SharedStr), 24 | /// A reference to a list of code fragments 25 | FragmentListRef(SharedStr), 26 | } 27 | 28 | // *** Fragment Lists *** 29 | 30 | #[derive(Clone, Debug, Default, serde::Deserialize, PartialEq)] 31 | struct FragmentLists(HashMap>); 32 | 33 | impl FragmentLists { 34 | pub fn build(&self) -> Self { 35 | let mut lists = HashMap::with_capacity(self.0.len()); 36 | 37 | for (key, fragments) in &self.0 { 38 | let mut new_fragments = Vec::with_capacity(fragments.len()); 39 | 40 | for fragment in fragments { 41 | match fragment { 42 | FragmentItem::Fragment(s) | FragmentItem::FragmentListRef(s) => { 43 | // If it is also a key, that means it is a list reference 44 | if self.0.contains_key(s) { 45 | new_fragments.push(FragmentItem::FragmentListRef(s.clone())); 46 | } else { 47 | new_fragments.push(FragmentItem::Fragment(s.clone())); 48 | } 49 | } 50 | } 51 | } 52 | 53 | lists.insert(key.clone(), new_fragments); 54 | } 55 | 56 | Self(lists) 57 | } 58 | 59 | pub fn validate_code_fragments(&self, code: &CodeFragments) -> Result<(), Error> { 60 | let mut missing = Vec::new(); 61 | 62 | // Loop 
over each fragment list searching for each item in the code fragments 63 | for fragments in self.0.values() { 64 | let v: Vec<_> = fragments 65 | .iter() 66 | .filter_map(|fragment| match fragment { 67 | FragmentItem::Fragment(name) if !code.contains_key(name) => Some(name.clone()), 68 | _ => None, 69 | }) 70 | .collect(); 71 | 72 | // Store all missing fragments 73 | missing.extend(v); 74 | } 75 | 76 | if missing.is_empty() { 77 | Ok(()) 78 | } else { 79 | Err(Error::MissingFragments(missing)) 80 | } 81 | } 82 | 83 | pub fn validate_file(&self, name: &SharedStr, f: &File) -> Result<(), Error> { 84 | // Ensure the file's fragment list exists 85 | if !self.0.contains_key(&f.fragment_list) { 86 | return Err(Error::MissingFragmentList( 87 | f.fragment_list.clone(), 88 | name.clone(), 89 | )); 90 | } 91 | 92 | let mut missing = Vec::new(); 93 | 94 | 'top: for exception in &f.fragment_list_exceptions { 95 | // If it is the name of a list, we can bypass the 2nd scan entirely 96 | if self.0.contains_key(exception) { 97 | continue; 98 | } 99 | 100 | // If it might be the name of an actual fragment we will need to scan them all 101 | for fragment_list in self.0.values() { 102 | // As soon as we find a match jump to looking for next exception 103 | if fragment_list.iter().any(|fragment| match fragment { 104 | FragmentItem::Fragment(name) => name == exception, 105 | _ => false, 106 | }) { 107 | continue 'top; 108 | } 109 | } 110 | 111 | // If we didn't find as a list or via scan, it is missing 112 | missing.push(exception.clone()); 113 | } 114 | 115 | if missing.is_empty() { 116 | Ok(()) 117 | } else { 118 | Err(Error::MissingFragmentListExceptions(missing, name.clone())) 119 | } 120 | } 121 | 122 | #[inline] 123 | pub fn fragment_list(&self, name: &SharedStr) -> Result<&Vec, Error> { 124 | self.0 125 | .get(name) 126 | .ok_or_else(|| Error::FragmentListNotFound(name.clone())) 127 | } 128 | } 129 | 130 | // *** Config *** 131 | 132 | #[derive(Clone, Debug, Default, 
serde::Deserialize, PartialEq)] 133 | struct General { 134 | #[serde(default)] 135 | base_path: PathBuf, 136 | #[serde(default)] 137 | rust_fmt: RustFmt, 138 | #[serde(default)] 139 | vars: Vars, 140 | } 141 | 142 | impl General { 143 | #[inline] 144 | fn build_rust_fmt(&self) -> Option { 145 | self.rust_fmt.build_rust_fmt() 146 | } 147 | } 148 | 149 | #[derive(Clone, Debug, Default, serde::Deserialize, PartialEq)] 150 | struct RustFmt { 151 | #[serde(default)] 152 | omit_final_format: bool, 153 | #[serde(default)] 154 | path: Option, 155 | #[serde(default)] 156 | options: HashMap, 157 | } 158 | 159 | impl RustFmt { 160 | fn build_rust_fmt(&self) -> Option { 161 | if !self.omit_final_format { 162 | let mut config = if !self.options.is_empty() { 163 | let map = self.options.iter().map(|(k, v)| (&**k, &**v)).collect(); 164 | rust_format::Config::from_hash_map(map) 165 | } else { 166 | rust_format::Config::new() 167 | }; 168 | if let Some(path) = &self.path { 169 | config = config.rust_fmt_path(path.clone()) 170 | } 171 | 172 | Some(rust_format::RustFmt::from_config(config)) 173 | } else { 174 | None 175 | } 176 | } 177 | } 178 | 179 | #[derive(Clone, Debug, Default, serde::Deserialize, PartialEq)] 180 | struct File { 181 | path: PathBuf, 182 | fragment_list: SharedStr, 183 | #[serde(default)] 184 | fragment_list_exceptions: Vec, 185 | vars: Vars, 186 | } 187 | 188 | /// The `flexgen` configuration 189 | #[derive(Clone, Debug, Default, serde::Deserialize, PartialEq)] 190 | pub struct Config { 191 | #[serde(default)] 192 | general: General, 193 | fragment_lists: FragmentLists, 194 | files: HashMap, 195 | } 196 | 197 | impl Config { 198 | /// Try to load the `Config` from the given TOML reader 199 | pub fn from_toml_reader(r: impl io::Read) -> Result { 200 | let mut reader = io::BufReader::new(r); 201 | let mut buffer = String::with_capacity(BUF_SIZE); 202 | reader.read_to_string(&mut buffer)?; 203 | 204 | Ok(toml::from_str(&buffer)?) 
205 | } 206 | 207 | /// Try to load the `Config` from the default TOML file (flexgen.toml) 208 | pub fn from_default_toml_file() -> Result { 209 | let f = fs::File::open(DEFAULT_FILENAME)?; 210 | Self::from_toml_reader(f) 211 | } 212 | 213 | /// Try to load the `Config` from the given TOML file 214 | pub fn from_toml_file(cfg_name: impl AsRef) -> Result { 215 | let f = fs::File::open(cfg_name)?; 216 | Self::from_toml_reader(f) 217 | } 218 | 219 | pub(crate) fn build_and_validate(&mut self, code: &CodeFragments) -> Result<(), Error> { 220 | // Build and validate fragment lists against code fragments and files 221 | self.fragment_lists = self.fragment_lists.build(); 222 | 223 | self.fragment_lists.validate_code_fragments(code)?; 224 | for (name, file) in &self.files { 225 | self.fragment_lists.validate_file(name, file)?; 226 | } 227 | 228 | Ok(()) 229 | } 230 | 231 | /// Return all the files names specified in the config 232 | #[inline] 233 | pub fn file_names(&self) -> Vec<&SharedStr> { 234 | self.files.keys().collect() 235 | } 236 | 237 | /// Return the specified file configuration 238 | #[inline] 239 | fn file(&self, name: &SharedStr) -> Result<&File, Error> { 240 | self.files 241 | .get(name) 242 | .ok_or_else(|| Error::FileNotFound(name.clone())) 243 | } 244 | 245 | /// Build the full file path to the file given as a parameter 246 | pub fn file_path(&self, name: &SharedStr) -> Result { 247 | let file = self.file(name)?; 248 | let base_path = self.general.base_path.as_os_str(); 249 | 250 | let mut path = PathBuf::with_capacity(base_path.len() + file.path.as_os_str().len()); 251 | path.push(base_path); 252 | path.push(&file.path); 253 | Ok(path) 254 | } 255 | 256 | #[inline] 257 | fn convert_vars(vars: &Vars) -> Result { 258 | vars.iter() 259 | .map(|(key, value)| match value.to_token_item() { 260 | Ok(value) => Ok((key.clone(), value)), 261 | Err(err) => Err(err), 262 | }) 263 | .collect() 264 | } 265 | 266 | #[inline] 267 | fn general_vars(&self) -> Result { 268 
| Self::convert_vars(&self.general.vars) 269 | } 270 | 271 | #[inline] 272 | fn file_vars(&self, name: &SharedStr) -> Result { 273 | Self::convert_vars(&self.file(name)?.vars) 274 | } 275 | 276 | /// Return the complete vars for the file name given as a parameter 277 | #[inline] 278 | pub fn vars(&self, name: &SharedStr) -> Result { 279 | let mut vars = self.general_vars()?; 280 | vars.extend(self.file_vars(name)?); 281 | Ok(vars) 282 | } 283 | 284 | /// Return the given named fragment list 285 | #[inline] 286 | pub fn fragment_list(&self, name: &SharedStr) -> Result<&Vec, Error> { 287 | self.fragment_lists.fragment_list(name) 288 | } 289 | 290 | /// Return the fragment list used by the file given a parameter 291 | #[inline] 292 | pub fn file_fragment_list(&self, name: &SharedStr) -> Result<&Vec, Error> { 293 | let name = &self.file(name)?.fragment_list; 294 | self.fragment_list(name) 295 | } 296 | 297 | /// Return all the fragment exceptions for the given file 298 | #[inline] 299 | pub fn file_fragment_exceptions(&self, name: &SharedStr) -> Result<&Vec, Error> { 300 | Ok(&self.file(name)?.fragment_list_exceptions) 301 | } 302 | 303 | /// Return a [RustFmt](rust_format::RustFmt) instance configured as specified in this configuration 304 | #[inline] 305 | pub fn build_rust_fmt(&self) -> Option { 306 | self.general.build_rust_fmt() 307 | } 308 | } 309 | 310 | #[cfg(test)] 311 | mod tests { 312 | use std::collections::HashMap; 313 | use std::path::PathBuf; 314 | use std::str::FromStr; 315 | 316 | use flexstr::{shared_str, SharedStr}; 317 | use pretty_assertions::assert_eq; 318 | 319 | use crate::config::{Config, File, FragmentItem, FragmentLists, General, RustFmt}; 320 | use crate::var::{CodeValue, VarItem, VarValue}; 321 | 322 | const CONFIG: &str = r#" 323 | [general] 324 | base_path = "src/" 325 | 326 | [general.rust_fmt] 327 | path = "rustfmt" 328 | 329 | [general.vars] 330 | product = "FlexStr" 331 | generate = true 332 | count = 5 333 | suffix = "$ident$Str" 334 
| list = [ "FlexStr", true, 5, "$ident$Str" ] 335 | 336 | [fragment_lists] 337 | impl = [ "impl_struct", "impl_core_ref" ] 338 | impl_struct = [ "empty", "from_ref" ] 339 | 340 | [files.str] 341 | path = "strings/generated/std_str.rs" 342 | fragment_list = "impl" 343 | fragment_list_exceptions = [ "impl_core_ref" ] 344 | 345 | [files.str.vars] 346 | str_type = "str" 347 | "#; 348 | 349 | fn general() -> General { 350 | let mut vars = HashMap::new(); 351 | 352 | let product = VarValue::String(shared_str!("FlexStr")); 353 | vars.insert(shared_str!("product"), VarItem::Single(product.clone())); 354 | 355 | let generate = VarValue::Bool(true); 356 | vars.insert(shared_str!("generate"), VarItem::Single(generate.clone())); 357 | 358 | let count = VarValue::Number(5); 359 | vars.insert(shared_str!("count"), VarItem::Single(count.clone())); 360 | 361 | let suffix = VarValue::CodeValue(CodeValue::from_str("$ident$Str").unwrap()); 362 | vars.insert(shared_str!("suffix"), VarItem::Single(suffix.clone())); 363 | 364 | vars.insert( 365 | shared_str!("list"), 366 | VarItem::List(vec![product, generate, count, suffix]), 367 | ); 368 | 369 | let rust_fmt = RustFmt { 370 | omit_final_format: false, 371 | path: Some("rustfmt".into()), 372 | options: Default::default(), 373 | }; 374 | 375 | General { 376 | base_path: PathBuf::from("src/"), 377 | rust_fmt, 378 | vars, 379 | } 380 | } 381 | 382 | fn fragment_lists() -> FragmentLists { 383 | use FragmentItem::*; 384 | 385 | let mut lists = HashMap::new(); 386 | lists.insert( 387 | shared_str!("impl"), 388 | vec![ 389 | Fragment(shared_str!("impl_struct")), 390 | Fragment(shared_str!("impl_core_ref")), 391 | ], 392 | ); 393 | lists.insert( 394 | shared_str!("impl_struct"), 395 | vec![ 396 | Fragment(shared_str!("empty")), 397 | Fragment(shared_str!("from_ref")), 398 | ], 399 | ); 400 | FragmentLists(lists) 401 | } 402 | 403 | fn files() -> HashMap { 404 | let mut str_vars = HashMap::new(); 405 | str_vars.insert( 406 | 
shared_str!("str_type"), 407 | VarItem::Single(VarValue::String(shared_str!("str"))), 408 | ); 409 | 410 | let files_str = File { 411 | path: PathBuf::from("strings/generated/std_str.rs"), 412 | fragment_list: shared_str!("impl"), 413 | fragment_list_exceptions: vec![shared_str!("impl_core_ref")], 414 | vars: str_vars, 415 | }; 416 | 417 | let mut files = HashMap::new(); 418 | files.insert(shared_str!("str"), files_str); 419 | files 420 | } 421 | 422 | #[test] 423 | fn from_reader() { 424 | let actual = Config::from_toml_reader(CONFIG.as_bytes()).unwrap(); 425 | let expected = Config { 426 | general: general(), 427 | fragment_lists: fragment_lists(), 428 | files: files(), 429 | }; 430 | 431 | assert_eq!(expected, actual); 432 | } 433 | } 434 | -------------------------------------------------------------------------------- /use_builder/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! A crate to build source code use sections by combining multiple (possibly duplicate) use section inputs 2 | //! 3 | //! ```rust 4 | //! use assert_unordered::assert_eq_unordered; 5 | //! use quote::quote; 6 | //! use use_builder::{UseBuilder, UseItems}; 7 | //! 8 | //! // #1 - Build a two or more use trees and convert into `UseItems` (wrapped `Vec`) 9 | //! 10 | //! let use1 = quote! { 11 | //! use crate::Test; 12 | //! use std::error::{Error as StdError}; 13 | //! use std::fmt::Debug; 14 | //! }; 15 | //! 16 | //! let use2 = quote! { 17 | //! use syn::ItemUse; 18 | //! use std::fmt::Display; 19 | //! use crate::*; 20 | //! }; 21 | //! 22 | //! let items1: UseItems = syn::parse2(use1).unwrap(); 23 | //! let items2: UseItems = syn::parse2(use2).unwrap(); 24 | //! 25 | //! // #2 - Parse, process, and extract into sections 26 | //! 27 | //! let builder = UseBuilder::from_uses(vec![items1, items2]); 28 | //! let (std_use, ext_use, crate_use) = builder.into_items_sections().unwrap(); 29 | //! 30 | //! 
// #3 - Validate our response matches expectation 31 | //! 32 | //! let std_expected = quote! { 33 | //! use std::error::Error as StdError; 34 | //! use std::fmt::{Debug, Display}; 35 | //! }; 36 | //! let std_expected = syn::parse2::(std_expected).unwrap().into_inner(); 37 | //! 38 | //! let ext_expected = quote! { 39 | //! use syn::ItemUse; 40 | //! }; 41 | //! let ext_expected = syn::parse2::(ext_expected).unwrap().into_inner(); 42 | //! 43 | //! let crate_expected = quote! { 44 | //! use crate::*; 45 | //! }; 46 | //! let crate_expected = syn::parse2::(crate_expected).unwrap().into_inner(); 47 | //! 48 | //! assert_eq_unordered!(std_expected, std_use); 49 | //! assert_eq_unordered!(ext_expected, ext_use); 50 | //! assert_eq_unordered!(crate_expected, crate_use); 51 | //! ``` 52 | 53 | #![warn(missing_docs)] 54 | 55 | // Trick to test README samples (from: https://github.com/rust-lang/cargo/issues/383#issuecomment-720873790) 56 | #[cfg(doctest)] 57 | mod test_readme { 58 | macro_rules! external_doc_test { 59 | ($x:expr) => { 60 | #[doc = $x] 61 | extern "C" {} 62 | }; 63 | } 64 | 65 | external_doc_test!(include_str!("../README.md")); 66 | } 67 | 68 | use quote::__private::TokenStream; 69 | use quote::{ToTokens, TokenStreamExt}; 70 | 71 | use indexmap::IndexMap; 72 | use std::collections::HashSet; 73 | use std::error::Error as StdError; 74 | use std::{cmp, fmt, hash}; 75 | 76 | const STD: [&str; 5] = ["std", "alloc", "core", "proc_macro", "test"]; 77 | const CRATE: [&str; 3] = ["self", "super", "crate"]; 78 | 79 | // *** UseItems *** 80 | 81 | /// An opaque type primarily used for parsing to get an inner `Vec` (however, 82 | /// [from_items](UseItems::from_items) can also be used for an existing [Vec] of items if parsing is 83 | /// not required). This type is the sole input into [UseBuilder]. 
84 | pub struct UseItems { 85 | items: Vec, 86 | } 87 | 88 | impl UseItems { 89 | /// Instead of using syn parsing, this can be used to wrap an existing [Vec] of use items 90 | #[inline] 91 | pub fn from_items(items: Vec) -> Self { 92 | Self { items } 93 | } 94 | 95 | /// Consume this value and emit the inner [Vec] of [syn::ItemUse] 96 | #[inline] 97 | pub fn into_inner(self) -> Vec { 98 | self.items 99 | } 100 | } 101 | 102 | impl syn::parse::Parse for UseItems { 103 | fn parse(input: syn::parse::ParseStream) -> syn::Result { 104 | // Random guess on capacity 105 | let mut items = Vec::with_capacity(5); 106 | 107 | while !input.is_empty() { 108 | items.push(input.parse()?); 109 | } 110 | 111 | Ok(Self { items }) 112 | } 113 | } 114 | 115 | impl ToTokens for UseItems { 116 | fn to_tokens(&self, tokens: &mut TokenStream) { 117 | tokens.append_all(&self.items); 118 | } 119 | } 120 | 121 | impl IntoIterator for UseItems { 122 | type Item = syn::ItemUse; 123 | type IntoIter = std::vec::IntoIter; 124 | 125 | #[inline] 126 | fn into_iter(self) -> Self::IntoIter { 127 | self.items.into_iter() 128 | } 129 | } 130 | 131 | // *** Use Entry *** 132 | 133 | #[derive(Clone, Debug, cmp::Eq, hash::Hash, cmp::PartialEq)] 134 | enum UseKey { 135 | Name(syn::Ident), 136 | Rename(syn::Ident, syn::Ident), 137 | Glob, 138 | } 139 | 140 | // *** Use Data *** 141 | 142 | #[derive(Clone, Debug, cmp::Eq, hash::Hash, cmp::PartialEq)] 143 | struct UseData { 144 | vis: syn::Visibility, 145 | attrs: Vec, 146 | has_leading_colons: bool, 147 | } 148 | 149 | impl UseData { 150 | #[inline] 151 | fn new(vis: syn::Visibility, attrs: Vec, has_leading_colons: bool) -> Self { 152 | Self { 153 | vis, 154 | attrs, 155 | has_leading_colons, 156 | } 157 | } 158 | } 159 | 160 | // *** UseValue *** 161 | 162 | #[derive(Clone, Default, Debug)] 163 | struct UseValue { 164 | nodes: HashSet, 165 | paths: UseBuilder, 166 | } 167 | 168 | // *** Error *** 169 | 170 | /// The error type returned if issues occur 
during [UseBuilder] operations 171 | #[derive(fmt::Debug)] 172 | pub enum Error { 173 | /// A glob was found as the first entry in a use path - this is illegal Rust 174 | TopLevelGlob, 175 | /// A group was found as the first entry in a use path - this is not supported 176 | TopLevelGroup, 177 | /// The same use was found but with differing dual colon prefix, attributes, or visibility 178 | UseWithDiffAttr, 179 | } 180 | 181 | impl fmt::Display for Error { 182 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 183 | use Error::*; 184 | 185 | match self { 186 | TopLevelGlob => f.write_str("Top level glob is not allowed"), 187 | TopLevelGroup => f.write_str("Top level group is not allowed"), 188 | UseWithDiffAttr => f.write_str( 189 | "Multiple copies of the same import with differing attributes are not allowed", 190 | ), 191 | } 192 | } 193 | } 194 | 195 | impl StdError for Error {} 196 | 197 | // *** ItemUse Builder *** 198 | 199 | #[derive(Clone, Default)] 200 | struct ItemUseBuilder { 201 | paths: Vec, 202 | } 203 | 204 | impl ItemUseBuilder { 205 | #[inline] 206 | fn add_path(&mut self, path: syn::Ident) { 207 | self.paths.push(path); 208 | } 209 | 210 | fn into_item_use(mut self, names: Vec, data: UseData) -> syn::ItemUse { 211 | let key_to_tree = |key| match key { 212 | UseKey::Name(name) => syn::UseTree::Name(syn::UseName { ident: name }), 213 | UseKey::Rename(name, rename) => syn::UseTree::Rename(syn::UseRename { 214 | ident: name, 215 | as_token: Default::default(), 216 | rename, 217 | }), 218 | _ => unreachable!("Impossible glob"), 219 | }; 220 | 221 | // #1 - Setup name tree 222 | 223 | // Regardless of number of entries in names, if there is a glob, ignore the rest 224 | let mut tree = if names.contains(&UseKey::Glob) { 225 | syn::UseTree::Glob(syn::UseGlob { 226 | star_token: Default::default(), 227 | }) 228 | // If a single entry then it is either a name or rename 229 | } else if names.len() == 1 { 230 | // Panic safety: we verified there is 
exactly one item in the set 231 | key_to_tree(names.into_iter().next().unwrap()) 232 | // Group 233 | } else { 234 | let items = names.into_iter().map(key_to_tree).collect(); 235 | 236 | syn::UseTree::Group(syn::UseGroup { 237 | brace_token: Default::default(), 238 | items, 239 | }) 240 | }; 241 | 242 | // #2 - Build path (in reverse order) 243 | 244 | while !self.paths.is_empty() { 245 | let path = self.paths.remove(self.paths.len() - 1); 246 | 247 | tree = syn::UseTree::Path(syn::UsePath { 248 | ident: path, 249 | colon2_token: Default::default(), 250 | tree: Box::new(tree), 251 | }); 252 | } 253 | 254 | // #3 - Build ItemUse 255 | 256 | let leading_colon = if data.has_leading_colons { 257 | Some(syn::token::Colon2::default()) 258 | } else { 259 | None 260 | }; 261 | 262 | syn::ItemUse { 263 | attrs: data.attrs, 264 | vis: data.vis, 265 | use_token: Default::default(), 266 | leading_colon, 267 | tree, 268 | semi_token: Default::default(), 269 | } 270 | } 271 | } 272 | 273 | // *** UseMap *** 274 | 275 | /// Type that contains a partitioned list of uses by std, external, and crate level 276 | pub type StdExtCrateUse = (Vec, Vec, Vec); 277 | 278 | /// A type that builds vecs of [syn::ItemUse]. 
It takes a [Vec] of [UseItems] as input, ensures no 279 | /// conflicting duplicates, groups them, and then emits as [Vec] (or multiple [Vec]) of [syn::ItemUse] 280 | #[derive(Clone, Default, Debug)] 281 | pub struct UseBuilder { 282 | map: IndexMap, 283 | entries: usize, 284 | } 285 | 286 | impl UseBuilder { 287 | /// Create a new builder from a [Vec] of [UseItems] 288 | pub fn from_uses(items: Vec) -> Self { 289 | let mut root_map = Self { 290 | map: IndexMap::new(), 291 | entries: 0, 292 | }; 293 | 294 | for inner_items in items { 295 | for item in inner_items.items { 296 | let data = UseData::new(item.vis, item.attrs, item.leading_colon.is_some()); 297 | root_map.parse_tree(item.tree, data); 298 | } 299 | } 300 | 301 | root_map 302 | } 303 | 304 | fn add_node(&mut self, entry: UseKey, data: UseData) { 305 | match self.map.entry(entry) { 306 | indexmap::map::Entry::Occupied(mut e) => { 307 | e.get_mut().nodes.insert(data); 308 | } 309 | indexmap::map::Entry::Vacant(e) => { 310 | self.entries += 1; 311 | let mut u = UseValue::default(); 312 | u.nodes.insert(data); 313 | e.insert(u); 314 | } 315 | } 316 | } 317 | 318 | fn add_path(&mut self, entry: UseKey) -> &mut UseBuilder { 319 | match self.map.entry(entry) { 320 | indexmap::map::Entry::Occupied(e) => &mut e.into_mut().paths, 321 | indexmap::map::Entry::Vacant(e) => { 322 | let u = UseValue::default(); 323 | &mut e.insert(u).paths 324 | } 325 | } 326 | } 327 | 328 | fn parse_tree(&mut self, tree: syn::UseTree, data: UseData) { 329 | use syn::UseTree::*; 330 | 331 | match tree { 332 | Path(syn::UsePath { ident, tree, .. }) => { 333 | let map = self.add_path(UseKey::Name(ident)); 334 | // TODO: I hate cloning tree here, but Box::into_inner() is unstable - replace when stable 335 | map.parse_tree(syn::UseTree::clone(&*tree), data); 336 | } 337 | Name(syn::UseName { ident }) => { 338 | self.add_node(UseKey::Name(ident), data); 339 | } 340 | Rename(syn::UseRename { ident, rename, .. 
}) => { 341 | self.add_node(UseKey::Rename(ident, rename), data); 342 | } 343 | Glob(syn::UseGlob { .. }) => { 344 | self.add_node(UseKey::Glob, data); 345 | } 346 | Group(syn::UseGroup { items, .. }) => { 347 | for item in items { 348 | self.parse_tree(item, data.clone()); 349 | } 350 | } 351 | } 352 | } 353 | 354 | fn next_map( 355 | use_map: UseBuilder, 356 | builder: ItemUseBuilder, 357 | items: &mut Vec, 358 | ) -> Result<(), Error> { 359 | let mut map: IndexMap> = IndexMap::new(); 360 | let len = use_map.map.len(); 361 | 362 | // Node Strategy: try to combine as we loop over 363 | for (key, value) in use_map.map { 364 | // *** Path handling ** 365 | 366 | // Ignore anything but names for future paths (others are invalid as paths) 367 | if let UseKey::Name(path) = key.clone() { 368 | // Create a builder from the original 369 | let mut builder = builder.clone(); 370 | builder.add_path(path); 371 | if let err @ Err(_) = Self::next_map(value.paths, builder, items) { 372 | return err; 373 | } 374 | } 375 | 376 | // *** Node handling *** 377 | 378 | // Peek at nodes held by this key 379 | if !value.nodes.is_empty() { 380 | // We should really only have one entry - more than that means incompatible attrs 381 | if value.nodes.len() > 1 { 382 | return Err(Error::UseWithDiffAttr); 383 | } 384 | 385 | // Insert into our map 386 | // Panic safety: we confirmed above there is exactly one entry 387 | match map.entry(value.nodes.into_iter().next().unwrap()) { 388 | indexmap::map::Entry::Occupied(mut e) => { 389 | e.get_mut().push(key); 390 | } 391 | indexmap::map::Entry::Vacant(e) => { 392 | let mut set = Vec::with_capacity(len); 393 | set.push(key); 394 | e.insert(set); 395 | } 396 | } 397 | } 398 | } 399 | 400 | // If we found any nodes, build them based on associated data 401 | for (data, names) in map { 402 | let item = builder.clone().into_item_use(names, data); 403 | items.push(item); 404 | } 405 | 406 | Ok(()) 407 | } 408 | 409 | /// Consume this builder an emit a 
[Vec] of [syn::ItemUse] 410 | pub fn into_items(self) -> Result, Error> { 411 | let mut items = Vec::with_capacity(self.entries); 412 | let builder = ItemUseBuilder::default(); 413 | Self::next_map(self, builder, &mut items)?; 414 | Ok(items) 415 | } 416 | 417 | /// Consume this builder and emit three vectors of [syn::ItemUse] partitioned by crate type: 418 | /// std, external, and intra-crate uses 419 | pub fn into_items_sections(self) -> Result { 420 | let items = self.into_items()?; 421 | 422 | // Will be too big - better too big than too small 423 | let mut std_uses = Vec::with_capacity(items.len()); 424 | let mut extern_uses = Vec::with_capacity(items.len()); 425 | let mut crate_uses = Vec::with_capacity(items.len()); 426 | 427 | for item in items { 428 | use syn::UseTree::*; 429 | 430 | match &item.tree { 431 | // Name and rename don't make much sense, but technically legal 432 | Path(syn::UsePath { ident, .. }) 433 | | Name(syn::UseName { ident }) 434 | | Rename(syn::UseRename { ident, .. }) => { 435 | let name = &*ident.to_string(); 436 | 437 | if STD.contains(&name) { 438 | std_uses.push(item); 439 | } else if CRATE.contains(&name) { 440 | crate_uses.push(item); 441 | } else { 442 | extern_uses.push(item); 443 | }; 444 | } 445 | Glob(_) => return Err(Error::TopLevelGlob), 446 | Group(_) => {} 447 | } 448 | } 449 | 450 | Ok((std_uses, extern_uses, crate_uses)) 451 | } 452 | } 453 | 454 | #[cfg(test)] 455 | mod tests { 456 | use assert_unordered::assert_eq_unordered; 457 | use quote::quote; 458 | 459 | use crate::{UseBuilder, UseItems}; 460 | 461 | fn make_builder() -> UseBuilder { 462 | let use1 = quote! { 463 | use crate::Test; 464 | use std::error::Error as StdError; 465 | use std::fmt::Debug; 466 | }; 467 | 468 | let use2 = quote! 
{ 469 | use syn::ItemUse; 470 | use std::fmt::Display; 471 | use crate::*; 472 | }; 473 | 474 | let items1: UseItems = syn::parse2(use1).unwrap(); 475 | let items2: UseItems = syn::parse2(use2).unwrap(); 476 | 477 | UseBuilder::from_uses(vec![items1, items2]) 478 | } 479 | 480 | #[test] 481 | fn items() { 482 | let builder = make_builder(); 483 | //eprintln!("{:#?}", &builder); 484 | let uses = builder.into_items().unwrap(); 485 | //println!("{uses:#?}"); 486 | 487 | let expected = quote! { 488 | use crate::*; 489 | use std::error::Error as StdError; 490 | use std::fmt::{Debug, Display}; 491 | use syn::ItemUse; 492 | }; 493 | let expected = syn::parse2::(expected).unwrap().into_inner(); 494 | 495 | assert_eq_unordered!(expected, uses); 496 | } 497 | 498 | #[test] 499 | fn items_separated() { 500 | let builder = make_builder(); 501 | let (std_use, ext_use, crate_use) = builder.into_items_sections().unwrap(); 502 | 503 | let std_expected = quote! { 504 | use std::error::Error as StdError; 505 | use std::fmt::{Debug, Display}; 506 | }; 507 | let std_expected = syn::parse2::(std_expected).unwrap().into_inner(); 508 | 509 | let ext_expected = quote! { 510 | use syn::ItemUse; 511 | }; 512 | let ext_expected = syn::parse2::(ext_expected).unwrap().into_inner(); 513 | 514 | let crate_expected = quote! { 515 | use crate::*; 516 | }; 517 | let crate_expected = syn::parse2::(crate_expected) 518 | .unwrap() 519 | .into_inner(); 520 | 521 | assert_eq_unordered!(std_expected, std_use); 522 | assert_eq_unordered!(ext_expected, ext_use); 523 | assert_eq_unordered!(crate_expected, crate_use); 524 | } 525 | } 526 | -------------------------------------------------------------------------------- /doc_test/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![cfg_attr(docsrs, feature(doc_cfg))] 2 | #![warn(missing_docs)] 3 | 4 | //! Using the [doc_test] macro, we can take any [TokenStream](proc_macro2::TokenStream) and turn it into 5 | //! 
a doctest [TokenStream](proc_macro2::TokenStream) that can be interpolated in any [quote](quote::quote) 6 | //! macro invocation. 7 | //! 8 | //! The [doc_comment] function takes any string and turns it into one or more comments inside a 9 | //! [TokenStream](proc_macro2::TokenStream). 10 | //! 11 | //! ``` 12 | //! use quote::quote; 13 | //! use quote_doctest::{doc_comment, doc_test, FormatDocTest}; 14 | //! 15 | //! // Takes any `TokenStream` as input (but typically `quote` would be used) 16 | //! let test = doc_test!(quote! { 17 | //! _comment_!("Calling fibonacci with 10 returns 55"); 18 | //! assert_eq!(fibonacci(10), 55); 19 | //! 20 | //! _blank_!(); 21 | //! _comment_!("Calling fibonacci with 1 simply returns 1"); 22 | //! assert_eq!(fibonacci(1), 1); 23 | //! }).unwrap(); 24 | //! 25 | //! let comment = doc_comment("This compares fib inputs and outputs:\n\n"); 26 | //! 27 | //! // Interpolates into a regular `quote` invocation 28 | //! let actual = quote! { 29 | //! #comment 30 | //! #test 31 | //! fn fibonacci(n: u64) -> u64 { 32 | //! match n { 33 | //! 0 => 1, 34 | //! 1 => 1, 35 | //! n => fibonacci(n - 1) + fibonacci(n - 2), 36 | //! } 37 | //! } 38 | //! }; 39 | //! 40 | //! // This is what is generated: 41 | //! let expected = quote! { 42 | //! /// This compares fib inputs and outputs: 43 | //! /// 44 | //! /// ``` 45 | //! /// // Calling fibonacci with 10 returns 55 46 | //! /// assert_eq!(fibonacci(10), 55); 47 | //! /// 48 | //! /// // Calling fibonacci with 1 simply returns 1 49 | //! /// assert_eq!(fibonacci(1), 1); 50 | //! /// ``` 51 | //! fn fibonacci(n: u64) -> u64 { 52 | //! match n { 53 | //! 0 => 1, 54 | //! 1 => 1, 55 | //! n => fibonacci(n - 1) + fibonacci(n - 2), 56 | //! } 57 | //! } 58 | //! }; 59 | //! 60 | //! assert_eq!(expected.format_tokens().unwrap(), actual.format_tokens().unwrap()); 61 | //! 
``` 62 | 63 | // Trick to test README samples (from: https://github.com/rust-lang/cargo/issues/383#issuecomment-720873790) 64 | #[cfg(doctest)] 65 | mod test_readme { 66 | macro_rules! external_doc_test { 67 | ($x:expr) => { 68 | #[doc = $x] 69 | extern "C" {} 70 | }; 71 | } 72 | 73 | external_doc_test!(include_str!("../README.md")); 74 | } 75 | 76 | use std::cmp; 77 | 78 | use proc_macro2::TokenStream; 79 | use quote::{quote, ToTokens}; 80 | use rust_format::Formatter as _; 81 | 82 | const MIN_BUFF_SIZE: usize = 128; 83 | 84 | /// The default amount of formatter indent to remove (when generating `main`) 85 | pub const FORMATTER_INDENT: usize = 4; 86 | 87 | /// Creates a doctest from a [TokenStream](proc_macro2::TokenStream). Typically that is all that 88 | /// is supplied, however, there is an optional parameter of type [DocTestOptions] that can be supplied 89 | /// to fine tune whether or not formating is used, choose a formatter (either `pretty_please` or `rustfmt`), 90 | /// or whether a main function generated (it is required if formatting, but one can be specified manually). 91 | /// 92 | /// This macro returns `Result`. An error could be returned if an issue occurs during 93 | /// the formatting process. 94 | #[macro_export] 95 | macro_rules! 
doc_test { 96 | ($tokens:expr) => { 97 | $crate::make_doc_test($tokens, $crate::DocTestOptions::default()) 98 | }; 99 | ($tokens:expr, $options:expr) => { 100 | $crate::make_doc_test($tokens, $options) 101 | }; 102 | } 103 | 104 | pub use rust_format::{Error, _blank_, _comment_}; 105 | 106 | // *** Formatter *** 107 | 108 | /// The formatter used to format source code - either `prettyplease` or the system `rustfmt` 109 | #[derive(Clone)] 110 | pub enum Formatter { 111 | /// Format using `prettyplease` crate 112 | #[cfg(feature = "pretty_please")] 113 | #[cfg_attr(docsrs, doc(cfg(feature = "pretty_please")))] 114 | PrettyPlease(rust_format::PrettyPlease), 115 | /// Format by calling out to the system `rustfmt` 116 | RustFmt(rust_format::RustFmt), 117 | } 118 | 119 | impl Formatter { 120 | /// Creates a basic default `rustfmt` `Formatter` instance that automatically strips 121 | /// markers from the source code 122 | pub fn new_rust_fmt() -> Self { 123 | let config = 124 | rust_format::Config::new_str().post_proc(rust_format::PostProcess::ReplaceMarkers); 125 | let rust_fmt = rust_format::RustFmt::from_config(config); 126 | Formatter::RustFmt(rust_fmt) 127 | } 128 | 129 | /// Creates a basic default `prettyplease` `Formatter` instance that automatically strips 130 | /// markers from the source code 131 | #[cfg(feature = "pretty_please")] 132 | #[cfg_attr(docsrs, doc(cfg(feature = "pretty_please")))] 133 | pub fn new_pretty_please() -> Self { 134 | let config = 135 | rust_format::Config::new_str().post_proc(rust_format::PostProcess::ReplaceMarkers); 136 | let rust_fmt = rust_format::PrettyPlease::from_config(config); 137 | Formatter::PrettyPlease(rust_fmt) 138 | } 139 | } 140 | 141 | #[cfg(not(feature = "pretty_please"))] 142 | impl Default for Formatter { 143 | #[inline] 144 | fn default() -> Self { 145 | Formatter::new_rust_fmt() 146 | } 147 | } 148 | 149 | #[cfg(feature = "pretty_please")] 150 | impl Default for Formatter { 151 | #[inline] 152 | fn default() -> 
Self { 153 | Formatter::new_pretty_please() 154 | } 155 | } 156 | 157 | /// Optional enum passed to [doc_test] for different configuration options 158 | #[derive(Clone)] 159 | pub enum DocTestOptions { 160 | /// TokenStream is not formatted and no main function is generated. The doctest will be a single line 161 | NoFormatOrGenMain, 162 | /// TokenStream is formatted only by the specified formatter. The source code must be inside a 163 | /// function or it will cause an error 164 | FormatOnly(Formatter), 165 | /// TokenStream is formatted by the specified formatter and a main function is generated that is 166 | /// later stripped after formatting. The `usize` parameter is the number of indent spaces to be 167 | /// stripped (typically this number should be 4) 168 | FormatAndGenMain(Formatter, usize), 169 | } 170 | 171 | impl DocTestOptions { 172 | /// Creates a basic default `rustfmt` `DocTestOptions` instance that generates main, 173 | /// formats, and then strips the main function 174 | #[inline] 175 | pub fn new_rust_fmt() -> Self { 176 | DocTestOptions::FormatAndGenMain(Formatter::new_rust_fmt(), FORMATTER_INDENT) 177 | } 178 | 179 | /// Creates a basic default `prettyplease` `DocTestOptions` instance that generates main, 180 | /// formats, and then strips the main function 181 | #[cfg(feature = "pretty_please")] 182 | #[cfg_attr(docsrs, doc(cfg(feature = "pretty_please")))] 183 | #[inline] 184 | pub fn new_pretty_please() -> Self { 185 | DocTestOptions::FormatAndGenMain(Formatter::new_pretty_please(), FORMATTER_INDENT) 186 | } 187 | 188 | #[inline] 189 | fn options(self) -> (Option, bool, usize) { 190 | match self { 191 | DocTestOptions::NoFormatOrGenMain => (None, false, 0), 192 | DocTestOptions::FormatOnly(fmt) => (Some(fmt), false, 0), 193 | DocTestOptions::FormatAndGenMain(fmt, strip_indent) => (Some(fmt), true, strip_indent), 194 | } 195 | } 196 | } 197 | 198 | #[cfg(not(feature = "pretty_please"))] 199 | impl Default for DocTestOptions { 200 | #[inline] 
201 | fn default() -> Self { 202 | DocTestOptions::new_rust_fmt() 203 | } 204 | } 205 | 206 | #[cfg(feature = "pretty_please")] 207 | impl Default for DocTestOptions { 208 | #[inline] 209 | fn default() -> Self { 210 | DocTestOptions::new_pretty_please() 211 | } 212 | } 213 | 214 | /// Attempts to translate this [TokenStream](proc_macro2::TokenStream) into a [String]. It takes an 215 | /// optional [Formatter] which formats using either `prettyplease` or `rustfmt`. It returns a [String] 216 | /// of the formatted code (or a single line of unformatted text, if `fmt` is `None`) or an [Error] 217 | /// error, if one occurred. 218 | #[inline] 219 | fn tokens_to_string(tokens: TokenStream, fmt: Option) -> Result { 220 | match fmt { 221 | #[cfg(feature = "pretty_please")] 222 | Some(Formatter::PrettyPlease(pp)) => pp.format_tokens(tokens), 223 | Some(Formatter::RustFmt(rust_fmt)) => rust_fmt.format_tokens(tokens), 224 | None => Ok(tokens.to_string()), 225 | } 226 | } 227 | 228 | /// Creates a doc comment for interpolation into a [TokenStream](proc_macro2::TokenStream). It takes 229 | /// a string as input, splits it by line, and inserts one doc comment per line. 230 | /// 231 | /// The value of this function over simply using `///` is that `quote` does not currently interpolate. 232 | /// It will for `#[doc]` but only for one comment at a time. This function allows insertion of any 233 | /// number of lines with one comment per line. 234 | /// 235 | /// ``` 236 | /// use quote::quote; 237 | /// use quote_doctest::{doc_comment, FormatDocTest}; 238 | /// 239 | /// let actual = doc_comment("this\nwill be\n\nmultiple comments\n\n"); 240 | /// let expected = quote! 
{ 241 | /// /// this 242 | /// /// will be 243 | /// /// 244 | /// /// multiple comments 245 | /// /// 246 | /// }; 247 | /// 248 | /// assert_eq!(expected.format_tokens().unwrap(), actual.format_tokens().unwrap()); 249 | /// ``` 250 | pub fn doc_comment(comment: impl AsRef) -> TokenStream { 251 | let comment = comment.as_ref(); 252 | 253 | // Unlikely to be this big, but better than reallocating 254 | let mut buffer = String::with_capacity(cmp::max(comment.len() * 2, MIN_BUFF_SIZE)); 255 | 256 | // Build code from lines 257 | for line in comment.lines() { 258 | // Except for empty lines, all lines should get a space at the front 259 | if !line.is_empty() { 260 | buffer.push(' '); 261 | } 262 | buffer.push_str(line); 263 | buffer.push('\n'); 264 | } 265 | 266 | let doc_comment: Vec<_> = buffer.lines().collect(); 267 | quote! { #( #[doc = #doc_comment] )* } 268 | } 269 | 270 | #[doc(hidden)] 271 | pub fn make_doc_test( 272 | mut tokens: TokenStream, 273 | options: DocTestOptions, 274 | ) -> Result { 275 | let (fmt, gen_main, strip_indent) = options.options(); 276 | 277 | // Surround with main, if needed (we can't remove it unless we are formatting) 278 | if gen_main { 279 | tokens = quote! { 280 | fn main() { #tokens } 281 | }; 282 | } 283 | 284 | // Format, if required, and then break into lines 285 | let src = tokens_to_string(tokens, fmt)?; 286 | let lines = to_source_lines(&src, gen_main); 287 | 288 | // Assemble the lines back into a string while indenting 289 | // NOTE: strip_indent will be zero unless gen_main was set 290 | let indent = " ".repeat(strip_indent); 291 | let doc_test = assemble_doc_test(lines, src.len(), indent); 292 | let doc_test: Vec<_> = doc_test.lines().collect(); 293 | 294 | // Turn back into a token stream and into a doc test 295 | Ok(quote! 
{ 296 | /// ``` 297 | #( #[doc = #doc_test] )* 298 | /// ``` 299 | }) 300 | } 301 | 302 | fn to_source_lines(src: &str, gen_main: bool) -> Vec<&str> { 303 | // Split string source code into lines 304 | let lines = src.lines(); 305 | 306 | // Remove `fn main () {`, if we added it 307 | if gen_main { 308 | // Skip 'fn main {' 309 | let mut lines = lines.skip(1).collect::>(); 310 | // Remove the trailing `}` 311 | lines.pop(); 312 | lines 313 | } else { 314 | lines.collect() 315 | } 316 | } 317 | 318 | fn assemble_doc_test(lines: Vec<&str>, cap: usize, prefix: String) -> String { 319 | // Unlikely to be this big, but better than reallocating 320 | let mut buffer = String::with_capacity(cmp::max(cap * 2, MIN_BUFF_SIZE)); 321 | 322 | // Build code from lines 323 | for mut line in lines { 324 | // Strip whitespace left over from main, if any (else noop) 325 | line = line.strip_prefix(&prefix).unwrap_or(line); 326 | 327 | // Except for empty lines, all lines should get a space at the front 328 | if !line.is_empty() { 329 | buffer.push(' '); 330 | } 331 | buffer.push_str(line); 332 | buffer.push('\n'); 333 | } 334 | 335 | buffer 336 | } 337 | 338 | #[cfg(not(feature = "pretty_please"))] 339 | #[inline] 340 | fn doc_test_formatter() -> impl rust_format::Formatter { 341 | let config = rust_format::Config::new_str() 342 | .post_proc(rust_format::PostProcess::ReplaceMarkersAndDocBlocks); 343 | rust_format::RustFmt::from_config(config) 344 | } 345 | 346 | #[cfg(feature = "pretty_please")] 347 | #[inline] 348 | fn doc_test_formatter() -> impl rust_format::Formatter { 349 | rust_format::PrettyPlease::default() 350 | } 351 | 352 | /// Trait for converting [doc_test] results into a well formatted `String` 353 | pub trait FormatDocTest: ToTokens { 354 | /// Convert results of a [doc_test] (or any other value that implements `ToTokens` that is valid 355 | /// Rust source) into a formatted `String`. This will also convert doc blocks (`#[doc = ""]`) into 356 | /// doc comments (`///`). 
This can be useful for display or equality testing in a unit test. An 357 | /// error is returned if an issue occurs during the formatting process 358 | /// 359 | /// NOTE: If `pretty_please` is not enabled then `rustfmt` will be used via the `rust_format` crate 360 | /// and when translating doc blocks will also translate any [`_comment_!`] or [`_blank_!`] markers. 361 | /// If the source of this function came from [doc_test], these will already be translated anyway, 362 | /// but this is mentioned for awareness. 363 | fn format_tokens(self) -> Result 364 | where 365 | Self: Sized, 366 | { 367 | // We need a function - doc blocks alone won't pass the formatter 368 | let doc_test = quote! { 369 | #self 370 | fn main() {} 371 | }; 372 | 373 | // Format (and translate doc blocks to doc comments 374 | let formatter = doc_test_formatter(); 375 | let source = formatter.format_tokens(doc_test)?; 376 | 377 | // Convert into lines so we trim off the last line (our added main function) 378 | let mut lines: Vec<_> = source.lines().collect(); 379 | // All in one line because there will never be anything inside 380 | lines.pop(); // fn main() {} 381 | 382 | // Unlikely to be this big, but better than reallocating 383 | let mut buffer = String::with_capacity(cmp::max(source.len() * 2, MIN_BUFF_SIZE)); 384 | 385 | for line in lines { 386 | buffer.push_str(line); 387 | buffer.push('\n'); 388 | } 389 | 390 | buffer.shrink_to_fit(); 391 | Ok(buffer) 392 | } 393 | } 394 | 395 | impl FormatDocTest for T where T: ToTokens {} 396 | 397 | #[cfg(test)] 398 | mod tests { 399 | use pretty_assertions::assert_eq; 400 | use quote::quote; 401 | 402 | use crate::{ 403 | tokens_to_string, DocTestOptions, Error, FormatDocTest, Formatter, FORMATTER_INDENT, 404 | }; 405 | 406 | #[test] 407 | fn doctest_format() { 408 | let actual = quote! 
{ 409 | /// ``` 410 | /// assert_eq!(fibonacci(10), 55); 411 | /// assert_eq!(fibonacci(1), 1); 412 | /// ``` 413 | }; 414 | 415 | let expected = r#"/// ``` 416 | /// assert_eq!(fibonacci(10), 55); 417 | /// assert_eq!(fibonacci(1), 1); 418 | /// ``` 419 | "#; 420 | 421 | assert_eq!(expected, actual.format_tokens().unwrap()); 422 | } 423 | 424 | #[test] 425 | fn rustfmt_format_only() { 426 | format_only(Formatter::new_rust_fmt()); 427 | } 428 | 429 | #[cfg(feature = "pretty_please")] 430 | #[test] 431 | fn prettyplz_format_only() { 432 | format_only(Formatter::new_pretty_please()); 433 | } 434 | 435 | fn format_only(fmt: Formatter) { 436 | let code = quote! { 437 | fn main() { 438 | assert_eq!(fibonacci(10), 55); 439 | assert_eq!(fibonacci(1), 1); 440 | } 441 | }; 442 | 443 | let actual = doc_test!(code, DocTestOptions::FormatOnly(fmt)).unwrap(); 444 | 445 | let expected = quote! { 446 | /// ``` 447 | /// fn main() { 448 | /// assert_eq!(fibonacci(10), 55); 449 | /// assert_eq!(fibonacci(1), 1); 450 | /// } 451 | /// ``` 452 | }; 453 | 454 | assert_eq!( 455 | expected.format_tokens().unwrap(), 456 | actual.format_tokens().unwrap() 457 | ); 458 | } 459 | 460 | #[test] 461 | fn no_format_or_gen_main() { 462 | let code = quote! { 463 | fn main() { 464 | assert_eq!(fibonacci(10), 55); 465 | assert_eq!(fibonacci(1), 1); 466 | } 467 | }; 468 | 469 | let actual = doc_test!(code, DocTestOptions::NoFormatOrGenMain).unwrap(); 470 | let expected = quote! { 471 | /// ``` 472 | /// fn main () { assert_eq ! (fibonacci (10) , 55) ; assert_eq ! 
(fibonacci (1) , 1) ; } 473 | /// ``` 474 | }; 475 | 476 | assert_eq!( 477 | expected.format_tokens().unwrap(), 478 | actual.format_tokens().unwrap() 479 | ); 480 | } 481 | 482 | #[test] 483 | fn rustfmt_bad_source_code() { 484 | bad_source_code(Formatter::new_rust_fmt()); 485 | } 486 | 487 | #[cfg(feature = "pretty_please")] 488 | #[test] 489 | fn prettyplz_bad_source_code() { 490 | bad_source_code(Formatter::new_pretty_please()); 491 | } 492 | 493 | fn bad_source_code(fmt: Formatter) { 494 | match tokens_to_string(quote! {"blah blah blah"}, Some(fmt)) { 495 | Err(Error::BadSourceCode(_)) => {} 496 | _ => panic!("'rustfmt' should have failed due to bad source code"), 497 | } 498 | } 499 | 500 | #[test] 501 | fn rustfmt_comment_marker() { 502 | comment_marker(Formatter::new_rust_fmt()); 503 | } 504 | 505 | #[cfg(feature = "pretty_please")] 506 | #[test] 507 | fn prettyplz_comment_marker() { 508 | comment_marker(Formatter::new_pretty_please()); 509 | } 510 | 511 | fn comment_marker(fmt: Formatter) { 512 | let code = quote! { 513 | assert_eq!(fibonacci(10), 55); 514 | 515 | // Should translate to a blank line 516 | _comment_!(); 517 | _comment_!("first line\n\nsecond line"); 518 | assert_eq!(fibonacci(1), 1); 519 | }; 520 | 521 | let actual = doc_test!( 522 | code, 523 | DocTestOptions::FormatAndGenMain(fmt, FORMATTER_INDENT) 524 | ) 525 | .unwrap(); 526 | 527 | let expected = quote! 
{ 528 | /// ``` 529 | /// assert_eq!(fibonacci(10), 55); 530 | /// // 531 | /// // first line 532 | /// // 533 | /// // second line 534 | /// assert_eq!(fibonacci(1), 1); 535 | /// ``` 536 | }; 537 | 538 | assert_eq!( 539 | expected.format_tokens().unwrap(), 540 | actual.format_tokens().unwrap() 541 | ); 542 | } 543 | 544 | #[test] 545 | fn rustfmt_blank_marker() { 546 | blank_marker(Formatter::new_rust_fmt()); 547 | } 548 | 549 | #[cfg(feature = "pretty_please")] 550 | #[test] 551 | fn prettyplz_blank_marker() { 552 | blank_marker(Formatter::new_pretty_please()); 553 | } 554 | 555 | fn blank_marker(fmt: Formatter) { 556 | let code = quote! { 557 | assert_eq!(fibonacci(10), 55); 558 | 559 | // Should translate to a single blank line 560 | _blank_!(); 561 | assert_eq!(fibonacci(1), 1); 562 | 563 | // Should translate to multiple blank lines 564 | _blank_!(2); 565 | }; 566 | 567 | let actual = doc_test!( 568 | code, 569 | DocTestOptions::FormatAndGenMain(fmt, FORMATTER_INDENT) 570 | ) 571 | .unwrap(); 572 | 573 | let expected = quote! { 574 | /// ``` 575 | /// assert_eq!(fibonacci(10), 55); 576 | /// 577 | /// assert_eq!(fibonacci(1), 1); 578 | /// 579 | /// 580 | /// ``` 581 | }; 582 | 583 | assert_eq!( 584 | expected.format_tokens().unwrap(), 585 | actual.format_tokens().unwrap() 586 | ); 587 | } 588 | 589 | #[test] 590 | fn rustfmt_inner_string() { 591 | inner_string(Formatter::new_rust_fmt()); 592 | } 593 | 594 | #[cfg(feature = "pretty_please")] 595 | #[test] 596 | fn prettyplz_inner_string() { 597 | inner_string(Formatter::new_pretty_please()); 598 | } 599 | 600 | fn inner_string(fmt: Formatter) { 601 | let code = quote! 
{ 602 | println!("inner string"); 603 | // Escaped double quote 604 | println!("inner \""); 605 | println!("inner \r"); 606 | println!("inner \\"); 607 | 608 | println!(r"inner raw string"); 609 | println!(b"inner byte string"); 610 | println!(br"inner raw byte string"); 611 | 612 | println!(r#"inner raw string"#); 613 | println!(br#"inner byte raw string"#); 614 | 615 | // Multiple 616 | println!(r#"{}"#, "multiple"); 617 | 618 | // Raw entry fake out 619 | r(); 620 | 621 | // Raw exit fake out 1 622 | println!(r##"inner raw " string"##); 623 | // Raw exit fake out 2 624 | println!(r##"inner raw "# string"##); 625 | }; 626 | 627 | let actual = doc_test!( 628 | code, 629 | DocTestOptions::FormatAndGenMain(fmt, FORMATTER_INDENT) 630 | ) 631 | .unwrap(); 632 | 633 | let expected = quote! { 634 | /// ``` 635 | /// println!("inner string"); 636 | /// println!("inner \""); 637 | /// println!("inner \r"); 638 | /// println!("inner \\"); 639 | /// println!(r"inner raw string"); 640 | /// println!(b"inner byte string"); 641 | /// println!(br"inner raw byte string"); 642 | /// println!(r#"inner raw string"#); 643 | /// println!(br#"inner byte raw string"#); 644 | /// println!(r#"{}"#, "multiple"); 645 | /// r(); 646 | /// println!(r##"inner raw " string"##); 647 | /// println!(r##"inner raw "# string"##); 648 | /// ``` 649 | }; 650 | 651 | assert_eq!( 652 | expected.format_tokens().unwrap(), 653 | actual.format_tokens().unwrap() 654 | ); 655 | } 656 | } 657 | -------------------------------------------------------------------------------- /rust_format/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![cfg_attr(docsrs, feature(doc_cfg))] 2 | #![warn(missing_docs)] 3 | 4 | //! A Rust source code formatting crate with a unified interface for string, file, and 5 | //! [TokenStream](proc_macro2::TokenStream) input. It currently supports 6 | //! [rustfmt](https://crates.io/crates/rustfmt-nightly) and 7 | //! 
[prettyplease](https://crates.io/crates/prettyplease). 8 | //! 9 | //! ``` 10 | //! use rust_format::{Formatter, RustFmt}; 11 | //! 12 | //! let source = r#"fn main() { println!("Hello World!"); }"#; 13 | //! 14 | //! let actual = RustFmt::default().format_str(source).unwrap(); 15 | //! let expected = r#"fn main() { 16 | //! println!("Hello World!"); 17 | //! } 18 | //! "#; 19 | //! 20 | //! assert_eq!(expected, actual); 21 | //! ``` 22 | 23 | #[cfg(feature = "post_process")] 24 | mod replace; 25 | 26 | #[cfg(not(feature = "post_process"))] 27 | mod replace { 28 | use std::borrow::Cow; 29 | 30 | use crate::Error; 31 | 32 | #[inline] 33 | pub(crate) fn replace_markers(s: &str, _replace_doc_blocks: bool) -> Result, Error> { 34 | Ok(Cow::Borrowed(s)) 35 | } 36 | } 37 | 38 | // Trick to test README samples (from: https://github.com/rust-lang/cargo/issues/383#issuecomment-720873790) 39 | #[cfg(feature = "post_process")] 40 | #[cfg(feature = "token_stream")] 41 | #[cfg(doctest)] 42 | mod test_readme { 43 | macro_rules! external_doc_test { 44 | ($x:expr) => { 45 | #[doc = $x] 46 | extern "C" {} 47 | }; 48 | } 49 | 50 | external_doc_test!(include_str!("../README.md")); 51 | } 52 | 53 | use std::borrow::Cow; 54 | use std::collections::HashMap; 55 | use std::default::Default; 56 | use std::ffi::{OsStr, OsString}; 57 | use std::hash::Hash; 58 | use std::io::{Read, Write}; 59 | use std::path::{Path, PathBuf}; 60 | use std::process::{Command, Stdio}; 61 | use std::{env, fmt, fs, io, string}; 62 | 63 | const RUST_FMT: &str = "rustfmt"; 64 | const RUST_FMT_KEY: &str = "RUSTFMT"; 65 | 66 | // *** Marker macros *** 67 | 68 | /// A "marker" macro used to mark locations in the source code where blank lines should be inserted. 69 | /// If no parameter is given, one blank line is assumed, otherwise the integer literal specified 70 | /// gives the # of blank lines to insert. 71 | /// 72 | /// It is important to understand this is NOT actually a macro that is executed. 
In fact, it is just 73 | /// here for documentation purposes. Instead, this works as a raw set of tokens in the source code 74 | /// that we match against verbatim. This means it cannot be renamed on import for example, and it MUST be 75 | /// invoked as `_blank_!(`, then an optional Rust integer literal, and then `);`. 76 | /// 77 | /// Actually executing this macro has no effect and it is not meant to even be imported. 78 | #[cfg(feature = "post_process")] 79 | #[cfg_attr(docsrs, doc(cfg(feature = "post_process")))] 80 | #[macro_export] 81 | macro_rules! _blank_ { 82 | () => {}; 83 | ($lit:literal) => {}; 84 | } 85 | 86 | /// A "marker" macro used to mark locations in the source code where comments should be inserted. 87 | /// If no parameter is given, a single blank comment is assumed, otherwise the string literal 88 | /// specified is broken into lines and those comments will be inserted individually. 89 | /// 90 | /// It is important to understand this is NOT actually a macro that is executed. In fact, it is just 91 | /// here for documentation purposes. Instead, this works as a raw set of tokens in the source code 92 | /// that we match against verbatim. This means it cannot be renamed on import for example, and it MUST be 93 | /// invoked as `_comment_!(`, then an optional Rust `str` literal (regular or raw, not byte string), 94 | /// and then `);`. 95 | /// 96 | /// Actually executing this macro has no effect and it is not meant to even be imported. 97 | #[cfg(feature = "post_process")] 98 | #[cfg_attr(docsrs, doc(cfg(feature = "post_process")))] 99 | #[macro_export] 100 | macro_rules! 
// *** Error ***

/// This error is returned when errors are triggered during the formatting process
#[derive(Debug)]
pub enum Error {
    /// An I/O related error occurred
    IOError(io::Error),
    /// The response of formatting was not valid UTF8
    UTFConversionError(string::FromUtf8Error),
    /// The source code has bad syntax and could not be formatted
    BadSourceCode(String),
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // Delegate to the wrapped errors' own Display impls (the fully-qualified
            // `<Type as fmt::Display>` paths were stripped to bare `::fmt` in the dump)
            Error::IOError(err) => <io::Error as fmt::Display>::fmt(err, f),
            Error::UTFConversionError(err) => <string::FromUtf8Error as fmt::Display>::fmt(err, f),
            Error::BadSourceCode(cause) => {
                f.write_str("An error occurred while formatting the source code: ")?;
                f.write_str(cause)
            }
        }
    }
}

impl std::error::Error for Error {}

impl From<io::Error> for Error {
    #[inline]
    fn from(err: io::Error) -> Self {
        Error::IOError(err)
    }
}

impl From<string::FromUtf8Error> for Error {
    #[inline]
    fn from(err: string::FromUtf8Error) -> Self {
        Error::UTFConversionError(err)
    }
}

#[cfg(feature = "syn")]
impl From<syn::Error> for Error {
    #[inline]
    fn from(err: syn::Error) -> Self {
        Error::BadSourceCode(err.to_string())
    }
}

// *** Edition ***

/// The Rust edition the source code uses
#[derive(Clone, Copy, Debug)]
pub enum Edition {
    /// Rust 2015 edition
    Rust2015,
    /// Rust 2018 edition
    Rust2018,
    /// Rust 2021 edition
    Rust2021,
}

impl Edition {
    // Edition rendered as the CLI value passed to `rustfmt --edition`
    #[inline]
    fn as_os_str(self) -> &'static OsStr {
        match self {
            Edition::Rust2015 => "2015",
            Edition::Rust2018 => "2018",
            Edition::Rust2021 => "2021",
        }
        .as_ref()
    }
}

impl Default for Edition {
    #[inline]
    fn default() -> Self {
        Edition::Rust2021
    }
}

// *** Post Processing ***

/// Post format processing options - optionally replace comment/blank markers and doc blocks
#[derive(Clone, Copy, Debug)]
pub enum PostProcess {
    /// No post processing after formatting (default)
    None,

    /// Replace [`_blank_!`] and [`_comment_!`] markers
    #[cfg(feature = "post_process")]
    #[cfg_attr(docsrs, doc(cfg(feature = "post_process")))]
    ReplaceMarkers,

    /// Replace [`_blank_!`] and [`_comment_!`] markers and `#[doc = ""]` (with `///`)
    #[cfg(feature = "post_process")]
    #[cfg_attr(docsrs, doc(cfg(feature = "post_process")))]
    ReplaceMarkersAndDocBlocks,
}

impl PostProcess {
    /// Returns true if blank and comment markers should be replaced in the formatted source or
    /// false if they should not be
    #[inline]
    pub fn replace_markers(self) -> bool {
        !matches!(self, PostProcess::None)
    }

    /// Returns true if doc blocks should be replaced in the formatted source or false if they
    /// should not be
    #[cfg(feature = "post_process")]
    #[inline]
    pub fn replace_doc_blocks(self) -> bool {
        matches!(self, PostProcess::ReplaceMarkersAndDocBlocks)
    }

    /// Returns true if doc blocks should be replaced in the formatted source or false if they
    /// should not be (always false: doc-block replacement requires the `post_process` feature)
    #[cfg(not(feature = "post_process"))]
    #[inline]
    pub fn replace_doc_blocks(self) -> bool {
        false
    }
}

impl Default for PostProcess {
    #[inline]
    fn default() -> Self {
        PostProcess::None
    }
}
242 | #[derive(Clone, Debug, Default)] 243 | pub struct Config 244 | where 245 | K: Eq + Hash + AsRef, 246 | P: Into, 247 | V: AsRef, 248 | { 249 | rust_fmt: Option

, 250 | edition: Edition, 251 | post_proc: PostProcess, 252 | options: HashMap, 253 | } 254 | 255 | impl<'a, 'b> Config<&'a str, &'b str, &'a str> { 256 | /// Creates a new blank configuration with `&str` for all type params 257 | /// (if you wish to use different types, use [new](Config::new) instead) 258 | #[inline] 259 | pub fn new_str() -> Self { 260 | Self::new() 261 | } 262 | 263 | /// Creates a new configuration from the given [HashMap] of options using `&str` for all type params 264 | /// (if you wish to use different types, use [from_hash_map](Config::from_hash_map) instead) 265 | #[inline] 266 | pub fn from_hash_map_str(options: HashMap<&'a str, &'a str>) -> Self { 267 | Self::from_hash_map(options) 268 | } 269 | } 270 | 271 | impl Config 272 | where 273 | K: Eq + Hash + AsRef, 274 | P: Into, 275 | V: AsRef, 276 | { 277 | /// Creates a new blank configuration without type parameter assumptions 278 | #[inline] 279 | pub fn new() -> Self { 280 | Self::from_hash_map(HashMap::default()) 281 | } 282 | 283 | /// Creates a new configuration from the given [HashMap] of options with no type assumptions 284 | #[inline] 285 | pub fn from_hash_map(options: HashMap) -> Self { 286 | Self { 287 | rust_fmt: None, 288 | edition: Edition::Rust2021, 289 | post_proc: PostProcess::None, 290 | options, 291 | } 292 | } 293 | 294 | /// Set the path to the `rustfmt` binary to use (`RustFmt` only, ignored by `PrettyPlease`). 
295 | /// This takes precedence over the `RUSTFMT` environment variable, if specified 296 | #[inline] 297 | pub fn rust_fmt_path(mut self, path: P) -> Self { 298 | self.rust_fmt = Some(path); 299 | self 300 | } 301 | 302 | /// Set the Rust edition of the source input (`RustFmt` only, ignored by `PrettyPlease`) 303 | #[inline] 304 | pub fn edition(mut self, edition: Edition) -> Self { 305 | self.edition = edition; 306 | self 307 | } 308 | 309 | /// Set the post processing option after formatting (used by both `RustFmt` and `PrettyPlease`) 310 | #[inline] 311 | pub fn post_proc(mut self, post_proc: PostProcess) -> Self { 312 | self.post_proc = post_proc; 313 | self 314 | } 315 | 316 | /// Set a key/value pair option (`RustFmt` only, ignored by `PrettyPlease`). 317 | /// See [here](https://rust-lang.github.io/rustfmt/) for a list of possible options 318 | #[inline] 319 | pub fn option(mut self, key: K, value: V) -> Self { 320 | self.options.insert(key, value); 321 | self 322 | } 323 | } 324 | 325 | // *** Misc. format related functions *** 326 | 327 | #[inline] 328 | fn post_process(post_proc: PostProcess, source: String) -> Result { 329 | if post_proc.replace_markers() { 330 | match replace::replace_markers(&source, post_proc.replace_doc_blocks())? 
{ 331 | // No change 332 | Cow::Borrowed(_) => Ok(source), 333 | // Changed 334 | Cow::Owned(source) => Ok(source), 335 | } 336 | } else { 337 | Ok(source) 338 | } 339 | } 340 | 341 | #[inline] 342 | fn file_to_string(path: impl AsRef) -> Result { 343 | // Read our file into a string 344 | let mut file = fs::File::open(path.as_ref())?; 345 | let len = file.metadata()?.len(); 346 | let mut source = String::with_capacity(len as usize); 347 | 348 | file.read_to_string(&mut source)?; 349 | Ok(source) 350 | } 351 | 352 | #[inline] 353 | fn string_to_file(path: impl AsRef, source: &str) -> Result<(), Error> { 354 | let mut file = fs::File::create(path)?; 355 | file.write_all(source.as_bytes())?; 356 | Ok(()) 357 | } 358 | 359 | // *** Formatter *** 360 | 361 | /// A unified interface to all formatters. It allows for formatting from string, file, or 362 | /// [TokenStream](proc_macro2::TokenStream) 363 | pub trait Formatter { 364 | /// Format the given string and return the results in another `String`. An error is returned 365 | /// if any issues occur during formatting 366 | fn format_str(&self, source: impl AsRef) -> Result; 367 | 368 | /// Format the given file specified hte path and overwrite the file with the results. An error 369 | /// is returned if any issues occur during formatting 370 | fn format_file(&self, path: impl AsRef) -> Result<(), Error> { 371 | let source = file_to_string(path.as_ref())?; 372 | let result = self.format_str(source)?; 373 | string_to_file(path, &result) 374 | } 375 | 376 | /// Format the given [TokenStream](proc_macro2::TokenStream) and return the results in a `String`. 
377 | /// An error is returned if any issues occur during formatting 378 | #[cfg(feature = "token_stream")] 379 | #[cfg_attr(docsrs, doc(cfg(feature = "token_stream")))] 380 | #[inline] 381 | fn format_tokens(&self, tokens: proc_macro2::TokenStream) -> Result { 382 | self.format_str(tokens.to_string()) 383 | } 384 | } 385 | 386 | // *** Rust Fmt *** 387 | 388 | /// This formatter uses `rustfmt` for formatting source code 389 | /// 390 | /// An example using a custom configuration: 391 | /// ``` 392 | /// use rust_format::{Config, Edition, Formatter, RustFmt}; 393 | /// 394 | /// let source = r#"use std::marker; use std::io; mod test; mod impls;"#; 395 | /// 396 | /// let mut config = Config::new_str() 397 | /// .edition(Edition::Rust2018) 398 | /// .option("reorder_imports", "false") 399 | /// .option("reorder_modules", "false"); 400 | /// let rustfmt = RustFmt::from_config(config); 401 | /// 402 | /// let actual = rustfmt.format_str(source).unwrap(); 403 | /// let expected = r#"use std::marker; 404 | /// use std::io; 405 | /// mod test; 406 | /// mod impls; 407 | /// "#; 408 | /// 409 | /// assert_eq!(expected, actual); 410 | /// ``` 411 | #[derive(Clone)] 412 | pub struct RustFmt { 413 | rust_fmt: PathBuf, 414 | edition: Edition, 415 | post_proc: PostProcess, 416 | config_str: Option, 417 | } 418 | 419 | impl RustFmt { 420 | /// Creates a new instance of `RustFmt` using a default configuration 421 | #[inline] 422 | pub fn new() -> Self { 423 | Self::build(None as Option>) 424 | } 425 | 426 | /// Creates a new instance of the formatter from the given configuration 427 | #[inline] 428 | pub fn from_config(config: Config) -> Self 429 | where 430 | K: Default + Eq + Hash + AsRef, 431 | P: Default + Into, 432 | V: Default + AsRef, 433 | { 434 | Self::build(Some(config)) 435 | } 436 | 437 | fn build(config: Option>) -> Self 438 | where 439 | K: Default + Eq + Hash + AsRef, 440 | P: Default + Into, 441 | V: Default + AsRef, 442 | { 443 | let config = 
config.unwrap_or_default(); 444 | 445 | // Use 'rustfmt' specified by the config first, and if not, environment var, if specified, 446 | // else use the default 447 | let rust_fmt = match config.rust_fmt { 448 | Some(path) => path.into(), 449 | None => env::var_os(RUST_FMT_KEY) 450 | .unwrap_or_else(|| RUST_FMT.parse().unwrap()) 451 | .into(), 452 | }; 453 | 454 | let edition = config.edition; 455 | let config_str = Self::build_config_str(config.options); 456 | Self { 457 | rust_fmt, 458 | edition, 459 | post_proc: config.post_proc, 460 | config_str, 461 | } 462 | } 463 | 464 | fn build_config_str(cfg_options: HashMap) -> Option 465 | where 466 | K: Default + AsRef, 467 | V: Default + AsRef, 468 | { 469 | if !cfg_options.is_empty() { 470 | // Random # that should hold a few options 471 | let mut options = OsString::with_capacity(512); 472 | let iter = cfg_options.iter(); 473 | 474 | for (idx, (k, v)) in iter.enumerate() { 475 | // Build a comma separated list but only between items (no trailing comma) 476 | if idx > 0 { 477 | options.push(","); 478 | } 479 | options.push(k); 480 | options.push("="); 481 | options.push(v); 482 | } 483 | 484 | Some(options) 485 | } else { 486 | None 487 | } 488 | } 489 | 490 | fn build_args<'a, P>(&'a self, path: Option<&'a P>) -> Vec<&'a OsStr> 491 | where 492 | P: AsRef + ?Sized, 493 | { 494 | let mut args = match path { 495 | Some(path) => { 496 | let mut args = Vec::with_capacity(5); 497 | args.push(path.as_ref().as_ref()); 498 | args 499 | } 500 | None => Vec::with_capacity(4), 501 | }; 502 | 503 | args.push("--edition".as_ref()); 504 | args.push(self.edition.as_os_str()); 505 | 506 | if let Some(config_str) = &self.config_str { 507 | args.push("--config".as_ref()); 508 | args.push(config_str); 509 | } 510 | 511 | args 512 | } 513 | } 514 | 515 | impl Default for RustFmt { 516 | #[inline] 517 | fn default() -> Self { 518 | Self::new() 519 | } 520 | } 521 | 522 | impl Formatter for RustFmt { 523 | fn format_str(&self, source: 
impl AsRef) -> Result { 524 | let args = self.build_args(None as Option<&Path>); 525 | 526 | // Launch rustfmt 527 | let mut proc = Command::new(&self.rust_fmt) 528 | .stdin(Stdio::piped()) 529 | .stdout(Stdio::piped()) 530 | .stderr(Stdio::piped()) 531 | .args(args) 532 | .spawn()?; 533 | 534 | // Get stdin and send our source code to it to be formatted 535 | // Safety: Can't panic - we captured stdin above 536 | let mut stdin = proc.stdin.take().unwrap(); 537 | stdin.write_all(source.as_ref().as_bytes())?; 538 | // Close stdin 539 | drop(stdin); 540 | 541 | // Parse the results and return stdout/stderr 542 | let output = proc.wait_with_output()?; 543 | let stderr = String::from_utf8(output.stderr)?; 544 | 545 | if output.status.success() { 546 | let stdout = String::from_utf8(output.stdout)?; 547 | post_process(self.post_proc, stdout) 548 | } else { 549 | Err(Error::BadSourceCode(stderr)) 550 | } 551 | } 552 | 553 | fn format_file(&self, path: impl AsRef) -> Result<(), Error> { 554 | // Just use regular string method if doing post processing so we don't write to file twice 555 | if self.post_proc.replace_markers() { 556 | let source = file_to_string(path.as_ref())?; 557 | let result = self.format_str(source)?; 558 | string_to_file(path, &result) 559 | } else { 560 | let args = self.build_args(Some(path.as_ref())); 561 | 562 | // Launch rustfmt 563 | let proc = Command::new(&self.rust_fmt) 564 | .stderr(Stdio::piped()) 565 | .args(args) 566 | .spawn()?; 567 | 568 | // Parse the results and return stdout/stderr 569 | let output = proc.wait_with_output()?; 570 | let stderr = String::from_utf8(output.stderr)?; 571 | 572 | if output.status.success() { 573 | Ok(()) 574 | } else { 575 | Err(Error::BadSourceCode(stderr)) 576 | } 577 | } 578 | } 579 | } 580 | 581 | // *** Pretty Please *** 582 | 583 | /// This formatter uses [prettyplease](https://crates.io/crates/prettyplease) for formatting source code 584 | /// 585 | /// From string: 586 | /// ``` 587 | /// use 
// *** Pretty Please ***

/// This formatter uses [prettyplease](https://crates.io/crates/prettyplease) for formatting source code
///
/// From string:
/// ```
/// use rust_format::{Formatter, PrettyPlease};
///
/// let source = r#"fn main() { println!("Hello World!"); }"#;
///
/// let actual = PrettyPlease::default().format_str(source).unwrap();
/// let expected = r#"fn main() {
///     println!("Hello World!");
/// }
/// "#;
///
/// assert_eq!(expected, actual);
/// ```
///
/// From token stream:
/// ```
/// use quote::quote;
/// use rust_format::{Formatter, PrettyPlease};
///
/// let source = quote! { fn main() { println!("Hello World!"); } };
///
/// let actual = PrettyPlease::default().format_tokens(source).unwrap();
/// let expected = r#"fn main() {
///     println!("Hello World!");
/// }
/// "#;
///
/// assert_eq!(expected, actual);
/// ```
#[cfg(feature = "pretty_please")]
#[cfg_attr(docsrs, doc(cfg(feature = "pretty_please")))]
#[derive(Clone, Default)]
pub struct PrettyPlease {
    // Only post processing is honored; rustfmt-specific Config options are ignored
    post_proc: PostProcess,
}

#[cfg(feature = "pretty_please")]
impl PrettyPlease {
    /// Creates a new instance of `PrettyPlease` using a default configuration
    #[inline]
    pub fn new() -> Self {
        Self::build(None as Option<Config<&str, &str, &str>>)
    }

    /// Creates a new instance of `PrettyPlease` from the given configuration
    #[inline]
    pub fn from_config<K, P, V>(config: Config<K, P, V>) -> Self
    where
        K: Default + Eq + Hash + AsRef<OsStr>,
        P: Default + Into<PathBuf>,
        V: Default + AsRef<OsStr>,
    {
        Self::build(Some(config))
    }

    fn build<K, P, V>(config: Option<Config<K, P, V>>) -> Self
    where
        K: Default + Eq + Hash + AsRef<OsStr>,
        P: Default + Into<PathBuf>,
        V: Default + AsRef<OsStr>,
    {
        let config = config.unwrap_or_default();

        Self {
            post_proc: config.post_proc,
        }
    }

    // Unparse the syntax tree and run the configured post processing over the result
    #[inline]
    fn format(&self, f: &syn::File) -> Result<String, Error> {
        let result = prettyplease::unparse(f);
        post_process(self.post_proc, result)
    }
}

#[cfg(feature = "pretty_please")]
impl Formatter for PrettyPlease {
    #[inline]
    fn format_str(&self, source: impl AsRef<str>) -> Result<String, Error> {
        let f = syn::parse_file(source.as_ref())?;
        self.format(&f)
    }

    #[inline]
    #[cfg(feature = "token_stream")]
    #[cfg_attr(docsrs, doc(cfg(feature = "token_stream")))]
    fn format_tokens(&self, tokens: proc_macro2::TokenStream) -> Result<String, Error> {
        let f = syn::parse2::<syn::File>(tokens)?;
        self.format(&f)
    }
}
RustFmt::new().format_str("bogus") { 730 | Err(Error::IOError(_)) => {} 731 | _ => panic!("'rustfmt' should have failed due to bad path"), 732 | }, 733 | ); 734 | } 735 | 736 | #[test] 737 | fn rustfmt_bad_config_path() { 738 | temp_env::with_var(RUST_FMT_KEY, Some(RUST_FMT), || { 739 | let config = 740 | Config::new_str().rust_fmt_path("this_is_never_going_to_be_a_valid_executable"); 741 | match RustFmt::from_config(config).format_str("bogus") { 742 | Err(Error::IOError(_)) => {} 743 | _ => panic!("'rustfmt' should have failed due to bad path"), 744 | } 745 | }); 746 | } 747 | 748 | fn format_file(fmt: impl Formatter, expected: &str) { 749 | // Write source code to file 750 | let source = r#"#[doc = " This is main"] fn main() { _comment_!("This prints hello world"); 751 | println!("Hello World!"); _blank_!(); }"#; 752 | let mut file = tempfile::NamedTempFile::new().unwrap(); 753 | file.write_all(source.as_bytes()).unwrap(); 754 | 755 | fmt.format_file(file.path()).unwrap(); 756 | 757 | // Now read back the formatted file 758 | file.rewind().unwrap(); 759 | let mut actual = String::with_capacity(128); 760 | file.read_to_string(&mut actual).unwrap(); 761 | 762 | assert_eq!(expected, actual); 763 | } 764 | 765 | #[test] 766 | fn rustfmt_file() { 767 | temp_env::with_var(RUST_FMT_KEY, Some(RUST_FMT), || { 768 | format_file(RustFmt::new(), PLAIN_EXPECTED); 769 | }); 770 | } 771 | 772 | // prettyplease replaces doc blocks by default 773 | #[cfg(feature = "pretty_please")] 774 | #[test] 775 | fn prettyplease_file() { 776 | format_file(PrettyPlease::new(), PLAIN_PP_EXPECTED); 777 | } 778 | 779 | #[cfg(feature = "post_process")] 780 | #[test] 781 | fn rustfmt_file_replace_markers() { 782 | temp_env::with_var(RUST_FMT_KEY, Some(RUST_FMT), || { 783 | let config = Config::new_str().post_proc(PostProcess::ReplaceMarkers); 784 | format_file(RustFmt::from_config(config), REPLACE_EXPECTED); 785 | }); 786 | } 787 | 788 | // prettyplease replaces doc blocks by default 789 | 
#[cfg(feature = "post_process")] 790 | #[cfg(feature = "pretty_please")] 791 | #[test] 792 | fn prettyplease_file_replace_markers() { 793 | let config = Config::new_str().post_proc(PostProcess::ReplaceMarkers); 794 | format_file(PrettyPlease::from_config(config), REPLACE_BLOCKS_EXPECTED); 795 | } 796 | 797 | #[cfg(feature = "post_process")] 798 | #[test] 799 | fn rustfmt_file_replace_markers_and_docs() { 800 | temp_env::with_var(RUST_FMT_KEY, Some(RUST_FMT), || { 801 | let config = Config::new_str().post_proc(PostProcess::ReplaceMarkersAndDocBlocks); 802 | format_file(RustFmt::from_config(config), REPLACE_BLOCKS_EXPECTED); 803 | }); 804 | } 805 | 806 | #[cfg(feature = "post_process")] 807 | #[cfg(feature = "pretty_please")] 808 | #[test] 809 | fn prettyplease_file_replace_markers_and_docs() { 810 | let config = Config::new_str().post_proc(PostProcess::ReplaceMarkersAndDocBlocks); 811 | format_file(PrettyPlease::from_config(config), REPLACE_BLOCKS_EXPECTED); 812 | } 813 | 814 | fn bad_format_file(fmt: impl Formatter) { 815 | // Write source code to file 816 | let source = r#"use"#; 817 | let mut file = tempfile::NamedTempFile::new().unwrap(); 818 | file.write_all(source.as_bytes()).unwrap(); 819 | 820 | match fmt.format_file(file.path()) { 821 | Err(Error::BadSourceCode(_)) => {} 822 | _ => panic!("Expected bad source code"), 823 | } 824 | } 825 | 826 | #[test] 827 | fn rustfmt_bad_file() { 828 | temp_env::with_var(RUST_FMT_KEY, Some(RUST_FMT), || { 829 | bad_format_file(RustFmt::new()); 830 | }); 831 | } 832 | 833 | #[cfg(feature = "pretty_please")] 834 | #[test] 835 | fn prettyplease_bad_file() { 836 | bad_format_file(PrettyPlease::new()); 837 | } 838 | } 839 | -------------------------------------------------------------------------------- /rust_format/src/replace.rs: -------------------------------------------------------------------------------- 1 | #![cfg(feature = "post_process")] 2 | 3 | use std::borrow::Cow; 4 | use std::{cmp, slice}; 5 | 6 | use 
crate::Error; 7 | 8 | const BLANK_START: &[&[u8]] = &[b"lank_", b"!", b"("]; 9 | const BLANK_END: &[&[u8]] = &[b";"]; 10 | const COMMENT_START: &[&[u8]] = &[b"omment_", b"!", b"("]; 11 | const COMMENT_END: &[&[u8]] = &[b")", b";"]; 12 | const COMMENT_END2: &[&[u8]] = &[b";"]; 13 | const DOC_BLOCK_START: &[&[u8]] = &[b"[", b"doc", b"="]; 14 | const DOC_BLOCK_END: &[&[u8]] = &[b"]"]; 15 | 16 | const EMPTY_COMMENT: &str = "//"; 17 | const COMMENT: &str = "// "; 18 | const DOC_COMMENT: &str = "///"; 19 | const LF_STR: &str = "\n"; 20 | const CRLF_STR: &str = "\r\n"; 21 | 22 | const CR: u8 = b'\r'; 23 | const LF: u8 = b'\n'; 24 | 25 | const MIN_BUFF_SIZE: usize = 128; 26 | 27 | // In order to replace the markers there were a few options: 28 | // 1. Create a full special purpose Rust lexer, replace the tokens we want as we go, write it back 29 | // 2. Find the markers via regular string search, copy everything up to that point, replace, repeat 30 | // 3. A hybrid of 1 and 2 31 | // 32 | // The problem with #1 is it is hugely overkill - we are only interested in 3 markers 33 | // The problem with #2 is that it would find markers in strings and comments - likely not an issue, but it bothered me 34 | // (and also we generalize the marker replacement code also for doc blocks, which someone could have commented out) 35 | // #3 is what is below - it does basic lexing of Rust comments and strings for the purposes of skipping them only. It 36 | // understands just enough to do the job. The weird part is it literally searches inside all other constructs, but the 37 | // probability of a false positive while low in comments and strings, is likely very close to zero anywhere else, so 38 | // I think this is a good compromise. Regardless, the user should be advised to not use `_comment_!(` or `_blank_!(` 39 | // anywhere in the source file other than where they want markers. 

// Byte-level cursor over the source that lazily copies spans into an output buffer,
// allowing marker/doc-block spans to be replaced while everything else is copied verbatim.
struct CopyingCursor<'a> {
    // Start of the span not yet copied into `buffer` (byte index into `source`)
    start_idx: usize,
    // Byte index of `curr` within `source`
    curr_idx: usize,
    // Byte currently under the cursor
    curr: u8,

    // We can iterate as if this were raw bytes since we are only matching ASCII. We preserve
    // any unicode, however, and copy it verbatim
    iter: slice::Iter<'a, u8>,
    source: &'a str,
    buffer: String,
}

impl<'a> CopyingCursor<'a> {
    // Returns `None` when `source` is empty (there is nothing to scan)
    fn new(source: &'a str) -> Option<Self> {
        // Better to be too large than not large enough
        let buffer = String::with_capacity(cmp::max(source.len() * 2, MIN_BUFF_SIZE));
        let mut iter = source.as_bytes().iter();

        iter.next().map(|&ch| Self {
            start_idx: 0,
            curr_idx: 0,
            curr: ch,
            iter,
            source,
            buffer,
        })
    }

    // Advance one byte, keeping `curr`/`curr_idx` in sync with the iterator
    #[inline]
    fn next(&mut self) -> Option<u8> {
        self.iter.next().map(|&ch| {
            self.curr_idx += 1;
            self.curr = ch;
            ch
        })
    }

    // Copy all pending source text up to (but excluding) `marker` into the buffer,
    // then restart the pending span at `new_start_idx`
    #[inline]
    fn copy_to_marker(&mut self, marker: usize, new_start_idx: usize) {
        if marker > self.start_idx {
            // Copy exclusive of marker position
            self.buffer.push_str(&self.source[self.start_idx..marker]);
        }
        self.start_idx = new_start_idx;
    }

    // Finish: flush the remaining span and return owned output if any replacement
    // occurred, otherwise borrow the untouched input
    fn into_buffer(mut self) -> Cow<'a, str> {
        // We have done some work
        if self.start_idx > 0 {
            // Last write to ensure everything is copied
            self.copy_to_marker(self.curr_idx + 1, self.curr_idx + 1);

            self.buffer.shrink_to_fit();
            Cow::Owned(self.buffer)
        // We have done nothing - just return original str
        } else {
            Cow::Borrowed(self.source)
        }
    }

    // Skip a (possibly nested) `/* ... */` block comment; the leading `/*` has
    // already been consumed by the caller
    fn skip_block_comment(&mut self) {
        enum State {
            InComment,
            MaybeStarting,
            MaybeEnding,
        }

        let mut nest_level = 1;
        let mut state = State::InComment;

        while let Some(ch) = self.next() {
            match (ch, state) {
                (b'*', State::InComment) => {
                    state = State::MaybeEnding;
                }
                (b'/', State::MaybeEnding) => {
                    nest_level -= 1;
                    if nest_level == 0 {
                        break;
                    }
                    state = State::InComment;
                }
                (b'*', State::MaybeStarting) => {
                    nest_level += 1;
                    state = State::InComment;
                }
                (b'/', State::InComment) => {
                    state = State::MaybeStarting;
                }
                (_, _) => {
                    state = State::InComment;
                }
            }
        }
    }

    // Called after seeing '/': returns true if a line or block comment was skipped
    fn try_skip_comment(&mut self) -> bool {
        match self.next() {
            // Line comment of some form (we don't care which)
            Some(b'/') => {
                while let Some(ch) = self.next() {
                    if ch == b'\n' {
                        break;
                    }
                }

                true
            }
            // Block comment of some form (we don't care which)
            Some(b'*') => {
                self.skip_block_comment();
                true
            }
            // Not a comment or EOF, etc. - should be impossible in valid code
            _ => false,
        }
    }

    // Skip a regular string literal, honoring backslash escapes; the opening
    // quote has already been consumed
    fn skip_string(&mut self) {
        let mut in_escape = false;

        while let Some(ch) = self.next() {
            match ch {
                b'"' if !in_escape => break,
                b'\\' if !in_escape => in_escape = true,
                _ if in_escape => in_escape = false,
                _ => {}
            }
        }
    }

    // Called after seeing 'r': returns true if a raw string was skipped,
    // false if this turned out not to be a raw string at all
    fn try_skip_raw_string(&mut self) -> bool {
        // First, match the entry sequence to the raw string and collect # of pads present
        let pads = match self.next() {
            Some(b'#') => {
                let mut pads = 1;

                while let Some(ch) = self.next() {
                    match ch {
                        b'#' => {
                            pads += 1;
                        }
                        b'"' => break,
                        // Not a raw string
                        _ => return false,
                    }
                }

                pads
            }
            Some(b'"') => 0,
            _ => return false,
        };

        #[derive(Clone, Copy)]
        enum State {
            InRawComment,
            MaybeEndingComment(i32),
        }

        let mut state = State::InRawComment;

        // Loop over the raw string looking for ending sequence and count pads until we have
        // the correct # of them
        while let Some(ch) = self.next() {
            match (ch, state) {
                (b'"', State::InRawComment) if pads == 0 => break,
                (b'"', State::InRawComment) => state = State::MaybeEndingComment(0),
                (b'#', State::MaybeEndingComment(pads_seen)) => {
                    let pads_seen = pads_seen + 1;
                    if pads_seen == pads {
                        break;
                    }
                    state = State::MaybeEndingComment(pads_seen);
                }
                (_, _) => {
                    state = State::InRawComment;
                }
            }
        }

        true
    }

    // Consume everything up to and including the ')' of a `_blank_!(...)` marker
    #[inline]
    fn skip_blank_param(&mut self) -> Result<(), Error> {
        while let Some(ch) = self.next() {
            if ch == b')' {
                return Ok(());
            }
        }

        // EOF
        Err(Error::BadSourceCode("Unexpected end of input".to_string()))
    }

    // Skip leading whitespace then a (regular or raw) string literal.
    // Returns Ok(None) if a string was skipped, Ok(Some(ch)) for the first
    // non-string byte encountered instead.
    fn try_skip_string(&mut self) -> Result<Option<u8>, Error> {
        while let Some(ch) = self.next() {
            if Self::is_whitespace(ch) {
                continue;
            }

            return match ch {
                // Regular string
                b'"' => {
                    self.skip_string();
                    Ok(None)
                }
                // Raw string
                b'r' => {
                    if self.try_skip_raw_string() {
                        Ok(None)
                    } else {
                        Err(Error::BadSourceCode("Bad raw string".to_string()))
                    }
                }
                // Something else
                ch => Ok(Some(ch)),
            };
        }

        // EOF
        Err(Error::BadSourceCode("Unexpected end of input".to_string()))
    }

    // TODO: Was planning to match values here (but we only recognize ASCII atm):
    // https://github.com/rust-lang/rust/blob/38e0ae590caab982a4305da58a0a62385c2dd880/compiler/rustc_lexer/src/lib.rs#L245
    // We could switch back to UTF8 since we have been matching valid ASCII up to this point, but atm
    // any unicode whitespace will make it not match (not sure any code formatter preserves non-ASCII whitespace?)
    // For now, users should use NO whitespace and let the code formatters add any, if needed. I suspect
    // they will not add any non-ASCII whitespace on their own at min, but likely just ' ', '\n', and '\r'
    //
    // Code points we don't handle that we should (for future ref):
    // Code point 0x0085 == 0xC285
    // Code point 0x200E == 0xE2808E
    // Code point 0x200F == 0xE2808F
    // Code point 0x2028 == 0xE280A8
    // Code point 0x2029 == 0xE280A9
    #[inline]
    fn is_whitespace(ch: u8) -> bool {
        matches!(ch, b' ' | b'\n' | b'\r' | b'\t' | b'\x0b' | b'\x0c')
    }

    // Match each slice in `slices` in order, optionally allowing whitespace
    // before the first one (and always between subsequent ones)
    fn try_ws_matches(&mut self, slices: &[&[u8]], allow_whitespace_first: bool) -> bool {
        let mut allow_whitespace = allow_whitespace_first;

        'top: for &sl in slices {
            // Panic safety: it is pointless for us to pass in a blank slice, don't do that
            let first_ch = sl[0];

            while let Some(ch) = self.next() {
                // This is what we were looking for, now match the rest (if needed)
                if ch == first_ch {
                    // Panic safety: it is pointless for us to pass in a blank slice, don't do that
                    let remainder = &sl[1..];

                    if !remainder.is_empty() && !self.try_match(remainder) {
                        return false;
                    }
                    allow_whitespace = true;
                    continue 'top;
                } else if allow_whitespace && Self::is_whitespace(ch) {
                    // no op
                } else {
                    return false;
                }
            }

            // Premature EOF
            return false;
        }

        // If we can exhaust the iterator then they all must have matched
        true
    }

    // Match the bytes of `sl` exactly, consuming as we go
    fn try_match(&mut self, sl: &[u8]) -> bool {
        let iter = sl.iter();

        for &ch in iter {
            if self.next().is_none() {
                // This isn't great as it will reevaluate the last char - 'b' or 'c' in the main loop,
                // but since those aren't top level it will exit at the bottom of the main loop gracefully
                return false;
            }

            if self.curr != ch {
                return false;
            }
        }

        // If we can exhaust the iterator then it must have matched
        true
    }

    // Consume the line ending after a marker/doc block and report which style it was
    // (a bare CR is not accepted as a line ending)
    #[inline]
    fn detect_line_ending(&mut self) -> Option<&'static str> {
        match self.next() {
            Some(CR) => match self.next() {
                Some(LF) => Some(CRLF_STR),
                _ => None,
            },
            Some(LF) => Some(LF_STR),
            _ => None,
        }
    }

    // Emit `spaces` leading spaces (the indent captured in front of the marker)
    #[inline]
    fn push_spaces(spaces: usize, buffer: &mut String) {
        for _ in 0..spaces {
            buffer.push(' ');
        }
    }

    // Replacement writer for `_blank_!`: emits one line ending, or `num` of them
    fn process_blanks(
        _spaces: usize,
        buffer: &mut String,
        num: &str,
        ending: &str,
    ) -> Result<(), Error> {
        // Single blank line
        if num.is_empty() {
            buffer.push_str(ending);
        // Multiple blank lines
        } else {
            let num: syn::LitInt = syn::parse_str(num)?;
            let blanks: u32 = num.base10_parse()?;

            for _ in 0..blanks {
                buffer.push_str(ending);
            }
        }

        Ok(())
    }

    // Replacement writer for `_comment_!`: emits one `//` line per line of the
    // (parsed) string literal argument
    fn process_comments(
        spaces: usize,
        buffer: &mut String,
        s: &str,
        ending: &str,
    ) -> Result<(), Error> {
        // Single blank comment
        if s.is_empty() {
            Self::push_spaces(spaces, buffer);
            buffer.push_str(EMPTY_COMMENT);
            buffer.push_str(ending);
        // Multiple comments
        } else {
            let s: syn::LitStr = syn::parse_str(s)?;
            let comment = s.value();

            // Blank comment after parsing
            if comment.is_empty() {
                Self::push_spaces(spaces, buffer);
                buffer.push_str(EMPTY_COMMENT);
                buffer.push_str(ending);
            } else {
                for line in comment.lines() {
                    Self::push_spaces(spaces, buffer);

                    if line.is_empty() {
                        buffer.push_str(EMPTY_COMMENT);
                    } else {
                        buffer.push_str(COMMENT);
                        buffer.push_str(line);
                    }

                    buffer.push_str(ending);
                }
            }
        }

        Ok(())
    }

    // This is slightly different from comment handling in that we don't prepend a space but need
    // to translate the doc block literally (#[doc = "test"] == ///test <-- no prepended space)
    fn process_doc_block(
        spaces: usize,
        buffer: &mut String,
        s: &str,
        ending: &str,
    ) -> Result<(), Error> {
        // Single blank comment
        if s.is_empty() {
            Self::push_spaces(spaces, buffer);
            buffer.push_str(DOC_COMMENT);
            buffer.push_str(ending);
        // Multiple comments
        } else {
            let s: syn::LitStr = syn::parse_str(s)?;
            let comment = s.value();

            // Blank comment after parsing
            if comment.is_empty() {
                Self::push_spaces(spaces, buffer);
                buffer.push_str(DOC_COMMENT);
                buffer.push_str(ending);
            } else {
                for line in comment.lines() {
                    Self::push_spaces(spaces, buffer);
                    buffer.push_str(DOC_COMMENT);
                    buffer.push_str(line);
                    buffer.push_str(ending);
                }
            }
        }

        Ok(())
    }

    // Match the marker/doc-block prefix slices; on success returns
    // (start index of the whole construct incl. indent, start index of its value)
    fn try_match_prefixes(
        &mut self,
        indent: usize,
        chars_matched: usize,
        prefixes: &[&[u8]],
        allow_ws_first: bool,
    ) -> Option<(usize, usize)> {
        // We already matched X chars before we got here (but didn't 'next()' after last match so minus 1)
        let mark_start_ident = self.curr_idx - ((chars_matched + indent) - 1);

        if self.try_ws_matches(prefixes, allow_ws_first) {
            let mark_start_value = self.curr_idx + 1;
            Some((mark_start_ident, mark_start_value))
        } else {
            None
        }
    }

    // Match the suffix + line ending, then copy everything before the construct and
    // hand the construct's value to `f` to write the replacement text
    fn try_replace<F>(
        &mut self,
        spaces: usize,
        chars_matched: usize,
        suffixes: &[&[u8]],
        mark_start_ident: usize,
        mark_start_value: usize,
        f: F,
    ) -> Result<(), Error>
    where
        F: FnOnce(usize, &mut String, &str, &str) -> Result<(), Error>,
    {
        // End of value (exclusive)
        let mark_end_value = self.curr_idx + (1 - chars_matched);

        if !self.try_ws_matches(suffixes, true) {
            return Err(Error::BadSourceCode(
                "Unable to match suffix on doc block or marker.".to_string(),
            ));
        }

        if let Some(ending) = self.detect_line_ending() {
            // Mark end of ident here (inclusive)
            let mark_end_ident = self.curr_idx + 1;

            // Copy everything up until this marker
            self.copy_to_marker(mark_start_ident, mark_end_ident);

            // Parse and output
            f(
                spaces,
                &mut self.buffer,
                &self.source[mark_start_value..mark_end_value],
                ending,
            )?;
            Ok(())
        } else {
            Err(Error::BadSourceCode("Expected CR or LF".to_string()))
        }
    }

    // Called after seeing "_b": returns Ok(true) if a `_blank_!` marker was replaced
    fn try_replace_blank_marker(&mut self, spaces: usize) -> Result<bool, Error> {
        // 6 or 7 sections to match: _blank_ ! ( [int] ) ; CRLF|LF

        match self.try_match_prefixes(spaces, 2, BLANK_START, false) {
            Some((ident_start, value_start)) => {
                self.skip_blank_param()?;

                self.try_replace(
                    spaces,
                    1,
                    BLANK_END,
                    ident_start,
                    value_start,
                    CopyingCursor::process_blanks,
                )?;
                Ok(true)
            }
            None => Ok(false),
        }
    }

    // Called after seeing "_c": returns Ok(true) if a `_comment_!` marker was replaced
    fn try_replace_comment_marker(&mut self, spaces: usize) -> Result<bool, Error> {
        // 6 or 7 sections to match: _comment_ ! ( [string] ) ; CRLF|LF

        match self.try_match_prefixes(spaces, 2, COMMENT_START, false) {
            Some((ident_start, value_start)) => {
                // Make sure it is empty or a string
                let (matched, suffix) = match self.try_skip_string()? {
                    // String
                    None => (0, COMMENT_END),
                    // Empty
                    Some(b')') => (1, COMMENT_END2),
                    Some(ch) => {
                        return Err(Error::BadSourceCode(format!(
                            "Expected ')' or string, but got: {}",
                            ch as char
                        )))
                    }
                };

                self.try_replace(
                    spaces,
                    matched,
                    suffix,
                    ident_start,
                    value_start,
                    CopyingCursor::process_comments,
                )?;
                Ok(true)
            }
            None => Ok(false),
        }
    }

    // Called after seeing '#': returns Ok(true) if a `#[doc = "..."]` block was replaced
    fn try_replace_doc_block(&mut self, spaces: usize) -> Result<bool, Error> {
        // 7 sections to match: # [ doc = ] CRLF|LF

        match self.try_match_prefixes(spaces, 1, DOC_BLOCK_START, true) {
            Some((ident_start, value_start)) => {
                // Make sure it is a string
                match self.try_skip_string()? {
                    // String
                    None => {
                        self.try_replace(
                            spaces,
                            0,
                            DOC_BLOCK_END,
                            ident_start,
                            value_start,
                            CopyingCursor::process_doc_block,
                        )?;
                        Ok(true)
                    }
                    Some(ch) => Err(Error::BadSourceCode(format!(
                        "Expected string, but got: {}",
                        ch as char
                    ))),
                }
            }
            None => Ok(false),
        }
    }
}

// Scan `s` and replace `_blank_!`/`_comment_!` markers (and `#[doc = ...]` blocks when
// `replace_doc_blocks` is true) with literal blank lines / comments. Returns a borrowed
// `Cow` when no replacement was needed.
pub(crate) fn replace_markers(s: &str, replace_doc_blocks: bool) -> Result<Cow<'_, str>, Error> {
    match CopyingCursor::new(s) {
        Some(mut cursor) => {
            // Number of consecutive spaces seen immediately before the current byte;
            // used to re-indent the replacement text
            let mut indent = 0;

            loop {
                match cursor.curr {
                    // Possible raw string
                    b'r' => {
                        indent = 0;
                        if !cursor.try_skip_raw_string() {
                            continue;
                        }
                    }
                    // Regular string
                    b'\"' => {
                        indent = 0;
                        cursor.skip_string()
                    }
                    // Possible comment
                    b'/' => {
                        indent = 0;
                        if !cursor.try_skip_comment() {
                            continue;
                        }
                    }
                    // Possible special ident (_comment!_ or _blank!_)
                    b'_' => {
                        if cursor.next().is_none() {
                            break;
                        }

                        match cursor.curr {
                            // Possible blank marker
                            b'b' => {
                                if !cursor.try_replace_blank_marker(indent)? {
                                    indent = 0;
                                    continue;
                                }
                            }
                            // Possible comment marker
                            b'c' => {
                                if !cursor.try_replace_comment_marker(indent)? {
                                    indent = 0;
                                    continue;
                                }
                            }
                            // Nothing we are interested in
                            _ => {
                                indent = 0;
                                continue;
                            }
                        }

                        indent = 0;
                    }
                    // Possible doc block
                    b'#' if replace_doc_blocks => {
                        if !cursor.try_replace_doc_block(indent)? {
                            indent = 0;
                            continue;
                        }

                        indent = 0;
                    }
                    // Count spaces in front of our three special replacements
                    b' ' => {
                        indent += 1;
                    }
                    // Anything else
                    _ => {
                        indent = 0;
                    }
                }

                if cursor.next().is_none() {
                    break;
                }
            }

            Ok(cursor.into_buffer())
        }
        // Empty file
        None => Ok(Cow::Borrowed(s)),
    }
}

// *** Tests ***

#[cfg(test)]
mod tests {
    use std::borrow::Cow;

    use pretty_assertions::assert_eq;

    use crate::replace::replace_markers;
    use crate::Error;

    #[test]
    fn blank() {
        let source = "";

        let actual = replace_markers(source, false).unwrap();
        let expected = source;

        assert_eq!(expected, actual);
        assert!(matches!(actual, Cow::Borrowed(_)));
    }

    #[test]
    fn no_replacements() {
        // Deliberately misspelled markers and markers inside comments must be left alone
        let source = r####"// _comment!_("comment");

/* /* nested comment */ */

/// This is a main function
fn main() {
    println!("hello world");
    println!(r##"hello raw world!"##);
}
_blank!_;
"####;

        let actual = replace_markers(source, false).unwrap();
        let expected = source;

        assert_eq!(expected, actual);
        assert!(matches!(actual, Cow::Borrowed(_)));
    }

    #[test]
    fn replace_comments() {
        let source = r####"// _comment!_("comment");

/* /* nested comment */ */
_comment_!("comment 1\n\ncomment 2");
_comment_!("test");
_comment!("skip this");
/// This is a main function
fn main() {
    println!(r##"hello raw world!"##);
    _comment_!(r"");
    _comment_!();
    println!("hello \nworld");
}

_comment_ !
( r#"This is two
comments"# )
;
_blank!_;
"####;

        let actual = replace_markers(source, false).unwrap();
        let expected = r####"// _comment!_("comment");

/* /* nested comment */ */
// comment 1
//
// comment 2
// test
_comment!("skip this");
/// This is a main function
fn main() {
    println!(r##"hello raw world!"##);
    //
    //
    println!("hello \nworld");
}

// This is two
// comments
_blank!_;
"####;

        assert_eq!(expected, actual);
    }

    #[test]
    fn replace_blanks() {
        let source = r####"// _blank!_(5);

/* /* nested comment */ */
_blank_!(2);
_blank!_("skip this");
#[doc = "This is a main function"]
fn main() {
    let r#test = "hello";
    println!(r"hello raw world!");
    _blank_!();
    println!("hello \nworld");
}

_blank_
!(
2
);
_blank!_;
"####;

        let actual = replace_markers(source, false).unwrap();
        let expected = r####"// _blank!_(5);

/* /* nested comment */ */


_blank!_("skip this");
#[doc = "This is a main function"]
fn main() {
    let r#test = "hello";
    println!(r"hello raw world!");

    println!("hello \nworld");
}



_blank!_;
"####;

        assert_eq!(expected, actual);
    }

    #[test]
    fn replace_doc_blocks() {
        let source = r####"// _blank!_(5);

/* not a nested comment */
#[doc = r#" This is a main function"#]
#[doc = r#" This is two doc
comments"#]
#[cfg(feature = "main")]
#[doc(hidden)]
fn main() {
    println!(r##"hello raw world!"##);
    #[doc = ""]
    println!("hello \nworld");
}

# [
doc
=
" this is\n\n three doc comments"

]
fn test() {
}
_blank!_;
"####;

        let actual = replace_markers(source, true).unwrap();
        let expected = r####"// _blank!_(5);

/* not a nested comment */
/// This is a main function
/// This is two doc
/// comments
#[cfg(feature = "main")]
#[doc(hidden)]
fn main() {
    println!(r##"hello raw world!"##);
    ///
    println!("hello \nworld");
}

/// this is
///
/// three doc comments
fn test() {
}
_blank!_;
"####;

        assert_eq!(expected, actual);
    }

    #[test]
    fn replace_crlf() {
        // The line-ending style of the marker is preserved in the replacement
        let source = "_blank_!(2);\r\n";
        let actual = replace_markers(source, false).unwrap();

        let expected = "\r\n\r\n";
        assert_eq!(expected, actual);
    }

    #[test]
    fn marker_end_after_prefix() {
        assert!(matches!(
            replace_markers("_blank_!(", false),
            Err(Error::BadSourceCode(_))
        ));
    }

    #[test]
    fn marker_param_not_string() {
        assert!(matches!(
            replace_markers("_comment_!(blah);\n", false),
            Err(Error::BadSourceCode(_))
        ));
    }

    #[test]
    fn marker_bad_suffix() {
        assert!(matches!(
            replace_markers("_comment_!(\"blah\"];\n", false),
            Err(Error::BadSourceCode(_))
        ));
    }

    #[test]
    fn doc_block_string_not_closed() {
        assert!(matches!(
            replace_markers("#[doc = \"test]\n", true),
            Err(Error::BadSourceCode(_))
        ));
    }
}
--------------------------------------------------------------------------------