├── .gitignore ├── Cargo.toml ├── Readme.md ├── src ├── hash.rs ├── lib.rs └── num.rs └── tests ├── hash.rs └── num.rs
/.gitignore:
--------------------------------------------------------------------------------
1 | /target
2 | /Cargo.lock
3 | 
--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "ethers-literal"
3 | version = "0.1.0"
4 | edition = "2021"
5 | 
6 | [lib]
7 | proc-macro = true
8 | 
9 | [dependencies]
10 | ethers = { version = "2.0.7", default-features = false }
11 | hex = "0.4.3"
12 | 
--------------------------------------------------------------------------------
/Readme.md:
--------------------------------------------------------------------------------
1 | # The `num!` and `hash!` macros for `ethers-core` types
2 | 
3 | Within the macro arguments, you can write `U256`, `I256`, `U128`, `H256` and `H160` literals using the [same syntax][rust-syntax] as Rust integer literals, but using a capital `U`, `I` or `H` suffix respectively.
4 | In order to make it work, you need to import `ethers`.
5 | 6 | I just readapted https://github.com/recmo/uint 7 | 8 | [rust-syntax]: https://doc.rust-lang.org/stable/reference/tokens.html#integer-literals 9 | 10 | ## Examples 11 | ```rust 12 | use ethers_literal::{num, hash}; 13 | let a = num!(4_U128); 14 | const b: U256 = num!(42_U256); 15 | const c: I256 = num!(-0xa3_I256); 16 | 17 | let addr1 = hash!(0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640_H160); 18 | const hash: H256 = hash!(0x4000000000000000000000000040000000000000000000000000000000000000_H256); 19 | ``` 20 | -------------------------------------------------------------------------------- /src/hash.rs: -------------------------------------------------------------------------------- 1 | #![doc = include_str!("../Readme.md")] 2 | #![warn(clippy::all, clippy::pedantic, clippy::cargo, clippy::nursery)] 3 | 4 | use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; 5 | use std::{fmt::Write, str::FromStr}; 6 | 7 | /// Construct a `H{bits}` literal from `limbs`. 8 | fn construct(bits: usize, limbs: &[u8]) -> TokenStream { 9 | let mut limbs_str = String::new(); 10 | let mut limbs_vec = vec![0; bits / 8]; 11 | for (limb, b) in limbs_vec.iter_mut().zip(limbs) { 12 | *limb = *b; 13 | } 14 | for limb in limbs_vec { 15 | write!(&mut limbs_str, "{limb}_u8, ").unwrap(); 16 | } 17 | let limbs_str = limbs_str.trim_end_matches(", "); 18 | 19 | let source = format!("::ethers::core::types::H{bits}([{limbs_str}])"); 20 | 21 | TokenStream::from_str(&source).unwrap() 22 | } 23 | 24 | /// Construct a compiler error message. 25 | // FEATURE: (BLOCKED) Replace with Diagnostic API when stable. 
26 | // See 27 | fn error(span: Span, message: &str) -> TokenTree { 28 | // See: https://docs.rs/syn/1.0.70/src/syn/error.rs.html#243 29 | let tokens = TokenStream::from_iter(vec![ 30 | TokenTree::Ident(Ident::new("compile_error", span)), 31 | TokenTree::Punct(Punct::new('!', Spacing::Alone)), 32 | TokenTree::Group({ 33 | let mut group = Group::new( 34 | Delimiter::Brace, 35 | TokenStream::from_iter(vec![TokenTree::Literal(Literal::string(message))]), 36 | ); 37 | group.set_span(span); 38 | group 39 | }), 40 | ]); 41 | TokenTree::Group(Group::new(Delimiter::None, tokens)) 42 | } 43 | 44 | fn parse_suffix(source: &str) -> Option<(usize, &str)> { 45 | // Parse into value, bits, and base type. 46 | let suffix_index = source.rfind('H')?; 47 | let (value, suffix) = source.split_at(suffix_index); 48 | let value = value.strip_suffix('_').unwrap_or(value); 49 | let (_, bits) = suffix.split_at(1); 50 | let bits = bits.parse::().ok()?; 51 | 52 | Some((bits, value)) 53 | } 54 | 55 | /// Transforms a [`Literal`] and returns the substitute [`TokenStream`]. 56 | fn transform_literal(source: &str) -> Result, String> { 57 | // Check if literal has a suffix we accept 58 | let Some((bits, value)) = parse_suffix(source) else { 59 | return Ok(None); 60 | }; 61 | 62 | let value = value.strip_prefix("0x").unwrap_or(value); 63 | 64 | // Parse `value` into limbs. 65 | // At this point we are confident the literal was for us, so we throw errors. 66 | let limbs = hex::decode(value).map_err(|e| format!("hex error: {e}"))?; 67 | 68 | Ok(Some(construct(bits, &limbs))) 69 | } 70 | 71 | /// Recurse down tree and transform all literals. 
72 | fn transform_tree(tree: TokenTree) -> TokenTree { 73 | match tree { 74 | TokenTree::Group(group) => { 75 | let delimiter = group.delimiter(); 76 | let span = group.span(); 77 | let stream = transform_stream_hash(group.stream()); 78 | let mut transformed = Group::new(delimiter, stream); 79 | transformed.set_span(span); 80 | TokenTree::Group(transformed) 81 | } 82 | TokenTree::Literal(a) => { 83 | let span = a.span(); 84 | let source = a.to_string(); 85 | let mut tree = match transform_literal(&source) { 86 | Ok(Some(stream)) => TokenTree::Group({ 87 | let mut group = Group::new(Delimiter::None, stream); 88 | group.set_span(span); 89 | group 90 | }), 91 | Ok(None) => TokenTree::Literal(a), 92 | Err(message) => error(span, &message), 93 | }; 94 | tree.set_span(span); 95 | tree 96 | } 97 | tree => tree, 98 | } 99 | } 100 | 101 | /// Iterate over a [`TokenStream`] and transform all [`TokenTree`]s. 102 | pub fn transform_stream_hash(stream: TokenStream) -> TokenStream { 103 | stream.into_iter().map(transform_tree).collect() 104 | } 105 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | use proc_macro::TokenStream; 2 | 3 | mod hash; 4 | mod num; 5 | 6 | use hash::transform_stream_hash; 7 | use num::transform_stream_num; 8 | 9 | // Repeat the crate doc 10 | #[doc = include_str!("../Readme.md")] 11 | #[proc_macro] 12 | pub fn num(stream: TokenStream) -> TokenStream { 13 | transform_stream_num(stream) 14 | } 15 | 16 | // Repeat the crate doc 17 | #[doc = include_str!("../Readme.md")] 18 | #[proc_macro] 19 | pub fn hash(stream: TokenStream) -> TokenStream { 20 | transform_stream_hash(stream) 21 | } 22 | -------------------------------------------------------------------------------- /src/num.rs: -------------------------------------------------------------------------------- 1 | #![doc = include_str!("../Readme.md")] 2 | #![warn(clippy::all, 
clippy::pedantic, clippy::cargo, clippy::nursery)]
3 | 
4 | use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
5 | use std::{
6 |     fmt::{Display, Formatter, Write},
7 |     str::FromStr,
8 | };
9 | 
10 | /// Whether a literal names a signed (`I…`) or unsigned (`U…`) integer type.
11 | #[derive(Copy, Clone, PartialEq, Debug)]
12 | enum LiteralBaseType {
13 |     Signed,
14 |     Unsigned,
15 | }
16 | 
17 | impl LiteralBaseType {
18 |     /// Suffix characters that mark a literal as one of ours.
19 |     const PATTERN: &[char] = &['I', 'U'];
20 | }
21 | 
22 | impl Display for LiteralBaseType {
23 |     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
24 |         match self {
25 |             Self::Signed => f.write_str("I"),
26 |             Self::Unsigned => f.write_str("U"),
27 |         }
28 |     }
29 | }
30 | 
31 | impl FromStr for LiteralBaseType {
32 |     type Err = ();
33 | 
34 |     fn from_str(s: &str) -> Result<Self, Self::Err> {
35 |         match s {
36 |             "U" => Ok(Self::Unsigned),
37 |             "I" => Ok(Self::Signed),
38 |             _ => Err(()),
39 |         }
40 |     }
41 | }
42 | 
43 | /// Construct a `U{bits}`/`I{bits}` literal from little-endian 64-bit `limbs`.
44 | fn construct(base_type: LiteralBaseType, bits: usize, limbs: &[u64]) -> TokenStream {
45 |     let mut limbs_str = String::new();
46 |     for limb in limbs {
47 |         write!(&mut limbs_str, "0x{limb:016x}_u64, ").unwrap();
48 |     }
49 |     let limbs_str = limbs_str.trim_end_matches(", ");
50 | 
51 |     let source = match base_type {
52 |         LiteralBaseType::Signed => {
53 |             format!("::ethers::core::types::{base_type}{bits}::from_raw(::ethers::core::types::U{bits}([{limbs_str}]))")
54 |         }
55 |         LiteralBaseType::Unsigned => {
56 |             format!("::ethers::core::types::{base_type}{bits}([{limbs_str}])")
57 |         }
58 |     };
59 |     TokenStream::from_str(&source).unwrap()
60 | }
61 | 
62 | /// Construct a compiler error message.
63 | // FEATURE: (BLOCKED) Replace with Diagnostic API when stable.
64 | // See <https://doc.rust-lang.org/stable/proc_macro/struct.Diagnostic.html>
65 | fn error(span: Span, message: &str) -> TokenTree {
66 |     // See: https://docs.rs/syn/1.0.70/src/syn/error.rs.html#243
67 |     let tokens = TokenStream::from_iter(vec![
68 |         TokenTree::Ident(Ident::new("compile_error", span)),
69 |         TokenTree::Punct(Punct::new('!', Spacing::Alone)),
70 |         TokenTree::Group({
71 |             let mut group = Group::new(
72 |                 Delimiter::Brace,
73 |                 TokenStream::from_iter(vec![TokenTree::Literal(Literal::string(message))]),
74 |             );
75 |             group.set_span(span);
76 |             group
77 |         }),
78 |     ]);
79 |     TokenTree::Group(Group::new(Delimiter::None, tokens))
80 | }
81 | 
82 | /// Parse `value` (with optional `0x`/`0o`/`0b` prefix) into little-endian 64-bit limbs.
83 | fn parse_digits(value: &str) -> Result<Vec<u64>, String> {
84 |     // Parse base
85 |     let (base, digits) = if value.len() >= 2 {
86 |         let (prefix, remainder) = value.split_at(2);
87 |         match prefix {
88 |             "0x" => (16_u8, remainder),
89 |             "0o" => (8, remainder),
90 |             "0b" => (2, remainder),
91 |             _ => (10, value),
92 |         }
93 |     } else {
94 |         (10, value)
95 |     };
96 | 
97 |     // Parse digits in base
98 |     let mut limbs = vec![0_u64];
99 |     for c in digits.chars() {
100 |         // Read next digit
101 |         let digit = match c {
102 |             '0'..='9' => c as u64 - '0' as u64,
103 |             'a'..='f' => c as u64 - 'a' as u64 + 10,
104 |             'A'..='F' => c as u64 - 'A' as u64 + 10,
105 |             '_' => continue,
106 |             _ => return Err(format!("Invalid character '{c}'")),
107 |         };
108 |         // BUG FIX: was `digit > base`, which wrongly accepted a digit equal
109 |         // to the base (e.g. `a` as a decimal digit, or `8` in octal).
110 |         #[allow(clippy::cast_lossless)]
111 |         if digit >= base as u64 {
112 |             return Err(format!(
113 |                 "Invalid digit {c} in base {base} (did you forget the `0x` prefix?)"
114 |             ));
115 |         }
116 | 
117 |         // Multiply result by base and add digit
118 |         let mut carry = digit;
119 |         #[allow(clippy::cast_lossless)]
120 |         #[allow(clippy::cast_possible_truncation)]
121 |         for limb in &mut limbs {
122 |             let product = (*limb as u128) * (base as u128) + (carry as u128);
123 |             *limb = product as u64;
124 |             carry = (product >> 64) as u64;
125 |         }
126 |         if carry > 0 {
127 |             limbs.push(carry);
128 |         }
129 |     }
130 | 
131 |     Ok(limbs)
132 | }
133 | 
134 | /// Pad `limbs` with zeros up to the limb count of a `{bits}`-wide integer,
135 | /// or return `None` when the value does not fit in `bits` bits.
136 | fn pad_limbs(bits: usize, mut limbs: Vec<u64>) -> Option<Vec<u64>> {
137 |     // Get limb count and mask
138 |     let num_limbs = (bits + 63) / 64;
139 |     let mask = if bits == 0 {
140 |         0
141 |     } else {
142 |         let bits = bits % 64;
143 |         if bits == 0 {
144 |             u64::MAX
145 |         } else {
146 |             (1 << bits) - 1
147 |         }
148 |     };
149 | 
150 |     // Remove trailing zeros, pad with zeros
151 |     while limbs.len() > num_limbs && limbs.last() == Some(&0) {
152 |         limbs.pop();
153 |     }
154 |     while limbs.len() < num_limbs {
155 |         limbs.push(0);
156 |     }
157 | 
158 |     // Validate length
159 |     if limbs.len() > num_limbs || limbs.last().copied().unwrap_or(0) > mask {
160 |         return None;
161 |     }
162 |     Some(limbs)
163 | }
164 | 
165 | /// Split a literal such as `0x10_U256` into base type, bit size, and value part.
166 | /// Returns `None` if the literal does not carry an `I{bits}`/`U{bits}` suffix.
167 | fn parse_suffix(source: &str) -> Option<(LiteralBaseType, usize, &str)> {
168 |     // Parse into value, bits, and base type.
169 |     let suffix_index = source.rfind(LiteralBaseType::PATTERN)?;
170 |     let (value, suffix) = source.split_at(suffix_index);
171 |     let (base_type, bits) = suffix.split_at(1);
172 |     let base_type = base_type.parse::<LiteralBaseType>().ok()?;
173 |     let bits = bits.parse::<usize>().ok()?;
174 | 
175 |     // If type is unsigned but literal starts with minus
176 |     if base_type == LiteralBaseType::Unsigned && value.starts_with('-') {
177 |         return None;
178 |     }
179 |     Some((base_type, bits, value))
180 | }
181 | 
182 | /// Transforms a [`Literal`] and returns the substitute [`TokenStream`].
183 | ///
184 | /// Returns `Ok(None)` for literals that are not ours, and `Err` with a
185 | /// human-readable message for malformed or out-of-range values.
186 | fn transform_literal(source: &str) -> Result<Option<TokenStream>, String> {
187 |     // Check if literal has a suffix we accept
188 |     let Some((base_type, bits, value)) = parse_suffix(source) else {
189 |         return Ok(None);
190 |     };
191 | 
192 |     // Parse `value` into limbs.
193 |     // At this point we are confident the literal was for us, so we throw errors.
194 |     let limbs = parse_digits(value)?;
195 | 
196 |     // Pad limbs to the correct length.
197 |     let Some(mut limbs) = pad_limbs(bits, limbs) else {
198 |         let value = value.trim_end_matches('_');
199 |         return Err(format!("Value too large for {base_type}{bits}: {value}"));
200 |     };
201 | 
202 |     // If signed integer is negative, we compute the absolute unsigned value and negate it.
203 |     // Calculating two's complement would be cleaner but since only I256 is signed, I got lazy.
204 |     if source.starts_with('-') {
205 |         let num = ethers::core::types::I256::from_raw(ethers::core::types::U256(
206 |             limbs.try_into().unwrap(),
207 |         ));
208 |         let num = -num;
209 |         limbs = num.into_raw().0.to_vec();
210 |     }
211 |     Ok(Some(construct(base_type, bits, &limbs)))
212 | }
213 | 
214 | /// Recurse down tree and transform all literals.
215 | fn transform_tree(tree: TokenTree) -> TokenTree {
216 |     match tree {
217 |         TokenTree::Group(group) => {
218 |             let delimiter = group.delimiter();
219 |             let span = group.span();
220 |             let stream = transform_stream_num(group.stream());
221 |             let mut transformed = Group::new(delimiter, stream);
222 |             transformed.set_span(span);
223 |             TokenTree::Group(transformed)
224 |         }
225 |         TokenTree::Literal(a) => {
226 |             let span = a.span();
227 |             let source = a.to_string();
228 |             let mut tree = match transform_literal(&source) {
229 |                 Ok(Some(stream)) => TokenTree::Group({
230 |                     let mut group = Group::new(Delimiter::None, stream);
231 |                     group.set_span(span);
232 |                     group
233 |                 }),
234 |                 Ok(None) => TokenTree::Literal(a),
235 |                 Err(message) => error(span, &message),
236 |             };
237 |             tree.set_span(span);
238 |             tree
239 |         }
240 |         tree => tree,
241 |     }
242 | }
243 | 
244 | /// Iterate over a [`TokenStream`] and transform all [`TokenTree`]s.
245 | pub(crate) fn transform_stream_num(stream: TokenStream) -> TokenStream {
246 |     stream.into_iter().map(transform_tree).collect()
247 | }
248 | 
--------------------------------------------------------------------------------
/tests/hash.rs:
--------------------------------------------------------------------------------
1 | use ethers::core::types::*;
2 | use ethers_literal::hash;
3 | 
4 | #[test]
5 | fn test() {
6 |     hash! {
7 |         assert_eq!(0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640_H160, "0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640".parse::<H160>().unwrap());
8 |         assert_eq!(0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640000000000000000000000000_H256, "0x88e6A0c2dDD26FEEb64F039a2c41296FcB3f5640000000000000000000000000".parse::<H256>().unwrap());
9 |         assert_eq!(0x00000000000000000000000088e6A0c2dDD26FEEb64F039a2c41296FcB3f5640_H256, "0x00000000000000000000000088e6A0c2dDD26FEEb64F039a2c41296FcB3f5640".parse::<H256>().unwrap());
10 |     }
11 | }
12 | 
--------------------------------------------------------------------------------
/tests/num.rs:
--------------------------------------------------------------------------------
1 | use ethers::core::types::*;
2 | use ethers_literal::num;
3 | 
4 | #[test]
5 | fn test() {
6 |     num! {
7 |         assert_eq!(0x10_U256, "0x10".parse::<U256>().unwrap());
8 |         assert_eq!(10_U256, U256::from(10));
9 | 
10 |         assert_eq!(0x10U256, "0x10".parse::<U256>().unwrap());
11 |         assert_eq!(10U256, U256::from(10));
12 | 
13 |         assert_eq!(-0x10_I256, I256::from(-0x10));
14 |         assert_eq!(-10_I256, I256::from(-10));
15 | 
16 |         assert_eq!(0x10I256, I256::from(0x10));
17 |         assert_eq!(10I256, I256::from(10));
18 | 
19 | 
20 |         assert_eq!(0x10_U128, "0x10".parse::<U128>().unwrap());
21 |         assert_eq!(10_U128, U128::from(10));
22 | 
23 |         assert_eq!(2, 2);
24 |     }
25 | }
26 | 
--------------------------------------------------------------------------------