├── .gitignore ├── .travis.yml ├── Cargo.toml ├── LICENSE ├── README.md ├── src ├── config.rs ├── error.rs └── lib.rs └── tests ├── args_as_ref.rs ├── cache_type.rs ├── clone_only_type.rs ├── config.rs ├── multiple_args.rs └── multiple_caches.rs /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | **/*.rs.bk 3 | Cargo.lock 4 | *.idea 5 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: rust 2 | rust: 3 | - nightly 4 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "cache-macro" 3 | version = "0.4.1" 4 | authors = ["Tyler Reisinger "] 5 | edition = "2018" 6 | repository = "https://github.com/tylerreisinger/cache-macro" 7 | homepage = "https://github.com/tylerreisinger/cache-macro" 8 | readme = "README.md" 9 | documentation = "https://docs.rs/cache-macro" 10 | categories = ["caching"] 11 | keywords = ["lru", "procedural-macro", "cache", "memoization"] 12 | license = "MIT" 13 | description = "A procedural macro for automatically caching the output of functions." 
14 | 15 | [dependencies] 16 | quote = "0.6" 17 | lazy_static = "1.2.0" 18 | 19 | [dependencies.proc-macro2] 20 | version = "0.4" 21 | features = ["nightly"] 22 | 23 | [dependencies.syn] 24 | version = "0.15" 25 | features = ["full", "extra-traits"] 26 | 27 | [dev-dependencies] 28 | lru-cache = "0.1.1" 29 | expiring_map = "0.1.0" 30 | 31 | [lib] 32 | proc-macro = true 33 | 34 | [badges] 35 | travis-ci = { repository = "tylerreisinger/cache-macro", branch = "master" } 36 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2018 Tyler Reisinger 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated 4 | documentation files (the "Software"), to deal in the Software without restriction, including without limitation the 5 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit 6 | persons to whom the Software is furnished to do so, subject to the following conditions: 7 | 8 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 9 | 10 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE 11 | WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 12 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR 13 | OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | cache-macro 2 | ================ 3 | [![Build Status](https://travis-ci.org/tylerreisinger/cache-macro.svg?branch=master)](https://travis-ci.org/tylerreisinger/cache-macro) 4 | [![cache-macro on docs.rs][docsrs-image]][docsrs] 5 | [![cache-macro on crates.io][crates-image]][crates] 6 | 7 | [docsrs-image]: https://docs.rs/cache-macro/badge.svg 8 | [docsrs]: https://docs.rs/cache-macro 9 | [crates-image]: https://img.shields.io/crates/v/cache-macro.svg 10 | [crates]: https://crates.io/crates/cache-macro/ 11 | 12 | A procedural macro to automatically cache the result of a function given a set of inputs. 13 | 14 | Previously named 'lru-cache-macros', but renamed to reflect the broadening of scope. 15 | 16 | # Example: 17 | 18 | ```rust 19 | use cache_macro::cache; 20 | use lru_cache::LruCache; 21 | 22 | #[cache(LruCache : LruCache::new(20))] 23 | fn fib(x: u32) -> u64 { 24 | println!("{:?}", x); 25 | if x <= 1 { 26 | 1 27 | } else { 28 | fib(x - 1) + fib(x - 2) 29 | } 30 | } 31 | 32 | assert_eq!(fib(19), 6765); 33 | ``` 34 | 35 | The above example only calls `fib` twenty times, with the values from 0 to 19. All intermediate 36 | results because of the recursion hit the cache. 37 | 38 | # Usage: 39 | 40 | Simply place `#[cache(CacheType : constructor)]` above your function. The function must obey a few properties 41 | to use lru_cache: 42 | 43 | * All arguments and return values must implement `Clone`. 44 | * The function may not take `self` in any form. 45 | 46 | The `LruCache` type used must accept two generic parameters `<K, V>` and must support methods 47 | `get_mut(&K) -> Option<&mut V>` and `insert(K, V)`. The `lru-cache` (for LRU caching) 48 | and `expiring_map` (for time-to-live caching) crates currently meet these requirements. 49 | 50 | Currently, this crate only works on nightly rust.
However, once the 2018 edition stabilizes as well as the 51 | procedural macro diagnostic interface, it should be able to run on stable. 52 | 53 | # Configuration: 54 | 55 | The lru_cache macro can be configured by adding additional attributes under `#[cache(...)]`. 56 | 57 | All configuration attributes take the form `#[cache_cfg(...)]`. The available attributes are: 58 | 59 | * `#[cache_cfg(ignore_args = ...)]` 60 | 61 | This allows certain arguments to be ignored for the purposes of caching. That means they are not part of the 62 | hash table key and thus should never influence the output of the function. It can be useful for diagnostic settings, 63 | returning the number of times executed, or other introspection purposes. 64 | 65 | `ignore_args` takes a comma-separated list of variable identifiers to ignore. 66 | 67 | ### Example: 68 | ```rust 69 | use cache_macro::cache; 70 | use lru_cache::LruCache; 71 | #[cache(LruCache : LruCache::new(20))] 72 | #[cache_cfg(ignore_args = call_count)] 73 | fn fib(x: u64, call_count: &mut u32) -> u64 { 74 | *call_count += 1; 75 | if x <= 1 { 76 | 1 77 | } else { 78 | fib(x - 1, call_count) + fib(x - 2, call_count) 79 | } 80 | } 81 | 82 | let mut call_count = 0; 83 | assert_eq!(fib(39, &mut call_count), 102_334_155); 84 | assert_eq!(call_count, 40); 85 | ``` 86 | 87 | The `call_count` argument can vary, caching is only done based on `x`. 88 | 89 | * `#[cache_cfg(thread_local)]` 90 | 91 | Store the cache in thread-local storage instead of global static storage. This avoids the overhead of Mutex locking, 92 | but each thread will be given its own cache, and all caching will not affect any other thread. 
93 | 94 | Expanding on the first example: 95 | 96 | ```rust 97 | use cache_macro::cache; 98 | use lru_cache::LruCache; 99 | 100 | #[cache(LruCache : LruCache::new(20))] 101 | #[cache_cfg(thread_local)] 102 | fn fib(x: u32) -> u64 { 103 | println!("{:?}", x); 104 | if x <= 1 { 105 | 1 106 | } else { 107 | fib(x - 1) + fib(x - 2) 108 | } 109 | } 110 | 111 | assert_eq!(fib(19), 6765); 112 | ``` 113 | 114 | # Details 115 | The created cache is stored as a static variable protected by a mutex unless the `#[cache_cfg(thread_local)]` 116 | configuration is added. 117 | 118 | With the default settings, the fibonacci example will generate the following code: 119 | 120 | ```rust 121 | fn __lru_base_fib(x: u32) -> u64 { 122 | if x <= 1 { 1 } else { fib(x - 1) + fib(x - 2) } 123 | } 124 | fn fib(x: u32) -> u64 { 125 | use lazy_static::lazy_static; 126 | use std::sync::Mutex; 127 | 128 | lazy_static! { 129 | static ref cache: Mutex<::lru_cache::LruCache<(u32,), u64>> = 130 | Mutex::new(::lru_cache::LruCache::new(20usize)); 131 | } 132 | 133 | let cloned_args = (x.clone(),); 134 | let mut cache_unlocked = cache.lock().unwrap(); 135 | let stored_result = cache_unlocked.get_mut(&cloned_args); 136 | if let Some(stored_result) = stored_result { 137 | return stored_result.clone(); 138 | }; 139 | drop(cache_unlocked); 140 | let ret = __lru_base_fib(x); 141 | let mut cache_unlocked = cache.lock().unwrap(); 142 | cache_unlocked.insert(cloned_args, ret.clone()); 143 | ret 144 | } 145 | 146 | ``` 147 | 148 | Whereas, if you use the `#[cache_cfg(thread_local)]` the generated code will look like: 149 | 150 | 151 | ```rust 152 | fn __lru_base_fib(x: u32) -> u64 { 153 | if x <= 1 { 1 } else { fib(x - 1) + fib(x - 2) } 154 | } 155 | fn fib(x: u32) -> u64 { 156 | use std::cell::UnsafeCell; 157 | use std::thread_local; 158 | 159 | thread_local!( 160 | static cache: UnsafeCell<::lru_cache::LruCache<(u32,), u64>> = 161 | UnsafeCell::new(::lru_cache::LruCache::new(20usize)); 162 | ); 163 | 164 |
cache.with(|c| 165 | { 166 | let mut cache_ref = unsafe { &mut *c.get() }; 167 | let cloned_args = (x.clone(),); 168 | let stored_result = cache_ref.get_mut(&cloned_args); 169 | if let Some(stored_result) = stored_result { 170 | stored_result.clone() 171 | } else { 172 | let ret = __lru_base_fib(x); 173 | cache_ref.insert(cloned_args, ret.clone()); 174 | ret 175 | } 176 | }) 177 | } 178 | ``` 179 | -------------------------------------------------------------------------------- /src/config.rs: -------------------------------------------------------------------------------- 1 | use std::default::Default; 2 | use std::collections::HashSet; 3 | 4 | use proc_macro2; 5 | use syn::{self, Token, parenthesized}; 6 | use syn::parse::{Parse, ParseStream}; 7 | 8 | use crate::error::{DiagnosticError, Result}; 9 | 10 | pub struct Config { 11 | pub ignore_args: HashSet<syn::Ident>, 12 | pub use_tls: bool, 13 | } 14 | 15 | struct IgnoreArgsAttrib { 16 | ignore_args: HashSet<syn::Ident>, 17 | } 18 | 19 | enum ConfigAttrib { 20 | IgnoreArgs(IgnoreArgsAttrib), 21 | UseTls, 22 | } 23 | 24 | const CONFIG_ATTRIBUTE_NAME: &'static str = "cache_cfg"; 25 | 26 | impl Config { 27 | // Parse any additional attributes present after `lru_cache` and return a configuration object 28 | // created from their contents. Additionally, return any attributes that were not handled here.
29 | pub fn parse_from_attributes(attribs: &[syn::Attribute]) -> Result<(Config, Vec<syn::Attribute>)> { 30 | let mut parsed_attributes = Vec::new(); 31 | let mut remaining_attributes = Vec::new(); 32 | 33 | for attrib in attribs { 34 | let segs = &attrib.path.segments; 35 | if segs.len() > 0 { 36 | if segs[0].ident == CONFIG_ATTRIBUTE_NAME { 37 | let tokens = attrib.tts.clone(); 38 | let parsed = syn::parse2::<ConfigAttrib>(tokens); 39 | match parsed { 40 | Ok(parsed_attrib) => parsed_attributes.push(parsed_attrib), 41 | Err(e) => { 42 | let diag = e.span().unstable() 43 | .error(format!("{}", e)); 44 | return Err(DiagnosticError::new_with_syn_error(diag, e)); 45 | } 46 | } 47 | } 48 | else { 49 | remaining_attributes.push(attrib.clone()); 50 | } 51 | } 52 | } 53 | 54 | let mut config: Config = Default::default(); 55 | 56 | for parsed_attrib in parsed_attributes { 57 | match parsed_attrib { 58 | ConfigAttrib::IgnoreArgs(val) => config.ignore_args = val.ignore_args, 59 | ConfigAttrib::UseTls => config.use_tls = true, 60 | } 61 | } 62 | 63 | Ok((config, remaining_attributes)) 64 | } 65 | } 66 | 67 | impl Default for Config { 68 | fn default() -> Config { 69 | Config { 70 | ignore_args: HashSet::new(), 71 | use_tls: false, 72 | } 73 | } 74 | } 75 | 76 | impl Parse for ConfigAttrib { 77 | fn parse(input: ParseStream) -> syn::parse::Result<Self> { 78 | let content; 79 | let _paren = parenthesized!(content in input); 80 | let name = content.parse::<syn::Ident>()?; 81 | 82 | match &name.to_string()[..]
{ 83 | "ignore_args" => Ok(ConfigAttrib::IgnoreArgs(content.parse::<IgnoreArgsAttrib>()?)), 84 | "thread_local" => Ok(ConfigAttrib::UseTls), 85 | _ => Err(syn::parse::Error::new( 86 | name.span(), format!("unrecognized config option '{}'", name.to_string()) 87 | )) 88 | } 89 | } 90 | } 91 | 92 | impl Parse for IgnoreArgsAttrib { 93 | fn parse(input: ParseStream) -> syn::parse::Result<Self> { 94 | input.parse::<Token![=]>()?; 95 | let elems = syn::punctuated::Punctuated::<syn::Ident, Token![,]>::parse_terminated(input)?; 96 | Ok(IgnoreArgsAttrib { 97 | ignore_args: elems.into_iter().collect(), 98 | }) 99 | } 100 | } -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | use std::result; 2 | use proc_macro::Diagnostic; 3 | use syn; 4 | 5 | pub struct DiagnosticError { 6 | diagnostic: Diagnostic, 7 | #[allow(dead_code)] 8 | syn_error: Option<syn::parse::Error>, 9 | } 10 | 11 | impl DiagnosticError { 12 | pub fn new(diagnostic: Diagnostic) -> DiagnosticError { 13 | DiagnosticError { 14 | diagnostic, 15 | syn_error: None, 16 | } 17 | } 18 | pub fn new_with_syn_error(diagnostic: Diagnostic, syn_error: syn::parse::Error) -> DiagnosticError { 19 | DiagnosticError { 20 | diagnostic, 21 | syn_error: Some(syn_error), 22 | } 23 | } 24 | 25 | #[allow(dead_code)] 26 | pub fn source(&self) -> Option<&syn::parse::Error> { 27 | self.syn_error.as_ref() 28 | } 29 | 30 | pub fn emit(self) { 31 | self.diagnostic.emit(); 32 | } 33 | } 34 | 35 | pub type Result<T> = result::Result<T, DiagnosticError>; 36 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | //! cache-macro 2 | //! ================ 3 | //! 4 | //! A procedural macro to automatically cache the result of a function given a set of inputs. 5 | //! 6 | //! # Example: 7 | //! 8 | //! ```rust 9 | //! use cache_macro::cache; 10 | //! use lru_cache::LruCache; 11 | //! 12 | //!
#[cache(LruCache : LruCache::new(20))] 13 | //! fn fib(x: u32) -> u64 { 14 | //! println!("{:?}", x); 15 | //! if x <= 1 { 16 | //! 1 17 | //! } else { 18 | //! fib(x - 1) + fib(x - 2) 19 | //! } 20 | //! } 21 | //! 22 | //! assert_eq!(fib(19), 6765); 23 | //! ``` 24 | //! 25 | //! The above example only calls `fib` twenty times, with the values from 0 to 19. All intermediate 26 | //! results because of the recursion hit the cache. 27 | //! 28 | //! # Usage: 29 | //! 30 | //! Simply place `#[cache(CacheType : constructor)]` above your function. The function must obey a few properties 31 | //! to use lru_cache: 32 | //! 33 | //! * All arguments and return values must implement `Clone`. 34 | //! * The function may not take `self` in any form. 35 | //! 36 | //! The `LruCache` type used must accept two generic parameters `<K, V>` and must support methods 37 | //! `get_mut(&K) -> Option<&mut V>` and `insert(K, V)`. The `lru-cache` (for LRU caching) 38 | //! and `expiring_map` (for time-to-live caching) crates currently meet these requirements. 39 | //! 40 | //! Currently, this crate only works on nightly rust. However, once the 2018 edition stabilizes as well as the 41 | //! procedural macro diagnostic interface, it should be able to run on stable. 42 | //! 43 | //! # Configuration: 44 | //! 45 | //! The lru_cache macro can be configured by adding additional attributes under `#[cache(...)]`. 46 | //! 47 | //! All configuration attributes take the form `#[cache_cfg(...)]`. The available attributes are: 48 | //! 49 | //! * `#[cache_cfg(ignore_args = ...)]` 50 | //! 51 | //! This allows certain arguments to be ignored for the purposes of caching. That means they are not part of the 52 | //! hash table key and thus should never influence the output of the function. It can be useful for diagnostic settings, 53 | //! returning the number of times executed, or other introspection purposes. 54 | //! 55 | //!
`ignore_args` takes a comma-separated list of variable identifiers to ignore. 56 | //! 57 | //! ### Example: 58 | //! ```rust 59 | //! use cache_macro::cache; 60 | //! use lru_cache::LruCache; 61 | //! #[cache(LruCache : LruCache::new(20))] 62 | //! #[cache_cfg(ignore_args = call_count)] 63 | //! fn fib(x: u64, call_count: &mut u32) -> u64 { 64 | //! *call_count += 1; 65 | //! if x <= 1 { 66 | //! 1 67 | //! } else { 68 | //! fib(x - 1, call_count) + fib(x - 2, call_count) 69 | //! } 70 | //! } 71 | //! 72 | //! let mut call_count = 0; 73 | //! assert_eq!(fib(39, &mut call_count), 102_334_155); 74 | //! assert_eq!(call_count, 40); 75 | //! ``` 76 | //! 77 | //! The `call_count` argument can vary, caching is only done based on `x`. 78 | //! 79 | //! * `#[cache_cfg(thread_local)]` 80 | //! 81 | //! Store the cache in thread-local storage instead of global static storage. This avoids the overhead of Mutex locking, 82 | //! but each thread will be given its own cache, and all caching will not affect any other thread. 83 | //! 84 | //! Expanding on the first example: 85 | //! 86 | //! ```rust 87 | //! use cache_macro::cache; 88 | //! use lru_cache::LruCache; 89 | //! 90 | //! #[cache(LruCache : LruCache::new(20))] 91 | //! #[cache_cfg(thread_local)] 92 | //! fn fib(x: u32) -> u64 { 93 | //! println!("{:?}", x); 94 | //! if x <= 1 { 95 | //! 1 96 | //! } else { 97 | //! fib(x - 1) + fib(x - 2) 98 | //! } 99 | //! } 100 | //! 101 | //! assert_eq!(fib(19), 6765); 102 | //! ``` 103 | //! 104 | //! # Details 105 | //! The created cache is stored as a static variable protected by a mutex unless the `#[cache_cfg(thread_local)]` 106 | //! configuration is added. 107 | //! 108 | //! With the default settings, the fibonacci example will generate the following code: 109 | //! 110 | //! ```rust 111 | //! fn __lru_base_fib(x: u32) -> u64 { 112 | //! if x <= 1 { 1 } else { fib(x - 1) + fib(x - 2) } 113 | //! } 114 | //! fn fib(x: u32) -> u64 { 115 | //! 
use lazy_static::lazy_static; 116 | //! use std::sync::Mutex; 117 | //! 118 | //! lazy_static! { 119 | //! static ref cache: Mutex<::lru_cache::LruCache<(u32,), u64>> = 120 | //! Mutex::new(::lru_cache::LruCache::new(20usize)); 121 | //! } 122 | //! 123 | //! let cloned_args = (x.clone(),); 124 | //! let mut cache_unlocked = cache.lock().unwrap(); 125 | //! let stored_result = cache_unlocked.get_mut(&cloned_args); 126 | //! if let Some(stored_result) = stored_result { 127 | //! return stored_result.clone(); 128 | //! }; 129 | //! drop(cache_unlocked); 130 | //! let ret = __lru_base_fib(x); 131 | //! let mut cache_unlocked = cache.lock().unwrap(); 132 | //! cache_unlocked.insert(cloned_args, ret.clone()); 133 | //! ret 134 | //! } 135 | //! 136 | //! ``` 137 | //! 138 | //! Whereas, if you use the `#[cache_cfg(thread_local)]` the generated code will look like: 139 | //! 140 | //! 141 | //! ```rust 142 | //! fn __lru_base_fib(x: u32) -> u64 { 143 | //! if x <= 1 { 1 } else { fib(x - 1) + fib(x - 2) } 144 | //! } 145 | //! fn fib(x: u32) -> u64 { 146 | //! use std::cell::RefCell; 147 | //! use std::thread_local; 148 | //! 149 | //! thread_local!( 150 | //! static cache: RefCell<::lru_cache::LruCache<(u32,), u64>> = 151 | //! RefCell::new(::lru_cache::LruCache::new(20usize)); 152 | //! ); 153 | //! 154 | //! cache.with(|c| 155 | //! { 156 | //! let mut cache_ref = c.borrow_mut(); 157 | //! let cloned_args = (x.clone(),); 158 | //! let stored_result = cache_ref.get_mut(&cloned_args); 159 | //! if let Some(stored_result) = stored_result { 160 | //! return stored_result.clone() 161 | //! } 162 | //! 163 | //! // Don't hold a mutable borrow across 164 | //! // the recursive function call 165 | //! drop(cache_ref); 166 | //! 167 | //! let ret = __lru_base_fib(x); 168 | //! c.borrow_mut().insert(cloned_args, ret.clone()); 169 | //! ret 170 | //! }) 171 | //! } 172 | //! ``` 173 | //!
174 | #![feature(extern_crate_item_prelude)] 175 | #![feature(proc_macro_diagnostic)] 176 | #![recursion_limit="128"] 177 | extern crate proc_macro; 178 | 179 | use proc_macro::TokenStream; 180 | use syn; 181 | use syn::{Token, parse_quote}; 182 | use syn::spanned::Spanned; 183 | use syn::punctuated::Punctuated; 184 | use quote::quote; 185 | use proc_macro2; 186 | 187 | mod config; 188 | mod error; 189 | 190 | use self::error::{DiagnosticError, Result}; 191 | use syn::parse::Parse; 192 | use syn::parse::ParseStream; 193 | use syn::parse_macro_input; 194 | 195 | struct Attr { 196 | cache_type: syn::Type, 197 | cache_creation_expr: syn::Expr, 198 | } 199 | 200 | impl Parse for Attr { 201 | fn parse(input: ParseStream) -> syn::parse::Result<Self> { 202 | let cache_type: syn::Type = input.parse()?; 203 | input.parse::<Token![:]>()?; 204 | let cache_creation_expr: syn::Expr = input.parse()?; 205 | Ok(Attr { 206 | cache_type, 207 | cache_creation_expr, 208 | }) 209 | } 210 | } 211 | 212 | // Function shim to allow us to use `Result` and the `?` operator. 213 | #[proc_macro_attribute] 214 | pub fn cache(attr: TokenStream, item: TokenStream) -> TokenStream { 215 | let attr = parse_macro_input!(attr as Attr); 216 | 217 | match lru_cache_impl(attr, item.clone()) { 218 | Ok(tokens) => return tokens, 219 | Err(e) => { 220 | e.emit(); 221 | return item; 222 | } 223 | } 224 | } 225 | 226 | // The main entry point for the macro. 227 | fn lru_cache_impl(attr: Attr, item: TokenStream) -> Result<TokenStream> { 228 | let mut original_fn: syn::ItemFn = match syn::parse(item.clone()) { 229 | Ok(ast) => ast, 230 | Err(e) => { 231 | let diag = proc_macro2::Span::call_site().unstable() 232 | .error("lru_cache may only be used on functions"); 233 | return Err(DiagnosticError::new_with_syn_error(diag, e)); 234 | } 235 | }; 236 | 237 | let (macro_config, out_attributes) = 238 | { 239 | let attribs = &original_fn.attrs[..]; 240 | config::Config::parse_from_attributes(attribs)?
241 | }; 242 | original_fn.attrs = out_attributes; 243 | 244 | let mut new_fn = original_fn.clone(); 245 | 246 | let return_type = get_cache_fn_return_type(&original_fn)?; 247 | 248 | let new_name = format!("__lru_base_{}", original_fn.ident.to_string()); 249 | original_fn.ident = syn::Ident::new(&new_name[..], original_fn.ident.span()); 250 | 251 | let (call_args, types, cache_args) = get_args_and_types(&original_fn, ¯o_config)?; 252 | let cloned_args = make_cloned_args_tuple(&cache_args); 253 | let fn_path = path_from_ident(original_fn.ident.clone()); 254 | 255 | let fn_call = syn::ExprCall { 256 | attrs: Vec::new(), 257 | paren_token: syn::token::Paren { span: proc_macro2::Span::call_site() }, 258 | args: call_args.clone(), 259 | func: Box::new(fn_path) 260 | }; 261 | 262 | let tuple_type = syn::TypeTuple { 263 | paren_token: syn::token::Paren { span: proc_macro2::Span::call_site() }, 264 | elems: types, 265 | }; 266 | 267 | let cache_type = &attr.cache_type; 268 | let cache_type_with_generics: syn::Type = parse_quote! { 269 | #cache_type<#tuple_type, #return_type> 270 | }; 271 | 272 | let lru_body = build_cache_body(&cache_type_with_generics, &attr.cache_creation_expr, &cloned_args, 273 | &fn_call, ¯o_config); 274 | 275 | 276 | new_fn.block = Box::new(lru_body); 277 | 278 | let out = quote! { 279 | #original_fn 280 | 281 | #new_fn 282 | }; 283 | Ok(out.into()) 284 | } 285 | 286 | // Build the body of the caching function. What is constructed depends on the config value. 
287 | fn build_cache_body(full_cache_type: &syn::Type, cache_new: &syn::Expr, 288 | cloned_args: &syn::ExprTuple, inner_fn_call: &syn::ExprCall, 289 | config: &config::Config) -> syn::Block 290 | { 291 | if config.use_tls { 292 | build_tls_cache_body(full_cache_type, cache_new, cloned_args, inner_fn_call) 293 | } else { 294 | build_mutex_cache_body(full_cache_type, cache_new, cloned_args, inner_fn_call) 295 | } 296 | } 297 | 298 | // Build the body of the caching function which puts the cache in thread-local storage. 299 | fn build_tls_cache_body(full_cache_type: &syn::Type, cache_new: &syn::Expr, 300 | cloned_args: &syn::ExprTuple, inner_fn_call: &syn::ExprCall) -> syn::Block 301 | { 302 | parse_quote! { 303 | { 304 | use std::cell::RefCell; 305 | use std::thread_local; 306 | thread_local!( 307 | static cache: RefCell<#full_cache_type> = 308 | RefCell::new(#cache_new); 309 | ); 310 | cache.with(|c| { 311 | let mut cache_ref = c.borrow_mut(); 312 | let cloned_args = #cloned_args; 313 | 314 | let stored_result = cache_ref.get_mut(&cloned_args); 315 | if let Some(stored_result) = stored_result { 316 | return stored_result.clone() 317 | } 318 | 319 | // Don't hold a mutable borrow across 320 | // the recursive function call 321 | drop(cache_ref); 322 | 323 | let ret = #inner_fn_call; 324 | c.borrow_mut().insert(cloned_args, ret.clone()); 325 | ret 326 | }) 327 | } 328 | } 329 | } 330 | 331 | // Build the body of the caching function which guards the static cache with a mutex. 332 | fn build_mutex_cache_body(full_cache_type: &syn::Type, cache_new: &syn::Expr, 333 | cloned_args: &syn::ExprTuple, inner_fn_call: &syn::ExprCall) -> syn::Block 334 | { 335 | parse_quote! { 336 | { 337 | use lazy_static::lazy_static; 338 | use std::sync::Mutex; 339 | 340 | lazy_static! 
{ 341 | static ref cache: Mutex<#full_cache_type> = 342 | Mutex::new(#cache_new); 343 | } 344 | 345 | let cloned_args = #cloned_args; 346 | 347 | let mut cache_unlocked = cache.lock().unwrap(); 348 | let stored_result = cache_unlocked.get_mut(&cloned_args); 349 | if let Some(stored_result) = stored_result { 350 | return stored_result.clone(); 351 | }; 352 | 353 | // must unlock here to allow potentially recursive call 354 | drop(cache_unlocked); 355 | 356 | let ret = #inner_fn_call; 357 | let mut cache_unlocked = cache.lock().unwrap(); 358 | cache_unlocked.insert(cloned_args, ret.clone()); 359 | ret 360 | } 361 | } 362 | } 363 | 364 | fn get_cache_fn_return_type(original_fn: &syn::ItemFn) -> Result<Box<syn::Type>> { 365 | if let syn::ReturnType::Type(_, ref ty) = original_fn.decl.output { 366 | Ok(ty.clone()) 367 | } else { 368 | let diag = original_fn.ident.span().unstable() 369 | .error("There's no point of caching the output of a function that has no output"); 370 | return Err(DiagnosticError::new(diag)); 371 | } 372 | } 373 | 374 | fn path_from_ident(ident: syn::Ident) -> syn::Expr { 375 | let mut segments: Punctuated<_, Token![::]> = Punctuated::new(); 376 | segments.push(syn::PathSegment { ident: ident, arguments: syn::PathArguments::None }); 377 | syn::Expr::Path(syn::ExprPath { attrs: Vec::new(), qself: None, path: syn::Path { leading_colon: None, segments: segments} }) 378 | } 379 | 380 | fn make_cloned_args_tuple(args: &Punctuated<syn::Expr, Token![,]>) -> syn::ExprTuple { 381 | let mut cloned_args = Punctuated::<_, Token![,]>::new(); 382 | for arg in args { 383 | let call = syn::ExprMethodCall { 384 | attrs: Vec::new(), 385 | receiver: Box::new(arg.clone()), 386 | dot_token: syn::token::Dot { spans: [arg.span(); 1] }, 387 | method: syn::Ident::new("clone", proc_macro2::Span::call_site()), 388 | turbofish: None, 389 | paren_token: syn::token::Paren { span: proc_macro2::Span::call_site() }, 390 | args: Punctuated::new(), 391 | }; 392 | cloned_args.push(syn::Expr::MethodCall(call)); 393 | }
394 | syn::ExprTuple { 395 | attrs: Vec::new(), 396 | paren_token: syn::token::Paren { span: proc_macro2::Span::call_site() }, 397 | elems: cloned_args, 398 | } 399 | } 400 | 401 | fn get_args_and_types(f: &syn::ItemFn, config: &config::Config) -> 402 | Result<(Punctuated<syn::Expr, Token![,]>, Punctuated<syn::Type, Token![,]>, Punctuated<syn::Expr, Token![,]>)> 403 | { 404 | let mut call_args = Punctuated::<_, Token![,]>::new(); 405 | let mut types = Punctuated::<_, Token![,]>::new(); 406 | let mut cache_args = Punctuated::<_, Token![,]>::new(); 407 | 408 | for input in &f.decl.inputs { 409 | match input { 410 | syn::FnArg::SelfValue(p) => { 411 | let diag = p.span().unstable() 412 | .error("`self` arguments are currently unsupported by lru_cache"); 413 | return Err(DiagnosticError::new(diag)); 414 | } 415 | syn::FnArg::SelfRef(p) => { 416 | let diag = p.span().unstable() 417 | .error("`&self` arguments are currently unsupported by lru_cache"); 418 | return Err(DiagnosticError::new(diag)); 419 | } 420 | syn::FnArg::Captured(arg_captured) => { 421 | let mut segments: syn::punctuated::Punctuated<_, Token![::]> = syn::punctuated::Punctuated::new(); 422 | let arg_name; 423 | if let syn::Pat::Ident(ref pat_ident) = arg_captured.pat { 424 | arg_name = pat_ident.ident.clone(); 425 | segments.push(syn::PathSegment { ident: pat_ident.ident.clone(), arguments: syn::PathArguments::None }); 426 | } else { 427 | let diag = arg_captured.span().unstable() 428 | .error("unsupported argument kind"); 429 | return Err(DiagnosticError::new(diag)); 430 | } 431 | 432 | let arg_path = syn::Expr::Path(syn::ExprPath { attrs: Vec::new(), qself: None, path: syn::Path { leading_colon: None, segments } }); 433 | 434 | if !config.ignore_args.contains(&arg_name) { 435 | 436 | // If the arg type is a reference, remove the reference because the arg will be cloned 437 | if let syn::Type::Reference(type_reference) = &arg_captured.ty { 438 | if let Some(m) = type_reference.mutability { 439 | let diag = m.span.unstable() 440 | .error("`mut` reference arguments are
not supported as this could lead to incorrect results being stored"); 441 | return Err(DiagnosticError::new(diag)); 442 | } 443 | types.push(type_reference.elem.as_ref().to_owned()); // as_ref -> to_owned unboxes the type 444 | } else { 445 | types.push(arg_captured.ty.clone()); 446 | } 447 | 448 | cache_args.push(arg_path.clone()); 449 | } 450 | 451 | 452 | call_args.push(arg_path); 453 | }, 454 | syn::FnArg::Inferred(p) => { 455 | let diag = p.span().unstable() 456 | .error("inferred arguments are currently unsupported by lru_cache"); 457 | return Err(DiagnosticError::new(diag)); 458 | } 459 | syn::FnArg::Ignored(p) => { 460 | let diag = p.span().unstable() 461 | .error("ignored arguments are currently unsupported by lru_cache"); 462 | return Err(DiagnosticError::new(diag)); 463 | } 464 | } 465 | } 466 | 467 | if types.len() == 1 { 468 | types.push_punct(syn::token::Comma { spans: [proc_macro2::Span::call_site(); 1] }) 469 | } 470 | 471 | Ok((call_args, types, cache_args)) 472 | } 473 | -------------------------------------------------------------------------------- /tests/args_as_ref.rs: -------------------------------------------------------------------------------- 1 | use cache_macro::cache; 2 | use lru_cache::LruCache; 3 | 4 | #[test] 5 | fn args_as_ref() { 6 | #[cache(LruCache : LruCache::new(20))] 7 | fn fib(x: &u32) -> u64 { 8 | println!("{:?}", x); 9 | if *x <= 1 { 10 | 1 11 | } else { 12 | fib(&(x - 1)) + fib(&(x - 2)) 13 | } 14 | } 15 | 16 | assert_eq!(fib(&19), 6765); 17 | } 18 | 19 | -------------------------------------------------------------------------------- /tests/cache_type.rs: -------------------------------------------------------------------------------- 1 | use cache_macro::cache; 2 | use std::time::Duration; 3 | use expiring_map::ExpiringMap; 4 | use std::hash::Hash; 5 | 6 | #[test] 7 | fn cache_type() { 8 | use std::f64; 9 | 10 | #[cache(ExpiringMap : ExpiringMap::new(Duration::from_secs(60)))] 11 | fn cached_sqrt(x: u64) -> f64 { 12 | 
f64::sqrt(x as f64) 13 | } 14 | 15 | assert_eq!(cached_sqrt(9), f64::sqrt(9.0)); 16 | } 17 | 18 | fn custom_create_cache_method<K, V>(duration: Duration, _extra_arg: u32) -> ExpiringMap<K, V> 19 | where K: Eq + Hash 20 | { 21 | ExpiringMap::new(duration) 22 | } 23 | 24 | #[test] 25 | fn non_standard_cache_creation() { 26 | // this tests support for caches which do not use a `new` method, 27 | // and/or whose creation function uses more than 1 argument 28 | 29 | use std::f64; 30 | 31 | #[cache(ExpiringMap : custom_create_cache_method(Duration::from_secs(60), 1))] 32 | fn cached_sqrt(x: u64) -> f64 { 33 | f64::sqrt(x as f64) 34 | } 35 | 36 | assert_eq!(cached_sqrt(9), f64::sqrt(9.0)); 37 | } 38 | -------------------------------------------------------------------------------- /tests/clone_only_type.rs: -------------------------------------------------------------------------------- 1 | use cache_macro::cache; 2 | use lru_cache::LruCache; 3 | 4 | use std::ops; 5 | 6 | 7 | #[derive(Clone, Eq, PartialEq, Hash, Debug, Ord, PartialOrd)] 8 | struct NoCopyI32(i32); 9 | 10 | impl ops::Add for NoCopyI32 { 11 | type Output = NoCopyI32; 12 | fn add(self, rhs: Self) -> Self { 13 | NoCopyI32(self.0 + rhs.0) 14 | } 15 | } 16 | impl ops::Sub for NoCopyI32 { 17 | type Output = NoCopyI32; 18 | fn sub(self, rhs: Self) -> Self { 19 | NoCopyI32(self.0 - rhs.0) 20 | } 21 | } 22 | 23 | #[test] 24 | fn clone_only_type() { 25 | #[cache(LruCache : LruCache::new(20))] 26 | fn fib(x: NoCopyI32) -> NoCopyI32 { 27 | if x <= NoCopyI32(1) { 28 | NoCopyI32(1) 29 | } else { 30 | fib(x.clone() - NoCopyI32(1)) + fib(x - NoCopyI32(2)) 31 | } 32 | } 33 | 34 | assert_eq!(fib(NoCopyI32(19)), NoCopyI32(6765)); 35 | } 36 | -------------------------------------------------------------------------------- /tests/config.rs: -------------------------------------------------------------------------------- 1 | use cache_macro::cache; 2 | use lru_cache::LruCache; 3 | 4 | use std::thread; 5 | use
std::sync::atomic::{AtomicUsize, Ordering}; 6 | use std::time; 7 | 8 | #[test] 9 | fn thread_local_ignore_args() { 10 | #[cache(LruCache : LruCache::new(20))] 11 | #[cache_cfg(ignore_args = call_count)] 12 | #[cache_cfg(thread_local)] 13 | fn fib(x: u32, call_count: &mut u32) -> u64 { 14 | *call_count += 1; 15 | if x <= 1 { 16 | 1 17 | } else { 18 | fib(x - 1, call_count) + fib(x - 2, call_count) 19 | } 20 | } 21 | 22 | let mut call_count = 0; 23 | assert_eq!(fib(39, &mut call_count), 102_334_155); 24 | assert_eq!(call_count, 40); 25 | } 26 | 27 | #[test] 28 | fn multithreaded() { 29 | static CALL_COUNT: AtomicUsize = AtomicUsize::new(0); 30 | 31 | #[cache(LruCache : LruCache::new(20))] 32 | fn fib(x: u32) -> u64 { 33 | CALL_COUNT.fetch_add(1, Ordering::SeqCst); 34 | if x <= 1 { 35 | 1 36 | } else { 37 | fib(x - 1) + fib(x - 2) 38 | } 39 | } 40 | 41 | let t1 = thread::spawn( || { 42 | assert_eq!(fib(39), 102_334_155); 43 | }); 44 | 45 | let ten_millis = time::Duration::from_millis(10); 46 | thread::sleep(ten_millis); 47 | 48 | let t2 = thread::spawn( || { 49 | assert_eq!(fib(39), 102_334_155); 50 | }); 51 | 52 | t1.join().unwrap(); 53 | t2.join().unwrap(); 54 | 55 | // threads should share a cache, so total runs should be less than 40 * 2 56 | assert!(CALL_COUNT.load(Ordering::SeqCst) < 80); 57 | } 58 | -------------------------------------------------------------------------------- /tests/multiple_args.rs: -------------------------------------------------------------------------------- 1 | use cache_macro::cache; 2 | use lru_cache::LruCache; 3 | 4 | #[test] 5 | fn multiple_args() { 6 | #[cache(LruCache : LruCache::new(20))] 7 | #[inline] 8 | fn ackermann(m: u64, n: u64) -> u64 { 9 | if m == 0 { 10 | n + 1 11 | } else if m > 0 && n == 0 { 12 | ackermann(m - 1, 1) 13 | } else { 14 | ackermann(m - 1, ackermann(m, n - 1)) 15 | } 16 | } 17 | 18 | assert_eq!(ackermann(0, 0), 1); 19 | assert_eq!(ackermann(1, 0), 2); 20 | assert_eq!(ackermann(1, 1), 3); 21 | 
assert_eq!(ackermann(3, 2), 29); 22 | } 23 | -------------------------------------------------------------------------------- /tests/multiple_caches.rs: -------------------------------------------------------------------------------- 1 | use cache_macro::cache; 2 | use lru_cache::LruCache; 3 | 4 | #[test] 5 | fn multiple_caches() { 6 | use std::f64; 7 | #[cache(LruCache : LruCache::new(20))] 8 | fn cached_sqrt(x: u64) -> f64 { 9 | f64::sqrt(x as f64) 10 | } 11 | #[cache(LruCache : LruCache::new(20))] 12 | fn cached_log(x: u64) -> f64 { 13 | f64::ln(x as f64) 14 | } 15 | 16 | assert_eq!(cached_sqrt(9), f64::sqrt(9.0)); 17 | assert_eq!(cached_log(9), f64::ln(9.0)); 18 | } 19 | --------------------------------------------------------------------------------