├── .gitignore ├── Cargo.toml ├── LICENSE-MIT ├── .travis.yml ├── README.md ├── src ├── freelist.rs ├── boxed.rs ├── composable.rs ├── scoped.rs └── lib.rs └── LICENSE-APACHE /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | Cargo.lock 3 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "allocators" 3 | version = "0.2.0" 4 | authors = ["rphmeier "] 5 | description = "composable memory allocators and utilities for creating more." 6 | documentation = "https://rphmeier.github.io/allocators/" 7 | license = "MIT/Apache-2.0" 8 | repository = "https://github.com/rphmeier/allocators" 9 | keywords = ["allocator", "memory", "allocation"] -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 Robert Habermeier 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: rust 2 | rust: 3 | - nightly 4 | after_success: ! '[ $TRAVIS_BRANCH = master ] && 5 | 6 | [ $TRAVIS_PULL_REQUEST = false ] && 7 | 8 | cargo doc && 9 | 10 | echo "" > target/doc/index.html && 12 | 13 | sudo pip install ghp-import && 14 | 15 | ghp-import -n target/doc && 16 | 17 | git push -fq https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages' 18 | env: 19 | global: 20 | secure: a90GUq1Siw6xNSdHTW17jeWi+uH0CgEQjOcLPpIaAECFix1nxy6DpUf7tor1FxjxtImfcGw9XmKD27C4k16Ju3tB/m9mH1sXINAWuOoRPGf50UAQq6MkoAHTRTFf8RsorA4I9kSn0bJZl4+ghyiRpHK5xRk7o0Twp8Yhitgkj2222ejfMCKd/aHh5OuGhje/pmLyS4xjFAgATtW4KVau0mgJ3GpqdTrqkqsfXIKhtfSHEyyhHrI+uDwJDnUdLcDOrTKSCOHeGsjNRrC03ebo/KfsXLEnYb2YfPe/fpIGt+If3Lr2XvAPWYBl7N23anBvGWuTSaHkwD/gs7T+BSXZ5CRPZhQ8dC3zNvHeZnSm1JDfsGc62GhstjBy865y8upIpYb2Fb/TfPAQJ0yrffHcJO085ewURH2a4/jGuirEWMXR3fq0bt0me49YUYwV6xsULniCKtxtB6BsNCNsFpv9LFcB32Bt8J4Haf5E7T+c29H6H/E93H70TE12gPNwIS+aAt9AQBD2FYgkUAbo7anEw+fA05o8iTMg2rJfacpdHN3pFf2EtAyY73HYPhaj3IQWgmRJv7rWiHhXNtB5wgLxWbhyKccpP247cpf5gPmAs4zG3bcnFb461wfaTd2rAX/dN2kg8fBpFkfUpc8HsfmyxKqumroVZi6lqfBwCxxmgTM= 21 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Allocators 2 | [![Build Status](https://travis-ci.org/rphmeier/allocators.svg)](https://travis-ci.org/rphmeier/allocators) [![Crates.io](https://img.shields.io/crates/v/allocators.svg)](https://crates.io/crates/allocators) 3 | 4 | ## [Documentation](https://rphmeier.github.io/allocators/allocators/) 5 | 6 | This crate provides a few different memory allocators, as well as an 7 | `Allocator` trait for creating other custom allocators. A main goal of allocators is composability. For this reason, it also provides some composable primitives to be used as building blocks for chained allocators. This crate leans heavily on unsafe/unstable code at the moment, and should be considered very experimental. 8 | In light of [RFC 1398 (Allocators, Take III)](https://github.com/rust-lang/rfcs/pull/1398), this crate has been made more or less obsolete in its current state, and will probably be revised to contain instead a collection of custom allocators without providing the framework. 9 | 10 | # Why? 11 | Users often need to have more fine-grained control over the way memory is allocated in their programs. This crate is a proof-of-concept that these mechanisms can be implemented in Rust and provide a safe interface to their users. 12 | 13 | # Allocator Traits 14 | ## Allocator 15 | This is the core trait for allocators to implement. All a type has to do is implement two unsafe functions: `allocate_raw` and `deallocate_raw`. This will likely require `reallocate_raw` in the future. 16 | 17 | ## BlockOwner 18 | Allocators that implement this can say definitively whether they own a block. 19 | 20 | # Allocator Types 21 | ## Scoped Allocator 22 | This is useful for reusing a block of memory for temporary allocations in a tight loop. Scopes can be nested and values allocated in a scope cannot be moved outside it. 23 | 24 | ```rust 25 | #![feature(placement_in_syntax)] 26 | use allocators::{Allocator, Scoped}; 27 | #[derive(Debug)] 28 | struct Bomb(u8); 29 | impl Drop for Bomb { 30 | fn drop(&mut self) { 31 | println!("Boom! {}", self.0); 32 | } 33 | } 34 | // new scoped allocator with a kilobyte of memory. 35 | let alloc = Scoped::new(1024).unwrap(); 36 | alloc.scope(|inner| { 37 | let mut bombs = Vec::new(); 38 | // allocate_val makes the value on the stack first. 39 | for i in 0..100 { bombs.push(inner.allocate(Bomb(i)).unwrap())} 40 | // watch the bombs go off! 41 | }); 42 | // Allocators also have placement-in syntax. 43 | let my_int = in alloc.make_place().unwrap() { 23 }; 44 | println!("My int: {}", *my_int); 45 | ``` 46 | 47 | ## Free List Allocator 48 | This allocator maintains a list of free blocks of a given size. 49 | ```rust 50 | use allocators::{Allocator, FreeList}; 51 | 52 | // create a FreeList allocator with 64 blocks of 1024 bytes. 53 | let alloc = FreeList::new(1024, 64).unwrap(); 54 | for _ in 0..10 { 55 | // allocate every block 56 | let mut v = Vec::new(); 57 | for i in 0u8..64 { 58 | v.push(alloc.allocate([i; 1024]).unwrap()); 59 | } 60 | // no more blocks :(. 61 | assert!(alloc.allocate([0; 1024]).is_err()); 62 | 63 | // all the blocks get pushed back onto the freelist at the end here, 64 | // memory gets reused efficiently in the next iteration. 65 | } 66 | ``` 67 | 68 | This allocator can yield very good performance for situations like the above, where each block's space is being fully used. 69 | 70 | # Composable Primitives 71 | These are very underdeveloped at the moment, and lack a fluent API as well. They are definitely a back-burner feature at the moment, since the idea of composable allocators hasn't really proved its value yet. 72 | 73 | ## Null Allocator 74 | This will probably get a new name since "Null" has some misleading connotations. 75 | 76 | It fails to allocate any request made to it, and panics when deallocated with. 77 | 78 | ## Fallback Allocator 79 | This composes two `BlockOwners`: a main allocator and a fallback. If the main allocator fails to allocate, it turns to the fallback. 80 | 81 | ## Proxy Allocator 82 | This wraps any allocator and something which implements the `ProxyLogger` trait, which provides functions to log allocation, deallocation, and reallocation in any arbitrary way. It has practical applications in debug builds for measuring how an allocator is being utilized. -------------------------------------------------------------------------------- /src/freelist.rs: -------------------------------------------------------------------------------- 1 | //! A Free List allocator. 2 | 3 | use std::cell::Cell; 4 | use std::mem; 5 | use std::ptr; 6 | 7 | use super::{Allocator, Error, Block, HeapAllocator, HEAP}; 8 | 9 | /// A `FreeList` allocator manages a list of free memory blocks of uniform size. 10 | /// Whenever a block is requested, it returns the first free block. 11 | pub struct FreeList<'a, A: 'a + Allocator> { 12 | alloc: &'a A, 13 | block_size: usize, 14 | free_list: Cell<*mut u8>, 15 | } 16 | 17 | impl FreeList<'static, HeapAllocator> { 18 | /// Creates a new `FreeList` backed by the heap. `block_size` must be greater 19 | /// than or equal to the size of a pointer. 20 | pub fn new(block_size: usize, num_blocks: usize) -> Result { 21 | FreeList::new_from(HEAP, block_size, num_blocks) 22 | } 23 | } 24 | impl<'a, A: 'a + Allocator> FreeList<'a, A> { 25 | /// Creates a new `FreeList` backed by another allocator. `block_size` must be greater 26 | /// than or equal to the size of a pointer. 27 | pub fn new_from(alloc: &'a A, 28 | block_size: usize, 29 | num_blocks: usize) 30 | -> Result { 31 | if block_size < mem::size_of::<*mut u8>() { 32 | return Err(Error::AllocatorSpecific("Block size too small.".into())); 33 | } 34 | 35 | let mut free_list = ptr::null_mut(); 36 | 37 | // allocate each block with maximal alignment. 38 | for _ in 0..num_blocks { 39 | 40 | match unsafe { alloc.allocate_raw(block_size, mem::align_of::<*mut u8>()) } { 41 | Ok(block) => { 42 | let ptr: *mut *mut u8 = block.ptr() as *mut *mut u8; 43 | unsafe { *ptr = free_list } 44 | free_list = block.ptr(); 45 | } 46 | Err(err) => { 47 | // destructor cleans up after us. 48 | drop(FreeList { 49 | alloc: alloc, 50 | block_size: block_size, 51 | free_list: Cell::new(free_list), 52 | }); 53 | 54 | return Err(err); 55 | } 56 | } 57 | } 58 | 59 | Ok(FreeList { 60 | alloc: alloc, 61 | block_size: block_size, 62 | free_list: Cell::new(free_list), 63 | }) 64 | } 65 | } 66 | 67 | unsafe impl<'a, A: 'a + Allocator> Allocator for FreeList<'a, A> { 68 | unsafe fn allocate_raw(&self, size: usize, align: usize) -> Result { 69 | if size == 0 { 70 | return Ok(Block::empty()); 71 | } else if size > self.block_size { 72 | return Err(Error::OutOfMemory); 73 | } 74 | 75 | if align > mem::align_of::<*mut u8>() { 76 | return Err(Error::UnsupportedAlignment); 77 | } 78 | 79 | let free_list = self.free_list.get(); 80 | if !free_list.is_null() { 81 | let next_block = *(free_list as *mut *mut u8); 82 | self.free_list.set(next_block); 83 | 84 | Ok(Block::new(free_list, size, align)) 85 | } else { 86 | Err(Error::OutOfMemory) 87 | } 88 | } 89 | 90 | unsafe fn reallocate_raw<'b>(&'b self, block: Block<'b>, new_size: usize) -> Result, (Error, Block<'b>)> { 91 | if new_size == 0 { 92 | Ok(Block::empty()) 93 | } else if block.is_empty() { 94 | Err((Error::UnsupportedAlignment, block)) 95 | } else if new_size <= self.block_size { 96 | Ok(Block::new(block.ptr(), new_size, block.align())) 97 | } else { 98 | Err((Error::OutOfMemory, block)) 99 | } 100 | } 101 | 102 | unsafe fn deallocate_raw(&self, block: Block) { 103 | if !block.is_empty() { 104 | let first = self.free_list.get(); 105 | let ptr = block.ptr(); 106 | *(ptr as *mut *mut u8) = first; 107 | self.free_list.set(ptr); 108 | } 109 | } 110 | } 111 | 112 | impl<'a, A: 'a + Allocator> Drop for FreeList<'a, A> { 113 | fn drop(&mut self) { 114 | let mut free_list = self.free_list.get(); 115 | //free all the blocks in the list. 116 | while !free_list.is_null() { 117 | unsafe { 118 | let next = *(free_list as *mut *mut u8); 119 | self.alloc.deallocate_raw(Block::new(free_list, 120 | self.block_size, 121 | mem::align_of::<*mut u8>())); 122 | free_list = next; 123 | } 124 | } 125 | } 126 | } 127 | 128 | unsafe impl<'a, A: 'a + Allocator + Sync> Send for FreeList<'a, A> {} 129 | 130 | #[cfg(test)] 131 | mod tests { 132 | use super::super::*; 133 | 134 | #[test] 135 | fn it_works() { 136 | let alloc = FreeList::new(1024, 64).ok().unwrap(); 137 | let mut blocks = Vec::new(); 138 | for _ in 0..64 { 139 | blocks.push(alloc.allocate([0u8; 1024]).ok().unwrap()); 140 | } 141 | assert!(alloc.allocate([0u8; 1024]).is_err()); 142 | drop(blocks); 143 | assert!(alloc.allocate([0u8; 1024]).is_ok()); 144 | } 145 | } 146 | -------------------------------------------------------------------------------- /src/boxed.rs: -------------------------------------------------------------------------------- 1 | use std::any::Any; 2 | use std::borrow::{Borrow, BorrowMut}; 3 | use std::marker::{PhantomData, Unsize}; 4 | use std::mem; 5 | use std::ops::{CoerceUnsized, Deref, DerefMut, InPlace, Placer}; 6 | use std::ops::Place as StdPlace; 7 | use std::ptr::Unique; 8 | 9 | use super::{Allocator, Block}; 10 | 11 | /// An item allocated by a custom allocator. 12 | pub struct AllocBox<'a, T: 'a + ?Sized, A: 'a + ?Sized + Allocator> { 13 | item: Unique, 14 | size: usize, 15 | align: usize, 16 | allocator: &'a A, 17 | } 18 | 19 | impl<'a, T: ?Sized, A: ?Sized + Allocator> AllocBox<'a, T, A> { 20 | /// Consumes this allocated value, yielding the value it manages. 21 | pub fn take(self) -> T where T: Sized { 22 | let val = unsafe { ::std::ptr::read(self.item.as_ptr()) }; 23 | let block = Block::new(self.item.as_ptr() as *mut u8, self.size, self.align); 24 | unsafe { self.allocator.deallocate_raw(block) }; 25 | mem::forget(self); 26 | val 27 | } 28 | 29 | /// Gets a handle to the block of memory this manages. 30 | pub unsafe fn as_block(&self) -> Block { 31 | Block::new(self.item.as_ptr() as *mut u8, self.size, self.align) 32 | } 33 | } 34 | 35 | impl<'a, T: ?Sized, A: ?Sized + Allocator> Deref for AllocBox<'a, T, A> { 36 | type Target = T; 37 | 38 | fn deref(&self) -> &T { 39 | unsafe { self.item.as_ref() } 40 | } 41 | } 42 | 43 | impl<'a, T: ?Sized, A: ?Sized + Allocator> DerefMut for AllocBox<'a, T, A> { 44 | fn deref_mut(&mut self) -> &mut T { 45 | unsafe { self.item.as_mut() } 46 | } 47 | } 48 | 49 | // AllocBox can store trait objects! 50 | impl<'a, T: ?Sized + Unsize, U: ?Sized, A: ?Sized + Allocator> CoerceUnsized> for AllocBox<'a, T, A> {} 51 | 52 | impl<'a, A: ?Sized + Allocator> AllocBox<'a, Any, A> { 53 | /// Attempts to downcast this `AllocBox` to a concrete type. 54 | pub fn downcast(self) -> Result, AllocBox<'a, Any, A>> { 55 | use std::raw::TraitObject; 56 | if self.is::() { 57 | let obj: TraitObject = unsafe { mem::transmute::<*mut Any, TraitObject>(self.item.as_ptr()) }; 58 | let new_allocated = AllocBox { 59 | item: unsafe { Unique::new(obj.data as *mut T) }, 60 | size: self.size, 61 | align: self.align, 62 | allocator: self.allocator, 63 | }; 64 | mem::forget(self); 65 | Ok(new_allocated) 66 | } else { 67 | Err(self) 68 | } 69 | } 70 | } 71 | 72 | impl<'a, T: ?Sized, A: ?Sized + Allocator> Borrow for AllocBox<'a, T, A> { 73 | fn borrow(&self) -> &T { 74 | &**self 75 | } 76 | } 77 | 78 | impl<'a, T: ?Sized, A: ?Sized + Allocator> BorrowMut for AllocBox<'a, T, A> { 79 | fn borrow_mut(&mut self) -> &mut T { 80 | &mut **self 81 | } 82 | } 83 | 84 | impl<'a, T: ?Sized, A: ?Sized + Allocator> Drop for AllocBox<'a, T, A> { 85 | #[inline] 86 | fn drop(&mut self) { 87 | use std::intrinsics::drop_in_place; 88 | unsafe { 89 | drop_in_place(self.item.as_ptr()); 90 | self.allocator.deallocate_raw(Block::new(self.item.as_ptr() as *mut u8, self.size, self.align)); 91 | } 92 | 93 | } 94 | } 95 | 96 | 97 | pub fn make_place(alloc: &A) -> Result, super::Error> { 98 | let (size, align) = (mem::size_of::(), mem::align_of::()); 99 | match unsafe { alloc.allocate_raw(size, align) } { 100 | Ok(block) => { 101 | Ok(Place { 102 | allocator: alloc, 103 | block: block, 104 | _marker: PhantomData, 105 | }) 106 | } 107 | Err(e) => Err(e), 108 | } 109 | } 110 | 111 | /// A place for allocating into. 112 | /// This is only used for in-place allocation, 113 | /// e.g. `let val = in (alloc.make_place().unwrap()) { EXPR }` 114 | pub struct Place<'a, T: 'a, A: 'a + ?Sized + Allocator> { 115 | allocator: &'a A, 116 | block: Block<'a>, 117 | _marker: PhantomData, 118 | } 119 | 120 | impl<'a, T: 'a, A: 'a + ?Sized + Allocator> Placer for Place<'a, T, A> { 121 | type Place = Self; 122 | fn make_place(self) -> Self { 123 | self 124 | } 125 | } 126 | 127 | impl<'a, T: 'a, A: 'a + ?Sized + Allocator> InPlace for Place<'a, T, A> { 128 | type Owner = AllocBox<'a, T, A>; 129 | unsafe fn finalize(self) -> Self::Owner { 130 | let allocated = AllocBox { 131 | item: Unique::new(self.block.ptr() as *mut T), 132 | size: self.block.size(), 133 | align: self.block.align(), 134 | allocator: self.allocator, 135 | }; 136 | 137 | mem::forget(self); 138 | allocated 139 | } 140 | } 141 | 142 | impl<'a, T: 'a, A: 'a + ?Sized + Allocator> StdPlace for Place<'a, T, A> { 143 | fn pointer(&mut self) -> *mut T { 144 | self.block.ptr() as *mut T 145 | } 146 | } 147 | 148 | impl<'a, T: 'a, A: 'a + ?Sized + Allocator> Drop for Place<'a, T, A> { 149 | #[inline] 150 | fn drop(&mut self) { 151 | // almost identical to AllocBox::Drop, but we don't drop 152 | // the value in place. If the finalize 153 | // method was never called, the expression 154 | // to create the value failed and the memory at the 155 | // pointer is still uninitialized, which we don't want to drop. 156 | unsafe { 157 | self.allocator.deallocate_raw(mem::replace(&mut self.block, Block::empty())); 158 | } 159 | 160 | } 161 | } -------------------------------------------------------------------------------- /src/composable.rs: -------------------------------------------------------------------------------- 1 | //! This module contains some composable building blocks to build allocator chains. 2 | 3 | use super::{Allocator, Error, Block, BlockOwner}; 4 | 5 | /// This allocator always fails. 6 | /// It will panic if you try to deallocate with it. 7 | pub struct NullAllocator; 8 | 9 | unsafe impl Allocator for NullAllocator { 10 | unsafe fn allocate_raw(&self, _size: usize, _align: usize) -> Result { 11 | Err(Error::OutOfMemory) 12 | } 13 | 14 | unsafe fn reallocate_raw<'a>(&'a self, block: Block<'a>, _new_size: usize) -> Result, (Error, Block<'a>)> { 15 | Err((Error::OutOfMemory, block)) 16 | } 17 | 18 | unsafe fn deallocate_raw(&self, _block: Block) { 19 | panic!("Attempted to deallocate using null allocator.") 20 | } 21 | } 22 | 23 | impl BlockOwner for NullAllocator { 24 | fn owns_block(&self, _block: &Block) -> bool { 25 | false 26 | } 27 | } 28 | 29 | /// This allocator has a main and a fallback allocator. 30 | /// It will always attempt to allocate first with the main allocator, 31 | /// and second with the fallback. 32 | pub struct Fallback { 33 | main: M, 34 | fallback: F, 35 | } 36 | 37 | impl Fallback { 38 | /// Create a new `Fallback` 39 | pub fn new(main: M, fallback: F) -> Self { 40 | Fallback { 41 | main: main, 42 | fallback: fallback, 43 | } 44 | } 45 | } 46 | 47 | unsafe impl Allocator for Fallback { 48 | unsafe fn allocate_raw(&self, size: usize, align: usize) -> Result { 49 | match self.main.allocate_raw(size, align) { 50 | Ok(block) => Ok(block), 51 | Err(_) => self.fallback.allocate_raw(size, align), 52 | } 53 | } 54 | 55 | unsafe fn reallocate_raw<'a>(&'a self, block: Block<'a>, new_size: usize) -> Result, (Error, Block<'a>)> { 56 | if self.main.owns_block(&block) { 57 | self.main.reallocate_raw(block, new_size) 58 | } else if self.fallback.owns_block(&block) { 59 | self.fallback.reallocate_raw(block, new_size) 60 | } else { 61 | Err((Error::AllocatorSpecific("Neither fallback nor main owns this block.".into()), block)) 62 | } 63 | } 64 | 65 | unsafe fn deallocate_raw(&self, block: Block) { 66 | if self.main.owns_block(&block) { 67 | self.main.deallocate_raw(block); 68 | } else if self.fallback.owns_block(&block) { 69 | self.fallback.deallocate_raw(block); 70 | } 71 | } 72 | } 73 | 74 | impl BlockOwner for Fallback { 75 | fn owns_block(&self, block: &Block) -> bool { 76 | self.main.owns_block(block) || self.fallback.owns_block(block) 77 | } 78 | } 79 | 80 | /// Something that logs an allocator's activity. 81 | /// In practice, this may be an output stream, 82 | /// a data collector, or seomthing else entirely. 83 | pub trait ProxyLogger { 84 | /// Called after a successful allocation. 85 | fn allocate_success(&self, block: &Block); 86 | /// Called after a failed allocation. 87 | fn allocate_fail(&self, err: &Error, size: usize, align: usize); 88 | 89 | /// Called when deallocating a block. 90 | fn deallocate(&self, block: &Block); 91 | 92 | /// Called after a successful reallocation. 93 | fn reallocate_success(&self, old_block: &Block, new_block: &Block); 94 | /// Called after a failed reallocation. 95 | fn reallocate_fail(&self, err: &Error, block: &Block, req_size: usize); 96 | } 97 | 98 | /// This wraps an allocator and a logger, logging all allocations 99 | /// and deallocations. 100 | pub struct Proxy { 101 | alloc: A, 102 | logger: L, 103 | } 104 | 105 | impl Proxy { 106 | /// Create a new proxy allocator. 107 | pub fn new(alloc: A, logger: L) -> Self { 108 | Proxy { 109 | alloc: alloc, 110 | logger: logger, 111 | } 112 | } 113 | } 114 | 115 | unsafe impl Allocator for Proxy { 116 | unsafe fn allocate_raw(&self, size: usize, align: usize) -> Result { 117 | match self.alloc.allocate_raw(size, align) { 118 | Ok(block) => { 119 | self.logger.allocate_success(&block); 120 | Ok(block) 121 | } 122 | Err(err) => { 123 | self.logger.allocate_fail(&err, size, align); 124 | Err(err) 125 | } 126 | } 127 | } 128 | 129 | unsafe fn reallocate_raw<'a>(&'a self, block: Block<'a>, new_size: usize) -> Result, (Error, Block<'a>)> { 130 | let old_copy = Block::new(block.ptr(), block.size(), block.align()); 131 | 132 | match self.alloc.reallocate_raw(block, new_size) { 133 | Ok(new_block) => { 134 | self.logger.reallocate_success(&old_copy, &new_block); 135 | Ok(new_block) 136 | } 137 | Err((err, old)) => { 138 | self.logger.reallocate_fail(&err, &old, new_size); 139 | Err((err, old)) 140 | } 141 | } 142 | } 143 | 144 | unsafe fn deallocate_raw(&self, block: Block) { 145 | self.logger.deallocate(&block); 146 | self.alloc.deallocate_raw(block); 147 | } 148 | } 149 | 150 | #[cfg(test)] 151 | mod tests { 152 | use super::super::*; 153 | 154 | #[test] 155 | #[should_panic] 156 | fn null_allocate() { 157 | let alloc = NullAllocator; 158 | alloc.allocate(1i32).unwrap(); 159 | } 160 | } 161 | -------------------------------------------------------------------------------- /src/scoped.rs: -------------------------------------------------------------------------------- 1 | //! A scoped linear allocator. This is something of a cross between a stack allocator 2 | //! and a traditional linear allocator. 3 | 4 | use std::cell::Cell; 5 | use std::mem; 6 | use std::ptr; 7 | 8 | use super::{Allocator, Error, Block, BlockOwner, HeapAllocator, HEAP}; 9 | 10 | /// A scoped linear allocator. 11 | pub struct Scoped<'parent, A: 'parent + Allocator> { 12 | allocator: &'parent A, 13 | current: Cell<*mut u8>, 14 | end: *mut u8, 15 | root: bool, 16 | start: *mut u8, 17 | } 18 | 19 | impl Scoped<'static, HeapAllocator> { 20 | /// Creates a new `Scoped` backed by `size` bytes from the heap. 21 | pub fn new(size: usize) -> Result { 22 | Scoped::new_from(HEAP, size) 23 | } 24 | } 25 | 26 | impl<'parent, A: Allocator> Scoped<'parent, A> { 27 | /// Creates a new `Scoped` backed by `size` bytes from the allocator supplied. 28 | pub fn new_from(alloc: &'parent A, size: usize) -> Result { 29 | // Create a memory buffer with the desired size and maximal align from the parent. 30 | match unsafe { alloc.allocate_raw(size, mem::align_of::()) } { 31 | Ok(block) => Ok(Scoped { 32 | allocator: alloc, 33 | current: Cell::new(block.ptr()), 34 | end: unsafe { block.ptr().offset(block.size() as isize) }, 35 | root: true, 36 | start: block.ptr(), 37 | }), 38 | Err(err) => Err(err), 39 | } 40 | } 41 | 42 | /// Calls the supplied function with a new scope of the allocator. 43 | /// 44 | /// Returns the result of the closure or an error if this allocator 45 | /// has already been scoped. 46 | pub fn scope(&self, f: F) -> Result 47 | where F: FnMut(&Self) -> U 48 | { 49 | if self.is_scoped() { 50 | return Err(()); 51 | } 52 | 53 | let mut f = f; 54 | let old = self.current.get(); 55 | let alloc = Scoped { 56 | allocator: self.allocator, 57 | current: self.current.clone(), 58 | end: self.end, 59 | root: false, 60 | start: old, 61 | }; 62 | 63 | // set the current pointer to null as a flag to indicate 64 | // that this allocator is being scoped. 65 | self.current.set(ptr::null_mut()); 66 | let u = f(&alloc); 67 | self.current.set(old); 68 | 69 | mem::forget(alloc); 70 | Ok(u) 71 | } 72 | 73 | // Whether this allocator is currently scoped. 74 | pub fn is_scoped(&self) -> bool { 75 | self.current.get().is_null() 76 | } 77 | } 78 | 79 | unsafe impl<'a, A: Allocator> Allocator for Scoped<'a, A> { 80 | unsafe fn allocate_raw(&self, size: usize, align: usize) -> Result { 81 | if self.is_scoped() { 82 | return Err(Error::AllocatorSpecific("Called allocate on already scoped \ 83 | allocator." 84 | .into())); 85 | } 86 | 87 | if size == 0 { 88 | return Ok(Block::empty()); 89 | } 90 | 91 | let current_ptr = self.current.get(); 92 | let aligned_ptr = super::align_forward(current_ptr, align); 93 | let end_ptr = aligned_ptr.offset(size as isize); 94 | 95 | if end_ptr > self.end { 96 | Err(Error::OutOfMemory) 97 | } else { 98 | self.current.set(end_ptr); 99 | Ok(Block::new(aligned_ptr, size, align)) 100 | } 101 | } 102 | 103 | /// Because of the way this allocator is designed, reallocating a block that is not 104 | /// the most recent will lead to fragmentation. 105 | unsafe fn reallocate_raw<'b>(&'b self, block: Block<'b>, new_size: usize) -> Result, (Error, Block<'b>)> { 106 | let current_ptr = self.current.get(); 107 | 108 | if new_size == 0 { 109 | Ok(Block::empty()) 110 | } else if block.is_empty() { 111 | Err((Error::UnsupportedAlignment, block)) 112 | } else if block.ptr().offset(block.size() as isize) == current_ptr { 113 | // if this block is the last allocated, resize it if we can. 114 | // otherwise, we are out of memory. 115 | let new_cur = current_ptr.offset((new_size - block.size()) as isize); 116 | if new_cur < self.end { 117 | self.current.set(new_cur); 118 | Ok(Block::new(block.ptr(), new_size, block.align())) 119 | } else { 120 | Err((Error::OutOfMemory, block)) 121 | } 122 | } else { 123 | // try to allocate a new block at the end, and copy the old mem over. 124 | // this will lead to some fragmentation. 125 | match self.allocate_raw(new_size, block.align()) { 126 | Ok(new_block) => { 127 | ptr::copy_nonoverlapping(block.ptr(), new_block.ptr(), block.size()); 128 | Ok(new_block) 129 | } 130 | Err(err) => { 131 | Err((err, block)) 132 | } 133 | } 134 | } 135 | } 136 | 137 | unsafe fn deallocate_raw(&self, block: Block) { 138 | if block.is_empty() || block.ptr().is_null() { 139 | return; 140 | } 141 | // no op for this unless this is the last allocation. 142 | // The memory gets reused when the scope is cleared. 143 | let current_ptr = self.current.get(); 144 | if !self.is_scoped() && block.ptr().offset(block.size() as isize) == current_ptr { 145 | self.current.set(block.ptr()); 146 | } 147 | } 148 | } 149 | 150 | impl<'a, A: Allocator> BlockOwner for Scoped<'a, A> { 151 | fn owns_block(&self, block: &Block) -> bool { 152 | let ptr = block.ptr(); 153 | 154 | ptr >= self.start && ptr <= self.end 155 | } 156 | } 157 | 158 | impl<'a, A: Allocator> Drop for Scoped<'a, A> { 159 | /// Drops the `Scoped` 160 | fn drop(&mut self) { 161 | let size = self.end as usize - self.start as usize; 162 | // only free if this allocator is the root to make sure 163 | // that memory is freed after destructors for allocated objects 164 | // are called in case of unwind 165 | if self.root && size > 0 { 166 | unsafe { 167 | self.allocator 168 | .deallocate_raw(Block::new(self.start, size, mem::align_of::())) 169 | } 170 | } 171 | } 172 | } 173 | 174 | unsafe impl<'a, A: 'a + Allocator + Sync> Send for Scoped<'a, A> {} 175 | 176 | #[cfg(test)] 177 | mod tests { 178 | use super::super::*; 179 | 180 | #[test] 181 | #[should_panic] 182 | fn use_outer() { 183 | let alloc = Scoped::new(4).unwrap(); 184 | let mut outer_val = alloc.allocate(0i32).unwrap(); 185 | alloc.scope(|_inner| { 186 | // using outer allocator is dangerous and should fail. 187 | outer_val = alloc.allocate(1i32).unwrap(); 188 | }) 189 | .unwrap(); 190 | } 191 | 192 | #[test] 193 | fn scope_scope() { 194 | let alloc = Scoped::new(64).unwrap(); 195 | let _ = alloc.allocate(0).unwrap(); 196 | alloc.scope(|inner| { 197 | let _ = inner.allocate(32); 198 | inner.scope(|bottom| { 199 | let _ = bottom.allocate(23); 200 | }) 201 | .unwrap(); 202 | }) 203 | .unwrap(); 204 | } 205 | 206 | #[test] 207 | fn out_of_memory() { 208 | // allocate more memory than the allocator has. 209 | let alloc = Scoped::new(0).unwrap(); 210 | let (err, _) = alloc.allocate(1i32).err().unwrap(); 211 | assert_eq!(err, Error::OutOfMemory); 212 | } 213 | 214 | #[test] 215 | fn placement_in() { 216 | let alloc = Scoped::new(8_000_000).unwrap(); 217 | // this would smash the stack otherwise. 218 | let _big = in alloc.make_place().unwrap() { [0u8; 8_000_000] }; 219 | } 220 | 221 | #[test] 222 | fn owning() { 223 | let alloc = Scoped::new(64).unwrap(); 224 | 225 | let val = alloc.allocate(1i32).unwrap(); 226 | assert!(alloc.owns(&val)); 227 | 228 | alloc.scope(|inner| { 229 | let in_val = inner.allocate(2i32).unwrap(); 230 | assert!(inner.owns(&in_val)); 231 | assert!(!inner.owns(&val)); 232 | }) 233 | .unwrap(); 234 | } 235 | 236 | #[test] 237 | fn mutex_sharing() { 238 | use std::thread; 239 | use std::sync::{Arc, Mutex}; 240 | let alloc = Scoped::new(64).unwrap(); 241 | let data = Arc::new(Mutex::new(alloc)); 242 | for i in 0..10 { 243 | let data = data.clone(); 244 | thread::spawn(move || { 245 | let alloc_handle = data.lock().unwrap(); 246 | let _ = alloc_handle.allocate(i).unwrap(); 247 | }); 248 | } 249 | } 250 | } 251 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | //! Custom memory allocators and utilities for using them. 2 | //! 3 | //! # Examples 4 | //! ```rust 5 | //! #![feature(placement_in_syntax)] 6 | //! 7 | //! use std::io; 8 | //! use allocators::{Allocator, Scoped, BlockOwner, FreeList, Proxy}; 9 | //! 10 | //! #[derive(Debug)] 11 | //! struct Bomb(u8); 12 | //! 13 | //! impl Drop for Bomb { 14 | //! fn drop(&mut self) { 15 | //! println!("Boom! {}", self.0); 16 | //! } 17 | //! } 18 | //! // new scoped allocator with 4 kilobytes of memory. 19 | //! let alloc = Scoped::new(4 * 1024).unwrap(); 20 | //! 21 | //! alloc.scope(|inner| { 22 | //! let mut bombs = Vec::new(); 23 | //! // allocate makes the value on the stack first. 24 | //! for i in 0..100 { bombs.push(inner.allocate(Bomb(i)).unwrap())} 25 | //! // there's also in-place allocation! 26 | //! let bomb_101 = in inner.make_place().unwrap() { Bomb(101) }; 27 | //! // watch the bombs go off! 28 | //! }); 29 | //! 30 | //! 31 | //! // You can make allocators backed by other allocators. 32 | //! { 33 | //! let secondary_alloc = FreeList::new_from(&alloc, 128, 8).unwrap(); 34 | //! let mut val = secondary_alloc.allocate(0i32).unwrap(); 35 | //! *val = 1; 36 | //! } 37 | //! 38 | //! ``` 39 | 40 | #![feature( 41 | alloc, 42 | coerce_unsized, 43 | heap_api, 44 | placement_new_protocol, 45 | placement_in_syntax, 46 | raw, 47 | unique, 48 | unsize, 49 | )] 50 | 51 | use std::error::Error as StdError; 52 | use std::fmt; 53 | use std::marker::PhantomData; 54 | use std::ptr::Unique; 55 | 56 | use alloc::heap; 57 | 58 | extern crate alloc; 59 | 60 | mod boxed; 61 | pub mod composable; 62 | pub mod freelist; 63 | pub mod scoped; 64 | 65 | pub use boxed::{AllocBox, Place}; 66 | pub use composable::*; 67 | pub use freelist::FreeList; 68 | pub use scoped::Scoped; 69 | 70 | /// A custom memory allocator. 71 | pub unsafe trait Allocator { 72 | /// Attempts to allocate the value supplied to it. 73 | /// 74 | /// # Examples 75 | /// ```rust 76 | /// use allocators::{Allocator, AllocBox}; 77 | /// fn alloc_array(allocator: &A) -> AllocBox<[u8; 1000], A> { 78 | /// allocator.allocate([0; 1000]).ok().unwrap() 79 | /// } 80 | /// ``` 81 | #[inline] 82 | fn allocate(&self, val: T) -> Result, (Error, T)> 83 | where Self: Sized 84 | { 85 | match self.make_place() { 86 | Ok(place) => { 87 | Ok(in place { val }) 88 | } 89 | Err(err) => { 90 | Err((err, val)) 91 | } 92 | } 93 | } 94 | 95 | /// Attempts to create a place to allocate into. 96 | /// For the general purpose, calling `allocate` on the allocator is enough. 97 | /// However, when you know the value you are allocating is too large 98 | /// to be constructed on the stack, you should use in-place allocation. 99 | /// 100 | /// # Examples 101 | /// ```rust 102 | /// #![feature(placement_in_syntax)] 103 | /// use allocators::{Allocator, AllocBox}; 104 | /// fn alloc_array(allocator: &A) -> AllocBox<[u8; 1000], A> { 105 | /// // if 1000 bytes were enough to smash the stack, this would still work. 106 | /// in allocator.make_place().unwrap() { [0; 1000] } 107 | /// } 108 | /// ``` 109 | fn make_place(&self) -> Result, Error> 110 | where Self: Sized 111 | { 112 | boxed::make_place(self) 113 | } 114 | 115 | /// Attempt to allocate a block of memory. 116 | /// 117 | /// Returns either a block of memory allocated 118 | /// or an Error. If `size` is equal to 0, the block returned must 119 | /// be created by `Block::empty()` 120 | /// 121 | /// # Safety 122 | /// Never use the block's pointer outside of the lifetime of the allocator. 123 | /// It must be deallocated with the same allocator as it was allocated with. 124 | /// It is undefined behavior to provide a non power-of-two align. 125 | unsafe fn allocate_raw(&self, size: usize, align: usize) -> Result; 126 | 127 | /// Reallocate a block of memory. 128 | /// 129 | /// This either returns a new, possibly moved block with the requested size, 130 | /// or the old block back. 131 | /// The new block will have the same alignment as the old. 132 | /// 133 | /// # Safety 134 | /// If given an empty block, it must return it back instead of allocating the new size, 135 | /// since the alignment is unknown. 136 | /// 137 | /// If the requested size is 0, it must deallocate the old block and return an empty one. 138 | unsafe fn reallocate_raw<'a>(&'a self, block: Block<'a>, new_size: usize) -> Result, (Error, Block<'a>)>; 139 | 140 | /// Deallocate the memory referred to by this block. 141 | /// 142 | /// # Safety 143 | /// This block must have been allocated by this allocator. 144 | unsafe fn deallocate_raw(&self, block: Block); 145 | } 146 | 147 | /// An allocator that knows which blocks have been issued by it. 148 | pub trait BlockOwner: Allocator { 149 | /// Whether this allocator owns this allocated value. 150 | fn owns<'a, T, A: Allocator>(&self, val: &AllocBox<'a, T, A>) -> bool { 151 | self.owns_block(& unsafe { val.as_block() }) 152 | } 153 | 154 | /// Whether this allocator owns the block passed to it. 155 | fn owns_block(&self, block: &Block) -> bool; 156 | 157 | /// Joins this allocator with a fallback allocator. 158 | // TODO: Maybe not the right place for this? 159 | // Right now I've been more focused on shaking out the 160 | // specifics of allocation than crafting a fluent API. 161 | fn with_fallback(self, other: O) -> Fallback 162 | where Self: Sized 163 | { 164 | Fallback::new(self, other) 165 | } 166 | } 167 | 168 | /// A block of memory created by an allocator. 169 | pub struct Block<'a> { 170 | ptr: Unique, 171 | size: usize, 172 | align: usize, 173 | _marker: PhantomData<&'a [u8]>, 174 | } 175 | 176 | impl<'a> Block<'a> { 177 | /// Create a new block from the supplied parts. 178 | /// The pointer cannot be null. 179 | /// 180 | /// # Panics 181 | /// Panics if the pointer passed is null. 182 | pub fn new(ptr: *mut u8, size: usize, align: usize) -> Self { 183 | assert!(!ptr.is_null()); 184 | Block { 185 | ptr: unsafe { Unique::new(ptr) }, 186 | size: size, 187 | align: align, 188 | _marker: PhantomData, 189 | } 190 | } 191 | 192 | /// Creates an empty block. 193 | pub fn empty() -> Self { 194 | Block { 195 | ptr: Unique::empty(), 196 | size: 0, 197 | align: 0, 198 | _marker: PhantomData, 199 | } 200 | } 201 | 202 | /// Get the pointer from this block. 203 | pub fn ptr(&self) -> *mut u8 { 204 | self.ptr.as_ptr() 205 | } 206 | /// Get the size of this block. 207 | pub fn size(&self) -> usize { 208 | self.size 209 | } 210 | /// Get the align of this block. 211 | pub fn align(&self) -> usize { 212 | self.align 213 | } 214 | /// Whether this block is empty. 215 | pub fn is_empty(&self) -> bool { 216 | self.size == 0 217 | } 218 | } 219 | 220 | /// Errors that can occur while creating an allocator 221 | /// or allocating from it. 222 | #[derive(Debug, Eq, PartialEq)] 223 | pub enum Error { 224 | /// The allocator failed to allocate the amount of memory requested of it. 225 | OutOfMemory, 226 | /// The allocator does not support the requested alignment. 227 | UnsupportedAlignment, 228 | /// An allocator-specific error message. 229 | AllocatorSpecific(String), 230 | } 231 | 232 | impl fmt::Display for Error { 233 | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { 234 | formatter.write_str(self.description()) 235 | } 236 | } 237 | 238 | impl StdError for Error { 239 | fn description(&self) -> &str { 240 | use Error::*; 241 | 242 | match *self { 243 | OutOfMemory => { 244 | "Allocator out of memory." 245 | } 246 | UnsupportedAlignment => { 247 | "Attempted to allocate with unsupported alignment." 248 | } 249 | AllocatorSpecific(ref reason) => { 250 | reason 251 | } 252 | } 253 | } 254 | } 255 | 256 | /// Allocator stub that just forwards to heap allocation. 257 | /// It is recommended to use the `HEAP` constant instead 258 | /// of creating a new instance of this, to benefit from 259 | /// the static lifetime that it provides. 260 | #[derive(Debug)] 261 | pub struct HeapAllocator; 262 | 263 | // A constant for allocators to use the heap as a root. 264 | // Values allocated with this are effectively `Box`es. 265 | pub const HEAP: &'static HeapAllocator = &HeapAllocator; 266 | 267 | unsafe impl Allocator for HeapAllocator { 268 | #[inline] 269 | unsafe fn allocate_raw(&self, size: usize, align: usize) -> Result { 270 | if size != 0 { 271 | let ptr = heap::allocate(size, align); 272 | if !ptr.is_null() { 273 | Ok(Block::new(ptr, size, align)) 274 | } else { 275 | Err(Error::OutOfMemory) 276 | } 277 | } else { 278 | Ok(Block::empty()) 279 | } 280 | } 281 | 282 | #[inline] 283 | unsafe fn reallocate_raw<'a>(&'a self, block: Block<'a>, new_size: usize) -> Result, (Error, Block<'a>)> { 284 | if new_size == 0 { 285 | self.deallocate_raw(block); 286 | Ok(Block::empty()) 287 | } else if block.is_empty() { 288 | Err((Error::UnsupportedAlignment, block)) 289 | } else { 290 | let new_ptr = heap::reallocate(block.ptr(), block.size(), new_size, block.align()); 291 | 292 | if new_ptr.is_null() { 293 | Err((Error::OutOfMemory, block)) 294 | } else { 295 | Ok(Block::new(new_ptr, new_size, block.align())) 296 | } 297 | } 298 | } 299 | 300 | #[inline] 301 | unsafe fn deallocate_raw(&self, block: Block) { 302 | if !block.is_empty() { 303 | heap::deallocate(block.ptr(), block.size(), block.align()) 304 | } 305 | } 306 | } 307 | 308 | // aligns a pointer forward to the next value aligned with `align`. 309 | #[inline] 310 | fn align_forward(ptr: *mut u8, align: usize) -> *mut u8 { 311 | ((ptr as usize + align - 1) & !(align - 1)) as *mut u8 312 | } 313 | 314 | // implementations for trait object types. 315 | 316 | unsafe impl<'a, A: ?Sized + Allocator + 'a> Allocator for Box { 317 | unsafe fn allocate_raw(&self, size: usize, align: usize) -> Result { 318 | (**self).allocate_raw(size, align) 319 | } 320 | 321 | unsafe fn reallocate_raw<'b>(&'b self, block: Block<'b>, new_size: usize) -> Result, (Error, Block<'b>)> { 322 | (**self).reallocate_raw(block, new_size) 323 | } 324 | 325 | unsafe fn deallocate_raw(&self, block: Block) { 326 | (**self).deallocate_raw(block) 327 | } 328 | } 329 | 330 | unsafe impl<'a, 'b: 'a, A: ?Sized + Allocator + 'b> Allocator for &'a A { 331 | unsafe fn allocate_raw(&self, size: usize, align: usize) -> Result { 332 | (**self).allocate_raw(size, align) 333 | } 334 | 335 | unsafe fn reallocate_raw<'c>(&'c self, block: Block<'c>, new_size: usize) -> Result, (Error, Block<'c>)> { 336 | (**self).reallocate_raw(block, new_size) 337 | } 338 | 339 | unsafe fn deallocate_raw(&self, block: Block) { 340 | (**self).deallocate_raw(block) 341 | } 342 | } 343 | 344 | unsafe impl<'a, 'b: 'a, A: ?Sized + Allocator + 'b> Allocator for &'a mut A { 345 | unsafe fn allocate_raw(&self, size: usize, align: usize) -> Result { 346 | (**self).allocate_raw(size, align) 347 | } 348 | 349 | unsafe fn reallocate_raw<'c>(&'c self, block: Block<'c>, new_size: usize) -> Result, (Error, Block<'c>)> { 350 | (**self).reallocate_raw(block, new_size) 351 | } 352 | 353 | unsafe fn deallocate_raw(&self, block: Block) { 354 | (**self).deallocate_raw(block) 355 | } 356 | } 357 | 358 | #[cfg(test)] 359 | mod tests { 360 | 361 | use std::any::Any; 362 | 363 | use super::*; 364 | 365 | #[test] 366 | fn heap_lifetime() { 367 | let my_int; 368 | { 369 | my_int = HEAP.allocate(0i32).unwrap(); 370 | } 371 | 372 | assert_eq!(*my_int, 0); 373 | } 374 | #[test] 375 | fn heap_in_place() { 376 | let big = in HEAP.make_place().unwrap() { [0u8; 8_000_000] }; 377 | assert_eq!(big.len(), 8_000_000); 378 | } 379 | 380 | #[test] 381 | fn unsizing() { 382 | #[derive(Debug)] 383 | struct Bomb; 384 | impl Drop for Bomb { 385 | fn drop(&mut self) { 386 | println!("Boom") 387 | } 388 | } 389 | 390 | let my_foo: AllocBox = HEAP.allocate(Bomb).unwrap(); 391 | let _: AllocBox = my_foo.downcast().ok().unwrap(); 392 | } 393 | 394 | #[test] 395 | fn take_out() { 396 | let _: [u8; 1024] = HEAP.allocate([0; 1024]).ok().unwrap().take(); 397 | } 398 | 399 | #[test] 400 | fn boxed_allocator() { 401 | #[derive(Debug)] 402 | struct Increment<'a>(&'a mut i32); 403 | impl<'a> Drop for Increment<'a> { 404 | fn drop(&mut self) { 405 | *self.0 += 1; 406 | } 407 | } 408 | 409 | let mut i = 0; 410 | let alloc: Box = Box::new(HEAP); 411 | { 412 | let _ = alloc.allocate(Increment(&mut i)).unwrap(); 413 | } 414 | assert_eq!(i, 1); 415 | } 416 | } 417 | --------------------------------------------------------------------------------