├── clippy.toml
├── rustfmt.toml
├── .idea
│   ├── .gitignore
│   ├── libraries
│   │   └── .gitignore
│   ├── encodings.xml
│   ├── markdown-navigator
│   │   └── profiles_settings.xml
│   ├── codeStyles
│   │   ├── codeStyleConfig.xml
│   │   └── Project.xml
│   ├── vcs.xml
│   ├── modules.xml
│   ├── codeStyleSettings.xml
│   ├── copyright
│   │   ├── profiles_settings.xml
│   │   └── intellij-module.xml
│   ├── runConfigurations
│   │   ├── clean.xml
│   │   └── check_musl.xml
│   ├── compiler.xml
│   ├── misc.xml
│   ├── dictionaries
│   │   └── raph.xml
│   └── markdown-navigator.xml
├── .tm_properties
├── .gitmodules
├── LICENSE
├── src
│   ├── allocators
│   │   ├── bit_set
│   │   │   ├── BitsInAByte.rs
│   │   │   ├── mod.rs
│   │   │   ├── RelativeLocationInBitSet.rs
│   │   │   ├── AbsoluteLocationInBitSet.rs
│   │   │   ├── NumberOfBitSetWords.rs
│   │   │   ├── BitSetWord.rs
│   │   │   ├── NumberOfBytes.rs
│   │   │   ├── BlockSize.rs
│   │   │   ├── NumberOfBits.rs
│   │   │   └── BitSetWordPointer.rs
│   │   ├── binary_search_trees
│   │   │   ├── red_black_tree
│   │   │   │   ├── mod.rs
│   │   │   │   ├── Color.rs
│   │   │   │   ├── ParentAndColor.rs
│   │   │   │   ├── RedBlackTreeDoubleEndedIterator.rs
│   │   │   │   ├── Node.rs
│   │   │   │   └── RedBlackTree.rs
│   │   │   ├── mod.rs
│   │   │   ├── BinarySearchTreeWithCachedKnowledgeOfFirstChild.rs
│   │   │   └── BinarySearchTreesWithCachedKnowledgeOfFirstChild.rs
│   │   ├── mod.rs
│   │   ├── MemoryUsageTrackingThreadLocalAllocator.rs
│   │   ├── ContextAllocator.rs
│   │   ├── BumpAllocator.rs
│   │   └── Allocator.rs
│   ├── memory_sources
│   │   ├── arena
│   │   │   ├── Unsized.rs
│   │   │   ├── mod.rs
│   │   │   ├── SlotIndex.rs
│   │   │   ├── UnallocatedBlock.rs
│   │   │   └── Arena.rs
│   │   ├── mod.rs
│   │   ├── CoroutineStackMemory.rs
│   │   ├── MemorySource.rs
│   │   ├── CoroutineHeapMemorySource.rs
│   │   ├── CoroutineHeapMemory.rs
│   │   └── MemoryMapSource.rs
│   ├── MemoryAddress.rs
│   ├── extensions
│   │   ├── logarithm_base2_as_usize.rs
│   │   ├── NonNullU8NodePointer.rs
│   │   ├── mod.rs
│   │   ├── PointerMutExt.rs
│   │   ├── NonNullExt.rs
│   │   ├── U64Ext.rs
│   │   ├── NonZeroU32Ext.rs
│   │   ├── UsizeExt.rs
│   │   ├── PointerExt.rs
│   │   ├── NonZeroUsizeExt.rs
│   │   └── NonNullU8Ext.rs
│   ├── adaptors
│   │   ├── mod.rs
│   │   ├── global_alloc.rs
│   │   ├── alloc.rs
│   │   ├── AllocatorAdaptor.rs
│   │   ├── AllocToAllocatorAdaptor.rs
│   │   └── GlobalAllocToAllocatorAdaptor.rs
│   ├── CurrentAllocatorInUse.rs
│   ├── MemoryRange.rs
│   ├── LifetimeHint.rs
│   ├── PerThreadState.rs
│   ├── LocalAllocator.rs
│   ├── LocalAllocatorMemoryUsage.rs
│   ├── GloballyAllocated.rs
│   ├── lib.rs
│   ├── GlobalThreadAndCoroutineSwitchableAllocator.rs
│   └── GlobalThreadAndCoroutineSwitchableAllocatorInstance.rs
├── context-allocator.iml
├── .gitignore
├── Cargo.toml
├── COPYRIGHT
├── README.md
└── NOTES
/clippy.toml:
--------------------------------------------------------------------------------
1 | .cargo/clippy.toml
--------------------------------------------------------------------------------
/rustfmt.toml:
--------------------------------------------------------------------------------
1 | .cargo/rustfmt.toml
--------------------------------------------------------------------------------
/.idea/.gitignore:
--------------------------------------------------------------------------------
1 | workspace.xml
2 |
--------------------------------------------------------------------------------
/.idea/libraries/.gitignore:
--------------------------------------------------------------------------------
1 | *.xml
2 |
--------------------------------------------------------------------------------
/.tm_properties:
--------------------------------------------------------------------------------
1 | .cargo/.tm_properties
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule ".cargo"]
2 | path = .cargo
3 | url = https://github.com/lemonrock/.cargo.git
4 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | LICENSE terms are documented in the COPYRIGHT file at the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
2 |
--------------------------------------------------------------------------------
/src/allocators/bit_set/BitsInAByte.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | const BitsInAByte: usize = 8;
6 |
--------------------------------------------------------------------------------
/src/memory_sources/arena/Unsized.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | extern
6 | {
7 | type Unsized;
8 | }
9 |
--------------------------------------------------------------------------------
/src/MemoryAddress.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// Represents a memory address.
6 | pub type MemoryAddress = NonNull<u8>;
7 |
--------------------------------------------------------------------------------
/src/memory_sources/arena/mod.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | use super::*;
6 |
7 |
8 | include!("Arena.rs");
9 | include!("SlotIndex.rs");
10 | include!("UnallocatedBlock.rs");
11 | include!("Unsized.rs");
12 |
--------------------------------------------------------------------------------
/src/extensions/logarithm_base2_as_usize.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | #[inline(always)]
6 | pub(crate) const fn logarithm_base2_as_usize(value: usize) -> usize
7 | {
8 | value.trailing_zeros() as usize
9 | }
10 |
--------------------------------------------------------------------------------
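A standalone sketch (not part of the crate) of what logarithm_base2_as_usize computes: because it is implemented with trailing_zeros, it is an exact base-2 logarithm only when the value is a power of two; for any other value it yields the exponent of the lowest set bit.

const fn logarithm_base2_as_usize(value: usize) -> usize
{
    value.trailing_zeros() as usize
}

fn main()
{
    assert_eq!(logarithm_base2_as_usize(1), 0);     // 2^0
    assert_eq!(logarithm_base2_as_usize(4096), 12); // 2^12
    assert_eq!(logarithm_base2_as_usize(12), 2);    // not a power of two: exponent of the lowest set bit
}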
/src/adaptors/mod.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | use super::*;
6 |
7 |
8 | include!("alloc.rs");
9 | include!("global_alloc.rs");
10 |
11 |
12 | include!("AllocatorAdaptor.rs");
13 | include!("AllocToAllocatorAdaptor.rs");
14 | include!("GlobalAllocToAllocatorAdaptor.rs");
15 |
--------------------------------------------------------------------------------
/src/memory_sources/mod.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | use super::*;
6 |
7 |
8 | include!("CoroutineHeapMemory.rs");
9 | include!("CoroutineHeapMemorySource.rs");
10 | include!("CoroutineStackMemory.rs");
11 | include!("MemorySource.rs");
12 | include!("MemoryMapSource.rs");
13 |
--------------------------------------------------------------------------------
/src/allocators/binary_search_trees/red_black_tree/mod.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | use super::*;
6 |
7 |
8 | include!("Node.rs");
9 | include!("NodePointer.rs");
10 | include!("ParentAndColor.rs");
11 | include!("RedBlackTree.rs");
12 | include!("Color.rs");
13 | include!("RedBlackTreeDoubleEndedIterator.rs");
14 |
--------------------------------------------------------------------------------
/src/extensions/NonNullU8NodePointer.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | pub(crate) trait NonNullU8NodePointer: NonNullU8Ext
6 | {
7 | #[inline(always)]
8 | fn node_pointer(self) -> NodePointer
9 | {
10 | NodePointer::from_memory_address(self.to_non_null_u8())
11 | }
12 | }
13 |
14 | impl NonNullU8NodePointer for NonNull<u8>
15 | {
16 | }
17 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # This file is part of .cargo. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/raphaelcohn/.cargo/master/COPYRIGHT. No part of rust1, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | # Copyright © 2016 The developers of .cargo. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/raphaelcohn/.cargo/master/COPYRIGHT.
3 |
4 | # Mac OS X Stuff: http://www.westwind.com/reference/os-x/invisibles.html
5 | .DS_Store
6 | .Trashes
7 | *.swp
8 | # Actually Icon\r\r, but line conversion can mess with this...
9 | Icon[
]
10 |
11 | # Editor, etc
12 | *.lock
13 |
14 | # Cargo related build artifacts
15 | target/
16 | target-install/
17 | .cargo/target
18 | .cargo/out
19 | workspace/target
20 |
21 | # IntelliJ related
22 | .idea/workspace.xml
23 |
--------------------------------------------------------------------------------
/src/extensions/mod.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | use super::*;
6 |
7 |
8 | include!("logarithm_base2_as_usize.rs");
9 | include!("NonNullExt.rs");
10 | include!("NonNullU8Ext.rs");
11 | include!("NonNullU8NodePointer.rs");
12 | include!("NonZeroU32Ext.rs");
13 | include!("NonZeroUsizeExt.rs");
14 | include!("PointerExt.rs");
15 | include!("PointerMutExt.rs");
16 | include!("U64Ext.rs");
17 | include!("UsizeExt.rs");
18 |
--------------------------------------------------------------------------------
/src/allocators/bit_set/mod.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | use super::*;
6 |
7 |
8 | include!("AbsoluteLocationInBitSet.rs");
9 | include!("BitSetAllocator.rs");
10 | include!("BitSetWord.rs");
11 | include!("BitSetWordPointer.rs");
12 | include!("BitsInAByte.rs");
13 | include!("BlockSize.rs");
14 | include!("NumberOfBits.rs");
15 | include!("NumberOfBitSetWords.rs");
16 | include!("NumberOfBytes.rs");
17 | include!("RelativeLocationInBitSet.rs");
18 |
--------------------------------------------------------------------------------
/src/extensions/PointerMutExt.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// Useful extensions.
6 | pub(crate) trait PointerMutExt<T>: PointerExt<T>
7 | {
8 | /// Mutable reference.
9 | fn mutable_reference<'a>(self) -> &'a mut T;
10 | }
11 |
12 | impl<T> PointerMutExt<T> for *mut T
13 | {
14 | #[inline(always)]
15 | fn mutable_reference<'a>(self) -> &'a mut T
16 | {
17 | debug_assert!(self.is_not_null(), "null pointers can not be dereferenced");
18 |
19 | unsafe { &mut * self }
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/src/CurrentAllocatorInUse.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// Records which allocator is currently in use for `Global` allocations.
6 | ///
7 | /// This does not affect reallocations or deallocations in any way.
8 | #[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
9 | pub enum CurrentAllocatorInUse
10 | {
11 | /// A coroutine local allocator.
12 | CoroutineLocal,
13 |
14 | /// A thread local allocator.
15 | ThreadLocal,
16 |
17 | /// A global allocator.
18 | Global,
19 | }
20 |
--------------------------------------------------------------------------------
/src/allocators/mod.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | use super::*;
6 |
7 |
8 | use self::bit_set::*;
9 |
10 |
11 | /// A binary search tree based allocator.
12 | pub mod binary_search_trees;
13 |
14 |
15 | /// A bit set based allocator; allows reallocations, but requires a linear scan to find free blocks.
16 | pub mod bit_set;
17 |
18 |
19 | include!("Allocator.rs");
20 | include!("BumpAllocator.rs");
21 | include!("ContextAllocator.rs");
22 | include!("MemoryUsageTrackingThreadLocalAllocator.rs");
23 |
--------------------------------------------------------------------------------
/src/memory_sources/arena/SlotIndex.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | #[derive(Default, Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
6 | struct SlotIndex(usize);
7 |
8 | impl SlotIndex
9 | {
10 | const IsFullyAllocatedNextAvailableSlotIndexSentinel: Self = Self(usize::MAX);
11 |
12 | #[inline(always)]
13 | fn is_fully_allocated(self) -> bool
14 | {
15 | self == Self::IsFullyAllocatedNextAvailableSlotIndexSentinel
16 | }
17 |
18 | #[inline(always)]
19 | fn increment(&mut self)
20 | {
21 | self.0 += 1
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/src/allocators/binary_search_trees/red_black_tree/Color.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
6 | #[repr(usize)]
7 | pub(crate) enum Color
8 | {
9 | Red = 0,
10 |
11 | Black = 1,
12 | }
13 |
14 | impl Color
15 | {
16 | #[inline(always)]
17 | pub(crate) fn color_bit(self) -> usize
18 | {
19 | self as usize
20 | }
21 |
22 | #[inline(always)]
23 | pub(crate) fn is_red(self) -> bool
24 | {
25 | self == Color::Red
26 | }
27 |
28 | #[inline(always)]
29 | pub(crate) fn is_black(self) -> bool
30 | {
31 | self == Color::Black
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/src/memory_sources/CoroutineStackMemory.rs:
--------------------------------------------------------------------------------
1 | // This file is part of linux-support. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-support/master/COPYRIGHT. No part of linux-support, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2020 The developers of linux-support. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-support/master/COPYRIGHT.
3 |
4 |
5 | /// Stack memory.
6 | ///
7 | /// On x86-64, the stack needs to be 16 byte aligned with a minimum size of 64 bytes in order to store a `SavedContext`, hence the alignment of `64` (over `16`).
8 | #[repr(C, align(64))]
9 | pub struct CoroutineStackMemory
10 | {
11 | sizing: StackSize
12 | }
13 |
14 | impl Debug for CoroutineStackMemory
15 | {
16 | #[inline(always)]
17 | fn fmt(&self, f: &mut Formatter) -> fmt::Result
18 | {
19 | write!(f, "CoroutineStackMemory({})", size_of::())
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/src/extensions/NonNullExt.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// Extensions to make working with NonNull easier.
6 | pub(crate) trait NonNullExt<T>
7 | {
8 | /// To a reference.
9 | fn reference<'any>(self) -> &'any T;
10 |
11 | /// To a mutable reference.
12 | fn mutable_reference<'any>(self) -> &'any mut T;
13 | }
14 |
15 | impl<T> NonNullExt<T> for NonNull<T>
16 | {
17 | #[inline(always)]
18 | fn reference<'any>(self) -> &'any T
19 | {
20 | unsafe { & * self.as_ptr() }
21 | }
22 |
23 | #[inline(always)]
24 | fn mutable_reference<'any>(self) -> &'any mut T
25 | {
26 | unsafe { &mut * self.as_ptr() }
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/src/allocators/bit_set/RelativeLocationInBitSet.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// This is a mixed-radix representation.
6 | #[derive(Default, Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
7 | struct RelativeLocationInBitSet
8 | {
9 | major: NumberOfBitSetWords,
10 | minor: NumberOfBits,
11 | }
12 |
13 | impl RelativeLocationInBitSet
14 | {
15 | #[inline(always)]
16 | fn to_absolute_location_in_bit_set(self, inclusive_start_of_bitset: BitSetWordPointer) -> AbsoluteLocationInBitSet
17 | {
18 | AbsoluteLocationInBitSet
19 | {
20 | major: inclusive_start_of_bitset.increment_in_bit_set_words(self.major),
21 | minor: self.minor,
22 | }
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/src/memory_sources/MemorySource.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// A memory source is a sort-of crude allocator that can release memory originally obtained from, say, the operating system, an arena or some fixed range.
6 | ///
7 | /// It is thread-aware but not necessarily thread-safe.
8 | pub trait MemorySource: Debug
9 | {
10 | /// Size.
11 | fn size(&self) -> NonZeroUsize;
12 |
13 | /// Start.
14 | fn allocations_start_from(&self) -> MemoryAddress;
15 |
16 | /// Memory range.
17 | #[inline(always)]
18 | fn memory_range(&self) -> MemoryRange
19 | {
20 | MemoryRange::new(self.allocations_start_from(), self.allocations_start_from().add_non_zero(self.size()))
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/src/MemoryRange.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// Represents a memory range for which an allocator can allocate.
6 | #[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
7 | pub struct MemoryRange
8 | {
9 | /// From (inclusive).
10 | pub from: MemoryAddress,
11 |
12 | /// To (exclusive).
13 | pub to: MemoryAddress,
14 | }
15 |
16 | impl MemoryRange
17 | {
18 | /// Create a new instance.
19 | #[inline(always)]
20 | pub const fn new(from: MemoryAddress, to: MemoryAddress) -> Self
21 | {
22 | Self
23 | {
24 | from,
25 | to,
26 | }
27 | }
28 |
29 | #[inline(always)]
30 | fn contains(&self, from_memory_address: MemoryAddress) -> bool
31 | {
32 | from_memory_address >= self.from && from_memory_address < self.to
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
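A minimal, self-contained illustration (not crate code; it uses plain std types rather than MemoryAddress) of the half-open semantics of MemoryRange::contains: from is inclusive, to is exclusive.

use std::ptr::NonNull;

fn contains(from: NonNull<u8>, to: NonNull<u8>, address: NonNull<u8>) -> bool
{
    // Same comparison as MemoryRange::contains: inclusive start, exclusive end.
    address >= from && address < to
}

fn main()
{
    let mut buffer = [0u8; 16];
    let start = NonNull::new(buffer.as_mut_ptr()).unwrap();
    let end = NonNull::new(unsafe { buffer.as_mut_ptr().add(16) }).unwrap();

    assert!(contains(start, end, start)); // the first byte is inside the range
    assert!(!contains(start, end, end));  // the one-past-the-end address is not
}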
/src/allocators/bit_set/AbsoluteLocationInBitSet.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// This is a mixed-radix representation.
6 | #[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
7 | struct AbsoluteLocationInBitSet
8 | {
9 | major: BitSetWordPointer,
10 | minor: NumberOfBits,
11 | }
12 |
13 | impl AbsoluteLocationInBitSet
14 | {
15 | #[inline(always)]
16 | fn align_upwards_to_next_bit_set_word_pointer<R>(self, value_to_return_if_aligned: R, action_if_unaligned: impl FnOnce(&Self) -> R) -> (BitSetWordPointer, R)
17 | {
18 | if unlikely!(self.minor.is_zero())
19 | {
20 | (self.major, value_to_return_if_aligned)
21 | }
22 | else
23 | {
24 | let value_to_return = action_if_unaligned(&self);
25 | (self.major.increment(), value_to_return)
26 | }
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/src/LifetimeHint.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// A hint for choosing a memory allocator.
6 | #[derive(Debug)]
7 | pub enum LifetimeHint
8 | {
9 | /// Use this variant for contexts with short-lived lifetimes.
10 | ///
11 | /// Very fast allocation and almost costless deallocation, at the expense of the strong likelihood of running out of memory.
12 | ///
13 | /// Reallocation is very expensive when growing unless reallocating the most recently made allocation.
14 | ShortLived,
15 |
16 | /// Use this variant for contexts with slightly longer than short-lived lifetimes.
17 | ///
18 | /// Slower allocation and deallocation but reallocation is less expensive than for `ShortLived`.
19 | MediumLived,
20 |
21 | /// Use this variant for contexts with long-lived lifetimes.
22 | LongLived,
23 | }
24 |
--------------------------------------------------------------------------------
/src/adaptors/global_alloc.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | #[doc(hidden)]
6 | #[macro_export]
7 | macro_rules! global_alloc
8 | {
9 | () =>
10 | {
11 | #[inline(always)]
12 | unsafe fn alloc(&self, layout: Layout) -> *mut u8
13 | {
14 | self.GlobalAlloc_allocate(layout)
15 | }
16 |
17 | #[inline(always)]
18 | unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8
19 | {
20 | self.GlobalAlloc_allocate_zeroed(layout)
21 | }
22 |
23 | #[inline(always)]
24 | unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout)
25 | {
26 | self.GlobalAlloc_deallocate(ptr, layout)
27 | }
28 |
29 | #[inline(always)]
30 | unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8
31 | {
32 | self.GlobalAlloc_realloc(ptr, layout, new_size)
33 | }
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
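The macro above is meant to be expanded inside an unsafe impl of GlobalAlloc: it emits the four GlobalAlloc methods as thin forwards to inherent GlobalAlloc_* methods, so several adaptor types can share one set of bodies. A self-contained sketch of the same pattern (the Sketch type, its delegate_* methods and the sketch_global_alloc! macro are hypothetical, not the crate's):

use std::alloc::{GlobalAlloc, Layout, System};

// Same technique as the crate's global_alloc! macro: a macro that expands to
// trait method bodies inside an impl block, delegating to inherent methods.
macro_rules! sketch_global_alloc
{
    () =>
    {
        #[inline(always)]
        unsafe fn alloc(&self, layout: Layout) -> *mut u8
        {
            self.delegate_allocate(layout)
        }

        #[inline(always)]
        unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout)
        {
            self.delegate_deallocate(ptr, layout)
        }
    }
}

// Hypothetical adaptor that simply forwards to the system allocator.
struct Sketch;

impl Sketch
{
    unsafe fn delegate_allocate(&self, layout: Layout) -> *mut u8
    {
        unsafe { System.alloc(layout) }
    }

    unsafe fn delegate_deallocate(&self, ptr: *mut u8, layout: Layout)
    {
        unsafe { System.dealloc(ptr, layout) }
    }
}

unsafe impl GlobalAlloc for Sketch
{
    sketch_global_alloc!();
}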
/src/extensions/U64Ext.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// Useful extensions.
6 | pub(crate) trait U64Ext: Sized + Copy + Ord + Debug
7 | {
8 | /// Round down to power of two exponent (usize).
9 | #[inline(always)]
10 | fn round_down_to_power_of_two_exponent_usize(self, power_of_two_exponent: usize) -> u64
11 | {
12 | self.round_down_to_power_of_two_exponent(power_of_two_exponent as u64)
13 | }
14 |
15 | /// Round down to power of two exponent (u64).
16 | #[inline(always)]
17 | fn round_down_to_power_of_two_exponent(self, power_of_two_exponent: u64) -> u64
18 | {
19 | let value = self.to_u64();
20 |
21 | value & !((1 << power_of_two_exponent) - 1)
22 | }
23 |
24 | #[doc(hidden)]
25 | fn to_u64(self) -> u64;
26 | }
27 |
28 | impl U64Ext for u64
29 | {
30 | #[inline(always)]
31 | fn to_u64(self) -> u64
32 | {
33 | self
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
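A worked, standalone example (not crate code) of the mask arithmetic used by round_down_to_power_of_two_exponent: clearing the low power_of_two_exponent bits rounds a value down to the nearest multiple of two to that power.

fn round_down_to_power_of_two_exponent(value: u64, power_of_two_exponent: u64) -> u64
{
    // For an exponent of 12: (1 << 12) - 1 == 0xFFF, and !0xFFF clears the low 12 bits.
    value & !((1 << power_of_two_exponent) - 1)
}

fn main()
{
    assert_eq!(round_down_to_power_of_two_exponent(4100, 12), 4096); // down to a 4KiB boundary
    assert_eq!(round_down_to_power_of_two_exponent(4096, 12), 4096); // already aligned
    assert_eq!(round_down_to_power_of_two_exponent(4095, 12), 0);
}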
/src/allocators/bit_set/NumberOfBitSetWords.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | #[derive(Default, Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
6 | struct NumberOfBitSetWords(usize);
7 |
8 | impl NumberOfBitSetWords
9 | {
10 | const One: Self = Self(1);
11 |
12 | #[inline(always)]
13 | fn to_number_of_bytes(self) -> NumberOfBytes
14 | {
15 | NumberOfBytes(self.0 * BitSetWord::SizeInBytes)
16 | }
17 |
18 | #[inline(always)]
19 | fn to_number_of_bits(self) -> NumberOfBits
20 | {
21 | NumberOfBits(self.0 * BitSetWord::SizeInBits)
22 | }
23 | }
24 |
25 | impl Sub for NumberOfBitSetWords
26 | {
27 | type Output = Self;
28 |
29 | #[inline(always)]
30 | fn sub(self, other: Self) -> Self::Output
31 | {
32 | debug_assert!(self >= other, "self `{:?}` is less than other `{:?}`", self, other);
33 |
34 | Self(self.0 - other.0)
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/src/memory_sources/CoroutineHeapMemorySource.rs:
--------------------------------------------------------------------------------
1 | // This file is part of linux-support. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-support/master/COPYRIGHT. No part of linux-support, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2020 The developers of linux-support. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-support/master/COPYRIGHT.
3 |
4 |
5 | /// Heap memory.
6 | ///
7 | /// We align to the most common page size, 4Kb, which will minimize alignment problems of memory allocations from this heap.
8 | #[derive(Debug)]
9 | #[repr(C, align(4096))]
10 | pub struct CoroutineHeapMemorySource(NonNull>);
11 |
12 | impl MemorySource for CoroutineHeapMemorySource
13 | {
14 | #[inline(always)]
15 | fn size(&self) -> NonZeroUsize
16 | {
17 | let size = size_of::>();
18 | debug_assert_ne!(size, 0, "Unsized values are not supported");
19 | new_non_zero_usize(size)
20 | }
21 |
22 | #[inline(always)]
23 | fn allocations_start_from(&self) -> MemoryAddress
24 | {
25 | self.0.cast()
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/src/allocators/bit_set/BitSetWord.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | #[derive(Default, Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
6 | struct BitSetWord(u64);
7 |
8 | impl BitSetWord
9 | {
10 | const SizeInBytes: usize = size_of::<u64>();
11 |
12 | const SizeInBits: usize = Self::SizeInBytes * BitsInAByte;
13 |
14 | #[inline(always)]
15 | fn leading_unset_bits(self) -> NumberOfBits
16 | {
17 | NumberOfBits(self.0.leading_zeros() as usize)
18 | }
19 |
20 | #[inline(always)]
21 | fn trailing_unset_bits(self) -> NumberOfBits
22 | {
23 | NumberOfBits(self.0.trailing_zeros() as usize)
24 | }
25 |
26 | #[inline(always)]
27 | fn all_unset_but_not_necessarily_contiguous_bits(self) -> NumberOfBits
28 | {
29 | NumberOfBits(self.0.count_zeros() as usize)
30 | }
31 |
32 | #[inline(always)]
33 | fn to_u64(self) -> u64
34 | {
35 | self.0
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
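The three word-level queries above are thin wrappers over the u64 bit-counting intrinsics; a standalone illustration (not crate code) of what each one counts:

fn main()
{
    // Bits 8..=15 are set, everything else is unset.
    let word: u64 = 0x0000_0000_0000_FF00;

    assert_eq!(word.leading_zeros(), 48);  // unset bits from the most significant end
    assert_eq!(word.trailing_zeros(), 8);  // unset bits from the least significant end
    assert_eq!(word.count_zeros(), 56);    // all unset bits, contiguous or not
}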
/src/adaptors/alloc.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | #[doc(hidden)]
6 | #[macro_export]
7 | macro_rules! alloc
8 | {
9 | () =>
10 | {
11 | #[inline(always)]
12 | fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError>
13 | {
14 | self.Alloc_allocate(layout)
15 | }
16 |
17 | #[inline(always)]
18 | unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout)
19 | {
20 | self.Alloc_deallocate(ptr, layout)
21 | }
22 |
23 | #[inline(always)]
24 | unsafe fn grow(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError>
25 | {
26 | self.Alloc_grow(ptr, old_layout, new_layout)
27 | }
28 |
29 | #[inline(always)]
30 | unsafe fn shrink(&self, ptr: NonNull<u8>, old_layout: Layout, new_layout: Layout) -> Result<NonNull<[u8]>, AllocError>
31 | {
32 | self.Alloc_shrink(ptr, old_layout, new_layout)
33 | }
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/src/allocators/bit_set/NumberOfBytes.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | #[derive(Default, Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
6 | struct NumberOfBytes(usize);
7 |
8 | impl Sub for NumberOfBytes
9 | {
10 | type Output = Self;
11 |
12 | #[inline(always)]
13 | fn sub(self, other: Self) -> Self::Output
14 | {
15 | debug_assert!(self.0 >= other.0);
16 |
17 | Self(self.0 - other.0)
18 | }
19 | }
20 |
21 | impl NumberOfBytes
22 | {
23 | #[inline(always)]
24 | fn is_zero(self) -> bool
25 | {
26 | self.0 == 0
27 | }
28 |
29 | #[inline(always)]
30 | fn is_not_zero(self) -> bool
31 | {
32 | self.0 != 0
33 | }
34 |
35 | #[inline(always)]
36 | fn to_usize(self) -> usize
37 | {
38 | self.0
39 | }
40 |
41 | #[inline(always)]
42 | fn to_non_zero(self) -> NonZeroUsize
43 | {
44 | self.0.non_zero()
45 | }
46 |
47 | #[inline(always)]
48 | fn to_number_of_bits(self) -> NumberOfBits
49 | {
50 | NumberOfBits(self.0 * BitsInAByte)
51 | }
52 | }
53 |
--------------------------------------------------------------------------------
/src/memory_sources/CoroutineHeapMemory.rs:
--------------------------------------------------------------------------------
1 | // This file is part of linux-support. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-support/master/COPYRIGHT. No part of linux-support, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2020 The developers of linux-support. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-support/master/COPYRIGHT.
3 |
4 |
5 | /// Heap memory.
6 | ///
7 | /// We align to the most common page size, 4Kb, which will minimize alignment problems of memory allocations from this heap.
8 | #[repr(C, align(4096))]
9 | pub struct CoroutineHeapMemory
10 | {
11 | sizing: CoroutineHeapSize
12 | }
13 |
14 | impl Debug for CoroutineHeapMemory
15 | {
16 | #[inline(always)]
17 | fn fmt(&self, f: &mut Formatter) -> fmt::Result
18 | {
19 | write!(f, "CoroutineHeapMemorySource({})", size_of::())
20 | }
21 | }
22 |
23 | impl CoroutineHeapMemory
24 | {
25 | /// Into a memory source.
26 | #[inline(always)]
27 | pub const fn into_memory_source(&self) -> CoroutineHeapMemorySource
28 | {
29 | CoroutineHeapMemorySource(new_non_null(self as *const CoroutineHeapMemory as *mut CoroutineHeapMemory))
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/src/PerThreadState.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | #[doc(hidden)]
6 | pub struct PerThreadState>, ThreadLocalAllocator: LocalAllocator>
7 | {
8 | current_allocator_in_use: CurrentAllocatorInUse,
9 |
10 | coroutine_local_allocator: Option,
11 |
12 | thread_local_allocator: Option>,
13 |
14 | marker: PhantomData,
15 | }
16 |
17 | impl>, ThreadLocalAllocator: LocalAllocator> PerThreadState
18 | {
19 | #[doc(hidden)]
20 | #[inline(always)]
21 | pub const fn empty() -> Self
22 | {
23 | Self
24 | {
25 | current_allocator_in_use: CurrentAllocatorInUse::Global,
26 | coroutine_local_allocator: None,
27 | thread_local_allocator: None,
28 | marker: PhantomData,
29 | }
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/src/memory_sources/MemoryMapSource.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// This NUMA-aware memory source allocates memory-mapped data, optionally using NUMA policy to allocate on a memory node closest to the current thread.
6 | ///
7 | /// It is slow and uses system calls.
8 | ///
9 | /// When dropped, any memory obtained with this allocator is ***NOT*** freed.
10 | ///
11 | /// However, it is appropriate as a 'backing store' for other memory sources.
12 | #[derive(Debug)]
13 | pub struct MemoryMapSource(MappedMemory);
14 |
15 | impl MemorySource for MemoryMapSource
16 | {
17 | #[inline(always)]
18 | fn size(&self) -> NonZeroUsize
19 | {
20 | let size = self.0.mapped_size_in_bytes();
21 | new_non_zero_usize(size)
22 | }
23 |
24 | #[inline(always)]
25 | fn allocations_start_from(&self) -> MemoryAddress
26 | {
27 | self.0.virtual_address().into()
28 | }
29 | }
30 |
31 | impl MemoryMapSource
32 | {
33 | /// New instance.
34 | #[inline(always)]
35 | pub fn new(size: NonZeroU64, settings: MappedMemorySettings) -> Result
36 | {
37 | settings.anonymous_memory_map(size).map(Self)
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/src/allocators/binary_search_trees/red_black_tree/ParentAndColor.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
6 | pub(crate) struct ParentAndColor(usize);
7 |
8 | impl Default for ParentAndColor
9 | {
10 | #[inline(always)]
11 | fn default() -> Self
12 | {
13 | Self::new(NodePointer::default(), Color::Red)
14 | }
15 | }
16 |
17 | impl ParentAndColor
18 | {
19 | const ColorBitmask: usize = 0b1;
20 |
21 | const ParentBitmask: usize = !Self::ColorBitmask;
22 |
23 | #[inline(always)]
24 | pub(crate) fn new(parent: NodePointer, color: Color) -> Self
25 | {
26 | debug_assert!(align_of::<Node>() >= 2, "Node needs to be aligned to 2 bytes or more otherwise we can not set the color_bit using unused bits in the parent pointer");
27 |
28 | Self((parent.0 as usize & Self::ParentBitmask) | color.color_bit())
29 | }
30 |
31 | #[inline(always)]
32 | pub(crate) fn parent(self) -> NodePointer
33 | {
34 | NodePointer((self.0 & Self::ParentBitmask) as *const Node)
35 | }
36 |
37 | #[inline(always)]
38 | pub(crate) fn color(self) -> Color
39 | {
40 | unsafe { transmute(self.0 & Self::ColorBitmask) }
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
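ParentAndColor relies on pointer tagging: because Node is aligned to at least 2 bytes (the debug assertion above), the low bit of a parent pointer is always zero, so that bit can hold the red/black colour while the remaining bits still recover the pointer. A standalone sketch of the trick (not crate code):

fn main()
{
    // A u64 is 8-byte aligned, so its address always has a zero low bit,
    // just as a Node aligned to 2 bytes or more does.
    let node = Box::new(0u64);
    let parent_address = &*node as *const u64 as usize;
    assert_eq!(parent_address & 0b1, 0);

    let colour_bit = 1usize;                   // Black = 1, Red = 0 in the Color enum
    let packed = parent_address | colour_bit;  // the usize payload ParentAndColor stores

    assert_eq!(packed & !0b1, parent_address); // parent(): mask the colour bit away
    assert_eq!(packed & 0b1, colour_bit);      // color(): keep only the colour bit
}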
/Cargo.toml:
--------------------------------------------------------------------------------
1 | # This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | # Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | [package]
6 | name = "context-allocator"
7 | description = "context-allocator"
8 | keywords = ["context-allocator"]
9 | categories = []
10 | license = "MIT"
11 | authors = ["Raphael Cohn "]
12 | homepage = "https://github.com/lemonrock/context-allocator"
13 | repository = "https://github.com/lemonrock/context-allocator.git"
14 | exclude = ["*"]
15 | include = ["README.md", "LICENSE", "COPYRIGHT", "src/**/*.rs", "Cargo.toml", "rustfmt.toml", "clippy.toml"]
16 | readme = "README.md"
17 | publish = true
18 | edition = "2018"
19 | version = "0.3.1"
20 |
21 | [dependencies]
22 | static_assertions = "^1.1"
23 |
24 | [target.'cfg(all(target_os = "linux", target_pointer_width = "64"))'.dependencies]
25 | either = { version = "^1.5.3", features = ["serde"] }
26 | errno = "^0.2.8"
27 | libc = { version = "^0.2.93", features = ["const-extern-fn", "extra_traits", "std"] }
28 | likely = { version = "^0.2.0", path = "../likely" }
29 | linux-support = { version = "^0.0.26", path = "../linux-support" }
30 | magic-ring-buffer = { version = "^0.3.1", path = "../magic-ring-buffer" }
31 | swiss-army-knife = { version = "^0.0.39", path = "../swiss-army-knife" }
32 |
--------------------------------------------------------------------------------
/src/LocalAllocator.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// A local allocator is an allocator with a known range of memory addresses it uses for allocated memory.
6 | ///
7 | /// This allows logic to determine which allocator should be used to free (deallocate) which memory pointers.
8 | ///
9 | /// ***It is important that a `LocalAllocator` does nothing on `drop()`***.
10 | pub trait LocalAllocator<MS: MemorySource>: Allocator + Sized + Debug
11 | {
12 | /// Creates a new instance.
13 | fn new_local_allocator(memory_source: MS, lifetime_hint: LifetimeHint, block_size_hint: NonZeroUsize) -> Self;
14 |
15 | /// The range of memory addresses that can be used to allocate memory by this allocator.
16 | ///
17 | /// This function is called repeatedly, so ideally should be inline and fast.
18 | fn memory_range(&self) -> MemoryRange;
19 |
20 | /// Returns `true` if this allocator is responsible for an allocation starting with the given `from_memory_address`.
21 | ///
22 | /// This function is called repeatedly, so ideally should be inline and fast.
23 | #[inline(always)]
24 | fn contains(&self, from_memory_address: MemoryAddress) -> bool
25 | {
26 | self.memory_range().contains(from_memory_address)
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/src/memory_sources/arena/UnallocatedBlock.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | #[repr(C)]
6 | struct UnallocatedBlock
7 | {
8 | next_available_slot_index: Cell<SlotIndex>,
9 | _remainder: Unsized,
10 | }
11 |
12 | impl UnallocatedBlock
13 | {
14 | #[inline(always)]
15 | fn initialize(&self, block_size: NonZeroUsize, block_initializer: &impl Fn(MemoryAddress, NonZeroUsize), slot_index: SlotIndex)
16 | {
17 | block_initializer((self as *const Self as *const u8).non_null(), block_size);
18 | self.set_unoccupied_next_available_slot_index(slot_index)
19 | }
20 |
21 | #[inline(always)]
22 | fn next_available_slot_index(&self) -> SlotIndex
23 | {
24 | self.next_available_slot_index.get()
25 | }
26 |
27 | #[inline(always)]
28 | fn set_unoccupied_next_available_slot_index(&self, slot_index: SlotIndex)
29 | {
30 | self.next_available_slot_index.set(slot_index)
31 | }
32 |
33 | #[inline(always)]
34 | fn from_memory_address<'a>(memory_address: MemoryAddress) -> &'a Self
35 | {
36 | unsafe { & * (memory_address.as_ptr() as *const Self) }
37 | }
38 |
39 | #[inline(always)]
40 | fn to_memory_address(&self) -> MemoryAddress
41 | {
42 | (self as *const Self as *const u8 as *mut u8).non_null()
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/src/extensions/NonZeroU32Ext.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// Useful extensions.
6 | pub(crate) trait NonZeroU32Ext: Sized + Copy
7 | {
8 | /// Add.
9 | #[inline(always)]
10 | fn checked_add(self, increment: Self) -> Option<Self>
11 | {
12 | self.to_u32().checked_add(increment.to_u32()).map(Self::non_zero_unchecked)
13 | }
14 |
15 | /// Add.
16 | #[inline(always)]
17 | fn add_assign(&mut self, increment: Self)
18 | {
19 | *self = Self::non_zero_unchecked(self.to_u32() + increment.to_u32())
20 | }
21 |
22 | /// Next power of two.
23 | #[inline(always)]
24 | fn next_power_of_two(self) -> Self
25 | {
26 | Self::non_zero_unchecked(self.to_u32().next_power_of_two())
27 | }
28 |
29 | /// Non zero.
30 | #[inline(always)]
31 | fn non_zero(value: u32) -> Self
32 | {
33 | debug_assert_ne!(value, 0, "value is zero");
34 |
35 | Self::non_zero_unchecked(value)
36 | }
37 |
38 | /// Non zero.
39 | fn non_zero_unchecked(value: u32) -> Self;
40 |
41 | #[doc(hidden)]
42 | fn to_u32(self) -> u32;
43 | }
44 |
45 | impl NonZeroU32Ext for NonZeroU32
46 | {
47 | #[inline(always)]
48 | fn to_u32(self) -> u32
49 | {
50 | self.get()
51 | }
52 |
53 | #[inline(always)]
54 | fn non_zero_unchecked(value: u32) -> Self
55 | {
56 | new_non_zero_u32(value)
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/.idea/codeStyles/Project.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/src/allocators/binary_search_trees/red_black_tree/RedBlackTreeDoubleEndedIterator.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// An iterator over references to the items of a `RedBlackTree`.
6 | ///
7 | /// Expensive to construct.
8 | pub struct RedBlackTreeDoubleEndedIterator<'a>
9 | {
10 | head: NodePointer,
11 | tail: NodePointer,
12 | tree: &'a RedBlackTree,
13 | }
14 |
15 | impl<'a> Iterator for RedBlackTreeDoubleEndedIterator<'a>
16 | {
17 | type Item = MemoryAddress;
18 |
19 | #[inline(always)]
20 | fn next(&mut self) -> Option<Self::Item>
21 | {
22 | let head = self.head;
23 |
24 | if unlikely!(head.is_null())
25 | {
26 | return None
27 | }
28 |
29 | self.head = if head == self.tail
30 | {
31 | self.tail = NodePointer::default();
32 |
33 | NodePointer::default()
34 | }
35 | else
36 | {
37 | head.next()
38 | };
39 |
40 | Some(head.value())
41 | }
42 | }
43 |
44 | impl<'a> DoubleEndedIterator for RedBlackTreeDoubleEndedIterator<'a>
45 | {
46 | #[inline(always)]
47 | fn next_back(&mut self) -> Option<Self::Item>
48 | {
49 | let tail = self.tail;
50 |
51 | if unlikely!(tail.is_null())
52 | {
53 | return None
54 | }
55 |
56 | self.tail = if tail == self.head
57 | {
58 | self.head = NodePointer::default();
59 |
60 | NodePointer::default()
61 | }
62 | else
63 | {
64 | tail.previous()
65 | };
66 |
67 | Some(tail.value())
68 | }
69 | }
70 |
71 | impl<'a> Clone for RedBlackTreeDoubleEndedIterator<'a>
72 | {
73 | #[inline(always)]
74 | fn clone(&self) -> RedBlackTreeDoubleEndedIterator<'a>
75 | {
76 | Self
77 | {
78 | head: self.head,
79 | tail: self.tail,
80 | tree: self.tree,
81 | }
82 | }
83 | }
84 |
--------------------------------------------------------------------------------
/src/extensions/UsizeExt.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// Useful extensions.
6 | pub trait UsizeExt: Sized + Copy + Ord + Debug
7 | {
8 | /// Is odd.
9 | #[inline(always)]
10 | fn is_odd(self) -> bool
11 | {
12 | self.to_usize() & 0b1 == 0b1
13 | }
14 |
15 | /// Round up.
16 | #[inline(always)]
17 | fn round_up_to_power_of_two(self, non_zero_power_of_two_alignment: NonZeroUsize) -> usize
18 | {
19 | let power_of_two = non_zero_power_of_two_alignment.get();
20 | let power_of_two_less_one = power_of_two - 1;
21 |
22 | let value = self.to_usize();
23 |
24 | debug_assert!(value.checked_add(power_of_two_less_one).is_some(), "non_zero_power_of_two_alignment is far too close to the maximum value of a pointer");
25 |
26 | (value + power_of_two_less_one) & !power_of_two_less_one
27 | }
28 |
29 | /// Round down.
30 | #[inline(always)]
31 | fn round_down_to_power_of_two(self, power_of_two: NonZeroUsize) -> usize
32 | {
33 | let power_of_two_exponent = power_of_two.logarithm_base2();
34 | self.round_down_to_power_of_two_exponent(power_of_two_exponent)
35 | }
36 |
37 | /// Round down to power of two exponent.
38 | #[inline(always)]
39 | fn round_down_to_power_of_two_exponent(self, power_of_two_exponent: usize) -> usize
40 | {
41 | let value = self.to_usize();
42 |
43 | value & !((1 << power_of_two_exponent) - 1)
44 | }
45 |
46 | /// Non zero.
47 | #[inline(always)]
48 | fn non_zero(self) -> NonZeroUsize
49 | {
50 | new_non_zero_usize(self.to_usize())
51 | }
52 |
53 | #[doc(hidden)]
54 | fn to_usize(self) -> usize;
55 | }
56 |
57 | impl UsizeExt for usize
58 | {
59 | #[inline(always)]
60 | fn to_usize(self) -> usize
61 | {
62 | self
63 | }
64 | }
65 |
--------------------------------------------------------------------------------
/src/allocators/bit_set/BlockSize.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | #[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
6 | struct BlockSize
7 | {
8 | block_size: NonZeroUsize,
9 | block_size_less_one: usize,
10 | block_size_power_of_two_exponent: usize,
11 | }
12 |
13 | impl BlockSize
14 | {
15 | #[inline(always)]
16 | fn new(block_size: NonZeroUsize) -> Self
17 | {
18 | debug_assert!(block_size.is_power_of_two(), "block_size `{:?}` is not a power of two", block_size);
19 |
20 | Self
21 | {
22 | block_size,
23 | block_size_less_one: block_size.decrement(),
24 | block_size_power_of_two_exponent: block_size.logarithm_base2(),
25 | }
26 | }
27 |
28 | #[inline(always)]
29 | fn alignment_is_minimum(&self, non_zero_power_of_two_alignment: NonZeroUsize) -> bool
30 | {
31 | non_zero_power_of_two_alignment <= self.block_size
32 | }
33 |
34 | #[inline(always)]
35 | fn number_of_blocks_required(&self, non_zero_size: NonZeroUsize) -> NumberOfBits
36 | {
37 | NumberOfBits((non_zero_size.get() + self.block_size_less_one) >> self.block_size_power_of_two_exponent)
38 | }
39 |
40 | #[inline(always)]
41 | fn blocks_offset(&self, allocations_start_from: MemoryAddress, start_of_allocated_memory: MemoryAddress) -> NumberOfBits
42 | {
43 | debug_assert!(start_of_allocated_memory >= allocations_start_from, "start_of_allocated_memory must be >= allocations_start_from");
44 |
45 | NumberOfBits(start_of_allocated_memory.difference(allocations_start_from) >> self.block_size_power_of_two_exponent)
46 | }
47 |
48 | #[inline(always)]
49 | fn scale_to_memory_offset_in_bytes(&self, number_of_bits: usize) -> NumberOfBytes
50 | {
51 | NumberOfBytes(number_of_bits << self.block_size_power_of_two_exponent)
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/.idea/dictionaries/raph.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | appendv
5 | bindgen
6 | chunksize
7 | cloexec
8 | cvlan
9 | datagrams
10 | datatype
11 | deserialise
12 | deserialiser
13 | dlid
14 | ebusy
15 | edeadlk
16 | einval
17 | epfd
18 | epoll
19 | errormsg
20 | failover
21 | fclass
22 | flowinfo
23 | funcs
24 | getinfo
25 | guid
26 | ibverbs
27 | infini
28 | infiniband
29 | inode
30 | iovcnt
31 | iovec
32 | ipoib
33 | isystem
34 | libfabric
35 | libibverbs
36 | libnuma
37 | maskp
38 | maxevents
39 | mbind
40 | mblkpool
41 | mcast
42 | mcond
43 | mellanox
44 | memcpy
45 | memmove
46 | memset
47 | mlogpool
48 | mmutex
49 | mobjpool
50 | moid
51 | mpol
52 | mpool
53 | mrwlock
54 | msync
55 | musl
56 | nack
57 | nbyte
58 | nelems
59 | nonblock
60 | nonoverlapping
61 | nvml
62 | oflags
63 | pagesize
64 | pkey
65 | pmem
66 | pmemblk
67 | pmemlog
68 | pmemobj
69 | pmemoid
70 | pmemp
71 | pobj
72 | poolsize
73 | pthread
74 | pwait
75 | qkey
76 | rdma
77 | recv
78 | refcnt
79 | rkey
80 | rwlock
81 | serialiser
82 | setlocal
83 | shmem
84 | sigmask
85 | sigs
86 | sockaddr
87 | strdup
88 | timewait
89 | tonode
90 | unextended
91 | unsubscribes
92 | werror
93 | whitelist
94 | wrlock
95 | xrcd
96 |
97 |
98 |
--------------------------------------------------------------------------------
/src/allocators/binary_search_trees/red_black_tree/Node.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | // TODO: Save memory by using compressed (32-bit) pointers.
6 | #[repr(align(32))]
7 | #[derive(Debug)]
8 | pub(crate) struct Node
9 | {
10 | left: Cell<NodePointer>,
11 | right: Cell<NodePointer>,
12 | parent_and_color: Cell<ParentAndColor>,
13 | }
14 |
15 | impl Node
16 | {
17 | #[inline(always)]
18 | pub(crate) fn reset(&mut self)
19 | {
20 | self.left = Cell::default();
21 | self.right = Cell::default();
22 | self.parent_and_color = Cell::default();
23 | }
24 |
25 | #[inline(always)]
26 | pub(crate) fn parent(&self) -> NodePointer
27 | {
28 | self.parent_and_color().parent()
29 | }
30 |
31 | #[inline(always)]
32 | pub(crate) fn set_parent(&self, parent: NodePointer)
33 | {
34 | self.set_parent_and_color(parent, self.color())
35 | }
36 |
37 | #[inline(always)]
38 | pub(crate) fn color(&self) -> Color
39 | {
40 | self.parent_and_color().color()
41 | }
42 |
43 | #[inline(always)]
44 | pub(crate) fn set_color(&self, color: Color)
45 | {
46 | self.set_parent_and_color(self.parent(), color)
47 | }
48 |
49 | #[inline(always)]
50 | pub(crate) fn parent_and_color(&self) -> ParentAndColor
51 | {
52 | self.parent_and_color.get()
53 | }
54 |
55 | #[inline(always)]
56 | pub(crate) fn set_parent_and_color(&self, parent: NodePointer, color: Color)
57 | {
58 | self.parent_and_color.set(ParentAndColor::new(parent, color))
59 | }
60 |
61 | #[inline(always)]
62 | pub(crate) fn left(&self) -> NodePointer
63 | {
64 | self.left.get()
65 | }
66 |
67 | #[inline(always)]
68 | pub(crate) fn set_left(&self, left: NodePointer)
69 | {
70 | self.left.set(left);
71 | }
72 |
73 | #[inline(always)]
74 | pub(crate) fn right(&self) -> NodePointer
75 | {
76 | self.right.get()
77 | }
78 |
79 | #[inline(always)]
80 | pub(crate) fn set_right(&self, right: NodePointer)
81 | {
82 | self.right.set(right);
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
/COPYRIGHT:
--------------------------------------------------------------------------------
1 | Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
2 | Comment: Distribution Compilation Copyright and License
3 | Copyright: Copyright © 2018, Raphael Cohn
4 | License: MIT
5 | The MIT License (MIT)
6 | .
7 | Copyright © 2018, Raphael Cohn
8 | .
9 | Permission is hereby granted, free of charge, to any person obtaining a copy
10 | of this software and associated documentation files (the "Software"), to deal
11 | in the Software without restriction, including without limitation the rights
12 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
13 | copies of the Software, and to permit persons to whom the Software is
14 | furnished to do so, subject to the following conditions:
15 | .
16 | The above copyright notice and this permission notice shall be included in all
17 | copies or substantial portions of the Software.
18 | .
19 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
25 | SOFTWARE.
26 |
27 | Files: *
28 | Copyright: Copyright © 2018, Raphael Cohn
29 | License: MIT
30 | The MIT License (MIT)
31 | .
32 | Copyright © 2018, Raphael Cohn
33 | .
34 | Permission is hereby granted, free of charge, to any person obtaining a copy
35 | of this software and associated documentation files (the "Software"), to deal
36 | in the Software without restriction, including without limitation the rights
37 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
38 | copies of the Software, and to permit persons to whom the Software is
39 | furnished to do so, subject to the following conditions:
40 | .
41 | The above copyright notice and this permission notice shall be included in all
42 | copies or substantial portions of the Software.
43 | .
44 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
45 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
46 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
47 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
48 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
49 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
50 | SOFTWARE.
51 |
--------------------------------------------------------------------------------
/src/extensions/PointerExt.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// Useful extensions.
6 | pub(crate) trait PointerExt<T>: Sized
7 | {
8 | /// Non null.
9 | fn non_null(self) -> NonNull<T>;
10 |
11 | /// Add.
12 | fn add_bytes(self, offset: usize) -> Self;
13 |
14 | /// Add.
15 | #[inline(always)]
16 | fn add_bytes_u32(self, offset: u32) -> Self
17 | {
18 | self.add_bytes(offset as usize)
19 | }
20 |
21 | /// Add.
22 | #[inline(always)]
23 | fn add_bytes_non_zero_u32(self, offset: NonZeroU32) -> Self
24 | {
25 | self.add_bytes_u32(offset.get())
26 | }
27 |
28 | /// To usize.
29 | fn to_usize(self) -> usize;
30 |
31 | /// Is not null.
32 | fn is_not_null(self) -> bool;
33 |
34 | /// Reference.
35 | fn reference<'a>(self) -> &'a T;
36 | }
37 |
38 | impl<T> PointerExt<T> for *const T
39 | {
40 | #[inline(always)]
41 | fn non_null(self) -> NonNull<T>
42 | {
43 | new_non_null(self as *mut T)
44 | }
45 |
46 | #[inline(always)]
47 | fn add_bytes(self, offset: usize) -> Self
48 | {
49 | ((self as usize) + offset) as *const T
50 | }
51 |
52 | #[inline(always)]
53 | fn to_usize(self) -> usize
54 | {
55 | self as usize
56 | }
57 |
58 | #[inline(always)]
59 | fn is_not_null(self) -> bool
60 | {
61 | !self.is_null()
62 | }
63 |
64 | #[inline(always)]
65 | fn reference<'a>(self) -> &'a T
66 | {
67 | debug_assert!(self.is_not_null(), "null pointers can not be dereferenced");
68 |
69 | unsafe { & * self }
70 | }
71 | }
72 |
73 | impl<T> PointerExt<T> for *mut T
74 | {
75 | #[inline(always)]
76 | fn non_null(self) -> NonNull<T>
77 | {
78 | new_non_null(self)
79 | }
80 |
81 | #[inline(always)]
82 | fn add_bytes(self, offset: usize) -> Self
83 | {
84 | ((self as usize) + offset) as *mut T
85 | }
86 |
87 | #[inline(always)]
88 | fn to_usize(self) -> usize
89 | {
90 | self as usize
91 | }
92 |
93 | #[inline(always)]
94 | fn is_not_null(self) -> bool
95 | {
96 | !self.is_null()
97 | }
98 |
99 | #[inline(always)]
100 | fn reference<'a>(self) -> &'a T
101 | {
102 | debug_assert!(self.is_not_null(), "null pointers can not be dereferenced");
103 |
104 | unsafe { & * self }
105 | }
106 | }
107 |
--------------------------------------------------------------------------------
/src/allocators/binary_search_trees/mod.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | //! There are various kinds of binary search tree available suitable for storing a free list of free blocks.
6 | //!
7 | //! Normally, these use nodes allocated from a heap, with fields for pointers to a key and a value.
8 | //! As we know unused blocks of memory are free, we can re-use these as nodes.
9 | //! We can then dispense with the value - it is the pointer to the node itself, that being the free block - and with the key (which is the same as the pointer to the node itself, too).
10 | //! All binary search tree nodes need pointers to a lefthand (or lesser) node and righthand (or greater) node.
11 | //! We can compress these pointers by using a `u32` relative offset which is scaled down by the minimum size of a free block (eg if a node had to be 8 bytes, the relative offset would be scaled by 8, giving a maximum relative offset of 4Gb x 8 => 32Gb).
12 | //! The minimum size of a free block is dictated by the size of the fields required to represent a binary search tree node.
13 | //! For effectiveness, a free block size must be a power of two.
14 | //!
15 | //! Of the types of tree we know of, the following are probably most suitable for allocating and deallocating free blocks:-
16 | //!
17 | //! * A red-black tree;
18 | //! * A left-leaning red-black tree (Sedgewick);
19 | //! * An AA (Arne Andersson) tree;
20 | //! * An AVL (Adelson-Velsky and Landis) tree;
21 | //! * A scapegoat tree.
22 | //!
23 | //! There are trade-offs in choosing one to use:-
24 | //!
25 | //! * Whilst AA trees and AVL trees generally perform better for look-ups than red-black trees, they are usually worse for deletions and insertions;
26 | //! * Deletions and insertions are a major part of the operations of a free list (indeed, if splitting free blocks into smaller ones is at all common, they are the dominant operation);
27 | //! * An AA tree requires an additional 4 - 8 bytes to hold an integer `level`;
28 | //! * A Red-Black tree requires an additional bit to hold a color combined with a `parent` pointer.
29 |
30 |
31 | use super::*;
32 |
33 |
34 | pub(crate) mod red_black_tree;
35 |
36 |
37 | include!("BinarySearchTreesWithCachedKnowledgeOfFirstChild.rs");include!("BinarySearchTreeWithCachedKnowledgeOfFirstChild.rs");
38 | include!("MultipleBinarySearchTreeAllocator.rs");
39 |
--------------------------------------------------------------------------------
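
The module documentation above sketches how node pointers can be compressed to u32 offsets relative to the start of the managed memory, scaled by the minimum free-block size. As a rough standalone illustration of that arithmetic, assuming the 8-byte scaling used in the documentation's example rather than the crate's actual node size:

// A u32 offset scaled by the minimum free-block size can address
// 4G x 8 bytes = 32GiB of blocks relative to the start of the managed range.
const MINIMUM_FREE_BLOCK_SIZE: usize = 8;

fn compress(allocations_start_from: usize, node_address: usize) -> u32
{
    debug_assert!(node_address >= allocations_start_from, "node lies before the managed range");
    let scaled_offset = (node_address - allocations_start_from) / MINIMUM_FREE_BLOCK_SIZE;
    debug_assert!(scaled_offset <= u32::MAX as usize, "node lies beyond what a scaled u32 offset can reach");
    scaled_offset as u32
}

fn decompress(allocations_start_from: usize, compressed: u32) -> usize
{
    allocations_start_from + (compressed as usize) * MINIMUM_FREE_BLOCK_SIZE
}

fn main()
{
    let allocations_start_from = 0x1000_0000;
    let node_address = allocations_start_from + 5 * MINIMUM_FREE_BLOCK_SIZE;
    assert_eq!(decompress(allocations_start_from, compress(allocations_start_from, node_address)), node_address);
}
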
/src/adaptors/AllocatorAdaptor.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// Adapts an `Allocator` to the `GlobalAlloc` and `Alloc` traits.
6 | #[derive(Debug)]
7 | #[repr(transparent)]
8 | pub struct AllocatorAdaptor<'a, A: 'a + Allocator + ?Sized>(pub(crate) &'a A);
9 |
10 | impl<'a, A: 'a + Allocator> Deref for AllocatorAdaptor<'a, A>
11 | {
12 | type Target = A;
13 |
14 | #[inline(always)]
15 | fn deref(&self) -> &Self::Target
16 | {
17 | self.0
18 | }
19 | }
20 |
21 | unsafe impl<'a, A: 'a + Allocator + ?Sized> GlobalAlloc for AllocatorAdaptor<'a, A>
22 | {
23 | global_alloc!();
24 | }
25 |
26 | unsafe impl<'a, A: 'a + Allocator + ?Sized> Alloc for AllocatorAdaptor<'a, A>
27 | {
28 | alloc!();
29 | }
30 |
31 | impl<'a, A: 'a + Allocator + ?Sized> Allocator for AllocatorAdaptor<'a, A>
32 | {
33 | #[inline(always)]
34 | fn allocate(&self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize) -> Result<(NonNull<u8>, usize), AllocError>
35 | {
36 | self.0.allocate(non_zero_size, non_zero_power_of_two_alignment)
37 | }
38 |
39 | #[inline(always)]
40 | fn deallocate(&self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize, current_memory: NonNull<u8>)
41 | {
42 | self.0.deallocate(non_zero_size, non_zero_power_of_two_alignment, current_memory)
43 | }
44 |
45 | #[inline(always)]
46 | fn growing_reallocate(&self, non_zero_new_size: NonZeroUsize, non_zero_power_of_two_new_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, non_zero_power_of_two_current_alignment: NonZeroUsize, current_memory: NonNull<u8>, current_memory_can_not_be_moved: bool) -> Result<(NonNull<u8>, usize), AllocError>
47 | {
48 | self.0.growing_reallocate(non_zero_new_size, non_zero_power_of_two_new_alignment, non_zero_current_size, non_zero_power_of_two_current_alignment, current_memory, current_memory_can_not_be_moved)
49 | }
50 |
51 | #[inline(always)]
52 | fn shrinking_reallocate(&self, non_zero_new_size: NonZeroUsize, non_zero_power_of_two_new_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, non_zero_power_of_two_current_alignment: NonZeroUsize, current_memory: NonNull<u8>, current_memory_can_not_be_moved: bool) -> Result<(NonNull<u8>, usize), AllocError>
53 | {
54 | self.0.shrinking_reallocate(non_zero_new_size, non_zero_power_of_two_new_alignment, non_zero_current_size, non_zero_power_of_two_current_alignment, current_memory, current_memory_can_not_be_moved)
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/src/LocalAllocatorMemoryUsage.rs:
--------------------------------------------------------------------------------
1 | // This file is part of linux-support. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-support/master/COPYRIGHT. No part of linux-support, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2020 The developers of linux-support. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-support/master/COPYRIGHT.
3 |
4 |
5 | /// Local allocator memory usage.
6 | ///
7 | /// Only accurate when recorded.
8 | #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
9 | pub struct LocalAllocatorMemoryUsage
10 | {
11 | allocated: Cell<u64>,
12 |
13 | deallocated: Cell<u64>,
14 |
15 | growing_reallocated: Cell<u64>,
16 |
17 | shrinking_reallocated: Cell<u64>,
18 | }
19 |
20 | impl Default for LocalAllocatorMemoryUsage
21 | {
22 | #[inline(always)]
23 | fn default() -> Self
24 | {
25 | Self::new()
26 | }
27 | }
28 |
29 | impl Sub for LocalAllocatorMemoryUsage
30 | {
31 | type Output = Self;
32 |
33 | #[inline(always)]
34 | fn sub(self, rhs: Self) -> Self::Output
35 | {
36 | Self
37 | {
38 | allocated: Cell::new(self.get_allocated() - rhs.get_allocated()),
39 | deallocated: Cell::new(self.get_deallocated() - rhs.get_deallocated()),
40 | growing_reallocated: Cell::new(self.get_growing_reallocated() - rhs.get_growing_reallocated()),
41 | shrinking_reallocated: Cell::new(self.get_shrinking_reallocated() - rhs.get_shrinking_reallocated())
42 | }
43 | }
44 | }
45 |
46 | impl LocalAllocatorMemoryUsage
47 | {
48 | /// New instance.
49 | pub const fn new() -> Self
50 | {
51 | Self
52 | {
53 | allocated: Cell::new(0),
54 |
55 | deallocated: Cell::new(0),
56 |
57 | growing_reallocated: Cell::new(0),
58 |
59 | shrinking_reallocated: Cell::new(0),
60 | }
61 | }
62 |
63 | /// Memory usage.
64 | #[inline(always)]
65 | pub fn usage(&self) -> u64
66 | {
67 | (self.get_allocated() + self.get_growing_reallocated()) - (self.get_deallocated() + self.get_shrinking_reallocated())
68 | }
69 |
70 | #[inline(always)]
71 | fn allocated(&self, size: usize)
72 | {
73 | self.allocated.set(self.get_allocated() + size as u64);
74 | }
75 |
76 | #[inline(always)]
77 | fn deallocated(&self, size: NonZeroUsize)
78 | {
79 | self.deallocated.set(self.get_deallocated() + size.get() as u64);
80 | }
81 |
82 | #[inline(always)]
83 | fn growing_reallocated(&self, non_zero_current_size: NonZeroUsize, size: usize)
84 | {
85 | self.growing_reallocated.set(self.get_growing_reallocated() + ((size - non_zero_current_size.get()) as u64));
86 | }
87 |
88 | #[inline(always)]
89 | fn shrinking_reallocated(&self, non_zero_current_size: NonZeroUsize, size: usize)
90 | {
91 | self.shrinking_reallocated.set(self.get_shrinking_reallocated() + ((non_zero_current_size.get() - size) as u64));
92 | }
93 |
94 | #[inline(always)]
95 | fn get_allocated(&self) -> u64
96 | {
97 | self.allocated.get()
98 | }
99 |
100 | #[inline(always)]
101 | fn get_deallocated(&self) -> u64
102 | {
103 | self.deallocated.get()
104 | }
105 |
106 | #[inline(always)]
107 | fn get_growing_reallocated(&self) -> u64
108 | {
109 | self.growing_reallocated.get()
110 | }
111 |
112 | #[inline(always)]
113 | fn get_shrinking_reallocated(&self) -> u64
114 | {
115 | self.shrinking_reallocated.get()
116 | }
117 | }
118 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # context-allocator
2 |
3 | This provides allocators suitable for a number of use cases.
4 |
5 | All of these allocators implement the traits `std::alloc::GlobalAlloc` and `std::alloc::Allocator`, as well as a common base trait, `Allocator`.
6 |
7 | The most useful is a global allocator which allows switching between thread, coroutine and global (and thus lockable) memory allocators, using the macro `global_thread_and_coroutine_switchable_allocator()`.
8 |
9 | Allocators provided include:-
10 |
11 | * `BumpAllocator`, a never-freeing bump allocator with slight optimization for reallocating the last allocation.
12 | * `BitSetAllocator`, an allocator that uses a bit set of free blocks; uses 64-bit chunks to optimize searches.
13 | * `MultipleBinarySearchTreeAllocator`, an efficient allocator which minimizes fragmentation by using multiple red-black trees of free blocks which are aggressively defragmented.
14 | * `ContextAllocator`, a choice of either `BumpAllocator`, `BitSetAllocator` or `MultipleBinarySearchTreeAllocator`.
15 | * `MemoryMapAllocator`, a NUMA-aware mmap allocator with support for NUMA policies.
16 | * `GlobalThreadAndCoroutineSwitchableAllocator`, suitable for replacing the global allocator as it provides switchable allocators for global, thread local and context (coroutine) local needs; must be created using the macro `global_thread_and_coroutine_switchable_allocator`.
17 |
18 | Allocators use a `MemorySource` to obtain and release memory.
19 | Memory sources provided include:-
20 |
21 | * `MemoryMapSource`, useful for thread-local allocators as it can obtain memory from NUMA-local memory.
22 | * `ArenaMemorySource`, an arena of fixed blocks which is itself backed by a memory source; this is useful as a source for the `BumpAllocator` and `BitSetAllocator` when used for contexts.
23 |
24 | Additionally a number of adaptors are provided:-
25 |
26 | * `AllocatorAdaptor`, an adaptor of `Allocator` to `GlobalAlloc` and `Alloc`; use it by calling `Allocator.adapt()`
27 | * `GlobalAllocToAllocatorAdaptor`, an adaptor of `GlobalAlloc` to `Allocator`, useful for assigning a global allocator to `GlobalThreadAndCoroutineSwitchableAllocator`.
28 | * `AllocToAllocatorAdaptor`, an adaptor of `Alloc` to `Allocator`.
29 |
30 | When using `GlobalThreadAndCoroutineSwitchableAllocator`, it is possible to save and restore the allocator state for the currently running context (coroutine).
31 | It is also possible to create a lockless, fast thread-local allocator which make use of NUMA memory, unlike a conventional malloc.
32 |
33 |
34 | ## Future
35 |
36 | * Investigate wrapping [Rampant Pixel's Memory Allocator](https://github.com/rampantpixels/rpmalloc).
37 | * Investigate using DPDK's allocator.
38 | * Investigate a B-tree backed allocator.
39 | * Investigate a design that uses multiple doubly-linked 'free' lists of blocks; blocks can be variable in size but the free list is sorted
40 | * Iteration over a particular free-list range may encounter blocks that are too small, or blocks so large they can be split up.
41 | * This design is similar to that used by DPDK.
42 | * To make the allocator multi-threaded, DPDK takes a spin lock on a particular 'heap', which is a set of free lists.
43 | * Investigate a fall-back over-size allocator for a thread-local allocator, which could use the `NumaMemoryMapSource` underneath.
44 | * Investigate supporting over-size allocations in `MultipleBinarySearchTreeAllocator` by scanning the largest binary search tree for contiguous blocks.
45 | * Investigate a persistent-memory backed allocator.
46 | * Properly support excess allocations and Alloc's grow_in_place functions, but only if these are used by downstream collections.
47 | * Investigate the use of the `BMI1` intrinsics `_blsi_u64` (extract lowest set bit), `_blsmsk_u64` and `_blsr_u64`.
48 |
49 |
50 | ## Licensing
51 |
52 | The license for this project is MIT.
53 |
--------------------------------------------------------------------------------
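
As a small usage sketch of the adaptor direction the README describes for `GlobalAllocToAllocatorAdaptor` (assuming the adaptor and the crate's `Allocator` trait are re-exported at the crate root, and that the unstable allocator API the crate builds on is available):

use std::alloc::System;
use std::num::NonZeroUsize;

use context_allocator::Allocator;
use context_allocator::GlobalAllocToAllocatorAdaptor;

fn main()
{
    // Wrap the system GlobalAlloc so it can be used wherever an `Allocator` is expected.
    let allocator = GlobalAllocToAllocatorAdaptor(System);

    let size = NonZeroUsize::new(64).unwrap();
    let power_of_two_alignment = NonZeroUsize::new(16).unwrap();

    // allocate() returns the pointer and the usable size actually provided.
    let (pointer, usable_size) = allocator.allocate(size, power_of_two_alignment).expect("system allocation failed");
    assert!(usable_size >= size.get());

    allocator.deallocate(size, power_of_two_alignment, pointer);
}
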
/src/adaptors/AllocToAllocatorAdaptor.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// Adapts implementations of `std::alloc::Allocator` (renamed to `Alloc` to avoid a namespace clash and match `GlobalAlloc`) to `Allocator`.
6 | pub struct AllocToAllocatorAdaptor<A: Alloc>(UnsafeCell<A>);
7 |
8 | impl<A: Alloc> Debug for AllocToAllocatorAdaptor<A>
9 | {
10 | fn fmt(&self, f: &mut Formatter) -> fmt::Result
11 | {
12 | write!(f, "AllocToAllocatorAdaptor")
13 | }
14 | }
15 |
16 | impl<A: Alloc> Deref for AllocToAllocatorAdaptor<A>
17 | {
18 | type Target = A;
19 |
20 | #[inline(always)]
21 | fn deref(&self) -> &Self::Target
22 | {
23 | self.0.get().reference()
24 | }
25 | }
26 |
27 | impl<A: Alloc> Allocator for AllocToAllocatorAdaptor<A>
28 | {
29 | #[inline(always)]
30 | fn allocate(&self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize) -> Result<(NonNull<u8>, usize), AllocError>
31 | {
32 | Self::map_non_null_slice(self.reference().allocate(Self::layout(non_zero_size, non_zero_power_of_two_alignment)))
33 | }
34 |
35 | #[inline(always)]
36 | fn deallocate(&self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize, current_memory: NonNull<u8>)
37 | {
38 | unsafe { self.reference().deallocate(current_memory, Self::layout(non_zero_size, non_zero_power_of_two_alignment)) }
39 | }
40 |
41 | #[inline(always)]
42 | fn growing_reallocate(&self, non_zero_new_size: NonZeroUsize, non_zero_power_of_two_new_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, non_zero_power_of_two_current_alignment: NonZeroUsize, current_memory: NonNull<u8>, _current_memory_can_not_be_moved: bool) -> Result<(NonNull<u8>, usize), AllocError>
43 | {
44 | Self::map_non_null_slice(unsafe { self.reference().grow(current_memory, Self::layout(non_zero_current_size, non_zero_power_of_two_current_alignment), Self::layout(non_zero_new_size, non_zero_power_of_two_new_alignment)) })
45 | }
46 |
47 | #[inline(always)]
48 | fn shrinking_reallocate(&self, non_zero_new_size: NonZeroUsize, non_zero_power_of_two_new_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, non_zero_power_of_two_current_alignment: NonZeroUsize, current_memory: NonNull<u8>, _current_memory_can_not_be_moved: bool) -> Result<(NonNull<u8>, usize), AllocError>
49 | {
50 | Self::map_non_null_slice(unsafe { self.reference().shrink(current_memory, Self::layout(non_zero_current_size, non_zero_power_of_two_current_alignment), Self::layout(non_zero_new_size, non_zero_power_of_two_new_alignment)) })
51 | }
52 | }
53 |
54 | impl<A: Alloc> AllocToAllocatorAdaptor<A>
55 | {
56 | /// New instance.
57 | #[inline(always)]
58 | pub const fn new(underlying: A) -> Self
59 | {
60 | Self(UnsafeCell::new(underlying))
61 | }
62 |
63 | #[inline(always)]
64 | fn reference(&self) -> &A
65 | {
66 | self.0.get().reference()
67 | }
68 |
69 | #[inline(always)]
70 | fn layout(non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize) -> Layout
71 | {
72 | unsafe { Layout::from_size_align_unchecked(non_zero_size.get(), non_zero_power_of_two_alignment.get()) }
73 | }
74 |
75 | #[inline(always)]
76 | fn map_non_null_slice(result: Result<NonNull<[u8]>, AllocError>) -> Result<(NonNull<u8>, usize), AllocError>
77 | {
78 | result.map(|non_null_slice| (non_null_slice.as_non_null_ptr(), non_null_slice.len()))
79 | }
80 | }
81 |
82 | impl AllocToAllocatorAdaptor<System>
83 | {
84 | /// System malloc.
85 | pub const System: Self = Self::new(System);
86 | }
87 |
--------------------------------------------------------------------------------
/src/allocators/bit_set/NumberOfBits.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | #[derive(Default, Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
6 | struct NumberOfBits(usize);
7 |
8 | impl Add for NumberOfBits
9 | {
10 | type Output = Self;
11 |
12 | #[inline(always)]
13 | fn add(self, other: Self) -> Self::Output
14 | {
15 | Self(self.0 + other.0)
16 | }
17 | }
18 |
19 | impl Add<usize> for NumberOfBits
20 | {
21 | type Output = Self;
22 |
23 | #[inline(always)]
24 | fn add(self, other: usize) -> Self::Output
25 | {
26 | Self(self.0 + other)
27 | }
28 | }
29 |
30 | impl Sub for NumberOfBits
31 | {
32 | type Output = Self;
33 |
34 | #[inline(always)]
35 | fn sub(self, other: Self) -> Self::Output
36 | {
37 | debug_assert!(self >= other, "self `{:?}` is less than other `{:?}`", self, other);
38 |
39 | Self(self.0 - other.0)
40 | }
41 | }
42 |
43 | impl SubAssign for NumberOfBits
44 | {
45 | #[inline(always)]
46 | fn sub_assign(&mut self, other: Self)
47 | {
48 | debug_assert!(self.0 >= other.0, "self `{:?}` is less than other `{:?}`", self, other);
49 |
50 | self.0 -= other.0
51 | }
52 | }
53 |
54 | impl Shr<usize> for NumberOfBits
55 | {
56 | type Output = Self;
57 |
58 | #[inline(always)]
59 | fn shr(self, rhs: usize) -> Self::Output
60 | {
61 | Self(self.0 >> rhs)
62 | }
63 | }
64 |
65 | impl NumberOfBits
66 | {
67 | const Zero: Self = Self(0);
68 |
69 | const InBitSetWord: Self = Self(BitSetWord::SizeInBits);
70 |
71 | #[inline(always)]
72 | fn is_zero(self) -> bool
73 | {
74 | self == Self::Zero
75 | }
76 |
77 | #[inline(always)]
78 | fn is_not_zero(self) -> bool
79 | {
80 | self != Self::Zero
81 | }
82 |
83 | #[inline(always)]
84 | fn to_usize(self) -> usize
85 | {
86 | self.0 as usize
87 | }
88 |
89 | #[inline(always)]
90 | fn to_u64(self) -> u64
91 | {
92 | self.0 as u64
93 | }
94 |
95 | #[inline(always)]
96 | fn remainder_of_bits_that_do_not_fit_in_a_bit_set_word(self) -> Self
97 | {
98 | Self(self.0 % BitSetWord::SizeInBits)
99 | }
100 |
101 | #[inline(always)]
102 | fn round_up_to_number_of_bit_set_words(self) -> NumberOfBitSetWords
103 | {
104 | NumberOfBitSetWords((self.0 + BitSetWord::SizeInBits - 1) / BitSetWord::SizeInBits)
105 | }
106 |
107 | #[inline(always)]
108 | fn scale_to_memory_offset_in_bytes(self, block_size: &BlockSize) -> NumberOfBytes
109 | {
110 | block_size.scale_to_memory_offset_in_bytes(self.0)
111 | }
112 |
113 | #[inline(always)]
114 | fn to_absolute_location_in_bit_set(self, inclusive_start_of_bitset: BitSetWordPointer) -> AbsoluteLocationInBitSet
115 | {
116 | self.to_relative_location_in_bit_set().to_absolute_location_in_bit_set(inclusive_start_of_bitset)
117 | }
118 |
119 | #[inline(always)]
120 | fn to_relative_location_in_bit_set(self) -> RelativeLocationInBitSet
121 | {
122 | let major = self.number_of_bit_set_words_rounded_down();
123 | let minor = self - major.to_number_of_bits();
124 | RelativeLocationInBitSet
125 | {
126 | major,
127 | minor
128 | }
129 | }
130 |
131 | #[inline(always)]
132 | fn is_one_bit_set_word(self) -> bool
133 | {
134 | self.0 == BitSetWord::SizeInBits
135 | }
136 |
137 | #[inline(always)]
138 | fn less_than_a_bit_set_word_required(self) -> bool
139 | {
140 | self.0 < BitSetWord::SizeInBits
141 | }
142 |
143 | #[inline(always)]
144 | fn number_of_bit_set_words_rounded_down(self) -> NumberOfBitSetWords
145 | {
146 | NumberOfBitSetWords(self.0 / BitSetWord::SizeInBits)
147 | }
148 | }
149 |
--------------------------------------------------------------------------------
/src/adaptors/GlobalAllocToAllocatorAdaptor.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// Adapts implementations of `GlobalAlloc` to `Allocator`.
6 | pub struct GlobalAllocToAllocatorAdaptor<GA: GlobalAlloc>(pub GA);
7 |
8 | impl<GA: GlobalAlloc> Debug for GlobalAllocToAllocatorAdaptor<GA>
9 | {
10 | fn fmt(&self, f: &mut Formatter) -> fmt::Result
11 | {
12 | write!(f, "GlobalAllocToAllocatorAdaptor")
13 | }
14 | }
15 |
16 | impl<GA: GlobalAlloc> Deref for GlobalAllocToAllocatorAdaptor<GA>
17 | {
18 | type Target = GA;
19 |
20 | #[inline(always)]
21 | fn deref(&self) -> &Self::Target
22 | {
23 | &self.0
24 | }
25 | }
26 |
27 | impl<GA: GlobalAlloc> Allocator for GlobalAllocToAllocatorAdaptor<GA>
28 | {
29 | #[inline(always)]
30 | fn allocate(&self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize) -> Result<(NonNull<u8>, usize), AllocError>
31 | {
32 | let pointer = unsafe { self.0.alloc(Self::layout(non_zero_size, non_zero_power_of_two_alignment)) };
33 | Self::adapt_pointer(pointer, non_zero_size)
34 | }
35 |
36 | #[inline(always)]
37 | fn deallocate(&self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize, current_memory: NonNull<u8>)
38 | {
39 | unsafe { self.0.dealloc(current_memory.as_ptr(), Self::layout(non_zero_size, non_zero_power_of_two_alignment)) }
40 | }
41 |
42 | #[inline(always)]
43 | fn growing_reallocate(&self, non_zero_new_size: NonZeroUsize, non_zero_power_of_two_new_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, non_zero_power_of_two_current_alignment: NonZeroUsize, current_memory: NonNull<u8>, current_memory_can_not_be_moved: bool) -> Result<(NonNull<u8>, usize), AllocError>
44 | {
45 | if unlikely!(non_zero_power_of_two_new_alignment > non_zero_power_of_two_current_alignment)
46 | {
47 | return Err(AllocError)
48 | }
49 |
50 | if unlikely!(current_memory_can_not_be_moved)
51 | {
52 | return Err(AllocError)
53 | }
54 | let pointer = unsafe { self.0.realloc(current_memory.as_ptr(), Self::layout(non_zero_current_size, non_zero_power_of_two_current_alignment), non_zero_new_size.get()) };
55 | Self::adapt_pointer(pointer, non_zero_new_size)
56 | }
57 |
58 | #[inline(always)]
59 | fn shrinking_reallocate(&self, non_zero_new_size: NonZeroUsize, non_zero_power_of_two_new_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, non_zero_power_of_two_current_alignment: NonZeroUsize, current_memory: NonNull<u8>, current_memory_can_not_be_moved: bool) -> Result<(NonNull<u8>, usize), AllocError>
60 | {
61 | if unlikely!(non_zero_power_of_two_new_alignment > non_zero_power_of_two_current_alignment)
62 | {
63 | return Err(AllocError)
64 | }
65 |
66 | if unlikely!(current_memory_can_not_be_moved)
67 | {
68 | return Err(AllocError)
69 | }
70 |
71 | let pointer = unsafe { self.0.realloc(current_memory.as_ptr(), Self::layout(non_zero_current_size, non_zero_power_of_two_current_alignment), non_zero_new_size.get()) };
72 | Self::adapt_pointer(pointer, non_zero_new_size)
73 | }
74 | }
75 |
76 | impl<GA: GlobalAlloc> GlobalAllocToAllocatorAdaptor<GA>
77 | {
78 | #[inline(always)]
79 | fn layout(non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize) -> Layout
80 | {
81 | unsafe { Layout::from_size_align_unchecked(non_zero_size.get(), non_zero_power_of_two_alignment.get()) }
82 | }
83 |
84 | #[inline(always)]
85 | fn adapt_pointer(pointer: *mut u8, non_zero_new_size: NonZeroUsize) -> Result<(NonNull<u8>, usize), AllocError>
86 | {
87 | if unlikely!(pointer.is_null())
88 | {
89 | Err(AllocError)
90 | }
91 | else
92 | {
93 | Ok((new_non_null(pointer), non_zero_new_size.get()))
94 | }
95 | }
96 | }
97 |
98 | impl GlobalAllocToAllocatorAdaptor<System>
99 | {
100 | /// System malloc.
101 | pub const System: Self = Self(System);
102 | }
103 |
--------------------------------------------------------------------------------
/src/allocators/MemoryUsageTrackingThreadLocalAllocator.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// An allocator designed to track memory usage.
6 | ///
7 | /// This allocator tracks memory usage based on requested memory sizes, not the sizes actually allocated.
8 | /// This is because growing (or shrinking) reallocations do not know the original actually allocated sizes.
9 | ///
10 | /// This allocator is not thread-safe.
11 | #[derive(Debug)]
12 | pub struct MemoryUsageTrackingThreadLocalAllocator<LA: LocalAllocator<MemoryMapSource>>
13 | {
14 | local_allocator: LA,
15 |
16 | local_allocator_memory_usage: LocalAllocatorMemoryUsage,
17 | }
18 |
19 | impl<LA: LocalAllocator<MemoryMapSource>> Allocator for MemoryUsageTrackingThreadLocalAllocator<LA>
20 | {
21 | #[inline(always)]
22 | fn allocate(&self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize) -> Result<(NonNull<u8>, usize), AllocError>
23 | {
24 | let result = self.local_allocator.allocate(non_zero_size, non_zero_power_of_two_alignment)?;
25 | self.local_allocator_memory_usage.allocated(result.1);
26 | Ok(result)
27 | }
28 |
29 | #[inline(always)]
30 | fn deallocate(&self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize, current_memory: NonNull<u8>)
31 | {
32 | self.local_allocator.deallocate(non_zero_size, non_zero_power_of_two_alignment, current_memory);
33 | self.local_allocator_memory_usage.deallocated(non_zero_size);
34 | }
35 |
36 | #[inline(always)]
37 | fn growing_reallocate(&self, non_zero_new_size: NonZeroUsize, non_zero_power_of_two_new_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, non_zero_power_of_two_current_alignment: NonZeroUsize, current_memory: NonNull<u8>, current_memory_can_not_be_moved: bool) -> Result<(NonNull<u8>, usize), AllocError>
38 | {
39 | let result = self.local_allocator.growing_reallocate(non_zero_new_size, non_zero_power_of_two_new_alignment, non_zero_current_size, non_zero_power_of_two_current_alignment, current_memory, current_memory_can_not_be_moved)?;
40 | self.local_allocator_memory_usage.growing_reallocated(non_zero_current_size, result.1);
41 | Ok(result)
42 | }
43 |
44 | #[inline(always)]
45 | fn shrinking_reallocate(&self, non_zero_new_size: NonZeroUsize, non_zero_power_of_two_new_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, non_zero_power_of_two_current_alignment: NonZeroUsize, current_memory: NonNull<u8>, current_memory_can_not_be_moved: bool) -> Result<(NonNull<u8>, usize), AllocError>
46 | {
47 | let result = self.local_allocator.shrinking_reallocate(non_zero_new_size, non_zero_power_of_two_new_alignment, non_zero_current_size, non_zero_power_of_two_current_alignment, current_memory, current_memory_can_not_be_moved)?;
48 | self.local_allocator_memory_usage.shrinking_reallocated(non_zero_current_size, result.1);
49 | Ok(result)
50 | }
51 | }
52 |
53 | impl<LA: LocalAllocator<MemoryMapSource>> LocalAllocator<MemoryMapSource> for MemoryUsageTrackingThreadLocalAllocator<LA>
54 | {
55 | #[inline(always)]
56 | fn new_local_allocator(memory_source: MemoryMapSource, lifetime_hint: LifetimeHint, block_size_hint: NonZeroUsize) -> Self
57 | {
58 | Self
59 | {
60 | local_allocator: LA::new_local_allocator(memory_source, lifetime_hint, block_size_hint),
61 | local_allocator_memory_usage: Default::default()
62 | }
63 | }
64 |
65 | #[inline(always)]
66 | fn memory_range(&self) -> MemoryRange
67 | {
68 | self.local_allocator.memory_range()
69 | }
70 | }
71 |
72 | impl<LA: LocalAllocator<MemoryMapSource>> MemoryUsageTrackingThreadLocalAllocator<LA>
73 | {
74 | /// Create a new instance.
75 | #[inline(always)]
76 | pub const fn new(local_allocator: LA) -> Self
77 | {
78 | Self
79 | {
80 | local_allocator,
81 | local_allocator_memory_usage: LocalAllocatorMemoryUsage::new(),
82 | }
83 | }
84 |
85 | /// Memory usage.
86 | #[inline(always)]
87 | pub fn memory_usage(&self) -> &LocalAllocatorMemoryUsage
88 | {
89 | &self.local_allocator_memory_usage
90 | }
91 | }
92 |
--------------------------------------------------------------------------------
/NOTES:
--------------------------------------------------------------------------------
1 | We want a:-
2 | - single-threaded allocator
3 | - probably SLAB-based
4 | - ?we do not want to extensions object sizes over page size (4Kb)?
5 | - to be able to re-use a mmap'd or similar piece of memory which is NUMA LOCAL
6 | - see libnuma for how to do this.
7 |
8 | - elfmalloc provides a slab-allocator
9 |
10 | Or
11 | - we can use a simple never-freeing allocator
12 | - resize-in-place becomes very inefficient
13 |
14 |
15 | Allocator choices:-
16 | - Redox's allocator
17 | https://crates.io/crates/ralloc
18 | - Bump allocators (never free)
19 | - build-our-own, never-freeing: suffers from severe inefficiencies with realloc which increases size
20 | - https://crates.io/crates/bump_alloc
21 | - https://crates.io/crates/bh_alloc
22 | - https://crates.io/crates/scratchpad
23 | - https://crates.io/crates/obstack
24 | - https://crates.io/crates/jenga
25 | - Use elfmalloc's bsalloc: https://github.com/ezrosent/allocators-rs/tree/master/bsalloc
26 | - Linked list allocator
27 | eg https://crates.io/crates/linked_list_allocator
28 | - scans list of free blocks to find first block large enough (O(n))
29 | - severe inefficiencies with realloc which increases size
30 | eg https://rphmeier.github.io/allocators/allocators/index.html
31 | - Buddy allocators
32 | https://crates.io/crates/buddy_system_allocator
33 | https://crates.io/crates/alloc_buddy_simple2
34 | - ?https://github.com/dropbox/rust-alloc-no-stdlib/blob/master/src/stack_allocator.rs
35 | - https://docs.rs/alloc-cortex-m/0.3.5/alloc_cortex_m/ - hmmm??
36 | - ?https://crates.io/crates/xalloc? not appropriate?
37 | - Use a slab allocator
38 | - https://crates.io/crates/simple-slab - no use - assumes a fixed size object.
39 | - Use elfmalloc's slab: https://github.com/ezrosent/allocators-rs/tree/master/slab-alloc
40 | - https://crates.io/crates/slab_allocator (seems to re-use the inefficient linked-list-allocator above)
41 | - Re-implement that from Linux
42 | - Try to implement slub
43 | - Try to implement SLOB
44 | - Use an allocator of memory pools
45 | - create memory pools for 8b, 16b, 32b, etc up to, say 256b.
46 | - have an oversize pool - how?
47 | - use linked lists for the non-oversize pools
48 | - on coroutine creation creator indicates how much in each pool.
49 | - Rework our own toblerone allocator from persistent memory w/o multithreading problems
50 |
51 | - Rework third party
52 | - eg can we have a thread-local tcmalloc? hoard?
53 |
54 | - Our own
55 | - when a block is free'd, add it to the top of a free list
56 | - merge blocks wherever possible
57 | - can realloc in place for larger size if this was the last block allocated (and, if used a mirror ring buffer, could wrap-around a memory allocation)
58 | - maintain multiple free lists but update other free lists if allocations 'hit' them.
59 | - explore Linux's SLOB, which also works well for small memories.
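A minimal sketch of the free-list idea in the "Our own" item above: the list is threaded through the free blocks themselves, so freeing is an O(1) push onto the head and allocation is a first-fit scan. Names are illustrative; splitting, merging and alignment are omitted, and this is not the crate's implementation.

    use std::ptr::null_mut;

    // Header written into the first bytes of every free block; the free list
    // costs no memory beyond the blocks it tracks.
    struct FreeBlockHeader
    {
        next: *mut FreeBlockHeader,
        size: usize,
    }

    struct FreeList
    {
        head: *mut FreeBlockHeader,
    }

    impl FreeList
    {
        const fn new() -> Self
        {
            Self { head: null_mut() }
        }

        // Freeing is an O(1) push onto the head of the list.
        unsafe fn free(&mut self, block: *mut u8, size: usize)
        {
            let header = block as *mut FreeBlockHeader;
            (*header).next = self.head;
            (*header).size = size;
            self.head = header;
        }

        // Allocation is an O(n) first-fit scan; splitting and coalescing are omitted.
        unsafe fn allocate_first_fit(&mut self, size: usize) -> *mut u8
        {
            let mut previous: *mut FreeBlockHeader = null_mut();
            let mut current = self.head;
            while !current.is_null()
            {
                if (*current).size >= size
                {
                    let next = (*current).next;
                    if previous.is_null()
                    {
                        self.head = next
                    }
                    else
                    {
                        (*previous).next = next
                    }
                    return current as *mut u8
                }
                previous = current;
                current = (*current).next;
            }
            null_mut()
        }
    }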
60 |
61 |
62 |
63 |
64 | First, "slab" has become a generic name referring to a memory allocation strategy employing an object cache, enabling efficient allocation and deallocation of kernel objects. It was first documented by Sun engineer Jeff Bonwick1 and implemented in the Solaris 2.4 kernel.
65 |
66 | Linux currently offers three choices for its "slab" allocator:
67 |
68 | - Slab is the original, based on Bonwick's seminal paper and available since Linux kernel version 2.2. It is a faithful implementation of Bonwick's proposal, augmented by the multiprocessor changes described in Bonwick's follow-up paper [2].
69 |
70 | - Slub is the next-generation replacement memory allocator, which has been the default in the Linux kernel since 2.6.23. It continues to employ the basic "slab" model, but fixes several deficiencies in Slab's design, particularly around systems with large numbers of processors. Slub is simpler than Slab.
71 |
72 | - SLOB (Simple List Of Blocks) is a memory allocator optimized for embedded systems with very little memory—on the order of megabytes. It applies a very simple first-fit algorithm on a list of blocks, not unlike the old K&R-style heap allocator. In eliminating nearly all of the overhead from the memory allocator, SLOB is a good fit for systems under extreme memory constraints, but it offers none of the benefits described in [1] and can suffer from pathological fragmentation.
73 |
74 | What should you use? Slub, unless you are building a kernel for an embedded device with limited memory. In that case, I would benchmark Slub versus SLOB and see what works best for your workload. There is no reason to use Slab; it will likely be removed from future Linux kernel releases.
75 |
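The description above characterises "slab" as an object-caching strategy. The sketch below shows the core of that idea in miniature (one slab, fixed-size slots, a stack of free slot indices, O(1) allocate and free); per-CPU caches, object constructors and cache colouring are deliberately left out, and the names are illustrative only.

    // A toy object cache in the spirit of Bonwick's slab allocator.
    struct ObjectCache
    {
        slab: Vec<u8>,
        object_size: usize,
        free_slot_indices: Vec<usize>,
    }

    impl ObjectCache
    {
        fn new(object_size: usize, objects_per_slab: usize) -> Self
        {
            Self
            {
                slab: vec![0u8; object_size * objects_per_slab],
                object_size,
                free_slot_indices: (0 .. objects_per_slab).rev().collect(),
            }
        }

        // Pop a free slot index and hand out a pointer into the slab.
        fn allocate(&mut self) -> Option<*mut u8>
        {
            let slot_index = self.free_slot_indices.pop()?;
            Some(unsafe { self.slab.as_mut_ptr().add(slot_index * self.object_size) })
        }

        // Convert the pointer back into a slot index and push it onto the free stack.
        fn deallocate(&mut self, object: *mut u8)
        {
            let offset = object as usize - self.slab.as_ptr() as usize;
            self.free_slot_indices.push(offset / self.object_size);
        }
    }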
--------------------------------------------------------------------------------
/src/extensions/NonZeroUsizeExt.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// Useful extensions.
6 | pub trait NonZeroUsizeExt: Sized + Copy + Ord + Debug
7 | {
8 | /// Next power of two.
9 | #[inline(always)]
10 | fn next_power_of_two(self) -> Self
11 | {
12 | Self::non_zero_unchecked(self.to_usize().next_power_of_two())
13 | }
14 |
15 | /// Round up.
16 | #[inline(always)]
17 | fn round_up_to_power_of_two(self, non_zero_power_of_two_alignment: NonZeroUsize) -> Self
18 | {
19 | let power_of_two = non_zero_power_of_two_alignment.get();
20 | let power_of_two_less_one = power_of_two - 1;
21 |
22 | debug_assert!(self.checked_add(power_of_two_less_one).is_some(), "non_zero_power_of_two_alignment is far too close to the maximum value of a pointer");
23 |
24 | Self::non_zero(self.add(power_of_two_less_one).to_usize() & !power_of_two_less_one)
25 | }
26 |
27 | /// Round down.
28 | #[inline(always)]
29 | fn round_down_to_power_of_two(self, power_of_two: NonZeroUsize) -> usize
30 | {
31 | let value = self.to_usize();
32 | let power_of_two_exponent = power_of_two.logarithm_base2();
33 |
34 | value & !((1 << power_of_two_exponent) - 1)
35 | }
36 |
37 | /// Divide.
38 | #[inline(always)]
39 | fn divide_power_of_two_by_power_of_two(self, divisor: NonZeroUsize) -> usize
40 | {
41 | debug_assert!(self.to_usize().is_power_of_two(), "self `{:?}` is not a power of two", self);
42 | debug_assert!(divisor.is_power_of_two(), "divisor `{:?}` is not a power of two", divisor);
43 |
44 | self.to_usize() >> divisor.logarithm_base2()
45 | }
46 |
47 | /// Logarithm base two.
48 | #[inline(always)]
49 | fn logarithm_base2(self) -> usize
50 | {
51 | self.to_usize().trailing_zeros() as usize
52 | }
53 |
54 | /// Decrement.
55 | #[inline(always)]
56 | fn decrement(self) -> usize
57 | {
58 | self.to_usize() - 1
59 | }
60 |
61 | /// Add.
62 | #[inline(always)]
63 | fn add(self, increment: usize) -> Self
64 | {
65 | Self::non_zero(self.to_usize() + increment)
66 | }
67 |
68 | /// Add.
69 | #[inline(always)]
70 | fn add_non_zero(self, increment: NonZeroUsize) -> Self
71 | {
72 | Self::non_zero(self.to_usize() + increment.get())
73 | }
74 |
75 | /// Add.
76 | #[inline(always)]
77 | fn checked_add(self, increment: usize) -> Option<Self>
78 | {
79 | self.to_usize().checked_add(increment).map(Self::non_zero)
80 | }
81 |
82 | /// Add.
83 | #[inline(always)]
84 | fn add_assign(&mut self, increment: usize)
85 | {
86 | *self = (*self).add(increment)
87 | }
88 |
89 | /// Double.
90 | #[inline(always)]
91 | fn doubled(self) -> NonZeroUsize
92 | {
93 | (self.to_usize() << 1).non_zero()
94 | }
95 |
96 | /// Difference.
97 | #[inline(always)]
98 | fn difference(self, other: Self) -> usize
99 | {
100 | debug_assert!(self >= other, "other `{:?}` is less than self `{:?}`", other, self);
101 |
102 | self.to_usize() - other.to_usize()
103 | }
104 |
105 | /// Difference.
106 | #[inline(always)]
107 | fn difference_non_zero(self, other: Self) -> NonZeroUsize
108 | {
109 | self.difference(other).non_zero()
110 | }
111 |
112 | /// Multiply.
113 | #[inline(always)]
114 | fn multiply(self, other: Self) -> NonZeroUsize
115 | {
116 | (self.to_usize() * other.to_usize()).non_zero()
117 | }
118 |
119 | /// Is odd.
120 | #[inline(always)]
121 | fn is_odd(self) -> bool
122 | {
123 | self.to_usize().is_odd()
124 | }
125 |
126 | /// Non zero.
127 | #[inline(always)]
128 | fn to_non_zero_u32(self) -> NonZeroU32
129 | {
130 | let usize = self.to_usize();
131 | debug_assert!(usize <= u32::MAX as usize, "exceeds `{}` u32::MAX `{}`", usize, u32::MAX);
132 | NonZeroU32::non_zero_unchecked(usize as u32)
133 | }
134 |
135 | /// Non zero.
136 | #[inline(always)]
137 | fn non_zero(value: usize) -> Self
138 | {
139 | debug_assert_ne!(value, 0, "value is zero");
140 |
141 | Self::non_zero_unchecked(value)
142 | }
143 |
144 | /// Non zero.
145 | fn non_zero_unchecked(value: usize) -> Self;
146 |
147 | #[doc(hidden)]
148 | fn to_usize(self) -> usize;
149 | }
150 |
151 | impl NonZeroUsizeExt for NonZeroUsize
152 | {
153 | #[inline(always)]
154 | fn difference(self, other: Self) -> usize
155 | {
156 | debug_assert!(self >= other, "other `{:?}` is less than self `{:?}`", other, self);
157 |
158 | self.get() - other.get()
159 | }
160 |
161 | #[inline(always)]
162 | fn non_zero_unchecked(value: usize) -> Self
163 | {
164 | new_non_zero_usize(value)
165 | }
166 |
167 | #[inline(always)]
168 | fn to_usize(self) -> usize
169 | {
170 | self.get()
171 | }
172 | }
173 |
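The rounding and division helpers above all reduce to standard power-of-two bit tricks; the following standalone check walks the arithmetic with arbitrarily chosen values.

    fn main()
    {
        // round_up_to_power_of_two uses the classic mask trick: (value + align - 1) & !(align - 1).
        let alignment = 8usize;
        let mask = alignment - 1;
        assert_eq!((13 + mask) & !mask, 16);

        // round_down_to_power_of_two simply clears the low bits.
        assert_eq!(13 & !mask, 8);

        // logarithm_base2 of a power of two is its count of trailing zeros, and
        // divide_power_of_two_by_power_of_two is a right shift by that amount.
        assert_eq!(8usize.trailing_zeros(), 3);
        assert_eq!(64usize >> 8usize.trailing_zeros(), 8);
    }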
--------------------------------------------------------------------------------
/src/allocators/bit_set/BitSetWordPointer.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | #[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
6 | struct BitSetWordPointer(NonNull<BitSetWord>);
7 |
8 | impl BitSetWordPointer
9 | {
10 | #[inline(always)]
11 | fn into_memory_address(self) -> MemoryAddress
12 | {
13 | self.0.cast()
14 | }
15 |
16 | #[inline(always)]
17 | fn wrap(memory_address: MemoryAddress) -> Self
18 | {
19 | debug_assert_eq!(memory_address.to_usize() % BitSetWord::SizeInBytes, 0, "memory_address `{:?}` must be a multiple of 8", memory_address);
20 |
21 | Self(memory_address.cast::<BitSetWord>())
22 | }
23 |
24 | #[inline(always)]
25 | fn difference_in_number_of_bits(self, lower: Self) -> NumberOfBits
26 | {
27 | self.difference_in_number_of_bytes(lower).to_number_of_bits()
28 | }
29 |
30 | #[inline(always)]
31 | fn difference_in_number_of_bytes(self, lower: Self) -> NumberOfBytes
32 | {
33 | NumberOfBytes(self.memory_address().difference(lower.memory_address()))
34 | }
35 |
36 | #[inline(always)]
37 | fn set_bottom_bits(self, number_of_lower_bits_to_set: NumberOfBits)
38 | {
39 | self.memory_address().set_bottom_bits_of_u64(number_of_lower_bits_to_set.0)
40 | }
41 |
42 | #[inline(always)]
43 | fn set_some_bits(self, current: BitSetWord, bits_to_set: u64)
44 | {
45 | self.memory_address().write(current.to_u64() | bits_to_set)
46 | }
47 |
48 | #[inline(always)]
49 | fn set_top_bits(self, number_of_upper_bits_to_set: NumberOfBits)
50 | {
51 | self.memory_address().set_top_bits_of_u64(number_of_upper_bits_to_set.0)
52 | }
53 |
54 | #[inline(always)]
55 | fn set_all_bits_and_increment_assign(&mut self)
56 | {
57 | self.set_all_bits_to(0xFFFF_FFFF_FFFF_FFFF)
58 | }
59 |
60 | #[inline(always)]
61 | fn unset_bottom_bits(self, number_of_lower_bits_to_unset: NumberOfBits)
62 | {
63 | self.memory_address().unset_bottom_bits_of_u64(number_of_lower_bits_to_unset.0)
64 | }
65 |
66 | #[inline(always)]
67 | fn unset_middle_bits(self, number_of_bits_to_unset: NumberOfBits, number_of_lower_bits: NumberOfBits)
68 | {
69 | self.memory_address().unset_middle_bits_of_u64(number_of_bits_to_unset.0, number_of_lower_bits.0)
70 | }
71 |
72 | #[inline(always)]
73 | fn unset_top_bits(self, number_of_upper_bits_to_unset: NumberOfBits)
74 | {
75 | self.memory_address().unset_top_bits_of_u64(number_of_upper_bits_to_unset.0)
76 | }
77 |
78 | #[inline(always)]
79 | fn unset_all_bits_and_increment_assign(&mut self)
80 | {
81 | self.set_all_bits_to(0x0000_0000_0000_0000)
82 | }
83 |
84 | #[doc(hidden)]
85 | #[inline(always)]
86 | fn set_all_bits_to(&mut self, value: u64)
87 | {
88 | let mut memory_address = self.memory_address();
89 | memory_address.write_and_advance(value);
90 | self.0 = memory_address.cast::<BitSetWord>();
91 | }
92 |
93 | #[inline(always)]
94 | fn increment_assign(&mut self)
95 | {
96 | *self = (*self).increment()
97 | }
98 |
99 | #[inline(always)]
100 | fn increment(self) -> Self
101 | {
102 | self.increment_in_bit_set_words(NumberOfBitSetWords::One)
103 | }
104 |
105 | #[inline(always)]
106 | fn increment_in_bit_set_words(self, number_of_bit_set_words: NumberOfBitSetWords) -> Self
107 | {
108 | self.increment_in_bytes(number_of_bit_set_words.to_number_of_bytes())
109 | }
110 |
111 | #[inline(always)]
112 | fn bit_set_word(self) -> BitSetWord
113 | {
114 | BitSetWord(self.memory_address().read_u64())
115 | }
116 |
117 | #[inline(always)]
118 | fn decrement_in_bit_set_words(self, number_of_bit_set_words: NumberOfBitSetWords) -> Self
119 | {
120 | self.decrement_in_bytes(number_of_bit_set_words.to_number_of_bytes())
121 | }
122 |
123 | #[inline(always)]
124 | fn increment_in_bytes(self, number_of_bytes: NumberOfBytes) -> Self
125 | {
126 | let number_of_bytes = number_of_bytes.0;
127 |
128 | debug_assert_eq!(number_of_bytes % BitSetWord::SizeInBytes, 0, "number_of_bytes `{:?}` is not a multiple of the size of an u64", number_of_bytes);
129 |
130 | Self(self.memory_address().add(number_of_bytes).cast::<BitSetWord>())
131 | }
132 |
133 | #[inline(always)]
134 | fn decrement_in_bytes(self, number_of_bytes: NumberOfBytes) -> Self
135 | {
136 | let number_of_bytes = number_of_bytes.0;
137 |
138 | debug_assert_eq!(number_of_bytes % BitSetWord::SizeInBytes, 0, "number_of_bytes `{:?}` is not a multiple of the size of an u64", number_of_bytes);
139 |
140 | Self(self.memory_address().subtract(number_of_bytes).cast::<BitSetWord>())
141 | }
142 |
143 | #[doc(hidden)]
144 | #[inline(always)]
145 | fn memory_address(self) -> MemoryAddress
146 | {
147 | self.0.cast::<u8>()
148 | }
149 | }
150 |
--------------------------------------------------------------------------------
/src/allocators/binary_search_trees/BinarySearchTreeWithCachedKnowledgeOfFirstChild.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | #[derive(Debug)]
6 | pub(crate) struct BinarySearchTreeWithCachedKnowledgeOfFirstChild
7 | {
8 | tree: RedBlackTree,
9 | cached_first_child: NodePointer,
10 | }
11 |
12 | impl Default for BinarySearchTreeWithCachedKnowledgeOfFirstChild
13 | {
14 | fn default() -> Self
15 | {
16 | let tree = RedBlackTree::default();
17 | Self
18 | {
19 | cached_first_child: tree.first_child(),
20 | tree,
21 | }
22 | }
23 | }
24 |
25 | impl BinarySearchTreeWithCachedKnowledgeOfFirstChild
26 | {
27 | #[inline(always)]
28 | pub(crate) fn has_blocks(&self) -> bool
29 | {
30 | self.tree.has_blocks()
31 | }
32 |
33 | #[inline(always)]
34 | pub(crate) fn find(&self, key: MemoryAddress) -> NodePointer
35 | {
36 | self.tree.find(key)
37 | }
38 |
39 | #[inline(always)]
40 | pub(crate) fn blocks_to_coalesce(&mut self, inserted_node_pointer: NodePointer, difference: NonZeroUsize, block_size: NonZeroUsize, furthest_back_contiguous_with_inserted_node_pointer_memory_address: MemoryAddress, furthest_forward_contiguous_with_inserted_node_pointer_memory_address: MemoryAddress) -> (MemoryAddress, MemoryAddress)
41 | {
42 | let number_of_contiguous_blocks_excluding_inserted_node = difference.divide_power_of_two_by_power_of_two(block_size);
43 |
44 | let even_sic_total_number_of_contiguous_blocks_to_coalesce = number_of_contiguous_blocks_excluding_inserted_node.is_odd();
45 |
46 | if even_sic_total_number_of_contiguous_blocks_to_coalesce
47 | {
48 | (furthest_back_contiguous_with_inserted_node_pointer_memory_address, furthest_forward_contiguous_with_inserted_node_pointer_memory_address)
49 | }
50 | else
51 | {
52 | let insert_node_pointer_memory_address = inserted_node_pointer.value();
53 | if unlikely!(furthest_forward_contiguous_with_inserted_node_pointer_memory_address == insert_node_pointer_memory_address)
54 | {
55 | (furthest_back_contiguous_with_inserted_node_pointer_memory_address, furthest_forward_contiguous_with_inserted_node_pointer_memory_address.node_pointer().previous().value())
56 | }
57 | else if unlikely!(furthest_back_contiguous_with_inserted_node_pointer_memory_address == insert_node_pointer_memory_address)
58 | {
59 | let furthest_back_node_pointer = furthest_back_contiguous_with_inserted_node_pointer_memory_address.node_pointer();
60 |
61 | (furthest_back_node_pointer.next().value(), furthest_forward_contiguous_with_inserted_node_pointer_memory_address)
62 | }
63 | else
64 | {
65 | let furthest_back_node_pointer = self.insert_memory_address(furthest_back_contiguous_with_inserted_node_pointer_memory_address);
66 |
67 | (furthest_back_node_pointer.next().value(), furthest_forward_contiguous_with_inserted_node_pointer_memory_address)
68 | }
69 | }
70 | }
71 |
72 | #[inline(always)]
73 | pub(crate) fn remove_contiguous_blocks(&mut self, first_block_memory_address: MemoryAddress, last_block_memory_address: MemoryAddress, block_size: NonZeroUsize)
74 | {
75 | let mut to_remove_memory_address = first_block_memory_address;
76 | while
77 | {
78 | let to_remove_node_pointer = to_remove_memory_address.node_pointer();
79 | let is_cached_first_child = to_remove_node_pointer == self.cached_first_child();
80 | self.remove(to_remove_node_pointer, is_cached_first_child);
81 |
82 | to_remove_memory_address.add_assign_non_zero(block_size);
83 | likely!(to_remove_memory_address <= last_block_memory_address)
84 | }
85 | {}
86 | }
87 |
88 | #[inline(always)]
89 | pub(crate) fn remove(&mut self, node_pointer: NodePointer, is_cached_first_child: bool)
90 | {
91 | if unlikely!(is_cached_first_child)
92 | {
93 | self.update_cached_first_child(node_pointer.next());
94 | }
95 |
96 | self.tree.remove_node_pointer(node_pointer);
97 | self.debug_assert_cached_first_child_is_valid();
98 | }
99 |
100 | #[inline(always)]
101 | pub(crate) fn insert_memory_address(&mut self, memory_address: MemoryAddress) -> NodePointer
102 | {
103 | let cached_first_child = self.cached_first_child();
104 |
105 | if unlikely!(cached_first_child.is_null() || memory_address < cached_first_child.value())
106 | {
107 | self.update_cached_first_child(memory_address.node_pointer())
108 | }
109 |
110 | self.tree.insert_memory_address(memory_address);
111 | self.debug_assert_cached_first_child_is_valid();
112 | memory_address.node_pointer()
113 | }
114 |
115 | #[inline(always)]
116 | pub(crate) fn double_ended_iterate<'a>(&'a self) -> RedBlackTreeDoubleEndedIterator<'a>
117 | {
118 | self.tree.double_ended_iterate()
119 | }
120 |
121 | #[inline(always)]
122 | pub(crate) fn cached_first_child(&self) -> NodePointer
123 | {
124 | self.cached_first_child
125 | }
126 |
127 | #[inline(always)]
128 | fn update_cached_first_child(&mut self, new_first_child_to_cache: NodePointer)
129 | {
130 | self.cached_first_child = new_first_child_to_cache
131 | }
132 |
133 | #[inline(always)]
134 | fn debug_assert_cached_first_child_is_valid(&self)
135 | {
136 | debug_assert_eq!(self.cached_first_child, self.tree.first_child(), "First child is not valid");
137 | }
138 | }
139 |
--------------------------------------------------------------------------------
/src/GloballyAllocated.rs:
--------------------------------------------------------------------------------
1 | // This file is part of linux-support. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-support/master/COPYRIGHT. No part of linux-support, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2020 The developers of linux-support. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-support/master/COPYRIGHT.
3 |
4 |
5 | /// Needed for `Vec`, `String`, `Arc`, `Rc` and the like when shared between threads in messages.
6 | ///
7 | /// This is because `String` and `Vec`, for example, don't allocate if constructed with an empty capacity; thus it is possible to create an instance whilst in the context, say, of a ThreadAllocator but then resize in the context of a GlobalAllocator.
8 | /// Since no allocation has occurred, the ThreadAllocator won't be tracking the memory for, say, the `String`.
9 | /// Hence memory gets freed in a thread-unsafe manner.
10 | /// Oops!
11 | pub struct GloballyAllocated>
12 | {
13 | value: ManuallyDrop,
14 | global_allocator: &'static GTACSA,
15 | marker: PhantomData,
16 | }
17 |
18 | impl> Drop for GloballyAllocated
19 | {
20 | #[inline(always)]
21 | fn drop(&mut self)
22 | {
23 | self.as_mut(|t| unsafe { drop_in_place(t) })
24 | }
25 | }
26 |
27 | impl> Deref for GloballyAllocated
28 | {
29 | type Target = T;
30 |
31 | #[inline(always)]
32 | fn deref(&self) -> &Self::Target
33 | {
34 | self.value.deref()
35 | }
36 | }
37 |
38 | impl> Debug for GloballyAllocated
39 | {
40 | #[inline(always)]
41 | fn fmt(&self, f: &mut Formatter) -> fmt::Result
42 | {
43 | self.deref().fmt(f)
44 | }
45 | }
46 |
47 | impl> Display for GloballyAllocated
48 | {
49 | #[inline(always)]
50 | fn fmt(&self, f: &mut Formatter) -> fmt::Result
51 | {
52 | self.deref().fmt(f)
53 | }
54 | }
55 |
56 | impl> PartialEq for GloballyAllocated
57 | {
58 | #[inline(always)]
59 | fn eq(&self, rhs: &Self) -> bool
60 | {
61 | self.deref() == rhs.deref()
62 | }
63 | }
64 |
65 | impl> Eq for GloballyAllocated
66 | {
67 | }
68 |
69 | impl> PartialOrd for GloballyAllocated
70 | {
71 | #[inline(always)]
72 | fn partial_cmp(&self, rhs: &Self) -> Option<Ordering>
73 | {
74 | self.deref().partial_cmp(rhs.deref())
75 | }
76 | }
77 |
78 | impl> Ord for GloballyAllocated
79 | {
80 | #[inline(always)]
81 | fn cmp(&self, rhs: &Self) -> Ordering
82 | {
83 | self.deref().cmp(rhs.deref())
84 | }
85 | }
86 |
87 | impl> Hash for GloballyAllocated
88 | {
89 | #[inline(always)]
90 | fn hash<H: Hasher>(&self, state: &mut H)
91 | {
92 | self.deref().hash(state)
93 | }
94 | }
95 |
96 | impl> Clone for GloballyAllocated
97 | {
98 | #[inline(always)]
99 | fn clone(&self) -> Self
100 | {
101 | Self::allocate(self.global_allocator, ||
102 | {
103 | self.value.deref().clone()
104 | })
105 | }
106 | }
107 |
108 | impl> GloballyAllocated
109 | {
110 | /// Allocate.
111 | #[inline(always)]
112 | pub fn allocate(global_allocator: &'static GTACSA, callback: impl FnOnce() -> T + UnwindSafe) -> Self
113 | {
114 | Self
115 | {
116 | value: ManuallyDrop::new(global_allocator.callback_with_global_allocator(callback)),
117 | global_allocator,
118 | marker: PhantomData,
119 | }
120 | }
121 |
122 | /// Mutable reference.
123 | #[inline(always)]
124 | pub fn as_mut<F: FnOnce(&mut T) -> R + UnwindSafe, R>(&mut self, callback: F) -> R
125 | {
126 | self.global_allocator.callback_with_global_allocator(AssertUnwindSafe(|| callback(self.value.deref_mut())))
127 | }
128 | }
129 |
130 | impl> GloballyAllocated, CoroutineHeapSize, GTACSA>
131 | {
132 | /// Clone specialized for `Arc`.
133 | #[inline(always)]
134 | pub fn clone_arc(&self) -> Self
135 | {
136 | Self
137 | {
138 | value: ManuallyDrop::new(self.value.deref().clone()),
139 | global_allocator: self.global_allocator,
140 | marker: PhantomData,
141 | }
142 | }
143 | }
144 |
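The doc comment at the top of this file relies on the fact that empty collections defer their first allocation; that behaviour can be checked in isolation, which is why the wrapper routes construction, mutation and dropping through the global allocator's context.

    fn main()
    {
        // An empty Vec (or String) performs no allocation when constructed…
        let empty: Vec<u8> = Vec::new();
        assert_eq!(empty.capacity(), 0);

        // …so whichever allocator is current when it first grows is the one that
        // ends up owning, and later freeing, its buffer. That deferred allocation
        // is the hazard described in the doc comment above.
        let mut grown = empty;
        grown.push(1);
        assert!(grown.capacity() > 0);
    }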
--------------------------------------------------------------------------------
/src/memory_sources/arena/Arena.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// An arena memory source.
6 | #[derive(Debug)]
7 | pub struct Arena<MS: MemorySource>
8 | {
9 | next_available_slot_index: Cell<SlotIndex>,
10 |
11 | block_size: NonZeroUsize,
12 | #[cfg(debug_assertions)] number_of_blocks: NonZeroUsize,
13 |
14 | memory_source: MS,
15 | allocations_start_from: MemoryAddress,
16 | memory_source_size: NonZeroUsize,
17 | }
18 |
19 | impl<MS: MemorySource> Drop for Arena<MS>
20 | {
21 | #[inline(always)]
22 | fn drop(&mut self)
23 | {
24 | self.memory_source.release(self.memory_source_size, self.allocations_start_from)
25 | }
26 | }
27 |
28 | impl<MS: MemorySource> MemorySource for Arena<MS>
29 | {
30 | #[inline(always)]
31 | fn obtain(&self, non_zero_size: NonZeroUsize) -> Result<MemoryAddress, AllocError>
32 | {
33 | debug_assert!(non_zero_size <= self.block_size);
34 |
35 | let next_available_slot_index = self.next_available_slot_index.get();
36 |
37 | if unlikely!(next_available_slot_index.is_fully_allocated())
38 | {
39 | return Err(AllocError)
40 | }
41 |
42 | let unallocated_block = self.unallocated_block(next_available_slot_index);
43 | self.next_available_slot_index.set(unallocated_block.next_available_slot_index());
44 |
45 | Ok(unallocated_block.to_memory_address())
46 | }
47 |
48 | #[inline(always)]
49 | fn release(&self, non_zero_size: NonZeroUsize, current_memory: MemoryAddress)
50 | {
51 | debug_assert!(non_zero_size <= self.block_size);
52 |
53 | let unallocated_block = UnallocatedBlock::from_memory_address(current_memory);
54 | unallocated_block.set_unoccupied_next_available_slot_index(self.next_available_slot_index.get());
55 |
56 | self.next_available_slot_index.set(self.slot_index_from_block(unallocated_block));
57 | }
58 | }
59 |
60 | impl<MS: MemorySource> Arena<MS>
61 | {
62 | /// Create a new instance by memory size and block size.
63 | #[inline(always)]
64 | pub fn new_by_amount(memory_source: MS, block_size: NonZeroUsize, memory_source_size: NonZeroUsize, block_initializer: impl Fn(MemoryAddress, NonZeroUsize)) -> Result<Self, AllocError>
65 | {
66 | let number_of_blocks = ((memory_source_size.get() + (block_size.get() - 1)) / block_size.get()).non_zero();
67 |
68 | Self::new(memory_source, block_size, number_of_blocks, block_initializer)
69 | }
70 |
71 | /// Creates a new instance.
72 | ///
73 | /// `block_size` must be at least 8 to be useful.
74 | /// `block_initializer` takes the address of a block and the size of a block; after it is called, the block will have the first 8 bytes (4 bytes on 32-bit platforms) overwritten with a slot index pointer.
75 | #[inline(always)]
76 | pub fn new(memory_source: MS, block_size: NonZeroUsize, number_of_blocks: NonZeroUsize, block_initializer: impl Fn(MemoryAddress, NonZeroUsize)) -> Result<Self, AllocError>
77 | {
78 | let memory_source_size = block_size.multiply(number_of_blocks);
79 |
80 | let allocations_start_from = memory_source.obtain(memory_source_size)?;
81 |
82 | Self::initialize_blocks_so_they_are_a_singly_linked_list(block_size, block_initializer, memory_source_size, allocations_start_from);
83 |
84 | Ok
85 | (
86 | Self
87 | {
88 | next_available_slot_index: Cell::default(),
89 |
90 | block_size,
91 | #[cfg(debug_assertions)] number_of_blocks,
92 |
93 | memory_source,
94 | allocations_start_from,
95 | memory_source_size,
96 | }
97 | )
98 | }
99 |
100 | #[inline(always)]
101 | fn initialize_blocks_so_they_are_a_singly_linked_list(block_size: NonZeroUsize, block_initializer: impl Fn(MemoryAddress, NonZeroUsize), memory_source_size: NonZeroUsize, allocations_start_from: MemoryAddress)
102 | {
103 | let mut slot_index = SlotIndex(1);
104 | let mut block_memory_address = allocations_start_from;
105 | let allocations_end_at = allocations_start_from.add_non_zero(memory_source_size);
106 | let allocations_end_at_less_one_block = allocations_end_at.subtract_non_zero(block_size);
107 | while block_memory_address != allocations_end_at_less_one_block
108 | {
109 | let unallocated_block = UnallocatedBlock::from_memory_address(block_memory_address);
110 | unallocated_block.initialize(block_size, &block_initializer, slot_index);
111 |
112 | slot_index.increment();
113 | block_memory_address.add_assign_non_zero(block_size)
114 | }
115 | UnallocatedBlock::from_memory_address(allocations_end_at_less_one_block).initialize(block_size, &block_initializer, SlotIndex::IsFullyAllocatedNextAvailableSlotIndexSentinel);
116 | }
117 |
118 | #[inline(always)]
119 | fn unallocated_block(&self, slot_index: SlotIndex) -> &UnallocatedBlock
120 | {
121 | UnallocatedBlock::from_memory_address(self.block_from_slot_index(slot_index))
122 | }
123 |
124 | #[inline(always)]
125 | fn block_from_slot_index(&self, slot_index: SlotIndex) -> MemoryAddress
126 | {
127 | debug_assert_ne!(slot_index, SlotIndex::IsFullyAllocatedNextAvailableSlotIndexSentinel, "Should never get IsFullyAllocatedNextAvailableSlotIndexSentinel for `slot_index`");
128 |
129 | debug_assert!(slot_index.0 < self.number_of_blocks.get(), "Arena index was out-of-range");
130 |
131 | self.allocations_start_from.add(self.block_size.get() * slot_index.0)
132 | }
133 |
134 | #[inline(always)]
135 | fn slot_index_from_block(&self, unallocated_block: &UnallocatedBlock) -> SlotIndex
136 | {
137 | SlotIndex(unallocated_block.to_memory_address().difference(self.allocations_start_from) / self.block_size.get())
138 | }
139 | }
140 |
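`new_by_amount` above derives the block count from the memory size with a ceiling division; a quick standalone check of that rounding (values chosen arbitrarily):

    fn main()
    {
        // ceil(memory_source_size / block_size) == (memory_source_size + block_size - 1) / block_size
        let block_size = 64usize;
        let memory_source_size = 1000usize;
        let number_of_blocks = (memory_source_size + (block_size - 1)) / block_size;
        assert_eq!(number_of_blocks, 16); // 15 full 64-byte blocks plus a 40-byte remainder round up to 16.
    }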
--------------------------------------------------------------------------------
/src/allocators/ContextAllocator.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// An allocator designed for contexts with different lifetimes.
6 | ///
7 | /// This allocator NEVER grows or shrinks its memory region.
8 | ///
9 | /// This allocator is not thread-safe.
10 | #[derive(Debug)]
11 | pub enum ContextAllocator<MS: MemorySource>
12 | {
13 | /// Use this variant for contexts with short-lived lifetimes.
14 | ///
15 | /// Very fast allocation and almost costless deallocation, at the expense of the strong likelihood of running out of memory.
16 | ///
17 | /// Reallocation is very expensive when growing unless reallocating the most recently made allocation.
18 | ShortLived(BumpAllocator<MS>),
19 |
20 | /// Use this variant for contexts with slightly longer than short-lived lifetimes.
21 | ///
22 | /// Slower allocation and deallocation but reallocation is less expensive than for `ShortLived`.
23 | MediumLived(BitSetAllocator<MS>),
24 |
25 | /// Use this variant for contexts with long-lived lifetimes.
26 | LongLived(MultipleBinarySearchTreeAllocator<MS>),
27 | }
28 |
29 | impl<MS: MemorySource> Allocator for ContextAllocator<MS>
30 | {
31 | #[inline(always)]
32 | fn allocate(&self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize) -> Result<(NonNull<u8>, usize), AllocError>
33 | {
34 | use self::ContextAllocator::*;
35 |
36 | match *self
37 | {
38 | ShortLived(ref allocator) => allocator.allocate(non_zero_size, non_zero_power_of_two_alignment),
39 |
40 | MediumLived(ref allocator) => allocator.allocate(non_zero_size, non_zero_power_of_two_alignment),
41 |
42 | LongLived(ref allocator) => allocator.allocate(non_zero_size, non_zero_power_of_two_alignment),
43 | }
44 | }
45 |
46 | #[inline(always)]
47 | fn deallocate(&self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize, current_memory: NonNull<u8>)
48 | {
49 | use self::ContextAllocator::*;
50 |
51 | match *self
52 | {
53 | ShortLived(ref allocator) => allocator.deallocate(non_zero_size, non_zero_power_of_two_alignment, current_memory),
54 |
55 | MediumLived(ref allocator) => allocator.deallocate(non_zero_size, non_zero_power_of_two_alignment, current_memory),
56 |
57 | LongLived(ref allocator) => allocator.deallocate(non_zero_size, non_zero_power_of_two_alignment, current_memory),
58 | }
59 | }
60 |
61 | #[inline(always)]
62 | fn growing_reallocate(&self, non_zero_new_size: NonZeroUsize, non_zero_power_of_two_new_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, non_zero_power_of_two_current_alignment: NonZeroUsize, current_memory: NonNull<u8>, current_memory_can_not_be_moved: bool) -> Result<(NonNull<u8>, usize), AllocError>
63 | {
64 | use self::ContextAllocator::*;
65 |
66 | match *self
67 | {
68 | ShortLived(ref allocator) => allocator.growing_reallocate(non_zero_new_size, non_zero_power_of_two_new_alignment, non_zero_current_size, non_zero_power_of_two_current_alignment, current_memory, current_memory_can_not_be_moved),
69 |
70 | MediumLived(ref allocator) => allocator.growing_reallocate(non_zero_new_size, non_zero_power_of_two_new_alignment, non_zero_current_size, non_zero_power_of_two_current_alignment, current_memory, current_memory_can_not_be_moved),
71 |
72 | LongLived(ref allocator) => allocator.growing_reallocate(non_zero_new_size, non_zero_power_of_two_new_alignment, non_zero_current_size, non_zero_power_of_two_current_alignment, current_memory, current_memory_can_not_be_moved),
73 | }
74 | }
75 |
76 | #[inline(always)]
77 | fn shrinking_reallocate(&self, non_zero_new_size: NonZeroUsize, non_zero_power_of_two_new_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, non_zero_power_of_two_current_alignment: NonZeroUsize, current_memory: NonNull<u8>, current_memory_can_not_be_moved: bool) -> Result<(NonNull<u8>, usize), AllocError>
78 | {
79 | use self::ContextAllocator::*;
80 |
81 | match *self
82 | {
83 | ShortLived(ref allocator) => allocator.shrinking_reallocate(non_zero_new_size, non_zero_power_of_two_new_alignment, non_zero_current_size, non_zero_power_of_two_current_alignment, current_memory, current_memory_can_not_be_moved),
84 |
85 | MediumLived(ref allocator) => allocator.shrinking_reallocate(non_zero_new_size, non_zero_power_of_two_new_alignment, non_zero_current_size, non_zero_power_of_two_current_alignment, current_memory, current_memory_can_not_be_moved),
86 |
87 | LongLived(ref allocator) => allocator.shrinking_reallocate(non_zero_new_size, non_zero_power_of_two_new_alignment, non_zero_current_size, non_zero_power_of_two_current_alignment, current_memory, current_memory_can_not_be_moved),
88 | }
89 | }
90 | }
91 |
92 | impl<MS: MemorySource> LocalAllocator<MS> for ContextAllocator<MS>
93 | {
94 | #[inline(always)]
95 | fn new_local_allocator(memory_source: MS, lifetime_hint: LifetimeHint, block_size_hint: NonZeroUsize) -> Self
96 | {
97 | use self::LifetimeHint::*;
98 |
99 | match lifetime_hint
100 | {
101 | ShortLived => ContextAllocator::ShortLived(BumpAllocator::new_local_allocator(memory_source, lifetime_hint, block_size_hint)),
102 | MediumLived => ContextAllocator::MediumLived(BitSetAllocator::new_local_allocator(memory_source, lifetime_hint, block_size_hint)),
103 | LongLived => ContextAllocator::LongLived(MultipleBinarySearchTreeAllocator::new_local_allocator(memory_source, lifetime_hint, block_size_hint)),
104 | }
105 | }
106 |
107 | #[inline(always)]
108 | fn memory_range(&self) -> MemoryRange
109 | {
110 | use self::ContextAllocator::*;
111 |
112 | match *self
113 | {
114 | ShortLived(ref allocator) => allocator.memory_range(),
115 |
116 | MediumLived(ref allocator) => allocator.memory_range(),
117 |
118 | LongLived(ref allocator) => allocator.memory_range(),
119 | }
120 | }
121 | }
122 |
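The enum above forwards every `Allocator` method through a `match`. The toy below shows the same pattern in isolation with illustrative names only; one reason to prefer enum dispatch here is that every call stays statically dispatched and inlinable, whereas a boxed `dyn` allocator would itself need an allocator in order to exist.

    trait Strategy
    {
        fn allocate(&self, size: usize) -> usize;
    }

    struct Bump;
    struct Trees;

    impl Strategy for Bump
    {
        fn allocate(&self, size: usize) -> usize { size }
    }

    impl Strategy for Trees
    {
        fn allocate(&self, size: usize) -> usize { size.next_power_of_two() }
    }

    // The enum wraps one concrete strategy per variant and forwards via a match.
    enum Switch
    {
        ShortLived(Bump),
        LongLived(Trees),
    }

    impl Strategy for Switch
    {
        fn allocate(&self, size: usize) -> usize
        {
            match self
            {
                Switch::ShortLived(strategy) => strategy.allocate(size),
                Switch::LongLived(strategy) => strategy.allocate(size),
            }
        }
    }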
--------------------------------------------------------------------------------
/src/allocators/binary_search_trees/BinarySearchTreesWithCachedKnowledgeOfFirstChild.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | pub(crate) struct BinarySearchTreesWithCachedKnowledgeOfFirstChild
6 | {
7 | binary_search_trees_of_free_blocks_sorted_by_ascending_memory_address_and_indexed_by_power_of_two_exponent_less_smallest_power_of_two: [UnsafeCell<BinarySearchTreeWithCachedKnowledgeOfFirstChild>; Self::NumberOfBinarySearchTrees],
8 | }
9 |
10 | impl Debug for BinarySearchTreesWithCachedKnowledgeOfFirstChild
11 | {
12 | #[inline(always)]
13 | fn fmt(&self, f: &mut Formatter) -> fmt::Result
14 | {
15 | writeln!(f)?;
16 | writeln!(f, "\tBlockSize => Count Cached first child is null?")?;
17 | for binary_search_tree_index in 0 .. Self::NumberOfBinarySearchTrees
18 | {
19 | let block_size = Self::binary_search_tree_index_to_block_size(binary_search_tree_index);
20 | let binary_search_tree = self.binary_search_trees_of_free_blocks_sorted_by_ascending_memory_address_and_indexed_by_power_of_two_exponent_less_smallest_power_of_two[binary_search_tree_index].get().mutable_reference();
21 |
22 | let has_blocks = binary_search_tree.has_blocks();
23 | if has_blocks
24 | {
25 | let mut count = 0;
26 | for _ in binary_search_tree.double_ended_iterate()
27 | {
28 | count += 1;
29 | }
30 |
31 | writeln!(f, "\t{:?} => {:?} {:?}", block_size, count, binary_search_tree.cached_first_child().is_null())?;
32 | }
33 | }
34 | Ok(())
35 | }
36 | }
37 |
38 | impl Default for BinarySearchTreesWithCachedKnowledgeOfFirstChild
39 | {
40 | #[inline(always)]
41 | fn default() -> Self
42 | {
43 | Self
44 | {
45 | binary_search_trees_of_free_blocks_sorted_by_ascending_memory_address_and_indexed_by_power_of_two_exponent_less_smallest_power_of_two: Default::default(),
46 | }
47 | }
48 | }
49 |
50 | impl BinarySearchTreesWithCachedKnowledgeOfFirstChild
51 | {
52 | const SmallestInclusivePowerOfTwoExponent: NonZeroUsize = Self::logarithm_base2(size_of::<Node>());
53 |
54 | pub(crate) const NumberOfBinarySearchTrees: usize = 16;
55 |
56 | const LargestInclusiveBinarySearchTreeIndex: usize = Self::NumberOfBinarySearchTrees - 1;
57 |
58 | const LargestInclusivePowerOfTwoExponent: NonZeroUsize = new_non_zero_usize(Self::binary_search_tree_index_to_power_of_two_exponent(Self::LargestInclusiveBinarySearchTreeIndex));
59 |
60 | pub(crate) const MinimumAllocationSize: NonZeroUsize = new_non_zero_usize(1 << Self::SmallestInclusivePowerOfTwoExponent.get());
61 |
62 | pub(crate) const MaximumAllocationSize: NonZeroUsize = new_non_zero_usize(1 << Self::LargestInclusivePowerOfTwoExponent.get());
63 |
64 | pub(crate) const MinimumAlignment: NonZeroUsize = Self::MinimumAllocationSize;
65 |
66 | const MaximumAlignment: NonZeroUsize = Self::MaximumAllocationSize;
67 |
68 | #[inline(always)]
69 | const fn logarithm_base2(value: usize) -> NonZeroUsize
70 | {
71 | new_non_zero_usize(logarithm_base2_as_usize(value))
72 | }
73 |
74 | #[inline(always)]
75 | pub(crate) fn binary_search_tree_index(block_size: NonZeroUsize) -> usize
76 | {
77 | debug_assert_eq!(block_size.next_power_of_two(), block_size, "A block_size was not passed");
78 | debug_assert!(block_size >= Self::MinimumAllocationSize, "Block size was too small");
79 | debug_assert!(block_size <= Self::MaximumAllocationSize, "Block size was too large");
80 |
81 | let power_of_two_exponent = logarithm_base2_as_usize(block_size.get());
82 |
83 | power_of_two_exponent - Self::SmallestInclusivePowerOfTwoExponent.get()
84 | }
85 |
86 | #[inline(always)]
87 | const fn binary_search_tree_index_to_power_of_two_exponent(binary_search_tree_index: usize) -> usize
88 | {
89 | Self::SmallestInclusivePowerOfTwoExponent.get() + binary_search_tree_index
90 | }
91 |
92 | #[inline(always)]
93 | pub(crate) fn binary_search_tree_index_to_block_size(binary_search_tree_index: usize) -> usize
94 | {
95 | 1 << Self::binary_search_tree_index_to_power_of_two_exponent(binary_search_tree_index)
96 | }
97 |
98 | #[inline(always)]
99 | pub(crate) fn size_is_less_than_minimum_allocation_size(size: usize) -> bool
100 | {
101 | size < Self::MinimumAllocationSize.get()
102 | }
103 |
104 | #[inline(always)]
105 | pub(crate) fn size_is_greater_than_minimum_allocation_size(size: usize) -> bool
106 | {
107 | size >= Self::MinimumAllocationSize.get()
108 | }
109 |
110 | #[inline(always)]
111 | pub(crate) fn size_exceeds_maximum_allocation_size(non_zero_size: NonZeroUsize) -> bool
112 | {
113 | non_zero_size > Self::MaximumAllocationSize
114 | }
115 |
116 | #[inline(always)]
117 | pub(crate) fn alignment_exceeds_maximum_alignment(non_zero_power_of_two_alignment: NonZeroUsize) -> bool
118 | {
119 | non_zero_power_of_two_alignment > Self::MaximumAlignment
120 | }
121 |
122 | #[inline(always)]
123 | pub(crate) fn floor_size_to_minimum(unfloored_non_zero_size: NonZeroUsize) -> NonZeroUsize
124 | {
125 | max(unfloored_non_zero_size, Self::MinimumAllocationSize)
126 | }
127 |
128 | #[inline(always)]
129 | pub(crate) fn floor_alignment_to_minimum(unfloored_non_zero_power_of_two_alignment: NonZeroUsize) -> NonZeroUsize
130 | {
131 | max(unfloored_non_zero_power_of_two_alignment, Self::MinimumAlignment)
132 | }
133 |
134 | #[inline(always)]
135 | pub(crate) fn binary_search_tree_for(&self, binary_search_tree_index: usize) -> &mut BinarySearchTreeWithCachedKnowledgeOfFirstChild
136 | {
137 | debug_assert!(binary_search_tree_index < Self::NumberOfBinarySearchTrees, "binary_search_tree_index `{}` is too large", binary_search_tree_index);
138 |
139 | self.binary_search_trees_of_free_blocks_sorted_by_ascending_memory_address_and_indexed_by_power_of_two_exponent_less_smallest_power_of_two.get_unchecked_safe(binary_search_tree_index).get().mutable_reference()
140 | }
141 |
142 | #[inline(always)]
143 | pub(crate) fn smallest_power_of_two_difference(difference: usize) -> NonZeroUsize
144 | {
145 | debug_assert!(Self::size_is_greater_than_minimum_allocation_size(difference), "difference `{}` is too small to be a block", difference);
146 |
147 | (1 << difference.trailing_zeros()).non_zero()
148 | }
149 |
150 | #[allow(dead_code)]
151 | #[inline(always)]
152 | pub(crate) fn largest_power_of_two_difference(difference: usize) -> NonZeroUsize
153 | {
154 | debug_assert!(Self::size_is_greater_than_minimum_allocation_size(difference), "difference `{}` is too small to be a block", difference);
155 |
156 | const BitsInAByte: usize = 8;
157 | const BitsInAnUsize: usize = size_of::<usize>() * BitsInAByte;
158 | const ZeroBased: usize = BitsInAnUsize - 1;
159 |
160 | let shift = ZeroBased - difference.leading_zeros() as usize;
161 |
162 | (1 << shift).non_zero()
163 |
164 | }
165 | }
166 |
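`binary_search_tree_index` and its inverse above are plain exponent arithmetic; the check below walks one example, assuming purely for illustration that the smallest allocation is 32 bytes (the crate derives the real exponent from its node size, which may differ).

    fn main()
    {
        // Hypothetical smallest allocation of 32 bytes, i.e. exponent 5.
        let smallest_inclusive_power_of_two_exponent = 5u32;

        // A 256-byte free block (2^8) lands in binary search tree index 8 - 5 = 3,
        // and index 3 maps back to a block size of 1 << (5 + 3) = 256.
        let block_size = 256usize;
        let index = block_size.trailing_zeros() - smallest_inclusive_power_of_two_exponent;
        assert_eq!(index, 3);
        assert_eq!(1usize << (smallest_inclusive_power_of_two_exponent + index), 256);
    }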
--------------------------------------------------------------------------------
/src/extensions/NonNullU8Ext.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// Extensions to make working with NonNull easier.
6 | pub trait NonNullU8Ext: Sized + Copy + Ord + Debug
7 | {
8 | /// Round up to power of two.
9 | #[inline(always)]
10 | fn round_up_to_power_of_two(self, non_zero_power_of_two_alignment: NonZeroUsize) -> Self
11 | {
12 | Self::from_usize(self.to_usize().non_zero().round_up_to_power_of_two(non_zero_power_of_two_alignment).to_usize())
13 | }
14 |
15 | /// Add.
16 | #[inline(always)]
17 | fn add(self, increment: usize) -> Self
18 | {
19 | Self::from_usize(self.to_usize() + increment)
20 | }
21 |
22 | /// Add.
23 | #[inline(always)]
24 | fn add_non_zero(self, increment: NonZeroUsize) -> Self
25 | {
26 | Self::from_usize(self.to_usize() + increment.get())
27 | }
28 |
29 | /// Add.
30 | #[inline(always)]
31 | fn checked_add(self, increment: usize) -> Option<Self>
32 | {
33 | self.to_usize().checked_add(increment).map(Self::from_usize)
34 | }
35 |
36 | /// Add.
37 | #[inline(always)]
38 | fn add_assign(&mut self, increment: usize)
39 | {
40 | *self = (*self).add(increment)
41 | }
42 |
43 | /// Add.
44 | #[inline(always)]
45 | fn add_assign_non_zero(&mut self, increment: NonZeroUsize)
46 | {
47 | self.add_assign(increment.get())
48 | }
49 |
50 | /// Subtract.
51 | #[inline(always)]
52 | fn subtract(self, decrement: usize) -> Self
53 | {
54 | let usize = self.to_usize();
55 | debug_assert!(usize >= decrement, "decrement is too large");
56 |
57 | Self::from_usize(usize - decrement)
58 | }
59 |
60 | /// Subtract.
61 | #[inline(always)]
62 | fn subtract_non_zero(self, decrement: NonZeroUsize) -> Self
63 | {
64 | self.subtract(decrement.get())
65 | }
66 |
67 | /// Difference.
68 | #[inline(always)]
69 | fn difference(self, other: Self) -> usize
70 | {
71 | debug_assert!(self >= other, "other `{:?}` is less than self `{:?}`", other, self);
72 |
73 | self.to_usize() - other.to_usize()
74 | }
75 |
76 | /// Difference.
77 | #[inline(always)]
78 | fn difference_u32(self, other: Self) -> u32
79 | {
80 | let difference_usize = self.difference(other);
81 | debug_assert!(difference_usize <= u32::MAX as usize, "difference `{}` exceeds u32::MAX `{}`", difference_usize, u32::MAX);
82 | difference_usize as u32
83 | }
84 |
85 | /// Difference.
86 | #[inline(always)]
87 | fn difference_u32_non_zero(self, other: Self) -> NonZeroU32
88 | {
89 | NonZeroU32::non_zero(self.difference_u32(other))
90 | }
91 |
92 | /// Read.
93 | #[inline(always)]
94 | fn read<V>(self) -> V
95 | {
96 | unsafe { (self.to_pointer() as *const V).read() }
97 | }
98 |
99 | /// Read.
100 | #[inline(always)]
101 | fn read_u64(self) -> u64
102 | {
103 | self.read::<u64>()
104 | }
105 |
106 | /// Write.
107 | #[inline(always)]
108 | fn write<V>(self, value: V)
109 | {
110 | unsafe { (self.to_pointer() as *mut V).write(value) }
111 | }
112 |
113 | /// Write and advance.
114 | #[inline(always)]
115 | fn write_and_advance<V>(&mut self, value: V)
116 | {
117 | self.write(value);
118 | self.add_assign(size_of::<V>())
119 | }
120 |
121 | #[doc(hidden)]
122 | #[inline(always)]
123 | fn or_u64(self, bits_to_set: u64)
124 | {
125 | let current_value = self.read_u64();
126 | self.write::<u64>(current_value | bits_to_set)
127 | }
128 |
129 | #[doc(hidden)]
130 | #[inline(always)]
131 | fn and_u64(self, bits_to_preserve: u64)
132 | {
133 | let current_value = self.read_u64();
134 | self.write::<u64>(current_value & bits_to_preserve)
135 | }
136 |
137 | #[doc(hidden)]
138 | const BitsInAByte: usize = 8;
139 |
140 | #[doc(hidden)]
141 | const BitsInAnU64: usize = size_of::<u64>() * Self::BitsInAByte;
142 |
143 | #[doc(hidden)]
144 | #[inline(always)]
145 | fn set_bottom_bits_of_u64(self, number_of_bits_to_set: usize)
146 | {
147 | self.set_middle_bits_of_u64(number_of_bits_to_set, number_of_bits_to_set)
148 | }
149 |
150 | #[doc(hidden)]
151 | #[inline(always)]
152 | fn set_middle_bits_of_u64(self, number_of_bits_to_set: usize, number_of_lower_bits: usize)
153 | {
154 | debug_assert!(number_of_bits_to_set <= Self::BitsInAnU64);
155 | debug_assert!(number_of_lower_bits <= Self::BitsInAnU64);
156 | debug_assert!(number_of_bits_to_set <= number_of_lower_bits, "number_of_lower_bits `{}` is greater than number_of_bits_to_set `{}`", number_of_lower_bits, number_of_bits_to_set);
157 |
158 | let number_of_bits_to_set = number_of_bits_to_set as u64;
159 | let number_of_lower_bits = number_of_lower_bits as u64;
160 |
161 | self.or_u64(((1 << number_of_bits_to_set) - 1) << (number_of_lower_bits - number_of_bits_to_set));
162 | }
163 |
164 | #[doc(hidden)]
165 | #[inline(always)]
166 | fn set_top_bits_of_u64(self, number_of_bits_to_set: usize)
167 | {
168 | self.set_middle_bits_of_u64(number_of_bits_to_set, Self::BitsInAnU64 as usize)
169 | }
170 |
171 | #[doc(hidden)]
172 | #[inline(always)]
173 | fn unset_bottom_bits_of_u64(self, number_of_bits_to_unset: usize)
174 | {
175 | self.unset_middle_bits_of_u64(number_of_bits_to_unset, number_of_bits_to_unset)
176 | }
177 |
178 | #[doc(hidden)]
179 | #[inline(always)]
180 | fn unset_middle_bits_of_u64(self, number_of_bits_to_unset: usize, number_of_lower_bits: usize)
181 | {
182 | debug_assert!(number_of_bits_to_unset <= Self::BitsInAnU64);
183 | debug_assert!(number_of_lower_bits <= Self::BitsInAnU64);
184 | debug_assert!(number_of_bits_to_unset <= number_of_lower_bits, "number_of_lower_bits `{}` is greater than number_of_bits_to_unset `{}`", number_of_lower_bits, number_of_bits_to_unset);
185 |
186 | let number_of_bits_to_unset = number_of_bits_to_unset as u64;
187 |
188 | let number_of_lower_bits = number_of_lower_bits as u64;
189 |
190 | let bits_to_preserve = !(((1 << number_of_bits_to_unset) - 1) << (number_of_lower_bits - number_of_bits_to_unset));
191 | self.and_u64(bits_to_preserve);
192 | }
193 |
194 | #[doc(hidden)]
195 | #[inline(always)]
196 | fn unset_top_bits_of_u64(self, number_of_bits_to_unset: usize)
197 | {
198 | self.unset_middle_bits_of_u64(number_of_bits_to_unset, Self::BitsInAnU64 as usize)
199 | }
200 |
201 | /// Is aligned to.
202 | #[inline(always)]
203 | fn is_aligned_to(self, non_zero_power_of_two_alignment: NonZeroUsize) -> bool
204 | {
205 | let value = self.to_usize();
206 | let bitmask = non_zero_power_of_two_alignment.get() - 1;
207 |
208 | value & bitmask == 0
209 | }
210 |
211 | #[doc(hidden)]
212 | #[inline(always)]
213 | fn to_pointer(self) -> *mut u8
214 | {
215 | self.to_non_null_u8().as_ptr()
216 | }
217 |
218 | #[doc(hidden)]
219 | fn to_non_null_u8(self) -> NonNull<u8>;
220 |
221 | #[doc(hidden)]
222 | fn to_usize(self) -> usize;
223 |
224 | #[doc(hidden)]
225 | fn from_usize(value: usize) -> Self;
226 | }
227 |
228 | impl NonNullU8Ext for MemoryAddress
229 | {
230 | #[inline(always)]
231 | fn to_non_null_u8(self) -> NonNull<u8>
232 | {
233 | self
234 | }
235 |
236 | #[inline(always)]
237 | fn to_usize(self) -> usize
238 | {
239 | self.as_ptr() as usize
240 | }
241 |
242 | #[inline(always)]
243 | fn from_usize(value: usize) -> Self
244 | {
245 | (value as *mut u8).non_null()
246 | }
247 | }
248 |
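The `set_middle_bits_of_u64` family above builds its masks from a single expression; the standalone check below reproduces that expression for a couple of small inputs.

    fn main()
    {
        // set_middle_bits_of_u64(number_of_bits_to_set = 3, number_of_lower_bits = 5)
        // ORs in a run of three one-bits whose highest bit is bit 4: 0b11100.
        let number_of_bits_to_set = 3u64;
        let number_of_lower_bits = 5u64;
        let mask = ((1u64 << number_of_bits_to_set) - 1) << (number_of_lower_bits - number_of_bits_to_set);
        assert_eq!(mask, 0b1_1100);

        // set_bottom_bits_of_u64(4) is the special case where both arguments are equal,
        // so the run starts at bit 0: 0b1111.
        let bottom_mask = ((1u64 << 4) - 1) << (4 - 4);
        assert_eq!(bottom_mask, 0b1111);

        // The unset_* helpers AND with the complement of the same mask.
        assert_eq!(!mask & 0b1_1111, 0b0_0011);
    }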
--------------------------------------------------------------------------------
/src/allocators/binary_search_trees/red_black_tree/RedBlackTree.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
6 | pub(crate) struct RedBlackTree
7 | {
8 | root: NodePointer,
9 | }
10 |
11 | impl Default for RedBlackTree
12 | {
13 | #[inline(always)]
14 | fn default() -> Self
15 | {
16 | Self::new()
17 | }
18 | }
19 |
20 | impl RedBlackTree
21 | {
22 | /// Creates an empty `RedBlackTree`.
23 | #[inline(always)]
24 | pub(crate) const fn new() -> Self
25 | {
26 | Self
27 | {
28 | root: NodePointer::null(),
29 | }
30 | }
31 |
32 | /// Returns `true` if the tree is empty.
33 | #[inline(always)]
34 | pub(crate) fn is_empty(&self) -> bool
35 | {
36 | self.root.is_null()
37 | }
38 |
39 | #[inline(always)]
40 | pub(crate) fn has_blocks(&self) -> bool
41 | {
42 | self.root.is_not_null()
43 | }
44 |
45 | #[inline(always)]
46 | pub(crate) fn first_child(&self) -> NodePointer
47 | {
48 | self.root.first_child()
49 | }
50 |
51 | #[inline(always)]
52 | pub(crate) fn remove_node_pointer(&mut self, node_pointer: NodePointer)
53 | {
54 | node_pointer.remove(&mut self.root)
55 | }
56 |
57 | #[inline(always)]
58 | pub(crate) fn insert_memory_address(&mut self, value: MemoryAddress) -> NodePointer
59 | {
60 | let new = self.reset_node(value);
61 |
62 | if unlikely!(self.is_empty())
63 | {
64 | self.insert_root(new);
65 | }
66 | else
67 | {
68 | let key = value;
69 | let mut tree = self.root;
70 | loop
71 | {
72 | if key < tree.key()
73 | {
74 | let left = tree.left();
75 | if unlikely!(left.is_null())
76 | {
77 | tree.insert_left(new, &mut self.root);
78 | break
79 | }
80 | else
81 | {
82 | tree = left
83 | }
84 | }
85 | else
86 | {
87 | let right = tree.right();
88 | if unlikely!(right.is_null())
89 | {
90 | tree.insert_right(new, &mut self.root);
91 | break
92 | }
93 | else
94 | {
95 | tree = right
96 | }
97 | }
98 | }
99 | }
100 | new
101 | }
102 |
103 | /// Gets an iterator over the objects in the `RedBlackTree`, in ascending key order.
104 | ///
105 | /// Creating the iterator itself is not efficient.
106 | #[allow(dead_code)]
107 | #[inline(always)]
108 | pub(crate) fn double_ended_iterate<'a>(&'a self) -> RedBlackTreeDoubleEndedIterator<'a>
109 | {
110 | if self.is_empty()
111 | {
112 | RedBlackTreeDoubleEndedIterator
113 | {
114 | head: NodePointer::default(),
115 | tail: NodePointer::default(),
116 | tree: self,
117 | }
118 | }
119 | else
120 | {
121 | RedBlackTreeDoubleEndedIterator
122 | {
123 | head: self.root.first_child(),
124 | tail: self.root.last_child(),
125 | tree: self,
126 | }
127 | }
128 | }
129 |
130 | /// Constructs a double-ended iterator over a sub-range of elements in the tree, starting at `minimum`, and ending at `maximum`.
131 | ///
132 | /// If `minimum` is `Unbounded`, then it will be treated as "negative infinity", and if `maximum` is `Unbounded`, then it will be treated as "positive infinity".
133 | /// Thus `range(Unbounded, Unbounded)` will yield the whole collection, and so is a more expensive choice than using `double_ended_iterate()`.
134 | ///
135 | /// If `maximum` is less than `minimum` then an empty iterator is returned.
136 | /// If `maximum` or `minimum` is not found then an empty iterator is returned.
137 | ///
138 | /// Creating the iterator itself is not efficient.
139 | #[allow(dead_code)]
140 | #[inline(always)]
141 | pub(crate) fn double_ended_range_iterate<'a>(&'a self, minimum: Bound<MemoryAddress>, maximum: Bound<MemoryAddress>) -> RedBlackTreeDoubleEndedIterator<'a>
142 | {
143 | let lower = self.lower_bound(minimum);
144 | let upper = self.upper_bound(maximum);
145 | if likely!(lower.is_not_null() && upper.is_not_null())
146 | {
147 | let lower_key = lower.key();
148 | let upper_key = upper.key();
149 |
150 | if upper_key >= lower_key
151 | {
152 | RedBlackTreeDoubleEndedIterator
153 | {
154 | head: lower,
155 | tail: upper,
156 | tree: self,
157 | }
158 | }
159 | else
160 | {
161 | self.empty_iterator()
162 | }
163 | }
164 | else
165 | {
166 | self.empty_iterator()
167 | }
168 | }
169 |
170 | /// Returns a `NodePointer` pointing to an element with the given key.
171 | ///
172 | /// If no such element is found then a null `NodePointer` is returned.
173 | #[inline(always)]
174 | pub(crate) fn find(&self, key: MemoryAddress) -> NodePointer
175 | {
176 | use self::Ordering::*;
177 |
178 | let mut tree = self.root;
179 | while tree.is_not_null()
180 | {
181 | match key.cmp(&tree.key())
182 | {
183 | Less => tree = tree.left(),
184 | Equal => return tree,
185 | Greater => tree = tree.right(),
186 | }
187 | }
188 |
189 | NodePointer::default()
190 | }
191 |
192 | /// Returns a `NodePointer` pointing to the first element whose key is above the given bound.
193 | ///
194 | /// If no such element is found then a null `NodePointer` is returned.
195 | #[allow(dead_code)]
196 | #[inline(always)]
197 | pub(crate) fn lower_bound(&self, bound: Bound<MemoryAddress>) -> NodePointer
198 | {
199 | let mut tree = self.root;
200 | let mut result = NodePointer::default();
201 | while tree.is_not_null()
202 | {
203 | let cond = match bound
204 | {
205 | Unbounded => true,
206 |
207 | Included(key) => key <= tree.key(),
208 |
209 | Excluded(key) => key < tree.key(),
210 | };
211 |
212 | if cond
213 | {
214 | result = tree;
215 | tree = tree.left();
216 | }
217 | else
218 | {
219 | tree = tree.right();
220 | }
221 | }
222 | result
223 | }
224 |
225 | /// Returns a `NodePointer` pointing to the last element whose key is below the given bound.
226 | ///
227 | /// If no such element is found then a null `NodePointer` is returned.
228 | #[allow(dead_code)]
229 | #[inline(always)]
230 | pub(crate) fn upper_bound(&self, bound: Bound<MemoryAddress>) -> NodePointer
231 | {
232 | let mut tree = self.root;
233 | let mut result = NodePointer::default();
234 | while tree.is_not_null()
235 | {
236 | let cond = match bound
237 | {
238 | Unbounded => false,
239 |
240 | Included(key) => key < tree.key(),
241 |
242 | Excluded(key) => key <= tree.key(),
243 | };
244 |
245 | if cond
246 | {
247 | tree = tree.left();
248 | }
249 | else
250 | {
251 | result = tree;
252 | tree = tree.right();
253 | }
254 | }
255 | result
256 | }
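The candidate-tracking descent that `lower_bound` and `upper_bound` share (remember the best node seen so far, then keep descending towards a possibly better one) can be sketched, purely for illustration, as a binary search over a sorted slice:

    use std::ops::Bound::{self, Excluded, Included, Unbounded};

    // Illustrative sketch: the same "remember the best candidate, keep narrowing" logic
    // as `lower_bound`, applied to a sorted slice instead of a red-black tree.
    fn lower_bound_index(keys: &[u32], bound: Bound<u32>) -> Option<usize> {
        let (mut low, mut high) = (0usize, keys.len());
        let mut result = None;
        while low < high {
            let mid = low + (high - low) / 2;
            let cond = match bound {
                Unbounded => true,
                Included(key) => key <= keys[mid],
                Excluded(key) => key < keys[mid],
            };
            if cond {
                result = Some(mid); // candidate; look left for an even smaller match
                high = mid;
            } else {
                low = mid + 1;      // everything at or left of mid is below the bound
            }
        }
        result
    }

    fn main() {
        let keys = [10, 20, 30, 40];
        assert_eq!(lower_bound_index(&keys, Included(20)), Some(1));
        assert_eq!(lower_bound_index(&keys, Excluded(20)), Some(2));
        assert_eq!(lower_bound_index(&keys, Included(45)), None); // nothing above the bound
    }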
257 |
258 | #[inline(always)]
259 | fn empty_iterator<'a>(&'a self) -> RedBlackTreeDoubleEndedIterator<'a>
260 | {
261 | RedBlackTreeDoubleEndedIterator
262 | {
263 | head: NodePointer::default(),
264 | tail: NodePointer::default(),
265 | tree: self,
266 | }
267 | }
268 |
269 | #[inline(always)]
270 | fn reset_node(&self, value: MemoryAddress) -> NodePointer
271 | {
272 | let node_pointer = value.node_pointer();
273 | node_pointer.reset();
274 | node_pointer
275 | }
276 |
277 | #[inline(always)]
278 | fn insert_root(&mut self, node: NodePointer)
279 | {
280 | node.set_parent_and_color(NodePointer::default(), Color::Black);
281 | node.set_left(NodePointer::default());
282 | node.set_right(NodePointer::default());
283 | self.root = node;
284 | }
285 | }
286 |
--------------------------------------------------------------------------------
/src/lib.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | #![allow(non_snake_case)]
6 | #![allow(non_upper_case_globals)]
7 | #![allow(non_camel_case_types)]
8 | #![deny(missing_docs)]
9 | #![deny(unreachable_patterns)]
10 | #![feature(allocator_api)]
11 | #![feature(arbitrary_self_types)]
12 | #![feature(const_fn)]
13 | #![feature(const_fn_fn_ptr_basics)]
14 | #![feature(core_intrinsics)]
15 | #![feature(extern_types)]
16 | #![feature(nonnull_slice_from_raw_parts)]
17 | #![feature(nonzero_is_power_of_two)]
18 | #![feature(slice_ptr_get)]
19 | #![feature(slice_ptr_len)]
20 | #![feature(thread_local)]
21 |
22 |
23 | //! # context-allocator
24 | //!
25 | //! This provides allocators suitable for a number of use cases.
26 | //!
27 | //! All of these allocators implement the traits `std::alloc::GlobalAlloc` and `std::alloc::Alloc`, as well as a common base trait, `Allocator`.
28 | //!
29 | //! The most useful is a global allocator which allows switching between thread, coroutine and global (and thus lockable) memory allocators, using the macro `global_thread_and_coroutine_switchable_allocator()`.
30 | //!
31 | //! Allocators provided include:-
32 | //!
33 | //! * `BumpAllocator`, a never-freeing bump allocator with slight optimization for reallocating the last allocation.
34 | //! * `BitSetAllocator`, an allocator that uses a bit set of free blocks; uses 64-bit chunks to optimize searches.
35 | //! * `MultipleBinarySearchTreeAllocator`, an efficient allocator which minimizes fragmentation by using multiple red-black trees of free blocks which are aggressively defragmented.
36 | //! * `ContextAllocator`, a choice of either `BumpAllocator`, `BitSetAllocator` or `MultipleBinarySearchTreeAllocator`.
37 | //! * `MemoryMapAllocator`, a NUMA-aware mmap allocator with support for NUMA policies.
38 | //! * `GlobalThreadAndCoroutineSwitchableAllocator`, suitable for replacing the global allocator and provides switchable allocators for global, thread local and context (coroutine) local needs; must be created using the macro `global_thread_and_coroutine_switchable_allocator`.
39 | //!
40 | //! Allocators use a `MemorySource` to obtain and release memory.
41 | //! Memory sources provided include:-
42 | //!
43 | //! * `MemoryMapSource`, useful for thread-local allocators as it can obtain memory from NUMA-local memory.
44 | //! * `ArenaMemorySource`, an arena of fixed blocks which is itself backed by a memory source; this is useful as a source for the `BumpAllocator` and `BitSetAllocator` when used for contexts.
45 | //!
46 | //! Additionally a number of adaptors are provided:-
47 | //!
48 | //! * `AllocatorAdaptor`, an adaptor of `Allocator` to `GlobalAlloc` and `Alloc`; use it by calling `Allocator.adapt()`.
49 | //! * `GlobalAllocToAllocatorAdaptor`, an adaptor of `GlobalAlloc` to `Allocator`, useful for assigning a global allocator to `GlobalThreadAndCoroutineSwitchableAllocator`.
50 | //! * `AllocToAllocatorAdaptor`, an adaptor of `Alloc` to `Allocator`.
51 | //!
52 | //! When using `GlobalThreadAndCoroutineSwitchableAllocator`, it is possible to save and restore the allocator state for the currently running context (coroutine).
53 | //! It is also possible to create a lockless, fast thread-local allocator which makes use of NUMA memory, unlike a conventional malloc.
54 | //!
55 | //!
56 | //! ## Future
57 | //!
58 | //! * Investigate wrapping [Rampant Pixel's Memory Allocator](https://github.com/rampantpixels/rpmalloc).
59 | //! * Investigate a B-tree backed allocator.
60 | //! * Investigate a design that uses multiple doubly-linked 'free' lists of blocks; blocks can be variable in size but the free list is sorted
61 | //! * Iteration over a particular free-list range may encounter blocks that are too small, or blocks so large that they can be split up.
62 | //! * This design is similar to that used by DPDK.
63 | //! * To make the allocator multi-threaded, DPDK takes a spin lock on a particular 'heap', which is a set of free lists.
64 | //! * Investigate a fall-back over-size allocator for a thread-local allocator, which could use the `NumaMemoryMapSource` underneath.
65 | //! * Investigate supporting over-size allocations in `MultipleBinarySearchTreeAllocator` by scanning the largest binary search tree for contiguous blocks.
66 | //! * Investigate a persistent-memory backed allocator.
67 | //! * Properly support excess allocations and Alloc's grow_in_place functions, but only if these are used by downstream collections.
68 | //! * Investigate the use of the `BMI1` intrinsics `_blsi_u64` (extract lowest set bit), `_blsmsk_u64` and `_blsr_u64`.
69 | //!
70 | //!
71 | //! ## Licensing
72 | //!
73 | //! The license for this project is MIT.
74 |
75 |
76 | use static_assertions::assert_cfg;
77 | assert_cfg!(target_os = "linux");
78 | assert_cfg!(target_pointer_width = "64");
79 |
80 |
81 | use self::adaptors::*;
82 | use self::allocators::*;
83 | use self::binary_search_trees::*;
84 | use self::binary_search_trees::red_black_tree::*;
85 | use self::extensions::*;
86 | use self::memory_sources::*;
87 | use either::*;
88 | use likely::*;
89 | use linux_support::memory::mapping::*;
90 | use magic_ring_buffer::memory_sizes::MemorySize;
91 | use std::alloc::Allocator as Alloc;
92 | use std::alloc::AllocError;
93 | use std::alloc::Layout;
94 | use std::alloc::GlobalAlloc;
95 | use std::alloc::System;
96 | use std::collections::Bound;
97 | use std::collections::Bound::*;
98 | use std::cell::Cell;
99 | use std::cell::UnsafeCell;
100 | use std::cmp::max;
101 | use std::cmp::Ordering;
102 | use std::fmt;
103 | use std::fmt::Debug;
104 | use std::fmt::Display;
105 | use std::fmt::Formatter;
106 | use std::hash::Hash;
107 | use std::hash::Hasher;
108 | use std::marker::PhantomData;
109 | use std::mem::align_of;
110 | use std::mem::ManuallyDrop;
111 | use std::mem::replace;
112 | use std::mem::size_of;
113 | use std::mem::transmute;
114 | use std::num::NonZeroU32;
115 | use std::num::NonZeroU64;
116 | use std::num::NonZeroUsize;
117 | use std::ops::Add;
118 | use std::ops::Deref;
119 | use std::ops::DerefMut;
120 | use std::ops::Shr;
121 | use std::ops::Sub;
122 | use std::ops::SubAssign;
123 | use std::panic::AssertUnwindSafe;
124 | use std::panic::catch_unwind;
125 | use std::panic::RefUnwindSafe;
126 | use std::panic::resume_unwind;
127 | use std::panic::UnwindSafe;
128 | use std::ptr::drop_in_place;
129 | use std::ptr::NonNull;
130 | use std::ptr::null;
131 | use std::ptr::null_mut;
132 | use std::sync::Arc;
133 | use swiss_army_knife::get_unchecked::GetUnchecked;
134 | use swiss_army_knife::non_zero::new_non_null;
135 | use swiss_army_knife::non_zero::new_non_zero_u32;
136 | use swiss_army_knife::non_zero::new_non_zero_usize;
137 |
138 |
139 | /// Adapt various allocator traits to one another.
140 | pub mod adaptors;
141 |
142 |
143 | /// Allocators.
144 | pub mod allocators;
145 |
146 |
147 | /// Extensions useful for working with memory; not a stable part of the API of this crate.
148 | pub mod extensions;
149 |
150 |
151 | /// Memory sources.
152 | pub mod memory_sources;
153 |
154 |
155 | include!("CurrentAllocatorInUse.rs");
156 | include!("GloballyAllocated.rs");
157 | include!("GlobalThreadAndCoroutineSwitchableAllocator.rs");
158 | include!("GlobalThreadAndCoroutineSwitchableAllocatorInstance.rs");
159 | include!("LifetimeHint.rs");
160 | include!("LocalAllocator.rs");
161 | include!("LocalAllocatorMemoryUsage.rs");
162 | include!("MemoryAddress.rs");
163 | include!("MemoryRange.rs");
164 | include!("PerThreadState.rs");
165 |
166 |
167 | #[cfg(test)] global_thread_and_coroutine_switchable_allocator!(MyGlobalAllocator, BumpAllocator>, MultipleBinarySearchTreeAllocator, GlobalAllocToAllocatorAdaptor, GlobalAllocToAllocatorAdaptor(System));
168 |
--------------------------------------------------------------------------------
/src/GlobalThreadAndCoroutineSwitchableAllocator.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// A trait that all global thread-and-coroutine switchable allocators implement.
6 | ///
7 | /// Create a new instance using `GlobalThreadAndCoroutineSwitchableAllocatorInstance`.
8 | pub trait GlobalThreadAndCoroutineSwitchableAllocator: RefUnwindSafe + Sync + GlobalAlloc + Allocator + Debug + Alloc
9 | {
10 | /// Type of the coroutine local allocator.
11 | type CoroutineLocalAllocator: LocalAllocator>;
12 |
13 | /// Type of the thread-local allocator.
14 | type ThreadLocalAllocator: LocalAllocator;
15 |
16 | /// Type of the global allocator.
17 | type GlobalAllocator: Allocator;
18 |
19 | /// Thread-local allocator memory usage.
20 | ///
21 | /// Panics if the thread-local allocator has not been initialized.
22 | #[inline(always)]
23 | fn thread_local_allocator_memory_usage<LAMUU: FnOnce(&LocalAllocatorMemoryUsage) -> R, R>(&self, local_allocator_message_usage_user: LAMUU) -> R
24 | {
25 | local_allocator_message_usage_user(self.thread_local_allocator().expect("Thread local allocator needs to have been initialized").memory_usage())
26 | }
27 |
28 | #[doc(hidden)]
29 | #[inline(always)]
30 | fn coroutine_local_allocator(&self) -> Option<&Self::CoroutineLocalAllocator>
31 | {
32 | self.use_per_thread_state(|per_thread_state| match &per_thread_state.coroutine_local_allocator
33 | {
34 | &Some(ref x) => Some(unsafe { & * (x as *const Self::CoroutineLocalAllocator) }),
35 | &None => None,
36 | })
37 | }
38 |
39 | #[doc(hidden)]
40 | #[inline(always)]
41 | fn thread_local_allocator(&self) -> Option<&MemoryUsageTrackingThreadLocalAllocator>
42 | {
43 | self.use_per_thread_state(|per_thread_state| match &per_thread_state.thread_local_allocator
44 | {
45 | &Some(ref x) => Some(unsafe { & * (x as *const MemoryUsageTrackingThreadLocalAllocator) }),
46 | &None => None,
47 | })
48 | }
49 |
50 | #[doc(hidden)]
51 | fn global_allocator(&self) -> &Self::GlobalAllocator;
52 |
53 | /// Swaps the coroutine local allocator.
54 | ///
55 | /// Used before calling a coroutine.
56 | ///
57 | /// Used after calling a coroutine.
58 | #[inline(always)]
59 | fn swap_coroutine_local_allocator(&self, replacement: Option<Self::CoroutineLocalAllocator>) -> Option<Self::CoroutineLocalAllocator>
60 | {
61 | self.use_per_thread_state(|per_thread_state| replace(&mut per_thread_state.coroutine_local_allocator, replacement))
62 | }
63 |
64 | /// Initializes the thread-local allocator.
65 | #[inline(always)]
66 | fn initialize_thread_local_allocator(&self, thread_local_allocator: Self::ThreadLocalAllocator)
67 | {
68 | self.use_per_thread_state(|per_thread_state|
69 | {
70 | debug_assert!(per_thread_state.thread_local_allocator.is_none(), "Already initialized thread-local allocator");
71 |
72 | per_thread_state.thread_local_allocator = Some(MemoryUsageTrackingThreadLocalAllocator::new(thread_local_allocator))
73 | })
74 | }
75 |
76 | /// Drops the thread-local allocator.
77 | ///
78 | /// Panics in debug if no thread-local allocator has been initialized with `initialize_thread_local_allocator()`.
79 | #[inline(always)]
80 | fn drop_thread_local_allocator(&self)
81 | {
82 | self.use_per_thread_state(|per_thread_state|
83 | {
84 | debug_assert!(per_thread_state.thread_local_allocator.is_some(), "Already deinitialized thread-local allocator");
85 |
86 | per_thread_state.thread_local_allocator = None
87 | })
88 | }
89 |
90 | /// Replace the current allocator in use.
91 | ///
92 | /// Used internally, except when executing a coroutine: because a coroutine swaps its stack, it could not use a `callback_with_coroutine_local_allocator()` method even if one existed.
93 | ///
94 | /// ```
95 | /// /// Switch the current allocator in use to coroutine local and execute the callback; restore it after calling the callback unless a panic occurs.
96 | // #[inline(always)]
97 | // fn callback_with_coroutine_local_allocator R + UnwindSafe, R>(&self, callback: F) -> R
98 | // {
99 | // self.callback_with_different_current_allocator(CurrentAllocatorInUse::CoroutineLocal, callback)
100 | // }
101 | /// ```
102 | #[inline(always)]
103 | fn replace_current_allocator_in_use(&self, replacement: CurrentAllocatorInUse) -> CurrentAllocatorInUse
104 | {
105 | let was = self.save_current_allocator_in_use();
106 | self.restore_current_allocator_in_use(replacement);
107 | was
108 | }
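A sketch of the coroutine call pattern described above, using only the trait methods shown in this file; the coroutine closure and its allocator are assumed here purely for illustration, and in the crate itself this switching brackets the coroutine's stack swap rather than a plain closure.

    // Sketch only: `coroutine_allocator` and `run_coroutine` stand in for real coroutine machinery.
    fn call_coroutine<A: GlobalThreadAndCoroutineSwitchableAllocator>(
        allocator: &A,
        coroutine_allocator: A::CoroutineLocalAllocator,
        run_coroutine: impl FnOnce(),
    ) {
        // Install the coroutine's allocator and make it current before entering the coroutine.
        let previous_allocator = allocator.swap_coroutine_local_allocator(Some(coroutine_allocator));
        let previous_in_use = allocator.replace_current_allocator_in_use(CurrentAllocatorInUse::CoroutineLocal);

        run_coroutine();

        // Restore the caller's allocator state once the coroutine yields or finishes.
        allocator.restore_current_allocator_in_use(previous_in_use);
        allocator.swap_coroutine_local_allocator(previous_allocator);
    }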
109 |
110 | /// Save the current allocator in use.
111 | #[inline(always)]
112 | fn save_current_allocator_in_use(&self) -> CurrentAllocatorInUse
113 | {
114 | self.use_per_thread_state(|per_thread_state| per_thread_state.current_allocator_in_use)
115 | }
116 |
117 | /// Restore the current allocator in use.
118 | #[inline(always)]
119 | fn restore_current_allocator_in_use(&self, restore_to: CurrentAllocatorInUse)
120 | {
121 | self.use_per_thread_state(|per_thread_state| per_thread_state.current_allocator_in_use = restore_to)
122 | }
123 |
124 | /// Callback with the thread-local allocator, detailing changes in memory usage.
125 | ///
126 | /// Panics if the thread-local allocator has not been initialized.
127 | #[inline(always)]
128 | fn callback_with_thread_local_allocator_detailing_memory_usage<F: FnOnce() -> R + UnwindSafe, R>(&self, our_usage: &Cell<u64>, callback: F) -> R
129 | {
130 | let thread_local_allocator_memory_usage_before = self.thread_local_allocator_memory_usage(LocalAllocatorMemoryUsage::usage);
131 | let result = self.callback_with_thread_local_allocator(callback);
132 | let thread_local_allocator_memory_usage_after = self.thread_local_allocator_memory_usage(LocalAllocatorMemoryUsage::usage);
133 |
134 | let was_our_usage = our_usage.get();
135 | our_usage.set
136 | (
137 | if thread_local_allocator_memory_usage_after >= thread_local_allocator_memory_usage_before
138 | {
139 | was_our_usage + (thread_local_allocator_memory_usage_after - thread_local_allocator_memory_usage_before)
140 | }
141 | else
142 | {
143 | was_our_usage - (thread_local_allocator_memory_usage_before - thread_local_allocator_memory_usage_after)
144 | }
145 | );
146 |
147 | result
148 | }
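The branch above is just unsigned-safe delta accounting; a tiny standalone illustration of the arithmetic (plain `u64` values, not the crate's usage type):

    fn apply_delta(our_usage: u64, before: u64, after: u64) -> u64 {
        // Mirrors the branch above: add growth, subtract shrinkage, never forming a negative intermediate.
        if after >= before { our_usage + (after - before) } else { our_usage - (before - after) }
    }

    fn main() {
        assert_eq!(apply_delta(100, 40, 70), 130); // the callback allocated 30 more bytes
        assert_eq!(apply_delta(100, 70, 40), 70);  // the callback freed 30 bytes
    }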
149 |
150 | /// Switch the current allocator in use to thread-local and execute the callback; restore it after calling the callback unless a panic occurs.
151 | #[inline(always)]
152 | fn callback_with_thread_local_allocator<F: FnOnce() -> R + UnwindSafe, R>(&self, callback: F) -> R
153 | {
154 | self.callback_with_different_current_allocator(CurrentAllocatorInUse::ThreadLocal, callback)
155 | }
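A minimal usage sketch of the method above: assuming this switchable allocator is installed as the process's global allocator, allocations made inside the closure (the `Vec` here) are served by the thread-local allocator. The helper name and buffer size are illustrative only.

    // Sketch: run a closure with the thread-local allocator temporarily made current.
    fn build_scratch_buffer<A: GlobalThreadAndCoroutineSwitchableAllocator>(allocator: &A) -> Vec<u8> {
        allocator.callback_with_thread_local_allocator(|| vec![0u8; 4096])
    }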
156 |
157 | /// Switch the current allocator in use to global and execute the callback; restore it after calling the callback unless a panic occurs.
158 | #[inline(always)]
159 | fn callback_with_global_allocator<F: FnOnce() -> R + UnwindSafe, R>(&self, callback: F) -> R
160 | {
161 | self.callback_with_different_current_allocator(CurrentAllocatorInUse::Global, callback)
162 | }
163 |
164 | #[doc(hidden)]
165 | #[inline(always)]
166 | fn callback_with_different_current_allocator<F: FnOnce() -> R + UnwindSafe, R>(&self, different: CurrentAllocatorInUse, callback: F) -> R
167 | {
168 | let restore_to = self.save_current_allocator_in_use();
169 | self.restore_current_allocator_in_use(different);
170 | let result = match catch_unwind(callback)
171 | {
172 | Ok(result) => result,
173 | Err(panic) => resume_unwind(panic)
174 | };
175 | self.restore_current_allocator_in_use(restore_to);
176 | result
177 | }
178 |
179 | #[doc(hidden)]
180 | #[inline(always)]
181 | fn use_per_thread_state) -> R, R>(&self, user: User) -> R
182 | {
183 | unsafe { user(&mut * (self.per_thread_state())().as_ptr()) }
184 | }
185 |
186 | #[doc(hidden)]
187 | fn per_thread_state(&self) -> fn() -> NonNull>;
188 | }
189 |
--------------------------------------------------------------------------------
/src/allocators/BumpAllocator.rs:
--------------------------------------------------------------------------------
1 | // This file is part of context-allocator. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT. No part of context-allocator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2019 The developers of context-allocator. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/context-allocator/master/COPYRIGHT.
3 |
4 |
5 | /// This is a very simple bump allocator of minimal utility.
6 | ///
7 | /// It:-
8 | ///
9 | /// * Can efficiently shrink and grow (reallocate) for the most recent allocation made (useful when pushing to a RawVec, say).
10 | /// * Has no wrapping around at the end (but this could be achieved using a mirror ring buffer).
11 | /// * Has no ability to resize in place if dead space occurs before the next allocation because of alignment.
12 | ///
13 | /// Is suitable for use with short-lived coroutines, such as those used to make a DNS query.
14 | ///
15 | /// This allocator NEVER grows or shrinks its memory region.
16 | ///
17 | /// This allocator is not thread-safe.
18 | #[derive(Debug)]
19 | pub struct BumpAllocator<MS: MemorySource>
20 | {
21 | most_recent_allocation_pointer: Cell<MemoryAddress>,
22 | next_allocation_at_pointer: Cell<MemoryAddress>,
23 | ends_at_pointer: MemoryAddress,
24 |
25 | memory_source: MS,
26 | }
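For illustration only, the bump-pointer behaviour described above can be sketched without any of this crate's types; addresses are plain `usize` values and the region is never grown:

    use std::cell::Cell;

    // Conceptual sketch of bump allocation over a fixed region, including the
    // "roll back the most recent allocation" optimisation; not this crate's implementation.
    struct ToyBump {
        most_recent: Cell<usize>,
        next: Cell<usize>,
        end: usize,
    }

    impl ToyBump {
        fn new(start: usize, size: usize) -> Self {
            Self { most_recent: Cell::new(start), next: Cell::new(start), end: start + size }
        }

        fn allocate(&self, size: usize, align: usize) -> Option<usize> {
            debug_assert!(align.is_power_of_two());
            let start = (self.next.get() + align - 1) & !(align - 1); // round up to the alignment
            let end = start.checked_add(size)?;
            if end > self.end { return None; } // the region never grows
            self.most_recent.set(start);
            self.next.set(end);
            Some(start)
        }

        fn deallocate(&self, address: usize) {
            // Only the most recent allocation can be reclaimed; earlier ones are never freed.
            if address == self.most_recent.get() {
                self.next.set(self.most_recent.get());
            }
        }
    }

    fn main() {
        let bump = ToyBump::new(0x1000, 256);
        let a = bump.allocate(10, 8).unwrap();
        let b = bump.allocate(10, 8).unwrap();
        assert!(b > a);
        bump.deallocate(b); // rolls the bump pointer back to the start of `b`
        assert_eq!(bump.allocate(10, 8), Some(b));
    }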
27 |
28 | macro_rules! allocation_ends_at_pointer
29 | {
30 | ($self: ident, $non_zero_size: ident, $allocation_from: ident) =>
31 | {
32 | {
33 | // NOTE: This evil code is used so that we can use an if hint of `unlikely!` rather than an unhinted `match` for `result`.
34 | let allocation_ends_at_pointer: MemoryAddress =
35 | {
36 | let size = $non_zero_size.get();
37 | let pointer: *mut u8 = unsafe { transmute($allocation_from.checked_add(size)) };
38 | if unlikely!(pointer.is_null())
39 | {
40 | return Err(AllocError)
41 | }
42 | unsafe { transmute(pointer) }
43 | };
44 |
45 | if unlikely!(allocation_ends_at_pointer > $self.ends_at_pointer)
46 | {
47 | return Err(AllocError)
48 | }
49 |
50 | allocation_ends_at_pointer
51 | }
52 | }
53 | }
54 |
55 | impl<MS: MemorySource> Allocator for BumpAllocator<MS>
56 | {
57 | #[inline(always)]
58 | fn allocate(&self, non_zero_size: NonZeroUsize, non_zero_power_of_two_alignment: NonZeroUsize) -> Result<(NonNull<u8>, usize), AllocError>
59 | {
60 | debug_assert!(non_zero_power_of_two_alignment <= Self::MaximumPowerOfTwoAlignment, "non_zero_power_of_two_alignment `{}` exceeds `{}`", non_zero_power_of_two_alignment, Self::MaximumPowerOfTwoAlignment);
61 |
62 | let next_allocation_at_rounded_up_pointer = self.next_allocation_at_pointer.get().round_up_to_power_of_two(non_zero_power_of_two_alignment);
63 |
64 | self.most_recent_allocation_pointer.set(next_allocation_at_rounded_up_pointer);
65 | self.next_allocation_at_pointer.set(allocation_ends_at_pointer!(self, non_zero_size, next_allocation_at_rounded_up_pointer));
66 | let actual_size = (self.next_allocation_at_pointer.get().as_ptr() as usize) - (next_allocation_at_rounded_up_pointer.as_ptr() as usize);
67 |
68 | Ok((next_allocation_at_rounded_up_pointer, actual_size))
69 | }
70 |
71 | #[inline(always)]
72 | fn deallocate(&self, _non_zero_size: NonZeroUsize, _non_zero_power_of_two_alignment: NonZeroUsize, current_memory: NonNull<u8>)
73 | {
74 | if unlikely!(current_memory == self.most_recent_allocation_pointer.get())
75 | {
76 | self.next_allocation_at_pointer.set(self.most_recent_allocation_pointer.get())
77 | }
78 | }
79 |
80 | #[inline(always)]
81 | fn growing_reallocate(&self, non_zero_new_size: NonZeroUsize, non_zero_power_of_two_new_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, non_zero_power_of_two_current_alignment: NonZeroUsize, current_memory: NonNull<u8>, current_memory_can_not_be_moved: bool) -> Result<(NonNull<u8>, usize), AllocError>
82 | {
83 | debug_assert!(non_zero_power_of_two_new_alignment <= Self::MaximumPowerOfTwoAlignment, "non_zero_power_of_two_new_alignment `{}` exceeds `{}`", non_zero_power_of_two_new_alignment, Self::MaximumPowerOfTwoAlignment);
84 |
85 | if unlikely!(self.fits_at_current_location(non_zero_power_of_two_new_alignment, current_memory))
86 | {
87 | let last = self.most_recent_allocation_pointer.get();
88 | self.next_allocation_at_pointer.set(allocation_ends_at_pointer!(self, non_zero_new_size, current_memory));
89 |
90 | let actual_size = (self.next_allocation_at_pointer.get().as_ptr() as usize) - (last.as_ptr() as usize);
91 | Ok((current_memory, actual_size))
92 | }
93 | else
94 | {
95 | self.allocate_and_copy(non_zero_new_size, non_zero_power_of_two_new_alignment, non_zero_current_size, non_zero_power_of_two_current_alignment, current_memory, current_memory_can_not_be_moved, non_zero_current_size.get())
96 | }
97 | }
98 |
99 | #[inline(always)]
100 | fn shrinking_reallocate(&self, non_zero_new_size: NonZeroUsize, non_zero_power_of_two_new_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, non_zero_power_of_two_current_alignment: NonZeroUsize, current_memory: NonNull<u8>, current_memory_can_not_be_moved: bool) -> Result<(NonNull<u8>, usize), AllocError>
101 | {
102 | debug_assert!(non_zero_power_of_two_new_alignment <= Self::MaximumPowerOfTwoAlignment, "non_zero_power_of_two_new_alignment `{}` exceeds `{}`", non_zero_power_of_two_new_alignment, Self::MaximumPowerOfTwoAlignment);
103 |
104 | let new_size = non_zero_new_size.get();
105 | if unlikely!(self.fits_at_current_location(non_zero_power_of_two_new_alignment, current_memory))
106 | {
107 | self.next_allocation_at_pointer.set(current_memory.add(new_size));
108 | Ok((current_memory, new_size))
109 | }
110 | else if likely!(Self::new_alignment_can_be_accommodated(non_zero_power_of_two_new_alignment, current_memory))
111 | {
112 | Ok((current_memory, new_size))
113 | }
114 | else
115 | {
116 | self.allocate_and_copy(non_zero_new_size, non_zero_power_of_two_new_alignment, non_zero_current_size, non_zero_power_of_two_current_alignment, current_memory, current_memory_can_not_be_moved, non_zero_new_size.get())
117 | }
118 | }
119 | }
120 |
121 | impl<MS: MemorySource> LocalAllocator<MS> for BumpAllocator<MS>
122 | {
123 | #[inline(always)]
124 | fn new_local_allocator(memory_source: MS, _lifetime_hint: LifetimeHint, _block_size_hint: NonZeroUsize) -> Self
125 | {
126 | Self::new(memory_source)
127 | }
128 |
129 | #[inline(always)]
130 | fn memory_range(&self) -> MemoryRange
131 | {
132 | MemoryRange::new(self.allocations_start_from(), self.ends_at_pointer)
133 | }
134 | }
135 |
136 | impl<MS: MemorySource> BumpAllocator<MS>
137 | {
138 | const MaximumPowerOfTwoAlignment: NonZeroUsize = new_non_zero_usize(4096);
139 |
140 | /// New instance wrapping a block of memory.
141 | #[inline(always)]
142 | pub fn new(memory_source: MS) -> Self
143 | {
144 | let allocations_start_from = memory_source.allocations_start_from();
145 |
146 | Self
147 | {
148 | most_recent_allocation_pointer: Cell::new(allocations_start_from),
149 | next_allocation_at_pointer: Cell::new(allocations_start_from),
150 | ends_at_pointer: allocations_start_from.add_non_zero(memory_source.size()),
151 |
152 | memory_source,
153 | }
154 | }
155 |
156 | #[inline(always)]
157 | fn allocations_start_from(&self) -> MemoryAddress
158 | {
159 | self.ends_at_pointer.subtract_non_zero(self.memory_source.size())
160 | }
161 |
162 | #[inline(always)]
163 | fn fits_at_current_location(&self, non_zero_power_of_two_current_alignment: NonZeroUsize, current_memory: NonNull<u8>) -> bool
164 | {
165 | current_memory == self.most_recent_allocation_pointer.get() && Self::new_alignment_can_be_accommodated(non_zero_power_of_two_current_alignment, current_memory)
166 | }
167 |
168 | #[inline(always)]
169 | fn new_alignment_can_be_accommodated(non_zero_power_of_two_current_alignment: NonZeroUsize, current_memory: NonNull<u8>) -> bool
170 | {
171 | current_memory.is_aligned_to(non_zero_power_of_two_current_alignment)
172 | }
173 |
174 | #[inline(always)]
175 | fn allocate_and_copy(&self, non_zero_new_size: NonZeroUsize, non_zero_power_of_two_new_alignment: NonZeroUsize, non_zero_current_size: NonZeroUsize, non_zero_power_of_two_current_alignment: NonZeroUsize, current_memory: NonNull<u8>, current_memory_can_not_be_moved: bool, amount_to_copy: usize) -> Result<(NonNull<u8>, usize), AllocError>
176 | {
177 | if unlikely!(current_memory_can_not_be_moved)
178 | {
179 | return Err(AllocError)
180 | }
181 |
182 | let (new_memory, actual_size) = self.allocate(non_zero_new_size, non_zero_power_of_two_new_alignment)?;
183 | unsafe { new_memory.as_ptr().copy_from(current_memory.as_ptr(), amount_to_copy) };
184 | self.deallocate(non_zero_current_size, non_zero_power_of_two_current_alignment, current_memory);
185 | Ok((new_memory, actual_size))
186 | }
187 | }
188 |
--------------------------------------------------------------------------------
/src/GlobalThreadAndCoroutineSwitchableAllocatorInstance.rs:
--------------------------------------------------------------------------------
1 | // This file is part of linux-support. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-support/master/COPYRIGHT. No part of linux-support, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
2 | // Copyright © 2020 The developers of linux-support. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-support/master/COPYRIGHT.
3 |
4 |
5 | /// Used for a global allocator with the `#[global_allocator]` attribute.
6 | ///
7 | /// See documentation of `new()`.
8 | #[derive(Debug)]
9 | pub struct GlobalThreadAndCoroutineSwitchableAllocatorInstance>, ThreadLocalAllocator: LocalAllocator, GlobalAllocator: Allocator>
10 | {
11 | global_allocator: GlobalAllocator,
12 |
13 | per_thread_state: fn() -> NonNull>,
14 |
15 | marker: PhantomData<(CoroutineHeapSize, CoroutineLocalAllocator, ThreadLocalAllocator)>,
16 | }
17 |
18 | impl