├── .cargo └── config.toml ├── .github ├── FUNDING.yml ├── actions-rs │ └── grcov.yml └── workflows │ ├── docs.yml │ ├── grcov.yml │ ├── lint.yml │ ├── publish.yml │ └── test.yml ├── .gitignore ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── benches └── region.rs ├── rustfmt.toml └── src ├── affix.rs ├── callback_ref.rs ├── chunk.rs ├── fallback.rs ├── helper.rs ├── lib.rs ├── macros.rs ├── null.rs ├── proxy.rs ├── region ├── mod.rs └── raw.rs ├── segregate.rs └── stats.rs /.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | rustflags = ["-C", "target-cpu=native"] 3 | 4 | [profile.release] 5 | lto = true 6 | # codegen-units = 1 7 | panic = "abort" 8 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: TimDiekmann 2 | ko_fi: timdiekmann 3 | -------------------------------------------------------------------------------- /.github/actions-rs/grcov.yml: -------------------------------------------------------------------------------- 1 | branch: true 2 | ignore-not-existing: true 3 | llvm: true 4 | filter: covered 5 | output-type: lcov 6 | output-path: ./lcov.info 7 | source-dir: . 
8 | ignore: 9 | - "/*" 10 | - "C:/*" 11 | - "../*" 12 | excl-line: "#\\[derive\\(" 13 | excl-start: "mod tests \\{" 14 | excl-br-line: "#\\[derive\\(" 15 | excl-br-start: "mod tests \\{" 16 | -------------------------------------------------------------------------------- /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | name: Documentation 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | 8 | jobs: 9 | docs: 10 | name: Documentation 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Checkout source code 14 | uses: actions/checkout@v2 15 | with: 16 | persist-credentials: false 17 | 18 | - name: Install Rust 19 | uses: actions-rs/toolchain@v1 20 | with: 21 | profile: minimal 22 | toolchain: nightly 23 | override: true 24 | 25 | - name: Build documentation 26 | uses: actions-rs/cargo@v1 27 | with: 28 | command: doc 29 | args: --verbose --no-deps --all-features 30 | 31 | - name: Finalize documentation 32 | run: | 33 | CRATE_NAME=$(echo '${{ github.repository }}' | tr '[:upper:]' '[:lower:]' | cut -f2 -d"/") 34 | echo "" > target/doc/index.html 35 | touch target/doc/.nojekyll 36 | 37 | - name: Upload as artifact 38 | uses: actions/upload-artifact@v2 39 | with: 40 | name: Documentation 41 | path: target/doc 42 | 43 | - name: Deploy 44 | uses: JamesIves/github-pages-deploy-action@releases/v3 45 | with: 46 | ACCESS_TOKEN: ${{ secrets.GH_PAT }} 47 | BRANCH: gh-pages 48 | FOLDER: target/doc 49 | -------------------------------------------------------------------------------- /.github/workflows/grcov.yml: -------------------------------------------------------------------------------- 1 | name: Coverage 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | 9 | jobs: 10 | grcov: 11 | name: Coverage 12 | runs-on: ${{ matrix.os }} 13 | strategy: 14 | matrix: 15 | os: 16 | - ubuntu-latest 17 | # We don't have branches on OS currently 18 | # - macOS-latest 19 | # - windows-latest 20 | toolchain: 
21 | - nightly 22 | cargo_flags: 23 | - "--all-features" 24 | steps: 25 | - name: Checkout source code 26 | uses: actions/checkout@v2 27 | 28 | - name: Install Rust 29 | uses: actions-rs/toolchain@v1 30 | with: 31 | profile: minimal 32 | toolchain: ${{ matrix.toolchain }} 33 | override: true 34 | 35 | - name: Install grcov 36 | uses: actions-rs/install@v0.1 37 | with: 38 | crate: grcov 39 | version: latest 40 | use-tool-cache: true 41 | 42 | - name: Test 43 | uses: actions-rs/cargo@v1 44 | with: 45 | command: test 46 | args: --all --no-fail-fast ${{ matrix.cargo_flags }} 47 | env: 48 | CARGO_INCREMENTAL: "0" 49 | RUSTFLAGS: '-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort -Cdebug-assertions=off' 50 | RUSTDOCFLAGS: '-Zprofile -Ccodegen-units=1 -Copt-level=0 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests -Cpanic=abort -Cdebug-assertions=off' 51 | 52 | - name: Generate coverage data 53 | id: grcov 54 | # uses: actions-rs/grcov@v0.1 55 | run: | 56 | grcov target/debug/ \ 57 | --branch \ 58 | --llvm \ 59 | --source-dir . 
\ 60 | --output-path lcov.info \ 61 | --ignore='/**' \ 62 | --ignore='C:/**' \ 63 | --ignore='../**' \ 64 | --ignore-not-existing \ 65 | --excl-line "#\\[derive\\(" \ 66 | --excl-br-line "#\\[derive\\(" \ 67 | --excl-start "#\\[cfg\\(test\\)\\]" \ 68 | --excl-br-start "#\\[cfg\\(test\\)\\]" \ 69 | --commit-sha ${{ github.sha }} \ 70 | --service-job-id ${{ github.job }} \ 71 | --service-name "GitHub Actions" \ 72 | --service-number ${{ github.run_id }} 73 | 74 | - name: Upload coverage as artifact 75 | uses: actions/upload-artifact@v2 76 | with: 77 | name: lcov.info 78 | # path: ${{ steps.grcov.outputs.report }} 79 | path: lcov.info 80 | 81 | - name: Upload coverage to codecov.io 82 | uses: codecov/codecov-action@v1 83 | with: 84 | # file: ${{ steps.grcov.outputs.report }} 85 | file: lcov.info 86 | fail_ci_if_error: true 87 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | 9 | defaults: 10 | run: 11 | shell: bash 12 | 13 | env: 14 | CLIPPY_PARAMS: -W clippy::all -W clippy::pedantic -W clippy::nursery -W clippy::cargo 15 | 16 | jobs: 17 | rustfmt: 18 | name: rustfmt 19 | runs-on: ubuntu-latest 20 | steps: 21 | - name: Checkout source code 22 | uses: actions/checkout@v2 23 | 24 | - name: Install Rust 25 | uses: actions-rs/toolchain@v1 26 | with: 27 | profile: minimal 28 | toolchain: nightly 29 | override: true 30 | components: rustfmt 31 | 32 | - name: Run rustfmt 33 | uses: actions-rs/cargo@v1 34 | with: 35 | command: fmt 36 | args: --all -- --check --verbose 37 | 38 | # tomlfmt: 39 | # name: tomlfmt 40 | # runs-on: ubuntu-latest 41 | # steps: 42 | # - name: Checkout source code 43 | # uses: actions/checkout@master 44 | 45 | # - name: Install Rust 46 | # uses: actions-rs/toolchain@v1 47 | # with: 48 | # profile: minimal 49 | # toolchain: 
nightly 50 | # override: true 51 | 52 | # - name: Install tomlfmt 53 | # uses: actions-rs/install@v0.1 54 | # with: 55 | # crate: cargo-tomlfmt 56 | # version: latest 57 | # use-tool-cache: true 58 | 59 | # - name: Run Tomlfmt 60 | # uses: actions-rs/cargo@v1 61 | # with: 62 | # command: tomlfmt 63 | # args: --dryrun 64 | 65 | clippy: 66 | name: clippy 67 | runs-on: ubuntu-latest 68 | steps: 69 | - name: Checkout source code 70 | uses: actions/checkout@master 71 | 72 | - name: Install Rust 73 | uses: actions-rs/toolchain@v1 74 | with: 75 | profile: minimal 76 | toolchain: nightly 77 | override: true 78 | components: clippy 79 | 80 | - name: Run clippy 81 | uses: actions-rs/clippy-check@v1 82 | with: 83 | token: ${{ secrets.GITHUB_TOKEN }} 84 | args: --all-features -- ${{ env.CLIPPY_PARAMS }} 85 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish 2 | 3 | on: 4 | push: 5 | tags: 6 | - "*" 7 | 8 | jobs: 9 | test: 10 | name: Publish 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Checkout source code 14 | uses: actions/checkout@v2 15 | 16 | - name: Install Rust 17 | uses: actions-rs/toolchain@v1 18 | with: 19 | profile: minimal 20 | toolchain: nightly 21 | override: true 22 | 23 | - name: Publish 24 | uses: actions-rs/cargo@v1 25 | with: 26 | command: publish 27 | args: --verbose --all-features --token ${{ secrets.CARGO_TOKEN }} 28 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | 9 | jobs: 10 | test: 11 | name: Test 12 | runs-on: ${{ matrix.os }} 13 | strategy: 14 | matrix: 15 | os: 16 | - ubuntu-latest 17 | - windows-latest 18 | - macOS-latest 19 | toolchain: 20 | - nightly 21 | 
cargo_flags: 22 | - "--no-default-features" 23 | - "--all-features" 24 | steps: 25 | - name: Checkout source code 26 | uses: actions/checkout@v2 27 | 28 | - name: Install Rust 29 | uses: actions-rs/toolchain@v1 30 | with: 31 | profile: minimal 32 | toolchain: ${{ matrix.toolchain }} 33 | override: true 34 | 35 | - name: Build 36 | uses: actions-rs/cargo@v1 37 | with: 38 | command: build 39 | args: --all ${{ matrix.cargo_flags }} 40 | 41 | - name: Test 42 | uses: actions-rs/cargo@v1 43 | with: 44 | command: test 45 | args: --all ${{ matrix.cargo_flags }} 46 | 47 | 48 | miri: 49 | name: miri 50 | runs-on: ubuntu-latest 51 | steps: 52 | - name: Checkout source code 53 | uses: actions/checkout@master 54 | 55 | - name: Install Rust 56 | uses: actions-rs/toolchain@v1 57 | with: 58 | profile: minimal 59 | toolchain: nightly 60 | override: true 61 | components: miri 62 | 63 | - name: Setup miri 64 | run: cargo miri setup 65 | 66 | - name: Run miri 67 | run: cargo miri test --all-features 68 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | Cargo.lock 3 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## [v0.5](https://docs.rs/alloc-compose/0.5) 2 | 3 | - Add `ReallocInPlace` trait 4 | - Fix a bug in `Segregate` returning the wrong size 5 | 6 | **Breaking Changes:** 7 | - Update to nightly-2020-08-10, which uses `NonNull<[u8]>` and bans `InPlace` reallocations 8 | - Add `AllocAll` trait and move some methods from `Region` into that trait 9 | - Change `Region` to require `[MaybeUninit]` rather than `[u8]` 10 | - Remove `MemoryMarker` 11 | 12 | ## [v0.4](https://docs.rs/alloc-compose/0.4) 13 | 14 | - **Breaking Change** Using unified naming scheme 15 | - **Breaking Change** Change `CallbackRef` to listen 
on `before_` and `after_` events 16 | - Greatly improve documentation of `Affix` 17 | 18 | ### [v0.3.1](https://docs.rs/alloc-compose/0.3) 19 | 20 | - Add more documentation 21 | - Add more tests 22 | 23 | ## [v0.3.0](https://docs.rs/alloc-compose/0.3) 24 | 25 | - **Breaking Change** Use `const_generics` in `SegregateAlloc` 26 | - Add `AffixAlloc`, `ChunkAlloc`, and `MemoryMarker` 27 | - Add more tests 28 | 29 | ## [v0.2](https://docs.rs/alloc-compose/0.2) 30 | 31 | - **Breaking Change** Use `core::alloc` instead of `alloc_wg` 32 | - Add `Region`, `CallbackRef`, `Proxy`, and `stats` 33 | - Add more tests 34 | 35 | ## [v0.1](https://docs.rs/alloc-compose/0.1) 36 | 37 | - Initial release: `Owns`, `NullAlloc`, `FallbackAlloc`, and `SegregateAlloc` 38 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 
11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies both within project spaces and in public spaces 49 | when an individual is representing the project or its community. Examples of 50 | representing a project or community include using an official project e-mail 51 | address, posting via an official social media account, or acting as an appointed 52 | representative at an online or offline event. 
Representation of a project may be 53 | further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by submitting an issue. All complaints will be reviewed and investigated 59 | and will result in a response that is deemed necessary and appropriate to the 60 | circumstances. The project team is obligated to maintain confidentiality with 61 | regard to the reporter of an incident. Further details of specific enforcement 62 | policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | [homepage]: https://www.contributor-covenant.org 74 | 75 | For answers to common questions about this code of conduct, see 76 | https://www.contributor-covenant.org/faq 77 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "alloc-compose" 3 | version = "0.5.0" 4 | authors = ["Tim Diekmann "] 5 | edition = "2018" 6 | description = "Composable allocator structures for plugging together more powerful allocators" 7 | repository = "https://github.com/TimDiekmann/alloc-compose" 8 | documentation = "https://docs.rs/alloc-compose" 9 | readme = "README.md" 10 | keywords = ["alloc"] 11 | categories = ["no-std"] 12 | license = "MIT OR Apache-2.0" 13 | exclude = [".github/**"] 14 | 15 | [features] 16 | alloc = [] 17 | default = ["alloc"] 18 | intrinsics = [] 19 | 20 | [dev-dependencies] 21 | criterion = { version = "0.3", features = 
["real_blackbox"] } 22 | 23 | [[bench]] 24 | name = "region" 25 | harness = false 26 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Permission is hereby granted, free of charge, to any 2 | person obtaining a copy of this software and associated 3 | documentation files (the "Software"), to deal in the 4 | Software without restriction, including without 5 | limitation the rights to use, copy, modify, merge, 6 | publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software 8 | is furnished to do so, subject to the following 9 | conditions: 10 | 11 | The above copyright notice and this permission notice 12 | shall be included in all copies or substantial portions 13 | of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 16 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 17 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 18 | PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT 19 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 20 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 22 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 23 | DEALINGS IN THE SOFTWARE. 24 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Test Status](https://github.com/TimDiekmann/alloc-compose/workflows/Test/badge.svg?event=push&branch=master)](https://github.com/TimDiekmann/alloc-compose/actions?query=workflow%3ATest+event%3Apush+branch%3Amaster) 2 | [![Coverage Status](https://codecov.io/gh/TimDiekmann/alloc-compose/branch/master/graph/badge.svg)](https://codecov.io/gh/TimDiekmann/alloc-compose) 3 | [![Docs master](https://img.shields.io/static/v1?label=docs&message=master&color=5479ab)](https://timdiekmann.github.io/alloc-compose/alloc_compose/index.html) 4 | [![Docs.rs](https://docs.rs/alloc-compose/badge.svg)](https://docs.rs/alloc-compose) 5 | [![Crates.io](https://img.shields.io/crates/v/alloc-compose)](https://crates.io/crates/alloc-compose) 6 | ![Crates.io](https://img.shields.io/crates/l/alloc-compose) 7 | 8 | --- 9 | 10 | Important note 11 | -------------- 12 | 13 | Due to some changes to `AllocRef` it was hard to keep this crate updated. I'll re-add the functionality from v0.5.0 from time to time. Most things have to be refactored as `AllocRef`s reallocation methods now take two layouts. 14 | 15 | The most interesting part as of now is probably `Region` and its variants. 16 | In future versions, composable blocks like `AffixAllocator` or `Proxy` will be added. 17 | 18 | --- 19 | 20 | Composable allocator structures for plugging together more powerful allocators. 21 | 22 | `alloc-compose` relies on [`AllocRef`] as allocator trait. Until `AllocRef` has been stabilized, this crate requires a nightly compiler.
23 | 24 | 25 | The design of composable allocators is inspired by 26 | [`std::allocator` Is to Allocation what `std::vector` Is to Vexation][vid] by Andrei 27 | Alexandrescu and the [Phobos Standard Library][phobos] of the [D Programming Language][D]. 28 | 29 | [`AllocRef`]: https://doc.rust-lang.org/nightly/core/alloc/trait.AllocRef.html 30 | [vid]: https://www.youtube.com/watch?v=LIb3L4vKZ7U 31 | [phobos]: https://github.com/dlang/phobos 32 | [D]: https://dlang.org/ 33 | 34 | License 35 | ------- 36 | 37 | Alloc-Compose is distributed under the terms of both the MIT license and the Apache License (Version 2.0). 38 | 39 | See [LICENSE-APACHE](https://github.com/TimDiekmann/alloc-compose/blob/master/LICENSE-APACHE) and [LICENSE-MIT](https://github.com/TimDiekmann/alloc-compose/blob/master/LICENSE-MIT) for details. 40 | -------------------------------------------------------------------------------- /benches/region.rs: -------------------------------------------------------------------------------- 1 | #![feature(allocator_api)] 2 | 3 | use alloc_compose::{region::*, AllocateAll}; 4 | use core::{ 5 | alloc::{AllocRef, Layout}, 6 | mem::MaybeUninit, 7 | }; 8 | 9 | use criterion::{black_box, criterion_group, criterion_main, Bencher, Criterion}; 10 | 11 | fn regions(c: &mut Criterion) { 12 | let mut group = c.benchmark_group("region"); 13 | let mut data = [MaybeUninit::uninit(); 1024 * 1024]; 14 | 15 | #[inline] 16 | fn run(region: impl AllocRef + AllocateAll, b: &mut Bencher) { 17 | b.iter(|| { 18 | for _ in 0..16 { 19 | region.alloc(black_box(Layout::new::<[u8; 16]>())).unwrap(); 20 | } 21 | region.deallocate_all(); 22 | }) 23 | } 24 | 25 | group.bench_function("Region", |b| run(Region::new(&mut data), b)); 26 | group.bench_function("SharedRegion", |b| run(SharedRegion::new(&mut data), b)); 27 | group.bench_function("IntrusiveRegion", |b| { 28 | run(IntrusiveRegion::new(&mut data), b) 29 | }); 30 | group.bench_function("&Region", |b| run(&Region::new(&mut data), b)); 
31 | 32 | group.finish(); 33 | } 34 | 35 | criterion_group! { 36 | name = benches; 37 | config = Criterion::default().sample_size(1000).measurement_time(std::time::Duration::from_secs(3)); 38 | targets = regions 39 | } 40 | criterion_main!(benches); 41 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | # General 2 | edition = "2018" 3 | version = "Two" 4 | unstable_features = true 5 | 6 | # Line breaking 7 | newline_style = "Unix" 8 | 9 | # Comments 10 | format_code_in_doc_comments = true 11 | 12 | # Formatting 13 | imports_layout = "HorizontalVertical" 14 | format_strings = true 15 | merge_derives = true 16 | format_macro_matchers = true 17 | overflow_delimited_expr = true 18 | 19 | # Reordering 20 | merge_imports = true 21 | 22 | # Shorthands 23 | use_try_shorthand = true 24 | use_field_init_shorthand = true 25 | condense_wildcard_suffixes = true 26 | -------------------------------------------------------------------------------- /src/affix.rs: -------------------------------------------------------------------------------- 1 | use crate::{helper::AllocInit, AllocAll, ReallocInPlace}; 2 | use core::{ 3 | alloc::{AllocErr, AllocRef, Layout}, 4 | fmt, 5 | marker::PhantomData, 6 | mem::{self, MaybeUninit}, 7 | ptr::{self, NonNull}, 8 | }; 9 | 10 | /// An allocator that requests some extra memory from the parent allocator for storing 11 | /// a prefix and/or a suffix. 12 | /// 13 | /// The alignment of the memory block is the maximum of the alignment of `Prefix` and the requested 14 | /// alignment. This may introduce an unused padding between `Prefix` and the returned memory. 15 | /// 16 | /// To get a pointer to the prefix or the suffix, the [`prefix()`] and [`suffix()`] may be called. 
17 | /// 18 | /// [`prefix()`]: Self::prefix 19 | /// [`suffix()`]: Self::suffix 20 | /// 21 | /// # Performance 22 | /// 23 | /// Generally it's faster to calculate the pointer to the prefix than the pointer to the suffix, as 24 | /// the extended layout of `Prefix` and the requested memory is needed in order to calculate the 25 | /// `Suffix` pointer. Additionally, in most cases it's recommended to use a prefix over a suffix for 26 | /// a more efficient use of memory. However, small prefixes blunt the alignment so if a large 27 | /// alignment with a small affix is needed, suffixes may be the better option. 28 | /// 29 | /// For layouts known at compile time the compiler is able to optimize away almost all calculations. 30 | /// 31 | /// # Examples 32 | /// 33 | /// `Prefix` is `12` bytes in size and has an alignment requirement of `4` bytes. `Suffix` is `16` 34 | /// bytes in size, the requested layout requires `28` bytes, both with an alignment of `8` bytes. 35 | /// The parent allocator returns memory blocks of `128` bytes to demonstrate the behavior on 36 | /// overallocating. 
37 | /// ``` 38 | /// #![feature(allocator_api)] 39 | /// 40 | /// use alloc_compose::{Affix, Chunk}; 41 | /// use std::alloc::{Layout, System}; 42 | /// 43 | /// type Prefix = [u32; 3]; 44 | /// # assert_eq!(core::mem::size_of::(), 12); 45 | /// # assert_eq!(core::mem::align_of::(), 4); 46 | /// type Suffix = [u64; 2]; 47 | /// # assert_eq!(core::mem::size_of::(), 16); 48 | /// # assert_eq!(core::mem::align_of::(), 8); 49 | /// type Alloc = Affix, Prefix, Suffix>; 50 | /// 51 | /// let layout = Layout::from_size_align(28, 8)?; 52 | /// # Ok::<(), core::alloc::LayoutErr>(()) 53 | /// ``` 54 | /// 55 | /// The memory layout differs depending on `Prefix` and `Suffix`: 56 | /// 57 | /// ``` 58 | /// #![feature(slice_ptr_get, slice_ptr_len)] 59 | /// # #![feature(allocator_api)] 60 | /// # use alloc_compose::{Affix, Chunk}; 61 | /// # use std::alloc::{Layout, System}; 62 | /// 63 | /// use core::alloc::AllocRef; 64 | /// # type Prefix = [u32; 3]; 65 | /// # type Suffix = [u64; 2]; 66 | /// # type Alloc = Affix, Prefix, Suffix>; 67 | /// # let layout = Layout::from_size_align(28, 8).unwrap(); 68 | /// 69 | /// let mut my_alloc = Alloc::default(); 70 | /// 71 | /// // 0 12 16 44 48 64 128 72 | /// // ╞═ Prefix ══╡ ╞════ requested memory ═════╡ ╞═══ Suffix ════╡ │ 73 | /// // ┢┳┳┳┳┳┳┳┳┳┳┳╅┬┬┬╆┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳╈┳┳┳╈┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳╅┬┬╌╌╌╌┬┬┤ 74 | /// // ┡┻┻┻┻┻┻┻┻┻┻┻┹┴┴┴╄┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻╇┻┻┻╇┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┹┴┴╌╌╌╌┴┴┘ 75 | /// // │ ├┄┄┄┄┄┄ layout.size() ┄┄┄┄┄┄┘ │ 76 | /// // │ ├┄┄┄┄┄┄┄┄ memory.len() ┄┄┄┄┄┄┄┄┄┤ 77 | /// // └→ prefix() └→ memory └→ suffix() 78 | /// let memory = my_alloc.alloc(layout)?; 79 | /// 80 | /// assert_eq!(memory.len(), 32); 81 | /// unsafe { 82 | /// assert_eq!( 83 | /// Alloc::prefix(memory.as_non_null_ptr(), layout).cast().as_ptr(), 84 | /// memory.as_mut_ptr().sub(16) 85 | /// ); 86 | /// assert_eq!( 87 | /// Alloc::suffix(memory.as_non_null_ptr(), layout).cast().as_ptr(), 88 | /// memory.as_mut_ptr().add(32) 89 | /// ); 90 | 
/// } 91 | /// # Ok::<(), core::alloc::AllocErr>(()) 92 | /// ``` 93 | /// 94 | /// The memory between `Prefix` and the requested memory is unused. If there is a padding between 95 | /// the requested memory and the suffix, this can be used as extra memory for the allocation. The 96 | /// memory after `Suffix` is also unused as `Suffix` is typed. This results in `68` bytes unused 97 | /// memory. 98 | /// 99 | /// If `Suffix` is a zero-sized type, the space after the requested memory block can be used: 100 | /// 101 | /// ``` 102 | /// # #![feature(allocator_api, slice_ptr_get, slice_ptr_len)] 103 | /// # use alloc_compose::{Affix, Chunk}; 104 | /// # use std::alloc::{Layout, System, AllocRef}; 105 | /// use core::ptr::NonNull; 106 | /// # type Prefix = [u32; 3]; 107 | /// 108 | /// // For convenience, the suffix can be ommitted 109 | /// type Alloc = Affix, Prefix>; 110 | /// # let layout = Layout::from_size_align(28, 8).unwrap(); 111 | /// 112 | /// let mut my_alloc = Alloc::default(); 113 | /// 114 | /// // 0 12 16 44 48 64 128 115 | /// // ╞═ Prefix ══╡ ╞════ requested memory ═════╡ │ │ │ 116 | /// // ┢┳┳┳┳┳┳┳┳┳┳┳╅┬┬┬╆┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳╈┳┳┳╈┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳╈┳┳╍╍╍╍┳┳┪ 117 | /// // ┡┻┻┻┻┻┻┻┻┻┻┻┹┴┴┴╄┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻╇┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻╍╍╍╍┻┻┩ 118 | /// // │ ├┄┄┄┄┄┄ layout.size() ┄┄┄┄┄┄┘ │ 119 | /// // │ ├┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄ memory.len() ┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┘ 120 | /// // └→ prefix() └→ memory 121 | /// let memory = my_alloc.alloc(layout)?; 122 | /// 123 | /// assert_eq!(memory.len(), 112); 124 | /// unsafe { 125 | /// assert_eq!( 126 | /// Alloc::prefix(memory.as_non_null_ptr(), layout).cast().as_ptr(), 127 | /// memory.as_mut_ptr().sub(16) 128 | /// ); 129 | /// assert_eq!(Alloc::suffix(memory.as_non_null_ptr(), layout), NonNull::dangling()); 130 | /// } 131 | /// # Ok::<(), core::alloc::AllocErr>(()) 132 | /// ``` 133 | /// 134 | /// This results in only `4` bytes unused memory. 
135 | /// 136 | /// If `Prefix` is a zero-sized type, this results in a waste of memory: 137 | /// 138 | /// ``` 139 | /// # #![feature(allocator_api, slice_ptr_get, slice_ptr_len)] 140 | /// # use alloc_compose::{Affix, Chunk}; 141 | /// # use std::alloc::{Layout, System, AllocRef}; 142 | /// # use core::ptr::NonNull; 143 | /// # type Suffix = [u64; 2]; 144 | /// type Alloc = Affix, (), Suffix>; 145 | /// # let layout = Layout::from_size_align(28, 8).unwrap(); 146 | /// 147 | /// let mut my_alloc = Alloc::default(); 148 | /// 149 | /// // 0 28 32 48 64 128 150 | /// // ╞════ requested memory ═════╡ ╞═══ Suffix ════╡ │ │ 151 | /// // ┢┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳╈┳┳┳╈┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳╅┬┬┬┬┬┬┬┬┬┬┬┬┬┬┬┼┬┬╌╌╌╌┬┬┤ 152 | /// // ┡┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻╇┻┻┻╇┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┹┴┴┴┴┴┴┴┴┴┴┴┴┴┴┴┴┴┴╌╌╌╌┴┴┘ 153 | /// // ├┄┄┄┄┄┄ layout.size() ┄┄┄┄┄┄┘ │ 154 | /// // ├┄┄┄┄┄┄┄┄ memory.len() ┄┄┄┄┄┄┄┄┄┤ 155 | /// // └→ memory └→ suffix() 156 | /// let memory = my_alloc.alloc(layout)?; 157 | /// 158 | /// assert_eq!(memory.len(), 32); 159 | /// unsafe { 160 | /// assert_eq!(Alloc::prefix(memory.as_non_null_ptr(), layout), NonNull::dangling()); 161 | /// assert_eq!( 162 | /// Alloc::suffix(memory.as_non_null_ptr(), layout).cast().as_ptr(), 163 | /// memory.as_mut_ptr().add(32) 164 | /// ); 165 | /// } 166 | /// # Ok::<(), core::alloc::AllocErr>(()) 167 | /// ``` 168 | /// 169 | /// This results in 80 bytes unused memory. As can be seen, if possible a prefix should be 170 | /// preferred to the suffix. 
171 | /// 172 | /// If both, `Prefix` and `Suffix` are ZSTs, this behaves like the parent allocator: 173 | /// 174 | /// ``` 175 | /// # #![feature(allocator_api, slice_ptr_get, slice_ptr_len)] 176 | /// # use alloc_compose::{Affix, Chunk}; 177 | /// # use std::alloc::{Layout, System, AllocRef}; 178 | /// # use core::ptr::NonNull; 179 | /// # type Suffix = [u64; 2]; 180 | /// type Alloc = Affix, (), ()>; 181 | /// # let layout = Layout::from_size_align(28, 8).unwrap(); 182 | /// 183 | /// let mut my_alloc = Alloc::default(); 184 | /// 185 | /// // 0 28 32 48 64 128 186 | /// // ╞════ requested memory ═════╡ │ │ │ │ 187 | /// // ┢┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳╈┳┳┳╈┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳╈┳┳┳┳┳┳┳┳┳┳┳┳┳┳┳╈┳┳╍╍╍╍┳┳┪ 188 | /// // ┡┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻╇┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻┻╍╍╍╍┻┻┩ 189 | /// // ├┄┄┄┄┄┄ layout.size() ┄┄┄┄┄┄┘ │ 190 | /// // ├┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄ memory.len() ┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┄┘ 191 | /// // └→ memory 192 | /// let memory = my_alloc.alloc(layout)?; 193 | /// 194 | /// assert_eq!(memory.len(), 128); 195 | /// unsafe { 196 | /// assert_eq!(Alloc::prefix(memory.as_non_null_ptr(), layout), NonNull::dangling()); 197 | /// assert_eq!(Alloc::suffix(memory.as_non_null_ptr(), layout), NonNull::dangling()); 198 | /// } 199 | /// # Ok::<(), core::alloc::AllocErr>(()) 200 | /// ``` 201 | pub struct Affix { 202 | /// The parent allocator to be used as backend 203 | pub parent: Alloc, 204 | _prefix: PhantomData, 205 | _suffix: PhantomData, 206 | } 207 | 208 | impl fmt::Debug for Affix { 209 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 210 | f.debug_struct("Affix") 211 | .field("parent", &self.parent) 212 | .finish() 213 | } 214 | } 215 | 216 | impl Default for Affix { 217 | fn default() -> Self { 218 | Self::new(Alloc::default()) 219 | } 220 | } 221 | 222 | impl Clone for Affix { 223 | fn clone(&self) -> Self { 224 | Self::new(self.parent.clone()) 225 | } 226 | } 227 | 228 | impl Copy for Affix {} 229 | 230 | impl PartialEq 
for Affix { 231 | fn eq(&self, other: &Self) -> bool { 232 | self.parent.eq(&other.parent) 233 | } 234 | } 235 | 236 | impl Eq for Affix {} 237 | 238 | unsafe impl Send for Affix {} 239 | unsafe impl Sync for Affix {} 240 | impl Unpin for Affix {} 241 | 242 | impl Affix { 243 | pub const fn new(parent: Alloc) -> Self { 244 | Self { 245 | parent, 246 | _prefix: PhantomData, 247 | _suffix: PhantomData, 248 | } 249 | } 250 | 251 | fn allocation_layout(layout: Layout) -> Option<(Layout, usize, usize)> { 252 | let (layout, prefix_offset) = Layout::new::().extend(layout).ok()?; 253 | let (layout, suffix_offset) = layout.extend(Layout::new::()).ok()?; 254 | Some((layout, prefix_offset, suffix_offset)) 255 | } 256 | 257 | /// Returns a pointer to the prefix. 258 | /// 259 | /// # Safety 260 | /// 261 | /// * `ptr` must denote a block of memory *[currently allocated]* via this allocator, and 262 | /// * `layout` must *[fit]* that block of memory. 263 | /// 264 | /// [currently allocated]: https://doc.rust-lang.org/nightly/core/alloc/trait.AllocRef.html#currently-allocated-memory 265 | /// [fit]: https://doc.rust-lang.org/nightly/core/alloc/trait.AllocRef.html#memory-fitting 266 | pub unsafe fn prefix(ptr: NonNull, layout: Layout) -> NonNull { 267 | if mem::size_of::() == 0 { 268 | NonNull::dangling() 269 | } else { 270 | let (_, prefix, _) = Self::allocation_layout(layout).unwrap(); 271 | NonNull::new_unchecked(ptr.as_ptr().sub(prefix)).cast() 272 | } 273 | } 274 | 275 | /// Returns a pointer to the suffix. 276 | /// 277 | /// # Safety 278 | /// 279 | /// * `ptr` must denote a block of memory *[currently allocated]* via this allocator, and 280 | /// * `layout` must *[fit]* that block of memory. 
281 | /// 282 | /// [currently allocated]: https://doc.rust-lang.org/nightly/core/alloc/trait.AllocRef.html#currently-allocated-memory 283 | /// [fit]: https://doc.rust-lang.org/nightly/core/alloc/trait.AllocRef.html#memory-fitting 284 | pub unsafe fn suffix(ptr: NonNull, layout: Layout) -> NonNull { 285 | if mem::size_of::() == 0 { 286 | NonNull::dangling() 287 | } else { 288 | let (_, prefix, suffix) = Self::allocation_layout(layout).unwrap(); 289 | NonNull::new_unchecked(ptr.as_ptr().add(suffix - prefix)).cast() 290 | } 291 | } 292 | 293 | fn create_ptr(ptr: NonNull<[u8]>, offset_prefix: usize, offset_suffix: usize) -> NonNull<[u8]> { 294 | let len = if mem::size_of::() == 0 { 295 | ptr.len() - offset_prefix 296 | } else { 297 | offset_suffix - offset_prefix 298 | }; 299 | let ptr = unsafe { NonNull::new_unchecked(ptr.as_mut_ptr().add(offset_prefix)) }; 300 | 301 | NonNull::slice_from_raw_parts(ptr, len) 302 | } 303 | 304 | #[inline] 305 | fn alloc_impl( 306 | layout: Layout, 307 | alloc: impl FnOnce(Layout) -> Result, AllocErr>, 308 | ) -> Result, AllocErr> { 309 | let (layout, offset_prefix, offset_suffix) = 310 | Self::allocation_layout(layout).ok_or(AllocErr)?; 311 | 312 | Ok(Self::create_ptr( 313 | alloc(layout)?, 314 | offset_prefix, 315 | offset_suffix, 316 | )) 317 | } 318 | 319 | #[inline] 320 | unsafe fn grow_impl( 321 | old_ptr: NonNull, 322 | old_layout: Layout, 323 | new_size: usize, 324 | init: AllocInit, 325 | grow: impl FnOnce(NonNull, Layout, usize) -> Result, AllocErr>, 326 | ) -> Result, AllocErr> { 327 | let (old_alloc_layout, old_offset_prefix, old_offset_suffix) = 328 | Self::allocation_layout(old_layout).ok_or(AllocErr)?; 329 | let old_base_ptr = NonNull::new_unchecked(old_ptr.as_ptr().sub(old_offset_prefix)); 330 | 331 | let suffix = Self::suffix(old_ptr, old_layout) 332 | .cast::>() 333 | .as_ptr() 334 | .read(); 335 | 336 | let new_layout = 337 | Layout::from_size_align(new_size, old_layout.align()).map_err(|_| AllocErr)?; 338 | let 
(new_alloc_layout, new_offset_prefix, new_offset_suffix) = 339 | Self::allocation_layout(new_layout).ok_or(AllocErr)?; 340 | 341 | let new_base_ptr = grow(old_base_ptr, old_alloc_layout, new_alloc_layout.size())?; 342 | 343 | if init == AllocInit::Zeroed { 344 | ptr::write_bytes( 345 | new_base_ptr 346 | .as_non_null_ptr() 347 | .as_ptr() 348 | .add(old_offset_suffix), 349 | 0, 350 | mem::size_of::(), 351 | ); 352 | } 353 | 354 | let new_ptr = Self::create_ptr(new_base_ptr, new_offset_prefix, new_offset_suffix); 355 | 356 | Self::suffix(new_ptr.as_non_null_ptr(), new_layout) 357 | .cast::>() 358 | .as_ptr() 359 | .write(suffix); 360 | 361 | Ok(new_ptr) 362 | } 363 | 364 | #[inline] 365 | unsafe fn shrink_impl( 366 | old_ptr: NonNull, 367 | old_layout: Layout, 368 | new_size: usize, 369 | shrink: impl FnOnce(NonNull, Layout, usize) -> Result, AllocErr>, 370 | ) -> Result, AllocErr> { 371 | let (old_alloc_layout, old_offset_prefix, _) = 372 | Self::allocation_layout(old_layout).ok_or(AllocErr)?; 373 | let old_base_ptr = NonNull::new_unchecked(old_ptr.as_ptr().sub(old_offset_prefix)); 374 | 375 | let suffix = Self::suffix(old_ptr, old_layout) 376 | .cast::>() 377 | .as_ptr() 378 | .read(); 379 | 380 | let new_layout = 381 | Layout::from_size_align(new_size, old_layout.align()).map_err(|_| AllocErr)?; 382 | let (new_alloc_layout, new_offset_prefix, new_offset_suffix) = 383 | Self::allocation_layout(new_layout).ok_or(AllocErr)?; 384 | 385 | let new_base_ptr = shrink(old_base_ptr, old_alloc_layout, new_alloc_layout.size())?; 386 | 387 | let new_ptr = Self::create_ptr(new_base_ptr, new_offset_prefix, new_offset_suffix); 388 | 389 | Self::suffix(new_ptr.as_non_null_ptr(), new_layout) 390 | .cast::>() 391 | .as_ptr() 392 | .write(suffix); 393 | 394 | Ok(new_ptr) 395 | } 396 | } 397 | 398 | unsafe impl AllocRef for Affix 399 | where 400 | Alloc: AllocRef, 401 | { 402 | impl_alloc_ref!(parent); 403 | 404 | unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { 405 | let 
(layout, prefix_offset, _) = Self::allocation_layout(layout).unwrap(); 406 | let base_ptr = ptr.as_ptr().sub(prefix_offset); 407 | self.parent 408 | .dealloc(NonNull::new_unchecked(base_ptr), layout) 409 | } 410 | } 411 | 412 | unsafe impl AllocAll for Affix 413 | where 414 | Alloc: AllocAll, 415 | { 416 | impl_alloc_all!(parent); 417 | } 418 | 419 | unsafe impl ReallocInPlace for Affix 420 | where 421 | Alloc: ReallocInPlace, 422 | { 423 | impl_realloc_in_place!(parent); 424 | } 425 | 426 | #[cfg(test)] 427 | mod tests { 428 | #![allow(clippy::wildcard_imports)] 429 | use super::*; 430 | use crate::helper::tracker; 431 | use core::fmt; 432 | use std::alloc::System; 433 | 434 | #[allow(clippy::too_many_lines)] 435 | fn test_alloc( 436 | prefix: Prefix, 437 | layout: Layout, 438 | suffix: Suffix, 439 | offset_prefix: usize, 440 | offset_suffix: usize, 441 | ) where 442 | Prefix: fmt::Debug + Copy + PartialEq, 443 | Suffix: fmt::Debug + Copy + PartialEq, 444 | { 445 | unsafe { 446 | let mut alloc = tracker(Affix::<_, Prefix, Suffix>::new(tracker(System))); 447 | let memory = alloc 448 | .alloc_zeroed(layout) 449 | .unwrap_or_else(|_| panic!("Could not allocate {} bytes", layout.size())); 450 | 451 | if mem::size_of::() == 0 { 452 | assert_eq!( 453 | Affix::::prefix(memory.as_non_null_ptr(), layout), 454 | NonNull::dangling() 455 | ); 456 | } else { 457 | assert_eq!( 458 | Affix::::prefix(memory.as_non_null_ptr(), layout) 459 | .cast() 460 | .as_ptr(), 461 | memory.as_mut_ptr().sub(offset_prefix) 462 | ); 463 | } 464 | if mem::size_of::() == 0 { 465 | assert_eq!( 466 | Affix::::suffix(memory.as_non_null_ptr(), layout), 467 | NonNull::dangling() 468 | ); 469 | } else { 470 | assert_eq!( 471 | Affix::::suffix(memory.as_non_null_ptr(), layout) 472 | .cast() 473 | .as_ptr(), 474 | memory.as_mut_ptr().add(offset_suffix) 475 | ); 476 | } 477 | 478 | Affix::::prefix(memory.as_non_null_ptr(), layout) 479 | .as_ptr() 480 | .write(prefix); 481 | 
Affix::::suffix(memory.as_non_null_ptr(), layout) 482 | .as_ptr() 483 | .write(suffix); 484 | 485 | assert_eq!( 486 | Affix::::prefix(memory.as_non_null_ptr(), layout).as_ref(), 487 | &prefix 488 | ); 489 | assert_eq!( 490 | Affix::::suffix(memory.as_non_null_ptr(), layout).as_ref(), 491 | &suffix 492 | ); 493 | 494 | let old_size = memory.len(); 495 | let memory = alloc 496 | .grow_zeroed(memory.as_non_null_ptr(), layout, memory.len() * 2) 497 | .expect("Could not grow allocation"); 498 | let layout = 499 | Layout::from_size_align(memory.len(), layout.align()).expect("Invalid layout"); 500 | 501 | for i in old_size..memory.len() { 502 | assert_eq!(*memory.get_unchecked_mut(i).as_ref(), 0); 503 | } 504 | 505 | assert_eq!( 506 | Affix::::prefix(memory.as_non_null_ptr(), layout).as_ref(), 507 | &prefix 508 | ); 509 | assert_eq!( 510 | Affix::::suffix(memory.as_non_null_ptr(), layout).as_ref(), 511 | &suffix 512 | ); 513 | 514 | let memory = alloc 515 | .shrink(memory.as_non_null_ptr(), layout, layout.size()) 516 | .expect("Could not shrink allocation"); 517 | let layout = 518 | Layout::from_size_align(memory.len(), layout.align()).expect("Invalid layout"); 519 | 520 | assert_eq!( 521 | Affix::::prefix(memory.as_non_null_ptr(), layout).as_ref(), 522 | &prefix 523 | ); 524 | assert_eq!( 525 | Affix::::suffix(memory.as_non_null_ptr(), layout).as_ref(), 526 | &suffix 527 | ); 528 | 529 | alloc.dealloc(memory.as_non_null_ptr(), layout); 530 | } 531 | } 532 | 533 | #[test] 534 | fn test_alloc_u16_u32_u16() { 535 | test_alloc::(0xDEDE, Layout::new::(), 0xEFEF, 4, 4) 536 | } 537 | 538 | #[test] 539 | fn test_alloc_zst_u32_zst() { 540 | test_alloc::<(), ()>((), Layout::new::(), (), 0, 0) 541 | } 542 | 543 | #[test] 544 | fn test_alloc_zst_u32_u16() { 545 | test_alloc::<(), u16>((), Layout::new::(), 0xEFEF, 0, 4) 546 | } 547 | 548 | #[test] 549 | fn test_alloc_u16_u64_zst() { 550 | test_alloc::(0xDEDE, Layout::new::(), (), 4, 0) 551 | } 552 | 553 | #[repr(align(1024))] 554 | 
#[derive(Debug, Copy, Clone, PartialEq)] 555 | struct AlignTo1024 { 556 | a: u16, 557 | } 558 | 559 | #[repr(align(64))] 560 | #[derive(Debug, Copy, Clone, PartialEq)] 561 | struct AlignTo64; 562 | 563 | #[test] 564 | fn test_alloc_a1024_u32_zst() { 565 | test_alloc::(AlignTo1024 { a: 0xDEDE }, Layout::new::(), (), 1024, 0) 566 | } 567 | 568 | #[test] 569 | fn test_alloc_u16_u32_a1024() { 570 | test_alloc::( 571 | 0xDEDE, 572 | Layout::new::(), 573 | AlignTo1024 { a: 0xEFEF }, 574 | 4, 575 | 1020, 576 | ) 577 | } 578 | 579 | #[test] 580 | fn test_alloc_a64_u32_zst() { 581 | test_alloc::(AlignTo64, Layout::new::(), (), 0, 0) 582 | } 583 | 584 | #[test] 585 | fn test_alloc_u16_u32_a64() { 586 | test_alloc::(0xDEDE, Layout::new::(), AlignTo64, 4, 0) 587 | } 588 | } 589 | -------------------------------------------------------------------------------- /src/callback_ref.rs: -------------------------------------------------------------------------------- 1 | use core::{ 2 | alloc::{AllocError, Layout}, 3 | ptr::NonNull, 4 | }; 5 | 6 | /// Backend for the [`Proxy`] allocator. 7 | /// 8 | /// As `Callback` is used in `Proxy` and `AllocRef` requires, that a cloned allocator must 9 | /// behave like the same allocator, `Clone` must not be implemented on types, which don't 10 | /// have a shared state. It's possible to use a reference by calling [`by_ref`] or to 11 | /// wrapping them into `Rc` or `Arc` in order to make them cloneable instead. Note, that 12 | /// `Box`, `Rc`, and `Arc` requires the `"alloc"`-feature to be enabled. 13 | /// 14 | /// [`by_ref`]: CallbackRef::by_ref 15 | /// [`Proxy`]: crate::Proxy 16 | /// 17 | /// # Safety 18 | /// * `Clone` must not be implemented on types, which don't have a shared state. 19 | #[allow(unused_variables)] 20 | pub unsafe trait CallbackRef { 21 | /// Called before [`alloc`] was invoked. 
22 | /// 23 | /// [`alloc`]: core::alloc::AllocRef::alloc 24 | #[inline] 25 | fn before_allocate(&self, layout: Layout) {} 26 | 27 | /// Called after [`alloc`] was invoked. 28 | /// 29 | /// [`alloc`]: core::alloc::AllocRef::alloc 30 | #[inline] 31 | fn after_allocate(&self, layout: Layout, result: Result, AllocError>) {} 32 | 33 | /// Called before [`alloc_zeroed`] was invoked. 34 | /// 35 | /// [`alloc_zeroed`]: core::alloc::AllocRef::alloc_zeroed 36 | #[inline] 37 | fn before_allocate_zeroed(&self, layout: Layout) {} 38 | 39 | /// Called after [`alloc_zeroed`] was invoked. 40 | /// 41 | /// [`alloc_zeroed`]: core::alloc::AllocRef::alloc_zeroed 42 | #[inline] 43 | fn after_allocate_zeroed(&self, layout: Layout, result: Result, AllocError>) {} 44 | 45 | /// Called before [`allocate_all`] was invoked. 46 | /// 47 | /// [`allocate_all`]: crate::AllocateAll::allocate_all 48 | #[inline] 49 | fn before_allocate_all(&self) {} 50 | 51 | /// Called after [`allocate_all`] was invoked. 52 | /// 53 | /// [`allocate_all`]: crate::AllocateAll::allocate_all 54 | #[inline] 55 | fn after_allocate_all(&self, result: Result, AllocError>) {} 56 | 57 | /// Called before [`allocate_all_zeroed`] was invoked. 58 | /// 59 | /// [`allocate_all_zeroed`]: crate::AllocateAll::allocate_all_zeroed 60 | #[inline] 61 | fn before_allocate_all_zeroed(&self) {} 62 | 63 | /// Called after [`allocate_all_zeroed`] was invoked. 64 | /// 65 | /// [`allocate_all_zeroed`]: crate::AllocateAll::allocate_all_zeroed 66 | #[inline] 67 | fn after_allocate_all_zeroed(&self, result: Result, AllocError>) {} 68 | 69 | /// Called before [`dealloc`] was invoked. 70 | /// 71 | /// [`dealloc`]: core::alloc::AllocRef::dealloc 72 | #[inline] 73 | fn before_deallocate(&self, ptr: NonNull, layout: Layout) {} 74 | 75 | /// Called after [`dealloc`] was invoked. 
76 | /// 77 | /// [`dealloc`]: core::alloc::AllocRef::dealloc 78 | #[inline] 79 | fn after_deallocate(&self, ptr: NonNull, layout: Layout) {} 80 | 81 | /// Called before [`deallocate_all`] was invoked. 82 | /// 83 | /// [`deallocate_all`]: crate::AllocateAll::deallocate_all 84 | #[inline] 85 | fn before_deallocate_all(&self) {} 86 | 87 | /// Called after [`deallocate_all`] was invoked. 88 | /// 89 | /// [`deallocate_all`]: crate::AllocateAll::deallocate_all 90 | #[inline] 91 | fn after_deallocate_all(&self) {} 92 | 93 | /// Called before [`grow`] was invoked. 94 | /// 95 | /// [`grow`]: core::alloc::AllocRef::grow 96 | #[inline] 97 | fn before_grow(&self, ptr: NonNull, old_layout: Layout, new_layout: Layout) {} 98 | 99 | /// Called after [`grow`] was invoked. 100 | /// 101 | /// [`grow`]: core::alloc::AllocRef::grow 102 | #[inline] 103 | fn after_grow( 104 | &self, 105 | ptr: NonNull, 106 | old_layout: Layout, 107 | new_layout: Layout, 108 | result: Result, AllocError>, 109 | ) { 110 | } 111 | 112 | /// Called before [`grow_zeroed`] was invoked. 113 | /// 114 | /// [`grow_zeroed`]: core::alloc::AllocRef::grow_zeroed 115 | #[inline] 116 | fn before_grow_zeroed(&self, ptr: NonNull, old_layout: Layout, new_layout: Layout) {} 117 | 118 | /// Called after [`grow_zeroed`] was invoked. 119 | /// 120 | /// [`grow_zeroed`]: core::alloc::AllocRef::grow_zeroed 121 | #[inline] 122 | fn after_grow_zeroed( 123 | &self, 124 | ptr: NonNull, 125 | old_layout: Layout, 126 | new_layout: Layout, 127 | result: Result, AllocError>, 128 | ) { 129 | } 130 | 131 | /// Called before [`grow_in_place`] was invoked. 132 | /// 133 | /// [`grow_in_place`]: crate::ReallocateInPlace::grow_in_place 134 | #[inline] 135 | fn before_grow_in_place(&self, ptr: NonNull, old_layout: Layout, new_layout: Layout) {} 136 | 137 | /// Called after [`grow_in_place`] was invoked. 
138 | /// 139 | /// [`grow_in_place`]: crate::ReallocateInPlace::grow_in_place 140 | #[inline] 141 | fn after_grow_in_place( 142 | &self, 143 | ptr: NonNull, 144 | old_layout: Layout, 145 | new_layout: Layout, 146 | result: Result, 147 | ) { 148 | } 149 | 150 | /// Called before [`grow_in_place_zeroed`] was invoked. 151 | /// 152 | /// [`grow_in_place_zeroed`]: crate::ReallocateInPlace::grow_in_place_zeroed 153 | #[inline] 154 | fn before_grow_in_place_zeroed( 155 | &self, 156 | ptr: NonNull, 157 | old_layout: Layout, 158 | new_layout: Layout, 159 | ) { 160 | } 161 | 162 | /// Called after [`grow_in_place_zeroed`] was invoked. 163 | /// 164 | /// [`grow_in_place_zeroed`]: crate::ReallocateInPlace::grow_in_place_zeroed 165 | #[inline] 166 | fn after_grow_in_place_zeroed( 167 | &self, 168 | ptr: NonNull, 169 | old_layout: Layout, 170 | new_layout: Layout, 171 | result: Result, 172 | ) { 173 | } 174 | 175 | /// Called before [`shrink`] was invoked. 176 | /// 177 | /// [`shrink`]: core::alloc::AllocRef::shrink 178 | #[inline] 179 | fn before_shrink(&self, ptr: NonNull, old_layout: Layout, new_layout: Layout) {} 180 | 181 | /// Called after [`shrink`] was invoked. 182 | /// 183 | /// [`shrink`]: core::alloc::AllocRef::shrink 184 | #[inline] 185 | fn after_shrink( 186 | &self, 187 | ptr: NonNull, 188 | old_layout: Layout, 189 | new_layout: Layout, 190 | result: Result, AllocError>, 191 | ) { 192 | } 193 | 194 | /// Called before [`shrink_in_place`] was invoked. 195 | /// 196 | /// [`shrink_in_place`]: crate::ReallocateInPlace::shrink_in_place 197 | #[inline] 198 | fn before_shrink_in_place(&self, ptr: NonNull, old_layout: Layout, new_layout: Layout) {} 199 | 200 | /// Called after [`shrink_in_place`] was invoked. 
201 | /// 202 | /// [`shrink_in_place`]: crate::ReallocateInPlace::shrink_in_place 203 | #[inline] 204 | fn after_shrink_in_place( 205 | &self, 206 | ptr: NonNull, 207 | old_layout: Layout, 208 | new_layout: Layout, 209 | result: Result, 210 | ) { 211 | } 212 | 213 | /// Called before [`owns`] was invoked. 214 | /// 215 | /// [`owns`]: crate::Owns::owns 216 | #[inline] 217 | fn before_owns(&self) {} 218 | 219 | /// Called after [`owns`] was invoked. 220 | /// 221 | /// [`owns`]: crate::Owns::owns 222 | #[inline] 223 | fn after_owns(&self, success: bool) {} 224 | 225 | /// Creates a "by reference" adaptor for this instance of `CallbackRef`. 226 | /// 227 | /// The returned adaptor also implements `CallbackRef` and will simply borrow this. 228 | #[inline] 229 | fn by_ref(&self) -> &Self { 230 | self 231 | } 232 | } 233 | 234 | macro_rules! impl_alloc_stats { 235 | ($(#[$meta:meta])* $ty:ty) => { 236 | $(#[$meta])* 237 | unsafe impl CallbackRef for $ty where C: CallbackRef + ?Sized { 238 | #[inline] 239 | fn before_allocate(&self, layout: Layout) { 240 | (**self).before_allocate(layout) 241 | } 242 | 243 | #[inline] 244 | fn after_allocate(&self, layout: Layout, result: Result, AllocError>) { 245 | (**self).after_allocate(layout, result) 246 | } 247 | 248 | #[inline] 249 | fn before_allocate_zeroed(&self, layout: Layout) { 250 | (**self).before_allocate_zeroed(layout) 251 | } 252 | 253 | #[inline] 254 | fn after_allocate_zeroed(&self, layout: Layout, result: Result, AllocError>) { 255 | (**self).after_allocate_zeroed(layout, result) 256 | } 257 | 258 | #[inline] 259 | fn before_allocate_all(&self) { 260 | (**self).before_allocate_all() 261 | } 262 | 263 | #[inline] 264 | fn after_allocate_all(&self, result: Result, AllocError>) { 265 | (**self).after_allocate_all(result) 266 | } 267 | 268 | #[inline] 269 | fn before_allocate_all_zeroed(&self) { 270 | (**self).before_allocate_all_zeroed() 271 | } 272 | 273 | #[inline] 274 | fn after_allocate_all_zeroed( 275 | &self, 
276 | result: Result, AllocError>, 277 | ) { 278 | (**self).after_allocate_all_zeroed(result) 279 | } 280 | 281 | #[inline] 282 | fn before_deallocate(&self, ptr: NonNull, layout: Layout) { 283 | (**self).before_deallocate(ptr, layout) 284 | } 285 | 286 | #[inline] 287 | fn after_deallocate(&self, ptr: NonNull, layout: Layout) { 288 | (**self).after_deallocate(ptr, layout) 289 | } 290 | 291 | #[inline] 292 | fn before_deallocate_all(&self) { 293 | (**self).before_deallocate_all() 294 | } 295 | 296 | #[inline] 297 | fn after_deallocate_all(&self) { 298 | (**self).after_deallocate_all() 299 | } 300 | 301 | #[inline] 302 | fn before_grow(&self, ptr: NonNull, old_layout: Layout, new_layout: Layout) { 303 | (**self).before_grow(ptr, old_layout, new_layout) 304 | } 305 | 306 | #[inline] 307 | fn after_grow( 308 | &self, 309 | ptr: NonNull, 310 | old_layout: Layout, 311 | new_layout: Layout, 312 | result: Result, AllocError>, 313 | ) { 314 | (**self).after_grow(ptr, old_layout, new_layout, result) 315 | } 316 | 317 | #[inline] 318 | fn before_grow_zeroed(&self, ptr: NonNull, 319 | old_layout: Layout, 320 | new_layout: Layout,) { 321 | (**self).before_grow_zeroed(ptr, old_layout, new_layout) 322 | } 323 | 324 | #[inline] 325 | fn after_grow_zeroed( 326 | &self, 327 | ptr: NonNull, 328 | old_layout: Layout, 329 | new_layout: Layout, 330 | result: Result, AllocError>, 331 | ) { 332 | (**self).after_grow_zeroed(ptr, old_layout, new_layout, result) 333 | } 334 | 335 | #[inline] 336 | fn before_grow_in_place(&self, ptr: NonNull, 337 | old_layout: Layout, 338 | new_layout: Layout,) { 339 | (**self).before_grow_in_place(ptr, old_layout, new_layout) 340 | } 341 | 342 | #[inline] 343 | fn after_grow_in_place( 344 | &self, 345 | ptr: NonNull, 346 | old_layout: Layout, 347 | new_layout: Layout, 348 | result: Result, 349 | ) { 350 | (**self).after_grow_in_place(ptr, old_layout, new_layout, result) 351 | } 352 | 353 | #[inline] 354 | fn before_grow_in_place_zeroed( 355 | &self, 356 | 
ptr: NonNull, 357 | old_layout: Layout, 358 | new_layout: Layout, 359 | ) { 360 | (**self).before_grow_in_place_zeroed(ptr, old_layout, new_layout) 361 | } 362 | 363 | #[inline] 364 | fn after_grow_in_place_zeroed( 365 | &self, 366 | ptr: NonNull, 367 | old_layout: Layout, 368 | new_layout: Layout, 369 | result: Result, 370 | ) { 371 | (**self).after_grow_in_place_zeroed(ptr, old_layout, new_layout, result) 372 | } 373 | 374 | #[inline] 375 | fn before_shrink(&self, ptr: NonNull, 376 | old_layout: Layout, 377 | new_layout: Layout,) { 378 | (**self).before_shrink(ptr, old_layout, new_layout) 379 | } 380 | 381 | #[inline] 382 | fn after_shrink( 383 | &self, 384 | ptr: NonNull, 385 | old_layout: Layout, 386 | new_layout: Layout, 387 | result: Result, AllocError>, 388 | ) { 389 | (**self).after_shrink(ptr, old_layout, new_layout, result) 390 | } 391 | 392 | #[inline] 393 | fn before_shrink_in_place(&self, ptr: NonNull, 394 | old_layout: Layout, 395 | new_layout: Layout,) { 396 | (**self).before_shrink_in_place(ptr, old_layout, new_layout) 397 | } 398 | 399 | #[inline] 400 | fn after_shrink_in_place( 401 | &self, 402 | ptr: NonNull, 403 | old_layout: Layout, 404 | new_layout: Layout, 405 | result: Result, 406 | ) { 407 | (**self).after_shrink_in_place(ptr, old_layout, new_layout, result) 408 | } 409 | 410 | #[inline] 411 | fn before_owns(&self) { 412 | (**self).before_owns() 413 | } 414 | 415 | #[inline] 416 | fn after_owns(&self, success: bool) { 417 | (**self).after_owns(success) 418 | } 419 | } 420 | }; 421 | } 422 | 423 | impl_alloc_stats!(&C); 424 | #[cfg(any(doc, feature = "alloc"))] 425 | impl_alloc_stats!(#[cfg_attr(doc, doc(cfg(feature = "alloc")))] alloc::boxed::Box); 426 | #[cfg(any(doc, feature = "alloc"))] 427 | impl_alloc_stats!(#[cfg_attr(doc, doc(cfg(feature = "alloc")))] alloc::rc::Rc); 428 | #[cfg(any(doc, feature = "alloc"))] 429 | impl_alloc_stats!(#[cfg_attr(doc, doc(cfg(feature = "alloc")))] alloc::sync::Arc); 430 | 431 | #[cfg(test)] 432 | mod 
tests { 433 | use crate::CallbackRef; 434 | use alloc::{boxed::Box, rc::Rc, sync::Arc}; 435 | use core::{ 436 | alloc::{AllocError, Layout}, 437 | cell::Cell, 438 | ptr::NonNull, 439 | }; 440 | 441 | #[derive(Default)] 442 | struct Callback { 443 | before_allocate: Cell, 444 | after_allocate: Cell, 445 | before_allocate_zeroed: Cell, 446 | after_allocate_zeroed: Cell, 447 | before_allocate_all: Cell, 448 | after_allocate_all: Cell, 449 | before_allocate_all_zeroed: Cell, 450 | after_allocate_all_zeroed: Cell, 451 | before_deallocate: Cell, 452 | after_deallocate: Cell, 453 | before_deallocate_all: Cell, 454 | after_deallocate_all: Cell, 455 | before_grow: Cell, 456 | after_grow: Cell, 457 | before_grow_zeroed: Cell, 458 | after_grow_zeroed: Cell, 459 | before_grow_in_place: Cell, 460 | after_grow_in_place: Cell, 461 | before_grow_in_place_zeroed: Cell, 462 | after_grow_in_place_zeroed: Cell, 463 | before_shrink: Cell, 464 | after_shrink: Cell, 465 | before_shrink_in_place: Cell, 466 | after_shrink_in_place: Cell, 467 | before_owns: Cell, 468 | after_owns: Cell, 469 | } 470 | 471 | unsafe impl CallbackRef for Callback { 472 | fn before_allocate(&self, _layout: Layout) { 473 | self.before_allocate.set(self.before_allocate.get() + 1) 474 | } 475 | fn after_allocate(&self, _layout: Layout, _result: Result, AllocError>) { 476 | self.after_allocate.set(self.after_allocate.get() + 1) 477 | } 478 | fn before_allocate_zeroed(&self, _layout: Layout) { 479 | self.before_allocate_zeroed 480 | .set(self.before_allocate_zeroed.get() + 1) 481 | } 482 | fn after_allocate_zeroed( 483 | &self, 484 | _layout: Layout, 485 | _result: Result, AllocError>, 486 | ) { 487 | self.after_allocate_zeroed 488 | .set(self.after_allocate_zeroed.get() + 1) 489 | } 490 | fn before_allocate_all(&self) { 491 | self.before_allocate_all 492 | .set(self.before_allocate_all.get() + 1) 493 | } 494 | fn after_allocate_all(&self, _result: Result, AllocError>) { 495 | self.after_allocate_all 496 | 
.set(self.after_allocate_all.get() + 1) 497 | } 498 | fn before_allocate_all_zeroed(&self) { 499 | self.before_allocate_all_zeroed 500 | .set(self.before_allocate_all_zeroed.get() + 1) 501 | } 502 | fn after_allocate_all_zeroed(&self, _result: Result, AllocError>) { 503 | self.after_allocate_all_zeroed 504 | .set(self.after_allocate_all_zeroed.get() + 1) 505 | } 506 | fn before_deallocate(&self, _ptr: NonNull, _layout: Layout) { 507 | self.before_deallocate.set(self.before_deallocate.get() + 1) 508 | } 509 | fn after_deallocate(&self, _ptr: NonNull, _layout: Layout) { 510 | self.after_deallocate.set(self.after_deallocate.get() + 1) 511 | } 512 | fn before_deallocate_all(&self) { 513 | self.before_deallocate_all 514 | .set(self.before_deallocate_all.get() + 1) 515 | } 516 | fn after_deallocate_all(&self) { 517 | self.after_deallocate_all 518 | .set(self.after_deallocate_all.get() + 1) 519 | } 520 | fn before_grow(&self, _ptr: NonNull, _old_layout: Layout, _new_layout: Layout) { 521 | self.before_grow.set(self.before_grow.get() + 1) 522 | } 523 | fn after_grow( 524 | &self, 525 | _ptr: NonNull, 526 | _old_layout: Layout, 527 | _new_layout: Layout, 528 | _result: Result, AllocError>, 529 | ) { 530 | self.after_grow.set(self.after_grow.get() + 1) 531 | } 532 | fn before_grow_zeroed(&self, _ptr: NonNull, _old_layout: Layout, _new_layout: Layout) { 533 | self.before_grow_zeroed 534 | .set(self.before_grow_zeroed.get() + 1) 535 | } 536 | fn after_grow_zeroed( 537 | &self, 538 | _ptr: NonNull, 539 | _old_layout: Layout, 540 | _new_layout: Layout, 541 | _result: Result, AllocError>, 542 | ) { 543 | self.after_grow_zeroed.set(self.after_grow_zeroed.get() + 1) 544 | } 545 | fn before_grow_in_place( 546 | &self, 547 | _ptr: NonNull, 548 | _old_layout: Layout, 549 | _new_layout: Layout, 550 | ) { 551 | self.before_grow_in_place 552 | .set(self.before_grow_in_place.get() + 1) 553 | } 554 | fn after_grow_in_place( 555 | &self, 556 | _ptr: NonNull, 557 | _old_layout: Layout, 558 | 
_new_layout: Layout, 559 | _result: Result, 560 | ) { 561 | self.after_grow_in_place 562 | .set(self.after_grow_in_place.get() + 1) 563 | } 564 | fn before_grow_in_place_zeroed( 565 | &self, 566 | _ptr: NonNull, 567 | _old_layout: Layout, 568 | _new_layout: Layout, 569 | ) { 570 | self.before_grow_in_place_zeroed 571 | .set(self.before_grow_in_place_zeroed.get() + 1) 572 | } 573 | fn after_grow_in_place_zeroed( 574 | &self, 575 | _ptr: NonNull, 576 | _old_layout: Layout, 577 | _new_layout: Layout, 578 | _result: Result, 579 | ) { 580 | self.after_grow_in_place_zeroed 581 | .set(self.after_grow_in_place_zeroed.get() + 1) 582 | } 583 | fn before_shrink(&self, _ptr: NonNull, _old_layout: Layout, _new_layout: Layout) { 584 | self.before_shrink.set(self.before_shrink.get() + 1) 585 | } 586 | fn after_shrink( 587 | &self, 588 | _ptr: NonNull, 589 | _old_layout: Layout, 590 | _new_layout: Layout, 591 | _result: Result, AllocError>, 592 | ) { 593 | self.after_shrink.set(self.after_shrink.get() + 1) 594 | } 595 | fn before_shrink_in_place( 596 | &self, 597 | _ptr: NonNull, 598 | _old_layout: Layout, 599 | _new_layout: Layout, 600 | ) { 601 | self.before_shrink_in_place 602 | .set(self.before_shrink_in_place.get() + 1) 603 | } 604 | fn after_shrink_in_place( 605 | &self, 606 | _ptr: NonNull, 607 | _old_layout: Layout, 608 | _new_layout: Layout, 609 | _result: Result, 610 | ) { 611 | self.after_shrink_in_place 612 | .set(self.after_shrink_in_place.get() + 1) 613 | } 614 | fn before_owns(&self) { 615 | self.before_owns.set(self.before_owns.get() + 1) 616 | } 617 | fn after_owns(&self, _success: bool) { 618 | self.after_owns.set(self.after_owns.get() + 1) 619 | } 620 | } 621 | 622 | fn test_callback(callback: impl CallbackRef) { 623 | callback.before_allocate(Layout::new::<()>()); 624 | callback.after_allocate(Layout::new::<()>(), Err(AllocError)); 625 | callback.before_allocate_zeroed(Layout::new::<()>()); 626 | callback.after_allocate_zeroed(Layout::new::<()>(), 
Err(AllocError)); 627 | callback.before_allocate_all(); 628 | callback.after_allocate_all(Err(AllocError)); 629 | callback.before_allocate_all_zeroed(); 630 | callback.after_allocate_all_zeroed(Err(AllocError)); 631 | callback.before_deallocate(NonNull::dangling(), Layout::new::<()>()); 632 | callback.after_deallocate(NonNull::dangling(), Layout::new::<()>()); 633 | callback.before_deallocate_all(); 634 | callback.after_deallocate_all(); 635 | callback.before_grow( 636 | NonNull::dangling(), 637 | Layout::new::<()>(), 638 | Layout::new::<()>(), 639 | ); 640 | callback.after_grow( 641 | NonNull::dangling(), 642 | Layout::new::<()>(), 643 | Layout::new::<()>(), 644 | Err(AllocError), 645 | ); 646 | callback.before_grow_zeroed( 647 | NonNull::dangling(), 648 | Layout::new::<()>(), 649 | Layout::new::<()>(), 650 | ); 651 | callback.after_grow_zeroed( 652 | NonNull::dangling(), 653 | Layout::new::<()>(), 654 | Layout::new::<()>(), 655 | Err(AllocError), 656 | ); 657 | callback.before_grow_in_place( 658 | NonNull::dangling(), 659 | Layout::new::<()>(), 660 | Layout::new::<()>(), 661 | ); 662 | callback.after_grow_in_place( 663 | NonNull::dangling(), 664 | Layout::new::<()>(), 665 | Layout::new::<()>(), 666 | Err(AllocError), 667 | ); 668 | callback.before_grow_in_place_zeroed( 669 | NonNull::dangling(), 670 | Layout::new::<()>(), 671 | Layout::new::<()>(), 672 | ); 673 | callback.after_grow_in_place_zeroed( 674 | NonNull::dangling(), 675 | Layout::new::<()>(), 676 | Layout::new::<()>(), 677 | Err(AllocError), 678 | ); 679 | callback.before_shrink( 680 | NonNull::dangling(), 681 | Layout::new::<()>(), 682 | Layout::new::<()>(), 683 | ); 684 | callback.after_shrink( 685 | NonNull::dangling(), 686 | Layout::new::<()>(), 687 | Layout::new::<()>(), 688 | Err(AllocError), 689 | ); 690 | callback.after_shrink_in_place( 691 | NonNull::dangling(), 692 | Layout::new::<()>(), 693 | Layout::new::<()>(), 694 | Err(AllocError), 695 | ); 696 | callback.before_shrink_in_place( 697 | 
NonNull::dangling(), 698 | Layout::new::<()>(), 699 | Layout::new::<()>(), 700 | ); 701 | callback.before_owns(); 702 | callback.after_owns(false); 703 | } 704 | 705 | fn check_counts(callback: &Callback) { 706 | assert_eq!(callback.before_allocate.get(), 1); 707 | assert_eq!(callback.after_allocate.get(), 1); 708 | assert_eq!(callback.before_allocate_zeroed.get(), 1); 709 | assert_eq!(callback.after_allocate_zeroed.get(), 1); 710 | assert_eq!(callback.before_allocate_all.get(), 1); 711 | assert_eq!(callback.after_allocate_all.get(), 1); 712 | assert_eq!(callback.before_allocate_all_zeroed.get(), 1); 713 | assert_eq!(callback.after_allocate_all_zeroed.get(), 1); 714 | assert_eq!(callback.before_deallocate.get(), 1); 715 | assert_eq!(callback.after_deallocate.get(), 1); 716 | assert_eq!(callback.before_deallocate_all.get(), 1); 717 | assert_eq!(callback.after_deallocate_all.get(), 1); 718 | assert_eq!(callback.before_grow.get(), 1); 719 | assert_eq!(callback.after_grow.get(), 1); 720 | assert_eq!(callback.before_grow_zeroed.get(), 1); 721 | assert_eq!(callback.after_grow_zeroed.get(), 1); 722 | assert_eq!(callback.before_grow_in_place.get(), 1); 723 | assert_eq!(callback.after_grow_in_place.get(), 1); 724 | assert_eq!(callback.before_grow_in_place_zeroed.get(), 1); 725 | assert_eq!(callback.after_grow_in_place_zeroed.get(), 1); 726 | assert_eq!(callback.before_shrink.get(), 1); 727 | assert_eq!(callback.after_shrink.get(), 1); 728 | assert_eq!(callback.before_shrink_in_place.get(), 1); 729 | assert_eq!(callback.after_shrink_in_place.get(), 1); 730 | assert_eq!(callback.before_owns.get(), 1); 731 | assert_eq!(callback.after_owns.get(), 1); 732 | } 733 | 734 | #[test] 735 | fn plain() { 736 | let callback = Callback::default(); 737 | test_callback(callback.by_ref()); 738 | check_counts(&callback); 739 | } 740 | 741 | #[test] 742 | fn boxed() { 743 | let callback = Box::new(Callback::default()); 744 | test_callback(callback.by_ref()); 745 | check_counts(&callback); 746 
| } 747 | 748 | #[test] 749 | fn rc() { 750 | let callback = Rc::new(Callback::default()); 751 | test_callback(callback.by_ref()); 752 | check_counts(&callback); 753 | } 754 | 755 | #[test] 756 | fn arc() { 757 | let callback = Arc::new(Callback::default()); 758 | test_callback(callback.by_ref()); 759 | check_counts(&callback); 760 | } 761 | } 762 | -------------------------------------------------------------------------------- /src/chunk.rs: -------------------------------------------------------------------------------- 1 | use crate::{helper::AllocInit, Owns, ReallocateInPlace}; 2 | use core::{ 3 | alloc::{AllocError, AllocRef, Layout}, 4 | ptr::NonNull, 5 | }; 6 | 7 | /// Allocate memory with a multiple size of the provided chunk size. 8 | /// 9 | /// # Examples 10 | /// 11 | /// ```rust 12 | /// #![feature(allocator_api, slice_ptr_len)] 13 | /// 14 | /// use alloc_compose::Chunk; 15 | /// use std::alloc::{AllocRef, Layout, System}; 16 | /// 17 | /// let mut alloc = Chunk::<_, 64>(System); 18 | /// let ptr = alloc.alloc(Layout::new::<[u8; 16]>())?; 19 | /// assert_eq!(ptr.len() % 64, 0); 20 | /// assert!(ptr.len() >= 64); 21 | /// # Ok::<(), core::alloc::AllocError>(()) 22 | /// ``` 23 | /// 24 | /// When growing or shrinking the memory, `Chunk` will try to alter 25 | /// the memory in place before delegating to the underlying allocator. 26 | /// 27 | /// ```rust 28 | /// #![feature(slice_ptr_get)] 29 | /// # #![feature(allocator_api, slice_ptr_len)] 30 | /// # use alloc_compose::Chunk; 31 | /// # use std::{alloc::{AllocRef, Layout, System}}; 32 | /// # let mut alloc = Chunk::<_, 64>(System); 33 | /// # let ptr = alloc.alloc(Layout::new::<[u8; 16]>())?; 34 | /// 35 | /// let new_ptr = unsafe { 36 | /// alloc.grow( 37 | /// ptr.as_non_null_ptr(), 38 | /// Layout::new::<[u8; 16]>(), 39 | /// Layout::new::<[u8; 24]>(), 40 | /// )? 
41 | /// }; 42 | /// 43 | /// assert_eq!(ptr, new_ptr); 44 | /// # Ok::<(), core::alloc::AllocError>(()) 45 | /// ``` 46 | /// 47 | /// This can be enforced by using [`ReallocateInPlace::grow_in_place`]. 48 | /// 49 | /// ```rust 50 | /// # #![feature(allocator_api, slice_ptr_len, slice_ptr_get)] 51 | /// # use alloc_compose::Chunk; 52 | /// # use std::{alloc::{AllocRef, Layout, System}}; 53 | /// # let mut alloc = Chunk::<_, 64>(System); 54 | /// # let ptr = alloc.alloc(Layout::new::<[u8; 24]>())?; 55 | /// use alloc_compose::ReallocateInPlace; 56 | /// 57 | /// let len = unsafe { 58 | /// alloc.grow_in_place( 59 | /// ptr.as_non_null_ptr(), 60 | /// Layout::new::<[u8; 24]>(), 61 | /// Layout::new::<[u8; 32]>(), 62 | /// )? 63 | /// }; 64 | /// 65 | /// assert_eq!(len % 64, 0); 66 | /// assert!(len >= 64); 67 | /// # Ok::<(), core::alloc::AllocError>(()) 68 | /// ``` 69 | #[derive(Debug, Default, Copy, Clone, PartialEq, Eq)] 70 | pub struct Chunk(pub A); 71 | 72 | mod sealed { 73 | pub trait SizeIsPowerOfTwo {} 74 | } 75 | use sealed::SizeIsPowerOfTwo; 76 | 77 | macro_rules! is_power_of_two { 78 | ($($N:literal)+) => { 79 | $( 80 | impl SizeIsPowerOfTwo for Chunk {} 81 | )+ 82 | }; 83 | } 84 | 85 | is_power_of_two!(1 2 3 4 5 6 7); 86 | #[cfg(any( 87 | target_pointer_width = "16", 88 | target_pointer_width = "32", 89 | target_pointer_width = "64" 90 | ))] 91 | is_power_of_two!(8 9 10 11 12 13 14 15); 92 | #[cfg(any(target_pointer_width = "32", target_pointer_width = "64"))] 93 | is_power_of_two!(16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31); 94 | #[cfg(target_pointer_width = "64")] 95 | is_power_of_two!(32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63); 96 | 97 | impl Chunk 98 | where 99 | Self: SizeIsPowerOfTwo, 100 | { 101 | fn round_up(size: usize) -> Result { 102 | Ok((size.checked_add(SIZE).ok_or(AllocError)? 
- 1) & !(SIZE - 1)) 103 | } 104 | 105 | unsafe fn round_up_unchecked(size: usize) -> usize { 106 | let new_size = (size.wrapping_add(SIZE) - 1) & !(SIZE - 1); 107 | debug_assert_eq!(new_size, Self::round_up(size).unwrap()); 108 | new_size 109 | } 110 | 111 | const fn round_down(size: usize) -> usize { 112 | size & !(SIZE - 1) 113 | } 114 | 115 | const fn round_down_ptr_len(ptr: NonNull<[u8]>) -> NonNull<[u8]> { 116 | NonNull::slice_from_raw_parts(ptr.as_non_null_ptr(), Self::round_down(ptr.len())) 117 | } 118 | 119 | #[inline] 120 | fn alloc_impl( 121 | layout: Layout, 122 | alloc: impl FnOnce(Layout) -> Result, AllocError>, 123 | ) -> Result, AllocError> { 124 | let new_size = Self::round_up(layout.size())?; 125 | let new_layout = unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) }; 126 | 127 | alloc(new_layout).map(Self::round_down_ptr_len) 128 | } 129 | 130 | #[inline] 131 | unsafe fn grow_impl( 132 | old_ptr: NonNull, 133 | old_layout: Layout, 134 | new_layout: Layout, 135 | init: AllocInit, 136 | grow: impl FnOnce(NonNull, Layout, Layout) -> Result, AllocError>, 137 | ) -> Result, AllocError> { 138 | let old_size = old_layout.size(); 139 | let current_size = Self::round_up_unchecked(old_size); 140 | let new_size = new_layout.size(); 141 | if new_layout.align() <= old_layout.align() && new_size <= current_size { 142 | let ptr = NonNull::slice_from_raw_parts(old_ptr, current_size); 143 | init.init_offset(ptr, old_size); 144 | return Ok(ptr); 145 | } 146 | 147 | grow( 148 | old_ptr, 149 | Layout::from_size_align_unchecked(current_size, old_layout.align()), 150 | Layout::from_size_align_unchecked(Self::round_up(new_size)?, new_layout.align()), 151 | ) 152 | .map(Self::round_down_ptr_len) 153 | } 154 | 155 | #[inline] 156 | unsafe fn shrink_impl( 157 | old_ptr: NonNull, 158 | old_layout: Layout, 159 | new_layout: Layout, 160 | shrink: impl FnOnce(NonNull, Layout, Layout) -> Result, AllocError>, 161 | ) -> Result, AllocError> { 162 | let 
current_size = Self::round_up_unchecked(old_layout.size()); 163 | let new_size = new_layout.size(); 164 | if new_layout.align() <= old_layout.align() && new_layout.size() > current_size - SIZE { 165 | return Ok(NonNull::slice_from_raw_parts(old_ptr, current_size)); 166 | } 167 | 168 | shrink( 169 | old_ptr, 170 | old_layout, 171 | Layout::from_size_align_unchecked( 172 | Self::round_up_unchecked(new_size), 173 | new_layout.align(), 174 | ), 175 | ) 176 | .map(Self::round_down_ptr_len) 177 | } 178 | } 179 | 180 | unsafe impl AllocRef for Chunk 181 | where 182 | Self: SizeIsPowerOfTwo, 183 | { 184 | impl_alloc_ref!(0); 185 | 186 | unsafe fn dealloc(&self, ptr: NonNull, layout: Layout) { 187 | crate::check_dealloc_precondition(ptr, layout); 188 | 189 | self.0.dealloc( 190 | ptr, 191 | Layout::from_size_align_unchecked( 192 | Self::round_up_unchecked(layout.size()), 193 | layout.align(), 194 | ), 195 | ) 196 | } 197 | } 198 | 199 | // unsafe impl AllocateAll for Chunk 200 | // where 201 | // Self: SizeIsPowerOfTwo, 202 | // { 203 | // impl_alloc_all!(0); 204 | // } 205 | 206 | unsafe impl ReallocateInPlace for Chunk 207 | where 208 | Self: SizeIsPowerOfTwo, 209 | { 210 | impl_realloc_in_place_spec!(0); 211 | } 212 | 213 | unsafe impl ReallocateInPlace for Chunk 214 | where 215 | Self: SizeIsPowerOfTwo, 216 | { 217 | impl_realloc_in_place!(0); 218 | } 219 | 220 | impl Owns for Chunk 221 | where 222 | Self: SizeIsPowerOfTwo, 223 | { 224 | fn owns(&self, memory: NonNull<[u8]>) -> bool { 225 | self.0.owns(memory) 226 | } 227 | } 228 | 229 | #[cfg(test)] 230 | mod tests { 231 | use super::Chunk; 232 | use crate::{helper::tracker, ReallocateInPlace}; 233 | use alloc::alloc::Global; 234 | use core::alloc::{AllocRef, Layout}; 235 | 236 | #[test] 237 | fn alloc() { 238 | let alloc = Chunk::<_, 64>(tracker(Global)); 239 | let memory = alloc 240 | .alloc(Layout::new::<[u8; 2]>()) 241 | .expect("Could not allocate 64 bytes"); 242 | assert_eq!(memory.len() % 64, 0); 243 | 
assert!(memory.len() >= 64); 244 | 245 | unsafe { 246 | alloc.dealloc(memory.as_non_null_ptr(), Layout::new::()); 247 | } 248 | } 249 | 250 | #[test] 251 | fn dealloc() { 252 | let alloc = Chunk::<_, 64>(tracker(Global)); 253 | 254 | unsafe { 255 | let memory = alloc 256 | .alloc(Layout::new::<[u8; 4]>()) 257 | .expect("Could not allocate 4 bytes"); 258 | assert_eq!(memory.len() % 64, 0); 259 | alloc.dealloc(memory.as_non_null_ptr(), Layout::new::<[u8; 4]>()); 260 | 261 | let memory = alloc 262 | .alloc(Layout::new::<[u8; 8]>()) 263 | .expect("Could not allocate 8 bytes"); 264 | assert_eq!(memory.len() % 64, 0); 265 | alloc.dealloc(memory.as_non_null_ptr(), Layout::new::<[u8; 8]>()); 266 | 267 | let memory = alloc 268 | .alloc(Layout::new::<[u8; 32]>()) 269 | .expect("Could not allocate 32 bytes"); 270 | assert_eq!(memory.len() % 64, 0); 271 | alloc.dealloc(memory.as_non_null_ptr(), Layout::new::<[u8; 32]>()); 272 | 273 | let memory = alloc 274 | .alloc(Layout::new::<[u8; 64]>()) 275 | .expect("Could not allocate 64 bytes"); 276 | assert_eq!(memory.len() % 64, 0); 277 | alloc.dealloc(memory.as_non_null_ptr(), Layout::new::<[u8; 64]>()); 278 | } 279 | } 280 | 281 | #[test] 282 | fn grow() { 283 | let alloc = Chunk::<_, 64>(tracker(Global)); 284 | 285 | let memory = alloc 286 | .alloc(Layout::new::<[u8; 4]>()) 287 | .expect("Could not allocate 4 bytes"); 288 | assert_eq!(memory.len() % 64, 0); 289 | 290 | unsafe { 291 | let len = alloc 292 | .grow_in_place( 293 | memory.as_non_null_ptr(), 294 | Layout::new::<[u8; 4]>(), 295 | Layout::new::<[u8; 64]>(), 296 | ) 297 | .expect("Could not grow to 8 bytes"); 298 | assert_eq!(len % 64, 0); 299 | assert!(len >= 64); 300 | 301 | let len = alloc 302 | .grow_in_place( 303 | memory.as_non_null_ptr(), 304 | Layout::new::<[u8; 8]>(), 305 | Layout::new::<[u8; 64]>(), 306 | ) 307 | .expect("Could not grow to 64 bytes"); 308 | assert_eq!(len % 64, 0); 309 | assert!(len >= 64); 310 | 311 | alloc 312 | .grow_in_place( 313 | 
memory.as_non_null_ptr(), 314 | Layout::new::<[u8; 64]>(), 315 | Layout::new::<[u8; 65]>(), 316 | ) 317 | .expect_err("Could grow to 65 bytes in place"); 318 | 319 | let memory = alloc 320 | .grow( 321 | memory.as_non_null_ptr(), 322 | Layout::new::<[u8; 64]>(), 323 | Layout::new::<[u8; 65]>(), 324 | ) 325 | .expect("Could not grow to 65 bytes"); 326 | 327 | alloc.dealloc(memory.as_non_null_ptr(), Layout::new::<[u8; 65]>()); 328 | } 329 | } 330 | 331 | #[test] 332 | fn shrink() { 333 | let alloc = Chunk::<_, 64>(tracker(Global)); 334 | 335 | let memory = alloc 336 | .alloc(Layout::new::<[u8; 128]>()) 337 | .expect("Could not allocate 128 bytes"); 338 | assert_eq!(memory.len() % 64, 0); 339 | 340 | unsafe { 341 | let len = alloc 342 | .shrink_in_place( 343 | memory.as_non_null_ptr(), 344 | Layout::new::<[u8; 128]>(), 345 | Layout::new::<[u8; 100]>(), 346 | ) 347 | .expect("Could not shrink to 100 bytes"); 348 | assert_eq!(len % 64, 0); 349 | assert!(len >= 128); 350 | 351 | let len = alloc 352 | .shrink_in_place( 353 | memory.as_non_null_ptr(), 354 | Layout::new::<[u8; 100]>(), 355 | Layout::new::<[u8; 65]>(), 356 | ) 357 | .expect("Could not shrink to 65 bytes"); 358 | assert_eq!(len % 64, 0); 359 | assert!(len >= 128); 360 | 361 | alloc 362 | .shrink_in_place( 363 | memory.as_non_null_ptr(), 364 | Layout::new::<[u8; 65]>(), 365 | Layout::new::<[u8; 64]>(), 366 | ) 367 | .expect_err("Could shrink to 64 bytes in place"); 368 | 369 | let memory = alloc 370 | .shrink( 371 | memory.as_non_null_ptr(), 372 | Layout::new::<[u8; 128]>(), 373 | Layout::new::<[u8; 64]>(), 374 | ) 375 | .expect("Could not shrink to 64 bytes"); 376 | 377 | alloc.dealloc(memory.as_non_null_ptr(), Layout::new::<[u8; 64]>()); 378 | } 379 | } 380 | } 381 | -------------------------------------------------------------------------------- /src/fallback.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | helper::{grow_fallback, AllocInit}, 3 | 
Owns, 4 | }; 5 | use core::{ 6 | alloc::{AllocError, AllocRef, Layout}, 7 | ptr::NonNull, 8 | }; 9 | 10 | /// An allocator equivalent of an "or" operator in algebra. 11 | /// 12 | /// An allocation request is first attempted with the `Primary` allocator. If that fails, the 13 | /// request is forwarded to the `Fallback` allocator. All other requests are dispatched 14 | /// appropriately to one of the two allocators. 15 | /// 16 | /// A `Fallback` is useful for fast, special-purpose allocators backed up by general-purpose 17 | /// allocators like [`Global`] or [`System`]. 18 | /// 19 | /// [`Global`]: https://doc.rust-lang.org/alloc/alloc/struct.Global.html 20 | /// [`System`]: https://doc.rust-lang.org/std/alloc/struct.System.html 21 | /// 22 | /// # Example 23 | /// 24 | /// ```rust 25 | /// #![feature(allocator_api, slice_ptr_get)] 26 | /// 27 | /// use alloc_compose::{region::Region, Fallback, Owns}; 28 | /// use std::{ 29 | /// alloc::{AllocRef, Layout, System}, 30 | /// mem::MaybeUninit, 31 | /// }; 32 | /// 33 | /// let mut data = [MaybeUninit::new(0); 32]; 34 | /// let mut alloc = Fallback { 35 | /// primary: Region::new(&mut data), 36 | /// secondary: System, 37 | /// }; 38 | /// 39 | /// let small_memory = alloc.alloc(Layout::new::())?; 40 | /// let big_memory = alloc.alloc(Layout::new::<[u32; 64]>())?; 41 | /// 42 | /// assert!(alloc.primary.owns(small_memory)); 43 | /// assert!(!alloc.primary.owns(big_memory)); 44 | /// 45 | /// unsafe { 46 | /// // `big_memory` was allocated from `System`, we can dealloc it directly 47 | /// System.dealloc(big_memory.as_non_null_ptr(), Layout::new::<[u32; 64]>()); 48 | /// alloc.dealloc(small_memory.as_non_null_ptr(), Layout::new::()); 49 | /// }; 50 | /// # Ok::<(), core::alloc::AllocError>(()) 51 | /// ``` 52 | #[derive(Debug, Copy, Clone)] 53 | pub struct Fallback { 54 | /// The primary allocator 55 | pub primary: Primary, 56 | /// The fallback allocator 57 | pub secondary: Secondary, 58 | } 59 | 60 | unsafe impl 
AllocRef for Fallback 61 | where 62 | Primary: AllocRef + Owns, 63 | Secondary: AllocRef, 64 | { 65 | fn alloc(&self, layout: Layout) -> Result, AllocError> { 66 | match self.primary.alloc(layout) { 67 | primary @ Ok(_) => primary, 68 | Err(_) => self.secondary.alloc(layout), 69 | } 70 | } 71 | 72 | fn alloc_zeroed(&self, layout: Layout) -> Result, AllocError> { 73 | match self.primary.alloc_zeroed(layout) { 74 | primary @ Ok(_) => primary, 75 | Err(_) => self.secondary.alloc_zeroed(layout), 76 | } 77 | } 78 | 79 | unsafe fn dealloc(&self, ptr: NonNull, layout: Layout) { 80 | if self 81 | .primary 82 | .owns(NonNull::slice_from_raw_parts(ptr, layout.size())) 83 | { 84 | self.primary.dealloc(ptr, layout) 85 | } else { 86 | self.secondary.dealloc(ptr, layout) 87 | } 88 | } 89 | 90 | unsafe fn grow( 91 | &self, 92 | ptr: NonNull, 93 | old_layout: Layout, 94 | new_layout: Layout, 95 | ) -> Result, AllocError> { 96 | if self 97 | .primary 98 | .owns(NonNull::slice_from_raw_parts(ptr, old_layout.size())) 99 | { 100 | if let Ok(memory) = self.primary.grow(ptr, old_layout, new_layout) { 101 | Ok(memory) 102 | } else { 103 | grow_fallback( 104 | &self.primary, 105 | &self.secondary, 106 | ptr, 107 | old_layout, 108 | new_layout, 109 | AllocInit::Uninitialized, 110 | ) 111 | } 112 | } else { 113 | self.secondary.grow(ptr, old_layout, new_layout) 114 | } 115 | } 116 | 117 | unsafe fn grow_zeroed( 118 | &self, 119 | ptr: NonNull, 120 | old_layout: Layout, 121 | new_layout: Layout, 122 | ) -> Result, AllocError> { 123 | if self 124 | .primary 125 | .owns(NonNull::slice_from_raw_parts(ptr, old_layout.size())) 126 | { 127 | if let Ok(memory) = self.primary.grow_zeroed(ptr, old_layout, new_layout) { 128 | Ok(memory) 129 | } else { 130 | grow_fallback( 131 | &self.primary, 132 | &self.secondary, 133 | ptr, 134 | old_layout, 135 | new_layout, 136 | AllocInit::Zeroed, 137 | ) 138 | } 139 | } else { 140 | self.secondary.grow_zeroed(ptr, old_layout, new_layout) 141 | } 142 | } 143 | 
144 | unsafe fn shrink( 145 | &self, 146 | ptr: NonNull, 147 | old_layout: Layout, 148 | new_layout: Layout, 149 | ) -> Result, AllocError> { 150 | if self 151 | .primary 152 | .owns(NonNull::slice_from_raw_parts(ptr, old_layout.size())) 153 | { 154 | self.primary.shrink(ptr, old_layout, new_layout) 155 | } else { 156 | self.secondary.shrink(ptr, old_layout, new_layout) 157 | } 158 | } 159 | } 160 | 161 | impl Owns for Fallback 162 | where 163 | Primary: Owns, 164 | Secondary: Owns, 165 | { 166 | fn owns(&self, memory: NonNull<[u8]>) -> bool { 167 | self.primary.owns(memory) || self.secondary.owns(memory) 168 | } 169 | } 170 | 171 | #[cfg(test)] 172 | mod tests { 173 | use super::Fallback; 174 | use crate::{helper, region::Region, Chunk, Owns}; 175 | use alloc::alloc::Global; 176 | use core::{ 177 | alloc::{AllocRef, Layout}, 178 | mem::MaybeUninit, 179 | }; 180 | 181 | #[test] 182 | fn alloc() { 183 | let mut data = [MaybeUninit::new(0); 32]; 184 | let alloc = Fallback { 185 | primary: helper::tracker(Region::new(&mut data)), 186 | secondary: helper::tracker(Global), 187 | }; 188 | 189 | let small_memory = alloc 190 | .alloc(Layout::new::()) 191 | .expect("Could not allocate 4 bytes"); 192 | let big_memory = alloc 193 | .alloc(Layout::new::<[u8; 64]>()) 194 | .expect("Could not allocate 64 bytes"); 195 | 196 | assert!(alloc.primary.owns(small_memory)); 197 | assert!(!alloc.primary.owns(big_memory)); 198 | unsafe { 199 | alloc.dealloc(small_memory.as_non_null_ptr(), Layout::new::()); 200 | alloc.dealloc(big_memory.as_non_null_ptr(), Layout::new::<[u8; 64]>()); 201 | }; 202 | } 203 | 204 | #[test] 205 | fn grow() { 206 | let mut data = [MaybeUninit::new(0); 80]; 207 | let alloc = Fallback { 208 | primary: helper::tracker(Chunk::(Region::new(&mut data))), 209 | secondary: helper::tracker(Global), 210 | }; 211 | 212 | let memory = alloc 213 | .alloc(Layout::new::<[u8; 32]>()) 214 | .expect("Could not allocate 4 bytes"); 215 | assert!(alloc.primary.owns(memory)); 216 | 
217 | unsafe { 218 | let memory = alloc 219 | .grow( 220 | memory.as_non_null_ptr(), 221 | Layout::new::<[u8; 32]>(), 222 | Layout::new::<[u8; 64]>(), 223 | ) 224 | .expect("Could not grow to 64 bytes"); 225 | assert!(alloc.primary.owns(memory)); 226 | assert_eq!(memory.len(), 64); 227 | 228 | let memory = alloc 229 | .grow( 230 | memory.as_non_null_ptr(), 231 | Layout::new::<[u8; 64]>(), 232 | Layout::new::<[u8; 128]>(), 233 | ) 234 | .expect("Could not grow to 128 bytes"); 235 | assert!(!alloc.primary.owns(memory)); 236 | 237 | alloc.dealloc(memory.as_non_null_ptr(), Layout::new::<[u8; 128]>()); 238 | }; 239 | } 240 | 241 | #[test] 242 | fn shrink() { 243 | let mut data = [MaybeUninit::new(0); 80]; 244 | let alloc = Fallback { 245 | primary: helper::tracker(Chunk::(Region::new(&mut data))), 246 | secondary: helper::tracker(Global), 247 | }; 248 | 249 | let memory = alloc 250 | .alloc(Layout::new::<[u8; 64]>()) 251 | .expect("Could not allocate 64 bytes"); 252 | assert!(alloc.primary.owns(memory)); 253 | 254 | unsafe { 255 | let memory = alloc 256 | .shrink( 257 | memory.as_non_null_ptr(), 258 | Layout::new::<[u8; 64]>(), 259 | Layout::new::<[u8; 32]>(), 260 | ) 261 | .expect("Could not shrink to 32 bytes"); 262 | assert!(alloc.primary.owns(memory)); 263 | 264 | let memory = alloc 265 | .grow( 266 | memory.as_non_null_ptr(), 267 | Layout::new::<[u8; 32]>(), 268 | Layout::new::<[u8; 128]>(), 269 | ) 270 | .expect("Could not grow to 128 bytes"); 271 | assert!(!alloc.primary.owns(memory)); 272 | 273 | let memory = alloc 274 | .shrink( 275 | memory.as_non_null_ptr(), 276 | Layout::new::<[u8; 128]>(), 277 | Layout::new::<[u8; 96]>(), 278 | ) 279 | .expect("Could not shrink to 96 bytes"); 280 | assert!(!alloc.primary.owns(memory)); 281 | 282 | alloc.dealloc(memory.as_non_null_ptr(), Layout::new::<[u8; 96]>()); 283 | } 284 | } 285 | 286 | #[test] 287 | fn owns() { 288 | let mut data_1 = [MaybeUninit::new(0); 32]; 289 | let mut data_2 = [MaybeUninit::new(0); 64]; 290 | 
let alloc = Fallback { 291 | primary: Region::new(&mut data_1), 292 | secondary: Region::new(&mut data_2), 293 | }; 294 | 295 | let memory = alloc 296 | .alloc(Layout::new::<[u8; 32]>()) 297 | .expect("Could not allocate 32 bytes"); 298 | assert!(alloc.primary.owns(memory)); 299 | assert!(alloc.owns(memory)); 300 | 301 | let memory = alloc 302 | .alloc(Layout::new::<[u8; 64]>()) 303 | .expect("Could not allocate 64 bytes"); 304 | assert!(alloc.secondary.owns(memory)); 305 | assert!(alloc.owns(memory)); 306 | } 307 | } 308 | -------------------------------------------------------------------------------- /src/helper.rs: -------------------------------------------------------------------------------- 1 | use core::{ 2 | alloc::{AllocError, AllocRef, Layout}, 3 | ptr::{self, NonNull}, 4 | }; 5 | 6 | #[derive(Copy, Clone, PartialEq, Eq)] 7 | pub enum AllocInit { 8 | Uninitialized, 9 | Zeroed, 10 | } 11 | 12 | impl AllocInit { 13 | #[inline] 14 | pub unsafe fn init_offset(self, ptr: NonNull<[u8]>, offset: usize) { 15 | debug_assert!( 16 | offset <= ptr.len(), 17 | "`offset` must be smaller than or equal to `ptr.len()`" 18 | ); 19 | match self { 20 | Self::Uninitialized => (), 21 | Self::Zeroed => ptr 22 | .as_non_null_ptr() 23 | .as_ptr() 24 | .add(offset) 25 | .write_bytes(0, ptr.len() - offset), 26 | } 27 | } 28 | } 29 | 30 | // #[derive(Copy, Clone, PartialEq, Eq)] 31 | // pub enum ReallocPlacement { 32 | // MayMove, 33 | // InPlace, 34 | // } 35 | 36 | pub(in crate) unsafe fn grow_fallback( 37 | a1: &A1, 38 | a2: &A2, 39 | ptr: NonNull, 40 | old_layout: Layout, 41 | new_layout: Layout, 42 | init: AllocInit, 43 | ) -> Result, AllocError> { 44 | let new_ptr = match init { 45 | AllocInit::Uninitialized => a2.alloc(new_layout)?, 46 | AllocInit::Zeroed => a2.alloc_zeroed(new_layout)?, 47 | }; 48 | ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_layout.size()); 49 | a1.dealloc(ptr, old_layout); 50 | Ok(new_ptr) 51 | } 52 | 53 | pub(in crate) unsafe fn 
shrink_fallback( 54 | a1: &A1, 55 | a2: &A2, 56 | ptr: NonNull, 57 | old_layout: Layout, 58 | new_layout: Layout, 59 | ) -> Result, AllocError> { 60 | let new_ptr = a2.alloc(new_layout)?; 61 | ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_layout.size()); 62 | a1.dealloc(ptr, old_layout); 63 | Ok(new_ptr) 64 | } 65 | 66 | #[cfg(test)] 67 | pub fn tracker(alloc: A) -> crate::Proxy { 68 | crate::Proxy { 69 | alloc, 70 | callbacks: self::tests::Tracker::default(), 71 | } 72 | } 73 | 74 | #[cfg(test)] 75 | mod tests { 76 | use super::tracker; 77 | use crate::{CallbackRef, Chunk}; 78 | use alloc::{alloc::Global, collections::BTreeMap}; 79 | use core::{ 80 | alloc::{AllocError, AllocRef, Layout}, 81 | cell::RefCell, 82 | ptr::NonNull, 83 | }; 84 | 85 | #[cfg(test)] 86 | #[derive(Default)] 87 | pub struct Tracker { 88 | map: RefCell, (usize, Layout)>>, 89 | } 90 | 91 | impl Tracker { 92 | #[track_caller] 93 | fn assert_fit_memory(&self, ptr: NonNull, layout: Layout, name: &str) { 94 | let map = self.map.borrow(); 95 | let (size, old_layout) = map.get(&ptr).expect( 96 | "`ptr` must denote a block of memory currently allocated via this allocator", 97 | ); 98 | assert_eq!( 99 | layout.align(), 100 | old_layout.align(), 101 | "`{0}` must fit that block of memory. The block must be allocated with the same \ 102 | alignment as `{1}.align()`. Expected alignment of {1}, got {2}", 103 | name, 104 | old_layout.align(), 105 | layout.align() 106 | ); 107 | if *size == old_layout.size() { 108 | assert_eq!( 109 | layout.size(), 110 | old_layout.size(), 111 | "`{0}` must fit that block of memory. The provided `{0}.size()` must fall in \ 112 | the range `min ..= max`. Expected size of {1}, got {2}", 113 | name, 114 | old_layout.size(), 115 | layout.size() 116 | ) 117 | } else { 118 | assert!( 119 | layout.size() <= *size && layout.size() >= old_layout.size(), 120 | "`{0}` must fit that block of memory. 
The provided `{0}.size()` must fall in \ 121 | the range `min ..= max`. Expected size between `{1} ..= {2}`, got {3}", 122 | name, 123 | old_layout.size(), 124 | size, 125 | layout.size() 126 | ) 127 | } 128 | } 129 | } 130 | 131 | #[cfg(test)] 132 | unsafe impl CallbackRef for Tracker { 133 | fn after_allocate(&self, layout: Layout, result: Result, AllocError>) { 134 | if let Ok(ptr) = result { 135 | self.map 136 | .borrow_mut() 137 | .insert(ptr.as_non_null_ptr(), (ptr.len(), layout)); 138 | } 139 | } 140 | 141 | fn after_allocate_zeroed(&self, layout: Layout, result: Result, AllocError>) { 142 | self.after_allocate(layout, result) 143 | } 144 | 145 | fn after_allocate_all(&self, result: Result, AllocError>) { 146 | if let Ok(ptr) = result { 147 | let layout = 148 | Layout::from_size_align(ptr.len(), 1).expect("Invalid layout for allocate_all"); 149 | self.after_allocate(layout, result); 150 | } 151 | } 152 | 153 | fn after_allocate_all_zeroed(&self, result: Result, AllocError>) { 154 | self.after_allocate_all(result) 155 | } 156 | 157 | #[track_caller] 158 | fn before_deallocate(&self, ptr: NonNull, layout: Layout) { 159 | self.assert_fit_memory(ptr, layout, "layout"); 160 | } 161 | 162 | fn after_deallocate(&self, ptr: NonNull, _layout: Layout) { 163 | let mut map = self.map.borrow_mut(); 164 | map.remove(&ptr); 165 | } 166 | 167 | fn after_deallocate_all(&self) { 168 | let mut map = self.map.borrow_mut(); 169 | map.clear() 170 | } 171 | 172 | #[track_caller] 173 | fn before_grow(&self, ptr: NonNull, old_layout: Layout, new_layout: Layout) { 174 | self.assert_fit_memory(ptr, old_layout, "old_layout"); 175 | assert!( 176 | new_layout.size() >= old_layout.size(), 177 | "`new_layout.size()` must be greater than or equal to `old_layout.size()`, \ 178 | expected {} >= {}", 179 | new_layout.size(), 180 | old_layout.size() 181 | ); 182 | } 183 | 184 | fn after_grow( 185 | &self, 186 | ptr: NonNull, 187 | old_layout: Layout, 188 | new_layout: Layout, 189 | result: 
Result, AllocError>, 190 | ) { 191 | if result.is_ok() { 192 | self.after_deallocate(ptr, old_layout); 193 | self.after_allocate(new_layout, result); 194 | } 195 | } 196 | 197 | #[track_caller] 198 | fn before_grow_zeroed(&self, ptr: NonNull, old_layout: Layout, new_layout: Layout) { 199 | self.before_grow(ptr, old_layout, new_layout) 200 | } 201 | 202 | fn after_grow_zeroed( 203 | &self, 204 | ptr: NonNull, 205 | old_layout: Layout, 206 | new_layout: Layout, 207 | result: Result, AllocError>, 208 | ) { 209 | self.after_grow(ptr, old_layout, new_layout, result) 210 | } 211 | 212 | #[track_caller] 213 | fn before_grow_in_place(&self, ptr: NonNull, old_layout: Layout, new_layout: Layout) { 214 | self.before_grow(ptr, old_layout, new_layout) 215 | } 216 | 217 | fn after_grow_in_place( 218 | &self, 219 | ptr: NonNull, 220 | old_layout: Layout, 221 | new_layout: Layout, 222 | result: Result, 223 | ) { 224 | self.after_grow( 225 | ptr, 226 | old_layout, 227 | new_layout, 228 | result.map(|len| NonNull::slice_from_raw_parts(ptr, len)), 229 | ) 230 | } 231 | 232 | #[track_caller] 233 | fn before_grow_in_place_zeroed( 234 | &self, 235 | ptr: NonNull, 236 | old_layout: Layout, 237 | new_layout: Layout, 238 | ) { 239 | self.before_grow_in_place(ptr, old_layout, new_layout) 240 | } 241 | 242 | fn after_grow_in_place_zeroed( 243 | &self, 244 | ptr: NonNull, 245 | old_layout: Layout, 246 | new_layout: Layout, 247 | result: Result, 248 | ) { 249 | self.after_grow_in_place(ptr, old_layout, new_layout, result) 250 | } 251 | 252 | #[track_caller] 253 | fn before_shrink(&self, ptr: NonNull, old_layout: Layout, new_layout: Layout) { 254 | self.assert_fit_memory(ptr, old_layout, "old_layout"); 255 | assert!( 256 | new_layout.size() <= old_layout.size(), 257 | "`new_layout.size()` must be smaller than or equal to `old_layout.size()`, \ 258 | expected {} <= {}", 259 | new_layout.size(), 260 | old_layout.size() 261 | ); 262 | } 263 | 264 | fn after_shrink( 265 | &self, 266 | ptr: NonNull, 
267 | old_layout: Layout, 268 | new_layout: Layout, 269 | result: Result, AllocError>, 270 | ) { 271 | if result.is_ok() { 272 | self.after_deallocate(ptr, old_layout); 273 | self.after_allocate(new_layout, result); 274 | } 275 | } 276 | 277 | #[track_caller] 278 | fn before_shrink_in_place(&self, ptr: NonNull, old_layout: Layout, new_layout: Layout) { 279 | self.before_shrink(ptr, old_layout, new_layout) 280 | } 281 | 282 | fn after_shrink_in_place( 283 | &self, 284 | ptr: NonNull, 285 | old_layout: Layout, 286 | new_layout: Layout, 287 | result: Result, 288 | ) { 289 | self.after_shrink( 290 | ptr, 291 | old_layout, 292 | new_layout, 293 | result.map(|len| NonNull::slice_from_raw_parts(ptr, len)), 294 | ) 295 | } 296 | } 297 | 298 | struct DeallocGuard { 299 | allocator: A, 300 | ptr: NonNull, 301 | layout: Layout, 302 | } 303 | 304 | impl DeallocGuard { 305 | fn new(allocator: A, ptr: NonNull<[u8]>, layout: Layout) -> Self { 306 | Self { 307 | allocator, 308 | ptr: ptr.as_non_null_ptr(), 309 | layout, 310 | } 311 | } 312 | } 313 | 314 | impl Drop for DeallocGuard { 315 | fn drop(&mut self) { 316 | unsafe { self.allocator.dealloc(self.ptr, self.layout) } 317 | } 318 | } 319 | 320 | #[test] 321 | #[should_panic = "`new_layout.size()` must be greater than or equal to `old_layout.size()`"] 322 | fn tracker_grow_size_greater_layout() { 323 | let alloc = tracker(Global); 324 | let layout = Layout::new::<[u8; 4]>(); 325 | let memory = alloc.alloc(layout).expect("Could not allocate 4 bytes"); 326 | let _guard = DeallocGuard::new(Global, memory, layout); 327 | let _ = unsafe { alloc.grow(memory.as_non_null_ptr(), layout, Layout::new::<[u8; 2]>()) }; 328 | } 329 | 330 | #[test] 331 | #[should_panic = "`old_layout` must fit that block of memory"] 332 | fn tracker_grow_layout_size_exact() { 333 | let alloc = tracker(Global); 334 | let layout = Layout::new::<[u8; 4]>(); 335 | let memory = alloc.alloc(layout).expect("Could not allocate 4 bytes"); 336 | let _guard = 
DeallocGuard::new(Global, memory, layout); 337 | let _ = unsafe { 338 | alloc.grow( 339 | memory.as_non_null_ptr(), 340 | Layout::new::<[u8; 2]>(), 341 | Layout::new::<[u8; 10]>(), 342 | ) 343 | }; 344 | } 345 | 346 | #[test] 347 | #[should_panic = "`old_layout` must fit that block of memory"] 348 | fn tracker_grow_layout_size_range() { 349 | let alloc = tracker(Chunk::::default()); 350 | let layout = Layout::new::<[u8; 4]>(); 351 | let memory = alloc.alloc(layout).expect("Could not allocate 4 bytes"); 352 | let _guard = DeallocGuard::new(Chunk::::default(), memory, layout); 353 | let _ = unsafe { 354 | alloc.grow( 355 | memory.as_non_null_ptr(), 356 | Layout::new::<[u8; 2]>(), 357 | Layout::new::<[u8; 10]>(), 358 | ) 359 | }; 360 | } 361 | 362 | #[test] 363 | #[should_panic = "`old_layout` must fit that block of memory"] 364 | fn tracker_grow_layout_align() { 365 | let alloc = tracker(Global); 366 | let layout = Layout::new::<[u8; 4]>(); 367 | let memory = alloc.alloc(layout).expect("Could not allocate 4 bytes"); 368 | let _guard = DeallocGuard::new(Global, memory, layout); 369 | let _ = unsafe { 370 | alloc.grow( 371 | memory.as_non_null_ptr(), 372 | Layout::new::<[u16; 2]>(), 373 | Layout::new::<[u8; 4]>(), 374 | ) 375 | }; 376 | } 377 | 378 | #[test] 379 | #[should_panic = "`ptr` must denote a block of memory currently allocated via this allocator"] 380 | fn tracker_grow_ptr() { 381 | let alloc = tracker(Global); 382 | let layout = Layout::new::<[u8; 4]>(); 383 | let memory = alloc.alloc(layout).expect("Could not allocate 4 bytes"); 384 | let _guard = DeallocGuard::new(Global, memory, layout); 385 | let _ = unsafe { 386 | alloc.grow( 387 | NonNull::dangling(), 388 | Layout::new::<[u8; 4]>(), 389 | Layout::new::<[u8; 10]>(), 390 | ) 391 | }; 392 | } 393 | } 394 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![no_std] 2 | 
#![cfg_attr(doc, feature(doc_cfg, external_doc))]
#![cfg_attr(feature = "intrinsics", feature(core_intrinsics))]
#![cfg_attr(doc, doc(include = "../README.md"))]
#![feature(
    min_const_generics,
    specialization,
    allocator_api,
    nonnull_slice_from_raw_parts,
    const_nonnull_slice_from_raw_parts,
    slice_ptr_get,
    slice_ptr_len,
    const_slice_ptr_len
)]
#![cfg_attr(test, feature(maybe_uninit_slice))]
#![allow(incomplete_features, clippy::must_use_candidate)]

#[cfg(any(feature = "alloc", doc, test))]
extern crate alloc;

// pub mod stats;

mod helper;
#[macro_use]
mod macros;

// mod affix;
mod callback_ref;
mod chunk;
mod fallback;
mod null;
mod proxy;
pub mod region;
pub mod stats;
// mod segregate;

use core::{
    alloc::{AllocError, Layout},
    ptr::NonNull,
};

pub use self::{
    callback_ref::CallbackRef,
    chunk::Chunk,
    fallback::Fallback,
    null::Null,
    proxy::Proxy,
};

#[cfg(feature = "intrinsics")]
mod intrinsics {
    pub use core::intrinsics::{assume, unlikely};
}

// Stable stand-ins used when the `intrinsics` feature is disabled.
#[cfg(not(feature = "intrinsics"))]
mod intrinsics {
    #![allow(clippy::missing_const_for_fn, clippy::inline_always, unused)]

    /// Identity fallback for `core::intrinsics::unlikely` (no branch hint).
    #[inline(always)]
    pub fn unlikely(b: bool) -> bool {
        b
    }

    /// No-op fallback for `core::intrinsics::assume`.
    #[inline(always)]
    pub const unsafe fn assume(_: bool) {}
}

#[allow(unused_imports)]
use crate::intrinsics::{assume, unlikely};

// NOTE(review): empty module with a screaming-case name — purpose unclear
// from this file; possibly a leftover doc anchor. Confirm before removing.
#[allow(non_snake_case)]
mod SIZE {}

/// Extends `AllocRef` for allocating or deallocating all memory at once.
pub unsafe trait AllocateAll {
    /// Attempts to allocate all of the memory the allocator can provide.
    ///
    /// If the allocator is currently not managing any memory, then it returns all the memory
    /// available to the allocator. Subsequent calls are unlikely to succeed.
    ///
    /// On success, returns `[NonNull<[u8]>]` meeting the size and alignment guarantees of `layout`.
    ///
    /// The returned block may have a larger size than specified by `layout.size()`, and may or may
    /// not have its contents initialized.
    ///
    /// Also see [`AllocRef::alloc`]
    ///
    /// [`AllocRef::alloc`]: core::alloc::AllocRef::alloc
    ///
    /// # Errors
    ///
    /// Returning `Err` indicates that either memory is exhausted or `layout` does not meet
    /// allocators size or alignment constraints.
    ///
    /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or
    /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement
    /// this trait atop an underlying native allocation library that aborts on memory exhaustion.)
    ///
    /// Clients wishing to abort computation in response to an allocation error are encouraged to
    /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
    ///
    /// [`handle_alloc_error`]: https://doc.rust-lang.org/alloc/alloc/fn.handle_alloc_error.html
    fn allocate_all(&self) -> Result<NonNull<[u8]>, AllocError>;

    /// Behaves like `alloc_all`, but also ensures that the returned memory is zero-initialized.
    ///
    /// Also see [`AllocRef::alloc_zeroed`]
    ///
    /// [`AllocRef::alloc_zeroed`]: core::alloc::AllocRef::alloc_zeroed
    ///
    /// # Errors
    ///
    /// Returning `Err` indicates that either memory is exhausted or `layout` does not meet
    /// allocators size or alignment constraints.
    ///
    /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or
    /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement
    /// this trait atop an underlying native allocation library that aborts on memory exhaustion.)
    ///
    /// Clients wishing to abort computation in response to an allocation error are encouraged to
    /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
    ///
    /// [`handle_alloc_error`]: https://doc.rust-lang.org/alloc/alloc/fn.handle_alloc_error.html
    fn allocate_all_zeroed(&self) -> Result<NonNull<[u8]>, AllocError> {
        let ptr = self.allocate_all()?;
        // SAFETY: `allocate_all` returns a valid memory block
        unsafe { ptr.as_non_null_ptr().as_ptr().write_bytes(0, ptr.len()) }
        Ok(ptr)
    }

    /// Deallocates all the memory the allocator had allocated.
    fn deallocate_all(&self);

    /// Returns the total capacity available in this allocator.
    fn capacity(&self) -> usize;

    /// Returns the free capacity left for allocating.
    fn capacity_left(&self) -> usize;

    /// Returns if the allocator is currently not holding memory.
    fn is_empty(&self) -> bool {
        self.capacity() == self.capacity_left()
    }

    /// Returns if the allocator has no more capacity left.
    fn is_full(&self) -> bool {
        self.capacity_left() == 0
    }
}

/// Extends `AllocRef` to support growing and shrinking in place.
pub unsafe trait ReallocateInPlace {
    /// Attempts to extend the memory block.
    ///
    /// Returns the new actual size of the allocated memory. The pointer is suitable for holding
    /// data described by a new layout with `layout`’s alignment and a size given by `new_size`.
    /// To accomplish this, the allocator may extend the allocation referenced by `ptr` to fit the
    /// new layout.
    ///
    /// If this method returns `Err`, the allocator was not able to grow the memory without
    /// changing the pointer.
The ownership of the memory block has not been transferred to 161 | /// this allocator, and the contents of the memory block are unaltered. 162 | /// 163 | /// # Safety 164 | /// 165 | /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator. 166 | /// * `old_layout` must [*fit*] that block of memory (The `new_layout` argument need not fit it.). 167 | /// * `new_layout.size()` must be greater than or equal to `old_layout.size()`. 168 | /// 169 | /// [*currently allocated*]: https://doc.rust-lang.org/nightly/alloc/alloc/trait.AllocRef.html#currently-allocated-memory 170 | /// [*fit*]: https://doc.rust-lang.org/nightly/alloc/alloc/trait.AllocRef.html#memory-fitting 171 | /// 172 | /// # Errors 173 | /// 174 | /// Returns `Err` if the new layout does not meet the allocators size and alignment 175 | /// constraints of the allocator, or if growing otherwise fails. 176 | /// 177 | /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or 178 | /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement 179 | /// this trait atop an underlying native allocation library that aborts on memory exhaustion.) 180 | /// 181 | /// Clients wishing to abort computation in response to an allocation error are encouraged to 182 | /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar. 183 | /// 184 | /// [`handle_alloc_error`]: https://doc.rust-lang.org/alloc/alloc/fn.handle_alloc_error.html 185 | unsafe fn grow_in_place( 186 | &self, 187 | ptr: NonNull, 188 | old_layout: Layout, 189 | new_layout: Layout, 190 | ) -> Result; 191 | 192 | /// Behaves like `grow_in_place`, but also ensures that the new contents are set to zero before 193 | /// being returned. 
194 | /// 195 | /// The memory block will contain the following contents after a successful call to 196 | /// `grow_zeroed`: 197 | /// * Bytes `0..old_layout.size()` are preserved from the original allocation. 198 | /// * Bytes `old_layout.size()..old_size` will either be preserved or zeroed, depending on 199 | /// the allocator implementation. `old_size` refers to the size of the memory block prior 200 | /// to the `grow_zeroed` call, which may be larger than the size that was originally 201 | /// requested when it was allocated. 202 | /// * Bytes `old_size..new_size` are zeroed. `new_size` refers to the size of the memory 203 | /// block returned by the `grow_zeroed` call. 204 | /// 205 | /// # Safety 206 | /// 207 | /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator. 208 | /// * `old_layout` must [*fit*] that block of memory (The `new_layout` argument need not fit it.). 209 | /// * `new_layout.size()` must be greater than or equal to `old_layout.size()`. 210 | /// 211 | /// [*currently allocated*]: https://doc.rust-lang.org/nightly/alloc/alloc/trait.AllocRef.html#currently-allocated-memory 212 | /// [*fit*]: https://doc.rust-lang.org/nightly/alloc/alloc/trait.AllocRef.html#memory-fitting 213 | /// 214 | /// # Errors 215 | /// 216 | /// Returns `Err` if the new layout does not meet the allocators size and alignment 217 | /// constraints of the allocator, or if growing otherwise fails. 218 | /// 219 | /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or 220 | /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement 221 | /// this trait atop an underlying native allocation library that aborts on memory exhaustion.) 222 | /// 223 | /// Clients wishing to abort computation in response to an allocation error are encouraged to 224 | /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar. 
225 | /// 226 | /// [`handle_alloc_error`]: https://doc.rust-lang.org/alloc/alloc/fn.handle_alloc_error.html 227 | unsafe fn grow_in_place_zeroed( 228 | &self, 229 | ptr: NonNull, 230 | old_layout: Layout, 231 | new_layout: Layout, 232 | ) -> Result; 233 | 234 | /// Attempts to shrink the memory block. 235 | /// 236 | /// Returns the new actual size of the allocated memory. The pointer is suitable for holding 237 | /// data described by a new layout with `layout`’s alignment and a size given by `new_size`. 238 | /// To accomplish this, the allocator may extend the allocation referenced by `ptr` to fit the 239 | /// new layout. 240 | /// 241 | /// If this method returns `Err`, the allocator was not able to shrink the memory without 242 | /// changing the pointer. The ownership of the memory block has not been transferred to 243 | /// this allocator, and the contents of the memory block are unaltered. 244 | /// 245 | /// # Safety 246 | /// 247 | /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator. 248 | /// * `old_layout` must [*fit*] that block of memory (The `new_layout` argument need not fit it.). 249 | /// * `new_layout.size()` must be smaller than or equal to `old_layout.size()`. 250 | /// 251 | /// [*currently allocated*]: https://doc.rust-lang.org/nightly/alloc/alloc/trait.AllocRef.html#currently-allocated-memory 252 | /// [*fit*]: https://doc.rust-lang.org/nightly/alloc/alloc/trait.AllocRef.html#memory-fitting 253 | /// 254 | /// # Errors 255 | /// 256 | /// Returns `Err` if the new layout does not meet the allocator's size and alignment 257 | /// constraints of the allocator, or if shrinking otherwise fails. 258 | /// 259 | /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or 260 | /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement 261 | /// this trait atop an underlying native allocation library that aborts on memory exhaustion.) 
262 | /// 263 | /// Clients wishing to abort computation in response to an allocation error are encouraged to 264 | /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar. 265 | /// 266 | /// [`handle_alloc_error`]: https://doc.rust-lang.org/alloc/alloc/fn.handle_alloc_error.html 267 | unsafe fn shrink_in_place( 268 | &self, 269 | ptr: NonNull, 270 | old_layout: Layout, 271 | new_layout: Layout, 272 | ) -> Result; 273 | } 274 | 275 | /// Trait to determine if a given memory block is owned by an allocator. 276 | pub trait Owns { 277 | /// Returns if the allocator *owns* the passed memory. 278 | fn owns(&self, ptr: NonNull<[u8]>) -> bool; 279 | } 280 | 281 | macro_rules! impl_traits { 282 | ($(#[$meta:meta])* $ty:ty ) => { 283 | $(#[$meta])* 284 | unsafe impl AllocateAll for $ty 285 | where 286 | A: AllocateAll + ?Sized, 287 | { 288 | fn allocate_all(&self) -> Result, AllocError> { 289 | (**self).allocate_all() 290 | } 291 | 292 | fn allocate_all_zeroed(&self) -> Result, AllocError> { 293 | (**self).allocate_all_zeroed() 294 | } 295 | 296 | fn deallocate_all(&self) { 297 | (**self).deallocate_all() 298 | } 299 | 300 | fn capacity(&self) -> usize { 301 | (**self).capacity() 302 | } 303 | 304 | fn capacity_left(&self) -> usize { 305 | (**self).capacity_left() 306 | } 307 | 308 | fn is_empty(&self) -> bool { 309 | (**self).is_empty() 310 | } 311 | 312 | fn is_full(&self) -> bool { 313 | (**self).is_full() 314 | } 315 | } 316 | 317 | $(#[$meta])* 318 | unsafe impl ReallocateInPlace for $ty 319 | where 320 | A: ReallocateInPlace + ?Sized, 321 | { 322 | unsafe fn grow_in_place( 323 | &self, 324 | ptr: NonNull, 325 | old_layout: Layout, 326 | new_layout: Layout, 327 | ) -> Result { 328 | (**self).grow_in_place(ptr, old_layout, new_layout) 329 | } 330 | 331 | unsafe fn grow_in_place_zeroed( 332 | &self, 333 | ptr: NonNull, 334 | old_layout: Layout, 335 | new_layout: Layout, 336 | ) -> Result { 337 | (**self).grow_in_place_zeroed(ptr, 
old_layout, new_layout) 338 | } 339 | 340 | unsafe fn shrink_in_place( 341 | &self, 342 | ptr: NonNull, 343 | old_layout: Layout, 344 | new_layout: Layout, 345 | ) -> Result { 346 | (**self).shrink_in_place(ptr, old_layout, new_layout) 347 | } 348 | } 349 | 350 | $(#[$meta])* 351 | impl Owns for $ty 352 | where 353 | A: Owns + ?Sized, 354 | { 355 | fn owns(&self, ptr: NonNull<[u8]>) -> bool { 356 | (**self).owns(ptr) 357 | } 358 | } 359 | }; 360 | } 361 | 362 | impl_traits!(&A); 363 | #[cfg(any(doc, feature = "alloc"))] 364 | impl_traits!(#[cfg_attr(doc, doc(cfg(feature = "alloc")))] alloc::boxed::Box); 365 | #[cfg(any(doc, feature = "alloc"))] 366 | impl_traits!(#[cfg_attr(doc, doc(cfg(feature = "alloc")))] alloc::rc::Rc); 367 | #[cfg(any(doc, feature = "alloc"))] 368 | impl_traits!(#[cfg_attr(doc, doc(cfg(feature = "alloc")))] alloc::sync::Arc); 369 | 370 | #[track_caller] 371 | #[inline] 372 | fn check_dealloc_precondition(ptr: NonNull, layout: Layout) { 373 | debug_assert!( 374 | ptr.as_ptr() as usize >= layout.align(), 375 | "`ptr` allocated with the same alignment as `layout.align()`, expected {} >= {}", 376 | ptr.as_ptr() as usize, 377 | layout.align() 378 | ); 379 | } 380 | 381 | #[track_caller] 382 | #[inline] 383 | fn check_grow_precondition(ptr: NonNull, old_layout: Layout, new_layout: Layout) { 384 | debug_assert!( 385 | ptr.as_ptr() as usize >= old_layout.align(), 386 | "`ptr` allocated with the same alignment as `old_layout.align()`, expected {} >= {}", 387 | ptr.as_ptr() as usize, 388 | old_layout.align() 389 | ); 390 | debug_assert!( 391 | new_layout.size() >= old_layout.size(), 392 | "`new_layout.size()` must be greater than or equal to `old_layout.size()`, expected {} >= \ 393 | {}", 394 | new_layout.size(), 395 | old_layout.size() 396 | ); 397 | } 398 | 399 | #[track_caller] 400 | #[inline] 401 | fn check_shrink_precondition(ptr: NonNull, old_layout: Layout, new_layout: Layout) { 402 | debug_assert!( 403 | ptr.as_ptr() as usize >= 
old_layout.align(), 404 | "`ptr` allocated with the same alignment as `old_layout.align()`, expected {} >= {}", 405 | ptr.as_ptr() as usize, 406 | old_layout.align() 407 | ); 408 | debug_assert!( 409 | new_layout.size() <= old_layout.size(), 410 | "`new_layout.size()` must be smaller than or equal to `old_layout.size()`, expected {} <= \ 411 | {}", 412 | new_layout.size(), 413 | old_layout.size() 414 | ); 415 | } 416 | -------------------------------------------------------------------------------- /src/macros.rs: -------------------------------------------------------------------------------- 1 | macro_rules! impl_global_alloc { 2 | ($ty:path) => { 3 | unsafe impl core::alloc::GlobalAlloc for $ty { 4 | unsafe fn alloc(&self, layout: core::alloc::Layout) -> *mut u8 { 5 | core::alloc::AllocRef::alloc(&self, layout) 6 | .map(core::ptr::NonNull::as_mut_ptr) 7 | .unwrap_or(core::ptr::null_mut()) 8 | } 9 | 10 | unsafe fn dealloc(&self, ptr: *mut u8, layout: core::alloc::Layout) { 11 | core::alloc::AllocRef::dealloc( 12 | &self, 13 | core::ptr::NonNull::new_unchecked(ptr), 14 | layout, 15 | ) 16 | } 17 | 18 | unsafe fn alloc_zeroed(&self, layout: core::alloc::Layout) -> *mut u8 { 19 | core::alloc::AllocRef::alloc_zeroed(&self, layout) 20 | .map(core::ptr::NonNull::as_mut_ptr) 21 | .unwrap_or(core::ptr::null_mut()) 22 | } 23 | 24 | unsafe fn realloc( 25 | &self, 26 | ptr: *mut u8, 27 | layout: core::alloc::Layout, 28 | new_size: usize, 29 | ) -> *mut u8 { 30 | if new_size > layout.size() { 31 | core::alloc::AllocRef::grow( 32 | &self, 33 | core::ptr::NonNull::new_unchecked(ptr), 34 | layout, 35 | core::alloc::Layout::from_size_align_unchecked(new_size, layout.align()), 36 | ) 37 | .map(core::ptr::NonNull::as_mut_ptr) 38 | .unwrap_or(core::ptr::null_mut()) 39 | } else { 40 | core::alloc::AllocRef::shrink( 41 | &self, 42 | core::ptr::NonNull::new_unchecked(ptr), 43 | layout, 44 | core::alloc::Layout::from_size_align_unchecked(new_size, layout.align()), 45 | ) 46 | 
.map(core::ptr::NonNull::as_mut_ptr)
                    .unwrap_or(core::ptr::null_mut())
                }
            }
        }
    };
}

// Forwards the `AllocRef` methods of a composing allocator to the member
// named by `$parent`, routing through the type's `*_impl` hooks and checking
// the grow/shrink preconditions in debug builds.
macro_rules! impl_alloc_ref {
    ($parent:tt) => {
        fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            Self::alloc_impl(layout, |l| self.$parent.alloc(l))
        }

        fn alloc_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            Self::alloc_impl(layout, |l| self.$parent.alloc_zeroed(l))
        }

        unsafe fn grow(
            &self,
            ptr: NonNull<u8>,
            old_layout: Layout,
            new_layout: Layout,
        ) -> Result<NonNull<[u8]>, AllocError> {
            crate::check_grow_precondition(ptr, old_layout, new_layout);
            Self::grow_impl(
                ptr,
                old_layout,
                new_layout,
                AllocInit::Uninitialized,
                |ptr, old_layout, new_layout| self.$parent.grow(ptr, old_layout, new_layout),
            )
        }

        unsafe fn grow_zeroed(
            &self,
            ptr: NonNull<u8>,
            old_layout: Layout,
            new_layout: Layout,
        ) -> Result<NonNull<[u8]>, AllocError> {
            crate::check_grow_precondition(ptr, old_layout, new_layout);
            Self::grow_impl(
                ptr,
                old_layout,
                new_layout,
                AllocInit::Zeroed,
                |ptr, old_layout, new_layout| self.$parent.grow_zeroed(ptr, old_layout, new_layout),
            )
        }

        unsafe fn shrink(
            &self,
            ptr: NonNull<u8>,
            old_layout: Layout,
            new_layout: Layout,
        ) -> Result<NonNull<[u8]>, AllocError> {
            crate::check_shrink_precondition(ptr, old_layout, new_layout);
            Self::shrink_impl(
                ptr,
                old_layout,
                new_layout,
                |ptr, old_layout, new_layout| self.$parent.shrink(ptr, old_layout, new_layout),
            )
        }
    };
}

// Forwards the `AllocateAll` methods to the member named by `$parent`.
macro_rules! impl_alloc_all {
    ($parent:tt) => {
        fn allocate_all(&self) -> Result<NonNull<[u8]>, AllocError> {
            Self::allocate_all_impl(|| self.$parent.allocate_all())
        }

        fn allocate_all_zeroed(&self) -> Result<NonNull<[u8]>, AllocError> {
            Self::allocate_all_impl(|| self.$parent.allocate_all_zeroed())
        }

        fn deallocate_all(&self) {
            self.$parent.deallocate_all()
        }

        fn capacity(&self) -> usize {
            self.$parent.capacity()
        }

        fn capacity_left(&self) -> usize {
            self.$parent.capacity_left()
        }
    };
}

// Forwards the `ReallocateInPlace` methods to the member named by `$parent`,
// converting between length results and `NonNull<[u8]>` for the hooks.
macro_rules! impl_realloc_in_place {
    ($parent:tt) => {
        unsafe fn grow_in_place(
            &self,
            ptr: NonNull<u8>,
            old_layout: Layout,
            new_layout: Layout,
        ) -> Result<usize, AllocError> {
            crate::check_grow_precondition(ptr, old_layout, new_layout);
            Self::grow_impl(
                ptr,
                old_layout,
                new_layout,
                AllocInit::Uninitialized,
                |ptr, old_layout, new_layout| {
                    crate::check_grow_precondition(ptr, old_layout, new_layout);
                    self.$parent
                        .grow_in_place(ptr, old_layout, new_layout)
                        .map(|len| NonNull::slice_from_raw_parts(ptr, len))
                },
            )
            .map(NonNull::len)
        }

        unsafe fn grow_in_place_zeroed(
            &self,
            ptr: NonNull<u8>,
            old_layout: Layout,
            new_layout: Layout,
        ) -> Result<usize, AllocError> {
            crate::check_grow_precondition(ptr, old_layout, new_layout);
            Self::grow_impl(
                ptr,
                old_layout,
                new_layout,
                AllocInit::Zeroed,
                |ptr, old_layout, new_layout| {
                    crate::check_grow_precondition(ptr, old_layout, new_layout);
                    self.$parent
                        .grow_in_place_zeroed(ptr, old_layout, new_layout)
                        .map(|len| NonNull::slice_from_raw_parts(ptr, len))
                },
            )
            .map(NonNull::len)
        }

        unsafe fn shrink_in_place(
            &self,
            ptr: NonNull<u8>,
            old_layout: Layout,
            new_layout: Layout,
        ) -> Result<usize, AllocError> {
            crate::check_shrink_precondition(ptr, old_layout, new_layout);
            Self::shrink_impl(
                ptr,
                old_layout,
                new_layout,
                |ptr, old_layout, new_layout| {
                    crate::check_shrink_precondition(ptr, old_layout, new_layout);
                    self.$parent
                        .shrink_in_place(ptr, old_layout, new_layout)
                        .map(|len| NonNull::slice_from_raw_parts(ptr, len))
                },
            )
            .map(NonNull::len)
        }
    };
}

// Specializable default impls: identical to `impl_realloc_in_place` except
// the fallback closure always fails, for parents without in-place support.
macro_rules! impl_realloc_in_place_spec {
    ($parent:tt) => {
        default unsafe fn grow_in_place(
            &self,
            ptr: NonNull<u8>,
            old_layout: Layout,
            new_layout: Layout,
        ) -> Result<usize, AllocError> {
            crate::check_grow_precondition(ptr, old_layout, new_layout);
            Self::grow_impl(
                ptr,
                old_layout,
                new_layout,
                AllocInit::Uninitialized,
                |ptr, old_layout, new_layout| {
                    crate::check_grow_precondition(ptr, old_layout, new_layout);
                    Err(AllocError)
                },
            )
            .map(NonNull::len)
        }

        default unsafe fn grow_in_place_zeroed(
            &self,
            ptr: NonNull<u8>,
            old_layout: Layout,
            new_layout: Layout,
        ) -> Result<usize, AllocError> {
            crate::check_grow_precondition(ptr, old_layout, new_layout);
            Self::grow_impl(
                ptr,
                old_layout,
                new_layout,
                AllocInit::Zeroed,
                |ptr, old_layout, new_layout| {
                    crate::check_grow_precondition(ptr, old_layout, new_layout);
                    Err(AllocError)
                },
            )
            .map(NonNull::len)
        }

        default unsafe fn shrink_in_place(
            &self,
            ptr: NonNull<u8>,
            old_layout: Layout,
            new_layout: Layout,
        ) -> Result<usize, AllocError> {
            crate::check_shrink_precondition(ptr, old_layout, new_layout);
            Self::shrink_impl(
                ptr,
                old_layout,
                new_layout,
                |ptr, old_layout, new_layout| {
                    crate::check_shrink_precondition(ptr, old_layout, new_layout);
                    Err(AllocError)
                },
            )
            .map(NonNull::len)
        }
    };
}
-------------------------------------------------------------------------------- /src/null.rs: -------------------------------------------------------------------------------- 1 | use crate::{AllocateAll, Owns, ReallocateInPlace}; 2 | use core::{ 3 | alloc::{AllocError, AllocRef, Layout}, 4 | ptr::NonNull, 5 | }; 6 | 7 | /// An emphatically empty implementation of `AllocRef`. 8 | /// 9 | /// Although it has no direct use, it is useful as a "terminator" in composite allocators 10 | /// or for disabling the global allocator. 11 | /// 12 | /// # Examples 13 | /// 14 | /// The `Null` will always return `Err`: 15 | /// 16 | /// ```rust 17 | /// #![feature(allocator_api)] 18 | /// 19 | /// use alloc_compose::Null; 20 | /// use std::alloc::{AllocRef, Global, Layout}; 21 | /// 22 | /// let memory = Null.alloc(Layout::new::()); 23 | /// assert!(memory.is_err()) 24 | /// ``` 25 | /// 26 | /// Even if a zero-sized allocation is requested: 27 | /// 28 | /// ```rust 29 | /// # #![feature(allocator_api)] 30 | /// # use alloc_compose::Null; 31 | /// # use std::alloc::{AllocRef, Global, Layout}; 32 | /// let memory = Null.alloc(Layout::new::<()>()); 33 | /// assert!(memory.is_err()) 34 | /// ``` 35 | /// 36 | /// ## Disabling the global allocator 37 | /// 38 | /// ```rust, no_run 39 | /// use alloc_compose::Null; 40 | /// 41 | /// #[global_allocator] 42 | /// static A: Null = Null; 43 | /// ``` 44 | #[derive(Debug, Copy, Clone)] 45 | pub struct Null; 46 | 47 | unsafe impl AllocRef for Null { 48 | /// Will always return `Err(AllocErr)`. 49 | fn alloc(&self, _layout: Layout) -> Result, AllocError> { 50 | Err(AllocError) 51 | } 52 | 53 | /// Will always return `Err(AllocErr)`. 54 | fn alloc_zeroed(&self, _layout: Layout) -> Result, AllocError> { 55 | Err(AllocError) 56 | } 57 | 58 | /// Must not be called, as allocation always fails. 
59 | unsafe fn dealloc(&self, _ptr: NonNull, _layout: Layout) { 60 | unreachable!("Null::dealloc must never be called as allocation always fails") 61 | } 62 | 63 | /// Must not be called, as allocation always fails. 64 | unsafe fn grow( 65 | &self, 66 | _ptr: NonNull, 67 | _old_layout: Layout, 68 | _new_layout: Layout, 69 | ) -> Result, AllocError> { 70 | unreachable!("Null::grow must never be called as allocation always fails") 71 | } 72 | 73 | /// Must not be called, as allocation always fails. 74 | unsafe fn grow_zeroed( 75 | &self, 76 | _ptr: NonNull, 77 | _old_layout: Layout, 78 | _new_layout: Layout, 79 | ) -> Result, AllocError> { 80 | unreachable!("Null::grow_zeroed must never be called as allocation always fails") 81 | } 82 | 83 | /// Must not be called, as allocation always fails. 84 | unsafe fn shrink( 85 | &self, 86 | _ptr: NonNull, 87 | _old_layout: Layout, 88 | _new_layout: Layout, 89 | ) -> Result, AllocError> { 90 | unreachable!("Null::shrink must never be called as allocation always fails") 91 | } 92 | } 93 | 94 | unsafe impl AllocateAll for Null { 95 | fn allocate_all(&self) -> Result, AllocError> { 96 | Err(AllocError) 97 | } 98 | 99 | fn allocate_all_zeroed(&self) -> Result, AllocError> { 100 | Err(AllocError) 101 | } 102 | 103 | fn deallocate_all(&self) {} 104 | 105 | fn capacity(&self) -> usize { 106 | 0 107 | } 108 | 109 | fn capacity_left(&self) -> usize { 110 | 0 111 | } 112 | } 113 | 114 | unsafe impl ReallocateInPlace for Null { 115 | /// Must not be called, as allocation always fails. 116 | unsafe fn grow_in_place( 117 | &self, 118 | _ptr: NonNull, 119 | _old_layout: Layout, 120 | _new_layout: Layout, 121 | ) -> Result { 122 | unreachable!("Null::grow_in_place must never be called as allocation always fails") 123 | } 124 | 125 | /// Must not be called, as allocation always fails. 
126 | unsafe fn grow_in_place_zeroed( 127 | &self, 128 | _ptr: NonNull, 129 | _old_layout: Layout, 130 | _new_layout: Layout, 131 | ) -> Result { 132 | unreachable!("Null::grow_in_place_zeroed must never be called as allocation always fails") 133 | } 134 | 135 | /// Must not be called, as allocation always fails. 136 | unsafe fn shrink_in_place( 137 | &self, 138 | _ptr: NonNull, 139 | _old_layout: Layout, 140 | _new_layout: Layout, 141 | ) -> Result { 142 | unreachable!("Null::shrink_in_place must never be called as allocation always fails") 143 | } 144 | } 145 | 146 | impl Owns for Null { 147 | /// Will always return `false. 148 | fn owns(&self, _memory: NonNull<[u8]>) -> bool { 149 | false 150 | } 151 | } 152 | 153 | impl_global_alloc!(Null); 154 | 155 | #[cfg(test)] 156 | mod tests { 157 | #![allow(clippy::wildcard_imports)] 158 | use super::*; 159 | 160 | #[test] 161 | #[should_panic(expected = "unreachable")] 162 | fn dealloc() { 163 | unsafe { Null.dealloc(NonNull::dangling(), Layout::new::<()>()) }; 164 | } 165 | 166 | #[test] 167 | fn alloc() { 168 | assert!(Null.alloc(Layout::new::()).is_err()); 169 | assert!(Null.alloc_zeroed(Layout::new::()).is_err()); 170 | assert!(Null.allocate_all().is_err()); 171 | assert!(Null.allocate_all_zeroed().is_err()); 172 | assert_eq!(Null.capacity(), 0); 173 | assert_eq!(Null.capacity_left(), 0); 174 | Null.deallocate_all(); 175 | } 176 | 177 | #[test] 178 | #[should_panic(expected = "unreachable")] 179 | fn grow() { 180 | unsafe { 181 | let _ = Null.grow( 182 | NonNull::dangling(), 183 | Layout::new::<()>(), 184 | Layout::new::<()>(), 185 | ); 186 | }; 187 | } 188 | 189 | #[test] 190 | #[should_panic(expected = "unreachable")] 191 | fn grow_zeroed() { 192 | unsafe { 193 | let _ = Null.grow_zeroed( 194 | NonNull::dangling(), 195 | Layout::new::<()>(), 196 | Layout::new::<()>(), 197 | ); 198 | }; 199 | } 200 | 201 | #[test] 202 | #[should_panic(expected = "unreachable")] 203 | fn grow_in_place() { 204 | unsafe { 205 | let _ 
= Null.grow_in_place(
                NonNull::dangling(),
                Layout::new::<()>(),
                Layout::new::<()>(),
            );
        };
    }

    #[test]
    #[should_panic(expected = "unreachable")]
    fn grow_in_place_zeroed() {
        unsafe {
            let _ = Null.grow_in_place_zeroed(
                NonNull::dangling(),
                Layout::new::<()>(),
                Layout::new::<()>(),
            );
        };
    }

    #[test]
    #[should_panic(expected = "unreachable")]
    fn shrink() {
        unsafe {
            let _ = Null.shrink(
                NonNull::dangling(),
                Layout::new::<()>(),
                Layout::new::<()>(),
            );
        };
    }

    #[test]
    #[should_panic(expected = "unreachable")]
    fn shrink_in_place() {
        unsafe {
            let _ = Null.shrink_in_place(
                NonNull::dangling(),
                Layout::new::<()>(),
                Layout::new::<()>(),
            );
        };
    }

    #[test]
    fn owns() {
        assert!(!Null.owns(NonNull::slice_from_raw_parts(NonNull::dangling(), 0)));
    }

    #[test]
    fn debug() {
        assert_eq!(alloc::format!("{:?}", Null), "Null");
    }
}
-------------------------------------------------------------------------------- /src/proxy.rs: --------------------------------------------------------------------------------
use crate::{AllocateAll, CallbackRef, Owns, ReallocateInPlace};
use core::{
    alloc::{AllocError, AllocRef, Layout},
    ptr::NonNull,
};

/// Calls the provided callbacks when invoking methods on `AllocRef`.
///
/// A typical use case for a `Proxy` allocator is collecting statistics. `alloc-compose` provides
/// different implementations for [`CallbackRef`][].
11 | /// 12 | /// # Examples 13 | /// 14 | /// ```rust 15 | /// #![feature(allocator_api, slice_ptr_get)] 16 | /// 17 | /// use alloc_compose::{stats, CallbackRef, Proxy}; 18 | /// use std::alloc::{AllocRef, Layout, System}; 19 | /// 20 | /// let counter = stats::Counter::default(); 21 | /// let mut alloc = Proxy { 22 | /// alloc: System, 23 | /// callbacks: counter.by_ref(), 24 | /// }; 25 | /// 26 | /// unsafe { 27 | /// let memory = alloc.alloc(Layout::new::())?; 28 | /// alloc.dealloc(memory.as_non_null_ptr(), Layout::new::()); 29 | /// } 30 | /// 31 | /// assert_eq!(counter.num_allocs(), 1); 32 | /// assert_eq!(counter.num_deallocs(), 1); 33 | /// # Ok::<(), core::alloc::AllocError>(()) 34 | /// ``` 35 | /// 36 | /// If more information is needed, one can either implement `CallbackRef` itself or use a more 37 | /// fine-grained callback: 38 | /// 39 | /// ```rust 40 | /// # #![feature(allocator_api, slice_ptr_get)] 41 | /// # use alloc_compose::{stats, CallbackRef, Proxy}; 42 | /// # use std::alloc::{AllocRef, Layout}; 43 | /// use alloc_compose::{ 44 | /// region::Region, 45 | /// stats::{AllocInitFilter, ResultFilter}, 46 | /// }; 47 | /// use core::mem::MaybeUninit; 48 | /// 49 | /// let counter = stats::FilteredCounter::default(); 50 | /// let mut data = [MaybeUninit::new(0); 32]; 51 | /// let mut alloc = Proxy { 52 | /// alloc: Region::new(&mut data), 53 | /// callbacks: counter.by_ref(), 54 | /// }; 55 | /// 56 | /// unsafe { 57 | /// let memory = alloc.alloc(Layout::new::())?; 58 | /// alloc.dealloc(memory.as_non_null_ptr(), Layout::new::()); 59 | /// 60 | /// alloc.alloc_zeroed(Layout::new::<[u32; 64]>()).unwrap_err(); 61 | /// } 62 | /// 63 | /// assert_eq!(counter.num_allocates(), 2); 64 | /// assert_eq!( 65 | /// counter.num_allocates_filter(AllocInitFilter::None, ResultFilter::Ok), 66 | /// 1 67 | /// ); 68 | /// assert_eq!( 69 | /// counter.num_allocates_filter(AllocInitFilter::Zeroed, ResultFilter::Err), 70 | /// 1 71 | /// ); 72 | /// # Ok::<(), 
core::alloc::AllocError>(()) 73 | /// ``` 74 | #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] 75 | pub struct Proxy { 76 | pub alloc: A, 77 | pub callbacks: C, 78 | } 79 | 80 | unsafe impl AllocRef for Proxy { 81 | #[track_caller] 82 | fn alloc(&self, layout: Layout) -> Result, AllocError> { 83 | self.callbacks.before_allocate(layout); 84 | let result = self.alloc.alloc(layout); 85 | self.callbacks.after_allocate(layout, result); 86 | result 87 | } 88 | 89 | #[track_caller] 90 | fn alloc_zeroed(&self, layout: Layout) -> Result, AllocError> { 91 | self.callbacks.before_allocate_zeroed(layout); 92 | let result = self.alloc.alloc_zeroed(layout); 93 | self.callbacks.after_allocate_zeroed(layout, result); 94 | result 95 | } 96 | 97 | #[track_caller] 98 | unsafe fn dealloc(&self, ptr: NonNull, layout: Layout) { 99 | crate::check_dealloc_precondition(ptr, layout); 100 | self.callbacks.before_deallocate(ptr, layout); 101 | self.alloc.dealloc(ptr, layout); 102 | self.callbacks.after_deallocate(ptr, layout); 103 | } 104 | 105 | #[track_caller] 106 | unsafe fn grow( 107 | &self, 108 | ptr: NonNull, 109 | old_layout: Layout, 110 | new_layout: Layout, 111 | ) -> Result, AllocError> { 112 | crate::check_grow_precondition(ptr, old_layout, new_layout); 113 | self.callbacks.before_grow(ptr, old_layout, new_layout); 114 | let result = self.alloc.grow(ptr, old_layout, new_layout); 115 | self.callbacks 116 | .after_grow(ptr, old_layout, new_layout, result); 117 | result 118 | } 119 | 120 | #[track_caller] 121 | unsafe fn grow_zeroed( 122 | &self, 123 | ptr: NonNull, 124 | old_layout: Layout, 125 | new_layout: Layout, 126 | ) -> Result, AllocError> { 127 | crate::check_grow_precondition(ptr, old_layout, new_layout); 128 | self.callbacks 129 | .before_grow_zeroed(ptr, old_layout, new_layout); 130 | let result = self.alloc.grow_zeroed(ptr, old_layout, new_layout); 131 | self.callbacks 132 | .after_grow_zeroed(ptr, old_layout, new_layout, result); 133 | result 134 | } 
135 | 136 | #[track_caller] 137 | unsafe fn shrink( 138 | &self, 139 | ptr: NonNull, 140 | old_layout: Layout, 141 | new_layout: Layout, 142 | ) -> Result, AllocError> { 143 | crate::check_shrink_precondition(ptr, old_layout, new_layout); 144 | self.callbacks.before_shrink(ptr, old_layout, new_layout); 145 | let result = self.alloc.shrink(ptr, old_layout, new_layout); 146 | self.callbacks 147 | .after_shrink(ptr, old_layout, new_layout, result); 148 | result 149 | } 150 | } 151 | 152 | unsafe impl AllocateAll for Proxy { 153 | #[track_caller] 154 | fn allocate_all(&self) -> Result, AllocError> { 155 | self.callbacks.before_allocate_all(); 156 | let result = self.alloc.allocate_all(); 157 | self.callbacks.after_allocate_all(result); 158 | result 159 | } 160 | 161 | #[track_caller] 162 | fn allocate_all_zeroed(&self) -> Result, AllocError> { 163 | self.callbacks.before_allocate_all_zeroed(); 164 | let result = self.alloc.allocate_all_zeroed(); 165 | self.callbacks.after_allocate_all_zeroed(result); 166 | result 167 | } 168 | 169 | #[track_caller] 170 | fn deallocate_all(&self) { 171 | self.callbacks.before_deallocate_all(); 172 | self.alloc.deallocate_all(); 173 | self.callbacks.after_deallocate_all(); 174 | } 175 | 176 | #[track_caller] 177 | #[inline] 178 | fn capacity(&self) -> usize { 179 | self.alloc.capacity() 180 | } 181 | 182 | #[track_caller] 183 | #[inline] 184 | fn capacity_left(&self) -> usize { 185 | self.alloc.capacity_left() 186 | } 187 | 188 | #[track_caller] 189 | #[inline] 190 | fn is_empty(&self) -> bool { 191 | self.alloc.is_empty() 192 | } 193 | 194 | #[track_caller] 195 | #[inline] 196 | fn is_full(&self) -> bool { 197 | self.alloc.is_full() 198 | } 199 | } 200 | 201 | unsafe impl ReallocateInPlace for Proxy { 202 | #[track_caller] 203 | unsafe fn grow_in_place( 204 | &self, 205 | ptr: NonNull, 206 | old_layout: Layout, 207 | new_layout: Layout, 208 | ) -> Result { 209 | crate::check_grow_precondition(ptr, old_layout, new_layout); 210 | 
self.callbacks 211 | .before_grow_in_place(ptr, old_layout, new_layout); 212 | let result = self.alloc.grow_in_place(ptr, old_layout, new_layout); 213 | self.callbacks 214 | .after_grow_in_place(ptr, old_layout, new_layout, result); 215 | result 216 | } 217 | 218 | #[track_caller] 219 | unsafe fn grow_in_place_zeroed( 220 | &self, 221 | ptr: NonNull, 222 | old_layout: Layout, 223 | new_layout: Layout, 224 | ) -> Result { 225 | crate::check_grow_precondition(ptr, old_layout, new_layout); 226 | self.callbacks 227 | .before_grow_in_place_zeroed(ptr, old_layout, new_layout); 228 | let result = self.alloc.grow_in_place_zeroed(ptr, old_layout, new_layout); 229 | self.callbacks 230 | .after_grow_in_place_zeroed(ptr, old_layout, new_layout, result); 231 | result 232 | } 233 | 234 | #[track_caller] 235 | unsafe fn shrink_in_place( 236 | &self, 237 | ptr: NonNull, 238 | old_layout: Layout, 239 | new_layout: Layout, 240 | ) -> Result { 241 | crate::check_shrink_precondition(ptr, old_layout, new_layout); 242 | self.callbacks 243 | .before_shrink_in_place(ptr, old_layout, new_layout); 244 | let result = self.alloc.shrink_in_place(ptr, old_layout, new_layout); 245 | self.callbacks 246 | .after_shrink_in_place(ptr, old_layout, new_layout, result); 247 | result 248 | } 249 | } 250 | 251 | impl Owns for Proxy { 252 | fn owns(&self, ptr: NonNull<[u8]>) -> bool { 253 | self.callbacks.before_owns(); 254 | let owns = self.alloc.owns(ptr); 255 | self.callbacks.after_owns(owns); 256 | owns 257 | } 258 | } 259 | -------------------------------------------------------------------------------- /src/region/mod.rs: -------------------------------------------------------------------------------- 1 | //! Stack-based allocators with user-provided memory 2 | //! 3 | //! A region allocator allocates memory straight from one contiguous chunk. There is no 4 | //! deallocation, and once the region is full, allocation requests returns [`AllocError`]. 5 | //! 
A region only stores a reference to the provided memory and a pointer to the current position. 6 | //! 7 | //! This module provides three kinds of stack-based allocators: [`Region`], [`SharedRegion`], and 8 | //! [`IntrusiveRegion`]. All three allocators uses a user-provided memory to allocate and differ 9 | //! in the way how they store the pointer to the current position. 10 | //! 11 | //! # Which region allocator to chose? 12 | //! 13 | //! Every region allocator is more or less on-par. They slightly differ in performance depending 14 | //! on how many allocations are made and how big the allocations are. 15 | //! 16 | //! - [`Region`] stores a current position in a [`Cell`] right next to the reference to the memory. 17 | //! - [`SharedRegion`] wraps the [`Cell`] in a [`RC`] to support cloning of the allocator. 18 | //! - [`IntrusiveRegion`] stores the pointer to the current position at the end of the provided 19 | //! memory block. 20 | //! 21 | //! This results in the fact, that [`Region`] cannot be cloned. However, using [`AllocRef::by_ref`] 22 | //! returns a reference to the region, which can itself be cloned. 23 | //! [`SharedRegion`] and [`IntrusiveRegion`] both can be cloned. The [`IntrusiveRegion`] has a 24 | //! better performance in most cases due to cache coherence, but it's hard to say exactly, how much 25 | //! capacity the allocator will have exactly, as the pointer to the current position has to be well 26 | //! aligned. If this feature is important, [`SharedRegion`] or [`Region`] should be used instead. 27 | //! [`SharedRegion`] is only available with the `alloc`-feature, as it requires the [`Rc`] to 28 | //! allocate memory to store the pointer in. 29 | //! 30 | //! [`Rc`]: alloc::rc::Rc 31 | //! [`Cell`]: core::cell::Cell 32 | //! 33 | //! ## Examples 34 | //! 35 | //! ```rust 36 | //! #![feature(allocator_api)] 37 | //! 38 | //! use alloc_compose::{region::Region, Owns}; 39 | //! use core::{ 40 | //! alloc::{AllocRef, Layout}, 41 | //! 
mem::MaybeUninit, 42 | //! }; 43 | //! 44 | //! let mut data = [MaybeUninit::uninit(); 64]; 45 | //! let region = Region::new(&mut data); 46 | //! 47 | //! let memory = region.alloc(Layout::new::())?; 48 | //! assert!(region.owns(memory)); 49 | //! # Ok::<(), core::alloc::AllocError>(()) 50 | //! ``` 51 | //! 52 | //! This allocator can also be used in collection types of the std-library: 53 | //! 54 | //! ```rust 55 | //! #![feature(nonnull_slice_from_raw_parts)] 56 | //! # #![feature(allocator_api)] 57 | //! # use alloc_compose::{region::Region, Owns}; 58 | //! # use core::{alloc::{AllocRef, Layout}, mem::MaybeUninit}; 59 | //! # let mut data = [MaybeUninit::uninit(); 64]; 60 | //! # let region = Region::new(&mut data); 61 | //! 62 | //! use core::ptr::NonNull; 63 | //! 64 | //! let mut vec: Vec = Vec::new_in(region.by_ref()); 65 | //! vec.extend(&[10, 20, 30]); 66 | //! assert_eq!(vec, [10, 20, 30]); 67 | //! 68 | //! let ptr = unsafe { NonNull::new_unchecked(vec.as_mut_ptr()) }; 69 | //! let memory = NonNull::slice_from_raw_parts(ptr.cast(), 12); 70 | //! assert!(region.owns(memory)); 71 | //! ``` 72 | //! 73 | //! To reset the allocator, [`AllocateAll::deallocate_all`] may be used: 74 | //! 75 | //! ```rust 76 | //! # #![feature(allocator_api)] 77 | //! # use alloc_compose::{region::Region, Owns}; 78 | //! # use core::{alloc::{AllocRef, Layout}, mem::MaybeUninit}; 79 | //! # let mut data = [MaybeUninit::uninit(); 64]; 80 | //! # let region = Region::new(&mut data); 81 | //! # let _ = region.alloc(Layout::new::())?; 82 | //! use alloc_compose::AllocateAll; 83 | //! 84 | //! assert!(!region.is_empty()); 85 | //! region.deallocate_all(); 86 | //! assert!(region.is_empty()); 87 | //! # Ok::<(), core::alloc::AllocError>(()) 88 | //! 
``` 89 | 90 | pub mod raw; 91 | 92 | use self::raw::*; 93 | use crate::{AllocateAll, Owns}; 94 | use core::{ 95 | alloc::{AllocError, AllocRef, Layout}, 96 | marker::PhantomData, 97 | mem::MaybeUninit, 98 | ptr::NonNull, 99 | }; 100 | 101 | /// A stack allocator over an user-defined region of memory. 102 | /// 103 | /// It holds a lifetime to the provided memory block, which ensures, that the allocator does not 104 | /// outlive the underlying memory. 105 | /// 106 | /// For a version without lifetime see [`RawRegion`] instead. 107 | pub struct Region<'mem> { 108 | raw: RawRegion, 109 | _marker: PhantomData<&'mem mut [MaybeUninit]>, 110 | } 111 | 112 | impl<'mem> Region<'mem> { 113 | /// Creates a new region from the given memory block. 114 | #[inline] 115 | pub fn new(memory: &'mem mut [MaybeUninit]) -> Self { 116 | let memory = NonNull::from(memory); 117 | let memory = NonNull::slice_from_raw_parts(memory.cast(), memory.len()); 118 | Self { 119 | raw: unsafe { RawRegion::new(memory) }, 120 | _marker: PhantomData, 121 | } 122 | } 123 | } 124 | 125 | /// A clonable region allocator based on `Rc`. 126 | /// 127 | /// It holds a lifetime to the provided memory block, which ensures, that the allocator does not 128 | /// outlive the underlying memory. 129 | /// 130 | /// For a version without lifetime see [`RawSharedRegion`] instead. 131 | #[derive(Clone)] 132 | #[cfg(any(doc, feature = "alloc"))] 133 | #[cfg_attr(doc, doc(cfg(feature = "alloc")))] 134 | pub struct SharedRegion<'mem> { 135 | raw: RawSharedRegion, 136 | _marker: PhantomData<&'mem mut [MaybeUninit]>, 137 | } 138 | 139 | #[cfg(any(doc, feature = "alloc"))] 140 | impl<'mem> SharedRegion<'mem> { 141 | /// Creates a new region from the given memory block. 
142 | #[inline] 143 | pub fn new(memory: &'mem mut [MaybeUninit]) -> Self { 144 | let memory = NonNull::from(memory); 145 | let memory = NonNull::slice_from_raw_parts(memory.cast(), memory.len()); 146 | Self { 147 | raw: unsafe { RawSharedRegion::new(memory) }, 148 | _marker: PhantomData, 149 | } 150 | } 151 | } 152 | 153 | /// An intrusive region allocator, which stores the current posision in the provided memory. 154 | /// 155 | /// It holds a lifetime to the provided memory block, which ensures, that the allocator does not 156 | /// outlive the underlying memory. 157 | /// 158 | /// For a version without lifetime see [`RawIntrusiveRegion`] instead. 159 | #[derive(Clone)] 160 | pub struct IntrusiveRegion<'mem> { 161 | raw: RawIntrusiveRegion, 162 | _marker: PhantomData<&'mem mut [MaybeUninit]>, 163 | } 164 | 165 | impl<'mem> IntrusiveRegion<'mem> { 166 | /// Creates a new region from the given memory block. 167 | /// 168 | /// # Panics 169 | /// 170 | /// This function panics, when `memory` is not large enough to properly store a pointer. 171 | #[inline] 172 | pub fn new(memory: &'mem mut [MaybeUninit]) -> Self { 173 | let memory = NonNull::from(memory); 174 | let memory = NonNull::slice_from_raw_parts(memory.cast(), memory.len()); 175 | Self { 176 | raw: unsafe { RawIntrusiveRegion::new(memory) }, 177 | _marker: PhantomData, 178 | } 179 | } 180 | } 181 | 182 | macro_rules! 
impl_region { 183 | ($ty:ident, $raw:ty) => { 184 | impl PartialEq for $ty<'_> { 185 | #[inline] 186 | fn eq(&self, rhs: &Self) -> bool { 187 | self.raw == rhs.raw 188 | } 189 | } 190 | 191 | impl PartialEq<$raw> for $ty<'_> { 192 | #[inline] 193 | fn eq(&self, rhs: &$raw) -> bool { 194 | &self.raw == rhs 195 | } 196 | } 197 | 198 | impl PartialEq<$ty<'_>> for $raw { 199 | #[inline] 200 | fn eq(&self, rhs: &$ty<'_>) -> bool { 201 | self == &rhs.raw 202 | } 203 | } 204 | 205 | unsafe impl AllocRef for $ty<'_> { 206 | #[inline] 207 | fn alloc(&self, layout: Layout) -> Result, AllocError> { 208 | self.raw.alloc(layout) 209 | } 210 | 211 | #[inline] 212 | unsafe fn dealloc(&self, ptr: NonNull, layout: Layout) { 213 | self.raw.dealloc(ptr, layout) 214 | } 215 | 216 | #[inline] 217 | unsafe fn grow( 218 | &self, 219 | ptr: NonNull, 220 | old_layout: Layout, 221 | new_layout: Layout, 222 | ) -> Result, AllocError> { 223 | self.raw.grow(ptr, old_layout, new_layout) 224 | } 225 | 226 | #[inline] 227 | unsafe fn grow_zeroed( 228 | &self, 229 | ptr: NonNull, 230 | old_layout: Layout, 231 | new_layout: Layout, 232 | ) -> Result, AllocError> { 233 | self.raw.grow(ptr, old_layout, new_layout) 234 | } 235 | 236 | #[inline] 237 | unsafe fn shrink( 238 | &self, 239 | ptr: NonNull, 240 | old_layout: Layout, 241 | new_layout: Layout, 242 | ) -> Result, AllocError> { 243 | self.raw.grow(ptr, old_layout, new_layout) 244 | } 245 | } 246 | 247 | unsafe impl AllocateAll for $ty<'_> { 248 | #[inline] 249 | fn allocate_all(&self) -> Result, AllocError> { 250 | self.raw.allocate_all() 251 | } 252 | 253 | #[inline] 254 | fn allocate_all_zeroed(&self) -> Result, AllocError> { 255 | self.raw.allocate_all_zeroed() 256 | } 257 | 258 | #[inline] 259 | fn deallocate_all(&self) { 260 | self.raw.deallocate_all() 261 | } 262 | 263 | #[inline] 264 | fn capacity(&self) -> usize { 265 | self.raw.capacity() 266 | } 267 | 268 | #[inline] 269 | fn capacity_left(&self) -> usize { 270 | 
self.raw.capacity_left() 271 | } 272 | } 273 | 274 | impl Owns for $ty<'_> { 275 | #[inline] 276 | fn owns(&self, memory: NonNull<[u8]>) -> bool { 277 | self.raw.owns(memory) 278 | } 279 | } 280 | 281 | impl_global_alloc!($ty<'_>); 282 | }; 283 | } 284 | 285 | impl_region!(Region, RawRegion); 286 | #[cfg(any(doc, feature = "alloc"))] 287 | impl_region!(SharedRegion, RawSharedRegion); 288 | impl_region!(IntrusiveRegion, RawIntrusiveRegion); 289 | 290 | #[cfg(test)] 291 | mod tests { 292 | #![allow(clippy::wildcard_imports)] 293 | use super::*; 294 | use crate::helper::tracker; 295 | use core::{cell::Cell, mem}; 296 | 297 | fn aligned_slice(memory: &mut [MaybeUninit], size: usize) -> &mut [MaybeUninit] { 298 | let ptr = memory.as_mut_ptr() as usize; 299 | let start = (ptr + 31) & !(31); 300 | assert!(memory.len() >= start - ptr + size); 301 | unsafe { core::slice::from_raw_parts_mut(start as *mut MaybeUninit, size) } 302 | } 303 | 304 | macro_rules! impl_tests { 305 | ($namespace:ident, $ty:ident, $extra:expr) => { 306 | mod $namespace { 307 | use super::*; 308 | 309 | #[test] 310 | fn alloc_zero() { 311 | let mut raw_data = [MaybeUninit::::new(1); 128]; 312 | let data = aligned_slice(&mut raw_data, 32 + $extra); 313 | let region = tracker(<$ty>::new(data)); 314 | 315 | assert_eq!(region.capacity(), 32); 316 | assert!(region.is_empty()); 317 | 318 | region 319 | .alloc(Layout::new::<[u8; 0]>()) 320 | .expect("Could not allocated 0 bytes"); 321 | assert!(region.is_empty()); 322 | 323 | unsafe { 324 | drop(region); 325 | assert_eq!(MaybeUninit::slice_assume_init_ref(data)[..32], [1; 32]); 326 | } 327 | } 328 | 329 | #[test] 330 | fn alloc_zeroed() { 331 | let mut raw_data = [MaybeUninit::::new(1); 128]; 332 | let data = aligned_slice(&mut raw_data, 32 + $extra); 333 | let region = tracker(<$ty>::new(data)); 334 | 335 | assert_eq!(region.capacity(), 32); 336 | assert!(region.is_empty()); 337 | 338 | region 339 | .alloc_zeroed(Layout::new::<[u8; 32]>()) 340 | 
.expect("Could not allocated 32 bytes"); 341 | assert!(!region.is_empty()); 342 | 343 | unsafe { 344 | drop(region); 345 | assert_eq!(MaybeUninit::slice_assume_init_ref(data)[..32], [0; 32]); 346 | } 347 | } 348 | 349 | #[test] 350 | fn alloc_small() { 351 | let mut raw_data = [MaybeUninit::::new(1); 128]; 352 | let data = aligned_slice(&mut raw_data, 32 + $extra); 353 | let region = tracker(<$ty>::new(data)); 354 | 355 | assert_eq!(region.capacity(), 32); 356 | assert_eq!(region.capacity(), region.capacity_left()); 357 | 358 | region 359 | .alloc_zeroed(Layout::new::<[u8; 16]>()) 360 | .expect("Could not allocated 16 bytes"); 361 | assert_eq!(region.capacity_left(), 16); 362 | 363 | unsafe { 364 | drop(region); 365 | assert_eq!(MaybeUninit::slice_assume_init_ref(&data[0..16]), [1; 16]); 366 | assert_eq!(MaybeUninit::slice_assume_init_ref(&data[16..32]), [0; 16]); 367 | } 368 | } 369 | 370 | #[test] 371 | fn alloc_uninitialzed() { 372 | let mut raw_data = [MaybeUninit::::new(1); 128]; 373 | let data = aligned_slice(&mut raw_data, 32 + $extra); 374 | let region = tracker(<$ty>::new(data)); 375 | 376 | region 377 | .alloc(Layout::new::<[u8; 32]>()) 378 | .expect("Could not allocated 32 bytes"); 379 | assert_eq!(region.capacity_left(), 0); 380 | 381 | unsafe { 382 | drop(region); 383 | assert_eq!(MaybeUninit::slice_assume_init_ref(&data)[..32], [1; 32]); 384 | } 385 | } 386 | 387 | #[test] 388 | fn alloc_all() { 389 | let mut raw_data = [MaybeUninit::::new(1); 128]; 390 | let data = aligned_slice(&mut raw_data, 32 + $extra); 391 | let region = tracker(<$ty>::new(data)); 392 | 393 | assert_eq!(region.capacity(), 32); 394 | assert!(region.is_empty()); 395 | 396 | let ptr = region 397 | .alloc(Layout::new::()) 398 | .expect("Could not allocated 1 byte"); 399 | assert_eq!(ptr.len(), 1); 400 | assert_eq!(region.capacity_left(), 31, "capacity left"); 401 | 402 | let ptr = region 403 | .allocate_all_zeroed() 404 | .expect("Could not allocated rest of the bytes"); 405 | 
assert_eq!(ptr.len(), 31, "len"); 406 | assert!(region.is_full()); 407 | 408 | region.deallocate_all(); 409 | assert!(region.is_empty()); 410 | 411 | region 412 | .alloc(Layout::new::<[u8; 16]>()) 413 | .expect("Could not allocate 16 bytes"); 414 | region 415 | .alloc(Layout::new::<[u8; 17]>()) 416 | .expect_err("Could allocate more than 32 bytes"); 417 | } 418 | 419 | #[test] 420 | fn alloc_fail() { 421 | let mut raw_data = [MaybeUninit::::new(1); 128]; 422 | let data = aligned_slice(&mut raw_data, 32 + $extra); 423 | let region = tracker(<$ty>::new(data)); 424 | 425 | region 426 | .alloc(Layout::new::<[u8; 33]>()) 427 | .expect_err("Could allocate 33 bytes"); 428 | } 429 | 430 | #[test] 431 | fn alloc_aligned() { 432 | let mut raw_data = [MaybeUninit::::new(1); 128]; 433 | let data = aligned_slice(&mut raw_data, 32 + $extra); 434 | let region = tracker(<$ty>::new(data)); 435 | 436 | region 437 | .alloc(Layout::from_size_align(5, 1).expect("Invalid layout")) 438 | .expect("Could not allocate 5 Bytes"); 439 | let capacity = region.capacity_left(); 440 | 441 | let ptr = region 442 | .alloc(Layout::from_size_align(16, 16).expect("Invalid layout")) 443 | .expect("Could not allocate 16 Bytes"); 444 | assert_eq!(capacity - 16 - 11, region.capacity_left()); 445 | assert_eq!(ptr.as_mut_ptr() as usize % 16, 0); 446 | } 447 | } 448 | }; 449 | } 450 | 451 | impl_tests!(exclusive, Region, 0); 452 | #[cfg(any(doc, feature = "alloc"))] 453 | impl_tests!(shared, SharedRegion, 0); 454 | impl_tests!( 455 | intrusive, 456 | IntrusiveRegion, 457 | mem::size_of::>>>() 458 | ); 459 | 460 | #[test] 461 | fn vec() { 462 | let mut raw_data = [MaybeUninit::::new(1); 128]; 463 | let data = aligned_slice(&mut raw_data, 32); 464 | let region = tracker(Region::new(data)); 465 | let mut vec = alloc::vec::Vec::new_in(region.by_ref()); 466 | vec.push(10); 467 | } 468 | 469 | // #[test] 470 | // fn dealloc() { 471 | // let mut data = [MaybeUninit::new(1); 32]; 472 | // let mut region = 
Region::new(&mut data); 473 | // let layout = Layout::from_size_align(8, 1).expect("Invalid layout"); 474 | 475 | // let memory = region.alloc(layout).expect("Could not allocate 8 bytes"); 476 | // assert!(region.owns(memory)); 477 | // assert_eq!(region.capacity_left(), 24); 478 | 479 | // unsafe { 480 | // region.dealloc(memory.as_non_null_ptr(), layout); 481 | // } 482 | // assert_eq!(region.capacity_left(), 32); 483 | // assert!(!region.owns(memory)); 484 | 485 | // let memory = region.alloc(layout).expect("Could not allocate 8 bytes"); 486 | // assert!(region.owns(memory)); 487 | // region.alloc(layout).expect("Could not allocate 8 bytes"); 488 | // assert!(region.owns(memory)); 489 | // assert_eq!(memory.len(), 8); 490 | // assert_eq!(region.capacity_left(), 16); 491 | 492 | // unsafe { 493 | // region.dealloc(memory.as_non_null_ptr(), layout); 494 | // } 495 | // // It is not possible to deallocate memory that was not allocated last. 496 | // assert!(region.owns(memory)); 497 | // assert_eq!(region.capacity_left(), 16); 498 | // } 499 | 500 | // #[test] 501 | // fn realloc() { 502 | // let mut data = [MaybeUninit::new(1); 32]; 503 | // let mut region = Region::new(&mut data); 504 | // let layout = Layout::from_size_align(8, 1).expect("Invalid layout"); 505 | 506 | // let memory = region.alloc(layout).expect("Could not allocate 8 bytes"); 507 | // assert_eq!(memory.len(), 8); 508 | // assert_eq!(region.capacity_left(), 24); 509 | 510 | // region.alloc(layout).expect("Could not allocate 8 bytes"); 511 | // assert_eq!(region.capacity_left(), 16); 512 | 513 | // let memory = unsafe { 514 | // region 515 | // .grow(memory.as_non_null_ptr(), layout, Layout::new::<[u8; 16]>()) 516 | // .expect("Could not grow to 16 bytes") 517 | // }; 518 | // assert_eq!(memory.len(), 16); 519 | // assert_eq!(region.capacity_left(), 0); 520 | 521 | // region.dealloc_all(); 522 | // let memory = region 523 | // .alloc_zeroed(Layout::new::<[u8; 16]>()) 524 | // .expect("Could not 
allocate 16 bytes"); 525 | // region 526 | // .alloc(Layout::new::<[u8; 8]>()) 527 | // .expect("Could not allocate 16 bytes"); 528 | 529 | // unsafe { 530 | // region 531 | // .shrink( 532 | // memory.as_non_null_ptr(), 533 | // Layout::new::<[u8; 16]>(), 534 | // Layout::new::<[u8; 8]>(), 535 | // ) 536 | // .expect("Could not shrink to 8 bytes"); 537 | // } 538 | // } 539 | 540 | // #[test] 541 | // fn debug() { 542 | // let test_output = |region: &Region| { 543 | // assert_eq!( 544 | // format!("{:?}", region), 545 | // format!( 546 | // "Region {{ capacity: {}, capacity_left: {} }}", 547 | // region.capacity(), 548 | // region.capacity_left() 549 | // ) 550 | // ) 551 | // }; 552 | 553 | // let mut data = [MaybeUninit::new(1); 32]; 554 | // let mut region = Region::new(&mut data); 555 | // test_output(®ion); 556 | 557 | // region 558 | // .alloc(Layout::new::<[u8; 16]>()) 559 | // .expect("Could not allocate 16 bytes"); 560 | // test_output(®ion); 561 | 562 | // region 563 | // .alloc(Layout::new::<[u8; 16]>()) 564 | // .expect("Could not allocate 16 bytes"); 565 | // test_output(®ion); 566 | 567 | // region.dealloc_all(); 568 | // test_output(®ion); 569 | // } 570 | } 571 | -------------------------------------------------------------------------------- /src/region/raw.rs: -------------------------------------------------------------------------------- 1 | //! Region implementations which are not bound by a lifetime. 2 | //! 3 | //! In comparison to the [`region`] module, this module contains the raw counterparts. They don't 4 | //! require a lifetime bound but are `unsafe` to construct, as the user has to ensure, that the 5 | //! allocator outlives the memory. 6 | //! 7 | //! It is highly encouraged to use the safe counterparts whenever possible. 8 | //! 9 | //! 
[`region`]: crate::region 10 | 11 | use crate::{intrinsics::unlikely, AllocateAll, Owns}; 12 | use core::{ 13 | alloc::{AllocError, AllocRef, Layout}, 14 | cell::Cell, 15 | fmt, 16 | ptr::NonNull, 17 | }; 18 | 19 | #[cfg(any(doc, feature = "alloc"))] 20 | use alloc::rc::Rc; 21 | 22 | trait Current { 23 | fn current(&self) -> NonNull; 24 | 25 | #[inline] 26 | fn current_usize(&self) -> usize { 27 | self.current().as_ptr() as usize 28 | } 29 | 30 | fn set_current(&self, ptr: NonNull); 31 | } 32 | 33 | /// A stack allocator over an user-defined region of memory. 34 | /// 35 | /// This is the non-lifetime version of [`Region`]. 36 | /// 37 | /// [`Region`]: crate::region::Region 38 | pub struct RawRegion { 39 | memory: NonNull<[u8]>, 40 | current: Cell>, 41 | } 42 | 43 | impl RawRegion { 44 | /// Creates a new region from the given memory block. 45 | /// 46 | /// # Safety 47 | /// 48 | /// Behavior is undefined if any of the following conditions are violated: 49 | /// 50 | /// * `memory` must be [valid] for reads and writes for `memory.len()` many bytes. 51 | /// 52 | /// * `memory` must outlive the region. 53 | /// 54 | /// * `memory.len()` must be no larger than `isize::MAX`. 55 | /// See the safety documentation of [`pointer::offset`]. 56 | /// 57 | /// For a safe variant use [`Region`] instead. 
58 | /// 59 | /// [`Region`]: crate::region::Region 60 | /// [valid]: core::ptr#safety 61 | /// [`pointer::offset`]: https://doc.rust-lang.org/std/primitive.pointer.html#method.offset 62 | #[inline] 63 | pub unsafe fn new(memory: NonNull<[u8]>) -> Self { 64 | Self { 65 | memory, 66 | current: Cell::new(end(memory)), 67 | } 68 | } 69 | } 70 | 71 | impl Current for RawRegion { 72 | #[inline] 73 | fn current(&self) -> NonNull { 74 | self.current.get() 75 | } 76 | 77 | #[inline] 78 | fn set_current(&self, ptr: NonNull) { 79 | self.current.set(ptr) 80 | } 81 | } 82 | 83 | #[derive(Clone)] 84 | #[cfg(any(doc, feature = "alloc"))] 85 | #[cfg_attr(doc, doc(cfg(feature = "alloc")))] 86 | pub struct RawSharedRegion { 87 | memory: NonNull<[u8]>, 88 | current: Rc>>, 89 | } 90 | 91 | /// A clonable region allocator based on `Rc`. 92 | /// 93 | /// This is the non-lifetime version of [`SharedRegion`]. 94 | /// 95 | /// [`SharedRegion`]: crate::region::SharedRegion 96 | #[cfg(any(doc, feature = "alloc"))] 97 | impl RawSharedRegion { 98 | /// Creates a new region from the given memory block. 99 | /// 100 | /// # Safety 101 | /// 102 | /// Behavior is undefined if any of the following conditions are violated: 103 | /// 104 | /// * `memory` must be [valid] for reads and writes for `memory.len()` many bytes. 105 | /// 106 | /// * `memory` must outlive the region. 107 | /// 108 | /// * `memory.len()` must be no larger than `isize::MAX`. 109 | /// See the safety documentation of [`pointer::offset`]. 110 | /// 111 | /// For a safe variant use [`SharedRegion`] instead. 
112 | /// 113 | /// [`SharedRegion`]: crate::region::SharedRegion 114 | /// [valid]: core::ptr#safety 115 | /// [`pointer::offset`]: https://doc.rust-lang.org/std/primitive.pointer.html#method.offset 116 | #[inline] 117 | pub unsafe fn new(memory: NonNull<[u8]>) -> Self { 118 | Self { 119 | memory, 120 | current: Rc::new(Cell::new(end(memory))), 121 | } 122 | } 123 | } 124 | 125 | #[cfg(any(doc, feature = "alloc"))] 126 | impl Current for RawSharedRegion { 127 | #[inline] 128 | fn current(&self) -> NonNull { 129 | self.current.get() 130 | } 131 | 132 | #[inline] 133 | fn set_current(&self, ptr: NonNull) { 134 | self.current.set(ptr) 135 | } 136 | } 137 | 138 | /// An intrusive region allocator, which stores the current posision in the provided memory. 139 | /// 140 | /// This is the non-lifetime version of [`IntrusiveRegion`]. 141 | /// 142 | /// [`IntrusiveRegion`]: crate::region::IntrusiveRegion 143 | #[derive(Clone)] 144 | pub struct RawIntrusiveRegion { 145 | memory: NonNull<[u8]>, 146 | current: NonNull>>, 147 | } 148 | 149 | impl RawIntrusiveRegion { 150 | /// Creates a new region from the given memory block. 151 | /// 152 | /// # Safety 153 | /// 154 | /// Behavior is undefined if any of the following conditions are violated: 155 | /// 156 | /// * `memory` must be [valid] for reads and writes for `memory.len()` many bytes. 157 | /// 158 | /// * `memory` must outlive the region. 159 | /// 160 | /// * `memory.len()` must be no larger than `isize::MAX`. 161 | /// See the safety documentation of [`pointer::offset`]. 162 | /// 163 | /// For a safe variant use [`IntrusiveRegion`] instead. 164 | /// 165 | /// [`IntrusiveRegion`]: crate::region::IntrusiveRegion 166 | /// [valid]: core::ptr#safety 167 | /// [`pointer::offset`]: https://doc.rust-lang.org/std/primitive.pointer.html#method.offset 168 | /// 169 | /// # Panics 170 | /// 171 | /// This function panics, when `memory` is not large enough to properly store a pointer. 
172 | #[inline] 173 | pub unsafe fn new(memory: NonNull<[u8]>) -> Self { 174 | let current: NonNull>> = alloc_impl( 175 | memory, 176 | end(memory), 177 | Layout::new::>>>(), 178 | ) 179 | .expect("Could not store pointer in region") 180 | .as_non_null_ptr() 181 | .cast(); 182 | current.as_ptr().write(Cell::new(current.cast())); 183 | let memory = NonNull::slice_from_raw_parts( 184 | memory.as_non_null_ptr(), 185 | current.as_ptr() as usize - memory.as_mut_ptr() as usize, 186 | ); 187 | Self { memory, current } 188 | } 189 | } 190 | 191 | impl Current for RawIntrusiveRegion { 192 | #[inline] 193 | fn current(&self) -> NonNull { 194 | unsafe { self.current.as_ref().get() } 195 | } 196 | 197 | #[inline] 198 | fn set_current(&self, ptr: NonNull) { 199 | unsafe { self.current.as_ref().set(ptr) } 200 | } 201 | } 202 | 203 | #[inline] 204 | fn alloc_impl( 205 | memory: NonNull<[u8]>, 206 | current: NonNull, 207 | layout: Layout, 208 | ) -> Result, AllocError> { 209 | let current = current.as_ptr() as usize; 210 | let new = current.checked_sub(layout.size()).ok_or(AllocError)?; 211 | let aligned = (new & !(layout.align() - 1)) as *mut u8; 212 | 213 | if unlikely(aligned < memory.as_mut_ptr()) { 214 | Err(AllocError) 215 | } else { 216 | Ok(NonNull::slice_from_raw_parts( 217 | unsafe { NonNull::new_unchecked(aligned) }, 218 | current - aligned as usize, 219 | )) 220 | } 221 | } 222 | 223 | #[inline] 224 | fn alloc_all_impl( 225 | memory: NonNull<[u8]>, 226 | current: NonNull, 227 | ) -> Result, AllocError> { 228 | let current = current.as_ptr() as usize; 229 | let new = memory.as_non_null_ptr(); 230 | 231 | Ok(NonNull::slice_from_raw_parts( 232 | new, 233 | current - new.as_ptr() as usize, 234 | )) 235 | } 236 | 237 | #[inline] 238 | fn end(ptr: NonNull<[u8]>) -> NonNull { 239 | unsafe { NonNull::new_unchecked(ptr.as_mut_ptr().add(ptr.len())) } 240 | } 241 | 242 | // unsafe impl AllocRef for RawRegion { 243 | // #[inline] 244 | // fn alloc(&self, layout: Layout) -> Result, 
AllocError> { 245 | // let new = alloc_impl(self.memory, self.current.get(), layout)?; 246 | // self.current.set(new.as_non_null_ptr()); 247 | // Ok(new) 248 | // } 249 | 250 | // #[inline] 251 | // unsafe fn dealloc(&self, _ptr: NonNull, _layout: Layout) {} 252 | // } 253 | 254 | // unsafe impl AllocRef for RawSharedRegion { 255 | // #[inline] 256 | // fn alloc(&self, layout: Layout) -> Result, AllocError> { 257 | // let current = self.current.as_ref(); 258 | // let new = alloc_impl(self.memory, current.get(), layout)?; 259 | // current.set(new.as_non_null_ptr()); 260 | // Ok(new) 261 | // } 262 | 263 | // #[inline] 264 | // unsafe fn dealloc(&self, _ptr: NonNull, _layout: Layout) {} 265 | // } 266 | 267 | // unsafe impl AllocRef for RawIntrusiveRegion { 268 | // #[inline] 269 | // fn alloc(&self, layout: Layout) -> Result, AllocError> { 270 | // let current = unsafe { self.current.as_ref() }; 271 | // let new = alloc_impl(self.memory, current.get(), layout)?; 272 | // current.set(new.as_non_null_ptr()); 273 | // Ok(new) 274 | // } 275 | 276 | // #[inline] 277 | // unsafe fn dealloc(&self, _ptr: NonNull, _layout: Layout) {} 278 | // } 279 | 280 | macro_rules! 
impl_raw_region {
    ($ty:ident) => {
        impl PartialEq for $ty {
            #[inline]
            fn eq(&self, rhs: &Self) -> bool {
                // Two regions are equal when they manage the same memory block.
                self.memory == rhs.memory
            }
        }

        impl fmt::Debug for $ty {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                // Use the concrete type's name; the previous hard-coded
                // "RawRegion" mislabeled `RawSharedRegion` and
                // `RawIntrusiveRegion` in debug output.
                f.debug_struct(stringify!($ty))
                    .field("memory", &self.memory)
                    .field("len", &self.memory.len())
                    .field("current", &self.current())
                    .finish()
            }
        }

        unsafe impl AllocRef for $ty {
            #[inline]
            fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
                let new = alloc_impl(self.memory, self.current(), layout)?;
                self.set_current(new.as_non_null_ptr());
                Ok(new)
            }

            // Individual deallocation is a no-op; memory is reclaimed only
            // via `deallocate_all` or by dropping the region.
            #[inline]
            unsafe fn dealloc(&self, _ptr: NonNull<u8>, _layout: Layout) {}

            unsafe fn grow(
                &self,
                _ptr: NonNull<u8>,
                _old_layout: Layout,
                _new_layout: Layout,
            ) -> Result<NonNull<[u8]>, AllocError> {
                Err(AllocError)
            }

            unsafe fn grow_zeroed(
                &self,
                _ptr: NonNull<u8>,
                _old_layout: Layout,
                _new_layout: Layout,
            ) -> Result<NonNull<[u8]>, AllocError> {
                Err(AllocError)
            }

            unsafe fn shrink(
                &self,
                _ptr: NonNull<u8>,
                _old_layout: Layout,
                _new_layout: Layout,
            ) -> Result<NonNull<[u8]>, AllocError> {
                Err(AllocError)
            }
        }

        unsafe impl AllocateAll for $ty {
            #[inline]
            fn allocate_all(&self) -> Result<NonNull<[u8]>, AllocError> {
                let new = alloc_all_impl(self.memory, self.current())?;
                self.set_current(new.as_non_null_ptr());
                Ok(new)
            }

            #[inline]
            fn deallocate_all(&self) {
                self.set_current(end(self.memory))
            }

            #[inline]
            fn capacity(&self) -> usize {
                self.memory.len()
            }

            #[inline]
            fn capacity_left(&self) -> usize {
                // Free space is everything below the bump pointer.
                self.current_usize() - self.memory.as_mut_ptr() as usize
            }
        }

        impl Owns for $ty {
            #[inline]
fn owns(&self, memory: NonNull<[u8]>) -> bool { 365 | let ptr = memory.as_mut_ptr() as usize; 366 | let current = self.current_usize(); 367 | ptr >= current && ptr + memory.len() <= end(self.memory).as_ptr() as usize 368 | } 369 | } 370 | 371 | impl_global_alloc!($ty); 372 | }; 373 | } 374 | 375 | impl_raw_region!(RawRegion); 376 | #[cfg(any(doc, feature = "alloc"))] 377 | impl_raw_region!(RawSharedRegion); 378 | impl_raw_region!(RawIntrusiveRegion); 379 | -------------------------------------------------------------------------------- /src/segregate.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | helper::{grow_fallback, shrink_fallback, AllocInit}, 3 | AllocAll, 4 | Owns, 5 | }; 6 | use core::{ 7 | alloc::{AllocErr, AllocRef, Layout}, 8 | cmp, 9 | ptr::NonNull, 10 | }; 11 | 12 | /// Dispatches calls to `AllocRef` between two allocators depending on the size allocated. 13 | /// 14 | /// All allocations smaller than or equal to `threshold` will be dispatched to `Small`. The others 15 | /// will go to `Large`. 
#[derive(Debug, Copy, Clone)]
pub struct Segregate<Small, Large, const THRESHOLD: usize> {
    pub small: Small,
    pub large: Large,
}

impl<Small, Large, const THRESHOLD: usize> Segregate<Small, Large, THRESHOLD> {
    /// Caps the length of a block returned by `small` at `THRESHOLD`, so a
    /// small block is never later mistaken for one owned by `large`.
    fn clamped(ptr: NonNull<[u8]>) -> NonNull<[u8]> {
        NonNull::slice_from_raw_parts(ptr.as_non_null_ptr(), cmp::min(ptr.len(), THRESHOLD))
    }
}

unsafe impl<Small, Large, const THRESHOLD: usize> AllocRef for Segregate<Small, Large, THRESHOLD>
where
    Small: AllocRef,
    Large: AllocRef,
{
    fn alloc(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocErr> {
        if layout.size() <= THRESHOLD {
            let memory = self.small.alloc(layout)?;
            Ok(Self::clamped(memory))
        } else {
            self.large.alloc(layout)
        }
    }

    fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocErr> {
        if layout.size() <= THRESHOLD {
            let memory = self.small.alloc_zeroed(layout)?;
            Ok(Self::clamped(memory))
        } else {
            self.large.alloc_zeroed(layout)
        }
    }

    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
        if layout.size() <= THRESHOLD {
            self.small.dealloc(ptr, layout)
        } else {
            self.large.dealloc(ptr, layout)
        }
    }

    unsafe fn grow(
        &mut self,
        ptr: NonNull<u8>,
        layout: Layout,
        new_size: usize,
    ) -> Result<NonNull<[u8]>, AllocErr> {
        if layout.size() <= THRESHOLD {
            if new_size > THRESHOLD {
                // The allocation crosses the threshold: move it to `large`.
                grow_fallback(
                    &mut self.small,
                    &mut self.large,
                    ptr,
                    layout,
                    new_size,
                    AllocInit::Uninitialized,
                )
            } else {
                let memory = self.small.grow(ptr, layout, new_size)?;
                Ok(Self::clamped(memory))
            }
        } else {
            self.large.grow(ptr, layout, new_size)
        }
    }

    unsafe fn grow_zeroed(
        &mut self,
        ptr: NonNull<u8>,
        layout: Layout,
        new_size: usize,
    ) -> Result<NonNull<[u8]>, AllocErr> {
        if layout.size() <= THRESHOLD {
            if new_size > THRESHOLD {
                // The allocation crosses the threshold: move it to `large`.
                grow_fallback(
                    &mut self.small,
                    &mut self.large,
                    ptr,
                    layout,
                    new_size,
                    AllocInit::Zeroed,
                )
            } else {
                let memory = self.small.grow_zeroed(ptr, layout, new_size)?;
                Ok(Self::clamped(memory))
            }
        } else {
            self.large.grow_zeroed(ptr, layout, new_size)
        }
    }

    unsafe fn shrink(
        &mut self,
        ptr: NonNull<u8>,
        layout: Layout,
        new_size: usize,
    ) -> Result<NonNull<[u8]>, AllocErr> {
        if layout.size() <= THRESHOLD {
            let memory = self.small.shrink(ptr, layout, new_size)?;
            Ok(Self::clamped(memory))
        } else if new_size <= THRESHOLD {
            // Move ownership to `self.small`
            let memory = shrink_fallback(&mut self.large, &mut self.small, ptr, layout, new_size)?;
            Ok(Self::clamped(memory))
        } else {
            self.large.shrink(ptr, layout, new_size)
        }
    }
}

unsafe impl<Small, Large, const THRESHOLD: usize> AllocAll for Segregate<Small, Large, THRESHOLD>
where
    Small: AllocAll,
    Large: AllocAll,
{
    fn alloc_all(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocErr> {
        if layout.size() <= THRESHOLD {
            let memory = self.small.alloc_all(layout)?;
            Ok(Self::clamped(memory))
        } else {
            self.large.alloc_all(layout)
        }
    }

    fn alloc_all_zeroed(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocErr> {
        if layout.size() <= THRESHOLD {
            let memory = self.small.alloc_all_zeroed(layout)?;
            Ok(Self::clamped(memory))
        } else {
            // Fixed: this branch previously delegated to `alloc_all`,
            // silently returning uninitialized memory from the zeroed
            // variant for large requests.
            self.large.alloc_all_zeroed(layout)
        }
    }

    /// Deallocates all the memory the allocator had allocated.
    fn dealloc_all(&mut self) {
        self.small.dealloc_all();
        self.large.dealloc_all();
    }

    /// Returns the total capacity available in this allocator.
    fn capacity(&self) -> usize {
        self.small.capacity() + self.large.capacity()
    }

    /// Returns the free capacity left for allocating.
163 | fn capacity_left(&self) -> usize { 164 | self.small.capacity_left() + self.large.capacity_left() 165 | } 166 | } 167 | 168 | impl Owns for Segregate 169 | where 170 | Small: Owns, 171 | Large: Owns, 172 | { 173 | fn owns(&self, ptr: NonNull<[u8]>) -> bool { 174 | if ptr.len() <= THRESHOLD { 175 | self.small.owns(ptr) 176 | } else { 177 | self.large.owns(ptr) 178 | } 179 | } 180 | } 181 | 182 | #[cfg(test)] 183 | mod tests { 184 | use super::Segregate; 185 | use crate::{AllocAll, Owns, Region}; 186 | use core::{ 187 | alloc::{AllocRef, Layout}, 188 | mem::MaybeUninit, 189 | }; 190 | 191 | #[test] 192 | fn alloc() { 193 | let mut data_1 = [MaybeUninit::new(0); 128]; 194 | let mut data_2 = [MaybeUninit::new(0); 128]; 195 | 196 | let mut alloc: Segregate<_, _, 32> = Segregate { 197 | small: Region::new(&mut data_1), 198 | large: Region::new(&mut data_2), 199 | }; 200 | 201 | assert_eq!(alloc.capacity(), 256); 202 | assert_eq!(alloc.capacity_left(), alloc.capacity()); 203 | 204 | let mem = alloc 205 | .alloc(Layout::new::<[u8; 4]>()) 206 | .expect("Could not allocate 4 bytes"); 207 | assert_eq!(mem.len(), 4); 208 | assert!(alloc.small.owns(mem)); 209 | 210 | unsafe { alloc.dealloc(mem.as_non_null_ptr(), Layout::new::<[u8; 4]>()) }; 211 | assert!(!alloc.owns(mem)); 212 | 213 | let mem = alloc 214 | .alloc(Layout::new::<[u8; 32]>()) 215 | .expect("Could not allocate 32 bytes"); 216 | assert_eq!(mem.len(), 32); 217 | assert!(alloc.small.owns(mem)); 218 | 219 | assert_eq!(alloc.capacity(), 256); 220 | assert_eq!(alloc.capacity_left(), alloc.capacity() - 32); 221 | 222 | let mem = alloc 223 | .alloc(Layout::new::<[u8; 33]>()) 224 | .expect("Could not allocate 33 bytes"); 225 | assert_eq!(mem.len(), 33); 226 | assert!(alloc.large.owns(mem)); 227 | 228 | assert_eq!(alloc.capacity(), 256); 229 | assert_eq!(alloc.capacity_left(), alloc.capacity() - 32 - 33); 230 | 231 | unsafe { 232 | alloc.dealloc(mem.as_non_null_ptr(), Layout::new::<[u8; 33]>()); 233 | } 234 | 
assert_eq!(alloc.capacity_left(), alloc.capacity() - 32); 235 | 236 | alloc.dealloc_all(); 237 | assert_eq!(alloc.capacity(), alloc.capacity_left()); 238 | 239 | let mem = alloc 240 | .alloc_all(Layout::new::<[u8; 4]>()) 241 | .expect("Could not allocate 4 bytes"); 242 | assert!(alloc.small.owns(mem)); 243 | assert_eq!(mem.len(), 32); 244 | 245 | assert_eq!(alloc.capacity(), 256); 246 | assert_eq!(alloc.capacity_left(), 128); 247 | 248 | let mem = alloc 249 | .alloc_all(Layout::new::<[u8; 33]>()) 250 | .expect("Could not allocate 33 bytes"); 251 | assert!(alloc.large.owns(mem)); 252 | assert_eq!(mem.len(), 128); 253 | 254 | assert_eq!(alloc.capacity(), 256); 255 | assert_eq!(alloc.capacity_left(), 0); 256 | 257 | alloc.dealloc_all(); 258 | 259 | assert_eq!(alloc.capacity_left(), alloc.capacity()); 260 | } 261 | 262 | #[test] 263 | fn realloc() { 264 | let mut data_1 = [MaybeUninit::new(0); 128]; 265 | let mut data_2 = [MaybeUninit::new(0); 128]; 266 | 267 | let mut alloc: Segregate<_, _, 32> = Segregate { 268 | small: Region::new(&mut data_1), 269 | large: Region::new(&mut data_2), 270 | }; 271 | 272 | let mem = alloc.alloc(Layout::new::<[u8; 8]>()).unwrap(); 273 | assert_eq!(mem.len(), 8); 274 | assert!(alloc.small.owns(mem)); 275 | assert!(alloc.owns(mem)); 276 | 277 | unsafe { 278 | let mem = alloc 279 | .grow(mem.as_non_null_ptr(), Layout::new::<[u8; 8]>(), 16) 280 | .unwrap(); 281 | assert_eq!(mem.len(), 16); 282 | assert!(alloc.small.owns(mem)); 283 | assert!(alloc.owns(mem)); 284 | 285 | let mem = alloc 286 | .grow(mem.as_non_null_ptr(), Layout::new::<[u8; 8]>(), 32) 287 | .unwrap(); 288 | assert_eq!(mem.len(), 32); 289 | assert!(alloc.small.owns(mem)); 290 | assert!(alloc.owns(mem)); 291 | 292 | let mem = alloc 293 | .grow(mem.as_non_null_ptr(), Layout::new::<[u8; 32]>(), 33) 294 | .unwrap(); 295 | assert_eq!(mem.len(), 33); 296 | assert!(!alloc.small.owns(mem)); 297 | assert!(alloc.large.owns(mem)); 298 | assert!(alloc.owns(mem)); 299 | 300 | let mem = 
alloc 301 | .grow(mem.as_non_null_ptr(), Layout::new::<[u8; 33]>(), 64) 302 | .unwrap(); 303 | assert_eq!(mem.len(), 64); 304 | assert!(!alloc.small.owns(mem)); 305 | assert!(alloc.large.owns(mem)); 306 | assert!(alloc.owns(mem)); 307 | } 308 | } 309 | } 310 | --------------------------------------------------------------------------------