├── .github └── workflows │ ├── ci.yml │ └── fuzz.yml ├── .gitignore ├── Cargo.toml ├── LICENCE.md ├── benches └── palmtree.rs ├── build.rs ├── clippy.toml ├── fuzz ├── .gitignore ├── Cargo.toml └── fuzz_targets │ └── palmtree.rs ├── proptest-regressions └── tests.txt └── src ├── arch.rs ├── array.rs ├── branch.rs ├── branch └── node.rs ├── config.rs ├── entry.rs ├── iter ├── merge.rs ├── mod.rs ├── mut_iter.rs ├── owned.rs └── ref_iter.rs ├── leaf.rs ├── lib.rs ├── pointer.rs ├── search.rs └── tests.rs /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | on: [push, pull_request] 2 | 3 | name: Continuous integration 4 | 5 | jobs: 6 | check: 7 | name: Check 8 | runs-on: ubuntu-latest 9 | strategy: 10 | matrix: 11 | rust: 12 | - stable 13 | - nightly 14 | - 1.43.0 # lowest supported version 15 | steps: 16 | - uses: actions/checkout@v2 17 | - uses: actions-rs/toolchain@v1 18 | with: 19 | profile: minimal 20 | toolchain: ${{ matrix.rust }} 21 | override: true 22 | - uses: actions-rs/cargo@v1 23 | with: 24 | command: check 25 | args: --all-features 26 | 27 | test: 28 | name: Tests 29 | runs-on: ubuntu-latest 30 | strategy: 31 | matrix: 32 | rust: 33 | - stable 34 | - nightly 35 | - 1.43.0 # lowest supported version 36 | steps: 37 | - uses: actions/checkout@v2 38 | - uses: actions-rs/toolchain@v1 39 | with: 40 | profile: minimal 41 | toolchain: ${{ matrix.rust }} 42 | override: true 43 | - uses: actions-rs/cargo@v1 44 | with: 45 | command: test 46 | args: --all-features 47 | 48 | fmt: 49 | name: Rustfmt 50 | runs-on: ubuntu-latest 51 | steps: 52 | - uses: actions/checkout@v2 53 | - uses: actions-rs/toolchain@v1 54 | with: 55 | profile: minimal 56 | toolchain: stable 57 | override: true 58 | components: rustfmt 59 | - uses: actions-rs/cargo@v1 60 | with: 61 | command: fmt 62 | args: --all -- --check 63 | 64 | clippy: 65 | name: Clippy 66 | runs-on: ubuntu-latest 67 | strategy: 68 | matrix: 69 | rust: 70 | - stable 71 | - nightly 72 | steps: 73 | - uses: actions/checkout@v2 74 | - uses: actions-rs/toolchain@v1 75 | with: 76 | profile: minimal 77 | toolchain: ${{ matrix.rust }} 78 | override: true 79 | components: clippy 80 | - uses: actions-rs/clippy-check@v1 81 | with: 82 | name: Clippy-${{ matrix.rust }} 83 | token: ${{ secrets.GITHUB_TOKEN }} 84 | args: --all-features 85 | -------------------------------------------------------------------------------- /.github/workflows/fuzz.yml: -------------------------------------------------------------------------------- 1 | on: [push, pull_request] 2 | 3 | name: libFuzzer 4 | 5 | jobs: 6 | fuzz: 7 | name: libFuzzer 8 | runs-on: ubuntu-latest 9 | strategy: 10 | fail-fast: false 11 | matrix: 12 | target: 13 | - palmtree 14 | steps: 15 | - uses: actions/checkout@v2 16 | name: Checkout project 17 | - uses: actions/cache@v1 18 | name: Cache corpus 19 | id: cache-corpus 20 | with: 21 | path: fuzz/corpus/${{ matrix.target }} 22 | key: fuzz-corpus-${{ matrix.target }}-${{ github.run_id }} 23 | restore-keys: | 24 | fuzz-corpus-${{ matrix.target }}- 25 | - uses: actions-rs/toolchain@v1 26 | name: Install Rust 27 | with: 28 | profile: minimal 29 | toolchain: nightly 30 | override: true 31 | - uses: actions-rs/install@v0.1 32 | name: Install cargo-fuzz 33 | with: 34 | crate: cargo-fuzz 35 | version: latest 36 | use-tool-cache: true 37 | - name: Fuzz for 10 minutes 38 | run: cargo fuzz run ${{ matrix.target }} -- -max_total_time=600 # seconds 39 | - uses: actions/upload-artifact@v1 40 | name: Publish 
artifacts 41 | with: 42 | name: fuzz-artifacts 43 | path: fuzz/artifacts 44 | - uses: actions/upload-artifact@v2 45 | name: Publish corpus 46 | with: 47 | name: fuzz-corpus 48 | path: fuzz/corpus 49 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | **/*.rs.bk 3 | Cargo.lock 4 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "palmtree" 3 | version = "0.1.0" 4 | authors = ["Bodil Stokke "] 5 | edition = "2018" 6 | license = "MPL-2.0+" 7 | build = "build.rs" 8 | 9 | [[bench]] 10 | name = "palmtree" 11 | harness = false 12 | 13 | [features] 14 | test = ["arbitrary"] 15 | tree_debug = [] 16 | 17 | [dependencies] 18 | refpool = "0.4.2" 19 | typenum = "1.12" 20 | arbitrary = { version = "0.4", optional = true, features = ["derive"] } 21 | arrayvec = "0.5.1" 22 | generic-array = "0.14.2" 23 | 24 | [dev-dependencies] 25 | criterion = "0.3" 26 | rand = "0.7" 27 | proptest = "0.10" 28 | proptest-derive = "0.2" 29 | 30 | [build-dependencies] 31 | version_check = "0.9.2" 32 | -------------------------------------------------------------------------------- /LICENCE.md: -------------------------------------------------------------------------------- 1 | Mozilla Public License Version 2.0 2 | ================================== 3 | 4 | ### 1. Definitions 5 | 6 | **1.1. “Contributor”** 7 | means each individual or legal entity that creates, contributes to 8 | the creation of, or owns Covered Software. 9 | 10 | **1.2. “Contributor Version”** 11 | means the combination of the Contributions of others (if any) used 12 | by a Contributor and that particular Contributor's Contribution. 13 | 14 | **1.3. “Contribution”** 15 | means Covered Software of a particular Contributor. 16 | 17 | **1.4. “Covered Software”** 18 | means Source Code Form to which the initial Contributor has attached 19 | the notice in Exhibit A, the Executable Form of such Source Code 20 | Form, and Modifications of such Source Code Form, in each case 21 | including portions thereof. 22 | 23 | **1.5. “Incompatible With Secondary Licenses”** 24 | means 25 | 26 | * **(a)** that the initial Contributor has attached the notice described 27 | in Exhibit B to the Covered Software; or 28 | * **(b)** that the Covered Software was made available under the terms of 29 | version 1.1 or earlier of the License, but not also under the 30 | terms of a Secondary License. 31 | 32 | **1.6. “Executable Form”** 33 | means any form of the work other than Source Code Form. 34 | 35 | **1.7. “Larger Work”** 36 | means a work that combines Covered Software with other material, in 37 | a separate file or files, that is not Covered Software. 38 | 39 | **1.8. “License”** 40 | means this document. 41 | 42 | **1.9. “Licensable”** 43 | means having the right to grant, to the maximum extent possible, 44 | whether at the time of the initial grant or subsequently, any and 45 | all of the rights conveyed by this License. 46 | 47 | **1.10. “Modifications”** 48 | means any of the following: 49 | 50 | * **(a)** any file in Source Code Form that results from an addition to, 51 | deletion from, or modification of the contents of Covered 52 | Software; or 53 | * **(b)** any new file in Source Code Form that contains any Covered 54 | Software. 55 | 56 | **1.11. 
“Patent Claims” of a Contributor** 57 | means any patent claim(s), including without limitation, method, 58 | process, and apparatus claims, in any patent Licensable by such 59 | Contributor that would be infringed, but for the grant of the 60 | License, by the making, using, selling, offering for sale, having 61 | made, import, or transfer of either its Contributions or its 62 | Contributor Version. 63 | 64 | **1.12. “Secondary License”** 65 | means either the GNU General Public License, Version 2.0, the GNU 66 | Lesser General Public License, Version 2.1, the GNU Affero General 67 | Public License, Version 3.0, or any later versions of those 68 | licenses. 69 | 70 | **1.13. “Source Code Form”** 71 | means the form of the work preferred for making modifications. 72 | 73 | **1.14. “You” (or “Your”)** 74 | means an individual or a legal entity exercising rights under this 75 | License. For legal entities, “You” includes any entity that 76 | controls, is controlled by, or is under common control with You. For 77 | purposes of this definition, “control” means **(a)** the power, direct 78 | or indirect, to cause the direction or management of such entity, 79 | whether by contract or otherwise, or **(b)** ownership of more than 80 | fifty percent (50%) of the outstanding shares or beneficial 81 | ownership of such entity. 82 | 83 | 84 | ### 2. License Grants and Conditions 85 | 86 | #### 2.1. Grants 87 | 88 | Each Contributor hereby grants You a world-wide, royalty-free, 89 | non-exclusive license: 90 | 91 | * **(a)** under intellectual property rights (other than patent or trademark) 92 | Licensable by such Contributor to use, reproduce, make available, 93 | modify, display, perform, distribute, and otherwise exploit its 94 | Contributions, either on an unmodified basis, with Modifications, or 95 | as part of a Larger Work; and 96 | * **(b)** under Patent Claims of such Contributor to make, use, sell, offer 97 | for sale, have made, import, and otherwise transfer either its 98 | Contributions or its Contributor Version. 99 | 100 | #### 2.2. Effective Date 101 | 102 | The licenses granted in Section 2.1 with respect to any Contribution 103 | become effective for each Contribution on the date the Contributor first 104 | distributes such Contribution. 105 | 106 | #### 2.3. Limitations on Grant Scope 107 | 108 | The licenses granted in this Section 2 are the only rights granted under 109 | this License. No additional rights or licenses will be implied from the 110 | distribution or licensing of Covered Software under this License. 111 | Notwithstanding Section 2.1(b) above, no patent license is granted by a 112 | Contributor: 113 | 114 | * **(a)** for any code that a Contributor has removed from Covered Software; 115 | or 116 | * **(b)** for infringements caused by: **(i)** Your and any other third party's 117 | modifications of Covered Software, or **(ii)** the combination of its 118 | Contributions with other software (except as part of its Contributor 119 | Version); or 120 | * **(c)** under Patent Claims infringed by Covered Software in the absence of 121 | its Contributions. 122 | 123 | This License does not grant any rights in the trademarks, service marks, 124 | or logos of any Contributor (except as may be necessary to comply with 125 | the notice requirements in Section 3.4). 126 | 127 | #### 2.4. 
Subsequent Licenses 128 | 129 | No Contributor makes additional grants as a result of Your choice to 130 | distribute the Covered Software under a subsequent version of this 131 | License (see Section 10.2) or under the terms of a Secondary License (if 132 | permitted under the terms of Section 3.3). 133 | 134 | #### 2.5. Representation 135 | 136 | Each Contributor represents that the Contributor believes its 137 | Contributions are its original creation(s) or it has sufficient rights 138 | to grant the rights to its Contributions conveyed by this License. 139 | 140 | #### 2.6. Fair Use 141 | 142 | This License is not intended to limit any rights You have under 143 | applicable copyright doctrines of fair use, fair dealing, or other 144 | equivalents. 145 | 146 | #### 2.7. Conditions 147 | 148 | Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted 149 | in Section 2.1. 150 | 151 | 152 | ### 3. Responsibilities 153 | 154 | #### 3.1. Distribution of Source Form 155 | 156 | All distribution of Covered Software in Source Code Form, including any 157 | Modifications that You create or to which You contribute, must be under 158 | the terms of this License. You must inform recipients that the Source 159 | Code Form of the Covered Software is governed by the terms of this 160 | License, and how they can obtain a copy of this License. You may not 161 | attempt to alter or restrict the recipients' rights in the Source Code 162 | Form. 163 | 164 | #### 3.2. Distribution of Executable Form 165 | 166 | If You distribute Covered Software in Executable Form then: 167 | 168 | * **(a)** such Covered Software must also be made available in Source Code 169 | Form, as described in Section 3.1, and You must inform recipients of 170 | the Executable Form how they can obtain a copy of such Source Code 171 | Form by reasonable means in a timely manner, at a charge no more 172 | than the cost of distribution to the recipient; and 173 | 174 | * **(b)** You may distribute such Executable Form under the terms of this 175 | License, or sublicense it under different terms, provided that the 176 | license for the Executable Form does not attempt to limit or alter 177 | the recipients' rights in the Source Code Form under this License. 178 | 179 | #### 3.3. Distribution of a Larger Work 180 | 181 | You may create and distribute a Larger Work under terms of Your choice, 182 | provided that You also comply with the requirements of this License for 183 | the Covered Software. If the Larger Work is a combination of Covered 184 | Software with a work governed by one or more Secondary Licenses, and the 185 | Covered Software is not Incompatible With Secondary Licenses, this 186 | License permits You to additionally distribute such Covered Software 187 | under the terms of such Secondary License(s), so that the recipient of 188 | the Larger Work may, at their option, further distribute the Covered 189 | Software under the terms of either this License or such Secondary 190 | License(s). 191 | 192 | #### 3.4. Notices 193 | 194 | You may not remove or alter the substance of any license notices 195 | (including copyright notices, patent notices, disclaimers of warranty, 196 | or limitations of liability) contained within the Source Code Form of 197 | the Covered Software, except that You may alter any license notices to 198 | the extent required to remedy known factual inaccuracies. 199 | 200 | #### 3.5. 
Application of Additional Terms 201 | 202 | You may choose to offer, and to charge a fee for, warranty, support, 203 | indemnity or liability obligations to one or more recipients of Covered 204 | Software. However, You may do so only on Your own behalf, and not on 205 | behalf of any Contributor. You must make it absolutely clear that any 206 | such warranty, support, indemnity, or liability obligation is offered by 207 | You alone, and You hereby agree to indemnify every Contributor for any 208 | liability incurred by such Contributor as a result of warranty, support, 209 | indemnity or liability terms You offer. You may include additional 210 | disclaimers of warranty and limitations of liability specific to any 211 | jurisdiction. 212 | 213 | 214 | ### 4. Inability to Comply Due to Statute or Regulation 215 | 216 | If it is impossible for You to comply with any of the terms of this 217 | License with respect to some or all of the Covered Software due to 218 | statute, judicial order, or regulation then You must: **(a)** comply with 219 | the terms of this License to the maximum extent possible; and **(b)** 220 | describe the limitations and the code they affect. Such description must 221 | be placed in a text file included with all distributions of the Covered 222 | Software under this License. Except to the extent prohibited by statute 223 | or regulation, such description must be sufficiently detailed for a 224 | recipient of ordinary skill to be able to understand it. 225 | 226 | 227 | ### 5. Termination 228 | 229 | **5.1.** The rights granted under this License will terminate automatically 230 | if You fail to comply with any of its terms. However, if You become 231 | compliant, then the rights granted under this License from a particular 232 | Contributor are reinstated **(a)** provisionally, unless and until such 233 | Contributor explicitly and finally terminates Your grants, and **(b)** on an 234 | ongoing basis, if such Contributor fails to notify You of the 235 | non-compliance by some reasonable means prior to 60 days after You have 236 | come back into compliance. Moreover, Your grants from a particular 237 | Contributor are reinstated on an ongoing basis if such Contributor 238 | notifies You of the non-compliance by some reasonable means, this is the 239 | first time You have received notice of non-compliance with this License 240 | from such Contributor, and You become compliant prior to 30 days after 241 | Your receipt of the notice. 242 | 243 | **5.2.** If You initiate litigation against any entity by asserting a patent 244 | infringement claim (excluding declaratory judgment actions, 245 | counter-claims, and cross-claims) alleging that a Contributor Version 246 | directly or indirectly infringes any patent, then the rights granted to 247 | You by any and all Contributors for the Covered Software under Section 248 | 2.1 of this License shall terminate. 249 | 250 | **5.3.** In the event of termination under Sections 5.1 or 5.2 above, all 251 | end user license agreements (excluding distributors and resellers) which 252 | have been validly granted by You or Your distributors under this License 253 | prior to termination shall survive termination. 254 | 255 | 256 | ### 6. 
Disclaimer of Warranty 257 | 258 | > Covered Software is provided under this License on an “as is” 259 | > basis, without warranty of any kind, either expressed, implied, or 260 | > statutory, including, without limitation, warranties that the 261 | > Covered Software is free of defects, merchantable, fit for a 262 | > particular purpose or non-infringing. The entire risk as to the 263 | > quality and performance of the Covered Software is with You. 264 | > Should any Covered Software prove defective in any respect, You 265 | > (not any Contributor) assume the cost of any necessary servicing, 266 | > repair, or correction. This disclaimer of warranty constitutes an 267 | > essential part of this License. No use of any Covered Software is 268 | > authorized under this License except under this disclaimer. 269 | 270 | ### 7. Limitation of Liability 271 | 272 | > Under no circumstances and under no legal theory, whether tort 273 | > (including negligence), contract, or otherwise, shall any 274 | > Contributor, or anyone who distributes Covered Software as 275 | > permitted above, be liable to You for any direct, indirect, 276 | > special, incidental, or consequential damages of any character 277 | > including, without limitation, damages for lost profits, loss of 278 | > goodwill, work stoppage, computer failure or malfunction, or any 279 | > and all other commercial damages or losses, even if such party 280 | > shall have been informed of the possibility of such damages. This 281 | > limitation of liability shall not apply to liability for death or 282 | > personal injury resulting from such party's negligence to the 283 | > extent applicable law prohibits such limitation. Some 284 | > jurisdictions do not allow the exclusion or limitation of 285 | > incidental or consequential damages, so this exclusion and 286 | > limitation may not apply to You. 287 | 288 | 289 | ### 8. Litigation 290 | 291 | Any litigation relating to this License may be brought only in the 292 | courts of a jurisdiction where the defendant maintains its principal 293 | place of business and such litigation shall be governed by laws of that 294 | jurisdiction, without reference to its conflict-of-law provisions. 295 | Nothing in this Section shall prevent a party's ability to bring 296 | cross-claims or counter-claims. 297 | 298 | 299 | ### 9. Miscellaneous 300 | 301 | This License represents the complete agreement concerning the subject 302 | matter hereof. If any provision of this License is held to be 303 | unenforceable, such provision shall be reformed only to the extent 304 | necessary to make it enforceable. Any law or regulation which provides 305 | that the language of a contract shall be construed against the drafter 306 | shall not be used to construe this License against a Contributor. 307 | 308 | 309 | ### 10. Versions of the License 310 | 311 | #### 10.1. New Versions 312 | 313 | Mozilla Foundation is the license steward. Except as provided in Section 314 | 10.3, no one other than the license steward has the right to modify or 315 | publish new versions of this License. Each version will be given a 316 | distinguishing version number. 317 | 318 | #### 10.2. Effect of New Versions 319 | 320 | You may distribute the Covered Software under the terms of the version 321 | of the License under which You originally received the Covered Software, 322 | or under the terms of any subsequent version published by the license 323 | steward. 324 | 325 | #### 10.3. 
Modified Versions 326 | 327 | If you create software not governed by this License, and you want to 328 | create a new license for such software, you may create and use a 329 | modified version of this License if you rename the license and remove 330 | any references to the name of the license steward (except to note that 331 | such modified license differs from this License). 332 | 333 | #### 10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses 334 | 335 | If You choose to distribute Source Code Form that is Incompatible With 336 | Secondary Licenses under the terms of this version of the License, the 337 | notice described in Exhibit B of this License must be attached. 338 | 339 | ## Exhibit A - Source Code Form License Notice 340 | 341 | This Source Code Form is subject to the terms of the Mozilla Public 342 | License, v. 2.0. If a copy of the MPL was not distributed with this 343 | file, You can obtain one at http://mozilla.org/MPL/2.0/. 344 | 345 | If it is not possible or desirable to put the notice in a particular 346 | file, then You may include the notice in a location (such as a LICENSE 347 | file in a relevant directory) where a recipient would be likely to look 348 | for such a notice. 349 | 350 | You may add additional accurate notices of copyright ownership. 351 | 352 | ## Exhibit B - “Incompatible With Secondary Licenses” Notice 353 | 354 | This Source Code Form is "Incompatible With Secondary Licenses", as 355 | defined by the Mozilla Public License, v. 2.0. 356 | -------------------------------------------------------------------------------- /benches/palmtree.rs: -------------------------------------------------------------------------------- 1 | use criterion::{ 2 | black_box, criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion, Throughput, 3 | }; 4 | use palmtree::StdPalmTree as PalmTree; 5 | use rand::prelude::SliceRandom; 6 | use rand::{Rng, SeedableRng}; 7 | use std::collections::BTreeMap; 8 | use std::iter::FromIterator; 9 | 10 | const SIZES: &[usize] = &[64, 256, 1024, 4096, 16384, 32768, 65536]; 11 | // const SIZES: &[usize] = &[256, 65536]; 12 | 13 | fn insert_sequence(c: &mut Criterion) { 14 | let mut group = c.benchmark_group("insert_sequence"); 15 | for size in SIZES { 16 | group.throughput(Throughput::Elements(*size as u64)); 17 | group.bench_with_input( 18 | BenchmarkId::new("std::BTreeMap::insert", size), 19 | size, 20 | |b, &size| { 21 | b.iter_batched_ref( 22 | BTreeMap::::new, 23 | |map| { 24 | for i in 0..size { 25 | map.insert(i, i); 26 | } 27 | }, 28 | BatchSize::SmallInput, 29 | ) 30 | }, 31 | ); 32 | group.bench_with_input( 33 | BenchmarkId::new("b+tree::insert", size), 34 | size, 35 | |b, &size| { 36 | b.iter_batched_ref( 37 | PalmTree::::new, 38 | |map| { 39 | for i in 0..size { 40 | map.insert(i, i); 41 | } 42 | }, 43 | BatchSize::SmallInput, 44 | ) 45 | }, 46 | ); 47 | group.bench_with_input(BenchmarkId::new("b+tree::load", size), size, |b, &size| { 48 | b.iter(|| PalmTree::::load((0..size).map(|i| (i, i)))) 49 | }); 50 | } 51 | group.finish(); 52 | } 53 | 54 | fn insert_random(c: &mut Criterion) { 55 | let mut group = c.benchmark_group("insert_random"); 56 | for size in SIZES { 57 | let input_data: Vec<(usize, usize)> = rand::rngs::StdRng::seed_from_u64(31337) 58 | .sample_iter(rand::distributions::Standard) 59 | .take(*size) 60 | .collect(); 61 | group.throughput(Throughput::Elements(*size as u64)); 62 | group.bench_with_input( 63 | BenchmarkId::new("std::btree", size), 64 | &input_data, 65 | |b, 
input_data| { 66 | b.iter_batched_ref( 67 | BTreeMap::::new, 68 | |map| { 69 | for (k, v) in input_data { 70 | map.insert(*k, *v); 71 | } 72 | }, 73 | BatchSize::SmallInput, 74 | ) 75 | }, 76 | ); 77 | group.bench_with_input( 78 | BenchmarkId::new("b+tree", size), 79 | &input_data, 80 | |b, input_data| { 81 | b.iter_batched_ref( 82 | PalmTree::::new, 83 | |map| { 84 | for (k, v) in input_data { 85 | map.insert(*k, *v); 86 | } 87 | }, 88 | BatchSize::SmallInput, 89 | ) 90 | }, 91 | ); 92 | } 93 | group.finish(); 94 | } 95 | 96 | fn remove_sequence(c: &mut Criterion) { 97 | let mut group = c.benchmark_group("remove_sequence"); 98 | for size in SIZES { 99 | group.throughput(Throughput::Elements(*size as u64)); 100 | group.bench_with_input(BenchmarkId::new("std::btree", size), size, |b, &size| { 101 | b.iter_batched_ref( 102 | || BTreeMap::::from_iter((0..size).map(|i| (i, i))), 103 | |map| { 104 | for k in 0..size { 105 | map.remove(&k); 106 | } 107 | }, 108 | BatchSize::SmallInput, 109 | ) 110 | }); 111 | group.bench_with_input(BenchmarkId::new("b+tree", size), size, |b, &size| { 112 | b.iter_batched_ref( 113 | || PalmTree::::load((0..size).map(|i| (i, i))), 114 | |map| { 115 | for k in 0..size { 116 | map.remove(&k); 117 | } 118 | }, 119 | BatchSize::SmallInput, 120 | ) 121 | }); 122 | } 123 | group.finish(); 124 | } 125 | 126 | fn remove_random(c: &mut Criterion) { 127 | let mut group = c.benchmark_group("remove_random"); 128 | for size in SIZES { 129 | let mut indices = Vec::from_iter(0..*size); 130 | indices.shuffle(&mut rand::rngs::StdRng::seed_from_u64(31337)); 131 | group.throughput(Throughput::Elements(*size as u64)); 132 | group.bench_with_input( 133 | BenchmarkId::new("std::btree", size), 134 | &(&indices, size), 135 | |b, &(indices, &size)| { 136 | b.iter_batched_ref( 137 | || BTreeMap::::from_iter((0..size).map(|i| (i, i))), 138 | |map| { 139 | for k in indices { 140 | map.remove(k); 141 | } 142 | }, 143 | BatchSize::SmallInput, 144 | ) 145 | }, 146 | ); 147 | group.bench_with_input( 148 | BenchmarkId::new("b+tree", size), 149 | &(&indices, size), 150 | |b, &(indices, &size)| { 151 | b.iter_batched_ref( 152 | || PalmTree::::load((0..size).map(|i| (i, i))), 153 | |map| { 154 | for k in indices { 155 | map.remove(k); 156 | } 157 | }, 158 | BatchSize::SmallInput, 159 | ) 160 | }, 161 | ); 162 | } 163 | group.finish(); 164 | } 165 | 166 | fn lookup(c: &mut Criterion) { 167 | let mut group = c.benchmark_group("lookup"); 168 | for size in SIZES { 169 | group.throughput(Throughput::Elements(*size as u64)); 170 | group.bench_with_input(BenchmarkId::new("std::btree", size), size, |b, &size| { 171 | b.iter_batched_ref( 172 | || BTreeMap::::from_iter((0..size).map(|i| (i, i))), 173 | |map| { 174 | for i in 0..size { 175 | black_box(map.get(&i)); 176 | } 177 | }, 178 | BatchSize::SmallInput, 179 | ) 180 | }); 181 | // group.bench_with_input(BenchmarkId::new("b+tree/linear", size), size, |b, &size| { 182 | // b.iter_batched_ref( 183 | // || PalmTree::::load((0..size).map(|i| (i, i))), 184 | // |map| { 185 | // for i in 0..size { 186 | // black_box(map.get_linear(&i)); 187 | // } 188 | // }, 189 | // BatchSize::SmallInput, 190 | // ) 191 | // }); 192 | group.bench_with_input(BenchmarkId::new("b+tree", size), size, |b, &size| { 193 | b.iter_batched_ref( 194 | || PalmTree::::load((0..size).map(|i| (i, i))), 195 | |map| { 196 | for i in 0..size { 197 | black_box(map.get(&i)); 198 | } 199 | }, 200 | BatchSize::SmallInput, 201 | ) 202 | }); 203 | } 204 | group.finish(); 205 | } 206 | 207 | fn 
iterate(c: &mut Criterion) { 208 | let mut group = c.benchmark_group("iterate"); 209 | for size in SIZES { 210 | group.throughput(Throughput::Elements(*size as u64)); 211 | group.bench_with_input(BenchmarkId::new("std::btree", size), size, |b, &size| { 212 | b.iter_batched_ref( 213 | || BTreeMap::::from_iter((0..size).map(|i| (i, i))), 214 | |map| { 215 | map.iter().for_each(|i| { 216 | black_box(i); 217 | }); 218 | }, 219 | BatchSize::SmallInput, 220 | ) 221 | }); 222 | group.bench_with_input(BenchmarkId::new("b+tree", size), size, |b, &size| { 223 | b.iter_batched_ref( 224 | || PalmTree::::load((0..size).map(|i| (i, i))), 225 | |map| { 226 | map.iter().for_each(|i| { 227 | black_box(i); 228 | }); 229 | }, 230 | BatchSize::PerIteration, 231 | ) 232 | }); 233 | } 234 | group.finish(); 235 | } 236 | 237 | fn iterate_owned(c: &mut Criterion) { 238 | let mut group = c.benchmark_group("iterate_owned"); 239 | for size in SIZES { 240 | group.throughput(Throughput::Elements(*size as u64)); 241 | group.bench_with_input(BenchmarkId::new("std::btree", size), size, |b, &size| { 242 | b.iter_batched( 243 | || BTreeMap::::from_iter((0..size).map(|i| (i, i))), 244 | |map| { 245 | for entry in map { 246 | black_box(entry); 247 | } 248 | }, 249 | BatchSize::SmallInput, 250 | ) 251 | }); 252 | group.bench_with_input(BenchmarkId::new("b+tree", size), size, |b, &size| { 253 | b.iter_batched( 254 | || PalmTree::::from_iter((0..size).map(|i| (i, i))), 255 | |map| { 256 | for entry in map { 257 | black_box(entry); 258 | } 259 | }, 260 | BatchSize::SmallInput, 261 | ) 262 | }); 263 | } 264 | group.finish(); 265 | } 266 | 267 | fn find_key_binary(keys: &[K], key: &K) -> usize 268 | where 269 | K: Ord, 270 | { 271 | let size = keys.len(); 272 | 273 | let mut low = 0; 274 | let mut high = size - 1; 275 | while low != high { 276 | let mid = (low + high) / 2; 277 | if unsafe { keys.get_unchecked(mid) } < key { 278 | low = mid + 1; 279 | } else { 280 | high = mid; 281 | } 282 | } 283 | low 284 | } 285 | 286 | fn branchless_binary_search(keys: &[K], key: &K) -> usize { 287 | unsafe { 288 | let mut base = keys.as_ptr(); 289 | let mut n = keys.len(); 290 | while n > 1 { 291 | let half = n / 2; 292 | if *base.add(half) < *key { 293 | base = base.add(half); 294 | } 295 | n -= half; 296 | } 297 | ((if *base < *key { base.add(1) } else { base }) as usize - keys.as_ptr() as usize) 298 | / std::mem::size_of::() 299 | } 300 | } 301 | 302 | pub fn search_strategies(c: &mut Criterion) { 303 | let mut group = c.benchmark_group("search_strategies"); 304 | for size in &[8, 16, 32, 64, 128, 256usize] { 305 | let keys = Vec::::from_iter(0..(*size as u64)); 306 | group.bench_with_input(BenchmarkId::new("binary", size), size, |b, &size| { 307 | b.iter_batched_ref( 308 | || Vec::from_iter((0..256u64).map(|i| i % (size as u64))), 309 | |lookup| { 310 | for key in lookup { 311 | let index = find_key_binary(&keys, &key); 312 | assert_eq!(keys[index], *key); 313 | } 314 | }, 315 | BatchSize::SmallInput, 316 | ) 317 | }); 318 | group.bench_with_input(BenchmarkId::new("branchless", size), size, |b, &size| { 319 | b.iter_batched_ref( 320 | || Vec::from_iter((0..256u64).map(|i| i % (size as u64))), 321 | |lookup| { 322 | for key in lookup { 323 | let index = branchless_binary_search(&keys, &key); 324 | assert_eq!(keys[index], *key); 325 | } 326 | }, 327 | BatchSize::SmallInput, 328 | ) 329 | }); 330 | } 331 | } 332 | 333 | criterion_group!( 334 | palmtree, 335 | insert_sequence, 336 | insert_random, 337 | remove_sequence, 338 | remove_random, 
339 | lookup, 340 | iterate, 341 | iterate_owned, 342 | search_strategies, 343 | ); 344 | criterion_main!(palmtree); 345 | -------------------------------------------------------------------------------- /build.rs: -------------------------------------------------------------------------------- 1 | fn main() { 2 | println!("cargo:rerun-if-changed=build.rs"); 3 | if let Some(channel) = version_check::Channel::read() { 4 | if channel.supports_features() { 5 | println!("cargo:rustc-cfg=core_intrinsics"); 6 | } 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /clippy.toml: -------------------------------------------------------------------------------- 1 | blacklisted-names = [] 2 | single-char-binding-names-threshold = 15 3 | # I HAVE THE POWER OF OLEG 4 | type-complexity-threshold = 999999 5 | cognitive-complexity-threshold = 30 6 | -------------------------------------------------------------------------------- /fuzz/.gitignore: -------------------------------------------------------------------------------- 1 | 2 | target 3 | corpus 4 | artifacts 5 | -------------------------------------------------------------------------------- /fuzz/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "palmtree-fuzz" 3 | version = "0.0.0" 4 | authors = ["Automatically generated"] 5 | publish = false 6 | edition = "2018" 7 | 8 | [package.metadata] 9 | cargo-fuzz = true 10 | 11 | [dependencies] 12 | libfuzzer-sys = "0.3" 13 | arbitrary = { version = "0.4", features = ["derive"] } 14 | typenum = "1.12" 15 | 16 | [dependencies.palmtree] 17 | path = ".." 18 | features = ["test"] 19 | 20 | # Prevent this from interfering with workspaces 21 | [workspace] 22 | members = ["."] 23 | 24 | [[bin]] 25 | name = "palmtree" 26 | path = "fuzz_targets/palmtree.rs" 27 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/palmtree.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | 3 | use libfuzzer_sys::fuzz_target; 4 | use palmtree::{ 5 | tests::{integration_test, Input}, 6 | Shared, SyncShared, Tree64, Unique, 7 | }; 8 | 9 | fuzz_target!(|input: Input| { 10 | integration_test::>(input); 11 | }); 12 | -------------------------------------------------------------------------------- /proptest-regressions/tests.txt: -------------------------------------------------------------------------------- 1 | # Seeds for failure cases proptest has generated in the past. It is 2 | # automatically read and these particular cases re-run before any 3 | # novel cases are generated. 4 | # 5 | # It is recommended to check this file in to source control so that 6 | # everyone who runs the test benefits from these saved cases. 7 | cc 32cde46ad8c1b16f98f0d32797ece8a7775faaf1da2e88ff3f967718fe273fd3 # shrinks to input = (Empty, [Insert(0, 0), Remove(1)]) 8 | cc fd7f2d9a20872d52540731c8df1e55f0ef290b299f88d41c6e32a0aefbaedf1d # shrinks to input = (Empty, [Insert(208, 0), Remove(208), Insert(0, 0)]) 9 | -------------------------------------------------------------------------------- /src/arch.rs: -------------------------------------------------------------------------------- 1 | /// Prefetch some data. 2 | /// 3 | /// This function may do nothing, if there's no platform support. 4 | /// All x86 CPUs should have some support. 5 | /// 6 | /// Try not to use this excessively. 
The CPU is usually better at 7 | /// predicting what to prefetch than you are, so don't use it unless 8 | /// you see significant benchmark improvements. 9 | #[cfg_attr( 10 | any(target_arch = "x86", target_arch = "x86_64"), 11 | target_feature(enable = "sse") 12 | )] 13 | pub(crate) unsafe fn prefetch(data: &A) { 14 | // TODO think more carefully about the locality values. 15 | #[cfg(core_intrinsics)] 16 | std::intrinsics::prefetch_read_data(data, 2); 17 | #[cfg(all(not(core_intrinsics), target_arch = "x86"))] 18 | std::arch::x86::_mm_prefetch(data as *const _ as *const i8, std::arch::x86::_MM_HINT_T1); 19 | #[cfg(all(not(core_intrinsics), target_arch = "x86_64"))] 20 | std::arch::x86_64::_mm_prefetch( 21 | data as *const _ as *const i8, 22 | std::arch::x86_64::_MM_HINT_T1, 23 | ); 24 | } 25 | -------------------------------------------------------------------------------- /src/array.rs: -------------------------------------------------------------------------------- 1 | use generic_array::ArrayLength; 2 | use std::{ 3 | fmt::{Debug, Error, Formatter}, 4 | mem::MaybeUninit, 5 | }; 6 | 7 | pub(crate) struct Array 8 | where 9 | N: ArrayLength, 10 | { 11 | data: MaybeUninit, 12 | } 13 | 14 | impl Array 15 | where 16 | N: ArrayLength, 17 | { 18 | #[inline(always)] 19 | fn ptr(&self) -> *const A { 20 | self.data.as_ptr().cast() 21 | } 22 | 23 | #[inline(always)] 24 | fn mut_ptr(&mut self) -> *mut A { 25 | self.data.as_mut_ptr().cast() 26 | } 27 | 28 | #[inline(always)] 29 | pub(crate) unsafe fn deref(&self, length: usize) -> &[A] { 30 | debug_assert!(length <= N::USIZE); 31 | std::slice::from_raw_parts(self.ptr(), length) 32 | } 33 | 34 | #[inline(always)] 35 | pub(crate) unsafe fn deref_mut(&mut self, length: usize) -> &mut [A] { 36 | debug_assert!(length <= N::USIZE); 37 | std::slice::from_raw_parts_mut(self.mut_ptr(), length) 38 | } 39 | 40 | pub(crate) fn new() -> Self { 41 | Self { 42 | data: MaybeUninit::uninit(), 43 | } 44 | } 45 | 46 | pub(crate) unsafe fn drop(&mut self, length: usize) { 47 | std::ptr::drop_in_place(self.deref_mut(length)) 48 | } 49 | 50 | pub(crate) unsafe fn unit(value: A) -> Self { 51 | let mut out = Self::new(); 52 | out.mut_ptr().write(value); 53 | out 54 | } 55 | 56 | pub(crate) unsafe fn steal_from>( 57 | other: &mut Array, 58 | length: usize, 59 | index: usize, 60 | ) -> Self { 61 | let new_length = length - index; 62 | debug_assert!(length <= N2::USIZE); 63 | debug_assert!(index < length); 64 | debug_assert!(new_length <= N::USIZE); 65 | let mut out = Self::new(); 66 | out.mut_ptr() 67 | .copy_from_nonoverlapping(other.mut_ptr().add(index), new_length); 68 | out 69 | } 70 | 71 | pub(crate) unsafe fn clone(&self, length: usize) -> Self 72 | where 73 | A: Clone, 74 | { 75 | debug_assert!(length <= N::USIZE); 76 | let mut out = Self::new(); 77 | for (index, element) in self.deref(length).iter().enumerate() { 78 | out.mut_ptr().add(index).write(element.clone()); 79 | } 80 | out 81 | } 82 | 83 | pub(crate) unsafe fn clone_with(&self, length: usize, f: F) -> Self 84 | where 85 | F: Fn(&A) -> A, 86 | { 87 | debug_assert!(length <= N::USIZE); 88 | let mut out = Self::new(); 89 | for (index, element) in self.deref(length).iter().enumerate() { 90 | out.mut_ptr().add(index).write(f(element)); 91 | } 92 | out 93 | } 94 | 95 | pub(crate) unsafe fn push(&mut self, length: usize, value: A) { 96 | debug_assert!(length < N::USIZE); 97 | self.mut_ptr().add(length).write(value); 98 | } 99 | 100 | pub(crate) unsafe fn pop(&mut self, length: usize) -> A { 101 | 
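// Safety: `length` must be the current number of initialised elements and must
// be non-zero; the element at `length - 1` is moved out by value, so the caller
// is responsible for decrementing its own length so the slot isn't dropped twice.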
debug_assert!(length <= N::USIZE); 102 | debug_assert!(length > 0); 103 | self.mut_ptr().add(length - 1).read() 104 | } 105 | 106 | pub(crate) unsafe fn insert(&mut self, length: usize, index: usize, value: A) { 107 | debug_assert!(length < N::USIZE); 108 | debug_assert!(index <= length); 109 | if index < length { 110 | self.mut_ptr() 111 | .add(index) 112 | .copy_to(self.mut_ptr().add(index + 1), length - index); 113 | } 114 | self.mut_ptr().add(index).write(value); 115 | } 116 | 117 | pub(crate) unsafe fn insert_pair(&mut self, length: usize, index: usize, left: A, right: A) { 118 | debug_assert!(length < (N::USIZE - 1)); 119 | debug_assert!(index <= length); 120 | if index < length { 121 | self.mut_ptr() 122 | .add(index) 123 | .copy_to(self.mut_ptr().add(index + 2), length - index); 124 | } 125 | self.mut_ptr().add(index).write(left); 126 | self.mut_ptr().add(index + 1).write(right); 127 | } 128 | 129 | pub(crate) unsafe fn remove(&mut self, length: usize, index: usize) -> A { 130 | debug_assert!(length <= N::USIZE); 131 | debug_assert!(length > 0); 132 | debug_assert!(index < length); 133 | let result = self.mut_ptr().add(index).read(); 134 | if index + 1 < length { 135 | self.mut_ptr() 136 | .add(index + 1) 137 | .copy_to(self.mut_ptr().add(index), length - (index + 1)); 138 | } 139 | result 140 | } 141 | } 142 | 143 | impl Debug for Array 144 | where 145 | N: ArrayLength, 146 | { 147 | fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { 148 | write!(f, "Array[{}; {}]", std::any::type_name::(), N::USIZE) 149 | } 150 | } 151 | -------------------------------------------------------------------------------- /src/branch.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | array::Array, 3 | config::TreeConfig, 4 | leaf::Leaf, 5 | pointer::Pointer, 6 | search::{find_key, find_key_linear}, 7 | InsertResult, 8 | }; 9 | use node::Node; 10 | use std::fmt::{Debug, Error, Formatter}; 11 | use typenum::Unsigned; 12 | 13 | // Never leak this monster to the rest of the crate. 14 | pub(crate) mod node; 15 | 16 | /// A branch node holds mappings of high keys to child nodes. 17 | pub(crate) struct Branch 18 | where 19 | C: TreeConfig, 20 | { 21 | has_branches: bool, 22 | length: usize, 23 | keys: Array, 24 | children: Array, C::BranchSize>, 25 | } 26 | 27 | impl Drop for Branch 28 | where 29 | C: TreeConfig, 30 | { 31 | fn drop(&mut self) { 32 | unsafe { 33 | self.keys.drop(self.length); 34 | while self.length > 0 { 35 | // The `Node` type can't drop itself because it doesn't know 36 | // whether it's a Branch or a Leaf, so we *must* drop every `Node` 37 | // from the `Branch` it's stored in. 
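// `has_branches` records which variant every child is, so we can cast each
// type-erased pointer back to a Branch or a Leaf before dropping it.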
38 | let node = self.children.pop(self.length); 39 | self.length -= 1; 40 | if self.has_branches() { 41 | node.unwrap_branch(); 42 | } else { 43 | node.unwrap_leaf(); 44 | } 45 | } 46 | } 47 | } 48 | } 49 | 50 | impl Clone for Branch 51 | where 52 | K: Clone, 53 | V: Clone, 54 | C: TreeConfig, 55 | { 56 | fn clone(&self) -> Self { 57 | let children = unsafe { 58 | if self.has_branches() { 59 | self.children.clone_with(self.length, |node| { 60 | Pointer::new(node.as_branch().clone()).into() 61 | }) 62 | } else { 63 | self.children.clone_with(self.length, |node| { 64 | Pointer::new(node.as_leaf().clone()).into() 65 | }) 66 | } 67 | }; 68 | Self { 69 | has_branches: self.has_branches, 70 | length: self.length, 71 | keys: unsafe { self.keys.clone(self.length) }, 72 | children, 73 | } 74 | } 75 | } 76 | 77 | impl Branch 78 | where 79 | C: TreeConfig, 80 | { 81 | #[inline(always)] 82 | pub(crate) fn new(has_branches: bool) -> Self { 83 | Branch { 84 | has_branches, 85 | length: 0, 86 | keys: Array::new(), 87 | children: Array::new(), 88 | } 89 | } 90 | 91 | #[inline(always)] 92 | pub(crate) fn len(&self) -> usize { 93 | self.length 94 | } 95 | 96 | #[inline(always)] 97 | pub(crate) fn is_empty(&self) -> bool { 98 | self.len() == 0 99 | } 100 | 101 | #[inline(always)] 102 | pub(crate) fn is_full(&self) -> bool { 103 | self.len() == C::BranchSize::USIZE 104 | } 105 | 106 | #[inline(always)] 107 | pub(crate) fn highest(&self) -> &K { 108 | &self.keys()[self.len() - 1] 109 | } 110 | 111 | #[inline(always)] 112 | pub(crate) fn has_leaves(&self) -> bool { 113 | !self.has_branches() 114 | } 115 | 116 | #[inline(always)] 117 | pub(crate) fn has_branches(&self) -> bool { 118 | self.has_branches 119 | } 120 | 121 | #[inline(always)] 122 | pub(crate) fn keys(&self) -> &[K] { 123 | unsafe { self.keys.deref(self.length) } 124 | } 125 | 126 | #[inline(always)] 127 | pub(crate) fn keys_mut(&mut self) -> &mut [K] { 128 | unsafe { self.keys.deref_mut(self.length) } 129 | } 130 | 131 | #[inline(always)] 132 | fn children(&self) -> &[Node] { 133 | unsafe { self.children.deref(self.length) } 134 | } 135 | 136 | #[inline(always)] 137 | fn children_mut(&mut self) -> &mut [Node] { 138 | unsafe { self.children.deref_mut(self.length) } 139 | } 140 | 141 | #[inline(always)] 142 | pub(crate) fn get_branch(&self, index: usize) -> &Self { 143 | debug_assert!(self.has_branches()); 144 | unsafe { self.children()[index].as_branch() } 145 | } 146 | 147 | #[inline(always)] 148 | pub(crate) unsafe fn get_branch_unchecked(&self, index: usize) -> &Self { 149 | debug_assert!(self.has_branches()); 150 | debug_assert!(self.len() > index); 151 | self.children().get_unchecked(index).as_branch() 152 | } 153 | 154 | #[inline(always)] 155 | pub(crate) fn get_leaf(&self, index: usize) -> &Leaf { 156 | debug_assert!(self.has_leaves()); 157 | unsafe { self.children()[index].as_leaf() } 158 | } 159 | 160 | #[inline(always)] 161 | pub(crate) unsafe fn get_leaf_unchecked(&self, index: usize) -> &Leaf { 162 | debug_assert!(self.has_leaves()); 163 | debug_assert!(self.len() > index); 164 | self.children().get_unchecked(index).as_leaf() 165 | } 166 | 167 | #[inline(always)] 168 | pub(crate) fn get_branch_mut(&mut self, index: usize) -> &mut Self 169 | where 170 | K: Clone, 171 | V: Clone, 172 | { 173 | debug_assert!(self.has_branches()); 174 | unsafe { self.children_mut()[index].as_branch_mut() } 175 | } 176 | 177 | #[inline(always)] 178 | pub(crate) fn get_leaf_mut(&mut self, index: usize) -> &mut Leaf 179 | where 180 | K: Clone, 181 | V: Clone, 
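// The Clone bounds are needed because the child pointer may be shared:
// make_mut performs a copy-on-write clone before handing out a mutable reference.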
182 | { 183 | debug_assert!(self.has_leaves()); 184 | unsafe { self.children_mut()[index].as_leaf_mut() } 185 | } 186 | 187 | #[inline(always)] 188 | pub(crate) fn push_branch(&mut self, key: K, branch: Pointer) { 189 | debug_assert!(self.has_branches()); 190 | debug_assert!(!self.is_full()); 191 | unsafe { 192 | self.keys.push(self.length, key); 193 | self.children.push(self.length, branch.into()); 194 | } 195 | self.length += 1; 196 | } 197 | 198 | #[inline(always)] 199 | pub(crate) fn push_leaf(&mut self, key: K, leaf: Pointer, C::PointerKind>) { 200 | debug_assert!(self.has_leaves()); 201 | debug_assert!(!self.is_full()); 202 | unsafe { 203 | self.keys.push(self.length, key); 204 | self.children.push(self.length, leaf.into()); 205 | } 206 | self.length += 1; 207 | } 208 | 209 | #[inline(always)] 210 | pub(crate) fn remove_branch(&mut self, index: usize) -> (K, Pointer) { 211 | debug_assert!(self.has_branches()); 212 | debug_assert!(index < self.length); 213 | let result = unsafe { 214 | ( 215 | self.keys.remove(self.length, index), 216 | self.children.remove(self.length, index).unwrap_branch(), 217 | ) 218 | }; 219 | self.length -= 1; 220 | result 221 | } 222 | 223 | #[inline(always)] 224 | pub(crate) fn remove_leaf( 225 | &mut self, 226 | index: usize, 227 | ) -> (K, Pointer, C::PointerKind>) { 228 | debug_assert!(self.has_leaves()); 229 | debug_assert!(index < self.length); 230 | let result = unsafe { 231 | ( 232 | self.keys.remove(self.length, index), 233 | self.children.remove(self.length, index).unwrap_leaf(), 234 | ) 235 | }; 236 | self.length -= 1; 237 | result 238 | } 239 | 240 | #[inline(always)] 241 | pub(crate) fn remove_last_branch(&mut self) -> (K, Pointer) { 242 | debug_assert!(self.has_branches()); 243 | debug_assert!(!self.is_empty()); 244 | let result = unsafe { 245 | ( 246 | self.keys.pop(self.length), 247 | self.children.pop(self.length).unwrap_branch(), 248 | ) 249 | }; 250 | self.length -= 1; 251 | result 252 | } 253 | 254 | #[inline(always)] 255 | pub(crate) fn push_branch_pair( 256 | &mut self, 257 | left_key: K, 258 | left: Pointer, 259 | right_key: K, 260 | right: Pointer, 261 | ) { 262 | debug_assert!(self.has_branches()); 263 | debug_assert!(self.len() + 2 <= C::BranchSize::USIZE); 264 | unsafe { 265 | self.keys 266 | .insert_pair(self.length, self.length, left_key, right_key); 267 | self.children 268 | .insert_pair(self.length, self.length, left.into(), right.into()); 269 | } 270 | self.length += 2; 271 | } 272 | 273 | #[inline(always)] 274 | pub(crate) fn insert_branch_pair( 275 | &mut self, 276 | index: usize, 277 | left_key: K, 278 | left: Pointer, 279 | right_key: K, 280 | right: Pointer, 281 | ) { 282 | debug_assert!(self.has_branches()); 283 | debug_assert!(self.len() + 2 <= C::BranchSize::USIZE); 284 | unsafe { 285 | self.keys 286 | .insert_pair(self.length, index, left_key, right_key); 287 | self.children 288 | .insert_pair(self.length, index, left.into(), right.into()); 289 | } 290 | self.length += 2; 291 | } 292 | 293 | #[inline(always)] 294 | pub(crate) fn insert_leaf_pair( 295 | &mut self, 296 | index: usize, 297 | left_key: K, 298 | left: Pointer, C::PointerKind>, 299 | right_key: K, 300 | right: Pointer, C::PointerKind>, 301 | ) { 302 | debug_assert!(self.has_leaves()); 303 | debug_assert!(self.len() + 2 <= C::BranchSize::USIZE); 304 | unsafe { 305 | self.keys 306 | .insert_pair(self.length, index, left_key, right_key); 307 | self.children 308 | .insert_pair(self.length, index, left.into(), right.into()); 309 | } 310 | self.length += 2; 311 | } 
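// A rough sketch of how the pair-insertion methods above combine with `split`
// below when a child overflows (hypothetical `parent`/`index`/`child` names,
// mirroring the logic in `insert` further down; illustration only):
//
//     let (key, child) = parent.remove_branch(index);
//     let (left, right) = Branch::split(child);
//     parent.insert_branch_pair(index, left.highest().clone(), left, key, right);
//
// `split` hands the upper half of the keys and children to a new right-hand
// branch and keeps the lower half in place, so the removed high key still
// describes the right-hand half.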
312 | 313 | pub(crate) fn split( 314 | mut this: Pointer, 315 | ) -> (Pointer, Pointer) 316 | where 317 | K: Clone, 318 | V: Clone, 319 | { 320 | let right = { 321 | let this = Pointer::make_mut(&mut this); 322 | let half = this.len() / 2; 323 | let right = Pointer::new(Branch { 324 | has_branches: this.has_branches, 325 | length: half, 326 | keys: unsafe { Array::steal_from(&mut this.keys, this.length, half) }, 327 | children: unsafe { Array::steal_from(&mut this.children, this.length, half) }, 328 | }); 329 | this.length -= half; 330 | right 331 | }; 332 | (this, right) 333 | } 334 | } 335 | 336 | impl Branch 337 | where 338 | K: Ord + Clone, 339 | C: TreeConfig, 340 | { 341 | pub(crate) fn unit(leaf: Pointer, C::PointerKind>) -> Self { 342 | Branch { 343 | has_branches: false, 344 | length: 1, 345 | keys: unsafe { Array::unit(leaf.highest().clone()) }, 346 | children: unsafe { Array::unit(leaf.into()) }, 347 | } 348 | } 349 | 350 | // For benchmarking: lookup with a linear search instead of binary. 351 | pub(crate) fn get_linear(&self, key: &K) -> Option<&V> { 352 | let mut branch = self; 353 | loop { 354 | if let Some(index) = find_key_linear(branch.keys(), key) { 355 | if branch.has_branches() { 356 | branch = branch.get_branch(index); 357 | } else { 358 | return branch.get_leaf(index).get_linear(key); 359 | } 360 | } else { 361 | return None; 362 | } 363 | } 364 | } 365 | 366 | pub(crate) fn get(&self, key: &K) -> Option<&V> { 367 | let mut branch = self; 368 | loop { 369 | if let Some(index) = find_key(branch.keys(), key) { 370 | if branch.has_branches() { 371 | branch = branch.get_branch(index); 372 | } else { 373 | return branch.get_leaf(index).get(key); 374 | } 375 | } else { 376 | return None; 377 | } 378 | } 379 | } 380 | 381 | pub(crate) fn get_mut(&mut self, key: &K) -> Option<&mut V> 382 | where 383 | V: Clone, 384 | { 385 | let mut branch = self; 386 | loop { 387 | if branch.is_empty() { 388 | return None; 389 | } 390 | if let Some(index) = find_key(branch.keys(), key) { 391 | if branch.has_branches() { 392 | branch = branch.get_branch_mut(index); 393 | } else { 394 | return branch.get_leaf_mut(index).get_mut(key); 395 | } 396 | } else { 397 | return None; 398 | } 399 | } 400 | } 401 | 402 | pub(crate) fn insert(&mut self, key: K, value: V) -> InsertResult 403 | where 404 | V: Clone, 405 | { 406 | // TODO: this algorithm could benefit from the addition of neighbour 407 | // checking to reduce splitting. 408 | if let Some(index) = find_key(self.keys(), &key) { 409 | // We have found a key match, attempt to insert into the matching child. 410 | let (key, value) = { 411 | let result = if self.has_branches() { 412 | self.get_branch_mut(index).insert(key, value) 413 | } else { 414 | self.get_leaf_mut(index).insert(key, value) 415 | }; 416 | match result { 417 | InsertResult::Full(key, value) => (key, value), 418 | result => return result, 419 | } 420 | }; 421 | // Fall through from match = child is full and needs to be split. 422 | if self.is_full() { 423 | // Current branch is full, needs to split further up. 424 | InsertResult::Full(key, value) 425 | } else if self.has_branches() { 426 | // Split the child branch and retry insertion from here. 427 | // FIXME should determine which of the split branches to insert into instead of rechecking from the parent branch. 428 | // Same for leaf splitting below, and splitting in >max case further below. 
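// Remove the full child, split it into two halves, put both halves back in
// its place, then retry the insert from this branch; `find_key` will route
// the key to whichever half now covers it.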
429 | let (removed_key, removed_branch) = self.remove_branch(index); 430 | let (left, right) = Self::split(removed_branch); 431 | self.insert_branch_pair(index, left.highest().clone(), left, removed_key, right); 432 | self.insert(key, value) 433 | } else { 434 | let (removed_key, removed_leaf) = self.remove_leaf(index); 435 | let (left, right) = Leaf::split(removed_leaf); 436 | self.insert_leaf_pair(index, left.highest().clone(), left, removed_key, right); 437 | self.insert(key, value) 438 | } 439 | } else { 440 | // No key match, which means the key is higher than the current max, so we insert along the right edge. 441 | let end_index = self.len() - 1; 442 | let (key, value) = { 443 | if self.has_branches() { 444 | self.keys_mut()[end_index] = key.clone(); 445 | match self.get_branch_mut(end_index).insert(key, value) { 446 | InsertResult::Full(key, value) => (key, value), 447 | result => return result, 448 | } 449 | } else { 450 | let leaf = self.get_leaf_mut(end_index); 451 | if !leaf.is_full() { 452 | unsafe { leaf.push_unchecked(key.clone(), value) }; 453 | self.keys_mut()[end_index] = key; 454 | return InsertResult::Added; 455 | } 456 | (key, value) 457 | } 458 | }; 459 | if self.is_full() { 460 | InsertResult::Full(key, value) 461 | } else if self.has_branches() { 462 | let (removed_key, removed_branch) = self.remove_last_branch(); 463 | let (left, right) = Self::split(removed_branch); 464 | self.push_branch_pair(left.highest().clone(), left, removed_key, right); 465 | self.insert(key, value) 466 | } else { 467 | let leaf = Pointer::new(Leaf::unit(key.clone(), value)); 468 | self.push_leaf(key, leaf); 469 | InsertResult::Added 470 | } 471 | } 472 | } 473 | } 474 | 475 | impl Branch 476 | where 477 | K: Clone + Debug, 478 | V: Clone + Debug, 479 | C: TreeConfig, 480 | { 481 | fn tree_fmt(&self, f: &mut Formatter<'_>, level: usize) -> Result<(), Error> { 482 | let mut indent = String::new(); 483 | for _ in 0..level { 484 | indent += " "; 485 | } 486 | writeln!( 487 | f, 488 | "{}Branch(has_branches = {})", 489 | indent, 490 | self.has_branches() 491 | )?; 492 | for (index, key) in self.keys().iter().enumerate() { 493 | if self.has_branches() { 494 | writeln!(f, "{} [{:?}]:", indent, key)?; 495 | self.get_branch(index).tree_fmt(f, level + 1)?; 496 | } else { 497 | writeln!(f, "{} [{:?}]: {:?}", indent, key, self.get_leaf(index))?; 498 | } 499 | } 500 | Ok(()) 501 | } 502 | } 503 | 504 | impl Debug for Branch 505 | where 506 | K: Clone + Debug, 507 | V: Clone + Debug, 508 | C: TreeConfig, 509 | { 510 | fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { 511 | self.tree_fmt(f, 0) 512 | } 513 | } 514 | -------------------------------------------------------------------------------- /src/branch/node.rs: -------------------------------------------------------------------------------- 1 | use crate::{branch::Branch, config::TreeConfig, leaf::Leaf, pointer::Pointer}; 2 | use std::{ 3 | fmt::{Debug, Error, Formatter}, 4 | marker::PhantomData, 5 | mem::ManuallyDrop, 6 | }; 7 | 8 | pub struct Node 9 | where 10 | C: ?Sized + TreeConfig, 11 | { 12 | types: PhantomData<(K, V, C)>, 13 | node: ManuallyDrop>, 14 | } 15 | 16 | impl From, C::PointerKind>> for Node 17 | where 18 | C: TreeConfig, 19 | { 20 | #[inline(always)] 21 | fn from(node: Pointer, C::PointerKind>) -> Self { 22 | Self { 23 | types: PhantomData, 24 | node: ManuallyDrop::new(unsafe { Pointer::cast_into(node) }), 25 | } 26 | } 27 | } 28 | 29 | impl From, C::PointerKind>> for Node 30 | where 31 | C: TreeConfig, 32 | { 33 | 
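// Leaf pointers are erased into `Node` exactly like branch pointers above;
// only the owning `Branch::has_branches` flag remembers which variant a child
// really is, which is why the unwrap/as_* accessors below are unsafe.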
#[inline(always)] 34 | fn from(node: Pointer, C::PointerKind>) -> Self { 35 | Self { 36 | types: PhantomData, 37 | node: ManuallyDrop::new(unsafe { Pointer::cast_into(node) }), 38 | } 39 | } 40 | } 41 | 42 | impl Node 43 | where 44 | C: TreeConfig, 45 | { 46 | pub(crate) unsafe fn unwrap_branch(self) -> Pointer, C::PointerKind> { 47 | Pointer::cast_into(ManuallyDrop::into_inner(self.node)) 48 | } 49 | 50 | pub(crate) unsafe fn unwrap_leaf(self) -> Pointer, C::PointerKind> { 51 | Pointer::cast_into(ManuallyDrop::into_inner(self.node)) 52 | } 53 | 54 | #[inline(always)] 55 | pub(crate) unsafe fn as_branch(&self) -> &Branch { 56 | Pointer::deref_cast(&self.node) 57 | } 58 | 59 | #[inline(always)] 60 | pub(crate) unsafe fn as_leaf(&self) -> &Leaf { 61 | Pointer::deref_cast(&self.node) 62 | } 63 | 64 | #[inline(always)] 65 | pub(crate) unsafe fn as_branch_mut(&mut self) -> &mut Branch 66 | where 67 | K: Clone, 68 | V: Clone, 69 | { 70 | Pointer::make_mut_cast(&mut self.node) 71 | } 72 | 73 | #[inline(always)] 74 | pub(crate) unsafe fn as_leaf_mut(&mut self) -> &mut Leaf 75 | where 76 | K: Clone, 77 | V: Clone, 78 | { 79 | Pointer::make_mut_cast(&mut self.node) 80 | } 81 | } 82 | 83 | impl Debug for Node 84 | where 85 | C: TreeConfig, 86 | { 87 | fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { 88 | write!(f, "Node[...]") 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /src/config.rs: -------------------------------------------------------------------------------- 1 | use crate::{branch::node::Node, PointerKind}; 2 | use generic_array::ArrayLength; 3 | use std::marker::PhantomData; 4 | use typenum::{IsGreater, U3, U64}; 5 | 6 | pub trait TreeConfig { 7 | type BranchSize: ArrayLength + ArrayLength> + IsGreater; 8 | type LeafSize: ArrayLength + ArrayLength + IsGreater; 9 | type PointerKind: PointerKind; 10 | } 11 | 12 | #[derive(Debug, Clone, Copy)] 13 | pub struct Tree64(PhantomData); 14 | impl TreeConfig for Tree64 { 15 | type BranchSize = U64; 16 | type LeafSize = U64; 17 | type PointerKind = Kind; 18 | } 19 | -------------------------------------------------------------------------------- /src/entry.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | branch::Branch, config::TreeConfig, leaf::Leaf, pointer::Pointer, search::PathedPointer, 3 | PalmTree, 4 | }; 5 | use std::fmt::{Debug, Error, Formatter}; 6 | 7 | #[derive(Debug)] 8 | pub enum Entry<'a, K, V, C> 9 | where 10 | K: Ord + Clone, 11 | C: TreeConfig, 12 | { 13 | Vacant(VacantEntry<'a, K, V, C>), 14 | Occupied(OccupiedEntry<'a, K, V, C>), 15 | } 16 | 17 | impl<'a, K, V, C> Entry<'a, K, V, C> 18 | where 19 | K: Ord + Clone, 20 | C: TreeConfig, 21 | { 22 | #[inline(always)] 23 | pub(crate) fn new(tree: &'a mut PalmTree, key: K) -> Self { 24 | if let Some(ref mut root) = tree.root { 25 | match PathedPointer::exact_key(root, &key) { 26 | Ok(cursor) => Self::Occupied(OccupiedEntry { tree, cursor }), 27 | Err(cursor) => Self::Vacant(VacantEntry { key, tree, cursor }), 28 | } 29 | } else { 30 | Self::Vacant(VacantEntry { 31 | key, 32 | tree, 33 | cursor: PathedPointer::null(), 34 | }) 35 | } 36 | } 37 | } 38 | 39 | // Vacant entry 40 | 41 | pub struct VacantEntry<'a, K, V, C> 42 | where 43 | K: Ord + Clone, 44 | C: TreeConfig, 45 | { 46 | tree: &'a mut PalmTree, 47 | cursor: PathedPointer<&'a mut (K, V), K, V, C>, 48 | key: K, 49 | } 50 | 51 | impl<'a, K, V, C> VacantEntry<'a, K, V, C> 52 | where 53 | K: 'a + Ord + Clone, 54 | 
V: 'a, 55 | C: TreeConfig, 56 | { 57 | pub fn key(&self) -> &K { 58 | &self.key 59 | } 60 | 61 | pub fn into_key(self) -> K { 62 | self.key 63 | } 64 | 65 | pub fn insert(mut self, value: V) -> &'a mut V 66 | where 67 | V: Clone, 68 | { 69 | // If the tree is empty, just insert a new node. 70 | // Note that the tree could have an allocated root even when empty, 71 | // and we're just ignoring that here on the assumption that it's better 72 | // to avoid an extra null check on every insert than optimise for an infrequent use case. 73 | if self.tree.is_empty() { 74 | self.tree.root = Some(Branch::unit(Leaf::unit(self.key, value).into()).into()); 75 | self.tree.size = 1; 76 | return &mut Pointer::make_mut(self.tree.root.as_mut().unwrap()) 77 | .get_leaf_mut(0) 78 | .values_mut()[0]; 79 | } 80 | let result = if self.cursor.is_null() { 81 | unsafe { 82 | self.cursor.push_last( 83 | Pointer::make_mut(self.tree.root.as_mut().unwrap()), 84 | self.key, 85 | value, 86 | ) 87 | } 88 | } else { 89 | unsafe { self.cursor.insert(self.key, value) } 90 | }; 91 | let ptr: *mut V = match result { 92 | Ok(mut ptr) => { 93 | self.tree.size += 1; 94 | unsafe { ptr.value_mut().unwrap() } 95 | } 96 | Err((key, value)) => { 97 | let root = self.tree.root.as_mut().unwrap(); 98 | PalmTree::split_root(root); 99 | self.cursor = PathedPointer::exact_key(root, &key).unwrap_err(); 100 | self.key = key; 101 | self.insert(value) 102 | } 103 | }; 104 | unsafe { &mut *ptr } 105 | } 106 | } 107 | 108 | impl<'a, K, V, C> Debug for VacantEntry<'a, K, V, C> 109 | where 110 | K: Ord + Clone + Debug, 111 | V: Debug, 112 | C: TreeConfig, 113 | { 114 | fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { 115 | write!(f, "VacantEntry({:?})", self.key()) 116 | } 117 | } 118 | 119 | // Occupied entry 120 | 121 | pub struct OccupiedEntry<'a, K, V, C> 122 | where 123 | K: Ord + Clone, 124 | C: TreeConfig, 125 | { 126 | tree: &'a mut PalmTree, 127 | cursor: PathedPointer<&'a mut (K, V), K, V, C>, 128 | } 129 | 130 | impl<'a, K, V, C> OccupiedEntry<'a, K, V, C> 131 | where 132 | K: 'a + Ord + Clone, 133 | V: 'a, 134 | C: TreeConfig, 135 | { 136 | pub fn key(&self) -> &K { 137 | unsafe { self.cursor.key() }.unwrap() 138 | } 139 | 140 | pub fn get(&self) -> &V { 141 | unsafe { self.cursor.value() }.unwrap() 142 | } 143 | 144 | pub fn get_mut(&mut self) -> &mut V { 145 | unsafe { self.cursor.value_mut() }.unwrap() 146 | } 147 | 148 | pub fn insert(&mut self, value: V) -> V { 149 | std::mem::replace(self.get_mut(), value) 150 | } 151 | 152 | pub fn remove_entry(self) -> (K, V) { 153 | self.tree.size -= 1; 154 | unsafe { self.cursor.remove() } 155 | } 156 | 157 | pub fn remove(self) -> V { 158 | self.remove_entry().1 159 | } 160 | 161 | pub fn into_mut(self) -> &'a mut V { 162 | unsafe { self.cursor.into_entry_mut() }.1 163 | } 164 | } 165 | 166 | impl<'a, K, V, C> Debug for OccupiedEntry<'a, K, V, C> 167 | where 168 | K: Ord + Clone + Debug, 169 | V: Debug, 170 | C: TreeConfig, 171 | { 172 | fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { 173 | write!(f, "OccupiedEntry({:?} => {:?})", self.key(), self.get()) 174 | } 175 | } 176 | 177 | #[cfg(test)] 178 | mod test { 179 | use super::*; 180 | use crate::StdPalmTree; 181 | use std::iter::FromIterator; 182 | 183 | #[test] 184 | fn insert_with_entry() { 185 | let mut tree: StdPalmTree = PalmTree::new(); 186 | let size = 131_072; 187 | for i in 0..size { 188 | match tree.entry(i) { 189 | Entry::Vacant(entry) => { 190 | entry.insert(i); 191 | } 192 | Entry::Occupied(_) => { 193 | 
panic!("found an occupied entry where none should be at {}", i); 194 | } 195 | } 196 | } 197 | for i in 0..size { 198 | assert_eq!(Some(&i), tree.get(&i)); 199 | } 200 | } 201 | 202 | #[test] 203 | fn delete_with_entry() { 204 | let size = 131_072; 205 | let mut tree: StdPalmTree = PalmTree::from_iter((0..size).map(|i| (i, i))); 206 | for i in 0..size { 207 | match tree.entry(i) { 208 | Entry::Vacant(_entry) => { 209 | panic!("unexpected vacant entry at {}", i); 210 | } 211 | Entry::Occupied(entry) => { 212 | assert_eq!(entry.remove(), i); 213 | } 214 | } 215 | } 216 | assert_eq!(0, tree.len()); 217 | } 218 | } 219 | -------------------------------------------------------------------------------- /src/iter/merge.rs: -------------------------------------------------------------------------------- 1 | enum Next { 2 | Left, 3 | Right, 4 | } 5 | use self::Next::*; 6 | use std::fmt::{Debug, Error, Formatter}; 7 | 8 | pub struct MergeIter { 9 | left: L, 10 | right: R, 11 | next_left: Option, 12 | next_right: Option, 13 | next: Next, 14 | compare: Cmp, 15 | equal: Eq, 16 | } 17 | 18 | impl MergeIter 19 | where 20 | L: Iterator, 21 | R: Iterator, 22 | Cmp: Fn(&A, &A) -> bool, 23 | Eq: Fn(&A, &A) -> bool, 24 | { 25 | pub fn merge(mut left: L, mut right: R, compare: Cmp, equal: Eq) -> Self { 26 | let next_left = left.next(); 27 | let next_right = right.next(); 28 | let next = Self::choose_next(&next_left, &next_right, &compare); 29 | let mut out = Self { 30 | left, 31 | right, 32 | next_left, 33 | next_right, 34 | next, 35 | compare, 36 | equal, 37 | }; 38 | out.check_eq(); 39 | out 40 | } 41 | 42 | fn choose_next(left: &Option, right: &Option, compare: impl Fn(&A, &A) -> bool) -> Next { 43 | match (left, right) { 44 | (Some(left), Some(right)) if compare(left, right) => Right, 45 | (None, Some(_)) => Right, 46 | _ => Left, 47 | } 48 | } 49 | 50 | fn check_eq(&mut self) { 51 | if let (Some(left), Some(right)) = (&self.next_left, &self.next_right) { 52 | if (self.equal)(left, right) { 53 | match self.next { 54 | Left => self.next_right = self.right.next(), 55 | Right => self.next_left = self.left.next(), 56 | } 57 | } 58 | } 59 | } 60 | } 61 | 62 | impl Iterator for MergeIter 63 | where 64 | L: Iterator, 65 | R: Iterator, 66 | Cmp: Fn(&A, &A) -> bool, 67 | Eq: Fn(&A, &A) -> bool, 68 | { 69 | type Item = A; 70 | 71 | fn next(&mut self) -> Option { 72 | let next_result = match self.next { 73 | Left => std::mem::replace(&mut self.next_left, self.left.next()), 74 | Right => std::mem::replace(&mut self.next_right, self.right.next()), 75 | }; 76 | self.next = Self::choose_next(&self.next_left, &self.next_right, &self.compare); 77 | self.check_eq(); 78 | next_result 79 | } 80 | } 81 | 82 | impl Debug for MergeIter { 83 | fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { 84 | write!(f, "MergeIter") 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /src/iter/mod.rs: -------------------------------------------------------------------------------- 1 | #![allow(unreachable_pub)] // pub exports below erroneously complain without this 2 | 3 | use crate::{config::TreeConfig, search::PathedPointer, PalmTree}; 4 | use std::{ 5 | cmp::Ordering, 6 | ops::{Bound, RangeBounds}, 7 | }; 8 | 9 | mod ref_iter; 10 | pub use ref_iter::Iter; 11 | 12 | mod mut_iter; 13 | pub use mut_iter::IterMut; 14 | 15 | mod owned; 16 | pub use owned::OwnedIter; 17 | 18 | mod merge; 19 | pub use merge::MergeIter; 20 | 21 | fn paths_from_range<'a, Lifetime, K, V, C, R>( 22 | tree: &'a 
PalmTree, 23 | range: R, 24 | ) -> Option<( 25 | PathedPointer, 26 | PathedPointer, 27 | )> 28 | where 29 | K: Clone + Ord, 30 | R: RangeBounds, 31 | C: TreeConfig, 32 | { 33 | match (range.start_bound(), range.end_bound()) { 34 | (Bound::Excluded(left), Bound::Excluded(right)) if left == right => { 35 | panic!("PalmTreeIter: start and end bounds are equal and excluding each other") 36 | } 37 | (Bound::Included(left), Bound::Included(right)) 38 | | (Bound::Included(left), Bound::Excluded(right)) 39 | | (Bound::Excluded(left), Bound::Included(right)) 40 | | (Bound::Excluded(left), Bound::Excluded(right)) 41 | if left.cmp(right) == Ordering::Greater => 42 | { 43 | panic!("PalmTreeIter: range start is greater than range end"); 44 | } 45 | _ => {} 46 | } 47 | 48 | let left; 49 | let right; 50 | 51 | if let Some(ref tree) = tree.root { 52 | left = match range.start_bound() { 53 | Bound::Included(key) => PathedPointer::key_or_higher(tree, key), 54 | Bound::Excluded(key) => PathedPointer::higher_than_key(tree, key), 55 | Bound::Unbounded => PathedPointer::lowest(tree), 56 | }; 57 | if left.is_null() { 58 | return None; 59 | } 60 | 61 | right = match range.end_bound() { 62 | Bound::Included(key) => PathedPointer::key_or_lower(tree, key), 63 | Bound::Excluded(key) => PathedPointer::lower_than_key(tree, key), 64 | Bound::Unbounded => PathedPointer::highest(tree), 65 | }; 66 | if right.is_null() { 67 | return None; 68 | } 69 | 70 | Some((left, right)) 71 | } else { 72 | // Tree has no root, iterator is empty. 73 | None 74 | } 75 | } 76 | 77 | #[cfg(test)] 78 | mod test { 79 | use crate::StdPalmTree; 80 | 81 | #[test] 82 | fn consuming_iter() { 83 | let size = 65536usize; 84 | let tree = StdPalmTree::load((0..size).map(|i| (i, i))); 85 | for (index, (k, v)) in tree.into_iter().enumerate() { 86 | assert_eq!(index, k); 87 | assert_eq!(index, v); 88 | } 89 | } 90 | 91 | #[test] 92 | fn iterate_single_leaf() { 93 | let size = 64usize; 94 | let tree = StdPalmTree::load((0..size).map(|i| (i, i))); 95 | tree.iter().for_each(|i| { 96 | criterion::black_box(i); 97 | }); 98 | } 99 | 100 | #[test] 101 | fn iterate_forward() { 102 | let size = 65536usize; 103 | let tree = StdPalmTree::load((0..size).map(|i| (i, i))); 104 | let expected: Vec<_> = (0..size).map(|i| (i, i)).collect(); 105 | let result: Vec<_> = tree.iter().map(|(k, v)| (*k, *v)).collect(); 106 | assert_eq!(expected, result); 107 | } 108 | 109 | #[test] 110 | fn iterate_backward() { 111 | let size = 65536usize; 112 | let tree = StdPalmTree::load((0..size).map(|i| (i, i))); 113 | let expected: Vec<_> = (0..size).map(|i| (i, i)).rev().collect(); 114 | let result: Vec<_> = tree.iter().map(|(k, v)| (*k, *v)).rev().collect(); 115 | assert_eq!(expected, result); 116 | } 117 | 118 | #[test] 119 | fn empty_range_iter() { 120 | let tree = StdPalmTree::load((0..1usize).map(|i| (i, i))); 121 | let expected = Vec::<(usize, usize)>::new(); 122 | let result: Vec<_> = tree.range(0..0).map(|(k, v)| (*k, *v)).collect(); 123 | assert_eq!(expected, result); 124 | } 125 | 126 | #[test] 127 | fn wide_end_range_iter() { 128 | let tree = StdPalmTree::load((0..1usize).map(|i| (i, i))); 129 | let expected = vec![(0usize, 0usize)]; 130 | let result: Vec<_> = tree.range(0..255).map(|(k, v)| (*k, *v)).collect(); 131 | assert_eq!(expected, result); 132 | } 133 | 134 | #[test] 135 | fn wide_start_range_iter() { 136 | let tree = StdPalmTree::load((0..1usize).map(|i| (i, i))); 137 | let expected: Vec<(usize, usize)> = vec![]; 138 | let result: Vec<_> = tree.range(100..).map(|(k, 
v)| (*k, *v)).collect(); 139 | assert_eq!(expected, result); 140 | } 141 | 142 | #[test] 143 | #[should_panic] 144 | fn descending_range_iter() { 145 | let tree = StdPalmTree::load((0..1usize).map(|i| (i, i))); 146 | let expected = Vec::<(usize, usize)>::new(); 147 | let result: Vec<_> = tree.range(255..0).map(|(k, v)| (*k, *v)).collect(); 148 | assert_eq!(expected, result); 149 | } 150 | 151 | #[test] 152 | fn end_before_first_key_iter() { 153 | let tree = StdPalmTree::load((1..2usize).map(|i| (i, i))); 154 | let expected: Vec<(usize, usize)> = vec![]; 155 | let result: Vec<_> = tree.range(..0).map(|(k, v)| (*k, *v)).collect(); 156 | assert_eq!(expected, result); 157 | } 158 | 159 | #[test] 160 | fn start_after_last_key_iter() { 161 | let tree = StdPalmTree::load((1..2usize).map(|i| (i, i))); 162 | let expected: Vec<(usize, usize)> = vec![]; 163 | let result: Vec<_> = tree.range(3..).map(|(k, v)| (*k, *v)).collect(); 164 | assert_eq!(expected, result); 165 | } 166 | 167 | #[test] 168 | fn end_before_last_key_iter() { 169 | let tree = StdPalmTree::load((0..2usize).map(|i| (i, i))); 170 | let expected: Vec<(usize, usize)> = vec![(0, 0)]; 171 | let result: Vec<_> = tree.range(..=0).map(|(k, v)| (*k, *v)).collect(); 172 | assert_eq!(expected, result); 173 | } 174 | 175 | #[test] 176 | fn range_with_deleted_max() { 177 | let mut tree: StdPalmTree = StdPalmTree::new(); 178 | tree.insert(0, 0); 179 | tree.insert(1, 136); 180 | tree.remove(&1); 181 | let result: Vec<(u8, u8)> = tree.range(1..2).map(|(k, v)| (*k, *v)).collect(); 182 | let expected: Vec<(u8, u8)> = vec![]; 183 | assert_eq!(expected, result); 184 | } 185 | 186 | #[test] 187 | fn iterate_over_emptied_tree() { 188 | let mut tree: StdPalmTree = StdPalmTree::new(); 189 | tree.insert(0, 0); 190 | tree.remove(&0); 191 | let result: Vec<(u8, u8)> = tree.iter().map(|(k, v)| (*k, *v)).collect(); 192 | let expected: Vec<(u8, u8)> = vec![]; 193 | assert_eq!(expected, result); 194 | } 195 | 196 | #[test] 197 | fn closing_bound_lies_past_target_leaf() { 198 | // This test has two leaves, and the closing bound for the iterator lies exactly between them. 199 | // Left leaf has max key 251, right leaf has min key 254, bound is 253. 
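// (The 66 pairs below are loaded with StdPalmTree's leaf size of 64, so the first 64 entries fill the left leaf and the remaining two spill into the right one.)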
200 | let input = vec![ 201 | (0, 171), 202 | (1, 248), 203 | (5, 189), 204 | (7, 122), 205 | (8, 189), 206 | (9, 11), 207 | (10, 165), 208 | (11, 215), 209 | (13, 243), 210 | (15, 0), 211 | (17, 0), 212 | (21, 245), 213 | (24, 5), 214 | (30, 0), 215 | (31, 255), 216 | (32, 10), 217 | (35, 0), 218 | (41, 255), 219 | (52, 82), 220 | (54, 28), 221 | (58, 0), 222 | (59, 255), 223 | (61, 11), 224 | (64, 238), 225 | (78, 59), 226 | (80, 255), 227 | (82, 82), 228 | (85, 238), 229 | (91, 91), 230 | (93, 243), 231 | (104, 115), 232 | (115, 115), 233 | (121, 121), 234 | (122, 255), 235 | (124, 10), 236 | (126, 251), 237 | (127, 85), 238 | (131, 131), 239 | (133, 115), 240 | (135, 0), 241 | (138, 126), 242 | (142, 238), 243 | (148, 158), 244 | (152, 242), 245 | (158, 138), 246 | (164, 0), 247 | (166, 164), 248 | (170, 170), 249 | (177, 78), 250 | (184, 17), 251 | (189, 255), 252 | (202, 54), 253 | (213, 215), 254 | (215, 50), 255 | (219, 255), 256 | (227, 164), 257 | (238, 246), 258 | (242, 18), 259 | (243, 242), 260 | (245, 243), 261 | (246, 127), 262 | (248, 170), 263 | (249, 255), 264 | (251, 184), 265 | (254, 242), 266 | (255, 54), 267 | ]; 268 | let tree: StdPalmTree = StdPalmTree::load(input.clone().into_iter()); 269 | let result: Vec<(u8, u8)> = tree.range(..253).map(|(k, v)| (*k, *v)).collect(); 270 | let expected: Vec<(u8, u8)> = input.into_iter().filter(|(k, _)| k < &253).collect(); 271 | assert_eq!(expected, result); 272 | } 273 | } 274 | -------------------------------------------------------------------------------- /src/iter/mut_iter.rs: -------------------------------------------------------------------------------- 1 | use super::paths_from_range; 2 | use crate::{config::TreeConfig, search::PathedPointer, PalmTree}; 3 | use std::{ 4 | cmp::Ordering, 5 | fmt::{Debug, Formatter}, 6 | iter::FusedIterator, 7 | ops::RangeBounds, 8 | }; 9 | 10 | pub struct IterMut<'a, K, V, C> 11 | where 12 | C: TreeConfig, 13 | { 14 | left: PathedPointer<&'a mut (K, V), K, V, C>, 15 | right: PathedPointer<&'a mut (K, V), K, V, C>, 16 | } 17 | 18 | impl<'a, K, V, C> IterMut<'a, K, V, C> 19 | where 20 | K: Clone + Ord, 21 | C: 'a + TreeConfig, 22 | { 23 | fn null() -> Self { 24 | Self { 25 | left: PathedPointer::null(), 26 | right: PathedPointer::null(), 27 | } 28 | } 29 | 30 | /// Construct a mutable iterator. 
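/// The `range` argument restricts the iterator to the given key bounds; `..` walks the whole tree.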
31 | /// 32 | /// Here is a doctest to ensure you can't have two mutable iterators over the same tree 33 | /// at the same time: 34 | /// 35 | /// ```compile_fail 36 | /// use palmtree::PalmTree; 37 | /// let mut tree = PalmTree::load((0..4096).map(|i| (i, i))); 38 | /// let mut it1 = tree.iter_mut(); 39 | /// let mut it2 = tree.iter_mut(); 40 | /// assert_eq!(it1.next(), it2.next()); 41 | /// ``` 42 | pub(crate) fn new(tree: &'a mut PalmTree<K, V, C>, range: R) -> Self 43 | where 44 | R: RangeBounds<K>, 45 | { 46 | if let Some((left, right)) = paths_from_range(tree, range) { 47 | Self { left, right } 48 | } else { 49 | Self::null() 50 | } 51 | } 52 | 53 | fn step_forward(&mut self) { 54 | let result = unsafe { self.left.step_forward() }; 55 | debug_assert!(result); 56 | } 57 | 58 | fn step_back(&mut self) { 59 | let result = unsafe { self.right.step_back() }; 60 | debug_assert!(result); 61 | } 62 | 63 | fn left(&mut self) -> &'a mut PathedPointer<&'a mut (), K, V, C> { 64 | unsafe { &mut *(&mut self.left as *mut _ as *mut PathedPointer<&'a mut (), K, V, C>) } 65 | } 66 | 67 | fn right(&mut self) -> &'a mut PathedPointer<&'a mut (), K, V, C> { 68 | unsafe { &mut *(&mut self.right as *mut _ as *mut PathedPointer<&'a mut (), K, V, C>) } 69 | } 70 | 71 | fn left_key(&mut self) -> Option<&'a K> { 72 | unsafe { self.left().key() } 73 | } 74 | 75 | fn left_value(&mut self) -> Option<&'a mut V> { 76 | unsafe { self.left().value_mut() } 77 | } 78 | 79 | fn right_key(&mut self) -> Option<&'a K> { 80 | unsafe { self.right().key() } 81 | } 82 | 83 | fn right_value(&mut self) -> Option<&'a mut V> { 84 | unsafe { self.right().value_mut() } 85 | } 86 | } 87 | 88 | impl<'a, K, V, C> Iterator for IterMut<'a, K, V, C> 89 | where 90 | K: Clone + Ord, 91 | C: 'a + TreeConfig<K, V>, 92 | { 93 | type Item = (&'a K, &'a mut V); 94 | 95 | fn next(&mut self) -> Option<Self::Item> { 96 | let left_key = self.left_key()?; 97 | let right_key = self.right_key()?; 98 | // If left key is greater than right key, we're done. 99 | let cmp = left_key.cmp(right_key); 100 | if cmp == Ordering::Greater { 101 | self.left.clear(); 102 | self.right.clear(); 103 | return None; 104 | } 105 | let value = self.left_value().unwrap(); 106 | if cmp == Ordering::Equal { 107 | self.left.clear(); 108 | self.right.clear(); 109 | } else { 110 | self.step_forward(); 111 | } 112 | Some((left_key, value)) 113 | } 114 | } 115 | 116 | impl<'a, K, V, C> DoubleEndedIterator for IterMut<'a, K, V, C> 117 | where 118 | K: 'a + Clone + Ord, 119 | V: 'a, 120 | C: 'a + TreeConfig<K, V>, 121 | { 122 | fn next_back(&mut self) -> Option<Self::Item> { 123 | let left_key = self.left_key()?; 124 | let right_key = self.right_key()?; 125 | // If left key is greater than right key, we're done.
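// (The iterator holds two PathedPointer cursors, `left` and `right`, and walks them towards each other; once they meet or cross, both paths are cleared and iteration ends.)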
126 | let cmp = left_key.cmp(right_key); 127 | if cmp == Ordering::Greater { 128 | self.left.clear(); 129 | self.right.clear(); 130 | return None; 131 | } 132 | let value = self.right_value().unwrap(); 133 | if cmp == Ordering::Equal { 134 | self.left.clear(); 135 | self.right.clear(); 136 | } else { 137 | self.step_back(); 138 | } 139 | Some((right_key, value)) 140 | } 141 | } 142 | 143 | impl<'a, K, V, C> FusedIterator for IterMut<'a, K, V, C> 144 | where 145 | K: Clone + Ord, 146 | C: 'a + TreeConfig, 147 | { 148 | } 149 | 150 | impl<'a, K, V, C> Debug for IterMut<'a, K, V, C> 151 | where 152 | C: 'a + TreeConfig, 153 | { 154 | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { 155 | write!(f, "IterMut") 156 | } 157 | } 158 | -------------------------------------------------------------------------------- /src/iter/owned.rs: -------------------------------------------------------------------------------- 1 | use crate::{branch::Branch, config::TreeConfig, pointer::Pointer, search::PathedPointer}; 2 | use std::{ 3 | fmt::{Debug, Formatter}, 4 | iter::FusedIterator, 5 | }; 6 | 7 | pub struct OwnedIter 8 | where 9 | C: TreeConfig, 10 | { 11 | tree: Option, C::PointerKind>>, 12 | left: PathedPointer<(K, V), K, V, C>, 13 | right: PathedPointer<(K, V), K, V, C>, 14 | remaining: usize, 15 | } 16 | 17 | impl OwnedIter 18 | where 19 | K: Clone + Ord, 20 | C: TreeConfig, 21 | { 22 | pub(crate) fn new( 23 | tree: Option, C::PointerKind>>, 24 | remaining: usize, 25 | ) -> Self { 26 | if let Some(ref root) = tree { 27 | Self { 28 | left: PathedPointer::lowest(&root), 29 | right: PathedPointer::highest(&root), 30 | tree, 31 | remaining, 32 | } 33 | } else { 34 | Self { 35 | tree: None, 36 | left: PathedPointer::null(), 37 | right: PathedPointer::null(), 38 | remaining, 39 | } 40 | } 41 | } 42 | } 43 | 44 | impl Iterator for OwnedIter 45 | where 46 | K: Clone + Ord, 47 | C: TreeConfig, 48 | { 49 | type Item = (K, V); 50 | 51 | fn next(&mut self) -> Option { 52 | if self.tree.is_none() { 53 | return None; 54 | } 55 | loop { 56 | let leaf = match unsafe { self.left.deref_mut_leaf() } { 57 | None => return None, 58 | Some(leaf) => leaf, 59 | }; 60 | if leaf.is_empty() { 61 | unsafe { self.left.step_forward() }; 62 | } else { 63 | let result = leaf.pop_front(); 64 | self.remaining -= 1; 65 | return result; 66 | } 67 | } 68 | } 69 | 70 | fn size_hint(&self) -> (usize, Option) { 71 | (self.remaining, Some(self.remaining)) 72 | } 73 | } 74 | 75 | impl DoubleEndedIterator for OwnedIter 76 | where 77 | K: Clone + Ord, 78 | C: TreeConfig, 79 | { 80 | fn next_back(&mut self) -> Option { 81 | if self.tree.is_none() { 82 | return None; 83 | } 84 | loop { 85 | let leaf = match unsafe { self.right.deref_mut_leaf() } { 86 | None => return None, 87 | Some(leaf) => leaf, 88 | }; 89 | if leaf.is_empty() { 90 | unsafe { self.left.step_back() }; 91 | } else { 92 | self.remaining -= 1; 93 | return leaf.pop_back(); 94 | } 95 | } 96 | } 97 | } 98 | 99 | impl ExactSizeIterator for OwnedIter 100 | where 101 | K: Clone + Ord, 102 | C: TreeConfig, 103 | { 104 | } 105 | impl FusedIterator for OwnedIter 106 | where 107 | K: Clone + Ord, 108 | C: TreeConfig, 109 | { 110 | } 111 | 112 | impl Debug for OwnedIter 113 | where 114 | K: Ord + Clone + Debug, 115 | V: Debug, 116 | C: TreeConfig, 117 | { 118 | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { 119 | write!(f, "OwnedIter") 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /src/iter/ref_iter.rs: 
-------------------------------------------------------------------------------- 1 | use super::paths_from_range; 2 | use crate::{config::TreeConfig, search::PathedPointer, PalmTree}; 3 | use std::{ 4 | cmp::Ordering, 5 | fmt::{Debug, Error, Formatter}, 6 | iter::FusedIterator, 7 | ops::RangeBounds, 8 | }; 9 | 10 | pub struct Iter<'a, K, V, C> 11 | where 12 | C: TreeConfig, 13 | { 14 | left: PathedPointer<&'a (K, V), K, V, C>, 15 | right: PathedPointer<&'a (K, V), K, V, C>, 16 | } 17 | 18 | impl<'a, K, V, C> Clone for Iter<'a, K, V, C> 19 | where 20 | K: Clone + Ord, 21 | C: TreeConfig, 22 | { 23 | fn clone(&self) -> Self { 24 | Self { 25 | left: self.left.clone(), 26 | right: self.right.clone(), 27 | } 28 | } 29 | } 30 | 31 | impl<'a, K, V, C> Iter<'a, K, V, C> 32 | where 33 | K: Clone + Ord, 34 | C: 'a + TreeConfig, 35 | { 36 | fn null() -> Self { 37 | Self { 38 | left: PathedPointer::null(), 39 | right: PathedPointer::null(), 40 | } 41 | } 42 | 43 | pub(crate) fn new(tree: &'a PalmTree, range: R) -> Self 44 | where 45 | R: RangeBounds, 46 | { 47 | if let Some((left, right)) = paths_from_range(tree, range) { 48 | Self { left, right } 49 | } else { 50 | Self::null() 51 | } 52 | } 53 | 54 | fn step_forward(&mut self) { 55 | let result = unsafe { self.left.step_forward() }; 56 | debug_assert!(result); 57 | } 58 | 59 | fn step_back(&mut self) { 60 | let result = unsafe { self.right.step_back() }; 61 | debug_assert!(result); 62 | } 63 | 64 | fn left(&self) -> &'a PathedPointer<&'a (), K, V, C> { 65 | unsafe { &*(&self.left as *const _ as *const PathedPointer<&'a (), K, V, C>) } 66 | } 67 | 68 | fn right(&self) -> &'a PathedPointer<&'a (), K, V, C> { 69 | unsafe { &*(&self.right as *const _ as *const PathedPointer<&'a (), K, V, C>) } 70 | } 71 | 72 | fn left_key(&self) -> Option<&'a K> { 73 | unsafe { self.left().key() } 74 | } 75 | 76 | fn left_value(&self) -> Option<&'a V> { 77 | unsafe { self.left().value() } 78 | } 79 | 80 | fn right_key(&self) -> Option<&'a K> { 81 | unsafe { self.right().key() } 82 | } 83 | 84 | fn right_value(&self) -> Option<&'a V> { 85 | unsafe { self.right().value() } 86 | } 87 | } 88 | 89 | impl<'a, K, V, C> Iterator for Iter<'a, K, V, C> 90 | where 91 | K: Clone + Ord, 92 | C: 'a + TreeConfig, 93 | { 94 | type Item = (&'a K, &'a V); 95 | fn next(&mut self) -> Option { 96 | let left_key = self.left_key()?; 97 | let right_key = self.right_key()?; 98 | // If left key is greather than right key, we're done. 99 | let cmp = left_key.cmp(right_key); 100 | if cmp == Ordering::Greater { 101 | self.left.clear(); 102 | self.right.clear(); 103 | return None; 104 | } 105 | let value = self.left_value().unwrap(); 106 | if cmp == Ordering::Equal { 107 | self.left.clear(); 108 | self.right.clear(); 109 | } else { 110 | self.step_forward(); 111 | } 112 | Some((left_key, value)) 113 | } 114 | } 115 | 116 | impl<'a, K, V, C> DoubleEndedIterator for Iter<'a, K, V, C> 117 | where 118 | K: Clone + Ord, 119 | C: 'a + TreeConfig, 120 | { 121 | fn next_back(&mut self) -> Option { 122 | let left_key = self.left_key()?; 123 | let right_key = self.right_key()?; 124 | // If left key is greather than right key, we're done. 
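// (Iter uses the same converging two-cursor scheme as IterMut, but hands out shared references instead of mutable ones.)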
125 | let cmp = left_key.cmp(right_key); 126 | if cmp == Ordering::Greater { 127 | self.left.clear(); 128 | self.right.clear(); 129 | return None; 130 | } 131 | let value = self.right_value().unwrap(); 132 | if cmp == Ordering::Equal { 133 | self.left.clear(); 134 | self.right.clear(); 135 | } else { 136 | self.step_back(); 137 | } 138 | Some((right_key, value)) 139 | } 140 | } 141 | 142 | impl<'a, K, V, C> FusedIterator for Iter<'a, K, V, C> 143 | where 144 | K: Clone + Ord, 145 | C: 'a + TreeConfig, 146 | { 147 | } 148 | 149 | impl<'a, K, V, C> Debug for Iter<'a, K, V, C> 150 | where 151 | K: Clone + Ord + Debug, 152 | V: Debug, 153 | C: TreeConfig, 154 | { 155 | fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { 156 | f.debug_map().entries(self.clone()).finish() 157 | } 158 | } 159 | -------------------------------------------------------------------------------- /src/leaf.rs: -------------------------------------------------------------------------------- 1 | use crate::{array::Array, config::TreeConfig, pointer::Pointer, InsertResult}; 2 | use std::fmt::{Debug, Error, Formatter}; 3 | use typenum::Unsigned; 4 | 5 | /// A leaf node contains an ordered sequence of direct mappings from keys to values. 6 | pub(crate) struct Leaf 7 | where 8 | C: TreeConfig, 9 | { 10 | length: usize, 11 | keys: Array, 12 | values: Array, 13 | } 14 | 15 | impl Drop for Leaf 16 | where 17 | C: TreeConfig, 18 | { 19 | fn drop(&mut self) { 20 | unsafe { 21 | self.keys.drop(self.length); 22 | self.values.drop(self.length); 23 | } 24 | } 25 | } 26 | 27 | impl Clone for Leaf 28 | where 29 | K: Clone, 30 | V: Clone, 31 | C: TreeConfig, 32 | { 33 | fn clone(&self) -> Self { 34 | Self { 35 | length: self.length, 36 | keys: unsafe { self.keys.clone(self.length) }, 37 | values: unsafe { self.values.clone(self.length) }, 38 | } 39 | } 40 | } 41 | 42 | impl Leaf 43 | where 44 | C: TreeConfig, 45 | { 46 | pub(crate) fn new() -> Self { 47 | Leaf { 48 | length: 0, 49 | keys: Array::new(), 50 | values: Array::new(), 51 | } 52 | } 53 | 54 | pub(crate) fn unit(key: K, value: V) -> Self { 55 | Leaf { 56 | length: 1, 57 | keys: unsafe { Array::unit(key) }, 58 | values: unsafe { Array::unit(value) }, 59 | } 60 | } 61 | 62 | pub(crate) fn len(&self) -> usize { 63 | self.length 64 | } 65 | 66 | pub(crate) fn is_empty(&self) -> bool { 67 | self.len() == 0 68 | } 69 | 70 | pub(crate) fn is_full(&self) -> bool { 71 | self.len() == C::LeafSize::USIZE 72 | } 73 | 74 | pub(crate) fn highest(&self) -> &K { 75 | &self.keys()[self.len() - 1] 76 | } 77 | 78 | pub(crate) fn keys(&self) -> &[K] { 79 | unsafe { self.keys.deref(self.length) } 80 | } 81 | 82 | pub(crate) fn values(&self) -> &[V] { 83 | unsafe { self.values.deref(self.length) } 84 | } 85 | 86 | pub(crate) fn keys_mut(&mut self) -> &mut [K] { 87 | unsafe { self.keys.deref_mut(self.length) } 88 | } 89 | 90 | pub(crate) fn values_mut(&mut self) -> &mut [V] { 91 | unsafe { self.values.deref_mut(self.length) } 92 | } 93 | 94 | pub(crate) fn split( 95 | mut this: Pointer, 96 | ) -> (Pointer, Pointer) 97 | where 98 | K: Clone, 99 | V: Clone, 100 | { 101 | let right = { 102 | let this = Pointer::make_mut(&mut this); 103 | let half = this.length / 2; 104 | let right = Pointer::new(Leaf { 105 | length: half, 106 | keys: unsafe { Array::steal_from(&mut this.keys, this.length, half) }, 107 | values: unsafe { Array::steal_from(&mut this.values, this.length, half) }, 108 | }); 109 | this.length -= half; 110 | right 111 | }; 112 | (this, right) 113 | } 114 | 115 | pub(crate) unsafe fn 
push_unchecked(&mut self, key: K, value: V) { 116 | self.keys.push(self.length, key); 117 | self.values.push(self.length, value); 118 | self.length += 1; 119 | } 120 | 121 | pub(crate) unsafe fn insert_unchecked(&mut self, index: usize, key: K, value: V) { 122 | self.keys.insert(self.length, index, key); 123 | self.values.insert(self.length, index, value); 124 | self.length += 1; 125 | } 126 | 127 | pub(crate) unsafe fn remove_unchecked(&mut self, index: usize) -> (K, V) { 128 | let result = ( 129 | self.keys.remove(self.length, index), 130 | self.values.remove(self.length, index), 131 | ); 132 | self.length -= 1; 133 | result 134 | } 135 | 136 | pub(crate) fn pop_back(&mut self) -> Option<(K, V)> { 137 | if !self.is_empty() { 138 | let result = 139 | Some(unsafe { (self.keys.pop(self.length), self.values.pop(self.length)) }); 140 | self.length -= 1; 141 | result 142 | } else { 143 | None 144 | } 145 | } 146 | 147 | pub(crate) fn pop_front(&mut self) -> Option<(K, V)> { 148 | if !self.is_empty() { 149 | // TODO we could speed this up a lot by keeping a left index as well as a length, a la Chunk, 150 | // but it's only used by OwnedIterator, and it would adversely affect anything else. Think about it. 151 | let result = Some(unsafe { 152 | ( 153 | self.keys.remove(self.length, 0), 154 | self.values.remove(self.length, 0), 155 | ) 156 | }); 157 | self.length -= 1; 158 | result 159 | } else { 160 | None 161 | } 162 | } 163 | } 164 | 165 | impl Leaf 166 | where 167 | K: Clone + Ord, 168 | C: TreeConfig, 169 | { 170 | pub(crate) fn get(&self, key: &K) -> Option<&V> { 171 | self.keys() 172 | .binary_search(key) 173 | .ok() 174 | .map(|index| unsafe { self.values().get_unchecked(index) }) 175 | } 176 | 177 | pub(crate) fn get_mut(&mut self, key: &K) -> Option<&mut V> { 178 | if let Ok(index) = self.keys().binary_search(key) { 179 | Some(unsafe { self.values_mut().get_unchecked_mut(index) }) 180 | } else { 181 | None 182 | } 183 | } 184 | 185 | pub(crate) fn get_linear(&self, key: &K) -> Option<&V> { 186 | for (index, stored_key) in self.keys().iter().enumerate() { 187 | if stored_key == key { 188 | return Some(unsafe { self.values().get_unchecked(index) }); 189 | } 190 | } 191 | None 192 | } 193 | 194 | pub(crate) fn insert(&mut self, key: K, value: V) -> InsertResult { 195 | match self.keys().binary_search(&key) { 196 | Ok(index) => InsertResult::Replaced(std::mem::replace( 197 | unsafe { self.values_mut().get_unchecked_mut(index) }, 198 | value, 199 | )), 200 | Err(index) => { 201 | if !self.is_full() { 202 | unsafe { self.insert_unchecked(index, key, value) }; 203 | InsertResult::Added 204 | } else { 205 | InsertResult::Full(key, value) 206 | } 207 | } 208 | } 209 | } 210 | } 211 | 212 | impl Debug for Leaf 213 | where 214 | K: Debug, 215 | V: Debug, 216 | C: TreeConfig, 217 | { 218 | fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { 219 | let pairs: Vec<_> = self.keys().iter().zip(self.values().iter()).collect(); 220 | writeln!(f, "Leaf(len={}) {:?}", self.len(), pairs) 221 | } 222 | } 223 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | // This Source Code Form is subject to the terms of the Mozilla Public 2 | // License, v. 2.0. If a copy of the MPL was not distributed with this 3 | // file, You can obtain one at http://mozilla.org/MPL/2.0/. 
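// A minimal usage sketch of the public API defined in this file; `StdPalmTree` is the
// Box-backed alias declared below, and only methods implemented in this crate are used:
//
//     use palmtree::StdPalmTree;
//
//     let mut tree: StdPalmTree<usize, usize> = StdPalmTree::new();
//     tree.insert(1, 10);
//     tree.insert(2, 20);
//     assert_eq!(Some(&10), tree.get(&1));
//     assert_eq!(2, tree.range(1..=2).count());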
4 | 5 | #![forbid(rust_2018_idioms)] 6 | #![deny(nonstandard_style)] 7 | #![warn( 8 | unreachable_pub, 9 | missing_debug_implementations, 10 | // missing_docs, 11 | missing_doc_code_examples 12 | )] 13 | #![allow(clippy::question_mark)] // this lint makes code less readable 14 | #![allow(clippy::large_enum_variant)] // this lint is buggy 15 | #![cfg_attr(core_intrinsics, feature(core_intrinsics))] 16 | 17 | use std::fmt::{Debug, Error, Formatter}; 18 | use std::{ 19 | cmp::Ordering, 20 | collections::BTreeMap, 21 | hash::{Hash, Hasher}, 22 | iter::FromIterator, 23 | ops::{Add, AddAssign, Index, IndexMut, RangeBounds}, 24 | }; 25 | 26 | mod arch; 27 | mod array; 28 | mod branch; 29 | mod config; 30 | mod entry; 31 | mod iter; 32 | mod leaf; 33 | mod pointer; 34 | mod search; 35 | 36 | use branch::Branch; 37 | use leaf::Leaf; 38 | use pointer::Pointer; 39 | use search::PathedPointer; 40 | 41 | pub use config::{Tree64, TreeConfig}; 42 | pub use entry::Entry; 43 | pub use iter::{Iter, IterMut, MergeIter, OwnedIter}; 44 | pub use pointer::{PointerKind, Shared, SyncShared, Unique}; 45 | 46 | #[cfg(any(test, feature = "test"))] 47 | pub mod tests; 48 | 49 | enum InsertResult { 50 | Added, 51 | Replaced(V), 52 | Full(K, V), 53 | } 54 | 55 | pub type StdPalmTree = PalmTree>; 56 | pub type ImPalmTree = PalmTree>; 57 | pub type SyncPalmTree = PalmTree>; 58 | 59 | pub struct PalmTree 60 | where 61 | C: TreeConfig, 62 | { 63 | size: usize, 64 | root: Option, C::PointerKind>>, 65 | } 66 | 67 | impl Default for PalmTree 68 | where 69 | C: TreeConfig, 70 | { 71 | fn default() -> Self { 72 | Self::new() 73 | } 74 | } 75 | 76 | impl PalmTree 77 | where 78 | C: TreeConfig, 79 | { 80 | pub fn new() -> Self { 81 | Self { 82 | size: 0, 83 | root: None, 84 | } 85 | } 86 | } 87 | 88 | impl PalmTree 89 | where 90 | K: Clone + Ord, 91 | C: TreeConfig, 92 | { 93 | /// Construct a B+-tree efficiently from an ordered iterator. 94 | /// 95 | /// This algorithm requires the results coming out of the iterator 96 | /// to be in sorted order, with no duplicate keys, or the resulting 97 | /// tree will be in a very bad state. In debug mode, this invariant 98 | /// will be validated and panic ensues if it isn't held. 99 | pub fn load(iter: I) -> Self 100 | where 101 | V: Clone, 102 | I: IntoIterator, 103 | { 104 | fn push_stack( 105 | child: Pointer, C::PointerKind>, 106 | stack: &mut Vec, C::PointerKind>>, 107 | ) where 108 | K: Clone, 109 | V: Clone, 110 | C: TreeConfig, 111 | { 112 | let mut parent = stack.pop().unwrap_or_else(|| Branch::new(true).into()); 113 | if parent.is_full() { 114 | push_stack(parent, stack); 115 | parent = Pointer::new(Branch::new(true)); 116 | } 117 | Pointer::make_mut(&mut parent).push_branch(child.highest().clone(), child); 118 | stack.push(parent); 119 | } 120 | 121 | #[cfg(debug_assertions)] 122 | let mut last_record = (0, None); 123 | 124 | let iter = iter.into_iter(); 125 | let mut size = 0; 126 | let mut stack: Vec, C::PointerKind>> = Vec::new(); 127 | let mut parent: Branch = Branch::new(false); 128 | let mut leaf: Leaf = Leaf::new(); 129 | 130 | // Loop over input, fill leaf, push into parent when full. 
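// (The load is bottom-up: entries stream into `leaf`; a full leaf is pushed into the lowest-level `parent` branch; a full branch is pushed onto `stack` via `push_stack`, which keeps one pending branch per tree level and is folded into a single root at the end.)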
131 | for (key, value) in iter { 132 | #[cfg(debug_assertions)] 133 | { 134 | if let (last_index, Some(last_key)) = last_record { 135 | if last_key >= key { 136 | panic!("PalmTree::load: unordered key at index {}", last_index); 137 | } 138 | last_record = (last_index + 1, Some(key.clone())); 139 | } 140 | } 141 | 142 | if leaf.is_full() { 143 | // If parent is full, push it to the parent above it on the stack. 144 | if parent.is_full() { 145 | push_stack(Pointer::new(parent), &mut stack); 146 | parent = Branch::new(false); 147 | } 148 | 149 | parent.push_leaf(leaf.highest().clone(), Pointer::new(leaf)); 150 | 151 | leaf = Leaf::new(); 152 | } 153 | 154 | // Push the input into the leaf. 155 | unsafe { leaf.push_unchecked(key, value) }; 156 | size += 1; 157 | } 158 | 159 | // If the input was empty, return immediately with an empty tree. 160 | if size == 0 { 161 | return Self { 162 | size: 0, 163 | root: None, 164 | }; 165 | } 166 | 167 | // At end of input, push last leaf into parent, as above. 168 | if parent.is_full() { 169 | push_stack(Pointer::new(parent), &mut stack); 170 | parent = Branch::new(false); 171 | } 172 | parent.push_leaf(leaf.highest().clone(), Pointer::new(leaf)); 173 | 174 | // Push parent into the parent above it. 175 | push_stack(Pointer::new(parent), &mut stack); 176 | 177 | // Fold parent stack into the top level parent. 178 | while stack.len() > 1 { 179 | let parent = stack.pop().unwrap(); 180 | push_stack(parent, &mut stack); 181 | } 182 | 183 | // The root is now the only item left on the stack. 184 | let mut tree = Self { 185 | size, 186 | root: stack.pop(), 187 | }; 188 | tree.trim_root(); 189 | tree 190 | } 191 | 192 | // For benchmarking: lookup with a linear search instead of binary. 193 | pub fn get_linear(&self, key: &K) -> Option<&V> { 194 | if let Some(ref root) = self.root { 195 | root.get_linear(key) 196 | } else { 197 | None 198 | } 199 | } 200 | 201 | pub fn get(&self, key: &K) -> Option<&V> { 202 | if let Some(ref root) = self.root { 203 | root.get(key) 204 | } else { 205 | None 206 | } 207 | } 208 | 209 | pub fn get_mut(&mut self, key: &K) -> Option<&mut V> 210 | where 211 | V: Clone, 212 | { 213 | if let Some(ref mut root) = self.root { 214 | Pointer::make_mut(root).get_mut(key) 215 | } else { 216 | None 217 | } 218 | } 219 | 220 | pub fn len(&self) -> usize { 221 | self.size 222 | } 223 | 224 | pub fn is_empty(&self) -> bool { 225 | self.len() == 0 226 | } 227 | 228 | pub fn iter(&self) -> Iter<'_, K, V, C> { 229 | Iter::new(self, ..) 230 | } 231 | 232 | pub fn iter_mut(&mut self) -> IterMut<'_, K, V, C> { 233 | IterMut::new(self, ..) 
234 | } 235 | 236 | pub fn range(&self, range: R) -> Iter<'_, K, V, C> 237 | where 238 | R: RangeBounds, 239 | { 240 | Iter::new(self, range) 241 | } 242 | 243 | pub fn range_mut(&mut self, range: R) -> IterMut<'_, K, V, C> 244 | where 245 | R: RangeBounds, 246 | { 247 | IterMut::new(self, range) 248 | } 249 | 250 | pub fn entry(&mut self, key: K) -> Entry<'_, K, V, C> { 251 | Entry::new(self, key) 252 | } 253 | 254 | pub fn insert(&mut self, key: K, value: V) -> Option 255 | where 256 | V: Clone, 257 | { 258 | match self.entry(key) { 259 | Entry::Occupied(mut entry) => Some(entry.insert(value)), 260 | Entry::Vacant(entry) => { 261 | entry.insert(value); 262 | None 263 | } 264 | } 265 | } 266 | 267 | pub fn remove(&mut self, key: &K) -> Option<(K, V)> { 268 | if let Ok(path) = PathedPointer::<&mut (K, V), _, _, _>::exact_key(self.root.as_mut()?, key) 269 | { 270 | self.size -= 1; 271 | Some(unsafe { path.remove() }) 272 | } else { 273 | None 274 | } 275 | } 276 | 277 | pub fn remove_lowest(&mut self) -> Option<(K, V)> { 278 | if self.is_empty() { 279 | None 280 | } else { 281 | let path = PathedPointer::<&mut (K, V), _, _, _>::lowest(self.root.as_mut()?); 282 | self.size -= 1; 283 | Some(unsafe { path.remove() }) 284 | } 285 | } 286 | 287 | pub fn remove_highest(&mut self) -> Option<(K, V)> { 288 | if self.is_empty() { 289 | None 290 | } else { 291 | let path = PathedPointer::<&mut (K, V), _, _, _>::highest(self.root.as_mut()?); 292 | self.size -= 1; 293 | Some(unsafe { path.remove() }) 294 | } 295 | } 296 | 297 | fn merge_left_from( 298 | left: impl Iterator, 299 | right: impl Iterator, 300 | ) -> impl Iterator { 301 | MergeIter::merge( 302 | left, 303 | right, 304 | |(left, _), (right, _)| left > right, 305 | |(left, _), (right, _)| left == right, 306 | ) 307 | } 308 | 309 | fn merge_right_from( 310 | left: impl Iterator, 311 | right: impl Iterator, 312 | ) -> impl Iterator { 313 | MergeIter::merge( 314 | left, 315 | right, 316 | |(left, _), (right, _)| left >= right, 317 | |(left, _), (right, _)| left == right, 318 | ) 319 | } 320 | 321 | pub fn merge_left_iter(left: Self, right: Self) -> impl Iterator { 322 | Self::merge_left_from(left.into_iter(), right.into_iter()) 323 | } 324 | 325 | pub fn merge_left(left: Self, right: Self) -> Self 326 | where 327 | V: Clone, 328 | { 329 | Self::load(Self::merge_left_iter(left, right)) 330 | } 331 | 332 | pub fn merge_right_iter(left: Self, right: Self) -> impl Iterator { 333 | Self::merge_right_from(left.into_iter(), right.into_iter()) 334 | } 335 | 336 | pub fn merge_right(left: Self, right: Self) -> Self 337 | where 338 | V: Clone, 339 | { 340 | Self::load(Self::merge_right_iter(left, right)) 341 | } 342 | 343 | pub fn append_left(&mut self, other: Self) 344 | where 345 | V: Clone, 346 | { 347 | let root = self.root.take(); 348 | if root.is_some() { 349 | let left = OwnedIter::new(root, self.size); 350 | let right = other.into_iter(); 351 | *self = Self::load(Self::merge_left_from(left, right)); 352 | } else { 353 | *self = other; 354 | } 355 | } 356 | 357 | pub fn append_right(&mut self, other: Self) 358 | where 359 | V: Clone, 360 | { 361 | let root = self.root.take(); 362 | if root.is_some() { 363 | let left = OwnedIter::new(root, self.size); 364 | let right = other.into_iter(); 365 | *self = Self::load(Self::merge_right_from(left, right)); 366 | } else { 367 | *self = other; 368 | } 369 | } 370 | 371 | fn trim_root(&mut self) 372 | where 373 | V: Clone, 374 | { 375 | if let Some(ref mut root) = self.root { 376 | // If a branch bearing root 
only has one child, we can replace the root with that child. 377 | while root.has_branches() && root.len() == 1 { 378 | *root = Pointer::make_mut(root).remove_last_branch().1; 379 | } 380 | } 381 | } 382 | 383 | fn split_root(root: &mut Pointer, C::PointerKind>) 384 | where 385 | V: Clone, 386 | { 387 | let old_root = std::mem::replace(root, Branch::new(true).into()); 388 | let (left, right) = Branch::split(old_root); 389 | Pointer::make_mut(root).push_branch_pair( 390 | left.highest().clone(), 391 | left, 392 | right.highest().clone(), 393 | right, 394 | ); 395 | } 396 | 397 | pub fn insert_recursive(&mut self, key: K, value: V) -> Option 398 | where 399 | V: Clone, 400 | { 401 | let len = self.size; 402 | if let Some(ref mut root) = self.root { 403 | let root_ref = Pointer::make_mut(root); 404 | // Special case: if a tree has size 0 but there is a root, it's because 405 | // we removed the last entry and the root has been left allocated. 406 | // Tree walking algos assume the tree has no empty nodes, so we have to 407 | // handle this as a special case. 408 | if len == 0 { 409 | // Make sure the delete trimmed the tree properly. 410 | debug_assert_eq!(0, root_ref.len()); 411 | debug_assert!(root_ref.has_leaves()); 412 | 413 | root_ref.push_leaf(key.clone(), Pointer::new(Leaf::unit(key, value))); 414 | self.size = 1; 415 | None 416 | } else { 417 | match root_ref.insert(key, value) { 418 | InsertResult::Added => { 419 | self.size += 1; 420 | None 421 | } 422 | InsertResult::Replaced(value) => Some(value), 423 | InsertResult::Full(key, value) => { 424 | // If the root is full, we need to increase the height of the tree and retry insertion, 425 | // so we can split the old root. 426 | let key2 = root_ref.highest().clone(); 427 | let child = std::mem::replace(root_ref, Branch::new(true)); 428 | root_ref.push_branch(key2, Pointer::new(child)); 429 | self.insert(key, value) 430 | } 431 | } 432 | } 433 | } else { 434 | self.root = Some(Pointer::new(Branch::unit(Pointer::new(Leaf::unit( 435 | key, value, 436 | ))))); 437 | self.size = 1; 438 | None 439 | } 440 | } 441 | } 442 | 443 | #[cfg(feature = "tree_debug")] 444 | impl Debug for PalmTree 445 | where 446 | K: Debug, 447 | V: Debug, 448 | C: TreeConfig, 449 | { 450 | fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { 451 | match &self.root { 452 | None => write!(f, "EmptyTree"), 453 | Some(root) => root.fmt(f), 454 | } 455 | } 456 | } 457 | 458 | #[cfg(not(feature = "tree_debug"))] 459 | impl Debug for PalmTree 460 | where 461 | K: Clone + Ord + Debug, 462 | V: Debug, 463 | C: TreeConfig, 464 | { 465 | fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { 466 | f.debug_map().entries(self.iter()).finish() 467 | } 468 | } 469 | 470 | impl Clone for PalmTree 471 | where 472 | K: Ord + Clone, 473 | V: Clone, 474 | C: TreeConfig, 475 | { 476 | fn clone(&self) -> Self { 477 | Self { 478 | root: self.root.clone(), 479 | size: self.size, 480 | } 481 | } 482 | } 483 | 484 | impl FromIterator<(K, V)> for PalmTree 485 | where 486 | K: Ord + Clone, 487 | V: Clone, 488 | C: TreeConfig, 489 | { 490 | fn from_iter(iter: I) -> Self 491 | where 492 | I: IntoIterator, 493 | { 494 | let mut out = Self::new(); 495 | for (key, value) in iter { 496 | out.insert(key, value); 497 | } 498 | out 499 | } 500 | } 501 | 502 | impl<'a, K, V, C> Index<&'a K> for PalmTree 503 | where 504 | K: Ord + Clone, 505 | C: TreeConfig, 506 | { 507 | type Output = V; 508 | 509 | fn index(&self, index: &K) -> &Self::Output { 510 | self.get(index).expect("no entry found 
for key") 511 | } 512 | } 513 | 514 | impl<'a, K, V, C> IndexMut<&'a K> for PalmTree 515 | where 516 | K: Ord + Clone, 517 | V: Clone, 518 | C: TreeConfig, 519 | { 520 | fn index_mut(&mut self, index: &K) -> &mut Self::Output { 521 | self.get_mut(index).expect("no entry found for key") 522 | } 523 | } 524 | 525 | impl PartialEq for PalmTree 526 | where 527 | K: Ord + Clone, 528 | V: PartialEq, 529 | C: TreeConfig, 530 | { 531 | fn eq(&self, other: &Self) -> bool { 532 | self.len() == other.len() && self.iter().eq(other.iter()) 533 | } 534 | } 535 | 536 | impl Eq for PalmTree 537 | where 538 | K: Ord + Clone, 539 | V: Eq, 540 | C: TreeConfig, 541 | { 542 | } 543 | 544 | impl PartialOrd for PalmTree 545 | where 546 | K: Ord + Clone, 547 | V: PartialOrd, 548 | C: TreeConfig, 549 | { 550 | fn partial_cmp(&self, other: &Self) -> Option { 551 | self.iter().partial_cmp(other.iter()) 552 | } 553 | } 554 | 555 | impl Ord for PalmTree 556 | where 557 | K: Ord + Clone, 558 | V: Ord, 559 | C: TreeConfig, 560 | { 561 | fn cmp(&self, other: &Self) -> Ordering { 562 | self.iter().cmp(other.iter()) 563 | } 564 | } 565 | 566 | impl Extend<(K, V)> for PalmTree 567 | where 568 | K: Ord + Clone, 569 | V: Clone, 570 | C: TreeConfig, 571 | { 572 | fn extend>(&mut self, iter: I) { 573 | for (k, v) in iter { 574 | self.insert(k, v); 575 | } 576 | } 577 | } 578 | 579 | impl<'a, K, V, C> Extend<(&'a K, &'a V)> for PalmTree 580 | where 581 | K: 'a + Ord + Copy, 582 | V: 'a + Copy, 583 | C: TreeConfig, 584 | { 585 | fn extend>(&mut self, iter: I) { 586 | for (k, v) in iter { 587 | self.insert(*k, *v); 588 | } 589 | } 590 | } 591 | 592 | impl Add for PalmTree 593 | where 594 | K: Ord + Clone, 595 | V: Clone, 596 | C: TreeConfig, 597 | { 598 | type Output = Self; 599 | 600 | fn add(self, other: Self) -> Self::Output { 601 | Self::merge_right(self, other) 602 | } 603 | } 604 | 605 | impl AddAssign for PalmTree 606 | where 607 | K: Ord + Clone, 608 | V: Clone, 609 | C: TreeConfig, 610 | { 611 | fn add_assign(&mut self, other: Self) { 612 | self.append_right(other) 613 | } 614 | } 615 | 616 | impl<'a, K, V, C, C2> Add<&'a PalmTree> for PalmTree 617 | where 618 | K: Ord + Copy, 619 | V: Copy, 620 | C: TreeConfig, 621 | C2: TreeConfig, 622 | { 623 | type Output = Self; 624 | 625 | fn add(self, other: &PalmTree) -> Self::Output { 626 | Self::load(Self::merge_right_from( 627 | self.into_iter(), 628 | other.iter().map(|(k, v)| (*k, *v)), 629 | )) 630 | } 631 | } 632 | 633 | impl<'a, K, V, C, C2> AddAssign<&'a PalmTree> for PalmTree 634 | where 635 | K: Ord + Copy, 636 | V: Copy, 637 | C: TreeConfig, 638 | C2: TreeConfig, 639 | { 640 | fn add_assign(&mut self, other: &'a PalmTree) { 641 | let root = self.root.take(); 642 | if root.is_none() { 643 | *self = Self::load(other.iter().map(|(k, v)| (*k, *v))); 644 | } else { 645 | *self = Self::load(Self::merge_right_from( 646 | OwnedIter::new(root, self.size), 647 | other.iter().map(|(k, v)| (*k, *v)), 648 | )) 649 | } 650 | } 651 | } 652 | 653 | impl Hash for PalmTree 654 | where 655 | K: Ord + Clone + Hash, 656 | V: Hash, 657 | C: TreeConfig, 658 | { 659 | fn hash(&self, state: &mut H) 660 | where 661 | H: Hasher, 662 | { 663 | for entry in self { 664 | entry.hash(state); 665 | } 666 | } 667 | } 668 | 669 | impl<'a, K, V, C> IntoIterator for &'a PalmTree 670 | where 671 | K: Ord + Clone, 672 | C: TreeConfig, 673 | { 674 | type Item = (&'a K, &'a V); 675 | type IntoIter = Iter<'a, K, V, C>; 676 | fn into_iter(self) -> Self::IntoIter { 677 | self.iter() 678 | } 679 | } 680 | 681 | 
impl<'a, K, V, C> IntoIterator for &'a mut PalmTree 682 | where 683 | K: Ord + Clone, 684 | C: TreeConfig, 685 | { 686 | type Item = (&'a K, &'a mut V); 687 | type IntoIter = IterMut<'a, K, V, C>; 688 | fn into_iter(self) -> Self::IntoIter { 689 | self.iter_mut() 690 | } 691 | } 692 | 693 | impl IntoIterator for PalmTree 694 | where 695 | K: Ord + Clone, 696 | C: TreeConfig, 697 | { 698 | type Item = (K, V); 699 | type IntoIter = OwnedIter; 700 | fn into_iter(self) -> Self::IntoIter { 701 | OwnedIter::new(self.root, self.size) 702 | } 703 | } 704 | 705 | impl From> for PalmTree 706 | where 707 | K: Ord + Clone, 708 | V: Clone, 709 | C: TreeConfig, 710 | { 711 | fn from(map: BTreeMap) -> Self { 712 | Self::load(map.into_iter()) 713 | } 714 | } 715 | 716 | #[cfg(test)] 717 | mod test { 718 | use super::*; 719 | 720 | #[test] 721 | fn lookup_empty() { 722 | let tree: StdPalmTree = PalmTree::new(); 723 | assert_eq!(None, tree.get(&1337)); 724 | } 725 | 726 | #[test] 727 | fn lookup_single() { 728 | let mut tree: StdPalmTree = PalmTree::new(); 729 | tree.insert(1337, 31337); 730 | assert_eq!(None, tree.get(&1336)); 731 | assert_eq!(Some(&31337), tree.get(&1337)); 732 | assert_eq!(None, tree.get(&1338)); 733 | } 734 | 735 | #[test] 736 | fn insert_in_sequence() { 737 | let mut tree: StdPalmTree = PalmTree::new(); 738 | let iters = 131_072; 739 | for i in 0..iters { 740 | tree.insert(i, i); 741 | } 742 | for i in 0..iters { 743 | assert_eq!(Some(&i), tree.get(&i)); 744 | } 745 | } 746 | 747 | #[test] 748 | fn load_from_ordered_stream() { 749 | let size = 131_072; 750 | let tree: StdPalmTree = PalmTree::load((0..size).map(|i| (i, i))); 751 | for i in 0..size { 752 | assert_eq!(Some(&i), tree.get(&i)); 753 | } 754 | } 755 | 756 | #[test] 757 | fn delete_delete_delete() { 758 | let mut tree: StdPalmTree = PalmTree::load((0..131_072).map(|i| (i, i))); 759 | for i in 31337..41337 { 760 | assert_eq!(Some((i, i)), tree.remove(&i)); 761 | assert_eq!(None, tree.remove(&i)); 762 | } 763 | } 764 | 765 | #[test] 766 | fn small_delete() { 767 | let mut tree: StdPalmTree = PalmTree::load((0..64).map(|i| (i, i))); 768 | assert_eq!(Some((0, 0)), tree.remove(&0)); 769 | assert_eq!(None, tree.remove(&0)); 770 | } 771 | 772 | #[test] 773 | fn insert_into_emptied_tree() { 774 | let mut tree: StdPalmTree = PalmTree::new(); 775 | tree.insert(0, 0); 776 | tree.remove(&0); 777 | tree.insert(0, 0); 778 | tree.insert(10, 10); 779 | 780 | let result: Vec<(u8, u8)> = tree.iter().map(|(k, v)| (*k, *v)).collect(); 781 | let expected: Vec<(u8, u8)> = vec![(0, 0), (10, 10)]; 782 | assert_eq!(expected, result); 783 | } 784 | } 785 | -------------------------------------------------------------------------------- /src/pointer.rs: -------------------------------------------------------------------------------- 1 | #![allow(missing_debug_implementations)] 2 | 3 | use std::{ 4 | marker::PhantomData, 5 | mem::{ManuallyDrop, MaybeUninit}, 6 | ops::{Deref, DerefMut}, 7 | ptr::NonNull, 8 | rc::Rc, 9 | sync::Arc, 10 | }; 11 | 12 | pub trait PointerKind { 13 | unsafe fn new(value: A) -> Self; 14 | unsafe fn into_raw(self) -> NonNull; 15 | unsafe fn from_raw(ptr: NonNull) -> Self; 16 | unsafe fn deref(&self) -> &A; 17 | unsafe fn make_mut(&mut self) -> &mut A; 18 | unsafe fn drop_ptr(&mut self); 19 | unsafe fn clone(&self) -> Self; 20 | } 21 | 22 | pub struct Unique { 23 | data: MaybeUninit>, 24 | } 25 | 26 | impl Unique { 27 | unsafe fn from_box(data: Box) -> Self { 28 | let mut out = Self { 29 | data: MaybeUninit::uninit(), 30 | }; 31 | 
out.data.as_mut_ptr().cast::>().write(data); 32 | out 33 | } 34 | 35 | unsafe fn cast_into(self) -> Box { 36 | std::mem::transmute(self) 37 | } 38 | } 39 | 40 | impl PointerKind for Unique { 41 | unsafe fn new(value: A) -> Self { 42 | Self::from_box(Box::new(value)) 43 | } 44 | 45 | unsafe fn into_raw(self) -> NonNull { 46 | Box::leak(self.cast_into::()).into() 47 | } 48 | 49 | unsafe fn from_raw(mut ptr: NonNull) -> Self { 50 | Self::from_box(Box::from_raw(ptr.as_mut())) 51 | } 52 | 53 | unsafe fn deref(&self) -> &A { 54 | (*self.data.as_ptr().cast::>()).deref() 55 | } 56 | 57 | unsafe fn make_mut(&mut self) -> &mut A { 58 | (*self.data.as_mut_ptr().cast::>()).deref_mut() 59 | } 60 | 61 | unsafe fn drop_ptr(&mut self) { 62 | std::ptr::drop_in_place(self.data.as_mut_ptr().cast::>()) 63 | } 64 | 65 | unsafe fn clone(&self) -> Self { 66 | Self::new(self.deref::().clone()) 67 | } 68 | } 69 | 70 | pub struct Shared { 71 | data: MaybeUninit>, 72 | } 73 | 74 | impl Shared { 75 | unsafe fn from_rc(data: Rc) -> Self { 76 | let mut out = Self { 77 | data: MaybeUninit::uninit(), 78 | }; 79 | out.data.as_mut_ptr().cast::>().write(data); 80 | out 81 | } 82 | 83 | unsafe fn cast_into(self) -> Rc { 84 | std::mem::transmute(self) 85 | } 86 | } 87 | 88 | impl PointerKind for Shared { 89 | unsafe fn new(value: A) -> Self { 90 | Self::from_rc(Rc::new(value)) 91 | } 92 | 93 | unsafe fn into_raw(self) -> NonNull { 94 | NonNull::new_unchecked(Rc::into_raw(self.cast_into::()) as *mut A) 95 | } 96 | 97 | unsafe fn from_raw(ptr: NonNull) -> Self { 98 | Self::from_rc(Rc::from_raw(ptr.as_ptr())) 99 | } 100 | 101 | unsafe fn deref(&self) -> &A { 102 | (*self.data.as_ptr().cast::>()).deref() 103 | } 104 | 105 | unsafe fn make_mut(&mut self) -> &mut A { 106 | Rc::make_mut(&mut *self.data.as_mut_ptr().cast::>()) 107 | } 108 | 109 | unsafe fn drop_ptr(&mut self) { 110 | std::ptr::drop_in_place(self.data.as_mut_ptr().cast::>()) 111 | } 112 | 113 | unsafe fn clone(&self) -> Self { 114 | Self::from_rc::((&*self.data.as_ptr().cast::>()).clone()) 115 | } 116 | } 117 | 118 | pub struct SyncShared { 119 | data: MaybeUninit>, 120 | } 121 | 122 | impl SyncShared { 123 | unsafe fn from_arc(data: Arc) -> Self { 124 | let mut out = Self { 125 | data: MaybeUninit::uninit(), 126 | }; 127 | out.data.as_mut_ptr().cast::>().write(data); 128 | out 129 | } 130 | 131 | unsafe fn cast_into(self) -> Arc { 132 | std::mem::transmute(self) 133 | } 134 | } 135 | 136 | impl PointerKind for SyncShared { 137 | unsafe fn new(value: A) -> Self { 138 | Self::from_arc(Arc::new(value)) 139 | } 140 | 141 | unsafe fn into_raw(self) -> NonNull { 142 | NonNull::new_unchecked(Arc::into_raw(self.cast_into::()) as *mut A) 143 | } 144 | 145 | unsafe fn from_raw(ptr: NonNull) -> Self { 146 | Self::from_arc(Arc::from_raw(ptr.as_ptr())) 147 | } 148 | 149 | unsafe fn deref(&self) -> &A { 150 | (*self.data.as_ptr().cast::>()).deref() 151 | } 152 | 153 | unsafe fn make_mut(&mut self) -> &mut A { 154 | Arc::make_mut(&mut *self.data.as_mut_ptr().cast::>()) 155 | } 156 | 157 | unsafe fn drop_ptr(&mut self) { 158 | std::ptr::drop_in_place(self.data.as_mut_ptr().cast::>()) 159 | } 160 | 161 | unsafe fn clone(&self) -> Self { 162 | Self::from_arc::((&*self.data.as_ptr().cast::>()).clone()) 163 | } 164 | } 165 | 166 | pub(crate) struct Pointer { 167 | data: ManuallyDrop, 168 | kind: PhantomData, 169 | } 170 | 171 | unsafe impl Send for Pointer where Kind: PointerKind + Send {} 172 | unsafe impl Sync for Pointer where Kind: PointerKind + Sync {} 173 | 174 | impl Pointer { 
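// (`Pointer` erases the concrete smart pointer behind the `PointerKind` trait: `Unique` wraps a Box, `Shared` an Rc and `SyncShared` an Arc, so the same tree code can be instantiated with unique, reference-counted or thread-safe ownership.)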
175 | fn from_data(data: Kind) -> Self { 176 | Self { 177 | data: ManuallyDrop::new(data), 178 | kind: PhantomData, 179 | } 180 | } 181 | 182 | pub(crate) fn new(value: A) -> Self { 183 | Self::from_data(unsafe { Kind::new(value) }) 184 | } 185 | 186 | pub(crate) fn into_raw(mut this: Self) -> NonNull { 187 | let ptr = unsafe { ManuallyDrop::take(&mut this.data).into_raw::() }; 188 | std::mem::forget(this); 189 | ptr 190 | } 191 | 192 | pub(crate) unsafe fn from_raw(ptr: NonNull) -> Self { 193 | Self::from_data(Kind::from_raw::(ptr)) 194 | } 195 | 196 | pub(crate) fn make_mut(this: &mut Self) -> &mut A 197 | where 198 | A: Clone, 199 | { 200 | unsafe { this.data.make_mut::() } 201 | } 202 | 203 | pub(crate) unsafe fn cast_into(this: Self) -> Pointer { 204 | Pointer::from_raw(Self::into_raw(this).cast()) 205 | } 206 | 207 | pub(crate) unsafe fn deref_cast(this: &Self) -> &B { 208 | this.data.deref().deref::() 209 | } 210 | 211 | pub(crate) unsafe fn make_mut_cast(this: &mut Self) -> &mut B 212 | where 213 | B: Clone, 214 | { 215 | this.data.make_mut::() 216 | } 217 | } 218 | 219 | impl Drop for Pointer 220 | where 221 | Kind: PointerKind, 222 | { 223 | fn drop(&mut self) { 224 | unsafe { self.data.drop_ptr::() } 225 | } 226 | } 227 | 228 | impl Deref for Pointer 229 | where 230 | Kind: PointerKind, 231 | { 232 | type Target = A; 233 | fn deref(&self) -> &Self::Target { 234 | unsafe { self.data.deref().deref::() } 235 | } 236 | } 237 | 238 | impl From for Pointer 239 | where 240 | Kind: PointerKind, 241 | { 242 | fn from(value: A) -> Self { 243 | Self::new(value) 244 | } 245 | } 246 | 247 | impl Clone for Pointer 248 | where 249 | A: Clone, 250 | Kind: PointerKind, 251 | { 252 | fn clone(&self) -> Self { 253 | Self::from_data(unsafe { self.data.clone::() }) 254 | } 255 | } 256 | -------------------------------------------------------------------------------- /src/search.rs: -------------------------------------------------------------------------------- 1 | use crate::{arch::prefetch, branch::Branch, config::TreeConfig, leaf::Leaf}; 2 | use arrayvec::ArrayVec; 3 | use std::{ 4 | fmt::{Debug, Error, Formatter}, 5 | marker::PhantomData, 6 | }; 7 | 8 | // type PtrPath = Chunk<(*const Branch, isize), U16>; // FIXME hardcoded max height of 16 9 | type PtrPath = ArrayVec<[(*const Branch, isize); 16]>; 10 | 11 | pub(crate) fn find_key_linear(keys: &[K], target: &K) -> Option 12 | where 13 | K: Ord, 14 | { 15 | for (index, key) in keys.iter().enumerate() { 16 | if target <= key { 17 | return Some(index); 18 | } 19 | } 20 | None 21 | } 22 | 23 | /// Find 'key' in 'keys', or the closest higher value. 24 | /// 25 | /// If every value in `keys` is lower than `key`, `None` will be returned. 26 | /// 27 | /// This is a checked version of `find_key_or_next`. No assumption about 28 | /// the content of `keys` is needed, and it will never panic. 29 | pub(crate) fn find_key(keys: &[K], key: &K) -> Option 30 | where 31 | K: Ord, 32 | { 33 | let size = keys.len(); 34 | if size == 0 { 35 | return None; 36 | } 37 | 38 | let mut low = 0; 39 | let mut high = size - 1; 40 | while low != high { 41 | let mid = (low + high) / 2; 42 | if unsafe { keys.get_unchecked(mid) } < key { 43 | low = mid + 1; 44 | } else { 45 | high = mid; 46 | } 47 | } 48 | if low == size || unsafe { keys.get_unchecked(low) } < key { 49 | None 50 | } else { 51 | Some(low) 52 | } 53 | } 54 | 55 | /// Find `key` in `keys`, or the closest higher value. 
///
/// This function assumes the highest value in `keys` is
/// not lower than `key`, and that `keys` is not empty.
///
/// If `key` is higher than the highest value in `keys`, the
/// index of the highest value will be returned.
///
/// If `keys` is empty, this function will panic.
pub(crate) fn find_key_or_next<K>(keys: &[K], key: &K) -> usize
where
    K: Ord,
{
    let size = keys.len();
    let mut low = 0;
    let mut high = size - 1;
    while low != high {
        let mid = (low + high) / 2;
        if unsafe { keys.get_unchecked(mid) } < key {
            low = mid + 1;
        } else {
            high = mid;
        }
    }
    low
}

/// Find `key` in `keys`, or the closest lower value.
///
/// Invariants as in `find_key_or_next` above apply, but reversed.
pub(crate) fn find_key_or_prev<K>(keys: &[K], key: &K) -> usize
where
    K: Ord,
{
    let size = keys.len();
    let mut low = 0;
    let mut high = size - 1;
    while low != high {
        let mid = (low + high + 1) / 2;
        if unsafe { keys.get_unchecked(mid) } > key {
            high = mid - 1;
        } else {
            low = mid;
        }
    }
    low
}

/// A pointer to a leaf entry which can be stepped forwards and backwards.
pub(crate) struct PathedPointer<Lifetime, K, V, C>
where
    C: TreeConfig<K, V>,
{
    stack: PtrPath<K, V, C>,
    leaf: *const Leaf<K, V, C>,
    index: usize,
    lifetime: PhantomData<Lifetime>,
}

impl<Lifetime, K, V, C> Clone for PathedPointer<Lifetime, K, V, C>
where
    C: TreeConfig<K, V>,
{
    fn clone(&self) -> Self {
        Self {
            stack: self.stack.clone(),
            leaf: self.leaf,
            index: self.index,
            lifetime: PhantomData,
        }
    }
}

fn walk_path<'a, K, V, C>(
    mut branch: &'a Branch<K, V, C>,
    key: &K,
    path: &mut PtrPath<K, V, C>,
) -> Option<&'a Leaf<K, V, C>>
where
    K: Clone + Ord,
    C: TreeConfig<K, V>,
{
    loop {
        if let Some(index) = find_key(branch.keys(), key) {
            path.push((branch, index as isize));
            if branch.has_branches() {
                branch = unsafe { branch.get_branch_unchecked(index) };
            } else {
                return Some(unsafe { branch.get_leaf_unchecked(index) });
            }
        } else {
            return None;
        }
    }
}

/// Find the path to the leaf which contains `key` or the closest higher key.
fn path_for<'a, K, V, C>(
    tree: &'a Branch<K, V, C>,
    key: &K,
) -> Option<(PtrPath<K, V, C>, &'a Leaf<K, V, C>)>
where
    K: Clone + Ord,
    C: TreeConfig<K, V>,
{
    let mut path: PtrPath<K, V, C> = ArrayVec::new();
    walk_path(tree, key, &mut path).map(|leaf| (path, leaf))
}

impl<Lifetime, K, V, C> PathedPointer<Lifetime, K, V, C>
where
    K: Clone + Ord,
    C: TreeConfig<K, V>,
{
    pub(crate) fn null() -> Self {
        Self {
            stack: ArrayVec::new(),
            leaf: std::ptr::null(),
            index: 0,
            lifetime: PhantomData,
        }
    }

    /// Find `key` and return `Ok(path)` for a key match or `Err(path)` for an absent key with
    /// the path to the leaf it should be in. This path will be null if the key is larger than
    /// the tree's current highest key.
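    /// In the `Err` case, `index` gives the position within that leaf where the key would be inserted.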
    pub(crate) fn exact_key(tree: &Branch<K, V, C>, key: &K) -> Result<Self, Self> {
        if let Some((stack, leaf)) = path_for(tree, key) {
            match leaf.keys().binary_search(key) {
                Ok(index) => Ok(Self {
                    stack,
                    leaf,
                    index,
                    lifetime: PhantomData,
                }),
                Err(index) => Err(Self {
                    stack,
                    leaf,
                    index,
                    lifetime: PhantomData,
                }),
            }
        } else {
            Err(Self::null())
        }
    }

    /// Find `key` or the first higher key.
    pub(crate) fn key_or_higher(tree: &Branch<K, V, C>, key: &K) -> Self {
        let mut ptr = Self::null();
        if let Some((path, leaf)) = path_for(tree, key) {
            ptr.stack = path;
            ptr.index = find_key_or_next(leaf.keys(), key);
            ptr.leaf = leaf;
            // find_key_or_next assumes the highest key in the leaf isn't lower than `key`, but a search
            // through a tree with branch keys higher than the highest key present in the leaf can take
            // you to a node where this doesn't hold, so we have to check if we need to step forward.
            // If we do, we can depend on the next neighbour node containing the right key as its first
            // entry.
            unsafe {
                if ptr.key_unchecked() < key && !ptr.step_forward() {
                    // If we can't step forward, we were at the highest key already, so the iterator is empty.
                    ptr = Self::null();
                }
            }
        } else {
            // No target node for start bound means the key is higher than our highest value, so we leave ptr empty.
        }
        ptr
    }

    /// Find the first key higher than `key`.
    pub(crate) fn higher_than_key(tree: &Branch<K, V, C>, key: &K) -> Self {
        let mut ptr = Self::null();
        if let Some((path, leaf)) = path_for(tree, key) {
            ptr.stack = path;
            ptr.index = find_key_or_next(leaf.keys(), key);
            ptr.leaf = leaf;
            unsafe {
                if leaf.keys().get_unchecked(ptr.index) == key && !ptr.step_forward() {
                    // If we can't step forward, we were at the highest key already, so the iterator is empty.
                    return Self::null();
                }
            }
        } else {
            // No target node for start bound means the key is higher than our highest value, so we leave ptr empty.
        }
        ptr
    }

    /// Find `key` or the first lower key.
    pub(crate) fn key_or_lower(tree: &Branch<K, V, C>, key: &K) -> Self {
        if let Some((path, leaf)) = path_for(tree, key) {
            let mut ptr = Self::null();
            ptr.stack = path;
            ptr.index = find_key_or_next(leaf.keys(), key);
            ptr.leaf = leaf;
            ptr
        } else {
            // No target node for end bound means it's past the largest key, so get a path to the end of the tree.
            Self::highest(tree)
        }
    }

    /// Find the first key lower than `key`.
    pub(crate) fn lower_than_key(tree: &Branch<K, V, C>, key: &K) -> Self {
        if let Some((path, leaf)) = path_for(tree, key) {
            let mut ptr = Self::null();
            ptr.stack = path;
            ptr.index = find_key_or_prev(leaf.keys(), key);
            ptr.leaf = leaf;
            // If we've found a value equal to key, we step back one key.
            // If we've found a value higher than key, we're one branch ahead of the target key and step back.
            unsafe {
                if leaf.keys().get_unchecked(ptr.index) >= key && !ptr.step_back() {
                    // If we can't step back, we were at the lowest key already, so the iterator is empty.
                    return Self::null();
                }
            }
            ptr
        } else {
            // No target node for end bound, so it must be larger than the largest key; get the path to that.
            Self::highest(tree)
        }
    }

    /// Find the lowest key in the tree.
    pub(crate) fn lowest(tree: &Branch<K, V, C>) -> Self {
        let mut branch = tree;
        let mut stack = PtrPath::new();
        loop {
            if branch.is_empty() {
                return Self::null();
            }
            stack.push((branch, 0));
            if branch.has_branches() {
                branch = unsafe { branch.get_branch_unchecked(0) };
            } else {
                return Self {
                    stack,
                    leaf: unsafe { branch.get_leaf_unchecked(0) },
                    index: 0,
                    lifetime: PhantomData,
                };
            }
        }
    }

    /// Find the highest key in the tree.
    pub(crate) fn highest(tree: &Branch<K, V, C>) -> Self {
        let mut branch = tree;
        let mut stack = PtrPath::new();
        loop {
            if branch.is_empty() {
                return Self::null();
            }
            let index = branch.len() - 1;
            stack.push((branch, index as isize));
            if branch.has_branches() {
                branch = unsafe { branch.get_branch_unchecked(index) };
            } else {
                let leaf = unsafe { branch.get_leaf_unchecked(index) };
                return Self {
                    stack,
                    leaf,
                    index: leaf.len() - 1,
                    lifetime: PhantomData,
                };
            }
        }
    }

    /// Step a pointer forward by one entry.
    ///
    /// If it returns `false`, you tried to step past the last entry.
    /// If this happens, the pointer is now a null pointer.
    pub(crate) unsafe fn step_forward(&mut self) -> bool {
        if !self.is_null() {
            self.index += 1;
            if self.index >= (*self.leaf).keys().len() {
                loop {
                    // Pop a branch off the top of the stack and examine it.
                    if let Some((branch, mut index)) = self.stack.pop() {
                        index += 1;
                        if index < (*branch).len() as isize {
                            // If we're not at the end yet, push the branch back on the stack and look at the next child.
                            self.stack.push((branch, index));
                            if (*branch).has_branches() {
                                // If it's a branch, push it on the stack and go through the loop again with this branch.
                                self.stack
                                    .push(((*branch).get_branch_unchecked(index as usize), -1));
                                continue;
                            } else {
                                // If it's a leaf, this is our new leaf, we're done.
                                self.leaf = (*branch).get_leaf_unchecked(index as usize);
                                self.index = 0;
                                // Prefetch the next leaf.
                                let next_index = (index + 1) as usize;
                                if next_index < (*branch).len() {
                                    prefetch((*branch).get_leaf_unchecked(next_index));
                                }
                                break;
                            }
                        } else {
                            // If this branch is exhausted, go round the loop again to look at its parent.
                            continue;
                        }
                    } else {
                        self.clear();
                        return false;
                    }
                }
            }
        }
        true
    }

    /// Step a pointer back by one entry.
    ///
    /// See notes for `step_forward`.
    pub(crate) unsafe fn step_back(&mut self) -> bool {
        if !self.is_null() {
            if self.index > 0 {
                self.index -= 1;
            } else {
                loop {
                    // Pop a branch off the top of the stack and examine it.
                    if let Some((branch, mut index)) = self.stack.pop() {
                        if index > 0 {
                            index -= 1;
                            // If we're not at the beginning yet, push the branch back on the stack and look at the previous child.
                            self.stack.push((branch, index));
                            if (*branch).has_branches() {
                                let child = (*branch).get_branch_unchecked(index as usize);
                                // If it's a branch, push it on the stack and go through the loop again with this branch.
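                                // (Pushing `child.len()` as the index means the next pass of the
                                // loop will step it back down onto the branch's last child.)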
                                self.stack.push((child, child.len() as isize));
                                continue;
                            } else {
                                // If it's a leaf, this is our new leaf, we're done.
                                self.leaf = (*branch).get_leaf_unchecked(index as usize);
                                self.index = (*self.leaf).keys().len() - 1;
                                // Prefetch the previous leaf.
                                if index > 0 {
                                    prefetch((*branch).get_leaf_unchecked(index as usize - 1));
                                }
                                break;
                            }
                        } else {
                            // If this branch is exhausted, go round the loop again to look at its parent.
                            continue;
                        }
                    } else {
                        self.clear();
                        return false;
                    }
                }
            }
        }
        true
    }

    /// Remove the entry being pointed at.
    ///
    /// You're responsible for ensuring there is indeed an entry being pointed at.
    pub(crate) unsafe fn remove(mut self) -> (K, V) {
        // TODO need a strategy for rebalancing after remove
        let index = self.index;
        let leaf = self.deref_mut_leaf().unwrap();
        let (key, value) = leaf.remove_unchecked(index);
        if leaf.is_empty() {
            while let Some((branch, index)) = self.stack.pop() {
                let branch = &mut *(branch as *mut Branch<K, V, C>);
                let index = index as usize;
                if branch.has_leaves() {
                    branch.remove_leaf(index);
                } else {
                    branch.remove_branch(index);
                }
                if !branch.is_empty() {
                    break;
                }
            }
        }

        (key, value)
    }

    /// Insert a key at the index being pointed at.
    ///
    /// You're responsible for ensuring that something is being pointed at,
    /// that what's being pointed at is the location in the leaf where this
    /// key should be inserted, and that the key isn't already there.
    /// This is the assumption validated by the `exact_key` constructor when it
    /// returns a non-null `Err` value.
    pub(crate) unsafe fn insert(mut self, key: K, value: V) -> Result<Self, (K, V)>
    where
        V: Clone,
    {
        let index = self.index;
        let leaf = self.deref_mut_leaf().unwrap();
        if !leaf.is_full() {
            leaf.insert_unchecked(index, key, value);
            Ok(self)
        } else {
            // Walk up the tree to find somewhere to split.
            loop {
                if let Some((branch, index)) = self.stack.pop() {
                    let branch = &mut *(branch as *mut Branch<K, V, C>);
                    let index = index as usize;
                    if !branch.is_full() {
                        let choose_index = if branch.has_branches() {
                            let (removed_key, removed_branch) = branch.remove_branch(index);
                            let (left, right) = Branch::split(removed_branch);
                            let left_highest = left.highest();
                            let choose_index = if &key <= left_highest {
                                index
                            } else {
                                index + 1
                            };
                            branch.insert_branch_pair(
                                index,
                                left_highest.clone(),
                                left,
                                removed_key,
                                right,
                            );
                            choose_index
                        } else {
                            let (removed_key, removed_leaf) = branch.remove_leaf(index);
                            let (left, right) = Leaf::split(removed_leaf);
                            let left_highest = left.highest();
                            let choose_index = if &key <= left_highest {
                                index
                            } else {
                                index + 1
                            };
                            branch.insert_leaf_pair(
                                index,
                                left_highest.clone(),
                                left,
                                removed_key,
                                right,
                            );
                            choose_index
                        };
                        // We're going to walk down either the left or the right hand branch of our split.
                        // We're guaranteed to find a leaf, but it might be full if we split a higher branch,
                        // so we might have to go back up and split further.
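                        // walk_path() below records the nodes it passes through onto self.stack,
                        // so if the leaf we reach is still full, the loop can pop that freshly
                        // recorded path and try to split again from there.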
                        let leaf = if branch.has_branches() {
                            walk_path(
                                branch.get_branch_unchecked(choose_index),
                                &key,
                                &mut self.stack,
                            )
                        } else {
                            Some(branch.get_leaf_unchecked(choose_index))
                        };
                        if let Some(leaf) = leaf {
                            if !leaf.is_full() {
                                let index = leaf
                                    .keys()
                                    .binary_search(&key)
                                    .expect_err("tried to insert() a key that already exists");
                                self.leaf = leaf;
                                self.index = index;
                                assert!(
                                    index <= leaf.len(),
                                    "index {} > len {}",
                                    index,
                                    leaf.len()
                                );
                                let leaf = self.deref_mut_leaf_unchecked();
                                leaf.insert_unchecked(index, key, value);
                                return Ok(self);
                            }
                        } else {
                            unreachable!("walk_path() failed to produce a leaf, even though the leaf should be there!")
                        }
                    }
                } else {
                    return Err((key, value));
                }
            }
        }
    }

    /// Insert a value at the right edge of the tree.
    /// If it returns `Err`, you need to split the root and try again.
    ///
    /// This must only be called on a null pointer, and the key provided must
    /// be higher than the tree's current maximum.
    pub(crate) unsafe fn push_last(
        mut self,
        root: &mut Branch<K, V, C>,
        key: K,
        value: V,
    ) -> Result<Self, (K, V)>
    where
        V: Clone,
    {
        let mut branch = root;
        let mut index;
        loop {
            index = branch.len() - 1;
            debug_assert!(branch.highest() < &key);
            branch.keys_mut()[index] = key.clone();
            self.stack.push((branch, index as isize));
            if branch.has_branches() {
                branch = branch.get_branch_mut(index);
            } else {
                break;
            }
        }
        self.leaf = branch.get_leaf(index);
        self.index = (*self.leaf).len();
        self.insert(key, value)
    }

    pub(crate) fn clear(&mut self) {
        self.leaf = std::ptr::null();
    }

    pub(crate) fn is_null(&self) -> bool {
        self.leaf.is_null()
    }

    pub(crate) unsafe fn deref_leaf_unchecked<'a>(&'a self) -> &'a Leaf<K, V, C> {
        &*self.leaf
    }

    pub(crate) unsafe fn deref_mut_leaf_unchecked<'a>(&'a mut self) -> &'a mut Leaf<K, V, C> {
        let ptr = self.leaf as *mut Leaf<K, V, C>;
        &mut *ptr
    }

    pub(crate) unsafe fn deref_leaf<'a>(&'a self) -> Option<&'a Leaf<K, V, C>> {
        self.leaf.as_ref()
    }

    pub(crate) unsafe fn deref_mut_leaf<'a>(&'a mut self) -> Option<&'a mut Leaf<K, V, C>> {
        (self.leaf as *mut Leaf<K, V, C>).as_mut()
    }

    pub(crate) unsafe fn into_entry_mut<'a>(self) -> (&'a mut K, &'a mut V) {
        let index = self.index;
        let leaf = &mut *(self.leaf as *mut Leaf<K, V, C>);
        let key: *mut K = &mut leaf.keys_mut()[index];
        let value: *mut V = &mut leaf.values_mut()[index];
        (&mut *key, &mut *value)
    }

    pub(crate) unsafe fn key(&self) -> Option<&K> {
        self.deref_leaf()
            .map(|leaf| leaf.keys().get_unchecked(self.index))
    }

    pub(crate) unsafe fn key_unchecked(&self) -> &K {
        self.deref_leaf_unchecked().keys().get_unchecked(self.index)
    }

    pub(crate) unsafe fn value(&self) -> Option<&V> {
        self.deref_leaf()
            .map(|leaf| leaf.values().get_unchecked(self.index))
    }

    pub(crate) unsafe fn value_mut(&mut self) -> Option<&mut V> {
        let index = self.index;
        self.deref_mut_leaf()
            .map(|leaf| leaf.values_mut().get_unchecked_mut(index))
    }
}

impl<Lifetime, K, V, C> Debug for PathedPointer<Lifetime, K, V, C>
where
    C: TreeConfig<K, V>,
{
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
        write!(f, "PathedPointer")
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use std::iter::FromIterator;

    #[test]
    fn test_find_key() {
        let keys: Vec<i32> = Vec::from_iter(vec![2, 4, 6, 8]);
        assert_eq!(Some(0), find_key(&keys, &0));
        assert_eq!(Some(0), find_key(&keys, &1));
        assert_eq!(Some(0), find_key(&keys, &2));
        assert_eq!(Some(1), find_key(&keys, &3));
        assert_eq!(Some(1), find_key(&keys, &4));
        assert_eq!(Some(2), find_key(&keys, &5));
        assert_eq!(Some(2), find_key(&keys, &6));
        assert_eq!(Some(3), find_key(&keys, &7));
        assert_eq!(Some(3), find_key(&keys, &8));
        assert_eq!(None, find_key(&keys, &9));
        assert_eq!(None, find_key(&keys, &10));
        assert_eq!(None, find_key(&keys, &31337));
    }

    #[test]
    fn test_find_key_or_next() {
        let keys: Vec<i32> = Vec::from_iter(vec![2, 4, 6, 8]);
        assert_eq!(0, find_key_or_next(&keys, &0));
        assert_eq!(0, find_key_or_next(&keys, &1));
        assert_eq!(0, find_key_or_next(&keys, &2));
        assert_eq!(1, find_key_or_next(&keys, &3));
        assert_eq!(1, find_key_or_next(&keys, &4));
        assert_eq!(2, find_key_or_next(&keys, &5));
        assert_eq!(2, find_key_or_next(&keys, &6));
        assert_eq!(3, find_key_or_next(&keys, &7));
        assert_eq!(3, find_key_or_next(&keys, &8));
    }

    #[test]
    fn test_find_key_or_prev() {
        let keys: Vec<i32> = Vec::from_iter(vec![2, 4, 6, 8]);
        assert_eq!(0, find_key_or_prev(&keys, &2));
        assert_eq!(0, find_key_or_prev(&keys, &3));
        assert_eq!(1, find_key_or_prev(&keys, &4));
        assert_eq!(1, find_key_or_prev(&keys, &5));
        assert_eq!(2, find_key_or_prev(&keys, &6));
        assert_eq!(2, find_key_or_prev(&keys, &7));
        assert_eq!(3, find_key_or_prev(&keys, &8));
        assert_eq!(3, find_key_or_prev(&keys, &9));
        assert_eq!(3, find_key_or_prev(&keys, &10));
    }
}
--------------------------------------------------------------------------------
/src/tests.rs:
--------------------------------------------------------------------------------
use std::collections::BTreeMap;
use std::fmt::Debug;
use std::iter::FromIterator;

use crate::{config::TreeConfig, PalmTree};

#[cfg(not(test))]
use arbitrary::Arbitrary;
#[cfg(test)]
use proptest::proptest;
#[cfg(test)]
use proptest_derive::Arbitrary;

#[derive(Arbitrary, Debug)]
pub enum Construct<K, V>
where
    K: Ord,
{
    Empty,
    FromIter(BTreeMap<K, V>),
    Insert(BTreeMap<K, V>),
    Load(BTreeMap<K, V>),
}

#[derive(Arbitrary, Debug)]
pub enum Action<K, V> {
    Insert(K, V),
    Lookup(K),
    Remove(K),
    Range(Option<K>, Option<K>),
    RangeMut(Option<K>, Option<K>),
}

pub type Input<K, V> = (Construct<K, V>, Vec<Action<K, V>>);

pub fn integration_test<K: Copy + Ord + Debug, V: Copy + PartialEq + Debug, C>(input: Input<K, V>)
where
    C: TreeConfig<K, V>,
{
    let (constructor, actions) = input;

    let mut set: PalmTree<K, V, C>;
    let mut nat;

    match constructor {
        Construct::Empty => {
            set = PalmTree::new();
            nat = BTreeMap::new();
        }
        Construct::FromIter(map) => {
            nat = map.clone();
            set = PalmTree::from_iter(map.into_iter());
        }
        Construct::Insert(map) => {
            nat = map.clone();
            set = PalmTree::new();
            for (k, v) in map.into_iter() {
                set.insert(k, v);
            }
        }
        Construct::Load(map) => {
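            // BTreeMap iterates in ascending key order, so this exercises
            // PalmTree::load with an already sorted input sequence.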
            nat = map.clone();
            set = PalmTree::load(map.into_iter());
        }
    }

    for action in actions {
        match action {
            Action::Insert(key, value) => {
                let len = nat.len() + if nat.get(&key).is_some() { 0 } else { 1 };
                nat.insert(key, value);
                set.insert(key, value);
                assert_eq!(len, set.len());
                assert_eq!(nat.len(), set.len());
            }
            Action::Lookup(key) => {
                assert_eq!(nat.get(&key), set.get(&key));
            }
            Action::Remove(key) => {
                let len = nat.len() - if nat.get(&key).is_some() { 1 } else { 0 };
                let removed_from_nat = nat.remove(&key);
                if let Some((removed_key, removed_value)) = set.remove(&key) {
                    assert_eq!(removed_key, key);
                    assert_eq!(Some(removed_value), removed_from_nat);
                }
                assert_eq!(len, set.len());
                assert_eq!(nat.len(), set.len());
            }
            Action::Range(left, right) => {
                let set_iter;
                let nat_iter;
                match (left, right) {
                    (Some(mut left), Some(mut right)) => {
                        if left > right {
                            std::mem::swap(&mut left, &mut right);
                        }
                        set_iter = set.range(left..right);
                        nat_iter = nat.range(left..right);
                    }
                    (Some(left), None) => {
                        set_iter = set.range(left..);
                        nat_iter = nat.range(left..);
                    }
                    (None, Some(right)) => {
                        set_iter = set.range(..right);
                        nat_iter = nat.range(..right);
                    }
                    (None, None) => {
                        set_iter = set.range(..);
                        nat_iter = nat.range(..);
                    }
                }
                let expected: Vec<_> = nat_iter.map(|(k, v)| (*k, *v)).collect();
                let actual: Vec<_> = set_iter.map(|(k, v)| (*k, *v)).collect();
                assert_eq!(expected, actual);
            }
            Action::RangeMut(left, right) => {
                let set_iter;
                let nat_iter;
                match (left, right) {
                    (Some(mut left), Some(mut right)) => {
                        if left > right {
                            std::mem::swap(&mut left, &mut right);
                        }
                        set_iter = set.range_mut(left..right);
                        nat_iter = nat.range_mut(left..right);
                    }
                    (Some(left), None) => {
                        set_iter = set.range_mut(left..);
                        nat_iter = nat.range_mut(left..);
                    }
                    (None, Some(right)) => {
                        set_iter = set.range_mut(..right);
                        nat_iter = nat.range_mut(..right);
                    }
                    (None, None) => {
                        set_iter = set.range_mut(..);
                        nat_iter = nat.range_mut(..);
                    }
                }
                let expected: Vec<_> = nat_iter.map(|(k, v)| (*k, *v)).collect();
                let actual: Vec<_> = set_iter.map(|(k, v)| (*k, *v)).collect();
                assert_eq!(expected, actual);
            }
        }

        // Check len()
        assert_eq!(nat.len(), set.len());

        // Immutable ref iterator
        let expected: Vec<_> = nat.iter().map(|(k, v)| (*k, *v)).collect();
        let actual: Vec<_> = set.iter().map(|(k, v)| (*k, *v)).collect();
        assert_eq!(expected, actual);

        // Mutable ref iterator
        let expected: Vec<_> = nat.iter_mut().map(|(k, v)| (*k, *v)).collect();
        let actual: Vec<_> = set.iter_mut().map(|(k, v)| (*k, *v)).collect();
        assert_eq!(expected, actual);

        // Consuming iterator
        let expected: Vec<_> = nat.clone().into_iter().collect();
        let actual: Vec<_> = set.clone().into_iter().collect();
        assert_eq!(expected, actual);
    }
}

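// Drive the model-based test above from proptest, comparing PalmTree against
// BTreeMap for arbitrary sequences of constructors and actions.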
#[cfg(test)]
proptest! {
    #[test]
    fn integration_proptest(input: Input<u8, u8>) {
        use crate::{config::Tree64, pointer::Unique};
        integration_test::<u8, u8, Tree64<Unique>>(input);
    }
}

--------------------------------------------------------------------------------