BTreeMap: stop mistaking node::MIN_LEN for a node level constraint

This commit is contained in:
Stein Somers 2020-10-24 13:04:52 +02:00
parent 2e8a54af60
commit 3b6c4fe465
6 changed files with 55 additions and 55 deletions

View File

@ -17,6 +17,10 @@
pub use entry::{Entry, OccupiedEntry, VacantEntry}; pub use entry::{Entry, OccupiedEntry, VacantEntry};
use Entry::*; use Entry::*;
/// Minimum number of elements in nodes that are not a root.
/// We might temporarily have fewer elements during methods.
pub(super) const MIN_LEN: usize = node::MIN_LEN_AFTER_SPLIT;
/// A map based on a B-Tree. /// A map based on a B-Tree.
/// ///
/// B-Trees represent a fundamental compromise between cache-efficiency and actually minimizing /// B-Trees represent a fundamental compromise between cache-efficiency and actually minimizing
@ -1094,13 +1098,13 @@ fn fix_right_edge(root: &mut node::Root<K, V>) {
// Check if right-most child is underfull. // Check if right-most child is underfull.
let mut last_edge = internal.last_edge(); let mut last_edge = internal.last_edge();
let right_child_len = last_edge.reborrow().descend().len(); let right_child_len = last_edge.reborrow().descend().len();
if right_child_len < node::MIN_LEN { if right_child_len < MIN_LEN {
// We need to steal. // We need to steal.
let mut last_kv = match last_edge.left_kv() { let mut last_kv = match last_edge.left_kv() {
Ok(left) => left, Ok(left) => left,
Err(_) => unreachable!(), Err(_) => unreachable!(),
}; };
last_kv.bulk_steal_left(node::MIN_LEN - right_child_len); last_kv.bulk_steal_left(MIN_LEN - right_child_len);
last_edge = last_kv.right_edge(); last_edge = last_kv.right_edge();
} }

View File

@ -50,10 +50,15 @@ fn check(&self)
{ {
if let Some(root) = &self.root { if let Some(root) = &self.root {
let root_node = root.node_as_ref(); let root_node = root.node_as_ref();
assert!(root_node.ascend().is_err()); assert!(root_node.ascend().is_err());
root_node.assert_back_pointers(); root_node.assert_back_pointers();
root_node.assert_ascending();
assert_eq!(self.length, root_node.assert_and_add_lengths()); let counted = root_node.assert_ascending();
assert_eq!(self.length, counted);
assert_eq!(self.length, root_node.calc_length());
root_node.assert_min_len(if root_node.height() > 0 { 1 } else { 0 });
} else { } else {
assert_eq!(self.length, 0); assert_eq!(self.length, 0);
} }
@ -76,6 +81,18 @@ fn dump_keys(&self) -> String
} }
} }
impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
pub fn assert_min_len(self, min_len: usize) {
assert!(self.len() >= min_len, "{} < {}", self.len(), min_len);
if let node::ForceResult::Internal(node) = self.force() {
for idx in 0..=node.len() {
let edge = unsafe { Handle::new_edge(node, idx) };
edge.descend().assert_min_len(MIN_LEN);
}
}
}
}
// Test our value of MIN_INSERTS_HEIGHT_2. It may change according to the // Test our value of MIN_INSERTS_HEIGHT_2. It may change according to the
// implementation of insertion, but it's best to be aware of when it does. // implementation of insertion, but it's best to be aware of when it does.
#[test] #[test]

View File

@ -38,8 +38,8 @@
use crate::boxed::Box; use crate::boxed::Box;
const B: usize = 6; const B: usize = 6;
pub const MIN_LEN: usize = B - 1;
pub const CAPACITY: usize = 2 * B - 1; pub const CAPACITY: usize = 2 * B - 1;
pub const MIN_LEN_AFTER_SPLIT: usize = B - 1;
const KV_IDX_CENTER: usize = B - 1; const KV_IDX_CENTER: usize = B - 1;
const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1; const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1;
const EDGE_IDX_RIGHT_OF_CENTER: usize = B; const EDGE_IDX_RIGHT_OF_CENTER: usize = B;

View File

@ -5,25 +5,26 @@
use core::cmp::Ordering::*; use core::cmp::Ordering::*;
impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> { impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
/// Asserts that the back pointer in each reachable node points to its parent.
pub fn assert_back_pointers(self) { pub fn assert_back_pointers(self) {
match self.force() { if let ForceResult::Internal(node) = self.force() {
ForceResult::Leaf(_) => {} for idx in 0..=node.len() {
ForceResult::Internal(node) => { let edge = unsafe { Handle::new_edge(node, idx) };
for idx in 0..=node.len() { let child = edge.descend();
let edge = unsafe { Handle::new_edge(node, idx) }; assert!(child.ascend().ok() == Some(edge));
let child = edge.descend(); child.assert_back_pointers();
assert!(child.ascend().ok() == Some(edge));
child.assert_back_pointers();
}
} }
} }
} }
pub fn assert_ascending(self) /// Asserts that the keys are in strictly ascending order.
/// Returns how many keys it encountered.
pub fn assert_ascending(self) -> usize
where where
K: Copy + Debug + Ord, K: Copy + Debug + Ord,
{ {
struct SeriesChecker<T> { struct SeriesChecker<T> {
num_seen: usize,
previous: Option<T>, previous: Option<T>,
} }
impl<T: Copy + Debug + Ord> SeriesChecker<T> { impl<T: Copy + Debug + Ord> SeriesChecker<T> {
@ -32,10 +33,11 @@ fn is_ascending(&mut self, next: T) {
assert!(previous < next, "{:?} >= {:?}", previous, next); assert!(previous < next, "{:?} >= {:?}", previous, next);
} }
self.previous = Some(next); self.previous = Some(next);
self.num_seen += 1;
} }
} }
let mut checker = SeriesChecker { previous: None }; let mut checker = SeriesChecker { num_seen: 0, previous: None };
self.visit_nodes_in_order(|pos| match pos { self.visit_nodes_in_order(|pos| match pos {
navigate::Position::Leaf(node) => { navigate::Position::Leaf(node) => {
for idx in 0..node.len() { for idx in 0..node.len() {
@ -49,33 +51,7 @@ fn is_ascending(&mut self, next: T) {
} }
navigate::Position::Internal(_) => {} navigate::Position::Internal(_) => {}
}); });
} checker.num_seen
pub fn assert_and_add_lengths(self) -> usize {
let mut internal_length = 0;
let mut internal_kv_count = 0;
let mut leaf_length = 0;
self.visit_nodes_in_order(|pos| match pos {
navigate::Position::Leaf(node) => {
let is_root = self.height() == 0;
let min_len = if is_root { 0 } else { MIN_LEN };
assert!(node.len() >= min_len, "{} < {}", node.len(), min_len);
leaf_length += node.len();
}
navigate::Position::Internal(node) => {
let is_root = self.height() == node.height();
let min_len = if is_root { 1 } else { MIN_LEN };
assert!(node.len() >= min_len, "{} < {}", node.len(), min_len);
internal_length += node.len();
}
navigate::Position::InternalKV(_) => {
internal_kv_count += 1;
}
});
assert_eq!(internal_length, internal_kv_count);
let total = internal_length + leaf_length;
assert_eq!(self.calc_length(), total);
total
} }
pub fn dump_keys(self) -> String pub fn dump_keys(self) -> String
@ -124,8 +100,8 @@ fn test_splitpoint() {
right_len += 1; right_len += 1;
} }
} }
assert!(left_len >= MIN_LEN); assert!(left_len >= MIN_LEN_AFTER_SPLIT);
assert!(right_len >= MIN_LEN); assert!(right_len >= MIN_LEN_AFTER_SPLIT);
assert!(left_len + right_len == CAPACITY); assert!(left_len + right_len == CAPACITY);
} }
} }

View File

@ -1,4 +1,5 @@
use super::node::{self, marker, ForceResult, Handle, NodeRef}; use super::map::MIN_LEN;
use super::node::{marker, ForceResult, Handle, NodeRef};
use super::unwrap_unchecked; use super::unwrap_unchecked;
use core::mem; use core::mem;
use core::ptr; use core::ptr;
@ -40,7 +41,7 @@ pub fn remove_kv_tracking<F: FnOnce()>(
// Handle underflow // Handle underflow
let mut cur_node = unsafe { ptr::read(&pos).into_node().forget_type() }; let mut cur_node = unsafe { ptr::read(&pos).into_node().forget_type() };
let mut at_leaf = true; let mut at_leaf = true;
while cur_node.len() < node::MIN_LEN { while cur_node.len() < MIN_LEN {
match handle_underfull_node(cur_node) { match handle_underfull_node(cur_node) {
UnderflowResult::AtRoot => break, UnderflowResult::AtRoot => break,
UnderflowResult::Merged(edge, merged_with_left, offset) => { UnderflowResult::Merged(edge, merged_with_left, offset) => {

View File

@ -1,5 +1,6 @@
use super::node::{self, ForceResult::*, Root}; use super::map::MIN_LEN;
use super::search::{self, SearchResult::*}; use super::node::{ForceResult::*, Root};
use super::search::{search_node, SearchResult::*};
use core::borrow::Borrow; use core::borrow::Borrow;
impl<K, V> Root<K, V> { impl<K, V> Root<K, V> {
@ -20,7 +21,7 @@ pub fn split_off<Q: ?Sized + Ord>(&mut self, right_root: &mut Self, key: &Q)
let mut right_node = right_root.node_as_mut(); let mut right_node = right_root.node_as_mut();
loop { loop {
let mut split_edge = match search::search_node(left_node, key) { let mut split_edge = match search_node(left_node, key) {
// key is going to the right tree // key is going to the right tree
Found(handle) => handle.left_edge(), Found(handle) => handle.left_edge(),
GoDown(handle) => handle, GoDown(handle) => handle,
@ -65,9 +66,9 @@ fn fix_right_border(&mut self) {
cur_node = last_kv.merge().descend(); cur_node = last_kv.merge().descend();
} else { } else {
let right_len = last_kv.reborrow().right_edge().descend().len(); let right_len = last_kv.reborrow().right_edge().descend().len();
// `MINLEN + 1` to avoid readjust if merge happens on the next level. // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
if right_len < node::MIN_LEN + 1 { if right_len < MIN_LEN + 1 {
last_kv.bulk_steal_left(node::MIN_LEN + 1 - right_len); last_kv.bulk_steal_left(MIN_LEN + 1 - right_len);
} }
cur_node = last_kv.right_edge().descend(); cur_node = last_kv.right_edge().descend();
} }
@ -91,8 +92,9 @@ fn fix_left_border(&mut self) {
cur_node = first_kv.merge().descend(); cur_node = first_kv.merge().descend();
} else { } else {
let left_len = first_kv.reborrow().left_edge().descend().len(); let left_len = first_kv.reborrow().left_edge().descend().len();
if left_len < node::MIN_LEN + 1 { // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
first_kv.bulk_steal_right(node::MIN_LEN + 1 - left_len); if left_len < MIN_LEN + 1 {
first_kv.bulk_steal_right(MIN_LEN + 1 - left_len);
} }
cur_node = first_kv.left_edge().descend(); cur_node = first_kv.left_edge().descend();
} }