diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs
index 20c6ebd22928b..4f9aa44b6b510 100644
--- a/library/alloc/src/collections/btree/map.rs
+++ b/library/alloc/src/collections/btree/map.rs
@@ -17,6 +17,10 @@ mod entry;
 pub use entry::{Entry, OccupiedEntry, VacantEntry};
 use Entry::*;
 
+/// Minimum number of elements in nodes that are not a root.
+/// We might temporarily have fewer elements during methods.
+pub(super) const MIN_LEN: usize = node::MIN_LEN_AFTER_SPLIT;
+
 /// A map based on a B-Tree.
 ///
 /// B-Trees represent a fundamental compromise between cache-efficiency and actually minimizing
@@ -1094,13 +1098,13 @@ impl<K: Ord, V> BTreeMap<K, V> {
         // Check if right-most child is underfull.
         let mut last_edge = internal.last_edge();
         let right_child_len = last_edge.reborrow().descend().len();
-        if right_child_len < node::MIN_LEN {
+        if right_child_len < MIN_LEN {
             // We need to steal.
             let mut last_kv = match last_edge.left_kv() {
                 Ok(left) => left,
                 Err(_) => unreachable!(),
             };
-            last_kv.bulk_steal_left(node::MIN_LEN - right_child_len);
+            last_kv.bulk_steal_left(MIN_LEN - right_child_len);
             last_edge = last_kv.right_edge();
         }
 
diff --git a/library/alloc/src/collections/btree/map/tests.rs b/library/alloc/src/collections/btree/map/tests.rs
index b51b95a635c87..adb94972f5bb6 100644
--- a/library/alloc/src/collections/btree/map/tests.rs
+++ b/library/alloc/src/collections/btree/map/tests.rs
@@ -50,10 +50,15 @@ impl<'a, K: 'a, V: 'a> BTreeMap<K, V> {
     {
         if let Some(root) = &self.root {
             let root_node = root.node_as_ref();
+            assert!(root_node.ascend().is_err());
             root_node.assert_back_pointers();
-            root_node.assert_ascending();
-            assert_eq!(self.length, root_node.assert_and_add_lengths());
+
+            let counted = root_node.assert_ascending();
+            assert_eq!(self.length, counted);
+            assert_eq!(self.length, root_node.calc_length());
+
+            root_node.assert_min_len(if root_node.height() > 0 { 1 } else { 0 });
         } else {
             assert_eq!(self.length, 0);
         }
     }
@@ -76,6 +81,18 @@ impl<'a, K: 'a, V: 'a> BTreeMap<K, V> {
     }
 }
 
+impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
+    pub fn assert_min_len(self, min_len: usize) {
+        assert!(self.len() >= min_len, "{} < {}", self.len(), min_len);
+        if let node::ForceResult::Internal(node) = self.force() {
+            for idx in 0..=node.len() {
+                let edge = unsafe { Handle::new_edge(node, idx) };
+                edge.descend().assert_min_len(MIN_LEN);
+            }
+        }
+    }
+}
+
 // Test our value of MIN_INSERTS_HEIGHT_2. It may change according to the
 // implementation of insertion, but it's best to be aware of when it does.
 #[test]
diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs
index 886d8abd030bf..f5aff9bf494e9 100644
--- a/library/alloc/src/collections/btree/node.rs
+++ b/library/alloc/src/collections/btree/node.rs
@@ -38,8 +38,8 @@ use crate::alloc::{AllocRef, Global, Layout};
 use crate::boxed::Box;
 
 const B: usize = 6;
-pub const MIN_LEN: usize = B - 1;
 pub const CAPACITY: usize = 2 * B - 1;
+pub const MIN_LEN_AFTER_SPLIT: usize = B - 1;
 const KV_IDX_CENTER: usize = B - 1;
 const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1;
 const EDGE_IDX_RIGHT_OF_CENTER: usize = B;
diff --git a/library/alloc/src/collections/btree/node/tests.rs b/library/alloc/src/collections/btree/node/tests.rs
index e56fc2aa51e7c..d6527057c5d77 100644
--- a/library/alloc/src/collections/btree/node/tests.rs
+++ b/library/alloc/src/collections/btree/node/tests.rs
@@ -5,25 +5,26 @@ use crate::string::String;
 use core::cmp::Ordering::*;
 
 impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal> {
+    /// Asserts that the back pointer in each reachable node points to its parent.
     pub fn assert_back_pointers(self) {
-        match self.force() {
-            ForceResult::Leaf(_) => {}
-            ForceResult::Internal(node) => {
-                for idx in 0..=node.len() {
-                    let edge = unsafe { Handle::new_edge(node, idx) };
-                    let child = edge.descend();
-                    assert!(child.ascend().ok() == Some(edge));
-                    child.assert_back_pointers();
-                }
+        if let ForceResult::Internal(node) = self.force() {
+            for idx in 0..=node.len() {
+                let edge = unsafe { Handle::new_edge(node, idx) };
+                let child = edge.descend();
+                assert!(child.ascend().ok() == Some(edge));
+                child.assert_back_pointers();
             }
         }
     }
 
-    pub fn assert_ascending(self)
+    /// Asserts that the keys are in strictly ascending order.
+    /// Returns how many keys it encountered.
+    pub fn assert_ascending(self) -> usize
     where
         K: Copy + Debug + Ord,
     {
         struct SeriesChecker<T> {
+            num_seen: usize,
             previous: Option<T>,
         }
         impl<T: Copy + Debug + Ord> SeriesChecker<T> {
@@ -32,10 +33,11 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>
                     assert!(previous < next, "{:?} >= {:?}", previous, next);
                 }
                 self.previous = Some(next);
+                self.num_seen += 1;
             }
         }
 
-        let mut checker = SeriesChecker { previous: None };
+        let mut checker = SeriesChecker { num_seen: 0, previous: None };
         self.visit_nodes_in_order(|pos| match pos {
             navigate::Position::Leaf(node) => {
                 for idx in 0..node.len() {
@@ -49,33 +51,7 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>
             }
             navigate::Position::Internal(_) => {}
         });
-    }
-
-    pub fn assert_and_add_lengths(self) -> usize {
-        let mut internal_length = 0;
-        let mut internal_kv_count = 0;
-        let mut leaf_length = 0;
-        self.visit_nodes_in_order(|pos| match pos {
-            navigate::Position::Leaf(node) => {
-                let is_root = self.height() == 0;
-                let min_len = if is_root { 0 } else { MIN_LEN };
-                assert!(node.len() >= min_len, "{} < {}", node.len(), min_len);
-                leaf_length += node.len();
-            }
-            navigate::Position::Internal(node) => {
-                let is_root = self.height() == node.height();
-                let min_len = if is_root { 1 } else { MIN_LEN };
-                assert!(node.len() >= min_len, "{} < {}", node.len(), min_len);
-                internal_length += node.len();
-            }
-            navigate::Position::InternalKV(_) => {
-                internal_kv_count += 1;
-            }
-        });
-        assert_eq!(internal_length, internal_kv_count);
-        let total = internal_length + leaf_length;
-        assert_eq!(self.calc_length(), total);
-        total
+        checker.num_seen
     }
 
     pub fn dump_keys(self) -> String
@@ -124,8 +100,8 @@ fn test_splitpoint() {
                 right_len += 1;
             }
         }
-        assert!(left_len >= MIN_LEN);
-        assert!(right_len >= MIN_LEN);
+        assert!(left_len >= MIN_LEN_AFTER_SPLIT);
+        assert!(right_len >= MIN_LEN_AFTER_SPLIT);
         assert!(left_len + right_len == CAPACITY);
     }
 }
diff --git a/library/alloc/src/collections/btree/remove.rs b/library/alloc/src/collections/btree/remove.rs
index 9733b7d608425..99655d3e2bf64 100644
--- a/library/alloc/src/collections/btree/remove.rs
+++ b/library/alloc/src/collections/btree/remove.rs
@@ -1,4 +1,5 @@
-use super::node::{self, marker, ForceResult, Handle, NodeRef};
+use super::map::MIN_LEN;
+use super::node::{marker, ForceResult, Handle, NodeRef};
 use super::unwrap_unchecked;
 use core::mem;
 use core::ptr;
@@ -40,7 +41,7 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>
         // Handle underflow
         let mut cur_node = unsafe { ptr::read(&pos).into_node().forget_type() };
         let mut at_leaf = true;
-        while cur_node.len() < node::MIN_LEN {
+        while cur_node.len() < MIN_LEN {
             match handle_underfull_node(cur_node) {
                 UnderflowResult::AtRoot => break,
                 UnderflowResult::Merged(edge, merged_with_left, offset) => {
diff --git a/library/alloc/src/collections/btree/split.rs b/library/alloc/src/collections/btree/split.rs
index 0e6e213f6e87d..5f00a5a25abad 100644
--- a/library/alloc/src/collections/btree/split.rs
+++ b/library/alloc/src/collections/btree/split.rs
@@ -1,5 +1,6 @@
-use super::node::{self, ForceResult::*, Root};
-use super::search::{self, SearchResult::*};
+use super::map::MIN_LEN;
+use super::node::{ForceResult::*, Root};
+use super::search::{search_node, SearchResult::*};
 use core::borrow::Borrow;
 
 impl<K, V> Root<K, V> {
@@ -20,7 +21,7 @@ impl<K, V> Root<K, V> {
         let mut right_node = right_root.node_as_mut();
 
         loop {
-            let mut split_edge = match search::search_node(left_node, key) {
+            let mut split_edge = match search_node(left_node, key) {
                 // key is going to the right tree
                Found(handle) => handle.left_edge(),
                GoDown(handle) => handle,
@@ -65,9 +66,9 @@ impl<K, V> Root<K, V> {
                     cur_node = last_kv.merge().descend();
                 } else {
                     let right_len = last_kv.reborrow().right_edge().descend().len();
-                    // `MINLEN + 1` to avoid readjust if merge happens on the next level.
-                    if right_len < node::MIN_LEN + 1 {
-                        last_kv.bulk_steal_left(node::MIN_LEN + 1 - right_len);
+                    // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
+                    if right_len < MIN_LEN + 1 {
+                        last_kv.bulk_steal_left(MIN_LEN + 1 - right_len);
                     }
                     cur_node = last_kv.right_edge().descend();
                 }
@@ -91,8 +92,9 @@ impl<K, V> Root<K, V> {
                     cur_node = first_kv.merge().descend();
                 } else {
                     let left_len = first_kv.reborrow().left_edge().descend().len();
-                    if left_len < node::MIN_LEN + 1 {
-                        first_kv.bulk_steal_right(node::MIN_LEN + 1 - left_len);
+                    // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
+                    if left_len < MIN_LEN + 1 {
+                        first_kv.bulk_steal_right(MIN_LEN + 1 - left_len);
                     }
                     cur_node = first_kv.left_edge().descend();
                 }
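
Reviewer note, not part of the patch: the rename is arithmetically sound. A full
node holds CAPACITY = 2 * B - 1 elements; splitting sends one element up to the
parent as a separator and leaves B - 1 elements on each side, which is exactly the
guarantee the constant now called MIN_LEN_AFTER_SPLIT expresses, while the
tree-wide fill invariant keeps the name MIN_LEN in map.rs. A minimal standalone
sketch of that arithmetic:

// Standalone sketch, not library code: the split arithmetic behind
// the renamed constant.
const B: usize = 6;
const CAPACITY: usize = 2 * B - 1;
const MIN_LEN_AFTER_SPLIT: usize = B - 1;

fn main() {
    // One element becomes the separator in the parent; the remaining
    // 2 * (B - 1) elements are shared evenly between the two halves.
    let (left, sep, right) = (MIN_LEN_AFTER_SPLIT, 1, MIN_LEN_AFTER_SPLIT);
    assert_eq!(left + sep + right, CAPACITY);
    println!("{} = {} + {} + {}", CAPACITY, left, sep, right);
}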
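
The new assert_min_len in map/tests.rs checks the fill invariant recursively: the
root is exempt (an internal root needs at least one key, a leaf root may be empty),
and every other node must hold at least MIN_LEN elements. A toy model of the same
walk, using a hypothetical Node enum instead of the real NodeRef/Handle types:

// Toy model, not the real API: a recursive min-len check with the
// same shape as the patch's assert_min_len.
const MIN_LEN: usize = 5; // mirrors B - 1 with B = 6

enum Node {
    Leaf { keys: Vec<i32> },
    Internal { keys: Vec<i32>, edges: Vec<Node> },
}

impl Node {
    fn len(&self) -> usize {
        match self {
            Node::Leaf { keys } | Node::Internal { keys, .. } => keys.len(),
        }
    }

    fn assert_min_len(&self, min_len: usize) {
        assert!(self.len() >= min_len, "{} < {}", self.len(), min_len);
        if let Node::Internal { edges, .. } = self {
            // Children are never exempt: each must hold at least MIN_LEN.
            for child in edges {
                child.assert_min_len(MIN_LEN);
            }
        }
    }
}

fn main() {
    let root = Node::Internal {
        keys: vec![50],
        edges: vec![
            Node::Leaf { keys: (0..5).collect() },
            Node::Leaf { keys: (51..56).collect() },
        ],
    };
    // Mirrors the call site in the patch: height > 0 means the root is
    // internal and needs at least one key; a leaf root may be empty.
    root.assert_min_len(if matches!(root, Node::Internal { .. }) { 1 } else { 0 });
}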
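
The restored `MIN_LEN + 1` comment in split.rs encodes a one-element safety
margin: the node on the split border is topped up to MIN_LEN + 1, so that if a
merge happens one level further down and consumes one of its elements, it still
satisfies MIN_LEN and needs no second readjustment. A toy calculation of that
margin, with top_up standing in for the assumed effect of
bulk_steal_left/bulk_steal_right:

// Toy calculation, not library code: why MIN_LEN + 1 avoids a
// second fix-up pass.
const MIN_LEN: usize = 5;

// Assumed effect of the bulk steal: bring an underfilled border node
// up to MIN_LEN + 1 elements; fuller nodes are left alone.
fn top_up(border_len: usize) -> usize {
    if border_len < MIN_LEN + 1 { MIN_LEN + 1 } else { border_len }
}

fn main() {
    for len in 0..=MIN_LEN + 2 {
        let after = top_up(len);
        // Even if a merge on the next level consumes one element,
        // the node cannot underflow.
        assert!(after - 1 >= MIN_LEN);
        println!("border len {} -> {}", len, after);
    }
}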