diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs
index 79dc694e6be82..a9bf8f299072f 100644
--- a/library/alloc/src/collections/btree/map.rs
+++ b/library/alloc/src/collections/btree/map.rs
@@ -138,8 +138,8 @@ pub struct BTreeMap<K, V> {
 #[stable(feature = "btree_drop", since = "1.7.0")]
 unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for BTreeMap<K, V> {
     fn drop(&mut self) {
-        unsafe {
-            drop(ptr::read(self).into_iter());
+        if let Some(root) = self.root.take() {
+            Dropper { front: root.into_dying().first_leaf_edge(), remaining_length: self.length };
         }
     }
 }
@@ -147,42 +147,59 @@ unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for BTreeMap<K, V> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
     fn clone(&self) -> BTreeMap<K, V> {
+        struct GuardedTree<K, V, Type>(Option<NodeRef<marker::Owned, K, V, Type>>);
+        impl<K, V, Type> GuardedTree<K, V, Type> {
+            fn new(root: NodeRef<marker::Owned, K, V, Type>) -> Self {
+                Self(Some(root))
+            }
+            fn borrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
+                self.0.as_mut().unwrap().borrow_mut()
+            }
+            fn leak(mut self) -> Root<K, V> {
+                self.0.take().unwrap().forget_type()
+            }
+        }
+        impl<K, V, Type> Drop for GuardedTree<K, V, Type> {
+            fn drop(&mut self) {
+                if let Some(root) = self.0.take() {
+                    let mut cur_edge = root.forget_type().into_dying().first_leaf_edge();
+                    while let Some((next_edge, _kv)) = unsafe { cur_edge.deallocating_next() } {
+                        cur_edge = next_edge;
+                    }
+                }
+            }
+        }
+
         fn clone_subtree<'a, K: Clone, V: Clone>(
             node: NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>,
-        ) -> BTreeMap<K, V>
+        ) -> Root<K, V>
         where
             K: 'a,
             V: 'a,
         {
             match node.force() {
                 Leaf(leaf) => {
-                    let mut out_tree = BTreeMap { root: Some(Root::new()), length: 0 };
+                    let mut out_tree = GuardedTree::new(NodeRef::new_leaf());
 
                     {
-                        let root = out_tree.root.as_mut().unwrap(); // unwrap succeeds because we just wrapped
-                        let mut out_node = match root.borrow_mut().force() {
-                            Leaf(leaf) => leaf,
-                            Internal(_) => unreachable!(),
-                        };
-
+                        let mut out_node = out_tree.borrow_mut();
                         let mut in_edge = leaf.first_edge();
                         while let Ok(kv) = in_edge.right_kv() {
                             let (k, v) = kv.into_kv();
                             in_edge = kv.right_edge();
 
                             out_node.push(k.clone(), v.clone());
-                            out_tree.length += 1;
                         }
                     }
 
-                    out_tree
+                    out_tree.leak()
                 }
                 Internal(internal) => {
-                    let mut out_tree = clone_subtree(internal.first_edge().descend());
+                    let first_child = clone_subtree(internal.first_edge().descend());
+                    let mut out_tree = GuardedTree::new(NodeRef::new_internal(first_child));
 
                     {
-                        let out_root = BTreeMap::ensure_is_owned(&mut out_tree.root);
-                        let mut out_node = out_root.push_internal_level();
+                        let mut out_node = out_tree.borrow_mut();
                         let mut in_edge = internal.first_edge();
                         while let Ok(kv) = in_edge.right_kv() {
                             let (k, v) = kv.into_kv();
@@ -192,32 +209,17 @@ impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
                             let v = (*v).clone();
                             let subtree = clone_subtree(in_edge.descend());
 
-                            // We can't destructure subtree directly
-                            // because BTreeMap implements Drop
-                            let (subroot, sublength) = unsafe {
-                                let subtree = ManuallyDrop::new(subtree);
-                                let root = ptr::read(&subtree.root);
-                                let length = subtree.length;
-                                (root, length)
-                            };
-
-                            out_node.push(k, v, subroot.unwrap_or_else(Root::new));
-                            out_tree.length += 1 + sublength;
+                            out_node.push(k, v, subtree);
                         }
                     }
 
-                    out_tree
+                    out_tree.leak()
                 }
             }
         }
 
-        if self.is_empty() {
-            // Ideally we'd call `BTreeMap::new` here, but that has the `K:
-            // Ord` constraint, which this method lacks.
-            BTreeMap { root: None, length: 0 }
-        } else {
-            clone_subtree(self.root.as_ref().unwrap().reborrow()) // unwrap succeeds because not empty
-        }
+        let cloned_root = self.root.as_ref().map(|root| clone_subtree(root.reborrow()));
+        BTreeMap { root: cloned_root, length: self.length }
     }
 }
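An aside on the shape of `GuardedTree` (not part of the patch): it is an RAII guard, so a panic inside a key or value `clone()` tears down the half-built subtree instead of leaking it, while `leak()` defuses the guard once the subtree is complete. The same idea in a self-contained sketch, with invented names and a plain `Vec` standing in for the tree:

struct BuildGuard<T>(Option<Vec<T>>);

impl<T> BuildGuard<T> {
    fn new() -> Self {
        BuildGuard(Some(Vec::new()))
    }
    fn push(&mut self, value: T) {
        self.0.as_mut().unwrap().push(value);
    }
    /// Defuses the guard: from here on, the caller owns the buffer.
    fn finish(mut self) -> Vec<T> {
        self.0.take().unwrap()
    }
}

impl<T> Drop for BuildGuard<T> {
    fn drop(&mut self) {
        // Runs only if `finish` was never called, i.e. during unwinding;
        // dropping the Vec frees the partially built contents.
        if let Some(partial) = self.0.take() {
            drop(partial);
        }
    }
}

fn clone_all<T: Clone>(items: &[T]) -> Vec<T> {
    let mut out = BuildGuard::new();
    for item in items {
        out.push(item.clone()); // may panic; the guard cleans up
    }
    out.finish()
}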
@@ -325,6 +327,14 @@ impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IntoIter<K, V> {
     }
 }
 
+/// A simplified version of `IntoIter` that is not double-ended and has only one
+/// purpose: to drop the remainder of an `IntoIter`. Therefore it also serves to
+/// drop an entire tree without the need to look up a `back` leaf edge.
+struct Dropper<K, V> {
+    front: Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge>,
+    remaining_length: usize,
+}
+
 /// An iterator over the keys of a `BTreeMap`.
 ///
 /// This `struct` is created by the [`keys`] method on [`BTreeMap`]. See its
@@ -1373,42 +1383,42 @@ impl<K, V> IntoIterator for BTreeMap<K, V> {
     }
 }
 
-#[stable(feature = "btree_drop", since = "1.7.0")]
-impl<K, V> Drop for IntoIter<K, V> {
+impl<K, V> Drop for Dropper<K, V> {
     fn drop(&mut self) {
-        struct DropGuard<'a, K, V>(&'a mut IntoIter<K, V>);
+        // Similar to advancing a non-fusing iterator.
+        fn next_or_end<K, V>(this: &mut Dropper<K, V>) -> Option<(K, V)> {
+            if this.remaining_length == 0 {
+                unsafe { ptr::read(&this.front).deallocating_end() }
+                None
+            } else {
+                this.remaining_length -= 1;
+                Some(unsafe { this.front.next_unchecked() })
+            }
+        }
+
+        struct DropGuard<'a, K, V>(&'a mut Dropper<K, V>);
 
         impl<'a, K, V> Drop for DropGuard<'a, K, V> {
             fn drop(&mut self) {
                 // Continue the same loop we perform below. This only runs when unwinding, so we
                 // don't have to care about panics this time (they'll abort).
-                while let Some(_) = self.0.next() {}
-
-                unsafe {
-                    let mut node =
-                        unwrap_unchecked(ptr::read(&self.0.front)).into_node().forget_type();
-                    while let Some(parent) = node.deallocate_and_ascend() {
-                        node = parent.into_node().forget_type();
-                    }
-                }
+                while let Some(_pair) = next_or_end(&mut self.0) {}
             }
         }
 
-        while let Some(pair) = self.next() {
+        while let Some(pair) = next_or_end(self) {
             let guard = DropGuard(self);
             drop(pair);
             mem::forget(guard);
         }
+    }
+}
 
-        unsafe {
-            if let Some(front) = ptr::read(&self.front) {
-                let mut node = front.into_node().forget_type();
-                // Most of the nodes have been deallocated while traversing
-                // but one pile from a leaf up to the root is left standing.
-                while let Some(parent) = node.deallocate_and_ascend() {
-                    node = parent.into_node().forget_type();
-                }
-            }
+#[stable(feature = "btree_drop", since = "1.7.0")]
+impl<K, V> Drop for IntoIter<K, V> {
+    fn drop(&mut self) {
+        if let Some(front) = self.front.take() {
+            Dropper { front, remaining_length: self.length };
         }
     }
 }
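Two patterns above are worth calling out. `Dropper { front, remaining_length: self.length };` builds a temporary and discards it in the same statement, so all the work happens in `Dropper::drop`; and the guard-plus-`mem::forget` loop keeps destructing even if one element's destructor panics. A self-contained sketch of the latter, assuming nothing about B-trees (invented names, `Vec`-based):

fn drain_all<T>(items: &mut Vec<T>) {
    struct Guard<'a, T>(&'a mut Vec<T>);

    impl<'a, T> Drop for Guard<'a, T> {
        fn drop(&mut self) {
            // Runs only while unwinding from a panicking `drop(value)`
            // below; a second panic here would abort, so just keep going.
            while let Some(value) = self.0.pop() {
                drop(value);
            }
        }
    }

    while let Some(value) = items.pop() {
        let guard = Guard(&mut *items);
        drop(value); // may panic
        std::mem::forget(guard); // reached only if it didn't
    }
}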
diff --git a/library/alloc/src/collections/btree/map/tests.rs b/library/alloc/src/collections/btree/map/tests.rs
index ba5a4442f56e6..b4ceaac2cd5b2 100644
--- a/library/alloc/src/collections/btree/map/tests.rs
+++ b/library/alloc/src/collections/btree/map/tests.rs
@@ -1,4 +1,6 @@
-use super::super::{node, DeterministicRng};
+use super::super::testing::crash_test::{CrashTest, Panic};
+use super::super::testing::ord_chaos::{Cyclic3, Governed, Governor};
+use super::super::testing::rng::DeterministicRng;
 use super::Entry::{Occupied, Vacant};
 use super::*;
 use crate::boxed::Box;
@@ -15,9 +17,6 @@ use std::ops::RangeBounds;
 use std::panic::{catch_unwind, AssertUnwindSafe};
 use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};
 
-mod ord_chaos;
-use ord_chaos::{Cyclic3, Governed, Governor};
-
 // Capacity of a tree with a single level,
 // i.e., a tree whose root is a leaf node at height 0.
 const NODE_CAPACITY: usize = node::CAPACITY;
@@ -1136,91 +1135,48 @@ mod test_drain_filter {
 
     #[test]
     fn drop_panic_leak() {
-        static PREDS: AtomicUsize = AtomicUsize::new(0);
-        static DROPS: AtomicUsize = AtomicUsize::new(0);
-
-        struct D;
-        impl Drop for D {
-            fn drop(&mut self) {
-                if DROPS.fetch_add(1, SeqCst) == 1 {
-                    panic!("panic in `drop`");
-                }
-            }
-        }
+        let test = CrashTest::new();
+        let mut map = BTreeMap::new();
+        map.insert(test.dummy(0, Panic::Never), ());
+        map.insert(test.dummy(1, Panic::InDrop), ());
+        map.insert(test.dummy(2, Panic::Never), ());
 
-        // Keys are multiples of 4, so that each key is counted by a hexadecimal digit.
-        let mut map = (0..3).map(|i| (i * 4, D)).collect::<BTreeMap<_, _>>();
+        catch_unwind(move || drop(map.drain_filter(|dummy, _| dummy.query(true)))).unwrap_err();
 
-        catch_unwind(move || {
-            drop(map.drain_filter(|i, _| {
-                PREDS.fetch_add(1usize << i, SeqCst);
-                true
-            }))
-        })
-        .unwrap_err();
-
-        assert_eq!(PREDS.load(SeqCst), 0x011);
-        assert_eq!(DROPS.load(SeqCst), 3);
+        assert_eq!(test.queried(), 0x011);
+        assert_eq!(test.dropped(), 0x111);
     }
 
     #[test]
     fn pred_panic_leak() {
-        static PREDS: AtomicUsize = AtomicUsize::new(0);
-        static DROPS: AtomicUsize = AtomicUsize::new(0);
-
-        struct D;
-        impl Drop for D {
-            fn drop(&mut self) {
-                DROPS.fetch_add(1, SeqCst);
-            }
-        }
-
-        // Keys are multiples of 4, so that each key is counted by a hexadecimal digit.
-        let mut map = (0..3).map(|i| (i * 4, D)).collect::<BTreeMap<_, _>>();
+        let test = CrashTest::new();
+        let mut map = BTreeMap::new();
+        map.insert(test.dummy(0, Panic::Never), ());
+        map.insert(test.dummy(1, Panic::InQuery), ());
+        map.insert(test.dummy(2, Panic::InQuery), ());
 
-        catch_unwind(AssertUnwindSafe(|| {
-            drop(map.drain_filter(|i, _| {
-                PREDS.fetch_add(1usize << i, SeqCst);
-                match i {
-                    0 => true,
-                    _ => panic!(),
-                }
-            }))
-        }))
-        .unwrap_err();
+        catch_unwind(AssertUnwindSafe(|| drop(map.drain_filter(|dummy, _| dummy.query(true)))))
+            .unwrap_err();
 
-        assert_eq!(PREDS.load(SeqCst), 0x011);
-        assert_eq!(DROPS.load(SeqCst), 1);
+        assert_eq!(test.queried(), 0x011);
+        assert_eq!(test.dropped(), 0x001);
         assert_eq!(map.len(), 2);
-        assert_eq!(map.first_entry().unwrap().key(), &4);
-        assert_eq!(map.last_entry().unwrap().key(), &8);
+        assert_eq!(map.first_entry().unwrap().key().id, 1);
+        assert_eq!(map.last_entry().unwrap().key().id, 2);
         map.check();
     }
 
     // Same as above, but attempt to use the iterator again after the panic in the predicate
     #[test]
     fn pred_panic_reuse() {
-        static PREDS: AtomicUsize = AtomicUsize::new(0);
-        static DROPS: AtomicUsize = AtomicUsize::new(0);
-
-        struct D;
-        impl Drop for D {
-            fn drop(&mut self) {
-                DROPS.fetch_add(1, SeqCst);
-            }
-        }
-
-        // Keys are multiples of 4, so that each key is counted by a hexadecimal digit.
-        let mut map = (0..3).map(|i| (i * 4, D)).collect::<BTreeMap<_, _>>();
+        let test = CrashTest::new();
+        let mut map = BTreeMap::new();
+        map.insert(test.dummy(0, Panic::Never), ());
+        map.insert(test.dummy(1, Panic::InQuery), ());
+        map.insert(test.dummy(2, Panic::InQuery), ());
 
         {
-            let mut it = map.drain_filter(|i, _| {
-                PREDS.fetch_add(1usize << i, SeqCst);
-                match i {
-                    0 => true,
-                    _ => panic!(),
-                }
-            });
+            let mut it = map.drain_filter(|dummy, _| dummy.query(true));
             catch_unwind(AssertUnwindSafe(|| while it.next().is_some() {})).unwrap_err();
             // Iterator behaviour after a panic is explicitly unspecified,
             // so this is just the current implementation:
@@ -1228,11 +1184,11 @@ mod test_drain_filter {
             assert!(matches!(result, Ok(None)));
         }
 
-        assert_eq!(PREDS.load(SeqCst), 0x011);
-        assert_eq!(DROPS.load(SeqCst), 1);
+        assert_eq!(test.queried(), 0x011);
+        assert_eq!(test.dropped(), 0x001);
         assert_eq!(map.len(), 2);
-        assert_eq!(map.first_entry().unwrap().key(), &4);
-        assert_eq!(map.last_entry().unwrap().key(), &8);
+        assert_eq!(map.first_entry().unwrap().key().id, 1);
+        assert_eq!(map.last_entry().unwrap().key().id, 2);
         map.check();
     }
 }
@@ -1484,6 +1440,25 @@ fn test_clone() {
     map.check();
 }
 
+#[test]
+fn test_clone_panic_leak() {
+    let test = CrashTest::new();
+
+    let mut map = BTreeMap::new();
+    map.insert(test.dummy(0, Panic::Never), ());
+    map.insert(test.dummy(1, Panic::InClone), ());
+    map.insert(test.dummy(2, Panic::Never), ());
+
+    catch_unwind(|| map.clone()).unwrap_err();
+    assert_eq!(test.cloned(), 0x011);
+    assert_eq!(test.dropped(), 0x001);
+    assert_eq!(map.len(), 3);
+
+    drop(map);
+    assert_eq!(test.cloned(), 0x011);
+    assert_eq!(test.dropped(), 0x112);
+}
+
 #[test]
 fn test_clone_from() {
     let mut map1 = BTreeMap::new();
@@ -1860,29 +1835,18 @@ create_append_test!(test_append_1700, 1700);
 
 #[test]
 fn test_append_drop_leak() {
-    static DROPS: AtomicUsize = AtomicUsize::new(0);
-
-    struct D;
-
-    impl Drop for D {
-        fn drop(&mut self) {
-            if DROPS.fetch_add(1, SeqCst) == 0 {
-                panic!("panic in `drop`");
-            }
-        }
-    }
-
+    let test = CrashTest::new();
     let mut left = BTreeMap::new();
    let mut right = BTreeMap::new();
-    left.insert(0, D);
-    left.insert(1, D); // first to be dropped during append
-    left.insert(2, D);
-    right.insert(1, D);
-    right.insert(2, D);
+    left.insert(test.dummy(0, Panic::Never), ());
+    left.insert(test.dummy(1, Panic::InDrop), ()); // first duplicate key, dropped during append
+    left.insert(test.dummy(2, Panic::Never), ());
+    right.insert(test.dummy(1, Panic::Never), ());
+    right.insert(test.dummy(2, Panic::Never), ());
 
     catch_unwind(move || left.append(&mut right)).unwrap_err();
 
-    assert_eq!(DROPS.load(SeqCst), 5);
+    assert_eq!(test.dropped(), 0x221);
 }
 
 #[test]
@@ -2009,51 +1973,32 @@ fn test_split_off_large_random_sorted() {
 
 #[test]
 fn test_into_iter_drop_leak_height_0() {
-    static DROPS: AtomicUsize = AtomicUsize::new(0);
-
-    struct D;
-
-    impl Drop for D {
-        fn drop(&mut self) {
-            if DROPS.fetch_add(1, SeqCst) == 3 {
-                panic!("panic in `drop`");
-            }
-        }
-    }
-
+    let test = CrashTest::new();
     let mut map = BTreeMap::new();
-    map.insert("a", D);
-    map.insert("b", D);
-    map.insert("c", D);
-    map.insert("d", D);
-    map.insert("e", D);
+    map.insert("a", test.dummy(0, Panic::Never));
+    map.insert("b", test.dummy(1, Panic::Never));
+    map.insert("c", test.dummy(2, Panic::Never));
+    map.insert("d", test.dummy(3, Panic::InDrop));
+    map.insert("e", test.dummy(4, Panic::Never));
 
     catch_unwind(move || drop(map.into_iter())).unwrap_err();
 
-    assert_eq!(DROPS.load(SeqCst), 5);
+    assert_eq!(test.dropped(), 0x11111);
}
 
 #[test]
 fn test_into_iter_drop_leak_height_1() {
     let size = MIN_INSERTS_HEIGHT_1;
-    static DROPS: AtomicUsize = AtomicUsize::new(0);
-    static PANIC_POINT: AtomicUsize = AtomicUsize::new(0);
-
-    struct D;
-    impl Drop for D {
-        fn drop(&mut self) {
-            if DROPS.fetch_add(1, SeqCst) == PANIC_POINT.load(SeqCst) {
-                panic!("panic in `drop`");
-            }
-        }
-    }
-
     for panic_point in vec![0, 1, size - 2, size - 1] {
-        DROPS.store(0, SeqCst);
-        PANIC_POINT.store(panic_point, SeqCst);
-        let map: BTreeMap<_, _> = (0..size).map(|i| (i, D)).collect();
+        let test = CrashTest::new_totaling();
+        let map: BTreeMap<_, _> = (0..size)
+            .map(|i| {
+                let panic = if i == panic_point { Panic::InDrop } else { Panic::Never };
+                (test.dummy(i, Panic::Never), test.dummy(i, panic))
+            })
+            .collect();
         catch_unwind(move || drop(map.into_iter())).unwrap_err();
-        assert_eq!(DROPS.load(SeqCst), size);
+        assert_eq!(test.dropped(), size as u64 * 2);
     }
 }
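The expected values such as 0x221 above read as one hexadecimal digit per dummy id: with `CrashTest::new()`, each id owns 4 bits of a shared 64-bit counter. A stripped-down model of that encoding, replaying the append scenario (illustrative only, not the test helper itself):

use std::sync::atomic::{AtomicU64, Ordering::SeqCst};

fn main() {
    let dropped = AtomicU64::new(0);
    let record = |id: usize| {
        dropped.fetch_add(1 << (id * 4), SeqCst); // 4 bits per id
    };
    record(0); // left's dummy 0
    record(1); // left's dummy 1 (duplicate key)
    record(1); // right's dummy 1
    record(2); // left's dummy 2 (duplicate key)
    record(2); // right's dummy 2
    assert_eq!(dropped.load(SeqCst), 0x221);
}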
diff --git a/library/alloc/src/collections/btree/mod.rs b/library/alloc/src/collections/btree/mod.rs
index cdb39104047f0..d10c3b1e3d7a0 100644
--- a/library/alloc/src/collections/btree/mod.rs
+++ b/library/alloc/src/collections/btree/mod.rs
@@ -36,32 +36,4 @@ pub unsafe fn unwrap_unchecked(val: Option<T>) -> T {
 }
 
 #[cfg(test)]
-/// XorShiftRng
-struct DeterministicRng {
-    count: usize,
-    x: u32,
-    y: u32,
-    z: u32,
-    w: u32,
-}
-
-#[cfg(test)]
-impl DeterministicRng {
-    fn new() -> Self {
-        DeterministicRng { count: 0, x: 0x193a6754, y: 0xa8a7d469, z: 0x97830e05, w: 0x113ba7bb }
-    }
-
-    /// Guarantees that each returned number is unique.
-    fn next(&mut self) -> u32 {
-        self.count += 1;
-        assert!(self.count <= 70029);
-        let x = self.x;
-        let t = x ^ (x << 11);
-        self.x = self.y;
-        self.y = self.z;
-        self.z = self.w;
-        let w_ = self.w;
-        self.w = w_ ^ (w_ >> 19) ^ (t ^ (t >> 8));
-        self.w
-    }
-}
+mod testing;
diff --git a/library/alloc/src/collections/btree/navigate.rs b/library/alloc/src/collections/btree/navigate.rs
index 2773b427fb133..5138ba97fc171 100644
--- a/library/alloc/src/collections/btree/navigate.rs
+++ b/library/alloc/src/collections/btree/navigate.rs
@@ -290,37 +290,71 @@ impl<BorrowType, K, V>
     }
 }
 
-macro_rules! def_next_kv_uncheched_dealloc {
-    { unsafe fn $name:ident : $adjacent_kv:ident } => {
-        /// Given a leaf edge handle into an owned tree, returns a handle to the next KV,
-        /// while deallocating any node left behind yet leaving the corresponding edge
-        /// in its parent node dangling.
-        ///
-        /// # Safety
-        /// - The leaf edge must not be the last one in the direction travelled.
-        /// - The node carrying the next KV returned must not have been deallocated by a
-        ///   previous call on any handle obtained for this tree.
-        unsafe fn $name <K, V>(
-            leaf_edge: Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>,
-        ) -> Handle<NodeRef<marker::Owned, K, V, marker::LeafOrInternal>, marker::KV> {
-            let mut edge = leaf_edge.forget_node_type();
-            loop {
-                edge = match edge.$adjacent_kv() {
-                    Ok(internal_kv) => return internal_kv,
-                    Err(last_edge) => {
-                        unsafe {
-                            let parent_edge = last_edge.into_node().deallocate_and_ascend();
-                            unwrap_unchecked(parent_edge).forget_node_type()
-                        }
-                    }
-                }
-            }
-        }
-    };
-}
+impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
+    /// Given a leaf edge handle into a dying tree, returns the next leaf edge
+    /// on the right side, and the key-value pair in between, which is either
+    /// in the same leaf node, in an ancestor node, or non-existent.
+    ///
+    /// This method also deallocates any node(s) it reaches the end of. This
+    /// implies that if no more key-value pair exists, the entire remainder of
+    /// the tree will have been deallocated and there is nothing left to return.
+    ///
+    /// # Safety
+    /// The next KV must not have been previously returned by counterpart `deallocating_next_back`.
+    pub unsafe fn deallocating_next(self) -> Option<(Self, (K, V))> {
+        let mut edge = self.forget_node_type();
+        loop {
+            edge = match edge.right_kv() {
+                Ok(kv) => {
+                    let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
+                    let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
+                    return Some((kv.next_leaf_edge(), (k, v)));
+                }
+                Err(last_edge) => match unsafe { last_edge.into_node().deallocate_and_ascend() } {
+                    Some(parent_edge) => parent_edge.forget_node_type(),
+                    None => return None,
+                },
+            }
+        }
+    }
 
-def_next_kv_uncheched_dealloc! {unsafe fn next_kv_unchecked_dealloc: right_kv}
-def_next_kv_uncheched_dealloc! {unsafe fn next_back_kv_unchecked_dealloc: left_kv}
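The traversal above hands back ownership of each key-value pair and frees every node it steps out of, so a plain forward walk tears the whole tree down. The same "yield the element, then free the cell behind you" shape on a singly linked list (self-contained sketch, not the B-tree code):

struct Node<T> {
    value: T,
    next: Option<Box<Node<T>>>,
}

/// Pops the front value and deallocates the node that held it.
fn deallocating_next<T>(head: Option<Box<Node<T>>>) -> Option<(Option<Box<Node<T>>>, T)> {
    head.map(|boxed| {
        let node = *boxed; // moves out of the Box; its heap cell is freed here
        (node.next, node.value)
    })
}

fn main() {
    let list = Some(Box::new(Node { value: 1, next: Some(Box::new(Node { value: 2, next: None })) }));
    let mut cur = list;
    while let Some((next, value)) = deallocating_next(cur) {
        println!("{value}");
        cur = next;
    }
}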
+    /// Given a leaf edge handle into a dying tree, returns the next leaf edge
+    /// on the left side, and the key-value pair in between, which is either
+    /// in the same leaf node, in an ancestor node, or non-existent.
+    ///
+    /// This method also deallocates any node(s) it reaches the end of. This
+    /// implies that if no more key-value pair exists, the entire remainder of
+    /// the tree will have been deallocated and there is nothing left to return.
+    ///
+    /// # Safety
+    /// The next KV must not have been previously returned by counterpart `deallocating_next`.
+    unsafe fn deallocating_next_back(self) -> Option<(Self, (K, V))> {
+        let mut edge = self.forget_node_type();
+        loop {
+            edge = match edge.left_kv() {
+                Ok(kv) => {
+                    let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
+                    let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
+                    return Some((kv.next_back_leaf_edge(), (k, v)));
+                }
+                Err(last_edge) => match unsafe { last_edge.into_node().deallocate_and_ascend() } {
+                    Some(parent_edge) => parent_edge.forget_node_type(),
+                    None => return None,
+                },
+            }
+        }
+    }
+
+    /// Deallocates a pile of nodes from the leaf up to the root.
+    /// This is useful when `deallocating_next` and `deallocating_next_back`
+    /// have been nibbling at both sides of the same tree.
+    pub fn deallocating_end(self) {
+        let mut edge = self.forget_node_type();
+        while let Some(parent_edge) = unsafe { edge.into_node().deallocate_and_ascend() } {
+            edge = parent_edge.forget_node_type();
+        }
+    }
+}
 
 impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge> {
     /// Moves the leaf edge handle to the next leaf edge and returns references to the
@@ -396,11 +430,8 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
     /// call this method again subject to its safety conditions, or call counterpart
     /// `next_back_unchecked` subject to its safety conditions.
     pub unsafe fn next_unchecked(&mut self) -> (K, V) {
-        super::mem::replace(self, |leaf_edge| {
-            let kv = unsafe { next_kv_unchecked_dealloc(leaf_edge) };
-            let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
-            let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
-            (kv.next_leaf_edge(), (k, v))
+        super::mem::replace(self, |leaf_edge| unsafe {
+            unwrap_unchecked(leaf_edge.deallocating_next())
         })
     }
 
@@ -417,11 +448,8 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
     /// call this method again subject to its safety conditions, or call counterpart
     /// `next_unchecked` subject to its safety conditions.
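`super::mem::replace` used above is the crate-internal helper for moving a value out through a `&mut`, transforming it, and writing the result back. A simplified sketch of its shape (the real helper in btree/mem.rs aborts via an intrinsic when the closure panics, since unwinding would expose a logically uninitialized `*v`; `std::process::abort` stands in here):

use std::{mem, ptr};

pub fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
    struct PanicGuard;
    impl Drop for PanicGuard {
        fn drop(&mut self) {
            std::process::abort() // never unwind past a hole in *v
        }
    }
    let guard = PanicGuard;
    let value = unsafe { ptr::read(v) }; // move out, leaving a logical hole
    let (new_value, ret) = change(value);
    unsafe { ptr::write(v, new_value) }; // fill the hole
    mem::forget(guard); // defuse: no panic happened
    ret
}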
     pub unsafe fn next_back_unchecked(&mut self) -> (K, V) {
-        super::mem::replace(self, |leaf_edge| {
-            let kv = unsafe { next_back_kv_unchecked_dealloc(leaf_edge) };
-            let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
-            let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
-            (kv.next_back_leaf_edge(), (k, v))
+        super::mem::replace(self, |leaf_edge| unsafe {
+            unwrap_unchecked(leaf_edge.deallocating_next_back())
         })
     }
 }
diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs
index 1d632512c78b4..b56bcd2a496ca 100644
--- a/library/alloc/src/collections/btree/node.rs
+++ b/library/alloc/src/collections/btree/node.rs
@@ -132,7 +132,7 @@ impl<K, V> Root<K, V> {
 }
 
 impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
-    fn new_leaf() -> Self {
+    pub fn new_leaf() -> Self {
         Self::from_new_leaf(Box::new(unsafe { LeafNode::new() }))
     }
 
@@ -142,7 +142,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
-    fn new_internal(child: Root<K, V>) -> Self {
+    pub fn new_internal(child: Root<K, V>) -> Self {
         let mut new_node = Box::new(unsafe { InternalNode::new() });
         new_node.edges[0].write(child.node);
         NodeRef::from_new_internal(new_node, child.height + 1)
@@ -1461,29 +1461,14 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
     }
 }
 
-impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Leaf> {
-    /// Removes any static information asserting that this node is a `Leaf` node.
-    pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
-        NodeRef { height: self.height, node: self.node, _marker: PhantomData }
-    }
-}
-
-impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
-    /// Removes any static information asserting that this node is an `Internal` node.
+impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
+    /// Removes any static information asserting that this node is a `Leaf` or `Internal` node.
     pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
         NodeRef { height: self.height, node: self.node, _marker: PhantomData }
     }
 }
 
-impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
-    pub fn forget_node_type(
-        self,
-    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
-        unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
-    }
-}
-
-impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
+impl<BorrowType, K, V, Type> Handle<NodeRef<BorrowType, K, V, Type>, marker::Edge> {
     pub fn forget_node_type(
         self,
     ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
@@ -1491,15 +1476,7 @@ impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
     }
 }
 
-impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::KV> {
-    pub fn forget_node_type(
-        self,
-    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
-        unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
-    }
-}
-
-impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::KV> {
+impl<BorrowType, K, V, Type> Handle<NodeRef<BorrowType, K, V, Type>, marker::KV> {
     pub fn forget_node_type(
         self,
     ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
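`forget_type` erases compile-time type state: the `Type` parameter is a zero-sized marker that only narrows which methods exist, so erasing it re-tags the reference without moving any data, and one generic impl can now replace two identical per-marker impls. A minimal standalone model of the pattern (invented names):

use std::marker::PhantomData;

mod marker {
    pub enum Leaf {}
    pub enum Internal {}
    pub enum LeafOrInternal {}
}

struct NodeRef<Type> {
    height: usize,
    _marker: PhantomData<Type>,
}

impl NodeRef<marker::Leaf> {
    fn new_leaf() -> Self {
        NodeRef { height: 0, _marker: PhantomData }
    }
}

// One generic impl serves every marker.
impl<Type> NodeRef<Type> {
    fn forget_type(self) -> NodeRef<marker::LeafOrInternal> {
        NodeRef { height: self.height, _marker: PhantomData }
    }
}

fn main() {
    let erased = NodeRef::new_leaf().forget_type();
    assert_eq!(erased.height, 0);
}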
diff --git a/library/alloc/src/collections/btree/set/tests.rs b/library/alloc/src/collections/btree/set/tests.rs
index fd19c0078a748..38a66fcf46ba1 100644
--- a/library/alloc/src/collections/btree/set/tests.rs
+++ b/library/alloc/src/collections/btree/set/tests.rs
@@ -1,10 +1,10 @@
-use super::super::DeterministicRng;
+use super::super::testing::crash_test::{CrashTest, Panic};
+use super::super::testing::rng::DeterministicRng;
 use super::*;
 use crate::vec::Vec;
 use std::cmp::Ordering;
 use std::iter::FromIterator;
 use std::panic::{catch_unwind, AssertUnwindSafe};
-use std::sync::atomic::{AtomicU32, Ordering::SeqCst};
 
 #[test]
 fn test_clone_eq() {
@@ -349,70 +349,33 @@ fn test_drain_filter() {
 
 #[test]
 fn test_drain_filter_drop_panic_leak() {
-    static PREDS: AtomicU32 = AtomicU32::new(0);
-    static DROPS: AtomicU32 = AtomicU32::new(0);
-
-    #[derive(PartialEq, Eq, PartialOrd, Ord)]
-    struct D(i32);
-    impl Drop for D {
-        fn drop(&mut self) {
-            if DROPS.fetch_add(1, SeqCst) == 1 {
-                panic!("panic in `drop`");
-            }
-        }
-    }
-
+    let test = CrashTest::new();
     let mut set = BTreeSet::new();
-    set.insert(D(0));
-    set.insert(D(4));
-    set.insert(D(8));
+    set.insert(test.dummy(0, Panic::Never));
+    set.insert(test.dummy(1, Panic::InDrop));
+    set.insert(test.dummy(2, Panic::Never));
 
-    catch_unwind(move || {
-        drop(set.drain_filter(|d| {
-            PREDS.fetch_add(1u32 << d.0, SeqCst);
-            true
-        }))
-    })
-    .ok();
+    catch_unwind(move || drop(set.drain_filter(|dummy| dummy.query(true)))).ok();
 
-    assert_eq!(PREDS.load(SeqCst), 0x011);
-    assert_eq!(DROPS.load(SeqCst), 3);
+    assert_eq!(test.queried(), 0x011);
+    assert_eq!(test.dropped(), 0x111);
 }
 
 #[test]
 fn test_drain_filter_pred_panic_leak() {
-    static PREDS: AtomicU32 = AtomicU32::new(0);
-    static DROPS: AtomicU32 = AtomicU32::new(0);
-
-    #[derive(PartialEq, Eq, PartialOrd, Ord)]
-    struct D(i32);
-    impl Drop for D {
-        fn drop(&mut self) {
-            DROPS.fetch_add(1, SeqCst);
-        }
-    }
-
+    let test = CrashTest::new();
     let mut set = BTreeSet::new();
-    set.insert(D(0));
-    set.insert(D(4));
-    set.insert(D(8));
-
-    catch_unwind(AssertUnwindSafe(|| {
-        drop(set.drain_filter(|d| {
-            PREDS.fetch_add(1u32 << d.0, SeqCst);
-            match d.0 {
-                0 => true,
-                _ => panic!(),
-            }
-        }))
-    }))
-    .ok();
-
-    assert_eq!(PREDS.load(SeqCst), 0x011);
-    assert_eq!(DROPS.load(SeqCst), 1);
+    set.insert(test.dummy(0, Panic::Never));
+    set.insert(test.dummy(1, Panic::InQuery));
+    set.insert(test.dummy(2, Panic::InQuery));
+
+    catch_unwind(AssertUnwindSafe(|| drop(set.drain_filter(|dummy| dummy.query(true))))).ok();
+
+    assert_eq!(test.queried(), 0x011);
+    assert_eq!(test.dropped(), 0x001);
     assert_eq!(set.len(), 2);
-    assert_eq!(set.first().unwrap().0, 4);
-    assert_eq!(set.last().unwrap().0, 8);
+    assert_eq!(set.first().unwrap().id, 1);
+    assert_eq!(set.last().unwrap().id, 2);
 }
 
 #[test]
diff --git a/library/alloc/src/collections/btree/testing/crash_test.rs b/library/alloc/src/collections/btree/testing/crash_test.rs
new file mode 100644
index 0000000000000..8ae11c8e145e8
--- /dev/null
+++ b/library/alloc/src/collections/btree/testing/crash_test.rs
@@ -0,0 +1,147 @@
+use crate::fmt::Debug;
+use std::cmp::Ordering;
+use std::sync::atomic::{AtomicU64, Ordering::SeqCst};
+
+/// A recording of particular events happening in a test scenario that involves
+/// crash test dummies, some of which may be configured to panic at some point.
+/// Events are `clone`, `drop`, or some anonymous `query`.
+///
+/// Crash test dummies are identified and ordered by an id, so they can be used
+/// as keys in a BTreeMap. The implementation intentionally uses only primitives.
+#[derive(Debug)]
+pub struct CrashTest {
+    bits_per_id: usize,
+    cloned: AtomicU64,
+    dropped: AtomicU64,
+    queried: AtomicU64,
+}
+
+impl CrashTest {
+    /// Sets up the recording of a test scenario that counts how many events
+    /// dummies experience, per event type and per dummy id. Dummy ids are
+    /// limited to the range 0..16, and each counter is a single hexadecimal
+    /// digit, so the dummies sharing an id should experience at most 15
+    /// occurrences of the same event.
+    pub fn new() -> Self {
+        Self {
+            bits_per_id: 4,
+            cloned: AtomicU64::new(0),
+            dropped: AtomicU64::new(0),
+            queried: AtomicU64::new(0),
+        }
+    }
+
+    /// Sets up the recording of a test scenario that counts how many events
+    /// dummies experience, per event type. There is virtually no limit on the
+    /// range of dummy ids or on the number of events happening.
+    pub fn new_totaling() -> Self {
+        Self {
+            bits_per_id: 0,
+            cloned: AtomicU64::new(0),
+            dropped: AtomicU64::new(0),
+            queried: AtomicU64::new(0),
+        }
+    }
+
+    /// Creates a crash test dummy that records what events it experiences
+    /// and optionally panics.
+    pub fn dummy(&self, id: usize, panic: Panic) -> Dummy<'_> {
+        assert!(self.is_supported_id(id));
+        Dummy { id, panic, context: self }
+    }
+
+    /// Returns how many times a dummy has been cloned. If set up by `new`,
+    /// this is a hexadecimal composition of the count for each dummy id.
+    /// If set up by `new_totaling`, this is the sum over all dummies.
+    pub fn cloned(&self) -> u64 {
+        self.cloned.load(SeqCst)
+    }
+
+    /// Returns how many times a dummy has been dropped. If set up by `new`,
+    /// this is a hexadecimal composition of the count for each dummy id.
+    /// If set up by `new_totaling`, this is the sum over all dummies.
+    pub fn dropped(&self) -> u64 {
+        self.dropped.load(SeqCst)
+    }
+
+    /// Returns how many times a dummy has been queried. If set up by `new`,
+    /// this is a hexadecimal composition of the count for each dummy id.
+    /// If set up by `new_totaling`, this is the sum over all dummies.
+    pub fn queried(&self) -> u64 {
+        self.queried.load(SeqCst)
+    }
+
+    /// Whether there is room for the id as a counter in our registers.
+    fn is_supported_id(&self, id: usize) -> bool {
+        (id + 1) * self.bits_per_id <= 64
+    }
+
+    /// Value of the lowest bit of the id's counter in our registers,
+    /// i.e., the amount to add in order to record one event.
+    fn bit(&self, id: usize) -> u64 {
+        1 << (id * self.bits_per_id)
+    }
+}
+
+#[derive(Debug)]
+pub struct Dummy<'a> {
+    pub id: usize,
+    context: &'a CrashTest,
+    panic: Panic,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Panic {
+    Never,
+    InClone,
+    InDrop,
+    InQuery,
+}
+
+impl Dummy<'_> {
+    /// Some anonymous query, the result of which is already given.
+    pub fn query<R>(&self, result: R) -> R {
+        self.context.queried.fetch_add(self.context.bit(self.id), SeqCst);
+        if self.panic == Panic::InQuery {
+            panic!("panic in `query`");
+        }
+        result
+    }
+}
+
+impl Clone for Dummy<'_> {
+    fn clone(&self) -> Self {
+        self.context.cloned.fetch_add(self.context.bit(self.id), SeqCst);
+        if self.panic == Panic::InClone {
+            panic!("panic in `clone`");
+        }
+        Self { id: self.id, context: self.context, panic: Panic::Never }
+    }
+}
+
+impl Drop for Dummy<'_> {
+    fn drop(&mut self) {
+        self.context.dropped.fetch_add(self.context.bit(self.id), SeqCst);
+        if self.panic == Panic::InDrop {
+            panic!("panic in `drop`");
+        }
+    }
+}
+
+impl PartialOrd for Dummy<'_> {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        self.id.partial_cmp(&other.id)
+    }
+}
+
+impl Ord for Dummy<'_> {
+    fn cmp(&self, other: &Self) -> Ordering {
+        self.id.cmp(&other.id)
+    }
+}
+
+impl PartialEq for Dummy<'_> {
+    fn eq(&self, other: &Self) -> bool {
+        self.id.eq(&other.id)
+    }
+}
+
+impl Eq for Dummy<'_> {}
diff --git a/library/alloc/src/collections/btree/testing/mod.rs b/library/alloc/src/collections/btree/testing/mod.rs
new file mode 100644
index 0000000000000..7a094f8a59522
--- /dev/null
+++ b/library/alloc/src/collections/btree/testing/mod.rs
@@ -0,0 +1,3 @@
+pub mod crash_test;
+pub mod ord_chaos;
+pub mod rng;
diff --git a/library/alloc/src/collections/btree/map/tests/ord_chaos.rs b/library/alloc/src/collections/btree/testing/ord_chaos.rs
similarity index 100%
rename from library/alloc/src/collections/btree/map/tests/ord_chaos.rs
rename to library/alloc/src/collections/btree/testing/ord_chaos.rs
diff --git a/library/alloc/src/collections/btree/testing/rng.rs b/library/alloc/src/collections/btree/testing/rng.rs
new file mode 100644
index 0000000000000..ecf543bee035a
--- /dev/null
+++ b/library/alloc/src/collections/btree/testing/rng.rs
@@ -0,0 +1,28 @@
+/// XorShiftRng
+pub struct DeterministicRng {
+    count: usize,
+    x: u32,
+    y: u32,
+    z: u32,
+    w: u32,
+}
+
+impl DeterministicRng {
+    pub fn new() -> Self {
+        DeterministicRng { count: 0, x: 0x193a6754, y: 0xa8a7d469, z: 0x97830e05, w: 0x113ba7bb }
+    }
+
+    /// Guarantees that each returned number is unique.
+    pub fn next(&mut self) -> u32 {
+        self.count += 1;
+        assert!(self.count <= 70029);
+        let x = self.x;
+        let t = x ^ (x << 11);
+        self.x = self.y;
+        self.y = self.z;
+        self.z = self.w;
+        let w_ = self.w;
+        self.w = w_ ^ (w_ >> 19) ^ (t ^ (t >> 8));
+        self.w
+    }
+}
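Finally, a hypothetical sketch of how the two helpers combine in a test, to show the intended reading of the counters (the test name and assertion values here are illustrative, not from the patch):

#[test]
fn sketch_crash_test_usage() {
    let test = CrashTest::new();
    let mut rng = DeterministicRng::new();
    let mut map = BTreeMap::new();
    // Dummies compare by id alone, so the map orders and dedups on id.
    map.insert(test.dummy(0, Panic::Never), rng.next());
    map.insert(test.dummy(1, Panic::Never), rng.next());
    drop(map);
    // One hex digit per id: each dummy was dropped exactly once.
    assert_eq!(test.dropped(), 0x11);
}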