Auto merge of #87696 - ssomers:btree_lazy_iterator_cleanup, r=Mark-Simulacrum

BTree: merge the complication introduced by #81486 and #86031

Also:
- Deallocate the last few tree nodes as soon as an `into_iter` iterator steps past the end, instead of waiting for the iterator itself to be dropped (done mainly so that more code can be shared).
- Symmetric code for backward iteration.
- Mark the methods on dying handles `unsafe`, modelling dying handles after raw pointers: it is the caller's responsibility to use them safely (see the sketch below).

r? `@Mark-Simulacrum`
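
For readers who don't live in the B-tree internals, here is a minimal, self-contained sketch of the pattern this PR converges on, applied to a flat buffer instead of a tree. All names here (`VecIntoIter`, `dying_next`, `deallocating_end`) only mirror the real internals; it is an illustration, not the actual library code:

```rust
use std::mem::ManuallyDrop;
use std::ptr;

/// A toy `into_iter` over a heap buffer, shaped like the new `IntoIter`:
/// a "dying" core method hands out raw-pointer-like handles, the safe `next`
/// consumes them immediately, and `Drop` reuses the very same loop.
struct VecIntoIter<T> {
    buf: *mut T, // owned, already "dying" allocation
    cap: usize,
    front: usize,
    len: usize, // elements not yet yielded
}

impl<T> VecIntoIter<T> {
    fn new(v: Vec<T>) -> Self {
        let mut v = ManuallyDrop::new(v);
        Self { buf: v.as_mut_ptr(), cap: v.capacity(), front: 0, len: v.len() }
    }

    /// Core of `next`: returns a dying handle (here just a raw pointer) to the next
    /// element, or frees the backing storage and returns `None` once exhausted.
    /// The caller must consume each handle exactly once before calling this again.
    fn dying_next(&mut self) -> Option<*mut T> {
        if self.len == 0 {
            self.deallocating_end();
            None
        } else {
            let handle = unsafe { self.buf.add(self.front) };
            self.front += 1;
            self.len -= 1;
            Some(handle)
        }
    }

    /// Free the storage as soon as iteration steps past the end; idempotent.
    fn deallocating_end(&mut self) {
        if self.cap != 0 {
            // SAFETY: buf/cap come from a Vec, and no element is owned by the
            // buffer anymore (yielded ones were moved out, the rest dropped below).
            unsafe { drop(Vec::from_raw_parts(self.buf, 0, self.cap)) };
            self.cap = 0;
        }
    }
}

impl<T> Iterator for VecIntoIter<T> {
    type Item = T;
    fn next(&mut self) -> Option<T> {
        // SAFETY: we consume the dying handle immediately.
        self.dying_next().map(|p| unsafe { ptr::read(p) })
    }
}

impl<T> Drop for VecIntoIter<T> {
    fn drop(&mut self) {
        // Same loop as `next`, but the values are dropped in place instead of
        // returned; the storage is freed by the final `dying_next` call.
        while let Some(p) = self.dying_next() {
            // SAFETY: each dying handle is consumed exactly once.
            unsafe { ptr::drop_in_place(p) };
        }
    }
}

fn main() {
    // Dropping a half-consumed iterator drops the rest and frees the buffer.
    let mut it = VecIntoIter::new(vec![String::from("a"), String::from("b")]);
    assert_eq!(it.next().as_deref(), Some("a"));
    drop(it);

    // Running to completion frees the buffer before the iterator is even dropped.
    let total: usize = VecIntoIter::new(vec![1usize, 2, 3]).sum();
    assert_eq!(total, 6);
}
```

In the real code the same `dying_next` / `dying_next_back` pair backs `next`, `next_back` and `Drop`, and the `Drop` impl additionally wraps its loop in a `DropGuard` so that a panicking key/value destructor still deallocates the remaining nodes.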
bors 2021-08-16 03:45:26 +00:00
commit 23461b210f
3 changed files with 56 additions and 56 deletions

View file

@@ -161,9 +161,7 @@ pub struct BTreeMap<K, V> {
#[stable(feature = "btree_drop", since = "1.7.0")]
unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for BTreeMap<K, V> {
fn drop(&mut self) {
-if let Some(root) = self.root.take() {
-Dropper { front: root.into_dying().first_leaf_edge(), remaining_length: self.length };
-}
+drop(unsafe { ptr::read(self) }.into_iter())
}
}
@@ -352,14 +350,6 @@ impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IntoIter<K, V> {
}
}
-/// A simplified version of `IntoIter` that is not double-ended and has only one
-/// purpose: to drop the remainder of an `IntoIter`. Therefore it also serves to
-/// drop an entire tree without the need to first look up a `back` leaf edge.
-struct Dropper<K, V> {
-front: Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge>,
-remaining_length: usize,
-}
/// An iterator over the keys of a `BTreeMap`.
///
/// This `struct` is created by the [`keys`] method on [`BTreeMap`]. See its
@@ -1458,47 +1448,57 @@ impl<K, V> IntoIterator for BTreeMap<K, V> {
}
}
-impl<K, V> Drop for Dropper<K, V> {
+#[stable(feature = "btree_drop", since = "1.7.0")]
+impl<K, V> Drop for IntoIter<K, V> {
fn drop(&mut self) {
-// Similar to advancing a non-fusing iterator.
-fn next_or_end<K, V>(
-this: &mut Dropper<K, V>,
-) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>>
-{
-if this.remaining_length == 0 {
-unsafe { ptr::read(&this.front).deallocating_end() }
-None
-} else {
-this.remaining_length -= 1;
-Some(unsafe { this.front.deallocating_next_unchecked() })
-}
-}
-struct DropGuard<'a, K, V>(&'a mut Dropper<K, V>);
+struct DropGuard<'a, K, V>(&'a mut IntoIter<K, V>);
impl<'a, K, V> Drop for DropGuard<'a, K, V> {
fn drop(&mut self) {
// Continue the same loop we perform below. This only runs when unwinding, so we
// don't have to care about panics this time (they'll abort).
-while let Some(kv) = next_or_end(&mut self.0) {
-kv.drop_key_val();
+while let Some(kv) = self.0.dying_next() {
+// SAFETY: we consume the dying handle immediately.
+unsafe { kv.drop_key_val() };
}
}
}
-while let Some(kv) = next_or_end(self) {
+while let Some(kv) = self.dying_next() {
let guard = DropGuard(self);
-kv.drop_key_val();
+// SAFETY: we don't touch the tree before consuming the dying handle.
+unsafe { kv.drop_key_val() };
mem::forget(guard);
}
}
}
-#[stable(feature = "btree_drop", since = "1.7.0")]
-impl<K, V> Drop for IntoIter<K, V> {
-fn drop(&mut self) {
-if let Some(front) = self.range.take_front() {
-Dropper { front, remaining_length: self.length };
+impl<K, V> IntoIter<K, V> {
+/// Core of a `next` method returning a dying KV handle,
+/// invalidated by further calls to this function and some others.
+fn dying_next(
+&mut self,
+) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>> {
+if self.length == 0 {
+self.range.deallocating_end();
+None
+} else {
+self.length -= 1;
+Some(unsafe { self.range.deallocating_next_unchecked() })
+}
+}
+/// Core of a `next_back` method returning a dying KV handle,
+/// invalidated by further calls to this function and some others.
+fn dying_next_back(
+&mut self,
+) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>> {
+if self.length == 0 {
+self.range.deallocating_end();
+None
+} else {
+self.length -= 1;
+Some(unsafe { self.range.deallocating_next_back_unchecked() })
+}
+}
}
@@ -1508,13 +1508,8 @@ impl<K, V> Iterator for IntoIter<K, V> {
type Item = (K, V);
fn next(&mut self) -> Option<(K, V)> {
-if self.length == 0 {
-None
-} else {
-self.length -= 1;
-let kv = unsafe { self.range.deallocating_next_unchecked() };
-Some(kv.into_key_val())
-}
+// SAFETY: we consume the dying handle immediately.
+self.dying_next().map(unsafe { |kv| kv.into_key_val() })
}
fn size_hint(&self) -> (usize, Option<usize>) {
@@ -1525,13 +1520,8 @@ impl<K, V> Iterator for IntoIter<K, V> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
fn next_back(&mut self) -> Option<(K, V)> {
-if self.length == 0 {
-None
-} else {
-self.length -= 1;
-let kv = unsafe { self.range.deallocating_next_back_unchecked() };
-Some(kv.into_key_val())
-}
+// SAFETY: we consume the dying handle immediately.
+self.dying_next_back().map(unsafe { |kv| kv.into_key_val() })
}
}

View file

@@ -167,8 +167,7 @@ impl<'a, K, V> LazyLeafRange<marker::ValMut<'a>, K, V> {
}
impl<K, V> LazyLeafRange<marker::Dying, K, V> {
#[inline]
-pub fn take_front(
+fn take_front(
&mut self,
) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge>> {
match self.front.take()? {
@@ -194,6 +193,13 @@ impl<K, V> LazyLeafRange<marker::Dying, K, V> {
let back = self.init_back().unwrap();
unsafe { back.deallocating_next_back_unchecked() }
}
+#[inline]
+pub fn deallocating_end(&mut self) {
+if let Some(front) = self.take_front() {
+front.deallocating_end()
+}
+}
}
impl<BorrowType: marker::BorrowType, K, V> LazyLeafRange<BorrowType, K, V> {
@@ -488,7 +494,7 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
/// both sides of the tree, and have hit the same edge. As it is intended
/// only to be called when all keys and values have been returned,
/// no cleanup is done on any of the keys or values.
-pub fn deallocating_end(self) {
+fn deallocating_end(self) {
let mut edge = self.forget_node_type();
while let Some(parent_edge) = unsafe { edge.into_node().deallocate_and_ascend() } {
edge = parent_edge.forget_node_type();
@@ -565,7 +571,7 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
///
/// The only safe way to proceed with the updated handle is to compare it, drop it,
/// or call this method or counterpart `deallocating_next_back_unchecked` again.
-pub unsafe fn deallocating_next_unchecked(
+unsafe fn deallocating_next_unchecked(
&mut self,
) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
super::mem::replace(self, |leaf_edge| unsafe { leaf_edge.deallocating_next().unwrap() })

View file

@@ -1058,7 +1058,9 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>
impl<K, V, NodeType> Handle<NodeRef<marker::Dying, K, V, NodeType>, marker::KV> {
/// Extracts the key and value that the KV handle refers to.
-pub fn into_key_val(mut self) -> (K, V) {
+/// # Safety
+/// The node that the handle refers to must not yet have been deallocated.
+pub unsafe fn into_key_val(mut self) -> (K, V) {
debug_assert!(self.idx < self.node.len());
let leaf = self.node.as_leaf_dying();
unsafe {
@@ -1069,8 +1071,10 @@ impl<K, V, NodeType> Handle<NodeRef<marker::Dying, K, V, NodeType>, marker::KV>
}
/// Drops the key and value that the KV handle refers to.
+/// # Safety
+/// The node that the handle refers to must not yet have been deallocated.
#[inline]
-pub fn drop_key_val(mut self) {
+pub unsafe fn drop_key_val(mut self) {
debug_assert!(self.idx < self.node.len());
let leaf = self.node.as_leaf_dying();
unsafe {