From fe3d3fc85c86283ba590bb3f0e3d2828ac05ec4e Mon Sep 17 00:00:00 2001 From: David Koloski Date: Fri, 18 Feb 2022 00:17:04 -0500 Subject: [PATCH 1/2] Relax `Allocator` bounds into pin-safe trait Because `Allocator` requires that returned memory blocks remain valid until the instance and all clones are dropped, some types of allocators cannot fulfill the safety requirements for the trait. This change relaxes the safety requirements of `Allocator` to blocks remaining valid until the allocator becomes unreachable and moves the bound into a separate `PinSafeAllocator` trait. It also relaxes some overly cautious bounds on the `Unpin` impls for `Box` since unpinning a box doesn't require any special consideration. --- library/alloc/src/alloc.rs | 7 +++++ library/alloc/src/boxed.rs | 41 ++++++++++++------------- library/alloc/src/rc.rs | 1 + library/alloc/src/sync.rs | 2 ++ library/alloc/tests/boxed.rs | 5 ++- library/core/src/alloc/mod.rs | 28 ++++++++++++++--- src/test/ui/box/leak-alloc.rs | 1 + src/test/ui/box/leak-alloc.stderr | 29 ++++++++++++----- src/test/ui/box/pin-safe-alloc.rs | 36 ++++++++++++++++++++++ src/test/ui/box/pin-unsafe-alloc.rs | 34 ++++++++++++++++++++ src/test/ui/box/pin-unsafe-alloc.stderr | 21 +++++++++++++ 11 files changed, 171 insertions(+), 34 deletions(-) create mode 100644 src/test/ui/box/pin-safe-alloc.rs create mode 100644 src/test/ui/box/pin-unsafe-alloc.rs create mode 100644 src/test/ui/box/pin-unsafe-alloc.stderr diff --git a/library/alloc/src/alloc.rs b/library/alloc/src/alloc.rs index efdc86bf57a8a..40880ce93abc1 100644 --- a/library/alloc/src/alloc.rs +++ b/library/alloc/src/alloc.rs @@ -44,6 +44,8 @@ extern "Rust" { /// to the allocator registered with the `#[global_allocator]` attribute /// if there is one, or the `std` crate’s default. /// +/// This type is always guaranteed to implement [`PinSafeAllocator`]. +/// /// Note: while this type is unstable, the functionality it provides can be /// accessed through the [free functions in `alloc`](self#functions). #[unstable(feature = "allocator_api", issue = "32838")] @@ -314,6 +316,11 @@ unsafe impl Allocator for Global { } } +// SAFETY: memory blocks allocated by `Global` are not invalidated when `Global` is dropped. +#[unstable(feature = "allocator_api", issue = "32838")] +#[cfg(not(test))] +unsafe impl PinSafeAllocator for Global {} + /// The allocator for unique pointers. #[cfg(all(not(no_global_oom_handling), not(test)))] #[lang = "exchange_malloc"] diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs index c1ceeb0deb837..6383fea862425 100644 --- a/library/alloc/src/boxed.rs +++ b/library/alloc/src/boxed.rs @@ -168,7 +168,7 @@ use core::task::{Context, Poll}; #[cfg(not(no_global_oom_handling))] use crate::alloc::{handle_alloc_error, WriteCloneIntoRaw}; -use crate::alloc::{AllocError, Allocator, Global, Layout}; +use crate::alloc::{AllocError, Allocator, Global, Layout, PinSafeAllocator}; #[cfg(not(no_global_oom_handling))] use crate::borrow::Cow; use crate::raw_vec::RawVec; @@ -1123,8 +1123,12 @@ impl Box { // recognized as "releasing" the unique pointer to permit aliased raw accesses, // so all raw pointer methods have to go through `Box::leak`. Turning *that* to a raw pointer // behaves correctly. 
- let alloc = unsafe { ptr::read(&b.1) }; - (Unique::from(Box::leak(b)), alloc) + let manually_drop = mem::ManuallyDrop::new(b); + // SAFETY: unique ownership of the memory block moves into `ptr` + let ptr = unsafe { &mut *manually_drop.0.as_ptr() }; + // SAFETY: moving the allocator will not invalidate `ptr` + let alloc = unsafe { ptr::read(&manually_drop.1) }; + (Unique::from(ptr), alloc) } /// Returns a reference to the underlying allocator. @@ -1179,9 +1183,13 @@ impl Box { #[inline] pub const fn leak<'a>(b: Self) -> &'a mut T where - A: 'a, + A: ~const PinSafeAllocator, { - unsafe { &mut *mem::ManuallyDrop::new(b).0.as_ptr() } + let (ptr, alloc) = Box::into_unique(b); + mem::forget(alloc); + // SAFETY: ptr will remain valid for any lifetime since `alloc` is never + // dropped + unsafe { &mut *ptr.as_ptr() } } /// Converts a `Box` into a `Pin>`. If `T` does not implement [`Unpin`], then @@ -1218,7 +1226,7 @@ impl Box { #[rustc_const_unstable(feature = "const_box", issue = "92521")] pub const fn into_pin(boxed: Self) -> Pin where - A: 'static, + A: ~const PinSafeAllocator, { // It's not possible to move or replace the insides of a `Pin>` // when `T: !Unpin`, so it's safe to pin it directly without any @@ -1454,9 +1462,9 @@ impl From for Box { #[stable(feature = "pin", since = "1.33.0")] #[rustc_const_unstable(feature = "const_box", issue = "92521")] -impl const From> for Pin> +impl const From> for Pin> where - A: 'static, + A: ~const PinSafeAllocator, { /// Converts a `Box` into a `Pin>`. If `T` does not implement [`Unpin`], then /// `*boxed` will be pinned in memory and unable to be moved. @@ -2033,13 +2041,10 @@ impl AsMut for Box { */ #[stable(feature = "pin", since = "1.33.0")] #[rustc_const_unstable(feature = "const_box", issue = "92521")] -impl const Unpin for Box where A: 'static {} +impl const Unpin for Box {} #[unstable(feature = "generator_trait", issue = "43122")] -impl + Unpin, R, A: Allocator> Generator for Box -where - A: 'static, -{ +impl + Unpin, R, A: Allocator> Generator for Box { type Yield = G::Yield; type Return = G::Return; @@ -2049,10 +2054,7 @@ where } #[unstable(feature = "generator_trait", issue = "43122")] -impl, R, A: Allocator> Generator for Pin> -where - A: 'static, -{ +impl, R, A: Allocator> Generator for Pin> { type Yield = G::Yield; type Return = G::Return; @@ -2062,10 +2064,7 @@ where } #[stable(feature = "futures_api", since = "1.36.0")] -impl Future for Box -where - A: 'static, -{ +impl Future for Box { type Output = F::Output; fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index b89b03683baef..ddd22b364e77a 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -630,6 +630,7 @@ impl Rc { #[stable(feature = "pin", since = "1.33.0")] #[must_use] pub fn pin(value: T) -> Pin> { + // SAFETY: Global is a pin-safe allocator. unsafe { Pin::new_unchecked(Rc::new(value)) } } diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index 4c03cc3ed158a..86095e6f6f3b2 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -530,6 +530,7 @@ impl Arc { #[stable(feature = "pin", since = "1.33.0")] #[must_use] pub fn pin(data: T) -> Pin> { + // SAFETY: Global is a pin-safe allocator. unsafe { Pin::new_unchecked(Arc::new(data)) } } @@ -537,6 +538,7 @@ impl Arc { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn try_pin(data: T) -> Result>, AllocError> { + // SAFETY: Global is a pin-safe allocator. 
unsafe { Ok(Pin::new_unchecked(Arc::try_new(data)?)) } } diff --git a/library/alloc/tests/boxed.rs b/library/alloc/tests/boxed.rs index 9e5123be98990..bea3b6c256711 100644 --- a/library/alloc/tests/boxed.rs +++ b/library/alloc/tests/boxed.rs @@ -1,4 +1,4 @@ -use core::alloc::{AllocError, Allocator, Layout}; +use core::alloc::{AllocError, Allocator, Layout, PinSafeAllocator}; use core::cell::Cell; use core::mem::MaybeUninit; use core::ptr::NonNull; @@ -151,6 +151,9 @@ unsafe impl const Allocator for ConstAllocator { } } +// SAFETY: Memory allocated by `ConstAllocator` is never invalidated. +unsafe impl const PinSafeAllocator for ConstAllocator {} + #[test] fn const_box() { const VALUE: u32 = { diff --git a/library/core/src/alloc/mod.rs b/library/core/src/alloc/mod.rs index 6cc6e359e65b6..2bc9b35f26e06 100644 --- a/library/core/src/alloc/mod.rs +++ b/library/core/src/alloc/mod.rs @@ -87,12 +87,14 @@ impl fmt::Display for AllocError { /// # Safety /// /// * Memory blocks returned from an allocator must point to valid memory and retain their validity -/// until the instance and all of its clones are dropped, +/// until the instance and all of its clones are dropped, forgotten, or otherwise rendered +/// inaccessible. The validity of memory block is tied directly to the validity of the allocator +/// group that it is allocated from. /// -/// * cloning or moving the allocator must not invalidate memory blocks returned from this -/// allocator. A cloned allocator must behave like the same allocator, and +/// * Cloning or moving the allocator must not invalidate memory blocks returned from this +/// allocator. A cloned allocator must behave like the same allocator. /// -/// * any pointer to a memory block which is [*currently allocated*] may be passed to any other +/// * Any pointer to a memory block which is [*currently allocated*] may be passed to any other /// method of the allocator. /// /// [*currently allocated*]: #currently-allocated-memory @@ -408,3 +410,21 @@ where unsafe { (**self).shrink(ptr, old_layout, new_layout) } } } + +/// An [`Allocator`] which returns memory blocks that can safely be pinned. +/// +/// Unlike `Allocator`, `PinSafeAllocator` guarantees that forgetting an instance will cause any +/// allocated memory to remain valid indefinitely. +/// +/// # Safety +/// +/// In addition to the safety guarantees of `Allocator`, memory blocks returned from a +/// `PinSafeAllocator` must retain their validity until the instance and all of its clones are +/// dropped. +#[unstable(feature = "allocator_api", issue = "32838")] +pub unsafe trait PinSafeAllocator: Allocator {} + +#[unstable(feature = "allocator_api", issue = "32838")] +// SAFETY: Allocators that live forever never become unreachable, and so never invalidate their +// allocated memory blocks. 
+unsafe impl PinSafeAllocator for &'static A {} diff --git a/src/test/ui/box/leak-alloc.rs b/src/test/ui/box/leak-alloc.rs index 3f0f39f448b91..5d0db792f64ce 100644 --- a/src/test/ui/box/leak-alloc.rs +++ b/src/test/ui/box/leak-alloc.rs @@ -22,6 +22,7 @@ fn use_value(_: u32) {} fn main() { let alloc = Alloc {}; let boxed = Box::new_in(10, alloc.by_ref()); + //~^ ERROR `alloc` does not live long enough let theref = Box::leak(boxed); drop(alloc); //~^ ERROR cannot move out of `alloc` because it is borrowed diff --git a/src/test/ui/box/leak-alloc.stderr b/src/test/ui/box/leak-alloc.stderr index e8a6ad0995a0f..dc2a0ea23426d 100644 --- a/src/test/ui/box/leak-alloc.stderr +++ b/src/test/ui/box/leak-alloc.stderr @@ -1,15 +1,28 @@ +error[E0597]: `alloc` does not live long enough + --> $DIR/leak-alloc.rs:24:33 + | +LL | let boxed = Box::new_in(10, alloc.by_ref()); + | ^^^^^^^^^^^^^^ + | | + | borrowed value does not live long enough + | argument requires that `alloc` is borrowed for `'static` +... +LL | } + | - `alloc` dropped here while still borrowed + error[E0505]: cannot move out of `alloc` because it is borrowed - --> $DIR/leak-alloc.rs:26:10 + --> $DIR/leak-alloc.rs:27:10 | LL | let boxed = Box::new_in(10, alloc.by_ref()); - | -------------- borrow of `alloc` occurs here -LL | let theref = Box::leak(boxed); + | -------------- + | | + | borrow of `alloc` occurs here + | argument requires that `alloc` is borrowed for `'static` +... LL | drop(alloc); | ^^^^^ move out of `alloc` occurs here -LL | -LL | use_value(*theref) - | ------- borrow later used here -error: aborting due to previous error +error: aborting due to 2 previous errors -For more information about this error, try `rustc --explain E0505`. +Some errors have detailed explanations: E0505, E0597. +For more information about an error, try `rustc --explain E0505`. 
diff --git a/src/test/ui/box/pin-safe-alloc.rs b/src/test/ui/box/pin-safe-alloc.rs new file mode 100644 index 0000000000000..699be28df2abc --- /dev/null +++ b/src/test/ui/box/pin-safe-alloc.rs @@ -0,0 +1,36 @@ +// run-pass +#![feature(allocator_api)] +#![feature(box_into_pin)] + +use std::alloc::{AllocError, Allocator, Layout, PinSafeAllocator, System}; +use std::ptr::NonNull; +use std::marker::PhantomPinned; +use std::boxed::Box; + +struct Alloc {} + +unsafe impl Allocator for Alloc { + fn allocate(&self, layout: Layout) -> Result, AllocError> { + System.allocate(layout) + } + + unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { + System.deallocate(ptr, layout) + } +} + +unsafe impl PinSafeAllocator for Alloc {} + +fn main() { + struct MyPinned { + _value: u32, + _pinned: PhantomPinned, + } + + let value = MyPinned { + _value: 0, + _pinned: PhantomPinned, + }; + let alloc = Alloc {}; + let _ = Box::pin_in(value, alloc); +} diff --git a/src/test/ui/box/pin-unsafe-alloc.rs b/src/test/ui/box/pin-unsafe-alloc.rs new file mode 100644 index 0000000000000..49485f4763443 --- /dev/null +++ b/src/test/ui/box/pin-unsafe-alloc.rs @@ -0,0 +1,34 @@ +#![feature(allocator_api)] +#![feature(box_into_pin)] + +use std::alloc::{AllocError, Allocator, Layout, System}; +use std::ptr::NonNull; +use std::marker::PhantomPinned; +use std::boxed::Box; + +struct Alloc {} + +unsafe impl Allocator for Alloc { + fn allocate(&self, layout: Layout) -> Result, AllocError> { + System.allocate(layout) + } + + unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { + System.deallocate(ptr, layout) + } +} + +fn main() { + struct MyPinned { + _value: u32, + _pinned: PhantomPinned, + } + + let value = MyPinned { + _value: 0, + _pinned: PhantomPinned, + }; + let alloc = Alloc {}; + let _ = Box::pin_in(value, alloc); + //~^ ERROR the trait bound `Alloc: PinSafeAllocator` is not satisfied +} diff --git a/src/test/ui/box/pin-unsafe-alloc.stderr b/src/test/ui/box/pin-unsafe-alloc.stderr new file mode 100644 index 0000000000000..09996df418975 --- /dev/null +++ b/src/test/ui/box/pin-unsafe-alloc.stderr @@ -0,0 +1,21 @@ +error[E0277]: the trait bound `Alloc: PinSafeAllocator` is not satisfied + --> $DIR/pin-unsafe-alloc.rs:32:32 + | +LL | let _ = Box::pin_in(value, alloc); + | ----------- ^^^^^ expected an implementor of trait `PinSafeAllocator` + | | + | required by a bound introduced by this call + | +note: required by a bound in `Box::::pin_in` + --> $SRC_DIR/alloc/src/boxed.rs:LL:COL + | +LL | A: ~const Allocator + ~const PinSafeAllocator + ~const Drop + ~const Destruct, + | ^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `Box::::pin_in` +help: consider borrowing here + | +LL | let _ = Box::pin_in(value, &alloc); + | + + +error: aborting due to previous error + +For more information about this error, try `rustc --explain E0277`. From 483c8a30a62ab0473a9db756c62536361c1a875c Mon Sep 17 00:00:00 2001 From: David Koloski Date: Sun, 24 Jul 2022 14:27:36 -0400 Subject: [PATCH 2/2] Push new allocator trait through `BTreeMap` The `BTreeMap` implementation was updated with support for custom allocators, but many of the methods that take allocators do not require that the provided allocator is the same allocator that is used for the owning `BTreeMap`. This could lead to situations where nodes are created with a different allocator, which would violate the safety conditions of `deallocate` on drop. 
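Concretely, the hazard is the `Allocator::deallocate` contract: a block may
only be returned to the allocator (or a clone of it) that allocated it. A
minimal sketch of the required pairing, assuming a nightly toolchain with
`allocator_api` enabled (the use of `System` here is purely illustrative, not
taken from the tree):

    #![feature(allocator_api)]

    use std::alloc::{Allocator, Layout, System};

    fn main() {
        let layout = Layout::new::<u32>();
        // Allocate through one allocator...
        let block = System.allocate(layout).unwrap();
        // ...and return the block to that *same* allocator. Handing it to a
        // different allocator (for example the map's `self.alloc` when the
        // node was created from a caller-supplied `alloc`) would violate the
        // safety contract of `deallocate`.
        unsafe { System.deallocate(block.cast(), layout) };
    }

Marking the node constructors `unsafe` with an "`alloc` must be the allocator
for the owning `BTreeMap`" condition pushes that proof obligation to the call
sites inside the collection.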
This was discovered because the changes to `Box::leak` make it invalid to call without upgrading the allocator to `A: PinSafeAllocator`. This also updates `Box::pin_in` with the new `PinSafeAllocator` trait. --- library/alloc/src/boxed.rs | 2 +- library/alloc/src/collections/btree/append.rs | 36 +++++-- library/alloc/src/collections/btree/map.rs | 40 ++++--- .../alloc/src/collections/btree/map/entry.rs | 45 ++++---- .../alloc/src/collections/btree/map/tests.rs | 9 +- library/alloc/src/collections/btree/node.rs | 101 ++++++++++++++---- .../alloc/src/collections/btree/node/tests.rs | 9 +- library/alloc/src/collections/btree/split.rs | 14 ++- src/test/ui/box/leak-alloc.stderr | 16 +-- src/test/ui/box/pin-safe-alloc.rs | 1 - src/test/ui/box/pin-unsafe-alloc.rs | 1 - src/test/ui/box/pin-unsafe-alloc.stderr | 4 +- 12 files changed, 203 insertions(+), 75 deletions(-) diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs index 6383fea862425..f1064e702c5e5 100644 --- a/library/alloc/src/boxed.rs +++ b/library/alloc/src/boxed.rs @@ -575,7 +575,7 @@ impl Box { #[inline(always)] pub const fn pin_in(x: T, alloc: A) -> Pin where - A: 'static + ~const Allocator + ~const Destruct, + A: ~const Allocator + ~const PinSafeAllocator + ~const Destruct, { Self::into_pin(Self::new_in(x, alloc)) } diff --git a/library/alloc/src/collections/btree/append.rs b/library/alloc/src/collections/btree/append.rs index b6989afb6255d..c3230add01137 100644 --- a/library/alloc/src/collections/btree/append.rs +++ b/library/alloc/src/collections/btree/append.rs @@ -15,7 +15,11 @@ impl Root { /// a `BTreeMap`, both iterators should produce keys in strictly ascending /// order, each greater than all keys in the tree, including any keys /// already in the tree upon entry. - pub fn append_from_sorted_iters( + /// + /// # Safety + /// + /// `alloc` must be the allocator for the owning `BTreeMap`. + pub unsafe fn append_from_sorted_iters( &mut self, left: I, right: I, @@ -29,14 +33,25 @@ impl Root { let iter = MergeIter(MergeIterInner::new(left, right)); // Meanwhile, we build a tree from the sorted sequence in linear time. - self.bulk_push(iter, length, alloc) + + // SAFETY: The caller has guaranteed that `alloc` is the allocator for the owning + // `BTreeMap`. + unsafe { self.bulk_push(iter, length, alloc) } } /// Pushes all key-value pairs to the end of the tree, incrementing a /// `length` variable along the way. The latter makes it easier for the /// caller to avoid a leak when the iterator panicks. - pub fn bulk_push(&mut self, iter: I, length: &mut usize, alloc: A) - where + /// + /// # Safety + /// + /// `alloc` must be the allocator for the owning `BTreeMap`. + pub unsafe fn bulk_push( + &mut self, + iter: I, + length: &mut usize, + alloc: A, + ) where I: Iterator, { let mut cur_node = self.borrow_mut().last_leaf_edge().into_node(); @@ -64,7 +79,10 @@ impl Root { } Err(_) => { // We are at the top, create a new root node and push there. - open_node = self.push_internal_level(alloc.clone()); + + // SAFETY: The caller has guaranteed that `alloc` is the allocator for + // the owning `BTreeMap`. + open_node = unsafe { self.push_internal_level(alloc.clone()) }; break; } } @@ -72,9 +90,13 @@ impl Root { // Push key-value pair and new right subtree. let tree_height = open_node.height() - 1; - let mut right_tree = Root::new(alloc.clone()); + // SAFETY: The caller has guaranteed that `alloc` is the allocator for the owning + // `BTreeMap`. 
+ let mut right_tree = unsafe { Root::new(alloc.clone()) }; for _ in 0..tree_height { - right_tree.push_internal_level(alloc.clone()); + // SAFETY: The caller has guaranteed that `alloc` is the allocator for the + // owning `BTreeMap`. + unsafe { right_tree.push_internal_level(alloc.clone()) }; } open_node.push(key, value, right_tree); diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs index cacbd54b6c246..ca8108f3d7562 100644 --- a/library/alloc/src/collections/btree/map.rs +++ b/library/alloc/src/collections/btree/map.rs @@ -216,7 +216,9 @@ impl Clone for BTreeMap { match node.force() { Leaf(leaf) => { let mut out_tree = BTreeMap { - root: Some(Root::new(alloc.clone())), + // SAFETY: `alloc` is the allocator for both the original and the cloned + // `BTreeMap`. + root: unsafe { Some(Root::new(alloc.clone())) }, length: 0, alloc: ManuallyDrop::new(alloc), _marker: PhantomData, @@ -247,7 +249,9 @@ impl Clone for BTreeMap { { let out_root = out_tree.root.as_mut().unwrap(); - let mut out_node = out_root.push_internal_level(alloc.clone()); + // SAFETY: `alloc` is the allocator for both the original and the cloned + // `BTreeMap`. + let mut out_node = unsafe { out_root.push_internal_level(alloc.clone()) }; let mut in_edge = internal.first_edge(); while let Ok(kv) = in_edge.right_kv() { let (k, v) = kv.into_kv(); @@ -269,7 +273,9 @@ impl Clone for BTreeMap { out_node.push( k, v, - subroot.unwrap_or_else(|| Root::new(alloc.clone())), + // SAFETY: `alloc` is the allocator for both the original and cloned + // `BTreeMap`. + subroot.unwrap_or_else(|| unsafe { Root::new(alloc.clone()) }), ); out_tree.length += 1 + sublength; } @@ -323,8 +329,9 @@ where fn replace(&mut self, key: K) -> Option { let (map, dormant_map) = DormantMutRef::new(self); + // SAFETY: `alloc` is the allocator for the `BTreeMap`. let root_node = - map.root.get_or_insert_with(|| Root::new((*map.alloc).clone())).borrow_mut(); + map.root.get_or_insert_with(|| unsafe { Root::new((*map.alloc).clone()) }).borrow_mut(); match root_node.search_tree::(&key) { Found(mut kv) => Some(mem::replace(kv.key_mut(), key)), GoDown(handle) => { @@ -1144,13 +1151,16 @@ impl BTreeMap { let self_iter = mem::replace(self, Self::new_in((*self.alloc).clone())).into_iter(); let other_iter = mem::replace(other, Self::new_in((*self.alloc).clone())).into_iter(); - let root = self.root.get_or_insert_with(|| Root::new((*self.alloc).clone())); - root.append_from_sorted_iters( - self_iter, - other_iter, - &mut self.length, - (*self.alloc).clone(), - ) + let root = self.root.get_or_insert_with(|| unsafe { Root::new((*self.alloc).clone()) }); + // SAFETY: `self.alloc` is the allocator for the `BTreeMap`. + unsafe { + root.append_from_sorted_iters( + self_iter, + other_iter, + &mut self.length, + (*self.alloc).clone(), + ) + } } /// Constructs a double-ended iterator over a sub-range of elements in the map. @@ -1464,9 +1474,13 @@ impl BTreeMap { K: Ord, I: IntoIterator, { - let mut root = Root::new(alloc.clone()); + // SAFETY: `alloc` is the allocator for the returned `BTreeMap`. + let mut root = unsafe { Root::new(alloc.clone()) }; let mut length = 0; - root.bulk_push(DedupSortedIter::new(iter.into_iter()), &mut length, alloc.clone()); + // SAFETY: `alloc` is the allocator for the returned `BTreeMap`. 
+ unsafe { + root.bulk_push(DedupSortedIter::new(iter.into_iter()), &mut length, alloc.clone()); + } BTreeMap { root: Some(root), length, alloc: ManuallyDrop::new(alloc), _marker: PhantomData } } } diff --git a/library/alloc/src/collections/btree/map/entry.rs b/library/alloc/src/collections/btree/map/entry.rs index b6eecf9b0e952..f661095d378b9 100644 --- a/library/alloc/src/collections/btree/map/entry.rs +++ b/library/alloc/src/collections/btree/map/entry.rs @@ -342,30 +342,39 @@ impl<'a, K: Ord, V, A: Allocator + Clone> VacantEntry<'a, K, V, A> { None => { // SAFETY: There is no tree yet so no reference to it exists. let map = unsafe { self.dormant_map.awaken() }; - let mut root = NodeRef::new_leaf(self.alloc.clone()); + // SAFETY: `self.alloc` is the allocator for the owning `BTreeMap`. + let mut root = unsafe { NodeRef::new_leaf(self.alloc.clone()) }; let val_ptr = root.borrow_mut().push(self.key, value) as *mut V; map.root = Some(root.forget_type()); map.length = 1; val_ptr } - Some(handle) => match handle.insert_recursing(self.key, value, self.alloc.clone()) { - (None, val_ptr) => { - // SAFETY: We have consumed self.handle. - let map = unsafe { self.dormant_map.awaken() }; - map.length += 1; - val_ptr + Some(handle) => { + // SAFETY: `self.alloc` is the allocator for the owning `BTreeMap`. + let insert_result = + unsafe { handle.insert_recursing(self.key, value, self.alloc.clone()) }; + match insert_result { + (None, val_ptr) => { + // SAFETY: We have consumed self.handle. + let map = unsafe { self.dormant_map.awaken() }; + map.length += 1; + val_ptr + } + (Some(ins), val_ptr) => { + drop(ins.left); + // SAFETY: We have consumed self.handle and dropped the + // remaining reference to the tree, ins.left. + let map = unsafe { self.dormant_map.awaken() }; + let root = map.root.as_mut().unwrap(); // same as ins.left + // SAFETY: `self.alloc` is the allocator for the owning `BTreeMap`. + unsafe { + root.push_internal_level(self.alloc).push(ins.kv.0, ins.kv.1, ins.right) + }; + map.length += 1; + val_ptr + } } - (Some(ins), val_ptr) => { - drop(ins.left); - // SAFETY: We have consumed self.handle and dropped the - // remaining reference to the tree, ins.left. - let map = unsafe { self.dormant_map.awaken() }; - let root = map.root.as_mut().unwrap(); // same as ins.left - root.push_internal_level(self.alloc).push(ins.kv.0, ins.kv.1, ins.right); - map.length += 1; - val_ptr - } - }, + } }; // Now that we have finished growing the tree using borrowed references, // dereference the pointer to a part of it, that we picked up along the way. diff --git a/library/alloc/src/collections/btree/map/tests.rs b/library/alloc/src/collections/btree/map/tests.rs index 4c372b1d60ac4..7636268c0380d 100644 --- a/library/alloc/src/collections/btree/map/tests.rs +++ b/library/alloc/src/collections/btree/map/tests.rs @@ -116,7 +116,14 @@ impl BTreeMap { { let iter = mem::take(self).into_iter(); if !iter.is_empty() { - self.root.insert(Root::new(*self.alloc)).bulk_push(iter, &mut self.length, *self.alloc); + // SAFETY: `self.alloc` is the allocator for this `BTreeMap`. 
+ unsafe { + self.root.insert(Root::new(*self.alloc)).bulk_push( + iter, + &mut self.length, + *self.alloc, + ); + } } } } diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs index d831161bcb686..217718ab525d4 100644 --- a/library/alloc/src/collections/btree/node.rs +++ b/library/alloc/src/collections/btree/node.rs @@ -213,31 +213,61 @@ unsafe impl Send for NodeRef unsafe impl Send for NodeRef {} impl NodeRef { - pub fn new_leaf(alloc: A) -> Self { - Self::from_new_leaf(LeafNode::new(alloc)) + /// # Safety + /// + /// `alloc` must be the allocator for the owning `BTreeMap`. + pub unsafe fn new_leaf(alloc: A) -> Self { + // SAFETY: The caller has guaranteed that the allocator of the provided `Box` is the + // allocator for the owning `BTreeMap`. + unsafe { Self::from_new_leaf(LeafNode::new(alloc)) } } - fn from_new_leaf(leaf: Box, A>) -> Self { - NodeRef { height: 0, node: NonNull::from(Box::leak(leaf)), _marker: PhantomData } + /// # Safety + /// + /// The allocator of the `Box` must be the allocator for the owning `BTreeMap`. + unsafe fn from_new_leaf(leaf: Box, A>) -> Self { + // We're dropping the `alloc` part of the box here, but our safety condition guarantees that + // a clone of that allocator will outlive the returned `NodeRef` in the owning `BTreeMap`. + // This prevents the memory of the box from being invalidated. + let ptr = Box::into_raw(leaf); + // SAFETY: The pointer returned from `Box::into_raw` is guaranteed to be non-null. + let node = unsafe { NonNull::new_unchecked(ptr) }; + NodeRef { height: 0, node, _marker: PhantomData } } } impl NodeRef { - fn new_internal(child: Root, alloc: A) -> Self { + /// # Safety + /// + /// `alloc` must be the allocator for the owning `BTreeMap`. + unsafe fn new_internal(child: Root, alloc: A) -> Self { let mut new_node = unsafe { InternalNode::new(alloc) }; new_node.edges[0].write(child.node); + + // SAFETY: + // - `child.height + 1` is always nonzero. + // - The caller has guaranteed that the allocator of the provided `Box` is the allocator for + // the owning `BTreeMap`. unsafe { NodeRef::from_new_internal(new_node, child.height + 1) } } /// # Safety - /// `height` must not be zero. + /// + /// - `height` must not be zero. + /// - The allocator of the `Box` must be the allocator for the owning `BTreeMap`. unsafe fn from_new_internal( internal: Box, A>, height: usize, ) -> Self { debug_assert!(height > 0); - let node = NonNull::from(Box::leak(internal)).cast(); - let mut this = NodeRef { height, node, _marker: PhantomData }; + // We're dropping the `alloc` part of the box here, but our safety condition guarantees that + // a clone of that allocator will outlive the returned `NodeRef` in the owning `BTreeMap`. + // This prevents the memory of the box from being invalidated. + let ptr = Box::into_raw(internal); + // SAFETY: The pointer returned from `Box::into_raw` is guaranteed to be non-null. + let node = unsafe { NonNull::new_unchecked(ptr) }; + + let mut this = NodeRef { height, node: node.cast(), _marker: PhantomData }; this.borrow_mut().correct_all_childrens_parent_links(); this } @@ -559,18 +589,32 @@ impl NodeRef { impl NodeRef { /// Returns a new owned tree, with its own root node that is initially empty. - pub fn new(alloc: A) -> Self { - NodeRef::new_leaf(alloc).forget_type() + /// + /// # Safety + /// + /// `alloc` must be the allocator for the owning `BTreeMap`. 
+ pub unsafe fn new(alloc: A) -> Self { + // SAFETY: The caller has guaranteed that `alloc` is the allocator for the owning + // `BTreeMap`. + unsafe { NodeRef::new_leaf(alloc).forget_type() } } /// Adds a new internal node with a single edge pointing to the previous root node, /// make that new node the root node, and return it. This increases the height by 1 /// and is the opposite of `pop_internal_level`. - pub fn push_internal_level( + /// + /// # Safety + /// + /// `alloc` must be the allocator for the owning `BTreeMap`. + pub unsafe fn push_internal_level( &mut self, alloc: A, ) -> NodeRef, K, V, marker::Internal> { - super::mem::take_mut(self, |old_root| NodeRef::new_internal(old_root, alloc).forget_type()); + // SAFETY: The caller has guaranteed that `alloc` is the allocator for the owning + // `BTreeMap`. + super::mem::take_mut(self, |old_root| unsafe { + NodeRef::new_internal(old_root, alloc).forget_type() + }); // `self.borrow_mut()`, except that we just forgot we're internal now: NodeRef { height: self.height, node: self.node, _marker: PhantomData } @@ -869,7 +913,11 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, mark /// this edge. This method splits the node if there isn't enough room. /// /// The returned pointer points to the inserted value. - fn insert( + /// + /// # Safety + /// + /// `alloc` must be the allocator for the owning `BTreeMap`. + unsafe fn insert( mut self, key: K, val: V, @@ -881,7 +929,9 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, mark } else { let (middle_kv_idx, insertion) = splitpoint(self.idx); let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) }; - let mut result = middle.split(alloc); + // SAFETY: The caller has guaranteed that `alloc` is the allocator of the owning + // `BTreeMap`. + let mut result = unsafe { middle.split(alloc) }; let mut insertion_edge = match insertion { LeftOrRight::Left(insert_idx) => unsafe { Handle::new_edge(result.left.reborrow_mut(), insert_idx) @@ -968,13 +1018,19 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, mark /// If the returned result is some `SplitResult`, the `left` field will be the root node. /// The returned pointer points to the inserted value, which in the case of `SplitResult` /// is in the `left` or `right` tree. - pub fn insert_recursing( + /// + /// # Safety + /// + /// `alloc` must be the allocator for the owning `BTreeMap`. + pub unsafe fn insert_recursing( self, key: K, value: V, alloc: A, ) -> (Option>, *mut V) { - let (mut split, val_ptr) = match self.insert(key, value, alloc.clone()) { + // SAFETY: The caller has guaranteed that `alloc` is the allocator for the owning + // `BTreeMap`. + let (mut split, val_ptr) = match unsafe { self.insert(key, value, alloc.clone()) } { (None, val_ptr) => return (None, val_ptr), (Some(split), val_ptr) => (split.forget_node_type(), val_ptr), }; @@ -1128,12 +1184,21 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, mark /// - The key and value pointed to by this handle are extracted. /// - All the key-value pairs to the right of this handle are put into a newly /// allocated node. - pub fn split(mut self, alloc: A) -> SplitResult<'a, K, V, marker::Leaf> { + /// + /// # Safety + /// + /// `alloc` must be the allocator for the owning `BTreeMap`. 
+ pub unsafe fn split( + mut self, + alloc: A, + ) -> SplitResult<'a, K, V, marker::Leaf> { let mut new_node = LeafNode::new(alloc); let kv = self.split_leaf_data(&mut new_node); - let right = NodeRef::from_new_leaf(new_node); + // SAFETY: The caller has guaranteed that `alloc` is the allocator for the owning + // `BTreeMap`. + let right = unsafe { NodeRef::from_new_leaf(new_node) }; SplitResult { left: self.node, kv, right } } diff --git a/library/alloc/src/collections/btree/node/tests.rs b/library/alloc/src/collections/btree/node/tests.rs index aadb0dc9c40d9..e79bc77359c1e 100644 --- a/library/alloc/src/collections/btree/node/tests.rs +++ b/library/alloc/src/collections/btree/node/tests.rs @@ -68,10 +68,13 @@ fn test_splitpoint() { #[test] fn test_partial_eq() { - let mut root1 = NodeRef::new_leaf(Global); + // SAFETY: `Global` is the allocator for the `BTreeMap` we're testing. + let mut root1 = unsafe { NodeRef::new_leaf(Global) }; root1.borrow_mut().push(1, ()); - let mut root1 = NodeRef::new_internal(root1.forget_type(), Global).forget_type(); - let root2 = Root::new(Global); + // SAFETY: `Global` is the allocator for the `BTreeMap` we're testing. + let mut root1 = unsafe { NodeRef::new_internal(root1.forget_type(), Global).forget_type() }; + // SAFETY: `Global` is the allocator for the `BTreeMap` we're testing. + let root2 = unsafe { Root::new(Global) }; root1.reborrow().assert_back_pointers(); root2.reborrow().assert_back_pointers(); diff --git a/library/alloc/src/collections/btree/split.rs b/library/alloc/src/collections/btree/split.rs index 638dc98fc3e41..dd9f29521b745 100644 --- a/library/alloc/src/collections/btree/split.rs +++ b/library/alloc/src/collections/btree/split.rs @@ -63,10 +63,20 @@ impl Root { } /// Creates a tree consisting of empty nodes. + /// + /// # Safety + /// + /// `alloc` must be the allocator for the owning `BTreeMap`. fn new_pillar(height: usize, alloc: A) -> Self { - let mut root = Root::new(alloc.clone()); + // SAFETY: The caller has guaranteed that `alloc` is the allocator for the owning + // `BTreeMap`. + let mut root = unsafe { Root::new(alloc.clone()) }; for _ in 0..height { - root.push_internal_level(alloc.clone()); + // SAFETY: The caller has guaranteed that `alloc` is the allocator for the owning + // `BTreeMap`. + unsafe { + root.push_internal_level(alloc.clone()); + } } root } diff --git a/src/test/ui/box/leak-alloc.stderr b/src/test/ui/box/leak-alloc.stderr index dc2a0ea23426d..83bf0d60cc185 100644 --- a/src/test/ui/box/leak-alloc.stderr +++ b/src/test/ui/box/leak-alloc.stderr @@ -2,10 +2,10 @@ error[E0597]: `alloc` does not live long enough --> $DIR/leak-alloc.rs:24:33 | LL | let boxed = Box::new_in(10, alloc.by_ref()); - | ^^^^^^^^^^^^^^ - | | - | borrowed value does not live long enough - | argument requires that `alloc` is borrowed for `'static` + | ----------------^^^^^^^^^^^^^^- + | | | + | | borrowed value does not live long enough + | argument requires that `alloc` is borrowed for `'static` ... LL | } | - `alloc` dropped here while still borrowed @@ -14,10 +14,10 @@ error[E0505]: cannot move out of `alloc` because it is borrowed --> $DIR/leak-alloc.rs:27:10 | LL | let boxed = Box::new_in(10, alloc.by_ref()); - | -------------- - | | - | borrow of `alloc` occurs here - | argument requires that `alloc` is borrowed for `'static` + | ------------------------------- + | | | + | | borrow of `alloc` occurs here + | argument requires that `alloc` is borrowed for `'static` ... 
LL | drop(alloc); | ^^^^^ move out of `alloc` occurs here diff --git a/src/test/ui/box/pin-safe-alloc.rs b/src/test/ui/box/pin-safe-alloc.rs index 699be28df2abc..91fdc799ddae0 100644 --- a/src/test/ui/box/pin-safe-alloc.rs +++ b/src/test/ui/box/pin-safe-alloc.rs @@ -1,6 +1,5 @@ // run-pass #![feature(allocator_api)] -#![feature(box_into_pin)] use std::alloc::{AllocError, Allocator, Layout, PinSafeAllocator, System}; use std::ptr::NonNull; diff --git a/src/test/ui/box/pin-unsafe-alloc.rs b/src/test/ui/box/pin-unsafe-alloc.rs index 49485f4763443..ffccf474c7c2c 100644 --- a/src/test/ui/box/pin-unsafe-alloc.rs +++ b/src/test/ui/box/pin-unsafe-alloc.rs @@ -1,5 +1,4 @@ #![feature(allocator_api)] -#![feature(box_into_pin)] use std::alloc::{AllocError, Allocator, Layout, System}; use std::ptr::NonNull; diff --git a/src/test/ui/box/pin-unsafe-alloc.stderr b/src/test/ui/box/pin-unsafe-alloc.stderr index 09996df418975..40ffb18819a8c 100644 --- a/src/test/ui/box/pin-unsafe-alloc.stderr +++ b/src/test/ui/box/pin-unsafe-alloc.stderr @@ -1,5 +1,5 @@ error[E0277]: the trait bound `Alloc: PinSafeAllocator` is not satisfied - --> $DIR/pin-unsafe-alloc.rs:32:32 + --> $DIR/pin-unsafe-alloc.rs:31:32 | LL | let _ = Box::pin_in(value, alloc); | ----------- ^^^^^ expected an implementor of trait `PinSafeAllocator` @@ -9,7 +9,7 @@ LL | let _ = Box::pin_in(value, alloc); note: required by a bound in `Box::::pin_in` --> $SRC_DIR/alloc/src/boxed.rs:LL:COL | -LL | A: ~const Allocator + ~const PinSafeAllocator + ~const Drop + ~const Destruct, +LL | A: ~const Allocator + ~const PinSafeAllocator + ~const Destruct, | ^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `Box::::pin_in` help: consider borrowing here |
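The user-facing effect of the new bound on `Box::pin_in` is sketched below,
modeled on the `pin-safe-alloc.rs` test above. The `Alloc` type is
hypothetical and exists only for illustration; the sketch assumes a toolchain
with these patches applied and `allocator_api` enabled.

    #![feature(allocator_api)]

    use std::alloc::{AllocError, Allocator, Layout, PinSafeAllocator, System};
    use std::marker::PhantomPinned;
    use std::ptr::NonNull;

    // Forwards to `System`, whose blocks stay valid even if this value is
    // forgotten, so the `PinSafeAllocator` marker impl below is sound.
    struct Alloc;

    unsafe impl Allocator for Alloc {
        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            System.allocate(layout)
        }

        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
            System.deallocate(ptr, layout)
        }
    }

    // SAFETY: memory blocks come from `System` and are never invalidated by
    // dropping or forgetting `Alloc`.
    unsafe impl PinSafeAllocator for Alloc {}

    fn main() {
        // `Global` implements `PinSafeAllocator`, so `Box::pin` is unchanged.
        let _pinned = Box::pin(PhantomPinned);
        // A custom allocator needs the marker impl above to be usable here.
        let _pinned_in = Box::pin_in(PhantomPinned, Alloc);
    }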