From 3070db1cb59e47d2263973a36e9149d02b1fbf9f Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Mon, 19 Apr 2021 18:06:32 +0200 Subject: [PATCH 01/20] Add Allocator-helper to allocate prefix and safety-tester --- library/core/src/alloc/helper.rs | 170 ++++++++++++++++ library/core/src/alloc/mod.rs | 3 + .../const_unchecked_layout.rs} | 0 library/core/tests/alloc/mod.rs | 185 ++++++++++++++++++ library/core/tests/alloc/prefix.rs | 69 +++++++ library/core/tests/lib.rs | 4 + 6 files changed, 431 insertions(+) create mode 100644 library/core/src/alloc/helper.rs rename library/core/tests/{alloc.rs => alloc/const_unchecked_layout.rs} (100%) create mode 100644 library/core/tests/alloc/mod.rs create mode 100644 library/core/tests/alloc/prefix.rs diff --git a/library/core/src/alloc/helper.rs b/library/core/src/alloc/helper.rs new file mode 100644 index 0000000000000..efa51082a53c8 --- /dev/null +++ b/library/core/src/alloc/helper.rs @@ -0,0 +1,170 @@ +use crate::{ + alloc::{AllocError, Allocator, Layout}, + fmt, + marker::PhantomData, + ptr::NonNull, +}; + +/// An allocator that requests some extra memory from the parent allocator for storing a prefix and/or a suffix. +/// +/// The alignment of the memory block is the maximum of the alignment of `Prefix` and the requested +/// alignment. This may introduce an unused padding between `Prefix` and the returned memory. +/// +/// To get a pointer to the prefix, [`prefix()`] may be called. +/// +/// [`prefix()`]: Self::prefix +/// +/// Consider +/// +/// ```rust,ignore (not real code) +/// #[repr(C)] +/// struct Struct { +/// t: T, +/// data: Data, +/// } +/// ``` +/// +/// where `Data` is a type with layout `layout`. +/// +/// When this allocator creates an allocation for layout `layout`, the pointer can be +/// offset by `-offsetof(Struct, data)` and the resulting pointer points is an allocation +/// of `A` for `Layout::new::()`. +#[unstable(feature = "allocator_api_internals", issue = "none")] +pub struct PrefixAllocator { + /// The parent allocator to be used as backend + pub parent: Alloc, + _prefix: PhantomData<*const Prefix>, +} + +impl fmt::Debug for PrefixAllocator { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("Affix").field("parent", &self.parent).finish() + } +} + +impl Default for PrefixAllocator { + fn default() -> Self { + Self::new(Alloc::default()) + } +} + +impl Clone for PrefixAllocator { + fn clone(&self) -> Self { + Self::new(self.parent.clone()) + } +} + +impl Copy for PrefixAllocator {} + +impl PartialEq for PrefixAllocator { + fn eq(&self, other: &Self) -> bool { + self.parent.eq(&other.parent) + } +} + +impl Eq for PrefixAllocator {} + +unsafe impl Send for PrefixAllocator {} +unsafe impl Sync for PrefixAllocator {} +impl Unpin for PrefixAllocator {} + +impl PrefixAllocator { + pub const fn new(parent: Alloc) -> Self { + Self { parent, _prefix: PhantomData } + } + + /// Returns the offset between the `Prefix` and the stored data. + #[inline] + pub fn prefix_offset(layout: Layout) -> usize { + let prefix_layout = Layout::new::(); + prefix_layout.size() + prefix_layout.padding_needed_for(layout.align()) + } + + /// Returns a pointer to the prefix. + /// + /// # Safety + /// + /// * `ptr` must denote a block of memory *[currently allocated]* via this allocator, and + /// * `layout` must *[fit]* that block of memory. 
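The prefix offset described above is plain `Layout` arithmetic: extend the prefix layout by the payload layout and keep the reported offset. A minimal, standalone sketch of that computation, using only stable `Layout` methods and an illustrative `Meta` prefix type (not the types from this patch):

```rust
use std::alloc::Layout;
use std::mem;

// Illustrative stand-in for the allocator's `Prefix` type parameter.
struct Meta {
    strong: usize,
    weak: usize,
}

// Distance from the start of the allocation (where the prefix lives) to the
// payload: the prefix size plus any padding the payload's alignment requires.
fn prefix_offset(value: Layout) -> usize {
    let (_whole, offset) = Layout::new::<Meta>().extend(value).expect("layout overflow");
    offset
}

fn main() {
    let offset = prefix_offset(Layout::new::<u64>());
    // The offset is always at least the prefix size and a multiple of the
    // payload's alignment, whatever the target.
    assert!(offset >= mem::size_of::<Meta>());
    assert_eq!(offset % mem::align_of::<u64>(), 0);
    println!("prefix offset for u64 payload: {offset}");
}
```

Allocation, `prefix()`, and `deallocate` below all recompute this same offset, which is why the safety comments stress that the original layout must be passed back in.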
+ /// + /// [currently allocated]: https://doc.rust-lang.org/nightly/core/alloc/trait.AllocRef.html#currently-allocated-memory + /// [fit]: https://doc.rust-lang.org/nightly/core/alloc/trait.AllocRef.html#memory-fitting + #[inline] + pub unsafe fn prefix(ptr: NonNull, layout: Layout) -> NonNull { + let prefix_offset = Self::prefix_offset(layout); + // SAFETY: `prefix_offset` is smaller (and not equal to) `ptr` as the same function for calculating `prefix_offset` is used when allocating. + unsafe { NonNull::new_unchecked(ptr.as_ptr().sub(prefix_offset)).cast() } + } + + fn create_ptr(ptr: NonNull<[u8]>, offset_prefix: usize) -> NonNull<[u8]> { + let len = ptr.len() - offset_prefix; + + // SAFETY: `prefix_offset` is smaller (and not equal to) `ptr` as the same function for calculating `prefix_offset` is used when allocating. + let ptr = unsafe { NonNull::new_unchecked(ptr.as_mut_ptr().add(offset_prefix)) }; + + NonNull::slice_from_raw_parts(ptr, len) + } + + #[inline] + fn alloc_impl( + layout: Layout, + alloc: impl FnOnce(Layout) -> Result, AllocError>, + ) -> Result, AllocError> { + let (layout, offset_prefix) = + Layout::new::().extend(layout).map_err(|_| AllocError)?; + + Ok(Self::create_ptr(alloc(layout)?, offset_prefix)) + } +} + +unsafe impl Allocator for PrefixAllocator +where + Alloc: Allocator, +{ + fn allocate(&self, layout: Layout) -> Result, AllocError> { + Self::alloc_impl(layout, |l| self.parent.allocate(l)) + } + + fn allocate_zeroed(&self, layout: Layout) -> Result, AllocError> { + Self::alloc_impl(layout, |l| self.parent.allocate_zeroed(l)) + } + + unsafe fn grow( + &self, + _ptr: NonNull, + _old_layout: Layout, + _new_layout: Layout, + ) -> Result, AllocError> { + // For (A)Rc it's not needed. When implementing please take care, if the alignment changes. + unimplemented!("PrefixAllocator currently does not implement growing."); + } + + unsafe fn grow_zeroed( + &self, + _ptr: NonNull, + _old_layout: Layout, + _new_layout: Layout, + ) -> Result, AllocError> { + // For (A)Rc it's not needed. When implementing please take care, if the alignment changes. + unimplemented!("PrefixAllocator currently does not implement growing."); + } + + unsafe fn shrink( + &self, + _ptr: NonNull, + _old_layout: Layout, + _new_layout: Layout, + ) -> Result, AllocError> { + // For (A)Rc it's not needed. When implementing please take care, if the alignment changes. + unimplemented!("PrefixAllocator currently does not implement shrinking."); + } + + unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { + let (layout, prefix_offset) = Layout::new::().extend(layout).unwrap(); + // SAFETY: `prefix_offset` is smaller (and not equal to) `ptr` as the same function for calculating `prefix_offset` is used when allocating. 
+ unsafe { + let base_ptr = NonNull::new_unchecked(ptr.as_ptr().sub(prefix_offset)); + self.parent.deallocate(base_ptr, layout) + }; + } +} diff --git a/library/core/src/alloc/mod.rs b/library/core/src/alloc/mod.rs index 06a761531b676..e1853f743de7d 100644 --- a/library/core/src/alloc/mod.rs +++ b/library/core/src/alloc/mod.rs @@ -5,6 +5,9 @@ mod global; mod layout; +#[unstable(feature = "allocator_api_internals", issue = "none")] +pub mod helper; + #[stable(feature = "global_alloc", since = "1.28.0")] pub use self::global::GlobalAlloc; #[stable(feature = "alloc_layout", since = "1.28.0")] diff --git a/library/core/tests/alloc.rs b/library/core/tests/alloc/const_unchecked_layout.rs similarity index 100% rename from library/core/tests/alloc.rs rename to library/core/tests/alloc/const_unchecked_layout.rs diff --git a/library/core/tests/alloc/mod.rs b/library/core/tests/alloc/mod.rs new file mode 100644 index 0000000000000..de57d828edac5 --- /dev/null +++ b/library/core/tests/alloc/mod.rs @@ -0,0 +1,185 @@ +use core::alloc::{AllocError, Allocator, Layout}; +use core::ptr::NonNull; +use std::{ + collections::HashMap, + sync::{Mutex, PoisonError}, +}; + +mod const_unchecked_layout; +mod prefix; + +#[derive(Default)] +/// Implements `Allocator` and checks it's unsafety conditions. +struct Tracker { + alloc: A, + map: Mutex, (usize, Layout)>>, +} + +impl Tracker { + fn new(alloc: A) -> Self { + Self { alloc, map: Default::default() } + } + + fn after_alloc(&self, layout: Layout, result: Result, AllocError>) { + if let Ok(ptr) = result { + self.map + .lock() + .unwrap_or_else(PoisonError::into_inner) + .insert(ptr.as_non_null_ptr(), (ptr.len(), layout)); + } + } + + fn before_dealloc(&self, ptr: NonNull, layout: Layout) { + let lock = self.map.lock().unwrap_or_else(PoisonError::into_inner); + let (size, old_layout) = lock + .get(&ptr) + .expect("`ptr` must denote a block of memory currently allocated via this allocator"); + assert_eq!( + layout.align(), + old_layout.align(), + "`layout` must fit that block of memory. Expected alignment of {}, got {}", + old_layout.align(), + layout.align() + ); + if layout.size() < old_layout.size() || layout.size() > *size { + if *size == old_layout.size() { + panic!( + "`layout` must fit that block of memory. Expected size of {}, got {}", + old_layout.size(), + layout.size() + ) + } else { + panic!( + "`layout` must fit that block of memory. 
Expected size between {}..={}, \ + got {}", + old_layout.size(), + size, + layout.size() + ) + } + } + } + + fn after_dealloc(&self, ptr: NonNull, _layout: Layout) { + self.map.lock().unwrap_or_else(PoisonError::into_inner).remove(&ptr); + } + + fn before_grow(&self, ptr: NonNull, old_layout: Layout, new_layout: Layout) { + assert!( + new_layout.size() >= old_layout.size(), + "`new_layout.size()` must be greater than or equal to `old_layout.size()`, expected {} >= {}", + new_layout.size(), + old_layout.size() + ); + self.before_dealloc(ptr, old_layout) + } + + #[track_caller] + fn after_grow( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + result: Result, AllocError>, + ) { + if result.is_ok() { + self.after_dealloc(ptr, old_layout); + self.after_alloc(new_layout, result); + } + } + + fn before_shrink(&self, ptr: NonNull, old_layout: Layout, new_layout: Layout) { + assert!( + new_layout.size() <= old_layout.size(), + "`new_layout.size()` must be smaller than or equal to `old_layout.size()`, expected {} >= {}", + new_layout.size(), + old_layout.size() + ); + self.before_dealloc(ptr, old_layout) + } + + #[track_caller] + fn after_shrink( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + result: Result, AllocError>, + ) { + if result.is_ok() { + self.after_dealloc(ptr, old_layout); + self.after_alloc(new_layout, result); + } + } +} + +unsafe impl Allocator for Tracker { + #[track_caller] + fn allocate(&self, layout: Layout) -> Result, std::alloc::AllocError> { + let result = self.alloc.allocate(layout); + self.after_alloc(layout, result); + result + } + + #[track_caller] + fn allocate_zeroed(&self, layout: Layout) -> Result, std::alloc::AllocError> { + let result = self.alloc.allocate_zeroed(layout); + self.after_alloc(layout, result); + result + } + + #[track_caller] + unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { + self.before_dealloc(ptr, layout); + unsafe { self.alloc.deallocate(ptr, layout) } + self.after_dealloc(ptr, layout); + } + + #[track_caller] + unsafe fn grow( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, std::alloc::AllocError> { + self.before_grow(ptr, old_layout, new_layout); + let result = unsafe { self.alloc.grow(ptr, old_layout, new_layout) }; + self.after_grow(ptr, old_layout, new_layout, result); + result + } + + #[track_caller] + unsafe fn grow_zeroed( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, std::alloc::AllocError> { + self.before_grow(ptr, old_layout, new_layout); + let result = unsafe { self.alloc.grow_zeroed(ptr, old_layout, new_layout) }; + self.after_grow(ptr, old_layout, new_layout, result); + result + } + + #[track_caller] + unsafe fn shrink( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, std::alloc::AllocError> { + self.before_shrink(ptr, old_layout, new_layout); + let result = unsafe { self.alloc.shrink(ptr, old_layout, new_layout) }; + self.after_shrink(ptr, old_layout, new_layout, result); + result + } +} + +impl Drop for Tracker { + fn drop(&mut self) { + let lock = self.map.lock().unwrap_or_else(PoisonError::into_inner); + if !lock.is_empty() { + panic!("Missing deallocations {:#?}", lock); + } + } +} diff --git a/library/core/tests/alloc/prefix.rs b/library/core/tests/alloc/prefix.rs new file mode 100644 index 0000000000000..12adb961844b2 --- /dev/null +++ b/library/core/tests/alloc/prefix.rs @@ -0,0 +1,69 @@ +use core::alloc::helper::PrefixAllocator; +use core::alloc::{Allocator, 
Layout}; +use core::any::type_name; +use std::alloc::System; + +use super::Tracker; + +fn test_prefix() { + unsafe { + let layout = Layout::new::(); + let prefix_offset = PrefixAllocator::::prefix_offset(layout); + assert_eq!(prefix_offset, Layout::new::().extend(layout).unwrap().1); + + let alloc = + Tracker::new(PrefixAllocator::, Prefix>::new(Tracker::new(System))); + let memory = alloc.allocate(layout).unwrap_or_else(|_| { + panic!( + "Could not allocate {} bytes for PrefixAllocator<_, {}> with Layout<{}>.", + layout.size(), + type_name::(), + type_name::() + ) + }); + + assert_eq!( + PrefixAllocator::::prefix(memory.as_non_null_ptr(), layout) + .cast() + .as_ptr(), + memory.as_mut_ptr().sub(prefix_offset), + "Invalid prefix location for PrefixAllocator<_, {}> with Layout<{}>.", + type_name::(), + type_name::(), + ); + + alloc.deallocate(memory.as_non_null_ptr(), layout); + } +} + +#[repr(align(1024))] +#[derive(Debug, Copy, Clone, PartialEq)] +struct AlignTo1024 { + a: T, +} + +#[repr(align(64))] +#[derive(Debug, Copy, Clone, PartialEq)] +struct AlignTo64; + +#[test] +fn test() { + macro_rules! test_ty { + ($($ty:ty),*) => { test_ty!(@2 $($ty),*; ($($ty),*)) }; + (@2 $($tyl:ty),*; $tyr:tt) => { $(test_ty!(@3 $tyl; $tyr);)* }; + (@3 $tyl:ty; ($($tyr:ty),*)) => { $(test_prefix::<$tyl, $tyr>();)* }; + } + // call test_pair::() for every combination of these types + test_ty!( + (), + u8, + u16, + u32, + u64, + u128, + AlignTo64, + AlignTo1024, + AlignTo1024, + AlignTo1024 + ); +} diff --git a/library/core/tests/lib.rs b/library/core/tests/lib.rs index f6bfe67e1b12c..297b2b2eda759 100644 --- a/library/core/tests/lib.rs +++ b/library/core/tests/lib.rs @@ -1,4 +1,6 @@ #![feature(alloc_layout_extra)] +#![feature(allocator_api)] +#![feature(allocator_api_internals)] #![feature(array_chunks)] #![feature(array_from_ref)] #![feature(array_methods)] @@ -48,6 +50,8 @@ #![feature(try_trait)] #![feature(slice_internals)] #![feature(slice_partition_dedup)] +#![feature(slice_ptr_len)] +#![feature(slice_ptr_get)] #![feature(int_error_matching)] #![feature(iter_advance_by)] #![feature(iter_partition_in_place)] From 8293713617a81c039927ab1afddfc27aeac8e687 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Mon, 19 Apr 2021 19:52:37 +0200 Subject: [PATCH 02/20] Restructure `Rc` and `Arc` metadata for uniform access via a PrefixAlloc --- library/alloc/src/lib.rs | 1 + library/alloc/src/rc.rs | 105 +++++++++++++++++------------ library/alloc/src/sync.rs | 137 ++++++++++++++++++++------------------ src/etc/gdb_providers.py | 4 +- src/etc/lldb_providers.py | 8 +-- 5 files changed, 145 insertions(+), 110 deletions(-) diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs index 14cb1d3b405c2..0ada3c89a0015 100644 --- a/library/alloc/src/lib.rs +++ b/library/alloc/src/lib.rs @@ -76,6 +76,7 @@ #![cfg_attr(test, feature(test))] #![cfg_attr(test, feature(new_uninit))] #![feature(allocator_api)] +#![feature(allocator_api_internals)] #![feature(vec_extend_from_within)] #![feature(array_chunks)] #![feature(array_methods)] diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index cb4af7c5cd151..a5fd3dc81f9a3 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -247,6 +247,7 @@ use crate::boxed::Box; #[cfg(test)] use std::boxed::Box; +use core::alloc::helper::PrefixAllocator; use core::any::Any; use core::borrow; use core::cell::Cell; @@ -257,7 +258,7 @@ use core::hash::{Hash, Hasher}; use core::intrinsics::abort; use core::iter; use core::marker::{self, PhantomData, Unpin, Unsize}; 
-use core::mem::{self, align_of_val_raw, forget, size_of_val}; +use core::mem::{self, forget, size_of_val, MaybeUninit}; use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver}; use core::pin::Pin; use core::ptr::{self, NonNull}; @@ -273,13 +274,33 @@ use crate::vec::Vec; #[cfg(test)] mod tests; +struct RcBoxMetadata { + strong: Cell, + weak: Cell, +} + +impl RcBoxMetadata { + // There is an implicit weak pointer owned by all the strong + // pointers, which ensures that the weak destructor never frees + // the allocation while the strong destructor is running, even + // if the weak pointer is stored inside the strong one. + #[inline] + fn new_strong() -> Self { + Self { strong: Cell::new(1), weak: Cell::new(1) } + } + + #[inline] + fn new_weak() -> Self { + Self { strong: Cell::new(0), weak: Cell::new(1) } + } +} + // This is repr(C) to future-proof against possible field-reordering, which // would interfere with otherwise safe [into|from]_raw() of transmutable // inner types. #[repr(C)] struct RcBox { - strong: Cell, - weak: Cell, + meta: RcBoxMetadata, value: T, } @@ -319,10 +340,12 @@ impl Rc { unsafe { self.ptr.as_ref() } } + #[inline] fn from_inner(ptr: NonNull>) -> Self { Self { ptr, phantom: PhantomData } } + #[inline] unsafe fn from_ptr(ptr: *mut RcBox) -> Self { Self::from_inner(unsafe { NonNull::new_unchecked(ptr) }) } @@ -340,13 +363,7 @@ impl Rc { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn new(value: T) -> Rc { - // There is an implicit weak pointer owned by all the strong - // pointers, which ensures that the weak destructor never frees - // the allocation while the strong destructor is running, even - // if the weak pointer is stored inside the strong one. - Self::from_inner( - Box::leak(box RcBox { strong: Cell::new(1), weak: Cell::new(1), value }).into(), - ) + Self::from_inner(Box::leak(box RcBox { meta: RcBoxMetadata::new_strong(), value }).into()) } /// Constructs a new `Rc` using a weak reference to itself. Attempting @@ -378,8 +395,7 @@ impl Rc { // Construct the inner in the "uninitialized" state with a single // weak reference. let uninit_ptr: NonNull<_> = Box::leak(box RcBox { - strong: Cell::new(0), - weak: Cell::new(1), + meta: RcBoxMetadata::new_weak(), value: mem::MaybeUninit::::uninit(), }) .into(); @@ -400,9 +416,9 @@ impl Rc { let inner = init_ptr.as_ptr(); ptr::write(ptr::addr_of_mut!((*inner).value), data); - let prev_value = (*inner).strong.get(); + let prev_value = (*inner).meta.strong.get(); debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); - (*inner).strong.set(1); + (*inner).meta.strong.set(1); } let strong = Rc::from_inner(init_ptr); @@ -494,8 +510,7 @@ impl Rc { // the allocation while the strong destructor is running, even // if the weak pointer is stored inside the strong one. Ok(Self::from_inner( - Box::leak(Box::try_new(RcBox { strong: Cell::new(1), weak: Cell::new(1), value })?) - .into(), + Box::leak(Box::try_new(RcBox { meta: RcBoxMetadata::new_strong(), value })?).into(), )) } @@ -846,13 +861,7 @@ impl Rc { /// ``` #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - let offset = unsafe { data_offset(ptr) }; - - // Reverse the offset to find the original RcBox. - let rc_ptr = - unsafe { (ptr as *mut RcBox).set_ptr_value((ptr as *mut u8).offset(-offset)) }; - - unsafe { Self::from_ptr(rc_ptr) } + unsafe { Self::from_data_ptr(ptr).assume_init() } } /// Creates a new [`Weak`] pointer to this allocation. 
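The `from_raw` rewrite above leans on the same idea in reverse: given a pointer to the payload, step back by the computed offset to reach the start of the allocation. A compilable sketch of that round trip with a sized payload and illustrative `Meta`/`Inner` types (not the patch's own types):

```rust
use std::alloc::Layout;
use std::cell::Cell;
use std::ptr;

// Illustrative stand-ins for the reference counts and the container.
struct Meta {
    strong: Cell<usize>,
    weak: Cell<usize>,
}

// `repr(C)` keeps `meta` first, so the payload offset matches what
// `Layout::extend` reports.
#[repr(C)]
struct Inner<T> {
    meta: Meta,
    value: T,
}

fn data_offset<T>() -> usize {
    Layout::new::<Meta>().extend(Layout::new::<T>()).unwrap().1
}

fn main() {
    let boxed = Box::new(Inner {
        meta: Meta { strong: Cell::new(1), weak: Cell::new(1) },
        value: 42u32,
    });
    let inner_ptr: *const Inner<u32> = &*boxed;
    // "into_raw": hand out a pointer to the payload only.
    let value_ptr: *const u32 = unsafe { ptr::addr_of!((*inner_ptr).value) };

    // "from_raw": walk back from the payload to the start of the allocation.
    let recovered =
        unsafe { value_ptr.cast::<u8>().sub(data_offset::<u32>()) as *const Inner<u32> };
    assert_eq!(recovered, inner_ptr);
    assert_eq!(unsafe { &(*recovered).meta }.strong.get(), 1);
}
```

For unsized payloads a constant offset like this is not enough, which is where the `Layout::for_value_raw` plus `prefix_offset` combination later in the patch comes in.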
@@ -1237,8 +1246,8 @@ impl Rc { unsafe { debug_assert_eq!(Layout::for_value(&*inner), layout); - ptr::write(&mut (*inner).strong, Cell::new(1)); - ptr::write(&mut (*inner).weak, Cell::new(1)); + ptr::write(&mut (*inner).meta.strong, Cell::new(1)); + ptr::write(&mut (*inner).meta.weak, Cell::new(1)); } Ok(inner) @@ -1277,6 +1286,23 @@ impl Rc { Self::from_ptr(ptr) } } + + /// # Safety + /// + /// The caller must ensure that the pointer points to the `value` field of a `Global` + /// allocation of type `RcBox`. Depending on how the pointer was created, the + /// `meta` field might or might not be uninitialized. It's up to the caller to ensure + /// that this field is set to the correct value before the return value is unwrapped. + #[inline] + unsafe fn from_data_ptr(ptr: *const T) -> MaybeUninit { + let offset = unsafe { data_offset(ptr) }; + + // Reverse the offset to find the original RcBox. + let rc_ptr = + unsafe { (ptr as *mut RcBox).set_ptr_value((ptr as *mut u8).offset(-offset)) }; + + unsafe { MaybeUninit::new(Self::from_ptr(rc_ptr)) } + } } impl Rc<[T]> { @@ -2206,7 +2232,7 @@ impl Weak { // is dropped, the data field will be dropped in-place). Some(unsafe { let ptr = self.ptr.as_ptr(); - WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak } + WeakInner { strong: &(*ptr).meta.strong, weak: &(*ptr).meta.weak } }) } } @@ -2415,12 +2441,12 @@ trait RcInnerPtr { impl RcInnerPtr for RcBox { #[inline(always)] fn weak_ref(&self) -> &Cell { - &self.weak + &self.meta.weak } #[inline(always)] fn strong_ref(&self) -> &Cell { - &self.strong + &self.meta.strong } } @@ -2453,6 +2479,8 @@ impl AsRef for Rc { #[stable(feature = "pin", since = "1.33.0")] impl Unpin for Rc {} +type RcAllocator = PrefixAllocator; + /// Get the offset within an `RcBox` for the payload behind a pointer. /// /// # Safety @@ -2460,17 +2488,12 @@ impl Unpin for Rc {} /// The pointer must point to (and have valid metadata for) a previously /// valid instance of T, but the T is allowed to be dropped. unsafe fn data_offset(ptr: *const T) -> isize { - // Align the unsized value to the end of the RcBox. - // Because RcBox is repr(C), it will always be the last field in memory. - // SAFETY: since the only unsized types possible are slices, trait objects, - // and extern types, the input safety requirement is currently enough to - // satisfy the requirements of align_of_val_raw; this is an implementation - // detail of the language that may not be relied upon outside of std. - unsafe { data_offset_align(align_of_val_raw(ptr)) } -} - -#[inline] -fn data_offset_align(align: usize) -> isize { - let layout = Layout::new::>(); - (layout.size() + layout.padding_needed_for(align)) as isize + unsafe { + // SAFETY: since the only unsized types possible are slices, trait objects, + // and extern types, the input safety requirement is currently enough to + // satisfy the requirements of for_value_raw; this is an implementation + // detail of the language that may not be relied upon outside of std. + let layout = Layout::for_value_raw(ptr); + RcAllocator::prefix_offset(layout) as isize + } } diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index 1b7e656cefd9a..fe9dc657c8595 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -4,6 +4,7 @@ //! //! See the [`Arc`][Arc] documentation for more details. 
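The `Arc` changes that follow preserve the existing atomic-ordering discipline: Relaxed increments on clone, Release decrements plus an Acquire synchronization before destruction. A minimal sketch of that discipline in isolation, with illustrative names rather than the patch's types:

```rust
use std::sync::atomic::{fence, AtomicUsize, Ordering};

// Illustrative stand-in for a strong reference count.
struct Counted {
    strong: AtomicUsize,
}

impl Counted {
    fn clone_ref(&self) {
        // A new reference can be created with Relaxed ordering: the caller
        // already holds a reference, so no other memory needs synchronizing.
        self.strong.fetch_add(1, Ordering::Relaxed);
    }

    /// Returns `true` if the caller released the last reference and may now
    /// destroy the shared payload.
    fn drop_ref(&self) -> bool {
        // Release publishes this thread's writes to the payload to whichever
        // thread ends up running the destructor...
        if self.strong.fetch_sub(1, Ordering::Release) != 1 {
            return false;
        }
        // ...and the Acquire fence pairs with those Release decrements so the
        // destroying thread observes every other thread's writes.
        fence(Ordering::Acquire);
        true
    }
}

fn main() {
    let c = Counted { strong: AtomicUsize::new(1) };
    c.clone_ref();
    assert!(!c.drop_ref());
    assert!(c.drop_ref());
}
```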
+use core::alloc::helper::PrefixAllocator; use core::any::Any; use core::borrow; use core::cmp::Ordering; @@ -14,7 +15,7 @@ use core::hint; use core::intrinsics::abort; use core::iter; use core::marker::{PhantomData, Unpin, Unsize}; -use core::mem::{self, align_of_val_raw, size_of_val}; +use core::mem::{self, size_of_val, MaybeUninit}; use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver}; use core::pin::Pin; use core::ptr::{self, NonNull}; @@ -296,18 +297,33 @@ impl fmt::Debug for Weak { } } -// This is repr(C) to future-proof against possible field-reordering, which -// would interfere with otherwise safe [into|from]_raw() of transmutable -// inner types. -#[repr(C)] -struct ArcInner { +struct ArcInnerMetadata { strong: atomic::AtomicUsize, // the value usize::MAX acts as a sentinel for temporarily "locking" the // ability to upgrade weak pointers or downgrade strong ones; this is used // to avoid races in `make_mut` and `get_mut`. weak: atomic::AtomicUsize, +} +impl ArcInnerMetadata { + #[inline] + fn new_strong() -> Self { + Self { strong: atomic::AtomicUsize::new(1), weak: atomic::AtomicUsize::new(1) } + } + + #[inline] + fn new_weak() -> Self { + Self { strong: atomic::AtomicUsize::new(0), weak: atomic::AtomicUsize::new(1) } + } +} + +// This is repr(C) to future-proof against possible field-reordering, which +// would interfere with otherwise safe [into|from]_raw() of transmutable +// inner types. +#[repr(C)] +struct ArcInner { + meta: ArcInnerMetadata, data: T, } @@ -327,13 +343,7 @@ impl Arc { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn new(data: T) -> Arc { - // Start the weak pointer count as 1 which is the weak pointer that's - // held by all the strong pointers (kinda), see std/rc.rs for more info - let x: Box<_> = box ArcInner { - strong: atomic::AtomicUsize::new(1), - weak: atomic::AtomicUsize::new(1), - data, - }; + let x: Box<_> = box ArcInner { meta: ArcInnerMetadata::new_strong(), data }; Self::from_inner(Box::leak(x).into()) } @@ -363,8 +373,7 @@ impl Arc { // Construct the inner in the "uninitialized" state with a single // weak reference. let uninit_ptr: NonNull<_> = Box::leak(box ArcInner { - strong: atomic::AtomicUsize::new(0), - weak: atomic::AtomicUsize::new(1), + meta: ArcInnerMetadata::new_weak(), data: mem::MaybeUninit::::uninit(), }) .into(); @@ -398,7 +407,7 @@ impl Arc { // // These side effects do not impact us in any way, and no other side effects are // possible with safe code alone. 
- let prev_value = (*inner).strong.fetch_add(1, Release); + let prev_value = (*inner).meta.strong.fetch_add(1, Release); debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); } @@ -494,13 +503,7 @@ impl Arc { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn try_new(data: T) -> Result, AllocError> { - // Start the weak pointer count as 1 which is the weak pointer that's - // held by all the strong pointers (kinda), see std/rc.rs for more info - let x: Box<_> = Box::try_new(ArcInner { - strong: atomic::AtomicUsize::new(1), - weak: atomic::AtomicUsize::new(1), - data, - })?; + let x: Box<_> = Box::try_new(ArcInner { meta: ArcInnerMetadata::new_strong(), data })?; Ok(Self::from_inner(Box::leak(x).into())) } @@ -593,11 +596,11 @@ impl Arc { #[inline] #[stable(feature = "arc_unique", since = "1.4.0")] pub fn try_unwrap(this: Self) -> Result { - if this.inner().strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() { + if this.inner().meta.strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() { return Err(this); } - acquire!(this.inner().strong); + acquire!(this.inner().meta.strong); unsafe { let elem = ptr::read(&this.ptr.as_ref().data); @@ -842,14 +845,7 @@ impl Arc { /// ``` #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - unsafe { - let offset = data_offset(ptr); - - // Reverse the offset to find the original ArcInner. - let arc_ptr = (ptr as *mut ArcInner).set_ptr_value((ptr as *mut u8).offset(-offset)); - - Self::from_ptr(arc_ptr) - } + unsafe { Self::from_data_ptr(ptr).assume_init() } } /// Creates a new [`Weak`] pointer to this allocation. @@ -867,13 +863,13 @@ impl Arc { pub fn downgrade(this: &Self) -> Weak { // This Relaxed is OK because we're checking the value in the CAS // below. - let mut cur = this.inner().weak.load(Relaxed); + let mut cur = this.inner().meta.weak.load(Relaxed); loop { // check if the weak counter is currently "locked"; if so, spin. if cur == usize::MAX { hint::spin_loop(); - cur = this.inner().weak.load(Relaxed); + cur = this.inner().meta.weak.load(Relaxed); continue; } @@ -884,7 +880,7 @@ impl Arc { // Unlike with Clone(), we need this to be an Acquire read to // synchronize with the write coming from `is_unique`, so that the // events prior to that write happen before this read. - match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) { + match this.inner().meta.weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) { Ok(_) => { // Make sure we do not create a dangling Weak debug_assert!(!is_dangling(this.ptr.as_ptr())); @@ -918,7 +914,7 @@ impl Arc { #[inline] #[stable(feature = "arc_counts", since = "1.15.0")] pub fn weak_count(this: &Self) -> usize { - let cnt = this.inner().weak.load(SeqCst); + let cnt = this.inner().meta.weak.load(SeqCst); // If the weak count is currently locked, the value of the // count was 0 just before taking the lock. 
if cnt == usize::MAX { 0 } else { cnt - 1 } @@ -947,7 +943,7 @@ impl Arc { #[inline] #[stable(feature = "arc_counts", since = "1.15.0")] pub fn strong_count(this: &Self) -> usize { - this.inner().strong.load(SeqCst) + this.inner().meta.strong.load(SeqCst) } /// Increments the strong reference count on the `Arc` associated with the @@ -1112,8 +1108,8 @@ impl Arc { debug_assert_eq!(unsafe { Layout::for_value(&*inner) }, layout); unsafe { - ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1)); - ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1)); + ptr::write(&mut (*inner).meta.strong, atomic::AtomicUsize::new(1)); + ptr::write(&mut (*inner).meta.weak, atomic::AtomicUsize::new(1)); } Ok(inner) @@ -1152,6 +1148,24 @@ impl Arc { Self::from_ptr(ptr) } } + + /// # Safety + /// + /// The caller must ensure that the pointer points to the `data` field of a `Global` + /// allocation of type `ArcInner`. Depending on how the pointer was created, the + /// `meta` field might or might not be uninitialized. It's up to the caller to ensure + /// that this field is set to the correct value before the return value is unwrapped. + #[inline] + unsafe fn from_data_ptr(ptr: *const T) -> MaybeUninit { + unsafe { + let offset = data_offset(ptr); + + // Reverse the offset to find the original ArcInner. + let arc_ptr = (ptr as *mut ArcInner).set_ptr_value((ptr as *mut u8).offset(-offset)); + + MaybeUninit::new(Self::from_ptr(arc_ptr)) + } + } } impl Arc<[T]> { @@ -1276,7 +1290,7 @@ impl Clone for Arc { // another must already provide any required synchronization. // // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html) - let old_size = self.inner().strong.fetch_add(1, Relaxed); + let old_size = self.inner().meta.strong.fetch_add(1, Relaxed); // However we need to guard against massive refcounts in case someone // is `mem::forget`ing Arcs. If we don't do this the count can overflow @@ -1352,7 +1366,7 @@ impl Arc { // before release writes (i.e., decrements) to `strong`. Since we hold a // weak count, there's no chance the ArcInner itself could be // deallocated. - if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() { + if this.inner().meta.strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() { // Another strong pointer exists, so we must clone. // Pre-allocate memory to allow writing the cloned value directly. let mut arc = Self::new_uninit(); @@ -1361,7 +1375,7 @@ impl Arc { (**this).write_clone_into_raw(data.as_mut_ptr()); *this = arc.assume_init(); } - } else if this.inner().weak.load(Relaxed) != 1 { + } else if this.inner().meta.weak.load(Relaxed) != 1 { // Relaxed suffices in the above because this is fundamentally an // optimization: we are always racing with weak pointers being // dropped. Worst case, we end up allocated a new Arc unnecessarily. @@ -1388,7 +1402,7 @@ impl Arc { } else { // We were the sole reference of either kind; bump back up the // strong ref count. - this.inner().strong.store(1, Release); + this.inner().meta.strong.store(1, Release); } // As with `get_mut()`, the unsafety is ok because our reference was @@ -1484,16 +1498,16 @@ impl Arc { // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements // of the `weak` count (via `Weak::drop`, which uses release). If the upgraded // weak ref was never dropped, the CAS here will fail so we do not care to synchronize. 
- if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() { + if self.inner().meta.weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() { // This needs to be an `Acquire` to synchronize with the decrement of the `strong` // counter in `drop` -- the only access that happens when any but the last reference // is being dropped. - let unique = self.inner().strong.load(Acquire) == 1; + let unique = self.inner().meta.strong.load(Acquire) == 1; // The release write here synchronizes with a read in `downgrade`, // effectively preventing the above read of `strong` from happening // after the write. - self.inner().weak.store(1, Release); // release the lock + self.inner().meta.weak.store(1, Release); // release the lock unique } else { false @@ -1533,7 +1547,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc { // Because `fetch_sub` is already atomic, we do not need to synchronize // with other threads unless we are going to delete the object. This // same logic applies to the below `fetch_sub` to the `weak` count. - if self.inner().strong.fetch_sub(1, Release) != 1 { + if self.inner().meta.strong.fetch_sub(1, Release) != 1 { return; } @@ -1565,7 +1579,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc { // // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html) // [2]: (https://github.com/rust-lang/rust/pull/41714) - acquire!(self.inner().strong); + acquire!(self.inner().meta.strong); unsafe { self.drop_slow(); @@ -1883,7 +1897,7 @@ impl Weak { // is dropped, the data field will be dropped in-place). Some(unsafe { let ptr = self.ptr.as_ptr(); - WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak } + WeakInner { strong: &(*ptr).meta.strong, weak: &(*ptr).meta.weak } }) } } @@ -2507,6 +2521,8 @@ impl AsRef for Arc { #[stable(feature = "pin", since = "1.33.0")] impl Unpin for Arc {} +type ArcAllocator = PrefixAllocator; + /// Get the offset within an `ArcInner` for the payload behind a pointer. /// /// # Safety @@ -2514,17 +2530,12 @@ impl Unpin for Arc {} /// The pointer must point to (and have valid metadata for) a previously /// valid instance of T, but the T is allowed to be dropped. unsafe fn data_offset(ptr: *const T) -> isize { - // Align the unsized value to the end of the ArcInner. - // Because RcBox is repr(C), it will always be the last field in memory. - // SAFETY: since the only unsized types possible are slices, trait objects, - // and extern types, the input safety requirement is currently enough to - // satisfy the requirements of align_of_val_raw; this is an implementation - // detail of the language that may not be relied upon outside of std. - unsafe { data_offset_align(align_of_val_raw(ptr)) } -} - -#[inline] -fn data_offset_align(align: usize) -> isize { - let layout = Layout::new::>(); - (layout.size() + layout.padding_needed_for(align)) as isize + unsafe { + // SAFETY: since the only unsized types possible are slices, trait objects, + // and extern types, the input safety requirement is currently enough to + // satisfy the requirements of for_value_raw; this is an implementation + // detail of the language that may not be relied upon outside of std. 
+ let layout = Layout::for_value_raw(ptr); + ArcAllocator::prefix_offset(layout) as isize + } } diff --git a/src/etc/gdb_providers.py b/src/etc/gdb_providers.py index f0ce13b269c59..3ce10ceaf6994 100644 --- a/src/etc/gdb_providers.py +++ b/src/etc/gdb_providers.py @@ -146,8 +146,8 @@ def __init__(self, valobj, is_atomic=False): self.is_atomic = is_atomic self.ptr = unwrap_unique_or_non_null(valobj["ptr"]) self.value = self.ptr["data" if is_atomic else "value"] - self.strong = self.ptr["strong"]["v" if is_atomic else "value"]["value"] - self.weak = self.ptr["weak"]["v" if is_atomic else "value"]["value"] - 1 + self.strong = self.ptr["meta"]["strong"]["v" if is_atomic else "value"]["value"] + self.weak = self.ptr["meta"]["weak"]["v" if is_atomic else "value"]["value"] - 1 def to_string(self): if self.is_atomic: diff --git a/src/etc/lldb_providers.py b/src/etc/lldb_providers.py index 86dcc335e3cbf..6df5407371b7c 100644 --- a/src/etc/lldb_providers.py +++ b/src/etc/lldb_providers.py @@ -603,10 +603,10 @@ def __init__(self, valobj, dict, is_atomic=False): self.value = self.ptr.GetChildMemberWithName("data" if is_atomic else "value") - self.strong = self.ptr.GetChildMemberWithName("strong").GetChildAtIndex( - 0).GetChildMemberWithName("value") - self.weak = self.ptr.GetChildMemberWithName("weak").GetChildAtIndex( - 0).GetChildMemberWithName("value") + self.strong = self.ptr.GetChildMemberWithName("meta")\ + .GetChildMemberWithName("strong").GetChildAtIndex(0).GetChildMemberWithName("value") + self.weak = self.ptr.GetChildMemberWithName("meta")\ + .GetChildMemberWithName("weak").GetChildAtIndex(0).GetChildMemberWithName("value")) self.value_builder = ValueBuilder(valobj) From fba9038565ed174d98c3ed095c400bea7de52a67 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Tue, 20 Apr 2021 13:13:47 +0200 Subject: [PATCH 03/20] Fix parameter order for allocator --- library/alloc/src/rc.rs | 14 +++++++------- library/alloc/src/sync.rs | 14 +++++++------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index a5fd3dc81f9a3..0dee5f80f804d 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -274,12 +274,12 @@ use crate::vec::Vec; #[cfg(test)] mod tests; -struct RcBoxMetadata { +struct RcMetadata { strong: Cell, weak: Cell, } -impl RcBoxMetadata { +impl RcMetadata { // There is an implicit weak pointer owned by all the strong // pointers, which ensures that the weak destructor never frees // the allocation while the strong destructor is running, even @@ -300,7 +300,7 @@ impl RcBoxMetadata { // inner types. #[repr(C)] struct RcBox { - meta: RcBoxMetadata, + meta: RcMetadata, value: T, } @@ -363,7 +363,7 @@ impl Rc { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn new(value: T) -> Rc { - Self::from_inner(Box::leak(box RcBox { meta: RcBoxMetadata::new_strong(), value }).into()) + Self::from_inner(Box::leak(box RcBox { meta: RcMetadata::new_strong(), value }).into()) } /// Constructs a new `Rc` using a weak reference to itself. Attempting @@ -395,7 +395,7 @@ impl Rc { // Construct the inner in the "uninitialized" state with a single // weak reference. let uninit_ptr: NonNull<_> = Box::leak(box RcBox { - meta: RcBoxMetadata::new_weak(), + meta: RcMetadata::new_weak(), value: mem::MaybeUninit::::uninit(), }) .into(); @@ -510,7 +510,7 @@ impl Rc { // the allocation while the strong destructor is running, even // if the weak pointer is stored inside the strong one. 
Ok(Self::from_inner( - Box::leak(Box::try_new(RcBox { meta: RcBoxMetadata::new_strong(), value })?).into(), + Box::leak(Box::try_new(RcBox { meta: RcMetadata::new_strong(), value })?).into(), )) } @@ -2479,7 +2479,7 @@ impl AsRef for Rc { #[stable(feature = "pin", since = "1.33.0")] impl Unpin for Rc {} -type RcAllocator = PrefixAllocator; +type RcAllocator = PrefixAllocator; /// Get the offset within an `RcBox` for the payload behind a pointer. /// diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index fe9dc657c8595..866c33003aae7 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -297,7 +297,7 @@ impl fmt::Debug for Weak { } } -struct ArcInnerMetadata { +struct ArcMetadata { strong: atomic::AtomicUsize, // the value usize::MAX acts as a sentinel for temporarily "locking" the @@ -306,7 +306,7 @@ struct ArcInnerMetadata { weak: atomic::AtomicUsize, } -impl ArcInnerMetadata { +impl ArcMetadata { #[inline] fn new_strong() -> Self { Self { strong: atomic::AtomicUsize::new(1), weak: atomic::AtomicUsize::new(1) } @@ -323,7 +323,7 @@ impl ArcInnerMetadata { // inner types. #[repr(C)] struct ArcInner { - meta: ArcInnerMetadata, + meta: ArcMetadata, data: T, } @@ -343,7 +343,7 @@ impl Arc { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn new(data: T) -> Arc { - let x: Box<_> = box ArcInner { meta: ArcInnerMetadata::new_strong(), data }; + let x: Box<_> = box ArcInner { meta: ArcMetadata::new_strong(), data }; Self::from_inner(Box::leak(x).into()) } @@ -373,7 +373,7 @@ impl Arc { // Construct the inner in the "uninitialized" state with a single // weak reference. let uninit_ptr: NonNull<_> = Box::leak(box ArcInner { - meta: ArcInnerMetadata::new_weak(), + meta: ArcMetadata::new_weak(), data: mem::MaybeUninit::::uninit(), }) .into(); @@ -503,7 +503,7 @@ impl Arc { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn try_new(data: T) -> Result, AllocError> { - let x: Box<_> = Box::try_new(ArcInner { meta: ArcInnerMetadata::new_strong(), data })?; + let x: Box<_> = Box::try_new(ArcInner { meta: ArcMetadata::new_strong(), data })?; Ok(Self::from_inner(Box::leak(x).into())) } @@ -2521,7 +2521,7 @@ impl AsRef for Arc { #[stable(feature = "pin", since = "1.33.0")] impl Unpin for Arc {} -type ArcAllocator = PrefixAllocator; +type ArcAllocator = PrefixAllocator; /// Get the offset within an `ArcInner` for the payload behind a pointer. 
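`data_offset` now builds the payload layout with `Layout::for_value_raw` and asks the prefix allocator for the offset. A small sketch on stable Rust, using `Layout::for_value` and an illustrative two-word metadata layout, showing that the offset depends only on the payload's alignment:

```rust
use std::alloc::Layout;
use std::fmt::Debug;

fn main() {
    // `Layout::for_value` also works for unsized values; it is the stable
    // counterpart of the `for_value_raw` call used in `data_offset`.
    let slice: &[u64] = &[1, 2, 3];
    let dyn_obj: &dyn Debug = &0u8;
    let slice_layout = Layout::for_value(slice);
    let dyn_layout = Layout::for_value(dyn_obj);
    assert_eq!(slice_layout.size(), 3 * std::mem::size_of::<u64>());
    assert_eq!(dyn_layout.size(), 1);

    // Stand-in for the metadata prefix (two words, like the strong/weak pair).
    let meta = Layout::new::<[usize; 2]>();

    // The offset past the prefix depends only on the payload's alignment, not
    // on its length, so a slice of three u64s and a single u64 agree.
    let offset_slice = meta.extend(slice_layout).unwrap().1;
    let offset_one = meta.extend(Layout::new::<u64>()).unwrap().1;
    assert_eq!(offset_slice, offset_one);
    println!("payload offset behind the prefix: {offset_slice}");
}
```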
/// From 4b0dafd69e918c80e1c293153b598e3d45cee853 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Wed, 21 Apr 2021 18:01:20 +0200 Subject: [PATCH 04/20] Refactor `Rc` to use `PrefixAllocator` and store pointer to `T` directly --- library/alloc/src/rc.rs | 787 ++++++++++++++----------------- library/core/src/alloc/helper.rs | 33 +- 2 files changed, 380 insertions(+), 440 deletions(-) diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index 0dee5f80f804d..bc7c623f72b7e 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -247,7 +247,7 @@ use crate::boxed::Box; #[cfg(test)] use std::boxed::Box; -use core::alloc::helper::PrefixAllocator; +use core::alloc::helper::{AllocInit, PrefixAllocator}; use core::any::Any; use core::borrow; use core::cell::Cell; @@ -258,7 +258,7 @@ use core::hash::{Hash, Hasher}; use core::intrinsics::abort; use core::iter; use core::marker::{self, PhantomData, Unpin, Unsize}; -use core::mem::{self, forget, size_of_val, MaybeUninit}; +use core::mem::{self, forget}; use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver}; use core::pin::Pin; use core::ptr::{self, NonNull}; @@ -274,7 +274,11 @@ use crate::vec::Vec; #[cfg(test)] mod tests; -struct RcMetadata { +/// Metadata `Rc` and `Weak` to be allocated as prefix. +#[unstable(feature = "allocator_api_internals", issue = "none")] +#[derive(Debug, Clone)] +#[doc(hidden)] +pub struct RcMetadata { strong: Cell, weak: Cell, } @@ -293,17 +297,61 @@ impl RcMetadata { fn new_weak() -> Self { Self { strong: Cell::new(0), weak: Cell::new(1) } } -} -// This is repr(C) to future-proof against possible field-reordering, which -// would interfere with otherwise safe [into|from]_raw() of transmutable -// inner types. -#[repr(C)] -struct RcBox { - meta: RcMetadata, - value: T, + #[inline] + fn strong(&self) -> usize { + self.strong.get() + } + + #[inline] + fn inc_strong(&self) { + let strong = self.strong(); + + // We want to abort on overflow instead of dropping the value. + // The reference count will never be zero when this is called; + // nevertheless, we insert an abort here to hint LLVM at + // an otherwise missed optimization. + if strong == 0 || strong == usize::MAX { + abort(); + } + self.strong.set(strong + 1); + } + + #[inline] + fn dec_strong(&self) { + self.strong.set(self.strong() - 1); + } + + #[inline] + fn weak(&self) -> usize { + self.weak.get() + } + + #[inline] + fn inc_weak(&self) { + let weak = self.weak(); + + // We want to abort on overflow instead of dropping the value. + // The reference count will never be zero when this is called; + // nevertheless, we insert an abort here to hint LLVM at + // an otherwise missed optimization. + if weak == 0 || weak == usize::MAX { + abort(); + } + self.weak.set(weak + 1); + } + + #[inline] + fn dec_weak(&self) { + self.weak.set(self.weak() - 1); + } } +/// Allocator used for `Rc` and `Weak`. +#[unstable(feature = "allocator_api_internals", issue = "none")] +#[doc(hidden)] +pub type RcAllocator = PrefixAllocator; + /// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference /// Counted'. 
/// @@ -317,8 +365,9 @@ struct RcBox { #[cfg_attr(not(test), rustc_diagnostic_item = "Rc")] #[stable(feature = "rust1", since = "1.0.0")] pub struct Rc { - ptr: NonNull>, - phantom: PhantomData>, + ptr: NonNull, + alloc: RcAllocator, + _marker: PhantomData, } #[stable(feature = "rust1", since = "1.0.0")] @@ -334,20 +383,8 @@ impl, U: ?Sized> DispatchFromDyn> for Rc {} impl Rc { #[inline(always)] - fn inner(&self) -> &RcBox { - // This unsafety is ok because while this Rc is alive we're guaranteed - // that the inner pointer is valid. - unsafe { self.ptr.as_ref() } - } - - #[inline] - fn from_inner(ptr: NonNull>) -> Self { - Self { ptr, phantom: PhantomData } - } - - #[inline] - unsafe fn from_ptr(ptr: *mut RcBox) -> Self { - Self::from_inner(unsafe { NonNull::new_unchecked(ptr) }) + fn metadata(&self) -> &RcMetadata { + unsafe { RcAllocator::::prefix(self.ptr).as_ref() } } } @@ -361,9 +398,10 @@ impl Rc { /// /// let five = Rc::new(5); /// ``` + #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn new(value: T) -> Rc { - Self::from_inner(Box::leak(box RcBox { meta: RcMetadata::new_strong(), value }).into()) + Self::try_new(value).unwrap_or_else(|_| handle_alloc_error(Layout::new::())) } /// Constructs a new `Rc` using a weak reference to itself. Attempting @@ -390,19 +428,52 @@ impl Rc { /// } /// } /// ``` + #[inline] #[unstable(feature = "arc_new_cyclic", issue = "75861")] pub fn new_cyclic(data_fn: impl FnOnce(&Weak) -> T) -> Rc { - // Construct the inner in the "uninitialized" state with a single - // weak reference. - let uninit_ptr: NonNull<_> = Box::leak(box RcBox { - meta: RcMetadata::new_weak(), - value: mem::MaybeUninit::::uninit(), - }) - .into(); + Self::try_new_cyclic(data_fn).unwrap_or_else(|_| handle_alloc_error(Layout::new::())) + } - let init_ptr: NonNull> = uninit_ptr.cast(); + /// Tries to construct a new `Rc` using a weak reference to itself. Attempting + /// to upgrade the weak reference before this function returns will result + /// in a `None` value. However, the weak reference may be cloned freely and + /// stored for use at a later time. + /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api, arc_new_cyclic)] + /// #![allow(dead_code)] + /// use std::alloc::AllocError; + /// use std::rc::{Rc, Weak}; + /// + /// struct Gadget { + /// self_weak: Weak, + /// // ... more fields + /// } + /// impl Gadget { + /// pub fn new() -> Result, AllocError> { + /// Rc::try_new_cyclic(|self_weak| { + /// Gadget { self_weak: self_weak.clone(), /* ... */ } + /// }) + /// } + /// } + /// ``` + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "arc_new_cyclic", issue = "75861")] + pub fn try_new_cyclic(data_fn: impl FnOnce(&Weak) -> T) -> Result, AllocError> { + let alloc = RcAllocator::new(Global); + let ptr = Self::try_allocate( + &alloc, + Layout::new::(), + RcMetadata::new_weak(), + AllocInit::Uninitialized, + NonNull::cast, + )?; - let weak = Weak { ptr: init_ptr }; + // Strong references should collectively own a shared weak reference, + // so don't run the destructor for our old weak reference. + let weak = mem::ManuallyDrop::new(Weak { ptr, alloc }); // It's important we don't give up ownership of the weak pointer, or // else the memory might be freed by the time `data_fn` returns. If @@ -410,23 +481,15 @@ impl Rc { // weak pointer for ourselves, but this would result in additional // updates to the weak reference count which might not be necessary // otherwise. 
- let data = data_fn(&weak); - unsafe { - let inner = init_ptr.as_ptr(); - ptr::write(ptr::addr_of_mut!((*inner).value), data); - - let prev_value = (*inner).meta.strong.get(); - debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); - (*inner).meta.strong.set(1); + ptr.as_ptr().write(data_fn(&weak)); } - let strong = Rc::from_inner(init_ptr); + let meta = unsafe { RcAllocator::::prefix(ptr).as_ref() }; + debug_assert_eq!(meta.strong.get(), 0, "No prior strong references should exist"); + meta.strong.set(1); - // Strong references should collectively own a shared weak reference, - // so don't run the destructor for our old weak reference. - mem::forget(weak); - strong + unsafe { Ok(Rc::from_raw_in(ptr.as_ptr(), weak.alloc)) } } /// Constructs a new `Rc` with uninitialized contents. @@ -450,15 +513,10 @@ impl Rc { /// /// assert_eq!(*five, 5) /// ``` + #[inline] #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit() -> Rc> { - unsafe { - Rc::from_ptr(Rc::allocate_for_layout( - Layout::new::(), - |layout| Global.allocate(layout), - |mem| mem as *mut RcBox>, - )) - } + Self::try_new_uninit().unwrap_or_else(|_| handle_alloc_error(Layout::new::())) } /// Constructs a new `Rc` with uninitialized contents, with the memory @@ -481,15 +539,10 @@ impl Rc { /// ``` /// /// [zeroed]: mem::MaybeUninit::zeroed + #[inline] #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed() -> Rc> { - unsafe { - Rc::from_ptr(Rc::allocate_for_layout( - Layout::new::(), - |layout| Global.allocate_zeroed(layout), - |mem| mem as *mut RcBox>, - )) - } + Self::try_new_zeroed().unwrap_or_else(|_| handle_alloc_error(Layout::new::())) } /// Constructs a new `Rc`, returning an error if the allocation fails @@ -503,15 +556,14 @@ impl Rc { /// let five = Rc::try_new(5); /// # Ok::<(), std::alloc::AllocError>(()) /// ``` + #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub fn try_new(value: T) -> Result, AllocError> { - // There is an implicit weak pointer owned by all the strong - // pointers, which ensures that the weak destructor never frees - // the allocation while the strong destructor is running, even - // if the weak pointer is stored inside the strong one. 
- Ok(Self::from_inner( - Box::leak(Box::try_new(RcBox { meta: RcMetadata::new_strong(), value })?).into(), - )) + let mut rc = Self::try_new_uninit()?; + unsafe { + Rc::get_mut_unchecked(&mut rc).as_mut_ptr().write(value); + Ok(rc.assume_init()) + } } /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails @@ -539,13 +591,16 @@ impl Rc { #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_uninit() -> Result>, AllocError> { - unsafe { - Ok(Rc::from_ptr(Rc::try_allocate_for_layout( - Layout::new::(), - |layout| Global.allocate(layout), - |mem| mem as *mut RcBox>, - )?)) - } + let alloc = RcAllocator::new(Global); + let layout = Layout::new::(); + let ptr = Self::try_allocate( + &alloc, + layout, + RcMetadata::new_strong(), + AllocInit::Uninitialized, + NonNull::cast, + )?; + unsafe { Ok(Rc::from_raw_in(ptr.as_ptr().cast(), alloc)) } } /// Constructs a new `Rc` with uninitialized contents, with the memory @@ -572,16 +627,21 @@ impl Rc { #[unstable(feature = "allocator_api", issue = "32838")] //#[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_zeroed() -> Result>, AllocError> { - unsafe { - Ok(Rc::from_ptr(Rc::try_allocate_for_layout( - Layout::new::(), - |layout| Global.allocate_zeroed(layout), - |mem| mem as *mut RcBox>, - )?)) - } + let alloc = RcAllocator::new(Global); + let layout = Layout::new::(); + let ptr = Self::try_allocate( + &alloc, + layout, + RcMetadata::new_strong(), + AllocInit::Zeroed, + NonNull::cast, + )?; + unsafe { Ok(Rc::from_raw_in(ptr.as_ptr().cast(), alloc)) } } + /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then /// `value` will be pinned in memory and unable to be moved. + #[inline] #[stable(feature = "pin", since = "1.33.0")] pub fn pin(value: T) -> Pin> { unsafe { Pin::new_unchecked(Rc::new(value)) } @@ -610,16 +670,16 @@ impl Rc { #[stable(feature = "rc_unique", since = "1.4.0")] pub fn try_unwrap(this: Self) -> Result { if Rc::strong_count(&this) == 1 { + let this = mem::ManuallyDrop::new(this); unsafe { - let val = ptr::read(&*this); // copy the contained object + let val = ptr::read(&**this); // copy the contained object // Indicate to Weaks that they can't be promoted by decrementing // the strong count, and then remove the implicit "strong weak" // pointer while also handling drop logic by just crafting a // fake Weak. 
- this.inner().dec_strong(); - let _weak = Weak { ptr: this.ptr }; - forget(this); + this.metadata().dec_strong(); + let _weak = Weak { ptr: this.ptr, alloc: this.alloc }; Ok(val) } } else { @@ -654,7 +714,15 @@ impl Rc<[T]> { /// ``` #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit]> { - unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) } + let alloc = RcAllocator::new(Global); + let ptr = Rc::allocate( + &alloc, + Layout::array::(len).unwrap(), + RcMetadata::new_strong(), + AllocInit::Uninitialized, + |ptr| NonNull::slice_from_raw_parts(ptr.cast(), len), + ); + unsafe { Rc::from_raw_in(ptr.as_ptr(), alloc) } } /// Constructs a new reference-counted slice with uninitialized contents, with the memory being @@ -679,16 +747,15 @@ impl Rc<[T]> { /// [zeroed]: mem::MaybeUninit::zeroed #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit]> { - unsafe { - Rc::from_ptr(Rc::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| Global.allocate_zeroed(layout), - |mem| { - ptr::slice_from_raw_parts_mut(mem as *mut T, len) - as *mut RcBox<[mem::MaybeUninit]> - }, - )) - } + let alloc = RcAllocator::new(Global); + let ptr = Rc::allocate( + &alloc, + Layout::array::(len).unwrap(), + RcMetadata::new_strong(), + AllocInit::Zeroed, + |ptr| NonNull::slice_from_raw_parts(ptr.cast(), len), + ); + unsafe { Rc::from_raw_in(ptr.as_ptr(), alloc) } } } @@ -724,10 +791,13 @@ impl Rc> { /// /// assert_eq!(*five, 5) /// ``` - #[unstable(feature = "new_uninit", issue = "63291")] #[inline] + #[unstable(feature = "new_uninit", issue = "63291")] pub unsafe fn assume_init(self) -> Rc { - Rc::from_inner(mem::ManuallyDrop::new(self).ptr.cast()) + let this = mem::ManuallyDrop::new(self); + let ptr = this.ptr; + let alloc = this.alloc; + unsafe { Rc::from_raw_in(ptr.cast().as_ptr(), alloc) } } } @@ -765,10 +835,16 @@ impl Rc<[mem::MaybeUninit]> { /// /// assert_eq!(*values, [1, 2, 3]) /// ``` - #[unstable(feature = "new_uninit", issue = "63291")] #[inline] + #[unstable(feature = "new_uninit", issue = "63291")] pub unsafe fn assume_init(self) -> Rc<[T]> { - unsafe { Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) } + let this = mem::ManuallyDrop::new(self); + let len = this.ptr.len(); + let ptr = unsafe { + NonNull::slice_from_raw_parts(NonNull::new_unchecked(this.ptr.as_mut_ptr().cast()), len) + }; + let alloc = this.alloc; + unsafe { Rc::from_raw_in(ptr.as_ptr(), alloc) } } } @@ -789,11 +865,11 @@ impl Rc { /// let x_ptr = Rc::into_raw(x); /// assert_eq!(unsafe { &*x_ptr }, "hello"); /// ``` + #[inline] #[stable(feature = "rc_raw", since = "1.17.0")] pub fn into_raw(this: Self) -> *const T { - let ptr = Self::as_ptr(&this); - mem::forget(this); - ptr + let this = mem::ManuallyDrop::new(this); + Self::as_ptr(&this) } /// Provides a raw pointer to the data. @@ -812,14 +888,10 @@ impl Rc { /// assert_eq!(x_ptr, Rc::as_ptr(&y)); /// assert_eq!(unsafe { &*x_ptr }, "hello"); /// ``` + #[inline(always)] #[stable(feature = "weak_into_raw", since = "1.45.0")] pub fn as_ptr(this: &Self) -> *const T { - let ptr: *mut RcBox = NonNull::as_ptr(this.ptr); - - // SAFETY: This cannot go through Deref::deref or Rc::inner because - // this is required to retain raw/mut provenance such that e.g. `get_mut` can - // write through the pointer after the Rc is recovered through `from_raw`. - unsafe { ptr::addr_of_mut!((*ptr).value) } + this.ptr.as_ptr() } /// Constructs an `Rc` from a raw pointer. 
@@ -859,9 +931,17 @@ impl Rc { /// /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling! /// ``` + #[inline] #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - unsafe { Self::from_data_ptr(ptr).assume_init() } + unsafe { Self::from_raw_in(ptr, RcAllocator::new(Global)) } + } + + /// Constructs an `Rc` from a raw pointer. + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn from_raw_in(ptr: *const T, alloc: RcAllocator) -> Self { + Self { ptr: unsafe { NonNull::new_unchecked(ptr as *mut T) }, alloc, _marker: PhantomData } } /// Creates a new [`Weak`] pointer to this allocation. @@ -875,12 +955,13 @@ impl Rc { /// /// let weak_five = Rc::downgrade(&five); /// ``` + #[inline] #[stable(feature = "rc_weak", since = "1.4.0")] pub fn downgrade(this: &Self) -> Weak { - this.inner().inc_weak(); + this.metadata().inc_weak(); // Make sure we do not create a dangling Weak debug_assert!(!is_dangling(this.ptr.as_ptr())); - Weak { ptr: this.ptr } + Weak { ptr: this.ptr, alloc: this.alloc.clone() } } /// Gets the number of [`Weak`] pointers to this allocation. @@ -898,7 +979,7 @@ impl Rc { #[inline] #[stable(feature = "rc_counts", since = "1.15.0")] pub fn weak_count(this: &Self) -> usize { - this.inner().weak() - 1 + this.metadata().weak() - 1 } /// Gets the number of strong (`Rc`) pointers to this allocation. @@ -916,7 +997,7 @@ impl Rc { #[inline] #[stable(feature = "rc_counts", since = "1.15.0")] pub fn strong_count(this: &Self) -> usize { - this.inner().strong() + this.metadata().strong() } /// Increments the strong reference count on the `Rc` associated with the @@ -947,7 +1028,8 @@ impl Rc { #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")] pub unsafe fn increment_strong_count(ptr: *const T) { // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop - let rc = unsafe { mem::ManuallyDrop::new(Rc::::from_raw(ptr)) }; + let rc = unsafe { mem::ManuallyDrop::new(Self::from_raw(ptr)) }; + debug_assert_ne!(Self::strong_count(&rc), 0, "the strong count must be at least 1"); // Now increase refcount, but don't drop new refcount either let _rc_clone: mem::ManuallyDrop<_> = rc.clone(); } @@ -983,14 +1065,14 @@ impl Rc { #[inline] #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")] pub unsafe fn decrement_strong_count(ptr: *const T) { - unsafe { mem::drop(Rc::from_raw(ptr)) }; + unsafe { mem::drop(Self::from_raw(ptr)) }; } /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to /// this allocation. 
#[inline] fn is_unique(this: &Self) -> bool { - Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1 + Self::weak_count(this) == 0 && Self::strong_count(this) == 1 } /// Returns a mutable reference into the given `Rc`, if there are @@ -1020,7 +1102,7 @@ impl Rc { #[inline] #[stable(feature = "rc_unique", since = "1.4.0")] pub fn get_mut(this: &mut Self) -> Option<&mut T> { - if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None } + if Self::is_unique(this) { unsafe { Some(Self::get_mut_unchecked(this)) } } else { None } } /// Returns a mutable reference into the given `Rc`, @@ -1053,13 +1135,12 @@ impl Rc { #[inline] #[unstable(feature = "get_mut_unchecked", issue = "63292")] pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T { + debug_assert!(Self::is_unique(this), "The pointer has to be unique"); // We are careful to *not* create a reference covering the "count" fields, as // this would conflict with accesses to the reference counts (e.g. by `Weak`). - unsafe { &mut (*this.ptr.as_ptr()).value } + unsafe { this.ptr.as_mut() } } - #[inline] - #[stable(feature = "ptr_eq", since = "1.17.0")] /// Returns `true` if the two `Rc`s point to the same allocation /// (in a vein similar to [`ptr::eq`]). /// @@ -1077,8 +1158,10 @@ impl Rc { /// ``` /// /// [`ptr::eq`]: core::ptr::eq + #[inline] + #[stable(feature = "ptr_eq", since = "1.17.0")] pub fn ptr_eq(this: &Self, other: &Self) -> bool { - this.ptr.as_ptr() == other.ptr.as_ptr() + this.ptr == other.ptr } } @@ -1134,7 +1217,7 @@ impl Rc { #[inline] #[stable(feature = "rc_unique", since = "1.4.0")] pub fn make_mut(this: &mut Self) -> &mut T { - if Rc::strong_count(this) != 1 { + if Self::strong_count(this) != 1 { // Gotta clone the data, there are other Rcs. // Pre-allocate memory to allow writing the cloned value directly. let mut rc = Self::new_uninit(); @@ -1150,10 +1233,10 @@ impl Rc { let data = Rc::get_mut_unchecked(&mut rc); data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1); - this.inner().dec_strong(); + this.metadata().dec_strong(); // Remove implicit strong-weak ref (no need to craft a fake // Weak here -- we know other Weaks can clean up for us) - this.inner().dec_weak(); + this.metadata().dec_weak(); ptr::write(this, rc.assume_init()); } } @@ -1162,13 +1245,11 @@ impl Rc { // reference count is guaranteed to be 1 at this point, and we required // the `Rc` itself to be `mut`, so we're returning the only possible // reference to the allocation. - unsafe { &mut this.ptr.as_mut().value } + unsafe { Self::get_mut_unchecked(this) } } } impl Rc { - #[inline] - #[stable(feature = "rc_downcast", since = "1.29.0")] /// Attempt to downcast the `Rc` to a concrete type. /// /// # Examples @@ -1187,11 +1268,14 @@ impl Rc { /// print_if_string(Rc::new(my_string)); /// print_if_string(Rc::new(0i8)); /// ``` + #[inline] + #[stable(feature = "rc_downcast", since = "1.29.0")] pub fn downcast(self) -> Result, Rc> { if (*self).is::() { - let ptr = self.ptr.cast::>(); - forget(self); - Ok(Rc::from_inner(ptr)) + let this = mem::ManuallyDrop::new(self); + let ptr = this.ptr; + let alloc = this.alloc; + unsafe { Ok(Rc::from_raw_in(ptr.cast().as_ptr(), alloc)) } } else { Err(self) } @@ -1200,24 +1284,21 @@ impl Rc { impl Rc { /// Allocates an `RcBox` with sufficient space for - /// a possibly-unsized inner value where the value has the layout provided. + /// a possibly-unsized inner value where the value has the layout provided, + /// returning an error if allocation fails. 
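// Illustrative sketch of the clone-on-write behaviour that `make_mut` above keeps:
// a unique `Rc` is mutated in place, a shared one is cloned first.
use std::rc::Rc;

fn make_mut_sketch() {
    let mut data = Rc::new(5);
    *Rc::make_mut(&mut data) += 1; // unique: mutated in place
    let other = Rc::clone(&data);
    *Rc::make_mut(&mut data) += 1; // shared: `data` moves to a fresh allocation
    assert_eq!(*data, 7);
    assert_eq!(*other, 6);
    assert!(!Rc::ptr_eq(&data, &other));
}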
/// /// The function `mem_to_rcbox` is called with the data pointer /// and must return back a (potentially fat)-pointer for the `RcBox`. - unsafe fn allocate_for_layout( - value_layout: Layout, - allocate: impl FnOnce(Layout) -> Result, AllocError>, - mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox, - ) -> *mut RcBox { - // Calculate layout using the given value layout. - // Previously, layout was calculated on the expression - // `&*(ptr as *const RcBox)`, but this created a misaligned - // reference (see #54908). - let layout = Layout::new::>().extend(value_layout).unwrap().0.pad_to_align(); - unsafe { - Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rcbox) - .unwrap_or_else(|_| handle_alloc_error(layout)) - } + #[inline] + fn allocate( + alloc: &RcAllocator, + layout: Layout, + meta: RcMetadata, + init: AllocInit, + mem_to_ptr: impl FnOnce(NonNull) -> NonNull, + ) -> NonNull { + Self::try_allocate(alloc, layout, meta, init, mem_to_ptr) + .unwrap_or_else(|_| handle_alloc_error(layout)) } /// Allocates an `RcBox` with sufficient space for @@ -1227,104 +1308,71 @@ impl Rc { /// The function `mem_to_rcbox` is called with the data pointer /// and must return back a (potentially fat)-pointer for the `RcBox`. #[inline] - unsafe fn try_allocate_for_layout( - value_layout: Layout, - allocate: impl FnOnce(Layout) -> Result, AllocError>, - mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox, - ) -> Result<*mut RcBox, AllocError> { - // Calculate layout using the given value layout. - // Previously, layout was calculated on the expression - // `&*(ptr as *const RcBox)`, but this created a misaligned - // reference (see #54908). - let layout = Layout::new::>().extend(value_layout).unwrap().0.pad_to_align(); - - // Allocate for the layout. - let ptr = allocate(layout)?; - - // Initialize the RcBox - let inner = mem_to_rcbox(ptr.as_non_null_ptr().as_ptr()); - unsafe { - debug_assert_eq!(Layout::for_value(&*inner), layout); - - ptr::write(&mut (*inner).meta.strong, Cell::new(1)); - ptr::write(&mut (*inner).meta.weak, Cell::new(1)); - } - - Ok(inner) - } + fn try_allocate( + alloc: &RcAllocator, + layout: Layout, + meta: RcMetadata, + init: AllocInit, + mem_to_ptr: impl FnOnce(NonNull) -> NonNull, + ) -> Result, AllocError> { + let memory = match init { + AllocInit::Uninitialized => alloc.allocate(layout)?, + AllocInit::Zeroed => alloc.allocate_zeroed(layout)?, + }; - /// Allocates an `RcBox` with sufficient space for an unsized inner value - unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox { - // Allocate for the `RcBox` using the given value. 
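// Illustrative sketch of the layout arithmetic behind `try_allocate`: extending the
// metadata layout by the value layout yields both the combined allocation layout and
// the offset at which the value starts. `MetaSketch` is a made-up stand-in for the
// private two-counter metadata type.
use std::alloc::Layout;
use std::cell::Cell;

struct MetaSketch {
    _strong: Cell<usize>,
    _weak: Cell<usize>,
}

fn layout_sketch() {
    let value_layout = Layout::new::<[u64; 4]>();
    let (alloc_layout, value_offset) =
        Layout::new::<MetaSketch>().extend(value_layout).unwrap();
    // The value sits right after the metadata here, since no extra padding is needed.
    assert_eq!(value_offset, Layout::new::<MetaSketch>().size());
    assert!(alloc_layout.size() >= value_offset + value_layout.size());
}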
+ let ptr = mem_to_ptr(memory.as_non_null_ptr()); unsafe { - Self::allocate_for_layout( - Layout::for_value(&*ptr), - |layout| Global.allocate(layout), - |mem| (ptr as *mut RcBox).set_ptr_value(mem), - ) + RcAllocator::::prefix(ptr).as_ptr().write(meta); } + Ok(ptr) } fn from_box(v: Box) -> Rc { unsafe { let (box_unique, alloc) = Box::into_unique(v); let bptr = box_unique.as_ptr(); - - let value_size = size_of_val(&*bptr); - let ptr = Self::allocate_for_ptr(bptr); + let rc_alloc = RcAllocator::new(alloc); + + let layout = Layout::for_value(&*bptr); + let ptr = Self::allocate( + &rc_alloc, + layout, + RcMetadata::new_strong(), + AllocInit::Uninitialized, + |mem| NonNull::new_unchecked(bptr.set_ptr_value(mem.as_ptr())), + ); // Copy value as bytes ptr::copy_nonoverlapping( bptr as *const T as *const u8, - &mut (*ptr).value as *mut _ as *mut u8, - value_size, + ptr.as_ptr() as *mut u8, + layout.size(), ); // Free the allocation without dropping its contents - box_free(box_unique, alloc); + box_free(box_unique, &rc_alloc.parent); - Self::from_ptr(ptr) + Self::from_raw_in(ptr.as_ptr(), rc_alloc) } } - - /// # Safety - /// - /// The caller must ensure that the pointer points to the `value` field of a `Global` - /// allocation of type `RcBox`. Depending on how the pointer was created, the - /// `meta` field might or might not be uninitialized. It's up to the caller to ensure - /// that this field is set to the correct value before the return value is unwrapped. - #[inline] - unsafe fn from_data_ptr(ptr: *const T) -> MaybeUninit { - let offset = unsafe { data_offset(ptr) }; - - // Reverse the offset to find the original RcBox. - let rc_ptr = - unsafe { (ptr as *mut RcBox).set_ptr_value((ptr as *mut u8).offset(-offset)) }; - - unsafe { MaybeUninit::new(Self::from_ptr(rc_ptr)) } - } } impl Rc<[T]> { - /// Allocates an `RcBox<[T]>` with the given length. - unsafe fn allocate_for_slice(len: usize) -> *mut RcBox<[T]> { - unsafe { - Self::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| Global.allocate(layout), - |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>, - ) - } - } - /// Copy elements from slice into newly allocated Rc<\[T\]> /// /// Unsafe because the caller must either take ownership or bind `T: Copy` unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> { + let alloc = RcAllocator::new(Global); + let ptr = Self::allocate( + &alloc, + Layout::array::(v.len()).unwrap(), + RcMetadata::new_strong(), + AllocInit::Uninitialized, + |ptr| NonNull::slice_from_raw_parts(ptr.cast(), v.len()), + ); unsafe { - let ptr = Self::allocate_for_slice(v.len()); - ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len()); - Self::from_ptr(ptr) + ptr::copy_nonoverlapping(v.as_ptr(), ptr.as_non_null_ptr().as_ptr(), v.len()); + Self::from_raw_in(ptr.as_ptr(), alloc) } } @@ -1335,44 +1383,47 @@ impl Rc<[T]> { // Panic guard while cloning T elements. // In the event of a panic, elements that have been written // into the new RcBox will be dropped, then the memory freed. 
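// Illustrative usage sketch: for `Copy` element types, the public `From<&[T]>`
// conversion bottoms out in `copy_from_slice` above, producing a fresh
// reference-counted slice with the elements copied in.
use std::rc::Rc;

fn slice_sketch() {
    let source = [1u32, 2, 3];
    let shared: Rc<[u32]> = Rc::from(&source[..]);
    assert_eq!(&*shared, &[1, 2, 3]);
}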
- struct Guard { - mem: NonNull, - elems: *mut T, + struct Guard { + ptr: NonNull, layout: Layout, n_elems: usize, + alloc: A, } - impl Drop for Guard { + impl Drop for Guard { fn drop(&mut self) { unsafe { - let slice = from_raw_parts_mut(self.elems, self.n_elems); + let slice = from_raw_parts_mut(self.ptr.as_ptr(), self.n_elems); ptr::drop_in_place(slice); - Global.deallocate(self.mem, self.layout); + self.alloc.deallocate(self.ptr.cast(), self.layout); } } } unsafe { - let ptr = Self::allocate_for_slice(len); - - let mem = ptr as *mut _ as *mut u8; - let layout = Layout::for_value(&*ptr); - - // Pointer to first element - let elems = &mut (*ptr).value as *mut [T] as *mut T; + let alloc = RcAllocator::new(Global); + let layout = Layout::array::(len).unwrap(); + let ptr = Self::allocate( + &alloc, + layout, + RcMetadata::new_strong(), + AllocInit::Uninitialized, + |ptr| NonNull::slice_from_raw_parts(ptr.cast(), len), + ); - let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 }; + let mut guard = + Guard { ptr: ptr.as_non_null_ptr(), layout, n_elems: 0, alloc: alloc.by_ref() }; for (i, item) in iter.enumerate() { - ptr::write(elems.add(i), item); + ptr::write(ptr.get_unchecked_mut(i).as_ptr(), item); guard.n_elems += 1; } // All clear. Forget the guard so it doesn't free the new RcBox. forget(guard); - Self::from_ptr(ptr) + Self::from_raw_in(ptr.as_ptr(), alloc) } } } @@ -1402,7 +1453,7 @@ impl Deref for Rc { #[inline(always)] fn deref(&self) -> &T { - &self.inner().value + unsafe { self.ptr.as_ref() } } } @@ -1437,18 +1488,20 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc { /// drop(foo2); // Prints "dropped!" /// ``` fn drop(&mut self) { - unsafe { - self.inner().dec_strong(); - if self.inner().strong() == 0 { - // destroy the contained object + self.metadata().dec_strong(); + if self.metadata().strong() == 0 { + // destroy the contained object + unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)); + } - // remove the implicit "strong weak" pointer now that we've - // destroyed the contents. - self.inner().dec_weak(); - - if self.inner().weak() == 0 { - Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); + // remove the implicit "strong weak" pointer now that we've + // destroyed the contents. + self.metadata().dec_weak(); + if self.metadata().weak() == 0 { + unsafe { + let layout = Layout::for_value_raw(self.ptr.as_ptr()); + self.alloc.deallocate(self.ptr.cast(), layout); } } } @@ -1473,8 +1526,8 @@ impl Clone for Rc { /// ``` #[inline] fn clone(&self) -> Rc { - self.inner().inc_strong(); - Self::from_inner(self.ptr) + self.metadata().inc_strong(); + unsafe { Self::from_raw(self.ptr.as_ptr()) } } } @@ -1976,9 +2029,10 @@ pub struct Weak { // but it is not necessarily a valid pointer. // `Weak::new` sets this to `usize::MAX` so that it doesn’t need // to allocate space on the heap. That's not a value a real pointer - // will ever have because RcBox has alignment at least 2. + // will ever have because `RcMetadata` has alignment at least 2. // This is only possible when `T: Sized`; unsized `T` never dangle. 
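// Illustrative, standalone rendering of the panic-guard pattern used by the iterator
// constructor above; `collect_cloned` and `Guard` are made-up names. If producing an
// element panics part-way through, the guard's `Drop` impl makes sure the elements
// written so far are dropped instead of leaked.
use std::mem::forget;

fn collect_cloned(values: &[String]) -> Vec<String> {
    let mut out: Vec<String> = Vec::with_capacity(values.len());

    struct Guard<'a> {
        out: &'a mut Vec<String>,
        initialized: usize,
    }
    impl Drop for Guard<'_> {
        fn drop(&mut self) {
            // On unwind, cover exactly the initialized prefix so `Vec`'s own drop
            // frees those elements (and the buffer) correctly.
            unsafe { self.out.set_len(self.initialized) };
        }
    }

    let mut guard = Guard { out: &mut out, initialized: 0 };
    for value in values {
        // Write into spare capacity; `clone` is the call that may panic.
        unsafe { guard.out.as_mut_ptr().add(guard.initialized).write(value.clone()) };
        guard.initialized += 1;
    }
    let initialized = guard.initialized;
    forget(guard); // fully initialized: disarm the guard
    unsafe { out.set_len(initialized) };
    out
}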
- ptr: NonNull>, + ptr: NonNull, + alloc: RcAllocator, } #[stable(feature = "rc_weak", since = "1.4.0")] @@ -1992,6 +2046,17 @@ impl, U: ?Sized> CoerceUnsized> for Weak {} #[unstable(feature = "dispatch_from_dyn", issue = "none")] impl, U: ?Sized> DispatchFromDyn> for Weak {} +impl Weak { + #[inline(always)] + fn metadata(&self) -> Option<&RcMetadata> { + if is_dangling(self.ptr.as_ptr()) { + None + } else { + Some(unsafe { RcAllocator::::prefix(self.ptr).as_ref() }) + } + } +} + impl Weak { /// Constructs a new `Weak`, without allocating any memory. /// Calling [`upgrade`] on the return value always gives [`None`]. @@ -2008,20 +2073,16 @@ impl Weak { /// ``` #[stable(feature = "downgraded_weak", since = "1.10.0")] pub fn new() -> Weak { - Weak { ptr: NonNull::new(usize::MAX as *mut RcBox).expect("MAX is not 0") } + Weak { + ptr: NonNull::new(usize::MAX as *mut T).expect("MAX is not 0"), + alloc: RcAllocator::new(Global), + } } } +#[inline] pub(crate) fn is_dangling(ptr: *mut T) -> bool { - let address = ptr as *mut () as usize; - address == usize::MAX -} - -/// Helper type to allow accessing the reference counts without -/// making any assertions about the data field. -struct WeakInner<'a> { - weak: &'a Cell, - strong: &'a Cell, + ptr as *mut () as usize == usize::MAX } impl Weak { @@ -2050,20 +2111,10 @@ impl Weak { /// ``` /// /// [`null`]: core::ptr::null + #[inline(always)] #[stable(feature = "rc_as_ptr", since = "1.45.0")] pub fn as_ptr(&self) -> *const T { - let ptr: *mut RcBox = NonNull::as_ptr(self.ptr); - - if is_dangling(ptr) { - // If the pointer is dangling, we return the sentinel directly. This cannot be - // a valid payload address, as the payload is at least as aligned as RcBox (usize). - ptr as *const T - } else { - // SAFETY: if is_dangling returns false, then the pointer is dereferencable. - // The payload may be dropped at this point, and we have to maintain provenance, - // so use raw pointer manipulation. - unsafe { ptr::addr_of_mut!((*ptr).value) } - } + self.ptr.as_ptr() } /// Consumes the `Weak` and turns it into a raw pointer. @@ -2093,6 +2144,7 @@ impl Weak { /// /// [`from_raw`]: Weak::from_raw /// [`as_ptr`]: Weak::as_ptr + #[inline] #[stable(feature = "weak_into_raw", since = "1.45.0")] pub fn into_raw(self) -> *const T { let result = self.as_ptr(); @@ -2142,24 +2194,17 @@ impl Weak { /// [`into_raw`]: Weak::into_raw /// [`upgrade`]: Weak::upgrade /// [`new`]: Weak::new + #[inline] #[stable(feature = "weak_into_raw", since = "1.45.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - // See Weak::as_ptr for context on how the input pointer is derived. - - let ptr = if is_dangling(ptr as *mut T) { - // This is a dangling Weak. - ptr as *mut RcBox - } else { - // Otherwise, we're guaranteed the pointer came from a nondangling Weak. - // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T. - let offset = unsafe { data_offset(ptr) }; - // Thus, we reverse the offset to get the whole RcBox. - // SAFETY: the pointer originated from a Weak, so this offset is safe. - unsafe { (ptr as *mut RcBox).set_ptr_value((ptr as *mut u8).offset(-offset)) } - }; + unsafe { Self::from_raw_in(ptr, RcAllocator::new(Global)) } + } - // SAFETY: we now have recovered the original Weak pointer, so can create the Weak. - Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } } + /// Constructs a `Weak` from a raw pointer. 
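// Illustrative sketch of the sentinel used by `Weak::new` above: the address
// `usize::MAX` can never be a real allocation of the metadata (its alignment is at
// least 2), so it doubles as the "no allocation" marker and upgrades always fail.
use std::rc::{Rc, Weak};

fn sentinel_sketch() {
    let empty: Weak<u32> = Weak::new(); // no heap allocation happens here
    assert!(empty.upgrade().is_none());

    let strong = Rc::new(7u32);
    let live = Rc::downgrade(&strong);
    assert_eq!(live.upgrade().as_deref(), Some(&7));
}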
+ #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn from_raw_in(ptr: *const T, alloc: RcAllocator) -> Self { + Self { ptr: unsafe { NonNull::new_unchecked(ptr as *mut T) }, alloc } } /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying @@ -2187,13 +2232,11 @@ impl Weak { /// ``` #[stable(feature = "rc_weak", since = "1.4.0")] pub fn upgrade(&self) -> Option> { - let inner = self.inner()?; - if inner.strong() == 0 { - None - } else { - inner.inc_strong(); - Some(Rc::from_inner(self.ptr)) - } + let meta = self.metadata()?; + (meta.strong() != 0).then(|| { + meta.inc_strong(); + unsafe { Rc::from_raw(self.ptr.as_ptr()) } + }) } /// Gets the number of strong (`Rc`) pointers pointing to this allocation. @@ -2201,7 +2244,7 @@ impl Weak { /// If `self` was created using [`Weak::new`], this will return 0. #[stable(feature = "weak_counts", since = "1.41.0")] pub fn strong_count(&self) -> usize { - if let Some(inner) = self.inner() { inner.strong() } else { 0 } + self.metadata().map(|meta| meta.strong()).unwrap_or(0) } /// Gets the number of `Weak` pointers pointing to this allocation. @@ -2209,10 +2252,10 @@ impl Weak { /// If no strong pointers remain, this will return zero. #[stable(feature = "weak_counts", since = "1.41.0")] pub fn weak_count(&self) -> usize { - self.inner() - .map(|inner| { - if inner.strong() > 0 { - inner.weak() - 1 // subtract the implicit weak ptr + self.metadata() + .map(|meta| { + if meta.strong() > 0 { + meta.weak() - 1 // subtract the implicit weak ptr } else { 0 } @@ -2220,23 +2263,6 @@ impl Weak { .unwrap_or(0) } - /// Returns `None` when the pointer is dangling and there is no allocated `RcBox`, - /// (i.e., when this `Weak` was created by `Weak::new`). - #[inline] - fn inner(&self) -> Option> { - if is_dangling(self.ptr.as_ptr()) { - None - } else { - // We are careful to *not* create a reference covering the "data" field, as - // the field may be mutated concurrently (for example, if the last `Rc` - // is dropped, the data field will be dropped in-place). - Some(unsafe { - let ptr = self.ptr.as_ptr(); - WeakInner { strong: &(*ptr).meta.strong, weak: &(*ptr).meta.weak } - }) - } - } - /// Returns `true` if the two `Weak`s point to the same allocation (similar to /// [`ptr::eq`]), or if both don't point to any allocation /// (because they were created with `Weak::new()`). @@ -2278,10 +2304,10 @@ impl Weak { /// ``` /// /// [`ptr::eq`]: core::ptr::eq - #[inline] + #[inline(always)] #[stable(feature = "weak_ptr_eq", since = "1.39.0")] pub fn ptr_eq(&self, other: &Self) -> bool { - self.ptr.as_ptr() == other.ptr.as_ptr() + self.ptr == other.ptr } } @@ -2312,14 +2338,15 @@ impl Drop for Weak { /// assert!(other_weak_foo.upgrade().is_none()); /// ``` fn drop(&mut self) { - let inner = if let Some(inner) = self.inner() { inner } else { return }; + let meta = if let Some(meta) = self.metadata() { meta } else { return }; - inner.dec_weak(); + meta.dec_weak(); // the weak count starts at 1, and will only go to zero if all // the strong pointers have disappeared. 
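// Illustrative sketch of the bookkeeping behind `weak_count`/`strong_count` above:
// the implicit "strong weak" reference is subtracted, so only user-visible `Weak`s
// are reported, and the weak count reads as zero once the value itself is gone.
use std::rc::Rc;

fn count_sketch() {
    let strong = Rc::new(1);
    assert_eq!(Rc::weak_count(&strong), 0); // the implicit weak is not reported
    let weak = Rc::downgrade(&strong);
    assert_eq!(Rc::weak_count(&strong), 1);
    assert_eq!(weak.strong_count(), 1);
    drop(strong);
    assert_eq!(weak.strong_count(), 0); // value dropped, metadata still allocated
    assert_eq!(weak.weak_count(), 0);   // reported as zero with no strong refs left
}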
- if inner.weak() == 0 { + if meta.weak() == 0 { unsafe { - Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())); + let layout = Layout::for_value_raw(self.ptr.as_ptr()); + self.alloc.deallocate(self.ptr.cast(), layout); } } } @@ -2340,10 +2367,10 @@ impl Clone for Weak { /// ``` #[inline] fn clone(&self) -> Weak { - if let Some(inner) = self.inner() { - inner.inc_weak() + if let Some(meta) = self.metadata() { + meta.inc_weak() } - Weak { ptr: self.ptr } + Weak { ptr: self.ptr, alloc: RcAllocator::new(Global) } } } @@ -2375,93 +2402,6 @@ impl Default for Weak { } } -// NOTE: We checked_add here to deal with mem::forget safely. In particular -// if you mem::forget Rcs (or Weaks), the ref-count can overflow, and then -// you can free the allocation while outstanding Rcs (or Weaks) exist. -// We abort because this is such a degenerate scenario that we don't care about -// what happens -- no real program should ever experience this. -// -// This should have negligible overhead since you don't actually need to -// clone these much in Rust thanks to ownership and move-semantics. - -#[doc(hidden)] -trait RcInnerPtr { - fn weak_ref(&self) -> &Cell; - fn strong_ref(&self) -> &Cell; - - #[inline] - fn strong(&self) -> usize { - self.strong_ref().get() - } - - #[inline] - fn inc_strong(&self) { - let strong = self.strong(); - - // We want to abort on overflow instead of dropping the value. - // The reference count will never be zero when this is called; - // nevertheless, we insert an abort here to hint LLVM at - // an otherwise missed optimization. - if strong == 0 || strong == usize::MAX { - abort(); - } - self.strong_ref().set(strong + 1); - } - - #[inline] - fn dec_strong(&self) { - self.strong_ref().set(self.strong() - 1); - } - - #[inline] - fn weak(&self) -> usize { - self.weak_ref().get() - } - - #[inline] - fn inc_weak(&self) { - let weak = self.weak(); - - // We want to abort on overflow instead of dropping the value. - // The reference count will never be zero when this is called; - // nevertheless, we insert an abort here to hint LLVM at - // an otherwise missed optimization. - if weak == 0 || weak == usize::MAX { - abort(); - } - self.weak_ref().set(weak + 1); - } - - #[inline] - fn dec_weak(&self) { - self.weak_ref().set(self.weak() - 1); - } -} - -impl RcInnerPtr for RcBox { - #[inline(always)] - fn weak_ref(&self) -> &Cell { - &self.meta.weak - } - - #[inline(always)] - fn strong_ref(&self) -> &Cell { - &self.meta.strong - } -} - -impl<'a> RcInnerPtr for WeakInner<'a> { - #[inline(always)] - fn weak_ref(&self) -> &Cell { - self.weak - } - - #[inline(always)] - fn strong_ref(&self) -> &Cell { - self.strong - } -} - #[stable(feature = "rust1", since = "1.0.0")] impl borrow::Borrow for Rc { fn borrow(&self) -> &T { @@ -2478,22 +2418,3 @@ impl AsRef for Rc { #[stable(feature = "pin", since = "1.33.0")] impl Unpin for Rc {} - -type RcAllocator = PrefixAllocator; - -/// Get the offset within an `RcBox` for the payload behind a pointer. -/// -/// # Safety -/// -/// The pointer must point to (and have valid metadata for) a previously -/// valid instance of T, but the T is allowed to be dropped. -unsafe fn data_offset(ptr: *const T) -> isize { - unsafe { - // SAFETY: since the only unsized types possible are slices, trait objects, - // and extern types, the input safety requirement is currently enough to - // satisfy the requirements of for_value_raw; this is an implementation - // detail of the language that may not be relied upon outside of std. 
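// Illustrative sketch of why the increment logic (removed above with the
// `RcInnerPtr` trait, and presumably kept on `RcMetadata::inc_strong`) aborts on
// overflow: `mem::forget` can leak clones without ever decrementing, so an unchecked
// counter could wrap around and free memory that live handles still point to.
// `checked_increment` is a made-up free-function rendering of that check.
use std::process::abort;

fn checked_increment(count: usize) -> usize {
    // Zero would mean the value was already dropped; MAX would wrap on `+ 1`.
    if count == 0 || count == usize::MAX {
        abort();
    }
    count + 1
}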
- let layout = Layout::for_value_raw(ptr); - RcAllocator::prefix_offset(layout) as isize - } -} diff --git a/library/core/src/alloc/helper.rs b/library/core/src/alloc/helper.rs index efa51082a53c8..4a3705a56ed0b 100644 --- a/library/core/src/alloc/helper.rs +++ b/library/core/src/alloc/helper.rs @@ -2,9 +2,17 @@ use crate::{ alloc::{AllocError, Allocator, Layout}, fmt, marker::PhantomData, + mem, ptr::NonNull, }; +#[unstable(feature = "allocator_api_internals", issue = "none")] +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum AllocInit { + Uninitialized, + Zeroed, +} + /// An allocator that requests some extra memory from the parent allocator for storing a prefix and/or a suffix. /// /// The alignment of the memory block is the maximum of the alignment of `Prefix` and the requested @@ -38,7 +46,7 @@ pub struct PrefixAllocator { impl fmt::Debug for PrefixAllocator { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Affix").field("parent", &self.parent).finish() + f.debug_struct("PrefixAllocator").field("parent", &self.parent).finish() } } @@ -85,15 +93,26 @@ impl PrefixAllocator { /// # Safety /// /// * `ptr` must denote a block of memory *[currently allocated]* via this allocator, and - /// * `layout` must *[fit]* that block of memory. + /// * `ptr` must point to (and have valid metadata for) a previously valid instance of `T`, + /// but the `T` is allowed to be dropped. /// /// [currently allocated]: https://doc.rust-lang.org/nightly/core/alloc/trait.AllocRef.html#currently-allocated-memory - /// [fit]: https://doc.rust-lang.org/nightly/core/alloc/trait.AllocRef.html#memory-fitting #[inline] - pub unsafe fn prefix(ptr: NonNull, layout: Layout) -> NonNull { - let prefix_offset = Self::prefix_offset(layout); - // SAFETY: `prefix_offset` is smaller (and not equal to) `ptr` as the same function for calculating `prefix_offset` is used when allocating. - unsafe { NonNull::new_unchecked(ptr.as_ptr().sub(prefix_offset)).cast() } + pub unsafe fn prefix(ptr: NonNull) -> NonNull { + let prefix_layout = Layout::new::(); + + // SAFETY: since the only unsized types possible are slices, trait objects, + // and extern types, the input safety requirement is currently enough to + // satisfy the requirements of for_value_raw; this is an implementation + // detail of the language that may not be relied upon outside of std. + let align = unsafe { mem::align_of_val_raw(ptr.as_ptr()) }; + + let offset = prefix_layout.size() + prefix_layout.padding_needed_for(align); + let ptr = ptr.as_ptr() as *mut u8; + + // SAFETY: `ptr` was allocated with this allocator thus, `ptr - offset` points to the + // prefix and is non-null. 
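// Illustrative sketch of the offset computation performed by `prefix` above, written
// with stable APIs only: the prefix size plus the padding needed to reach the value's
// alignment (spelled out by hand, since `padding_needed_for` is unstable).
use std::alloc::Layout;

fn prefix_offset_sketch(prefix: Layout, value_align: usize) -> usize {
    let padding = (value_align - prefix.size() % value_align) % value_align;
    prefix.size() + padding
}

fn offset_checks() {
    let prefix = Layout::new::<[usize; 2]>(); // stand-in for the two-counter metadata
    assert_eq!(prefix_offset_sketch(prefix, 16), 16);
    assert_eq!(prefix_offset_sketch(prefix, std::mem::align_of::<usize>()), prefix.size());
}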
+ unsafe { NonNull::new_unchecked(ptr.sub(offset)).cast() } } fn create_ptr(ptr: NonNull<[u8]>, offset_prefix: usize) -> NonNull<[u8]> { From 596585ac8735ec877193fef2b27ab80fa92e59b3 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Wed, 21 Apr 2021 19:46:02 +0200 Subject: [PATCH 05/20] Fix `PrefixAllocator` test --- library/core/tests/alloc/prefix.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/core/tests/alloc/prefix.rs b/library/core/tests/alloc/prefix.rs index 12adb961844b2..cbe87be158c8d 100644 --- a/library/core/tests/alloc/prefix.rs +++ b/library/core/tests/alloc/prefix.rs @@ -23,7 +23,7 @@ fn test_prefix() { }); assert_eq!( - PrefixAllocator::::prefix(memory.as_non_null_ptr(), layout) + PrefixAllocator::::prefix::(memory.as_non_null_ptr().cast()) .cast() .as_ptr(), memory.as_mut_ptr().sub(prefix_offset), From c4d6ec718085bd941d59b6ae205e71238b3ad55f Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Wed, 21 Apr 2021 20:00:16 +0200 Subject: [PATCH 06/20] Remove debug assertions in `get_mut_unchecked` Rc::drop uses this when strong count is zero --- library/alloc/src/rc.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index bc7c623f72b7e..c2129331dd08c 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -1135,7 +1135,6 @@ impl Rc { #[inline] #[unstable(feature = "get_mut_unchecked", issue = "63292")] pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T { - debug_assert!(Self::is_unique(this), "The pointer has to be unique"); // We are careful to *not* create a reference covering the "count" fields, as // this would conflict with accesses to the reference counts (e.g. by `Weak`). unsafe { this.ptr.as_mut() } From e2b4562d88be1899c08b2ecaa99ac888226f4698 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Thu, 22 Apr 2021 19:24:08 +0200 Subject: [PATCH 07/20] Fix debug output --- src/etc/gdb_providers.py | 19 +++++++++++++++--- src/etc/lldb_providers.py | 38 ++++++++++++++++++++++-------------- src/test/debuginfo/rc_arc.rs | 2 +- 3 files changed, 40 insertions(+), 19 deletions(-) diff --git a/src/etc/gdb_providers.py b/src/etc/gdb_providers.py index 3ce10ceaf6994..cbb90a65205ac 100644 --- a/src/etc/gdb_providers.py +++ b/src/etc/gdb_providers.py @@ -142,12 +142,25 @@ def display_hint(): class StdRcProvider: def __init__(self, valobj, is_atomic=False): + def size_rounded_up(size, align): + return (size + align - 1) & ~(align - 1) + self.valobj = valobj self.is_atomic = is_atomic self.ptr = unwrap_unique_or_non_null(valobj["ptr"]) - self.value = self.ptr["data" if is_atomic else "value"] - self.strong = self.ptr["meta"]["strong"]["v" if is_atomic else "value"]["value"] - self.weak = self.ptr["meta"]["weak"]["v" if is_atomic else "value"]["value"] - 1 + if is_atomic: + # Use old lookup for Arc as it is not refactored yet + self.value = self.ptr["data" if is_atomic else "value"] + self.strong = self.ptr["meta"]["strong"]["v"]["value"] + self.weak = self.ptr["meta"]["weak"]["v"]["value"] - 1 + return + self.value = self.ptr.dereference() + + metadata_type = gdb.lookup_type("alloc::sync::ArcMetadata" if is_atomic else "alloc::rc::RcMetadata") + offset = size_rounded_up(metadata_type.sizeof, self.value.type.alignof) + self.metadata = (self.ptr.cast(gdb.lookup_type("u8").pointer()) - offset).cast(metadata_type.pointer()) + self.strong = self.metadata["strong"]["v" if is_atomic else "value"]["value"] + self.weak = self.metadata["weak"]["v" if is_atomic else "value"]["value"] - 1 def 
to_string(self): if self.is_atomic: diff --git a/src/etc/lldb_providers.py b/src/etc/lldb_providers.py index 6df5407371b7c..50ea0fa7e2b01 100644 --- a/src/etc/lldb_providers.py +++ b/src/etc/lldb_providers.py @@ -582,33 +582,36 @@ def StdRcSummaryProvider(valobj, dict): class StdRcSyntheticProvider: """Pretty-printer for alloc::rc::Rc and alloc::sync::Arc - struct Rc { ptr: NonNull>, ... } + struct Rc { ptr: NonNull, alloc: PrefixAllocator<_, RcMetadata>, ... } rust 1.31.1: struct NonNull { pointer: NonZero<*const T> } rust 1.33.0: struct NonNull { pointer: *const T } struct NonZero(T) - struct RcBox { strong: Cell, weak: Cell, value: T } + struct RcMetadata { strong: Cell, weak: Cell } struct Cell { value: UnsafeCell } struct UnsafeCell { value: T } - struct Arc { ptr: NonNull>, ... } - struct ArcInner { strong: atomic::AtomicUsize, weak: atomic::AtomicUsize, data: T } + struct Arc { ptr: NonNull, alloc: PrefixAllocator<_, ArcMetadata>, ... } + struct ArcMetadata{ strong: atomic::AtomicUsize, weak: atomic::AtomicUsize } struct AtomicUsize { v: UnsafeCell } """ def __init__(self, valobj, dict, is_atomic=False): + def size_rounded_up(size, align): + return (size + align - 1) & ~(align - 1) + # type: (SBValue, dict, bool) -> StdRcSyntheticProvider self.valobj = valobj + self.value_builder = ValueBuilder(valobj) + self.is_atomic = is_atomic self.ptr = unwrap_unique_or_non_null(self.valobj.GetChildMemberWithName("ptr")) - self.value = self.ptr.GetChildMemberWithName("data" if is_atomic else "value") - - self.strong = self.ptr.GetChildMemberWithName("meta")\ - .GetChildMemberWithName("strong").GetChildAtIndex(0).GetChildMemberWithName("value") - self.weak = self.ptr.GetChildMemberWithName("meta")\ - .GetChildMemberWithName("weak").GetChildAtIndex(0).GetChildMemberWithName("value")) - - self.value_builder = ValueBuilder(valobj) + if is_atomic: + self.meta = self.ptr.GetChildMemberWithName("meta") + else: + metadata_type = self.valobj.GetChildMemberWithName("alloc").type.template_args[1] + offset = size_rounded_up(metadata_type.size, self.ptr.type.GetPointeeType().GetByteSize()) + self.meta = self.valobj.CreateValueFromAddress("meta", self.ptr.GetValueAsUnsigned() - offset, metadata_type) self.update() @@ -630,7 +633,8 @@ def get_child_index(self, name): def get_child_at_index(self, index): # type: (int) -> SBValue if index == 0: - return self.value + value = self.ptr.GetChildMemberWithName("data") if self.is_atomic else self.ptr.Dereference() + return self.valobj.CreateValueFromData("value", value.data, value.type) if index == 1: return self.value_builder.from_uint("strong", self.strong_count) if index == 2: @@ -640,8 +644,12 @@ def get_child_at_index(self, index): def update(self): # type: () -> None - self.strong_count = self.strong.GetValueAsUnsigned() - self.weak_count = self.weak.GetValueAsUnsigned() - 1 + if self.is_atomic: + self.strong_count = self.meta.GetChildMemberWithName("strong").GetChildAtIndex(0).GetChildMemberWithName("value").GetValueAsUnsigned() + self.weak_count = self.meta.GetChildMemberWithName("weak").GetChildAtIndex(0).GetChildMemberWithName("value").GetValueAsUnsigned() - 1 + else: + self.strong_count = self.meta.GetChildMemberWithName("strong").GetChildMemberWithName("value").GetValueAsUnsigned() + self.weak_count = self.meta.GetChildMemberWithName("weak").GetChildMemberWithName("value").GetValueAsUnsigned() - 1 def has_children(self): # type: () -> bool diff --git a/src/test/debuginfo/rc_arc.rs b/src/test/debuginfo/rc_arc.rs index 87bc79ea79437..062668209bbc8 100644 
--- a/src/test/debuginfo/rc_arc.rs +++ b/src/test/debuginfo/rc_arc.rs @@ -20,7 +20,7 @@ // lldb-command:print r // lldb-check:[...]$0 = strong=2, weak=1 { value = 42 } // lldb-command:print a -// lldb-check:[...]$1 = strong=2, weak=1 { data = 42 } +// lldb-check:[...]$1 = strong=2, weak=1 { value = 42 } use std::rc::Rc; use std::sync::Arc; From 19a813e5101d855371ed17cae2bcc74ca0075c2a Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Thu, 22 Apr 2021 19:56:11 +0200 Subject: [PATCH 08/20] Satisfy tidy --- src/etc/gdb_providers.py | 5 +++-- src/etc/lldb_providers.py | 21 ++++++++++++++------- 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/src/etc/gdb_providers.py b/src/etc/gdb_providers.py index cbb90a65205ac..c2cd95007ac0c 100644 --- a/src/etc/gdb_providers.py +++ b/src/etc/gdb_providers.py @@ -156,9 +156,10 @@ def size_rounded_up(size, align): return self.value = self.ptr.dereference() - metadata_type = gdb.lookup_type("alloc::sync::ArcMetadata" if is_atomic else "alloc::rc::RcMetadata") + metadata_type = self.valobj["alloc"].type.template_argument(1) offset = size_rounded_up(metadata_type.sizeof, self.value.type.alignof) - self.metadata = (self.ptr.cast(gdb.lookup_type("u8").pointer()) - offset).cast(metadata_type.pointer()) + u8_pointer = gdb.lookup_type("u8").pointer() + self.metadata = (self.ptr.cast(u8_pointer) - offset).cast(metadata_type.pointer()) self.strong = self.metadata["strong"]["v" if is_atomic else "value"]["value"] self.weak = self.metadata["weak"]["v" if is_atomic else "value"]["value"] - 1 diff --git a/src/etc/lldb_providers.py b/src/etc/lldb_providers.py index 50ea0fa7e2b01..47117c62d0dc1 100644 --- a/src/etc/lldb_providers.py +++ b/src/etc/lldb_providers.py @@ -610,8 +610,10 @@ def size_rounded_up(size, align): self.meta = self.ptr.GetChildMemberWithName("meta") else: metadata_type = self.valobj.GetChildMemberWithName("alloc").type.template_args[1] - offset = size_rounded_up(metadata_type.size, self.ptr.type.GetPointeeType().GetByteSize()) - self.meta = self.valobj.CreateValueFromAddress("meta", self.ptr.GetValueAsUnsigned() - offset, metadata_type) + align = self.ptr.type.GetPointeeType().GetByteSize() + offset = size_rounded_up(metadata_type.size, align) + meta_address = self.ptr.GetValueAsUnsigned() - offset + self.meta = self.valobj.CreateValueFromAddress("meta", meta_address, metadata_type) self.update() @@ -633,7 +635,8 @@ def get_child_index(self, name): def get_child_at_index(self, index): # type: (int) -> SBValue if index == 0: - value = self.ptr.GetChildMemberWithName("data") if self.is_atomic else self.ptr.Dereference() + value = self.ptr.GetChildMemberWithName("data") if self.is_atomic \ + else self.ptr.Dereference() return self.valobj.CreateValueFromData("value", value.data, value.type) if index == 1: return self.value_builder.from_uint("strong", self.strong_count) @@ -645,11 +648,15 @@ def get_child_at_index(self, index): def update(self): # type: () -> None if self.is_atomic: - self.strong_count = self.meta.GetChildMemberWithName("strong").GetChildAtIndex(0).GetChildMemberWithName("value").GetValueAsUnsigned() - self.weak_count = self.meta.GetChildMemberWithName("weak").GetChildAtIndex(0).GetChildMemberWithName("value").GetValueAsUnsigned() - 1 + self.strong_count = self.meta.GetChildMemberWithName("strong").GetChildAtIndex(0)\ + .GetChildMemberWithName("value").GetValueAsUnsigned() + self.weak_count = self.meta.GetChildMemberWithName("weak").GetChildAtIndex(0)\ + .GetChildMemberWithName("value").GetValueAsUnsigned() - 1 else: - 
self.strong_count = self.meta.GetChildMemberWithName("strong").GetChildMemberWithName("value").GetValueAsUnsigned() - self.weak_count = self.meta.GetChildMemberWithName("weak").GetChildMemberWithName("value").GetValueAsUnsigned() - 1 + self.strong_count = self.meta.GetChildMemberWithName("strong")\ + .GetChildMemberWithName("value").GetValueAsUnsigned() + self.weak_count = self.meta.GetChildMemberWithName("weak")\ + .GetChildMemberWithName("value").GetValueAsUnsigned() - 1 def has_children(self): # type: () -> bool From 87ee732797a05ed1416e46fea55f534e239ddb7e Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Thu, 22 Apr 2021 20:10:07 +0200 Subject: [PATCH 09/20] Use `AllocInit` from allocation helpers in `raw_vec.rs` --- library/alloc/src/raw_vec.rs | 9 +-------- library/core/src/alloc/helper.rs | 7 +++++-- library/core/src/alloc/mod.rs | 5 ++--- 3 files changed, 8 insertions(+), 13 deletions(-) diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs index dc02c9c883ea0..2d9355f0ceeef 100644 --- a/library/alloc/src/raw_vec.rs +++ b/library/alloc/src/raw_vec.rs @@ -1,7 +1,7 @@ #![unstable(feature = "raw_vec_internals", reason = "implementation detail", issue = "none")] #![doc(hidden)] -use core::alloc::LayoutError; +use core::alloc::{LayoutError, helper::AllocInit}; use core::cmp; use core::intrinsics; use core::mem::{self, ManuallyDrop, MaybeUninit}; @@ -16,13 +16,6 @@ use crate::collections::TryReserveError::{self, *}; #[cfg(test)] mod tests; -enum AllocInit { - /// The contents of the new memory are uninitialized. - Uninitialized, - /// The new memory is guaranteed to be zeroed. - Zeroed, -} - /// A low-level utility for more ergonomically allocating, reallocating, and deallocating /// a buffer of memory on the heap without having to worry about all the corner cases /// involved. This type is excellent for building your own data structures like Vec and VecDeque. diff --git a/library/core/src/alloc/helper.rs b/library/core/src/alloc/helper.rs index 4a3705a56ed0b..4f3255d57067b 100644 --- a/library/core/src/alloc/helper.rs +++ b/library/core/src/alloc/helper.rs @@ -1,3 +1,6 @@ +#![unstable(feature = "allocator_api_internals", issue = "none")] +#![doc(hidden)] + use crate::{ alloc::{AllocError, Allocator, Layout}, fmt, @@ -6,10 +9,11 @@ use crate::{ ptr::NonNull, }; -#[unstable(feature = "allocator_api_internals", issue = "none")] #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum AllocInit { + /// The contents of the new memory are uninitialized. Uninitialized, + /// The new memory is guaranteed to be zeroed. Zeroed, } @@ -37,7 +41,6 @@ pub enum AllocInit { /// When this allocator creates an allocation for layout `layout`, the pointer can be /// offset by `-offsetof(Struct, data)` and the resulting pointer points is an allocation /// of `A` for `Layout::new::()`. 
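// Illustrative sketch (not from the patch): `size_rounded_up` in the debugger
// providers above is the usual power-of-two round-up, and it matches the value
// offset that `Layout::extend` reports for a prefix layout. (The providers
// approximate the value's alignment with its size because old debuggers do not
// expose alignment.)
use std::alloc::Layout;

fn size_rounded_up(size: usize, align: usize) -> usize {
    (size + align - 1) & !(align - 1)
}

fn round_up_sketch() {
    let meta = Layout::new::<[usize; 2]>(); // stand-in for the two-counter metadata
    let value = Layout::new::<u128>();
    let (_, offset) = meta.extend(value).unwrap();
    assert_eq!(offset, size_rounded_up(meta.size(), value.align()));
}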
-#[unstable(feature = "allocator_api_internals", issue = "none")] pub struct PrefixAllocator { /// The parent allocator to be used as backend pub parent: Alloc, diff --git a/library/core/src/alloc/mod.rs b/library/core/src/alloc/mod.rs index e1853f743de7d..136f09214e404 100644 --- a/library/core/src/alloc/mod.rs +++ b/library/core/src/alloc/mod.rs @@ -2,12 +2,11 @@ #![stable(feature = "alloc_module", since = "1.28.0")] +pub mod helper; + mod global; mod layout; -#[unstable(feature = "allocator_api_internals", issue = "none")] -pub mod helper; - #[stable(feature = "global_alloc", since = "1.28.0")] pub use self::global::GlobalAlloc; #[stable(feature = "alloc_layout", since = "1.28.0")] From 1c43985b124431861fbcf029b7825bca2c4be284 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Thu, 22 Apr 2021 20:23:34 +0200 Subject: [PATCH 10/20] Use `sizeof` instead of `alignof` in Debugger` Old versions of debugger don't keep track of alignment. This uses `sizeof` instead as in the hashmap debugger. --- src/etc/gdb_providers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/etc/gdb_providers.py b/src/etc/gdb_providers.py index c2cd95007ac0c..7c242e0f82262 100644 --- a/src/etc/gdb_providers.py +++ b/src/etc/gdb_providers.py @@ -157,7 +157,7 @@ def size_rounded_up(size, align): self.value = self.ptr.dereference() metadata_type = self.valobj["alloc"].type.template_argument(1) - offset = size_rounded_up(metadata_type.sizeof, self.value.type.alignof) + offset = size_rounded_up(metadata_type.sizeof, self.value.type.sizeof) u8_pointer = gdb.lookup_type("u8").pointer() self.metadata = (self.ptr.cast(u8_pointer) - offset).cast(metadata_type.pointer()) self.strong = self.metadata["strong"]["v" if is_atomic else "value"]["value"] From ab4008334b16aa83a0d16748ae6903c20f8b8ffd Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Thu, 22 Apr 2021 20:48:45 +0200 Subject: [PATCH 11/20] Satisfy rustfmt --- library/alloc/src/raw_vec.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs index 2d9355f0ceeef..2c09c5f206f76 100644 --- a/library/alloc/src/raw_vec.rs +++ b/library/alloc/src/raw_vec.rs @@ -1,7 +1,7 @@ #![unstable(feature = "raw_vec_internals", reason = "implementation detail", issue = "none")] #![doc(hidden)] -use core::alloc::{LayoutError, helper::AllocInit}; +use core::alloc::{helper::AllocInit, LayoutError}; use core::cmp; use core::intrinsics; use core::mem::{self, ManuallyDrop, MaybeUninit}; From 40261fd505d0f82038b47e19756c8403189553c4 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Thu, 22 Apr 2021 22:34:22 +0200 Subject: [PATCH 12/20] Refactor `Arc` to use `PrefixAllocator` internally --- library/alloc/src/rc.rs | 23 +- library/alloc/src/sync.rs | 720 +++++++++++++++++--------------------- src/etc/gdb_providers.py | 6 - src/etc/lldb_providers.py | 16 +- 4 files changed, 341 insertions(+), 424 deletions(-) diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index c2129331dd08c..10215b78b32ba 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -274,7 +274,7 @@ use crate::vec::Vec; #[cfg(test)] mod tests; -/// Metadata `Rc` and `Weak` to be allocated as prefix. +/// Metadata for `Rc` and `Weak` to be allocated as prefix. 
#[unstable(feature = "allocator_api_internals", issue = "none")] #[derive(Debug, Clone)] #[doc(hidden)] @@ -462,6 +462,8 @@ impl Rc { #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "arc_new_cyclic", issue = "75861")] pub fn try_new_cyclic(data_fn: impl FnOnce(&Weak) -> T) -> Result, AllocError> { + // Construct the inner in the "uninitialized" state with a single + // weak reference. let alloc = RcAllocator::new(Global); let ptr = Self::try_allocate( &alloc, @@ -489,7 +491,7 @@ impl Rc { debug_assert_eq!(meta.strong.get(), 0, "No prior strong references should exist"); meta.strong.set(1); - unsafe { Ok(Rc::from_raw_in(ptr.as_ptr(), weak.alloc)) } + unsafe { Ok(Self::from_raw_in(ptr.as_ptr(), weak.alloc)) } } /// Constructs a new `Rc` with uninitialized contents. @@ -795,9 +797,7 @@ impl Rc> { #[unstable(feature = "new_uninit", issue = "63291")] pub unsafe fn assume_init(self) -> Rc { let this = mem::ManuallyDrop::new(self); - let ptr = this.ptr; - let alloc = this.alloc; - unsafe { Rc::from_raw_in(ptr.cast().as_ptr(), alloc) } + unsafe { Rc::from_raw_in(this.ptr.cast().as_ptr(), this.alloc) } } } @@ -843,8 +843,7 @@ impl Rc<[mem::MaybeUninit]> { let ptr = unsafe { NonNull::slice_from_raw_parts(NonNull::new_unchecked(this.ptr.as_mut_ptr().cast()), len) }; - let alloc = this.alloc; - unsafe { Rc::from_raw_in(ptr.as_ptr(), alloc) } + unsafe { Rc::from_raw_in(ptr.as_ptr(), this.alloc) } } } @@ -1272,9 +1271,7 @@ impl Rc { pub fn downcast(self) -> Result, Rc> { if (*self).is::() { let this = mem::ManuallyDrop::new(self); - let ptr = this.ptr; - let alloc = this.alloc; - unsafe { Ok(Rc::from_raw_in(ptr.cast().as_ptr(), alloc)) } + unsafe { Ok(Rc::from_raw_in(this.ptr.cast().as_ptr(), this.alloc)) } } else { Err(self) } @@ -1282,7 +1279,7 @@ impl Rc { } impl Rc { - /// Allocates an `RcBox` with sufficient space for + /// Allocates an `Rc` with sufficient space for /// a possibly-unsized inner value where the value has the layout provided, /// returning an error if allocation fails. /// @@ -1300,11 +1297,11 @@ impl Rc { .unwrap_or_else(|_| handle_alloc_error(layout)) } - /// Allocates an `RcBox` with sufficient space for + /// Allocates an `Rc` with sufficient space for /// a possibly-unsized inner value where the value has the layout provided, /// returning an error if allocation fails. /// - /// The function `mem_to_rcbox` is called with the data pointer + /// The function `mem_to_ptr` is called with the data pointer /// and must return back a (potentially fat)-pointer for the `RcBox`. #[inline] fn try_allocate( diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index 866c33003aae7..79618a90658e6 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -4,7 +4,7 @@ //! //! See the [`Arc`][Arc] documentation for more details. -use core::alloc::helper::PrefixAllocator; +use core::alloc::helper::{AllocInit, PrefixAllocator}; use core::any::Any; use core::borrow; use core::cmp::Ordering; @@ -15,7 +15,7 @@ use core::hint; use core::intrinsics::abort; use core::iter; use core::marker::{PhantomData, Unpin, Unsize}; -use core::mem::{self, size_of_val, MaybeUninit}; +use core::mem::{self, forget}; use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver}; use core::pin::Pin; use core::ptr::{self, NonNull}; @@ -58,6 +58,36 @@ macro_rules! acquire { }; } +/// Metadata for `Arc` and `Weak` to be allocated as prefix. 
+#[unstable(feature = "allocator_api_internals", issue = "none")] +#[derive(Debug)] +#[doc(hidden)] +pub struct ArcMetadata { + strong: atomic::AtomicUsize, + + // the value usize::MAX acts as a sentinel for temporarily "locking" the + // ability to upgrade weak pointers or downgrade strong ones; this is used + // to avoid races in `make_mut` and `get_mut`. + weak: atomic::AtomicUsize, +} + +impl ArcMetadata { + #[inline] + fn new_strong() -> Self { + Self { strong: atomic::AtomicUsize::new(1), weak: atomic::AtomicUsize::new(1) } + } + + #[inline] + fn new_weak() -> Self { + Self { strong: atomic::AtomicUsize::new(0), weak: atomic::AtomicUsize::new(1) } + } +} + +/// Allocator used for `Arc` and `Weak`. +#[unstable(feature = "allocator_api_internals", issue = "none")] +#[doc(hidden)] +pub type ArcAllocator = PrefixAllocator; + /// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically /// Reference Counted'. /// @@ -224,8 +254,9 @@ macro_rules! acquire { #[cfg_attr(not(test), rustc_diagnostic_item = "Arc")] #[stable(feature = "rust1", since = "1.0.0")] pub struct Arc { - ptr: NonNull>, - phantom: PhantomData>, + ptr: NonNull, + alloc: ArcAllocator, + _marker: PhantomData, } #[stable(feature = "rust1", since = "1.0.0")] @@ -240,12 +271,9 @@ impl, U: ?Sized> CoerceUnsized> for Arc {} impl, U: ?Sized> DispatchFromDyn> for Arc {} impl Arc { - fn from_inner(ptr: NonNull>) -> Self { - Self { ptr, phantom: PhantomData } - } - - unsafe fn from_ptr(ptr: *mut ArcInner) -> Self { - unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) } + #[inline(always)] + fn metadata(&self) -> &ArcMetadata { + unsafe { ArcAllocator::::prefix(self.ptr).as_ref() } } } @@ -277,7 +305,8 @@ pub struct Weak { // to allocate space on the heap. That's not a value a real pointer // will ever have because RcBox has alignment at least 2. // This is only possible when `T: Sized`; unsized `T` never dangle. - ptr: NonNull>, + ptr: NonNull, + alloc: ArcAllocator, } #[stable(feature = "arc_weak", since = "1.4.0")] @@ -297,39 +326,17 @@ impl fmt::Debug for Weak { } } -struct ArcMetadata { - strong: atomic::AtomicUsize, - - // the value usize::MAX acts as a sentinel for temporarily "locking" the - // ability to upgrade weak pointers or downgrade strong ones; this is used - // to avoid races in `make_mut` and `get_mut`. - weak: atomic::AtomicUsize, -} - -impl ArcMetadata { - #[inline] - fn new_strong() -> Self { - Self { strong: atomic::AtomicUsize::new(1), weak: atomic::AtomicUsize::new(1) } - } - - #[inline] - fn new_weak() -> Self { - Self { strong: atomic::AtomicUsize::new(0), weak: atomic::AtomicUsize::new(1) } +impl Weak { + #[inline(always)] + fn metadata(&self) -> Option<&ArcMetadata> { + if is_dangling(self.ptr.as_ptr()) { + None + } else { + Some(unsafe { ArcAllocator::::prefix(self.ptr).as_ref() }) + } } } -// This is repr(C) to future-proof against possible field-reordering, which -// would interfere with otherwise safe [into|from]_raw() of transmutable -// inner types. -#[repr(C)] -struct ArcInner { - meta: ArcMetadata, - data: T, -} - -unsafe impl Send for ArcInner {} -unsafe impl Sync for ArcInner {} - impl Arc { /// Constructs a new `Arc`. 
/// @@ -342,9 +349,8 @@ impl Arc { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - pub fn new(data: T) -> Arc { - let x: Box<_> = box ArcInner { meta: ArcMetadata::new_strong(), data }; - Self::from_inner(Box::leak(x).into()) + pub fn new(data: T) -> Self { + Self::try_new(data).unwrap_or_else(|_| handle_alloc_error(Layout::new::())) } /// Constructs a new `Arc` using a weak reference to itself. Attempting @@ -369,54 +375,8 @@ impl Arc { /// ``` #[inline] #[unstable(feature = "arc_new_cyclic", issue = "75861")] - pub fn new_cyclic(data_fn: impl FnOnce(&Weak) -> T) -> Arc { - // Construct the inner in the "uninitialized" state with a single - // weak reference. - let uninit_ptr: NonNull<_> = Box::leak(box ArcInner { - meta: ArcMetadata::new_weak(), - data: mem::MaybeUninit::::uninit(), - }) - .into(); - let init_ptr: NonNull> = uninit_ptr.cast(); - - let weak = Weak { ptr: init_ptr }; - - // It's important we don't give up ownership of the weak pointer, or - // else the memory might be freed by the time `data_fn` returns. If - // we really wanted to pass ownership, we could create an additional - // weak pointer for ourselves, but this would result in additional - // updates to the weak reference count which might not be necessary - // otherwise. - let data = data_fn(&weak); - - // Now we can properly initialize the inner value and turn our weak - // reference into a strong reference. - unsafe { - let inner = init_ptr.as_ptr(); - ptr::write(ptr::addr_of_mut!((*inner).data), data); - - // The above write to the data field must be visible to any threads which - // observe a non-zero strong count. Therefore we need at least "Release" ordering - // in order to synchronize with the `compare_exchange_weak` in `Weak::upgrade`. - // - // "Acquire" ordering is not required. When considering the possible behaviours - // of `data_fn` we only need to look at what it could do with a reference to a - // non-upgradeable `Weak`: - // - It can *clone* the `Weak`, increasing the weak reference count. - // - It can drop those clones, decreasing the weak reference count (but never to zero). - // - // These side effects do not impact us in any way, and no other side effects are - // possible with safe code alone. - let prev_value = (*inner).meta.strong.fetch_add(1, Release); - debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); - } - - let strong = Arc::from_inner(init_ptr); - - // Strong references should collectively own a shared weak reference, - // so don't run the destructor for our old weak reference. - mem::forget(weak); - strong + pub fn new_cyclic(data_fn: impl FnOnce(&Weak) -> T) -> Self { + Self::try_new_cyclic(data_fn).unwrap_or_else(|_| handle_alloc_error(Layout::new::())) } /// Constructs a new `Arc` with uninitialized contents. 
@@ -440,15 +400,10 @@ impl Arc { /// /// assert_eq!(*five, 5) /// ``` + #[inline] #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit() -> Arc> { - unsafe { - Arc::from_ptr(Arc::allocate_for_layout( - Layout::new::(), - |layout| Global.allocate(layout), - |mem| mem as *mut ArcInner>, - )) - } + Self::try_new_uninit().unwrap_or_else(|_| handle_alloc_error(Layout::new::())) } /// Constructs a new `Arc` with uninitialized contents, with the memory @@ -471,21 +426,17 @@ impl Arc { /// ``` /// /// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed + #[inline] #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed() -> Arc> { - unsafe { - Arc::from_ptr(Arc::allocate_for_layout( - Layout::new::(), - |layout| Global.allocate_zeroed(layout), - |mem| mem as *mut ArcInner>, - )) - } + Self::try_new_zeroed().unwrap_or_else(|_| handle_alloc_error(Layout::new::())) } /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then /// `data` will be pinned in memory and unable to be moved. + #[inline] #[stable(feature = "pin", since = "1.33.0")] - pub fn pin(data: T) -> Pin> { + pub fn pin(data: T) -> Pin { unsafe { Pin::new_unchecked(Arc::new(data)) } } @@ -500,11 +451,70 @@ impl Arc { /// let five = Arc::try_new(5)?; /// # Ok::<(), std::alloc::AllocError>(()) /// ``` + #[inline] #[unstable(feature = "allocator_api", issue = "32838")] + pub fn try_new(data: T) -> Result { + let mut arc = Self::try_new_uninit()?; + unsafe { + Arc::get_mut_unchecked(&mut arc).as_mut_ptr().write(data); + Ok(arc.assume_init()) + } + } + + /// Constructs a new `Arc` using a weak reference to itself. Attempting + /// to upgrade the weak reference before this function returns will result + /// in a `None` value. However, the weak reference may be cloned freely and + /// stored for use at a later time. + /// + /// # Examples + /// ``` + /// #![feature(allocator_api, arc_new_cyclic)] + /// #![allow(dead_code)] + /// + /// use std::sync::{Arc, Weak}; + /// + /// struct Foo { + /// me: Weak, + /// } + /// + /// let foo = Arc::try_new_cyclic(|me| Foo { + /// me: me.clone(), + /// })?; + /// # Ok::<(), std::alloc::AllocError>(()) + /// ``` #[inline] - pub fn try_new(data: T) -> Result, AllocError> { - let x: Box<_> = Box::try_new(ArcInner { meta: ArcMetadata::new_strong(), data })?; - Ok(Self::from_inner(Box::leak(x).into())) + #[unstable(feature = "arc_new_cyclic", issue = "75861")] + pub fn try_new_cyclic(data_fn: impl FnOnce(&Weak) -> T) -> Result { + // Construct the inner in the "uninitialized" state with a single + // weak reference. + let alloc = ArcAllocator::new(Global); + let ptr = Self::try_allocate( + &alloc, + Layout::new::(), + ArcMetadata::new_weak(), + AllocInit::Uninitialized, + NonNull::cast, + )?; + + // Strong references should collectively own a shared weak reference, + // so don't run the destructor for our old weak reference. + let weak = mem::ManuallyDrop::new(Weak { ptr, alloc }); + + // It's important we don't give up ownership of the weak pointer, or + // else the memory might be freed by the time `data_fn` returns. If + // we really wanted to pass ownership, we could create an additional + // weak pointer for ourselves, but this would result in additional + // updates to the weak reference count which might not be necessary + // otherwise. 
+ unsafe { + ptr.as_ptr().write(data_fn(&weak)); + } + + let meta = unsafe { ArcAllocator::::prefix(ptr).as_ref() }; + let prev_value = meta.strong.fetch_add(1, Release); + debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); + + unsafe { Ok(Self::from_raw_in(ptr.as_ptr(), weak.alloc)) } } /// Constructs a new `Arc` with uninitialized contents, returning an error @@ -533,13 +543,16 @@ impl Arc { #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_uninit() -> Result>, AllocError> { - unsafe { - Ok(Arc::from_ptr(Arc::try_allocate_for_layout( - Layout::new::(), - |layout| Global.allocate(layout), - |mem| mem as *mut ArcInner>, - )?)) - } + let alloc = ArcAllocator::new(Global); + let layout = Layout::new::(); + let ptr = Self::try_allocate( + &alloc, + layout, + ArcMetadata::new_strong(), + AllocInit::Uninitialized, + NonNull::cast, + )?; + unsafe { Ok(Arc::from_raw_in(ptr.as_ptr().cast(), alloc)) } } /// Constructs a new `Arc` with uninitialized contents, with the memory @@ -566,13 +579,16 @@ impl Arc { #[unstable(feature = "allocator_api", issue = "32838")] // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_zeroed() -> Result>, AllocError> { - unsafe { - Ok(Arc::from_ptr(Arc::try_allocate_for_layout( - Layout::new::(), - |layout| Global.allocate_zeroed(layout), - |mem| mem as *mut ArcInner>, - )?)) - } + let alloc = ArcAllocator::new(Global); + let layout = Layout::new::(); + let ptr = Self::try_allocate( + &alloc, + layout, + ArcMetadata::new_strong(), + AllocInit::Zeroed, + NonNull::cast, + )?; + unsafe { Ok(Arc::from_raw_in(ptr.as_ptr().cast(), alloc)) } } /// Returns the inner value, if the `Arc` has exactly one strong reference. 
/// @@ -596,20 +612,19 @@ impl Arc { #[inline] #[stable(feature = "arc_unique", since = "1.4.0")] pub fn try_unwrap(this: Self) -> Result { - if this.inner().meta.strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() { + if this.metadata().strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() { return Err(this); } - acquire!(this.inner().meta.strong); + acquire!(this.metadata().strong); + let this = mem::ManuallyDrop::new(this); unsafe { - let elem = ptr::read(&this.ptr.as_ref().data); + let val = this.ptr.as_ptr().read(); // Make a weak pointer to clean up the implicit strong-weak reference - let _weak = Weak { ptr: this.ptr }; - mem::forget(this); - - Ok(elem) + let _weak = Weak { ptr: this.ptr, alloc: this.alloc }; + Ok(val) } } } @@ -640,7 +655,15 @@ impl Arc<[T]> { /// ``` #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit_slice(len: usize) -> Arc<[mem::MaybeUninit]> { - unsafe { Arc::from_ptr(Arc::allocate_for_slice(len)) } + let alloc = ArcAllocator::new(Global); + let ptr = Arc::allocate( + &alloc, + Layout::array::(len).unwrap(), + ArcMetadata::new_strong(), + AllocInit::Uninitialized, + |ptr| NonNull::slice_from_raw_parts(ptr.cast(), len), + ); + unsafe { Arc::from_raw_in(ptr.as_ptr(), alloc) } } /// Constructs a new atomically reference-counted slice with uninitialized contents, with the memory being @@ -665,16 +688,15 @@ impl Arc<[T]> { /// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed_slice(len: usize) -> Arc<[mem::MaybeUninit]> { - unsafe { - Arc::from_ptr(Arc::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| Global.allocate_zeroed(layout), - |mem| { - ptr::slice_from_raw_parts_mut(mem as *mut T, len) - as *mut ArcInner<[mem::MaybeUninit]> - }, - )) - } + let alloc = ArcAllocator::new(Global); + let ptr = Arc::allocate( + &alloc, + Layout::array::(len).unwrap(), + ArcMetadata::new_strong(), + AllocInit::Zeroed, + |ptr| NonNull::slice_from_raw_parts(ptr.cast(), len), + ); + unsafe { Arc::from_raw_in(ptr.as_ptr(), alloc) } } } @@ -710,10 +732,11 @@ impl Arc> { /// /// assert_eq!(*five, 5) /// ``` - #[unstable(feature = "new_uninit", issue = "63291")] #[inline] + #[unstable(feature = "new_uninit", issue = "63291")] pub unsafe fn assume_init(self) -> Arc { - Arc::from_inner(mem::ManuallyDrop::new(self).ptr.cast()) + let this = mem::ManuallyDrop::new(self); + unsafe { Arc::from_raw_in(this.ptr.cast().as_ptr(), this.alloc) } } } @@ -751,10 +774,15 @@ impl Arc<[mem::MaybeUninit]> { /// /// assert_eq!(*values, [1, 2, 3]) /// ``` - #[unstable(feature = "new_uninit", issue = "63291")] #[inline] + #[unstable(feature = "new_uninit", issue = "63291")] pub unsafe fn assume_init(self) -> Arc<[T]> { - unsafe { Arc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) } + let this = mem::ManuallyDrop::new(self); + let len = this.ptr.len(); + let ptr = unsafe { + NonNull::slice_from_raw_parts(NonNull::new_unchecked(this.ptr.as_mut_ptr().cast()), len) + }; + unsafe { Arc::from_raw_in(ptr.as_ptr(), this.alloc) } } } @@ -773,11 +801,11 @@ impl Arc { /// let x_ptr = Arc::into_raw(x); /// assert_eq!(unsafe { &*x_ptr }, "hello"); /// ``` + #[inline] #[stable(feature = "rc_raw", since = "1.17.0")] pub fn into_raw(this: Self) -> *const T { - let ptr = Self::as_ptr(&this); - mem::forget(this); - ptr + let this = mem::ManuallyDrop::new(this); + Self::as_ptr(&this) } /// Provides a raw pointer to the data. 
@@ -796,14 +824,10 @@ impl Arc { /// assert_eq!(x_ptr, Arc::as_ptr(&y)); /// assert_eq!(unsafe { &*x_ptr }, "hello"); /// ``` + #[inline(always)] #[stable(feature = "rc_as_ptr", since = "1.45.0")] pub fn as_ptr(this: &Self) -> *const T { - let ptr: *mut ArcInner = NonNull::as_ptr(this.ptr); - - // SAFETY: This cannot go through Deref::deref or RcBoxPtr::inner because - // this is required to retain raw/mut provenance such that e.g. `get_mut` can - // write through the pointer after the Rc is recovered through `from_raw`. - unsafe { ptr::addr_of_mut!((*ptr).data) } + this.ptr.as_ptr() } /// Constructs an `Arc` from a raw pointer. @@ -843,9 +867,17 @@ impl Arc { /// /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling! /// ``` + #[inline] #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - unsafe { Self::from_data_ptr(ptr).assume_init() } + unsafe { Self::from_raw_in(ptr, ArcAllocator::new(Global)) } + } + + /// Constructs an `Arc` from a raw pointer. + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn from_raw_in(ptr: *const T, alloc: ArcAllocator) -> Self { + Self { ptr: unsafe { NonNull::new_unchecked(ptr as *mut T) }, alloc, _marker: PhantomData } } /// Creates a new [`Weak`] pointer to this allocation. @@ -863,13 +895,13 @@ impl Arc { pub fn downgrade(this: &Self) -> Weak { // This Relaxed is OK because we're checking the value in the CAS // below. - let mut cur = this.inner().meta.weak.load(Relaxed); + let mut cur = this.metadata().weak.load(Relaxed); loop { // check if the weak counter is currently "locked"; if so, spin. if cur == usize::MAX { hint::spin_loop(); - cur = this.inner().meta.weak.load(Relaxed); + cur = this.metadata().weak.load(Relaxed); continue; } @@ -880,11 +912,11 @@ impl Arc { // Unlike with Clone(), we need this to be an Acquire read to // synchronize with the write coming from `is_unique`, so that the // events prior to that write happen before this read. - match this.inner().meta.weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) { + match this.metadata().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) { Ok(_) => { // Make sure we do not create a dangling Weak debug_assert!(!is_dangling(this.ptr.as_ptr())); - return Weak { ptr: this.ptr }; + return Weak { ptr: this.ptr, alloc: this.alloc.clone() }; } Err(old) => cur = old, } @@ -914,7 +946,7 @@ impl Arc { #[inline] #[stable(feature = "arc_counts", since = "1.15.0")] pub fn weak_count(this: &Self) -> usize { - let cnt = this.inner().meta.weak.load(SeqCst); + let cnt = this.metadata().weak.load(SeqCst); // If the weak count is currently locked, the value of the // count was 0 just before taking the lock. if cnt == usize::MAX { 0 } else { cnt - 1 } @@ -943,7 +975,7 @@ impl Arc { #[inline] #[stable(feature = "arc_counts", since = "1.15.0")] pub fn strong_count(this: &Self) -> usize { - this.inner().meta.strong.load(SeqCst) + this.metadata().strong.load(SeqCst) } /// Increments the strong reference count on the `Arc` associated with the @@ -1017,16 +1049,6 @@ impl Arc { unsafe { mem::drop(Arc::from_raw(ptr)) }; } - #[inline] - fn inner(&self) -> &ArcInner { - // This unsafety is ok because while this arc is alive we're guaranteed - // that the inner pointer is valid. Furthermore, we know that the - // `ArcInner` structure itself is `Sync` because the inner data is - // `Sync` as well, so we're ok loaning out an immutable pointer to these - // contents. 
- unsafe { self.ptr.as_ref() } - } - // Non-inlined part of `drop`. #[inline(never)] unsafe fn drop_slow(&mut self) { @@ -1035,7 +1057,7 @@ impl Arc { unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) }; // Drop the weak ref collectively held by all strong references - drop(Weak { ptr: self.ptr }); + drop(Weak { ptr: self.ptr, alloc: ArcAllocator::new(Global) }); } #[inline] @@ -1058,138 +1080,100 @@ impl Arc { /// /// [`ptr::eq`]: core::ptr::eq pub fn ptr_eq(this: &Self, other: &Self) -> bool { - this.ptr.as_ptr() == other.ptr.as_ptr() + this.ptr == other.ptr } } impl Arc { - /// Allocates an `ArcInner` with sufficient space for - /// a possibly-unsized inner value where the value has the layout provided. - /// - /// The function `mem_to_arcinner` is called with the data pointer - /// and must return back a (potentially fat)-pointer for the `ArcInner`. - unsafe fn allocate_for_layout( - value_layout: Layout, - allocate: impl FnOnce(Layout) -> Result, AllocError>, - mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner, - ) -> *mut ArcInner { - // Calculate layout using the given value layout. - // Previously, layout was calculated on the expression - // `&*(ptr as *const ArcInner)`, but this created a misaligned - // reference (see #54908). - let layout = Layout::new::>().extend(value_layout).unwrap().0.pad_to_align(); - unsafe { - Arc::try_allocate_for_layout(value_layout, allocate, mem_to_arcinner) - .unwrap_or_else(|_| handle_alloc_error(layout)) - } - } - - /// Allocates an `ArcInner` with sufficient space for + /// Allocates an `Rc` with sufficient space for /// a possibly-unsized inner value where the value has the layout provided, /// returning an error if allocation fails. /// - /// The function `mem_to_arcinner` is called with the data pointer - /// and must return back a (potentially fat)-pointer for the `ArcInner`. - unsafe fn try_allocate_for_layout( - value_layout: Layout, - allocate: impl FnOnce(Layout) -> Result, AllocError>, - mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner, - ) -> Result<*mut ArcInner, AllocError> { - // Calculate layout using the given value layout. - // Previously, layout was calculated on the expression - // `&*(ptr as *const ArcInner)`, but this created a misaligned - // reference (see #54908). - let layout = Layout::new::>().extend(value_layout).unwrap().0.pad_to_align(); - - let ptr = allocate(layout)?; - - // Initialize the ArcInner - let inner = mem_to_arcinner(ptr.as_non_null_ptr().as_ptr()); - debug_assert_eq!(unsafe { Layout::for_value(&*inner) }, layout); - - unsafe { - ptr::write(&mut (*inner).meta.strong, atomic::AtomicUsize::new(1)); - ptr::write(&mut (*inner).meta.weak, atomic::AtomicUsize::new(1)); - } - - Ok(inner) - } + /// The function `mem_to_rcbox` is called with the data pointer + /// and must return back a (potentially fat)-pointer for the `RcBox`. + #[inline] + fn allocate( + alloc: &ArcAllocator, + layout: Layout, + meta: ArcMetadata, + init: AllocInit, + mem_to_ptr: impl FnOnce(NonNull) -> NonNull, + ) -> NonNull { + Self::try_allocate(alloc, layout, meta, init, mem_to_ptr) + .unwrap_or_else(|_| handle_alloc_error(layout)) + } + + /// Allocates an `Arc` with sufficient space for + /// a possibly-unsized inner value where the value has the layout provided. + /// + /// The function `mem_to_mem` is called with the data pointer + /// and must return back a (potentially fat)-pointer for the `Arc`. 
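For orientation, the layout arithmetic these `allocate`/`try_allocate` helpers rely on can be reproduced with stable `Layout` calls alone; the `Meta` struct below is an illustrative stand-in for the real metadata type, not the patch's definition.

```rust
use std::alloc::Layout;
use std::mem::size_of;

// Illustrative stand-in for the reference-count header stored in front of the value.
#[allow(dead_code)]
struct Meta {
    strong: usize,
    weak: usize,
}

fn main() {
    let value_layout = Layout::new::<u8>();

    // `extend` yields the combined layout plus the offset at which the value
    // starts: header size plus whatever padding the value's alignment needs.
    let (combined, value_offset) = Layout::new::<Meta>().extend(value_layout).unwrap();

    // A 1-byte-aligned value needs no padding after the header.
    assert_eq!(value_offset, size_of::<Meta>());
    assert!(combined.size() >= value_offset + value_layout.size());

    // The pointer handed out to users is `allocation_start + value_offset`;
    // walking back by the same offset recovers the metadata header.
}
```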
+ #[inline] + fn try_allocate( + alloc: &ArcAllocator, + layout: Layout, + meta: ArcMetadata, + init: AllocInit, + mem_to_ptr: impl FnOnce(NonNull) -> NonNull, + ) -> Result, AllocError> { + let memory = match init { + AllocInit::Uninitialized => alloc.allocate(layout)?, + AllocInit::Zeroed => alloc.allocate_zeroed(layout)?, + }; - /// Allocates an `ArcInner` with sufficient space for an unsized inner value. - unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner { - // Allocate for the `ArcInner` using the given value. + let ptr = mem_to_ptr(memory.as_non_null_ptr()); unsafe { - Self::allocate_for_layout( - Layout::for_value(&*ptr), - |layout| Global.allocate(layout), - |mem| (ptr as *mut ArcInner).set_ptr_value(mem) as *mut ArcInner, - ) + ArcAllocator::::prefix(ptr).as_ptr().write(meta); } + Ok(ptr) } fn from_box(v: Box) -> Arc { unsafe { let (box_unique, alloc) = Box::into_unique(v); let bptr = box_unique.as_ptr(); - - let value_size = size_of_val(&*bptr); - let ptr = Self::allocate_for_ptr(bptr); + let arc_alloc = ArcAllocator::new(alloc); + + let layout = Layout::for_value(&*bptr); + let ptr = Self::allocate( + &arc_alloc, + layout, + ArcMetadata::new_strong(), + AllocInit::Uninitialized, + |mem| NonNull::new_unchecked(bptr.set_ptr_value(mem.as_ptr())), + ); // Copy value as bytes ptr::copy_nonoverlapping( bptr as *const T as *const u8, - &mut (*ptr).data as *mut _ as *mut u8, - value_size, + ptr.as_ptr() as *mut u8, + layout.size(), ); // Free the allocation without dropping its contents - box_free(box_unique, alloc); + box_free(box_unique, &arc_alloc.parent); - Self::from_ptr(ptr) - } - } - - /// # Safety - /// - /// The caller must ensure that the pointer points to the `data` field of a `Global` - /// allocation of type `ArcInner`. Depending on how the pointer was created, the - /// `meta` field might or might not be uninitialized. It's up to the caller to ensure - /// that this field is set to the correct value before the return value is unwrapped. - #[inline] - unsafe fn from_data_ptr(ptr: *const T) -> MaybeUninit { - unsafe { - let offset = data_offset(ptr); - - // Reverse the offset to find the original ArcInner. - let arc_ptr = (ptr as *mut ArcInner).set_ptr_value((ptr as *mut u8).offset(-offset)); - - MaybeUninit::new(Self::from_ptr(arc_ptr)) + Self::from_raw_in(ptr.as_ptr(), arc_alloc) } } } impl Arc<[T]> { - /// Allocates an `ArcInner<[T]>` with the given length. - unsafe fn allocate_for_slice(len: usize) -> *mut ArcInner<[T]> { - unsafe { - Self::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| Global.allocate(layout), - |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>, - ) - } - } - /// Copy elements from slice into newly allocated Arc<\[T\]> /// /// Unsafe because the caller must either take ownership or bind `T: Copy`. unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> { + let alloc = ArcAllocator::new(Global); + let ptr = Self::allocate( + &alloc, + Layout::array::(v.len()).unwrap(), + ArcMetadata::new_strong(), + AllocInit::Uninitialized, + |ptr| NonNull::slice_from_raw_parts(ptr.cast(), v.len()), + ); unsafe { - let ptr = Self::allocate_for_slice(v.len()); - - ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len()); - - Self::from_ptr(ptr) + ptr::copy_nonoverlapping(v.as_ptr(), ptr.as_non_null_ptr().as_ptr(), v.len()); + Self::from_raw_in(ptr.as_ptr(), alloc) } } @@ -1200,44 +1184,47 @@ impl Arc<[T]> { // Panic guard while cloning T elements. 
// In the event of a panic, elements that have been written // into the new ArcInner will be dropped, then the memory freed. - struct Guard { - mem: NonNull, - elems: *mut T, + struct Guard { + ptr: NonNull, layout: Layout, n_elems: usize, + alloc: A, } - impl Drop for Guard { + impl Drop for Guard { fn drop(&mut self) { unsafe { - let slice = from_raw_parts_mut(self.elems, self.n_elems); + let slice = from_raw_parts_mut(self.ptr.as_ptr(), self.n_elems); ptr::drop_in_place(slice); - Global.deallocate(self.mem, self.layout); + self.alloc.deallocate(self.ptr.cast(), self.layout); } } } unsafe { - let ptr = Self::allocate_for_slice(len); - - let mem = ptr as *mut _ as *mut u8; - let layout = Layout::for_value(&*ptr); - - // Pointer to first element - let elems = &mut (*ptr).data as *mut [T] as *mut T; + let alloc = ArcAllocator::new(Global); + let layout = Layout::array::(len).unwrap(); + let ptr = Self::allocate( + &alloc, + layout, + ArcMetadata::new_strong(), + AllocInit::Uninitialized, + |ptr| NonNull::slice_from_raw_parts(ptr.cast(), len), + ); - let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 }; + let mut guard = + Guard { ptr: ptr.as_non_null_ptr(), layout, n_elems: 0, alloc: alloc.by_ref() }; for (i, item) in iter.enumerate() { - ptr::write(elems.add(i), item); + ptr::write(ptr.get_unchecked_mut(i).as_ptr(), item); guard.n_elems += 1; } - // All clear. Forget the guard so it doesn't free the new ArcInner. - mem::forget(guard); + // All clear. Forget the guard so it doesn't free the new RcBox. + forget(guard); - Self::from_ptr(ptr) + Self::from_raw_in(ptr.as_ptr(), alloc) } } } @@ -1290,7 +1277,7 @@ impl Clone for Arc { // another must already provide any required synchronization. // // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html) - let old_size = self.inner().meta.strong.fetch_add(1, Relaxed); + let old_size = self.metadata().strong.fetch_add(1, Relaxed); // However we need to guard against massive refcounts in case someone // is `mem::forget`ing Arcs. If we don't do this the count can overflow @@ -1305,7 +1292,7 @@ impl Clone for Arc { abort(); } - Self::from_inner(self.ptr) + unsafe { Self::from_raw(self.ptr.as_ptr()) } } } @@ -1315,7 +1302,7 @@ impl Deref for Arc { #[inline] fn deref(&self) -> &T { - &self.inner().data + unsafe { self.ptr.as_ref() } } } @@ -1366,7 +1353,7 @@ impl Arc { // before release writes (i.e., decrements) to `strong`. Since we hold a // weak count, there's no chance the ArcInner itself could be // deallocated. - if this.inner().meta.strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() { + if this.metadata().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() { // Another strong pointer exists, so we must clone. // Pre-allocate memory to allow writing the cloned value directly. let mut arc = Self::new_uninit(); @@ -1375,7 +1362,7 @@ impl Arc { (**this).write_clone_into_raw(data.as_mut_ptr()); *this = arc.assume_init(); } - } else if this.inner().meta.weak.load(Relaxed) != 1 { + } else if this.metadata().weak.load(Relaxed) != 1 { // Relaxed suffices in the above because this is fundamentally an // optimization: we are always racing with weak pointers being // dropped. Worst case, we end up allocated a new Arc unnecessarily. @@ -1390,7 +1377,7 @@ impl Arc { // Materialize our own implicit weak pointer, so that it can clean // up the ArcInner as needed. 
- let _weak = Weak { ptr: this.ptr }; + let _weak = Weak { ptr: this.ptr, alloc: this.alloc.clone() }; // Can just steal the data, all that's left is Weaks let mut arc = Self::new_uninit(); @@ -1402,7 +1389,7 @@ impl Arc { } else { // We were the sole reference of either kind; bump back up the // strong ref count. - this.inner().meta.strong.store(1, Release); + this.metadata().strong.store(1, Release); } // As with `get_mut()`, the unsafety is ok because our reference was @@ -1483,7 +1470,7 @@ impl Arc { pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T { // We are careful to *not* create a reference covering the "count" fields, as // this would alias with concurrent access to the reference counts (e.g. by `Weak`). - unsafe { &mut (*this.ptr.as_ptr()).data } + unsafe { this.ptr.as_mut() } } /// Determine whether this is the unique reference (including weak refs) to @@ -1498,16 +1485,16 @@ impl Arc { // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements // of the `weak` count (via `Weak::drop`, which uses release). If the upgraded // weak ref was never dropped, the CAS here will fail so we do not care to synchronize. - if self.inner().meta.weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() { + if self.metadata().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() { // This needs to be an `Acquire` to synchronize with the decrement of the `strong` // counter in `drop` -- the only access that happens when any but the last reference // is being dropped. - let unique = self.inner().meta.strong.load(Acquire) == 1; + let unique = self.metadata().strong.load(Acquire) == 1; // The release write here synchronizes with a read in `downgrade`, // effectively preventing the above read of `strong` from happening // after the write. - self.inner().meta.weak.store(1, Release); // release the lock + self.metadata().weak.store(1, Release); // release the lock unique } else { false @@ -1547,7 +1534,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc { // Because `fetch_sub` is already atomic, we do not need to synchronize // with other threads unless we are going to delete the object. This // same logic applies to the below `fetch_sub` to the `weak` count. - if self.inner().meta.strong.fetch_sub(1, Release) != 1 { + if self.metadata().strong.fetch_sub(1, Release) != 1 { return; } @@ -1579,7 +1566,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc { // // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html) // [2]: (https://github.com/rust-lang/rust/pull/41714) - acquire!(self.inner().meta.strong); + acquire!(self.metadata().strong); unsafe { self.drop_slow(); @@ -1613,9 +1600,8 @@ impl Arc { T: Any + Send + Sync + 'static, { if (*self).is::() { - let ptr = self.ptr.cast::>(); - mem::forget(self); - Ok(Arc::from_inner(ptr)) + let this = mem::ManuallyDrop::new(self); + unsafe { Ok(Arc::from_raw_in(this.ptr.cast().as_ptr(), this.alloc)) } } else { Err(self) } @@ -1638,17 +1624,13 @@ impl Weak { /// ``` #[stable(feature = "downgraded_weak", since = "1.10.0")] pub fn new() -> Weak { - Weak { ptr: NonNull::new(usize::MAX as *mut ArcInner).expect("MAX is not 0") } + Weak { + ptr: NonNull::new(usize::MAX as *mut T).expect("MAX is not 0"), + alloc: ArcAllocator::new(Global), + } } } -/// Helper type to allow accessing the reference counts without -/// making any assertions about the data field. 
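The clone-on-write behaviour that `make_mut` builds on top of these counters is easiest to see from the caller's side; a small sketch using only stable API:

```rust
use std::sync::Arc;

fn main() {
    let mut data = Arc::new(vec![1, 2, 3]);
    let snapshot = Arc::clone(&data);

    // Two strong references exist, so `make_mut` clones the vector first
    // and leaves `snapshot` untouched.
    Arc::make_mut(&mut data).push(4);

    assert_eq!(*data, [1, 2, 3, 4]);
    assert_eq!(*snapshot, [1, 2, 3]);
}
```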
-struct WeakInner<'a> { - weak: &'a atomic::AtomicUsize, - strong: &'a atomic::AtomicUsize, -} - impl Weak { /// Returns a raw pointer to the object `T` pointed to by this `Weak`. /// @@ -1677,18 +1659,7 @@ impl Weak { /// [`null`]: core::ptr::null #[stable(feature = "weak_into_raw", since = "1.45.0")] pub fn as_ptr(&self) -> *const T { - let ptr: *mut ArcInner = NonNull::as_ptr(self.ptr); - - if is_dangling(ptr) { - // If the pointer is dangling, we return the sentinel directly. This cannot be - // a valid payload address, as the payload is at least as aligned as ArcInner (usize). - ptr as *const T - } else { - // SAFETY: if is_dangling returns false, then the pointer is dereferencable. - // The payload may be dropped at this point, and we have to maintain provenance, - // so use raw pointer manipulation. - unsafe { ptr::addr_of_mut!((*ptr).data) } - } + self.ptr.as_ptr() } /// Consumes the `Weak` and turns it into a raw pointer. @@ -1767,24 +1738,17 @@ impl Weak { /// [`into_raw`]: Weak::into_raw /// [`upgrade`]: Weak::upgrade /// [`forget`]: std::mem::forget + #[inline] #[stable(feature = "weak_into_raw", since = "1.45.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - // See Weak::as_ptr for context on how the input pointer is derived. - - let ptr = if is_dangling(ptr as *mut T) { - // This is a dangling Weak. - ptr as *mut ArcInner - } else { - // Otherwise, we're guaranteed the pointer came from a nondangling Weak. - // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T. - let offset = unsafe { data_offset(ptr) }; - // Thus, we reverse the offset to get the whole RcBox. - // SAFETY: the pointer originated from a Weak, so this offset is safe. - unsafe { (ptr as *mut ArcInner).set_ptr_value((ptr as *mut u8).offset(-offset)) } - }; + unsafe { Self::from_raw_in(ptr, ArcAllocator::new(Global)) } + } - // SAFETY: we now have recovered the original Weak pointer, so can create the Weak. - Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } } + /// Constructs a `Weak` from a raw pointer. + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn from_raw_in(ptr: *const T, alloc: ArcAllocator) -> Self { + Self { ptr: unsafe { NonNull::new_unchecked(ptr as *mut T) }, alloc } } } @@ -1817,13 +1781,13 @@ impl Weak { // We use a CAS loop to increment the strong count instead of a // fetch_add as this function should never take the reference count // from zero to one. - let inner = self.inner()?; + let meta = self.metadata()?; // Relaxed load because any write of 0 that we can observe // leaves the field in a permanently zero state (so a // "stale" read of 0 is fine), and any other value is // confirmed via the CAS below. - let mut n = inner.strong.load(Relaxed); + let mut n = meta.strong.load(Relaxed); loop { if n == 0 { @@ -1839,8 +1803,8 @@ impl Weak { // Acquire is necessary for the success case to synchronise with `Arc::new_cyclic`, when the inner // value can be initialized after `Weak` references have already been created. In that case, we // expect to observe the fully initialized value. - match inner.strong.compare_exchange_weak(n, n + 1, Acquire, Relaxed) { - Ok(_) => return Some(Arc::from_inner(self.ptr)), // null checked above + match meta.strong.compare_exchange_weak(n, n + 1, Acquire, Relaxed) { + Ok(_) => unsafe { return Some(Arc::from_raw(self.ptr.as_ptr())) }, // null checked above Err(old) => n = old, } } @@ -1851,7 +1815,7 @@ impl Weak { /// If `self` was created using [`Weak::new`], this will return 0. 
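A short reminder of the observable behaviour these counters back, using only stable `std` API; nothing below depends on the prefix layout:

```rust
use std::sync::{Arc, Weak};

fn main() {
    let strong = Arc::new(42);
    let weak: Weak<i32> = Arc::downgrade(&strong);

    assert_eq!(weak.strong_count(), 1);
    assert_eq!(weak.upgrade().map(|a| *a), Some(42));

    drop(strong);

    // Once the last strong reference is gone the value is dropped and
    // upgrading fails, even though the `Weak` handle is still alive.
    assert_eq!(weak.strong_count(), 0);
    assert!(weak.upgrade().is_none());
}
```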
#[stable(feature = "weak_counts", since = "1.41.0")] pub fn strong_count(&self) -> usize { - if let Some(inner) = self.inner() { inner.strong.load(SeqCst) } else { 0 } + self.metadata().map(|meta| meta.strong.load(SeqCst)).unwrap_or(0) } /// Gets an approximation of the number of `Weak` pointers pointing to this @@ -1867,10 +1831,10 @@ impl Weak { /// `Weak`s pointing to the same allocation. #[stable(feature = "weak_counts", since = "1.41.0")] pub fn weak_count(&self) -> usize { - self.inner() - .map(|inner| { - let weak = inner.weak.load(SeqCst); - let strong = inner.strong.load(SeqCst); + self.metadata() + .map(|meta| { + let weak = meta.weak.load(SeqCst); + let strong = meta.strong.load(SeqCst); if strong == 0 { 0 } else { @@ -1885,23 +1849,6 @@ impl Weak { .unwrap_or(0) } - /// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`, - /// (i.e., when this `Weak` was created by `Weak::new`). - #[inline] - fn inner(&self) -> Option> { - if is_dangling(self.ptr.as_ptr()) { - None - } else { - // We are careful to *not* create a reference covering the "data" field, as - // the field may be mutated concurrently (for example, if the last `Arc` - // is dropped, the data field will be dropped in-place). - Some(unsafe { - let ptr = self.ptr.as_ptr(); - WeakInner { strong: &(*ptr).meta.strong, weak: &(*ptr).meta.weak } - }) - } - } - /// Returns `true` if the two `Weak`s point to the same allocation (similar to /// [`ptr::eq`]), or if both don't point to any allocation /// (because they were created with `Weak::new()`). @@ -1946,7 +1893,7 @@ impl Weak { #[inline] #[stable(feature = "weak_ptr_eq", since = "1.39.0")] pub fn ptr_eq(&self, other: &Self) -> bool { - self.ptr.as_ptr() == other.ptr.as_ptr() + self.ptr == other.ptr } } @@ -1965,23 +1912,23 @@ impl Clone for Weak { /// ``` #[inline] fn clone(&self) -> Weak { - let inner = if let Some(inner) = self.inner() { - inner + let meta = if let Some(meta) = self.metadata() { + meta } else { - return Weak { ptr: self.ptr }; + return Weak { ptr: self.ptr, alloc: self.alloc.clone() }; }; // See comments in Arc::clone() for why this is relaxed. This can use a // fetch_add (ignoring the lock) because the weak count is only locked // where are *no other* weak pointers in existence. (So we can't be // running this code in that case). - let old_size = inner.weak.fetch_add(1, Relaxed); + let old_size = meta.weak.fetch_add(1, Relaxed); // See comments in Arc::clone() for why we do this (for mem::forget). if old_size > MAX_REFCOUNT { abort(); } - Weak { ptr: self.ptr } + Weak { ptr: self.ptr, alloc: self.alloc.clone() } } } @@ -2041,11 +1988,13 @@ impl Drop for Weak { // weak count can only be locked if there was precisely one weak ref, // meaning that drop could only subsequently run ON that remaining weak // ref, which can only happen after the lock is released. 
- let inner = if let Some(inner) = self.inner() { inner } else { return }; + let meta = if let Some(meta) = self.metadata() { meta } else { return }; - if inner.weak.fetch_sub(1, Release) == 1 { - acquire!(inner.weak); - unsafe { Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())) } + if meta.weak.fetch_sub(1, Release) == 1 { + acquire!(meta.weak); + unsafe { + self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())) + } } } } @@ -2520,22 +2469,3 @@ impl AsRef for Arc { #[stable(feature = "pin", since = "1.33.0")] impl Unpin for Arc {} - -type ArcAllocator = PrefixAllocator; - -/// Get the offset within an `ArcInner` for the payload behind a pointer. -/// -/// # Safety -/// -/// The pointer must point to (and have valid metadata for) a previously -/// valid instance of T, but the T is allowed to be dropped. -unsafe fn data_offset(ptr: *const T) -> isize { - unsafe { - // SAFETY: since the only unsized types possible are slices, trait objects, - // and extern types, the input safety requirement is currently enough to - // satisfy the requirements of for_value_raw; this is an implementation - // detail of the language that may not be relied upon outside of std. - let layout = Layout::for_value_raw(ptr); - ArcAllocator::prefix_offset(layout) as isize - } -} diff --git a/src/etc/gdb_providers.py b/src/etc/gdb_providers.py index 7c242e0f82262..490b02cf4e031 100644 --- a/src/etc/gdb_providers.py +++ b/src/etc/gdb_providers.py @@ -148,12 +148,6 @@ def size_rounded_up(size, align): self.valobj = valobj self.is_atomic = is_atomic self.ptr = unwrap_unique_or_non_null(valobj["ptr"]) - if is_atomic: - # Use old lookup for Arc as it is not refactored yet - self.value = self.ptr["data" if is_atomic else "value"] - self.strong = self.ptr["meta"]["strong"]["v"]["value"] - self.weak = self.ptr["meta"]["weak"]["v"]["value"] - 1 - return self.value = self.ptr.dereference() metadata_type = self.valobj["alloc"].type.template_argument(1) diff --git a/src/etc/lldb_providers.py b/src/etc/lldb_providers.py index 47117c62d0dc1..fa8e4effc9919 100644 --- a/src/etc/lldb_providers.py +++ b/src/etc/lldb_providers.py @@ -606,14 +606,11 @@ def size_rounded_up(size, align): self.ptr = unwrap_unique_or_non_null(self.valobj.GetChildMemberWithName("ptr")) - if is_atomic: - self.meta = self.ptr.GetChildMemberWithName("meta") - else: - metadata_type = self.valobj.GetChildMemberWithName("alloc").type.template_args[1] - align = self.ptr.type.GetPointeeType().GetByteSize() - offset = size_rounded_up(metadata_type.size, align) - meta_address = self.ptr.GetValueAsUnsigned() - offset - self.meta = self.valobj.CreateValueFromAddress("meta", meta_address, metadata_type) + metadata_type = self.valobj.GetChildMemberWithName("alloc").type.template_args[1] + align = self.ptr.type.GetPointeeType().GetByteSize() + offset = size_rounded_up(metadata_type.size, align) + meta_address = self.ptr.GetValueAsUnsigned() - offset + self.meta = self.valobj.CreateValueFromAddress("meta", meta_address, metadata_type) self.update() @@ -635,8 +632,7 @@ def get_child_index(self, name): def get_child_at_index(self, index): # type: (int) -> SBValue if index == 0: - value = self.ptr.GetChildMemberWithName("data") if self.is_atomic \ - else self.ptr.Dereference() + value = self.ptr.Dereference() return self.valobj.CreateValueFromData("value", value.data, value.type) if index == 1: return self.value_builder.from_uint("strong", self.strong_count) From bcbba9cdffde09270bb399532b8245da6d449715 Mon Sep 17 00:00:00 
2001 From: Tim Diekmann Date: Mon, 26 Apr 2021 13:07:49 +0200 Subject: [PATCH 13/20] Fix inlining for `fn metadata` and move out allocator call --- library/alloc/src/rc.rs | 103 +++++++++++++------------ library/alloc/src/sync.rs | 153 +++++++++++++++++++++----------------- 2 files changed, 139 insertions(+), 117 deletions(-) diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index 10215b78b32ba..bc87b9f999673 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -382,7 +382,7 @@ impl, U: ?Sized> CoerceUnsized> for Rc {} impl, U: ?Sized> DispatchFromDyn> for Rc {} impl Rc { - #[inline(always)] + #[inline] fn metadata(&self) -> &RcMetadata { unsafe { RcAllocator::::prefix(self.ptr).as_ref() } } @@ -401,7 +401,11 @@ impl Rc { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn new(value: T) -> Rc { - Self::try_new(value).unwrap_or_else(|_| handle_alloc_error(Layout::new::())) + let mut rc = Self::new_uninit(); + unsafe { + Rc::get_mut_unchecked(&mut rc).as_mut_ptr().write(value); + rc.assume_init() + } } /// Constructs a new `Rc` using a weak reference to itself. Attempting @@ -431,47 +435,16 @@ impl Rc { #[inline] #[unstable(feature = "arc_new_cyclic", issue = "75861")] pub fn new_cyclic(data_fn: impl FnOnce(&Weak) -> T) -> Rc { - Self::try_new_cyclic(data_fn).unwrap_or_else(|_| handle_alloc_error(Layout::new::())) - } - - /// Tries to construct a new `Rc` using a weak reference to itself. Attempting - /// to upgrade the weak reference before this function returns will result - /// in a `None` value. However, the weak reference may be cloned freely and - /// stored for use at a later time. - /// - /// # Examples - /// - /// ``` - /// #![feature(allocator_api, arc_new_cyclic)] - /// #![allow(dead_code)] - /// use std::alloc::AllocError; - /// use std::rc::{Rc, Weak}; - /// - /// struct Gadget { - /// self_weak: Weak, - /// // ... more fields - /// } - /// impl Gadget { - /// pub fn new() -> Result, AllocError> { - /// Rc::try_new_cyclic(|self_weak| { - /// Gadget { self_weak: self_weak.clone(), /* ... */ } - /// }) - /// } - /// } - /// ``` - #[unstable(feature = "allocator_api", issue = "32838")] - // #[unstable(feature = "arc_new_cyclic", issue = "75861")] - pub fn try_new_cyclic(data_fn: impl FnOnce(&Weak) -> T) -> Result, AllocError> { // Construct the inner in the "uninitialized" state with a single // weak reference. let alloc = RcAllocator::new(Global); - let ptr = Self::try_allocate( + let ptr = Self::allocate( &alloc, Layout::new::(), RcMetadata::new_weak(), AllocInit::Uninitialized, NonNull::cast, - )?; + ); // Strong references should collectively own a shared weak reference, // so don't run the destructor for our old weak reference. @@ -491,7 +464,7 @@ impl Rc { debug_assert_eq!(meta.strong.get(), 0, "No prior strong references should exist"); meta.strong.set(1); - unsafe { Ok(Self::from_raw_in(ptr.as_ptr(), weak.alloc)) } + unsafe { Self::from_raw_in(ptr.as_ptr(), weak.alloc) } } /// Constructs a new `Rc` with uninitialized contents. 
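For context, the contract `new_cyclic` keeps here is that the closure runs while only the implicit weak reference exists; a minimal caller-side sketch (the `Gadget` type is illustrative, and on older toolchains this sits behind the `arc_new_cyclic` feature):

```rust
use std::rc::{Rc, Weak};

struct Gadget {
    me: Weak<Gadget>,
}

fn main() {
    let gadget = Rc::new_cyclic(|me| {
        // No strong reference exists yet, so upgrading inside the closure fails.
        assert!(me.upgrade().is_none());
        Gadget { me: me.clone() }
    });

    // Afterwards the stored weak handle upgrades to the finished `Rc`.
    assert!(gadget.me.upgrade().is_some());
    assert_eq!(Rc::strong_count(&gadget), 1);
}
```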
@@ -518,7 +491,16 @@ impl Rc { #[inline] #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit() -> Rc> { - Self::try_new_uninit().unwrap_or_else(|_| handle_alloc_error(Layout::new::())) + let alloc = RcAllocator::new(Global); + let layout = Layout::new::(); + let ptr = Self::allocate( + &alloc, + layout, + RcMetadata::new_strong(), + AllocInit::Uninitialized, + NonNull::cast, + ); + unsafe { Rc::from_raw_in(ptr.as_ptr().cast(), alloc) } } /// Constructs a new `Rc` with uninitialized contents, with the memory @@ -544,7 +526,16 @@ impl Rc { #[inline] #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed() -> Rc> { - Self::try_new_zeroed().unwrap_or_else(|_| handle_alloc_error(Layout::new::())) + let alloc = RcAllocator::new(Global); + let layout = Layout::new::(); + let ptr = Self::allocate( + &alloc, + layout, + RcMetadata::new_strong(), + AllocInit::Zeroed, + NonNull::cast, + ); + unsafe { Rc::from_raw_in(ptr.as_ptr().cast(), alloc) } } /// Constructs a new `Rc`, returning an error if the allocation fails @@ -1293,8 +1284,11 @@ impl Rc { init: AllocInit, mem_to_ptr: impl FnOnce(NonNull) -> NonNull, ) -> NonNull { - Self::try_allocate(alloc, layout, meta, init, mem_to_ptr) - .unwrap_or_else(|_| handle_alloc_error(layout)) + let ptr = mem_to_ptr(allocate(alloc, layout, init)); + unsafe { + RcAllocator::::prefix(ptr).as_ptr().write(meta); + } + ptr } /// Allocates an `Rc` with sufficient space for @@ -1311,12 +1305,7 @@ impl Rc { init: AllocInit, mem_to_ptr: impl FnOnce(NonNull) -> NonNull, ) -> Result, AllocError> { - let memory = match init { - AllocInit::Uninitialized => alloc.allocate(layout)?, - AllocInit::Zeroed => alloc.allocate_zeroed(layout)?, - }; - - let ptr = mem_to_ptr(memory.as_non_null_ptr()); + let ptr = mem_to_ptr(try_allocate(alloc, layout, init)?); unsafe { RcAllocator::::prefix(ptr).as_ptr().write(meta); } @@ -2043,7 +2032,7 @@ impl, U: ?Sized> CoerceUnsized> for Weak {} impl, U: ?Sized> DispatchFromDyn> for Weak {} impl Weak { - #[inline(always)] + #[inline] fn metadata(&self) -> Option<&RcMetadata> { if is_dangling(self.ptr.as_ptr()) { None @@ -2398,6 +2387,26 @@ impl Default for Weak { } } +/// Dediated function for allocating to prevent generating a function for every `T` +#[inline] +fn allocate(alloc: &RcAllocator, layout: Layout, init: AllocInit) -> NonNull { + try_allocate(alloc, layout, init).unwrap_or_else(|_| handle_alloc_error(layout)) +} + +/// Dediated function for allocating to prevent generating a function for every `T` +#[inline] +fn try_allocate( + alloc: &RcAllocator, + layout: Layout, + init: AllocInit, +) -> Result, AllocError> { + let ptr = match init { + AllocInit::Uninitialized => alloc.allocate(layout)?, + AllocInit::Zeroed => alloc.allocate_zeroed(layout)?, + }; + Ok(ptr.as_non_null_ptr()) +} + #[stable(feature = "rust1", since = "1.0.0")] impl borrow::Borrow for Rc { fn borrow(&self) -> &T { diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index 79618a90658e6..4fdca1b34c329 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -271,7 +271,7 @@ impl, U: ?Sized> CoerceUnsized> for Arc {} impl, U: ?Sized> DispatchFromDyn> for Arc {} impl Arc { - #[inline(always)] + #[inline] fn metadata(&self) -> &ArcMetadata { unsafe { ArcAllocator::::prefix(self.ptr).as_ref() } } @@ -327,7 +327,7 @@ impl fmt::Debug for Weak { } impl Weak { - #[inline(always)] + #[inline] fn metadata(&self) -> Option<&ArcMetadata> { if is_dangling(self.ptr.as_ptr()) { None @@ -350,7 +350,11 @@ impl Arc { 
#[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn new(data: T) -> Self { - Self::try_new(data).unwrap_or_else(|_| handle_alloc_error(Layout::new::())) + let mut arc = Self::new_uninit(); + unsafe { + Arc::get_mut_unchecked(&mut arc).as_mut_ptr().write(data); + arc.assume_init() + } } /// Constructs a new `Arc` using a weak reference to itself. Attempting @@ -376,7 +380,36 @@ impl Arc { #[inline] #[unstable(feature = "arc_new_cyclic", issue = "75861")] pub fn new_cyclic(data_fn: impl FnOnce(&Weak) -> T) -> Self { - Self::try_new_cyclic(data_fn).unwrap_or_else(|_| handle_alloc_error(Layout::new::())) + // Construct the inner in the "uninitialized" state with a single + // weak reference. + let alloc = ArcAllocator::new(Global); + let ptr = Self::allocate( + &alloc, + Layout::new::(), + ArcMetadata::new_weak(), + AllocInit::Uninitialized, + NonNull::cast, + ); + + // Strong references should collectively own a shared weak reference, + // so don't run the destructor for our old weak reference. + let weak = mem::ManuallyDrop::new(Weak { ptr, alloc }); + + // It's important we don't give up ownership of the weak pointer, or + // else the memory might be freed by the time `data_fn` returns. If + // we really wanted to pass ownership, we could create an additional + // weak pointer for ourselves, but this would result in additional + // updates to the weak reference count which might not be necessary + // otherwise. + unsafe { + ptr.as_ptr().write(data_fn(&weak)); + } + + let meta = unsafe { ArcAllocator::::prefix(ptr).as_ref() }; + let prev_value = meta.strong.fetch_add(1, Release); + debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); + + unsafe { Self::from_raw_in(ptr.as_ptr(), weak.alloc) } } /// Constructs a new `Arc` with uninitialized contents. @@ -403,7 +436,16 @@ impl Arc { #[inline] #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit() -> Arc> { - Self::try_new_uninit().unwrap_or_else(|_| handle_alloc_error(Layout::new::())) + let alloc = ArcAllocator::new(Global); + let layout = Layout::new::(); + let ptr = Self::allocate( + &alloc, + layout, + ArcMetadata::new_strong(), + AllocInit::Uninitialized, + NonNull::cast, + ); + unsafe { Arc::from_raw_in(ptr.as_ptr().cast(), alloc) } } /// Constructs a new `Arc` with uninitialized contents, with the memory @@ -429,7 +471,16 @@ impl Arc { #[inline] #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed() -> Arc> { - Self::try_new_zeroed().unwrap_or_else(|_| handle_alloc_error(Layout::new::())) + let alloc = ArcAllocator::new(Global); + let layout = Layout::new::(); + let ptr = Self::allocate( + &alloc, + layout, + ArcMetadata::new_strong(), + AllocInit::Zeroed, + NonNull::cast, + ); + unsafe { Arc::from_raw_in(ptr.as_ptr().cast(), alloc) } } /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then @@ -461,62 +512,6 @@ impl Arc { } } - /// Constructs a new `Arc` using a weak reference to itself. Attempting - /// to upgrade the weak reference before this function returns will result - /// in a `None` value. However, the weak reference may be cloned freely and - /// stored for use at a later time. 
- /// - /// # Examples - /// ``` - /// #![feature(allocator_api, arc_new_cyclic)] - /// #![allow(dead_code)] - /// - /// use std::sync::{Arc, Weak}; - /// - /// struct Foo { - /// me: Weak, - /// } - /// - /// let foo = Arc::try_new_cyclic(|me| Foo { - /// me: me.clone(), - /// })?; - /// # Ok::<(), std::alloc::AllocError>(()) - /// ``` - #[inline] - #[unstable(feature = "arc_new_cyclic", issue = "75861")] - pub fn try_new_cyclic(data_fn: impl FnOnce(&Weak) -> T) -> Result { - // Construct the inner in the "uninitialized" state with a single - // weak reference. - let alloc = ArcAllocator::new(Global); - let ptr = Self::try_allocate( - &alloc, - Layout::new::(), - ArcMetadata::new_weak(), - AllocInit::Uninitialized, - NonNull::cast, - )?; - - // Strong references should collectively own a shared weak reference, - // so don't run the destructor for our old weak reference. - let weak = mem::ManuallyDrop::new(Weak { ptr, alloc }); - - // It's important we don't give up ownership of the weak pointer, or - // else the memory might be freed by the time `data_fn` returns. If - // we really wanted to pass ownership, we could create an additional - // weak pointer for ourselves, but this would result in additional - // updates to the weak reference count which might not be necessary - // otherwise. - unsafe { - ptr.as_ptr().write(data_fn(&weak)); - } - - let meta = unsafe { ArcAllocator::::prefix(ptr).as_ref() }; - let prev_value = meta.strong.fetch_add(1, Release); - debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); - - unsafe { Ok(Self::from_raw_in(ptr.as_ptr(), weak.alloc)) } - } - /// Constructs a new `Arc` with uninitialized contents, returning an error /// if allocation fails. /// @@ -1099,8 +1094,11 @@ impl Arc { init: AllocInit, mem_to_ptr: impl FnOnce(NonNull) -> NonNull, ) -> NonNull { - Self::try_allocate(alloc, layout, meta, init, mem_to_ptr) - .unwrap_or_else(|_| handle_alloc_error(layout)) + let ptr = mem_to_ptr(allocate(alloc, layout, init)); + unsafe { + ArcAllocator::::prefix(ptr).as_ptr().write(meta); + } + ptr } /// Allocates an `Arc` with sufficient space for @@ -1116,12 +1114,7 @@ impl Arc { init: AllocInit, mem_to_ptr: impl FnOnce(NonNull) -> NonNull, ) -> Result, AllocError> { - let memory = match init { - AllocInit::Uninitialized => alloc.allocate(layout)?, - AllocInit::Zeroed => alloc.allocate_zeroed(layout)?, - }; - - let ptr = mem_to_ptr(memory.as_non_null_ptr()); + let ptr = mem_to_ptr(try_allocate(alloc, layout, init)?); unsafe { ArcAllocator::::prefix(ptr).as_ptr().write(meta); } @@ -2453,6 +2446,26 @@ impl> ToArcSlice for I { } } +/// Dediated function for allocating to prevent generating a function for every `T` +#[inline] +fn allocate(alloc: &ArcAllocator, layout: Layout, init: AllocInit) -> NonNull { + try_allocate(alloc, layout, init).unwrap_or_else(|_| handle_alloc_error(layout)) +} + +/// Dediated function for allocating to prevent generating a function for every `T` +#[inline] +fn try_allocate( + alloc: &ArcAllocator, + layout: Layout, + init: AllocInit, +) -> Result, AllocError> { + let ptr = match init { + AllocInit::Uninitialized => alloc.allocate(layout)?, + AllocInit::Zeroed => alloc.allocate_zeroed(layout)?, + }; + Ok(ptr.as_non_null_ptr()) +} + #[stable(feature = "rust1", since = "1.0.0")] impl borrow::Borrow for Arc { fn borrow(&self) -> &T { From a50bef282f0753567989d7297a1c59df2988f111 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Mon, 26 Apr 2021 13:32:14 +0200 Subject: [PATCH 14/20] Add panic message for PrefixAllocator 
offset test --- library/core/tests/alloc/prefix.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/library/core/tests/alloc/prefix.rs b/library/core/tests/alloc/prefix.rs index cbe87be158c8d..e13e506ddeb88 100644 --- a/library/core/tests/alloc/prefix.rs +++ b/library/core/tests/alloc/prefix.rs @@ -9,7 +9,13 @@ fn test_prefix() { unsafe { let layout = Layout::new::(); let prefix_offset = PrefixAllocator::::prefix_offset(layout); - assert_eq!(prefix_offset, Layout::new::().extend(layout).unwrap().1); + assert_eq!( + prefix_offset, + Layout::new::().extend(layout).unwrap().1, + "Invalid prefix offset for PrefixAllocator<_, {}> with Layout<{}>.", + type_name::(), + type_name::(), + ); let alloc = Tracker::new(PrefixAllocator::, Prefix>::new(Tracker::new(System))); From 251d44f41f6ad75f6965051341da8ed5ee934ed0 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Sun, 23 May 2021 16:09:45 +0200 Subject: [PATCH 15/20] Fix messed up merge from GitHub --- library/alloc/src/raw_vec.rs | 11 ------- library/alloc/src/sync.rs | 60 ------------------------------------ 2 files changed, 71 deletions(-) diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs index 8c56723972543..b73467d6e2cdc 100644 --- a/library/alloc/src/raw_vec.rs +++ b/library/alloc/src/raw_vec.rs @@ -18,17 +18,6 @@ use crate::collections::TryReserveError::{self, *}; #[cfg(test)] mod tests; -<<<<<<< unify_box_rc -======= -#[cfg(not(no_global_oom_handling))] -enum AllocInit { - /// The contents of the new memory are uninitialized. - Uninitialized, - /// The new memory is guaranteed to be zeroed. - Zeroed, -} - ->>>>>>> master /// A low-level utility for more ergonomically allocating, reallocating, and deallocating /// a buffer of memory on the heap without having to worry about all the corner cases /// involved. This type is excellent for building your own data structures like Vec and VecDeque. diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index 076d9810dee75..e1f449c6b4b45 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -16,13 +16,7 @@ use core::intrinsics::abort; #[cfg(not(no_global_oom_handling))] use core::iter; use core::marker::{PhantomData, Unpin, Unsize}; -<<<<<<< unify_box_rc use core::mem::{self, forget}; -======= -#[cfg(not(no_global_oom_handling))] -use core::mem::size_of_val; -use core::mem::{self, align_of_val_raw}; ->>>>>>> master use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver}; use core::pin::Pin; use core::ptr::{self, NonNull}; @@ -445,11 +439,8 @@ impl Arc { /// /// assert_eq!(*five, 5) /// ``` -<<<<<<< unify_box_rc #[inline] -======= #[cfg(not(no_global_oom_handling))] ->>>>>>> master #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit() -> Arc> { let alloc = ArcAllocator::new(Global); @@ -484,11 +475,8 @@ impl Arc { /// ``` /// /// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed -<<<<<<< unify_box_rc #[inline] -======= #[cfg(not(no_global_oom_handling))] ->>>>>>> master #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed() -> Arc> { let alloc = ArcAllocator::new(Global); @@ -1102,33 +1090,7 @@ impl Arc { } impl Arc { -<<<<<<< unify_box_rc /// Allocates an `Rc` with sufficient space for -======= - /// Allocates an `ArcInner` with sufficient space for - /// a possibly-unsized inner value where the value has the layout provided. 
- /// - /// The function `mem_to_arcinner` is called with the data pointer - /// and must return back a (potentially fat)-pointer for the `ArcInner`. - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_layout( - value_layout: Layout, - allocate: impl FnOnce(Layout) -> Result, AllocError>, - mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner, - ) -> *mut ArcInner { - // Calculate layout using the given value layout. - // Previously, layout was calculated on the expression - // `&*(ptr as *const ArcInner)`, but this created a misaligned - // reference (see #54908). - let layout = Layout::new::>().extend(value_layout).unwrap().0.pad_to_align(); - unsafe { - Arc::try_allocate_for_layout(value_layout, allocate, mem_to_arcinner) - .unwrap_or_else(|_| handle_alloc_error(layout)) - } - } - - /// Allocates an `ArcInner` with sufficient space for ->>>>>>> master /// a possibly-unsized inner value where the value has the layout provided, /// returning an error if allocation fails. /// @@ -1149,7 +1111,6 @@ impl Arc { ptr } -<<<<<<< unify_box_rc /// Allocates an `Arc` with sufficient space for /// a possibly-unsized inner value where the value has the layout provided. /// @@ -1164,12 +1125,6 @@ impl Arc { mem_to_ptr: impl FnOnce(NonNull) -> NonNull, ) -> Result, AllocError> { let ptr = mem_to_ptr(try_allocate(alloc, layout, init)?); -======= - /// Allocates an `ArcInner` with sufficient space for an unsized inner value. - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner { - // Allocate for the `ArcInner` using the given value. ->>>>>>> master unsafe { ArcAllocator::::prefix(ptr).as_ptr().write(meta); } @@ -1208,21 +1163,6 @@ impl Arc { } impl Arc<[T]> { -<<<<<<< unify_box_rc -======= - /// Allocates an `ArcInner<[T]>` with the given length. - #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_slice(len: usize) -> *mut ArcInner<[T]> { - unsafe { - Self::allocate_for_layout( - Layout::array::(len).unwrap(), - |layout| Global.allocate(layout), - |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>, - ) - } - } - ->>>>>>> master /// Copy elements from slice into newly allocated Arc<\[T\]> /// /// Unsafe because the caller must either take ownership or bind `T: Copy`. 
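These slice helpers sit behind the public conversion traits; a quick usage sketch of what they ultimately serve (stable API only):

```rust
use std::sync::Arc;

fn main() {
    // `From<&[T]>` copies the elements into a freshly allocated `Arc<[T]>`.
    let from_slice: Arc<[u32]> = Arc::from(&[1, 2, 3][..]);

    // `FromIterator` builds the same shape from an iterator.
    let from_iter: Arc<[u32]> = (0u32..3).collect();

    assert_eq!(&*from_slice, &[1, 2, 3]);
    assert_eq!(&*from_iter, &[0, 1, 2]);
}
```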
From 0dc2769bd5a125b73a3b3b0103ed70f87d292b20 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Mon, 24 May 2021 03:48:52 +0200 Subject: [PATCH 16/20] Fix regression for `Rc` and `Arc` For this, the generic parameter in`PrefixAllocator::prefix` was removed and `(A)Rc::new_uninit` isn't called in `new` anymore as this doubles the code generation for `(A)Rc` --- library/alloc/src/raw_vec.rs | 4 +- library/alloc/src/rc.rs | 64 ++++++++++++++++++++------------ library/alloc/src/sync.rs | 55 +++++++++++++++++---------- library/core/src/alloc/helper.rs | 17 ++------- 4 files changed, 84 insertions(+), 56 deletions(-) diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs index b73467d6e2cdc..623c0b06af873 100644 --- a/library/alloc/src/raw_vec.rs +++ b/library/alloc/src/raw_vec.rs @@ -1,7 +1,9 @@ #![unstable(feature = "raw_vec_internals", reason = "implementation detail", issue = "none")] #![doc(hidden)] -use core::alloc::{helper::AllocInit, LayoutError}; +#[cfg(not(no_global_oom_handling))] +use core::alloc::helper::AllocInit; +use core::alloc::LayoutError; use core::cmp; use core::intrinsics; use core::mem::{self, ManuallyDrop, MaybeUninit}; diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index b7462d66eb726..9420f5b72789a 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -259,7 +259,7 @@ use core::intrinsics::abort; #[cfg(not(no_global_oom_handling))] use core::iter; use core::marker::{self, PhantomData, Unpin, Unsize}; -use core::mem::{self, forget}; +use core::mem; use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver}; use core::pin::Pin; use core::ptr::{self, NonNull}; @@ -389,8 +389,19 @@ impl, U: ?Sized> DispatchFromDyn> for Rc {} impl Rc { #[inline] + fn metadata_ptr(ptr: NonNull) -> NonNull { + // SAFETY: since the only unsized types possible are slices, trait objects, + // and extern types, the input safety requirement is currently enough to + // satisfy the requirements of for_value_raw; this is an implementation + // detail of the language that may not be relied upon outside of std. 
+ let align = unsafe { mem::align_of_val_raw(ptr.as_ptr()) }; + + unsafe { RcAllocator::::prefix(ptr.cast(), align) } + } + + #[inline(always)] fn metadata(&self) -> &RcMetadata { - unsafe { RcAllocator::::prefix(self.ptr).as_ref() } + unsafe { Self::metadata_ptr(self.ptr).as_ref() } } } @@ -404,13 +415,21 @@ impl Rc { /// /// let five = Rc::new(5); /// ``` - #[inline] + #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] pub fn new(value: T) -> Rc { - let mut rc = Self::new_uninit(); + let alloc = RcAllocator::new(Global); + let layout = Layout::new::(); + let ptr = Self::allocate( + &alloc, + layout, + RcMetadata::new_strong(), + AllocInit::Uninitialized, + NonNull::cast, + ); unsafe { - Rc::get_mut_unchecked(&mut rc).as_mut_ptr().write(value); - rc.assume_init() + ptr.as_ptr().write(value); + Self::from_raw_in(ptr.as_ptr().cast(), alloc) } } @@ -439,6 +458,7 @@ impl Rc { /// } /// ``` #[inline] + #[cfg(not(no_global_oom_handling))] #[unstable(feature = "arc_new_cyclic", issue = "75861")] pub fn new_cyclic(data_fn: impl FnOnce(&Weak) -> T) -> Rc { // Construct the inner in the "uninitialized" state with a single @@ -466,7 +486,7 @@ impl Rc { ptr.as_ptr().write(data_fn(&weak)); } - let meta = unsafe { RcAllocator::::prefix(ptr).as_ref() }; + let meta = unsafe { Self::metadata_ptr(ptr).as_ref() }; debug_assert_eq!(meta.strong.get(), 0, "No prior strong references should exist"); meta.strong.set(1); @@ -1295,9 +1315,7 @@ impl Rc { mem_to_ptr: impl FnOnce(NonNull) -> NonNull, ) -> NonNull { let ptr = mem_to_ptr(allocate(alloc, layout, init)); - unsafe { - RcAllocator::::prefix(ptr).as_ptr().write(meta); - } + unsafe { Self::metadata_ptr(ptr).as_ptr().write(meta) } ptr } @@ -1316,9 +1334,7 @@ impl Rc { mem_to_ptr: impl FnOnce(NonNull) -> NonNull, ) -> Result, AllocError> { let ptr = mem_to_ptr(try_allocate(alloc, layout, init)?); - unsafe { - RcAllocator::::prefix(ptr).as_ptr().write(meta); - } + unsafe { Self::metadata_ptr(ptr).as_ptr().write(meta) } Ok(ptr) } @@ -1419,7 +1435,7 @@ impl Rc<[T]> { } // All clear. Forget the guard so it doesn't free the new RcBox. - forget(guard); + mem::forget(guard); Self::from_raw_in(ptr.as_ptr(), alloc) } @@ -1488,17 +1504,22 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc { /// drop(foo2); // Prints "dropped!" /// ``` fn drop(&mut self) { - self.metadata().dec_strong(); - if self.metadata().strong() == 0 { + let metadata = self.metadata(); + + metadata.dec_strong(); + if metadata.strong() == 0 { // destroy the contained object unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)); } + // Due to the borrow checker, we have to read the metadata again + let metadata = self.metadata(); + // remove the implicit "strong weak" pointer now that we've // destroyed the contents. 
- self.metadata().dec_weak(); - if self.metadata().weak() == 0 { + metadata.dec_weak(); + if metadata.weak() == 0 { unsafe { let layout = Layout::for_value_raw(self.ptr.as_ptr()); self.alloc.deallocate(self.ptr.cast(), layout); @@ -2071,11 +2092,7 @@ impl, U: ?Sized> DispatchFromDyn> for Weak {} impl Weak { #[inline] fn metadata(&self) -> Option<&RcMetadata> { - if is_dangling(self.ptr.as_ptr()) { - None - } else { - Some(unsafe { RcAllocator::::prefix(self.ptr).as_ref() }) - } + (!is_dangling(self.ptr.as_ptr())).then(|| unsafe { Rc::metadata_ptr(self.ptr).as_ref() }) } } @@ -2426,6 +2443,7 @@ impl Default for Weak { /// Dediated function for allocating to prevent generating a function for every `T` #[inline] +#[cfg(not(no_global_oom_handling))] fn allocate(alloc: &RcAllocator, layout: Layout, init: AllocInit) -> NonNull { try_allocate(alloc, layout, init).unwrap_or_else(|_| handle_alloc_error(layout)) } diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index e1f449c6b4b45..d407b933fcbff 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -16,7 +16,7 @@ use core::intrinsics::abort; #[cfg(not(no_global_oom_handling))] use core::iter; use core::marker::{PhantomData, Unpin, Unsize}; -use core::mem::{self, forget}; +use core::mem; use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver}; use core::pin::Pin; use core::ptr::{self, NonNull}; @@ -31,6 +31,7 @@ use crate::alloc::handle_alloc_error; use crate::alloc::{box_free, WriteCloneIntoRaw}; use crate::alloc::{AllocError, Allocator, Global, Layout}; use crate::borrow::{Cow, ToOwned}; +#[cfg(not(no_global_oom_handling))] use crate::boxed::Box; use crate::rc::is_dangling; #[cfg(not(no_global_oom_handling))] @@ -278,8 +279,19 @@ impl, U: ?Sized> DispatchFromDyn> for Arc {} impl Arc { #[inline] + fn metadata_ptr(ptr: NonNull) -> NonNull { + // SAFETY: since the only unsized types possible are slices, trait objects, + // and extern types, the input safety requirement is currently enough to + // satisfy the requirements of for_value_raw; this is an implementation + // detail of the language that may not be relied upon outside of std. 
+ let align = unsafe { mem::align_of_val_raw(ptr.as_ptr()) }; + + unsafe { ArcAllocator::::prefix(ptr.cast(), align) } + } + + #[inline(always)] fn metadata(&self) -> &ArcMetadata { - unsafe { ArcAllocator::::prefix(self.ptr).as_ref() } + unsafe { Self::metadata_ptr(self.ptr).as_ref() } } } @@ -335,11 +347,7 @@ impl fmt::Debug for Weak { impl Weak { #[inline] fn metadata(&self) -> Option<&ArcMetadata> { - if is_dangling(self.ptr.as_ptr()) { - None - } else { - Some(unsafe { ArcAllocator::::prefix(self.ptr).as_ref() }) - } + (!is_dangling(self.ptr.as_ptr())).then(|| unsafe { Arc::metadata_ptr(self.ptr).as_ref() }) } } @@ -354,12 +362,21 @@ impl Arc { /// let five = Arc::new(5); /// ``` #[inline] + #[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] - pub fn new(data: T) -> Self { - let mut arc = Self::new_uninit(); + pub fn new(value: T) -> Self { + let alloc = ArcAllocator::new(Global); + let layout = Layout::new::(); + let ptr = Self::allocate( + &alloc, + layout, + ArcMetadata::new_strong(), + AllocInit::Uninitialized, + NonNull::cast, + ); unsafe { - Arc::get_mut_unchecked(&mut arc).as_mut_ptr().write(data); - arc.assume_init() + ptr.as_ptr().write(value); + Self::from_raw_in(ptr.as_ptr().cast(), alloc) } } @@ -384,6 +401,7 @@ impl Arc { /// }); /// ``` #[inline] + #[cfg(not(no_global_oom_handling))] #[unstable(feature = "arc_new_cyclic", issue = "75861")] pub fn new_cyclic(data_fn: impl FnOnce(&Weak) -> T) -> Self { // Construct the inner in the "uninitialized" state with a single @@ -411,7 +429,8 @@ impl Arc { ptr.as_ptr().write(data_fn(&weak)); } - let meta = unsafe { ArcAllocator::::prefix(ptr).as_ref() }; + // SAFETY: `ptr` was just allocated with the allocator with the alignment of `T` + let meta = unsafe { Self::metadata_ptr(ptr).as_ref() }; let prev_value = meta.strong.fetch_add(1, Release); debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); @@ -1097,6 +1116,7 @@ impl Arc { /// The function `mem_to_rcbox` is called with the data pointer /// and must return back a (potentially fat)-pointer for the `RcBox`. #[inline] + #[cfg(not(no_global_oom_handling))] fn allocate( alloc: &ArcAllocator, layout: Layout, @@ -1105,9 +1125,7 @@ impl Arc { mem_to_ptr: impl FnOnce(NonNull) -> NonNull, ) -> NonNull { let ptr = mem_to_ptr(allocate(alloc, layout, init)); - unsafe { - ArcAllocator::::prefix(ptr).as_ptr().write(meta); - } + unsafe { Self::metadata_ptr(ptr).as_ptr().write(meta) } ptr } @@ -1125,9 +1143,7 @@ impl Arc { mem_to_ptr: impl FnOnce(NonNull) -> NonNull, ) -> Result, AllocError> { let ptr = mem_to_ptr(try_allocate(alloc, layout, init)?); - unsafe { - ArcAllocator::::prefix(ptr).as_ptr().write(meta); - } + unsafe { Self::metadata_ptr(ptr).as_ptr().write(meta) } Ok(ptr) } @@ -1228,7 +1244,7 @@ impl Arc<[T]> { } // All clear. Forget the guard so it doesn't free the new RcBox. 
- forget(guard); + mem::forget(guard); Self::from_raw_in(ptr.as_ptr(), alloc) } @@ -2473,6 +2489,7 @@ impl> ToArcSlice for I { /// Dediated function for allocating to prevent generating a function for every `T` #[inline] +#[cfg(not(no_global_oom_handling))] fn allocate(alloc: &ArcAllocator, layout: Layout, init: AllocInit) -> NonNull { try_allocate(alloc, layout, init).unwrap_or_else(|_| handle_alloc_error(layout)) } diff --git a/library/core/src/alloc/helper.rs b/library/core/src/alloc/helper.rs index 4f3255d57067b..d5051ca7f4287 100644 --- a/library/core/src/alloc/helper.rs +++ b/library/core/src/alloc/helper.rs @@ -5,7 +5,6 @@ use crate::{ alloc::{AllocError, Allocator, Layout}, fmt, marker::PhantomData, - mem, ptr::NonNull, }; @@ -91,31 +90,23 @@ impl PrefixAllocator { prefix_layout.size() + prefix_layout.padding_needed_for(layout.align()) } - /// Returns a pointer to the prefix. + /// Returns a pointer to the prefix for an allocated pointer and it's used alignment. /// /// # Safety /// /// * `ptr` must denote a block of memory *[currently allocated]* via this allocator, and - /// * `ptr` must point to (and have valid metadata for) a previously valid instance of `T`, - /// but the `T` is allowed to be dropped. + /// * `align` has to be the alignment used for allocating `ptr`. /// /// [currently allocated]: https://doc.rust-lang.org/nightly/core/alloc/trait.AllocRef.html#currently-allocated-memory #[inline] - pub unsafe fn prefix(ptr: NonNull) -> NonNull { + pub unsafe fn prefix(ptr: NonNull, align: usize) -> NonNull { let prefix_layout = Layout::new::(); - // SAFETY: since the only unsized types possible are slices, trait objects, - // and extern types, the input safety requirement is currently enough to - // satisfy the requirements of for_value_raw; this is an implementation - // detail of the language that may not be relied upon outside of std. - let align = unsafe { mem::align_of_val_raw(ptr.as_ptr()) }; - let offset = prefix_layout.size() + prefix_layout.padding_needed_for(align); - let ptr = ptr.as_ptr() as *mut u8; // SAFETY: `ptr` was allocated with this allocator thus, `ptr - offset` points to the // prefix and is non-null. 
- unsafe { NonNull::new_unchecked(ptr.sub(offset)).cast() } + unsafe { NonNull::new_unchecked(ptr.as_ptr().sub(offset).cast()) } } fn create_ptr(ptr: NonNull<[u8]>, offset_prefix: usize) -> NonNull<[u8]> { From 42fe0b9753991fa85b6611c569a1cc0711837056 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Mon, 24 May 2021 04:02:11 +0200 Subject: [PATCH 17/20] Fix test for `PrefixAllocator::prefix` to use the non-generic version --- library/core/tests/alloc/prefix.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/core/tests/alloc/prefix.rs b/library/core/tests/alloc/prefix.rs index e13e506ddeb88..76a6999863123 100644 --- a/library/core/tests/alloc/prefix.rs +++ b/library/core/tests/alloc/prefix.rs @@ -29,7 +29,7 @@ fn test_prefix() { }); assert_eq!( - PrefixAllocator::::prefix::(memory.as_non_null_ptr().cast()) + PrefixAllocator::::prefix(memory.as_non_null_ptr(), layout.align()) .cast() .as_ptr(), memory.as_mut_ptr().sub(prefix_offset), From 2cbdee608639a1a8b8e8eba4ca8b63b45a305477 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Mon, 24 May 2021 10:47:39 +0200 Subject: [PATCH 18/20] Add missing cfg-attributes for no-global-oom-handling in `Rc` and `Arc` --- library/alloc/src/rc.rs | 12 +++++++----- library/alloc/src/sync.rs | 4 ++++ 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index 9420f5b72789a..aee8e15099125 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -242,9 +242,9 @@ #![stable(feature = "rust1", since = "1.0.0")] -#[cfg(not(test))] +#[cfg(all(not(test), not(no_global_oom_handling)))] use crate::boxed::Box; -#[cfg(test)] +#[cfg(all(test, not(no_global_oom_handling)))] use std::boxed::Box; use core::alloc::helper::{AllocInit, PrefixAllocator}; @@ -261,15 +261,14 @@ use core::iter; use core::marker::{self, PhantomData, Unpin, Unsize}; use core::mem; use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver}; +#[cfg(not(no_global_oom_handling))] use core::pin::Pin; use core::ptr::{self, NonNull}; #[cfg(not(no_global_oom_handling))] use core::slice::from_raw_parts_mut; #[cfg(not(no_global_oom_handling))] -use crate::alloc::handle_alloc_error; -#[cfg(not(no_global_oom_handling))] -use crate::alloc::{box_free, WriteCloneIntoRaw}; +use crate::alloc::{handle_alloc_error, box_free, WriteCloneIntoRaw}; use crate::alloc::{AllocError, Allocator, Global, Layout}; use crate::borrow::{Cow, ToOwned}; #[cfg(not(no_global_oom_handling))] @@ -661,6 +660,7 @@ impl Rc { /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then /// `value` will be pinned in memory and unable to be moved. #[inline] + #[cfg(not(no_global_oom_handling))] #[stable(feature = "pin", since = "1.33.0")] pub fn pin(value: T) -> Pin> { unsafe { Pin::new_unchecked(Rc::new(value)) } @@ -1552,6 +1552,7 @@ impl Clone for Rc { } } +#[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] impl Default for Rc { /// Creates a new `Rc`, with the `Default` value for `T`. 
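As a quick illustration of the reworked `PrefixAllocator::prefix` above: the offset back to the prefix now depends only on the prefix layout and the alignment that was passed at allocation time, not on `align_of_val_raw`. Below is a minimal stand-alone sketch of that offset arithmetic; the `Metadata` type is a hypothetical stand-in for `RcMetadata`/`ArcMetadata`, and it uses plain stable `Layout`/`mem` arithmetic rather than the patch's unstable helpers.

```rust
use std::mem;

// Hypothetical stand-in for the `RcMetadata`/`ArcMetadata` prefix.
#[allow(dead_code)]
struct Metadata {
    strong: usize,
    weak: usize,
}

/// Offset from the start of the allocation to the user data: the prefix size,
/// rounded up to the alignment of the data. This is the same value
/// `prefix_offset`/`padding_needed_for` compute in the patch.
fn prefix_offset(data_align: usize) -> usize {
    let prefix_size = mem::size_of::<Metadata>();
    // `data_align` is always a power of two, so this rounds up correctly.
    (prefix_size + data_align - 1) & !(data_align - 1)
}

fn main() {
    // With data alignment 1 there is no padding after the prefix.
    assert_eq!(prefix_offset(1), mem::size_of::<Metadata>());
    // With a 64-byte-aligned payload the prefix is padded out to 64 bytes,
    // so `prefix()` walks back exactly 64 bytes from the data pointer.
    assert_eq!(prefix_offset(64), 64);
}
```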
@@ -1810,6 +1811,7 @@ impl fmt::Pointer for Rc { } } +#[cfg(not(no_global_oom_handling))] #[stable(feature = "from_for_ptrs", since = "1.6.0")] impl From for Rc { /// Converts a generic type `T` into a `Rc` diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index d407b933fcbff..359d8962b9d6d 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -18,6 +18,7 @@ use core::iter; use core::marker::{PhantomData, Unpin, Unsize}; use core::mem; use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver}; +#[cfg(not(no_global_oom_handling))] use core::pin::Pin; use core::ptr::{self, NonNull}; #[cfg(not(no_global_oom_handling))] @@ -513,6 +514,7 @@ impl Arc { /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then /// `data` will be pinned in memory and unable to be moved. #[inline] + #[cfg(not(no_global_oom_handling))] #[stable(feature = "pin", since = "1.33.0")] pub fn pin(data: T) -> Pin { unsafe { Pin::new_unchecked(Arc::new(data)) } @@ -2241,6 +2243,7 @@ impl fmt::Pointer for Arc { } } +#[cfg(not(no_global_oom_handling))] #[stable(feature = "rust1", since = "1.0.0")] impl Default for Arc { /// Creates a new `Arc`, with the `Default` value for `T`. @@ -2265,6 +2268,7 @@ impl Hash for Arc { } } +#[cfg(not(no_global_oom_handling))] #[stable(feature = "from_for_ptrs", since = "1.6.0")] impl From for Arc { fn from(t: T) -> Self { From 7944de5570b791965e599da6409d503984483046 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Mon, 24 May 2021 11:06:57 +0200 Subject: [PATCH 19/20] Satisfy tidy... --- library/alloc/src/rc.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index aee8e15099125..d3463bde6c2a7 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -268,7 +268,7 @@ use core::ptr::{self, NonNull}; use core::slice::from_raw_parts_mut; #[cfg(not(no_global_oom_handling))] -use crate::alloc::{handle_alloc_error, box_free, WriteCloneIntoRaw}; +use crate::alloc::{box_free, handle_alloc_error, WriteCloneIntoRaw}; use crate::alloc::{AllocError, Allocator, Global, Layout}; use crate::borrow::{Cow, ToOwned}; #[cfg(not(no_global_oom_handling))] From a4b4fe4d117c19dde5ec9522ae87e25ed47c0ea9 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Tue, 25 May 2021 15:26:50 +0200 Subject: [PATCH 20/20] Simplify allocation for `Rc` and `Arc` to speed up optimization path --- library/alloc/src/rc.rs | 231 ++++++++++-------------------- library/alloc/src/sync.rs | 235 ++++++++++--------------------- library/core/src/alloc/helper.rs | 36 +++++ 3 files changed, 189 insertions(+), 313 deletions(-) diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index d3463bde6c2a7..399c0fbbeef6a 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -247,7 +247,7 @@ use crate::boxed::Box; #[cfg(all(test, not(no_global_oom_handling)))] use std::boxed::Box; -use core::alloc::helper::{AllocInit, PrefixAllocator}; +use core::alloc::helper::PrefixAllocator; use core::any::Any; use core::borrow; use core::cell::Cell; @@ -418,17 +418,10 @@ impl Rc { #[stable(feature = "rust1", since = "1.0.0")] pub fn new(value: T) -> Rc { let alloc = RcAllocator::new(Global); - let layout = Layout::new::(); - let ptr = Self::allocate( - &alloc, - layout, - RcMetadata::new_strong(), - AllocInit::Uninitialized, - NonNull::cast, - ); + let ptr = allocate(&alloc, Layout::new::(), RcMetadata::new_strong()).cast::(); unsafe { ptr.as_ptr().write(value); - Self::from_raw_in(ptr.as_ptr().cast(), alloc) 
+ Self::from_non_null(ptr, alloc) } } @@ -463,13 +456,7 @@ impl Rc { // Construct the inner in the "uninitialized" state with a single // weak reference. let alloc = RcAllocator::new(Global); - let ptr = Self::allocate( - &alloc, - Layout::new::(), - RcMetadata::new_weak(), - AllocInit::Uninitialized, - NonNull::cast, - ); + let ptr = allocate(&alloc, Layout::new::(), RcMetadata::new_weak()).cast::(); // Strong references should collectively own a shared weak reference, // so don't run the destructor for our old weak reference. @@ -489,7 +476,7 @@ impl Rc { debug_assert_eq!(meta.strong.get(), 0, "No prior strong references should exist"); meta.strong.set(1); - unsafe { Self::from_raw_in(ptr.as_ptr(), weak.alloc) } + unsafe { Self::from_non_null(ptr, weak.alloc) } } /// Constructs a new `Rc` with uninitialized contents. @@ -517,15 +504,8 @@ impl Rc { #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit() -> Rc> { let alloc = RcAllocator::new(Global); - let layout = Layout::new::(); - let ptr = Self::allocate( - &alloc, - layout, - RcMetadata::new_strong(), - AllocInit::Uninitialized, - NonNull::cast, - ); - unsafe { Rc::from_raw_in(ptr.as_ptr().cast(), alloc) } + let ptr = allocate(&alloc, Layout::new::(), RcMetadata::new_strong()).cast(); + unsafe { Rc::from_non_null(ptr, alloc) } } /// Constructs a new `Rc` with uninitialized contents, with the memory @@ -552,15 +532,8 @@ impl Rc { #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed() -> Rc> { let alloc = RcAllocator::new(Global); - let layout = Layout::new::(); - let ptr = Self::allocate( - &alloc, - layout, - RcMetadata::new_strong(), - AllocInit::Zeroed, - NonNull::cast, - ); - unsafe { Rc::from_raw_in(ptr.as_ptr().cast(), alloc) } + let ptr = allocate_zeroed(&alloc, Layout::new::(), RcMetadata::new_strong()).cast(); + unsafe { Rc::from_non_null(ptr, alloc) } } /// Constructs a new `Rc`, returning an error if the allocation fails @@ -577,10 +550,11 @@ impl Rc { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub fn try_new(value: T) -> Result, AllocError> { - let mut rc = Self::try_new_uninit()?; + let alloc = RcAllocator::new(Global); + let ptr = try_allocate(&alloc, Layout::new::(), RcMetadata::new_strong())?.cast::(); unsafe { - Rc::get_mut_unchecked(&mut rc).as_mut_ptr().write(value); - Ok(rc.assume_init()) + ptr.as_ptr().write(value); + Ok(Self::from_non_null(ptr, alloc)) } } @@ -610,15 +584,8 @@ impl Rc { // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_uninit() -> Result>, AllocError> { let alloc = RcAllocator::new(Global); - let layout = Layout::new::(); - let ptr = Self::try_allocate( - &alloc, - layout, - RcMetadata::new_strong(), - AllocInit::Uninitialized, - NonNull::cast, - )?; - unsafe { Ok(Rc::from_raw_in(ptr.as_ptr().cast(), alloc)) } + let ptr = try_allocate(&alloc, Layout::new::(), RcMetadata::new_strong())?; + unsafe { Ok(Rc::from_non_null(ptr.cast(), alloc)) } } /// Constructs a new `Rc` with uninitialized contents, with the memory @@ -646,15 +613,8 @@ impl Rc { //#[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_zeroed() -> Result>, AllocError> { let alloc = RcAllocator::new(Global); - let layout = Layout::new::(); - let ptr = Self::try_allocate( - &alloc, - layout, - RcMetadata::new_strong(), - AllocInit::Zeroed, - NonNull::cast, - )?; - unsafe { Ok(Rc::from_raw_in(ptr.as_ptr().cast(), alloc)) } + let ptr = try_allocate_zeroed(&alloc, Layout::new::(), RcMetadata::new_strong())?; + unsafe { 
Ok(Rc::from_non_null(ptr.cast(), alloc)) } } /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then @@ -735,14 +695,11 @@ impl Rc<[T]> { #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit]> { let alloc = RcAllocator::new(Global); - let ptr = Rc::allocate( - &alloc, - Layout::array::(len).unwrap(), - RcMetadata::new_strong(), - AllocInit::Uninitialized, - |ptr| NonNull::slice_from_raw_parts(ptr.cast(), len), + let ptr = NonNull::slice_from_raw_parts( + allocate(&alloc, Layout::array::(len).unwrap(), RcMetadata::new_strong()).cast(), + len, ); - unsafe { Rc::from_raw_in(ptr.as_ptr(), alloc) } + unsafe { Rc::from_non_null(ptr, alloc) } } /// Constructs a new reference-counted slice with uninitialized contents, with the memory being @@ -769,14 +726,12 @@ impl Rc<[T]> { #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit]> { let alloc = RcAllocator::new(Global); - let ptr = Rc::allocate( - &alloc, - Layout::array::(len).unwrap(), - RcMetadata::new_strong(), - AllocInit::Zeroed, - |ptr| NonNull::slice_from_raw_parts(ptr.cast(), len), + let ptr = NonNull::slice_from_raw_parts( + allocate_zeroed(&alloc, Layout::array::(len).unwrap(), RcMetadata::new_strong()) + .cast(), + len, ); - unsafe { Rc::from_raw_in(ptr.as_ptr(), alloc) } + unsafe { Rc::from_non_null(ptr, alloc) } } } @@ -816,7 +771,7 @@ impl Rc> { #[unstable(feature = "new_uninit", issue = "63291")] pub unsafe fn assume_init(self) -> Rc { let this = mem::ManuallyDrop::new(self); - unsafe { Rc::from_raw_in(this.ptr.cast().as_ptr(), this.alloc) } + unsafe { Rc::from_non_null(this.ptr.cast(), this.alloc) } } } @@ -862,7 +817,7 @@ impl Rc<[mem::MaybeUninit]> { let ptr = unsafe { NonNull::slice_from_raw_parts(NonNull::new_unchecked(this.ptr.as_mut_ptr().cast()), len) }; - unsafe { Rc::from_raw_in(ptr.as_ptr(), this.alloc) } + unsafe { Rc::from_non_null(ptr, this.alloc) } } } @@ -952,14 +907,14 @@ impl Rc { #[inline] #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - unsafe { Self::from_raw_in(ptr, RcAllocator::new(Global)) } + unsafe { + Self::from_non_null(NonNull::new_unchecked(ptr as *mut T), RcAllocator::new(Global)) + } } - /// Constructs an `Rc` from a raw pointer. - #[inline] - #[unstable(feature = "allocator_api", issue = "32838")] - pub unsafe fn from_raw_in(ptr: *const T, alloc: RcAllocator) -> Self { - Self { ptr: unsafe { NonNull::new_unchecked(ptr as *mut T) }, alloc, _marker: PhantomData } + #[inline(always)] + unsafe fn from_non_null(ptr: NonNull, alloc: RcAllocator) -> Self { + Self { ptr, alloc, _marker: PhantomData } } /// Creates a new [`Weak`] pointer to this allocation. @@ -1291,7 +1246,7 @@ impl Rc { pub fn downcast(self) -> Result, Rc> { if (*self).is::() { let this = mem::ManuallyDrop::new(self); - unsafe { Ok(Rc::from_raw_in(this.ptr.cast().as_ptr(), this.alloc)) } + unsafe { Ok(Rc::from_non_null(this.ptr.cast(), this.alloc)) } } else { Err(self) } @@ -1299,45 +1254,6 @@ impl Rc { } impl Rc { - /// Allocates an `Rc` with sufficient space for - /// a possibly-unsized inner value where the value has the layout provided, - /// returning an error if allocation fails. - /// - /// The function `mem_to_rcbox` is called with the data pointer - /// and must return back a (potentially fat)-pointer for the `RcBox`. 
- #[inline] - #[cfg(not(no_global_oom_handling))] - fn allocate( - alloc: &RcAllocator, - layout: Layout, - meta: RcMetadata, - init: AllocInit, - mem_to_ptr: impl FnOnce(NonNull) -> NonNull, - ) -> NonNull { - let ptr = mem_to_ptr(allocate(alloc, layout, init)); - unsafe { Self::metadata_ptr(ptr).as_ptr().write(meta) } - ptr - } - - /// Allocates an `Rc` with sufficient space for - /// a possibly-unsized inner value where the value has the layout provided, - /// returning an error if allocation fails. - /// - /// The function `mem_to_ptr` is called with the data pointer - /// and must return back a (potentially fat)-pointer for the `RcBox`. - #[inline] - fn try_allocate( - alloc: &RcAllocator, - layout: Layout, - meta: RcMetadata, - init: AllocInit, - mem_to_ptr: impl FnOnce(NonNull) -> NonNull, - ) -> Result, AllocError> { - let ptr = mem_to_ptr(try_allocate(alloc, layout, init)?); - unsafe { Self::metadata_ptr(ptr).as_ptr().write(meta) } - Ok(ptr) - } - #[cfg(not(no_global_oom_handling))] fn from_box(v: Box) -> Rc { unsafe { @@ -1346,13 +1262,8 @@ impl Rc { let rc_alloc = RcAllocator::new(alloc); let layout = Layout::for_value(&*bptr); - let ptr = Self::allocate( - &rc_alloc, - layout, - RcMetadata::new_strong(), - AllocInit::Uninitialized, - |mem| NonNull::new_unchecked(bptr.set_ptr_value(mem.as_ptr())), - ); + let mem = allocate(&rc_alloc, layout, RcMetadata::new_strong()).cast(); + let ptr = NonNull::new_unchecked(bptr.set_ptr_value(mem.as_ptr())); // Copy value as bytes ptr::copy_nonoverlapping( @@ -1364,7 +1275,7 @@ impl Rc { // Free the allocation without dropping its contents box_free(box_unique, &rc_alloc.parent); - Self::from_raw_in(ptr.as_ptr(), rc_alloc) + Self::from_non_null(ptr, rc_alloc) } } } @@ -1376,16 +1287,13 @@ impl Rc<[T]> { #[cfg(not(no_global_oom_handling))] unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> { let alloc = RcAllocator::new(Global); - let ptr = Self::allocate( - &alloc, - Layout::array::(v.len()).unwrap(), - RcMetadata::new_strong(), - AllocInit::Uninitialized, - |ptr| NonNull::slice_from_raw_parts(ptr.cast(), v.len()), + let ptr = NonNull::slice_from_raw_parts( + allocate(&alloc, Layout::array::(v.len()).unwrap(), RcMetadata::new_strong()).cast(), + v.len(), ); unsafe { ptr::copy_nonoverlapping(v.as_ptr(), ptr.as_non_null_ptr().as_ptr(), v.len()); - Self::from_raw_in(ptr.as_ptr(), alloc) + Self::from_non_null(ptr, alloc) } } @@ -1418,12 +1326,9 @@ impl Rc<[T]> { unsafe { let alloc = RcAllocator::new(Global); let layout = Layout::array::(len).unwrap(); - let ptr = Self::allocate( - &alloc, - layout, - RcMetadata::new_strong(), - AllocInit::Uninitialized, - |ptr| NonNull::slice_from_raw_parts(ptr.cast(), len), + let ptr = NonNull::slice_from_raw_parts( + allocate(&alloc, layout, RcMetadata::new_strong()).cast(), + len, ); let mut guard = @@ -1437,7 +1342,7 @@ impl Rc<[T]> { // All clear. Forget the guard so it doesn't free the new RcBox. mem::forget(guard); - Self::from_raw_in(ptr.as_ptr(), alloc) + Self::from_non_null(ptr, alloc) } } } @@ -2238,14 +2143,15 @@ impl Weak { #[inline] #[stable(feature = "weak_into_raw", since = "1.45.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - unsafe { Self::from_raw_in(ptr, RcAllocator::new(Global)) } + unsafe { + Self::from_non_null(NonNull::new_unchecked(ptr as *mut T), RcAllocator::new(Global)) + } } /// Constructs a `Weak` from a raw pointer. 
- #[inline] - #[unstable(feature = "allocator_api", issue = "32838")] - pub unsafe fn from_raw_in(ptr: *const T, alloc: RcAllocator) -> Self { - Self { ptr: unsafe { NonNull::new_unchecked(ptr as *mut T) }, alloc } + #[inline(always)] + unsafe fn from_non_null(ptr: NonNull, alloc: RcAllocator) -> Self { + Self { ptr, alloc } } /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying @@ -2444,24 +2350,37 @@ impl Default for Weak { } /// Dediated function for allocating to prevent generating a function for every `T` -#[inline] #[cfg(not(no_global_oom_handling))] -fn allocate(alloc: &RcAllocator, layout: Layout, init: AllocInit) -> NonNull { - try_allocate(alloc, layout, init).unwrap_or_else(|_| handle_alloc_error(layout)) +fn allocate(alloc: &RcAllocator, layout: Layout, metadata: RcMetadata) -> NonNull { + try_allocate(alloc, layout, metadata).unwrap_or_else(|_| handle_alloc_error(layout)) +} + +/// Dediated function for allocating to prevent generating a function for every `T` +#[cfg(not(no_global_oom_handling))] +fn allocate_zeroed( + alloc: &RcAllocator, + layout: Layout, + metadata: RcMetadata, +) -> NonNull { + try_allocate_zeroed(alloc, layout, metadata).unwrap_or_else(|_| handle_alloc_error(layout)) } /// Dediated function for allocating to prevent generating a function for every `T` -#[inline] fn try_allocate( alloc: &RcAllocator, layout: Layout, - init: AllocInit, + metadata: RcMetadata, +) -> Result, AllocError> { + alloc.allocate_with_prefix(layout, metadata).map(NonNull::as_non_null_ptr) +} + +/// Dediated function for allocating to prevent generating a function for every `T` +fn try_allocate_zeroed( + alloc: &RcAllocator, + layout: Layout, + metadata: RcMetadata, ) -> Result, AllocError> { - let ptr = match init { - AllocInit::Uninitialized => alloc.allocate(layout)?, - AllocInit::Zeroed => alloc.allocate_zeroed(layout)?, - }; - Ok(ptr.as_non_null_ptr()) + alloc.allocate_zeroed_with_prefix(layout, metadata).map(NonNull::as_non_null_ptr) } #[stable(feature = "rust1", since = "1.0.0")] diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index 359d8962b9d6d..6eb9c7148f4ae 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -4,7 +4,7 @@ //! //! See the [`Arc`][Arc] documentation for more details. -use core::alloc::helper::{AllocInit, PrefixAllocator}; +use core::alloc::helper::PrefixAllocator; use core::any::Any; use core::borrow; use core::cmp::Ordering; @@ -367,17 +367,10 @@ impl Arc { #[stable(feature = "rust1", since = "1.0.0")] pub fn new(value: T) -> Self { let alloc = ArcAllocator::new(Global); - let layout = Layout::new::(); - let ptr = Self::allocate( - &alloc, - layout, - ArcMetadata::new_strong(), - AllocInit::Uninitialized, - NonNull::cast, - ); + let ptr = allocate(&alloc, Layout::new::(), ArcMetadata::new_strong()).cast::(); unsafe { ptr.as_ptr().write(value); - Self::from_raw_in(ptr.as_ptr().cast(), alloc) + Self::from_non_null(ptr, alloc) } } @@ -408,13 +401,7 @@ impl Arc { // Construct the inner in the "uninitialized" state with a single // weak reference. let alloc = ArcAllocator::new(Global); - let ptr = Self::allocate( - &alloc, - Layout::new::(), - ArcMetadata::new_weak(), - AllocInit::Uninitialized, - NonNull::cast, - ); + let ptr = allocate(&alloc, Layout::new::(), ArcMetadata::new_weak()).cast::(); // Strong references should collectively own a shared weak reference, // so don't run the destructor for our old weak reference. 
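The free `allocate`/`try_allocate` functions above keep the actual allocation code non-generic over `T`: they only see a `Layout`, the metadata prefix, and raw byte pointers, so they are instantiated once per metadata type instead of once per `Rc<T>`/`Arc<T>`. The following is a rough sketch of that pattern against the global allocator; the names here (`allocate_bytes`, `allocate_for`) are illustrative only, while the patch itself routes through `PrefixAllocator::allocate_with_prefix`.

```rust
use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
use std::ptr::NonNull;

/// Non-generic helper: it deals only in `Layout` and `NonNull<u8>`, so it is
/// compiled once rather than once per `T`.
fn allocate_bytes(layout: Layout) -> NonNull<u8> {
    // SAFETY: the caller below never passes a zero-sized layout.
    let raw = unsafe { alloc(layout) };
    NonNull::new(raw).unwrap_or_else(|| handle_alloc_error(layout))
}

/// Thin generic shim: the only per-`T` work is computing the layout and
/// casting the returned pointer, mirroring how `Rc::new`/`Arc::new` call the
/// shared `allocate` free function and then write the value in place.
fn allocate_for<T>() -> NonNull<T> {
    allocate_bytes(Layout::new::<T>()).cast()
}

fn main() {
    let ptr = allocate_for::<u64>();
    unsafe {
        ptr.as_ptr().write(42);
        assert_eq!(*ptr.as_ptr(), 42);
        dealloc(ptr.as_ptr().cast(), Layout::new::<u64>());
    }
}
```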
@@ -435,7 +422,7 @@ impl Arc { let prev_value = meta.strong.fetch_add(1, Release); debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); - unsafe { Self::from_raw_in(ptr.as_ptr(), weak.alloc) } + unsafe { Self::from_non_null(ptr, weak.alloc) } } /// Constructs a new `Arc` with uninitialized contents. @@ -464,15 +451,8 @@ impl Arc { #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit() -> Arc> { let alloc = ArcAllocator::new(Global); - let layout = Layout::new::(); - let ptr = Self::allocate( - &alloc, - layout, - ArcMetadata::new_strong(), - AllocInit::Uninitialized, - NonNull::cast, - ); - unsafe { Arc::from_raw_in(ptr.as_ptr().cast(), alloc) } + let ptr = allocate(&alloc, Layout::new::(), ArcMetadata::new_strong()).cast(); + unsafe { Arc::from_non_null(ptr, alloc) } } /// Constructs a new `Arc` with uninitialized contents, with the memory @@ -500,15 +480,8 @@ impl Arc { #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed() -> Arc> { let alloc = ArcAllocator::new(Global); - let layout = Layout::new::(); - let ptr = Self::allocate( - &alloc, - layout, - ArcMetadata::new_strong(), - AllocInit::Zeroed, - NonNull::cast, - ); - unsafe { Arc::from_raw_in(ptr.as_ptr().cast(), alloc) } + let ptr = allocate_zeroed(&alloc, Layout::new::(), ArcMetadata::new_strong()).cast(); + unsafe { Arc::from_non_null(ptr, alloc) } } /// Constructs a new `Pin>`. If `T` does not implement `Unpin`, then @@ -533,11 +506,12 @@ impl Arc { /// ``` #[inline] #[unstable(feature = "allocator_api", issue = "32838")] - pub fn try_new(data: T) -> Result { - let mut arc = Self::try_new_uninit()?; + pub fn try_new(value: T) -> Result { + let alloc = ArcAllocator::new(Global); + let ptr = try_allocate(&alloc, Layout::new::(), ArcMetadata::new_strong())?.cast::(); unsafe { - Arc::get_mut_unchecked(&mut arc).as_mut_ptr().write(data); - Ok(arc.assume_init()) + ptr.as_ptr().write(value); + Ok(Self::from_non_null(ptr, alloc)) } } @@ -568,15 +542,8 @@ impl Arc { // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_uninit() -> Result>, AllocError> { let alloc = ArcAllocator::new(Global); - let layout = Layout::new::(); - let ptr = Self::try_allocate( - &alloc, - layout, - ArcMetadata::new_strong(), - AllocInit::Uninitialized, - NonNull::cast, - )?; - unsafe { Ok(Arc::from_raw_in(ptr.as_ptr().cast(), alloc)) } + let ptr = try_allocate(&alloc, Layout::new::(), ArcMetadata::new_strong())?.cast(); + unsafe { Ok(Arc::from_non_null(ptr, alloc)) } } /// Constructs a new `Arc` with uninitialized contents, with the memory @@ -604,15 +571,9 @@ impl Arc { // #[unstable(feature = "new_uninit", issue = "63291")] pub fn try_new_zeroed() -> Result>, AllocError> { let alloc = ArcAllocator::new(Global); - let layout = Layout::new::(); - let ptr = Self::try_allocate( - &alloc, - layout, - ArcMetadata::new_strong(), - AllocInit::Zeroed, - NonNull::cast, - )?; - unsafe { Ok(Arc::from_raw_in(ptr.as_ptr().cast(), alloc)) } + let ptr = + try_allocate_zeroed(&alloc, Layout::new::(), ArcMetadata::new_strong())?.cast(); + unsafe { Ok(Arc::from_non_null(ptr, alloc)) } } /// Returns the inner value, if the `Arc` has exactly one strong reference. 
/// @@ -681,14 +642,11 @@ impl Arc<[T]> { #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit_slice(len: usize) -> Arc<[mem::MaybeUninit]> { let alloc = ArcAllocator::new(Global); - let ptr = Arc::allocate( - &alloc, - Layout::array::(len).unwrap(), - ArcMetadata::new_strong(), - AllocInit::Uninitialized, - |ptr| NonNull::slice_from_raw_parts(ptr.cast(), len), + let ptr = NonNull::slice_from_raw_parts( + allocate(&alloc, Layout::array::(len).unwrap(), ArcMetadata::new_strong()).cast(), + len, ); - unsafe { Arc::from_raw_in(ptr.as_ptr(), alloc) } + unsafe { Arc::from_non_null(ptr, alloc) } } /// Constructs a new atomically reference-counted slice with uninitialized contents, with the memory being @@ -715,14 +673,12 @@ impl Arc<[T]> { #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_zeroed_slice(len: usize) -> Arc<[mem::MaybeUninit]> { let alloc = ArcAllocator::new(Global); - let ptr = Arc::allocate( - &alloc, - Layout::array::(len).unwrap(), - ArcMetadata::new_strong(), - AllocInit::Zeroed, - |ptr| NonNull::slice_from_raw_parts(ptr.cast(), len), + let ptr = NonNull::slice_from_raw_parts( + allocate_zeroed(&alloc, Layout::array::(len).unwrap(), ArcMetadata::new_strong()) + .cast(), + len, ); - unsafe { Arc::from_raw_in(ptr.as_ptr(), alloc) } + unsafe { Arc::from_non_null(ptr, alloc) } } } @@ -762,7 +718,7 @@ impl Arc> { #[unstable(feature = "new_uninit", issue = "63291")] pub unsafe fn assume_init(self) -> Arc { let this = mem::ManuallyDrop::new(self); - unsafe { Arc::from_raw_in(this.ptr.cast().as_ptr(), this.alloc) } + unsafe { Arc::from_non_null(this.ptr.cast(), this.alloc) } } } @@ -808,7 +764,7 @@ impl Arc<[mem::MaybeUninit]> { let ptr = unsafe { NonNull::slice_from_raw_parts(NonNull::new_unchecked(this.ptr.as_mut_ptr().cast()), len) }; - unsafe { Arc::from_raw_in(ptr.as_ptr(), this.alloc) } + unsafe { Arc::from_non_null(ptr, this.alloc) } } } @@ -896,14 +852,14 @@ impl Arc { #[inline] #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - unsafe { Self::from_raw_in(ptr, ArcAllocator::new(Global)) } + unsafe { + Self::from_non_null(NonNull::new_unchecked(ptr as *mut T), ArcAllocator::new(Global)) + } } - /// Constructs an `Arc` from a raw pointer. - #[inline] - #[unstable(feature = "allocator_api", issue = "32838")] - pub unsafe fn from_raw_in(ptr: *const T, alloc: ArcAllocator) -> Self { - Self { ptr: unsafe { NonNull::new_unchecked(ptr as *mut T) }, alloc, _marker: PhantomData } + #[inline(always)] + unsafe fn from_non_null(ptr: NonNull, alloc: ArcAllocator) -> Self { + Self { ptr, alloc, _marker: PhantomData } } /// Creates a new [`Weak`] pointer to this allocation. @@ -1111,44 +1067,6 @@ impl Arc { } impl Arc { - /// Allocates an `Rc` with sufficient space for - /// a possibly-unsized inner value where the value has the layout provided, - /// returning an error if allocation fails. - /// - /// The function `mem_to_rcbox` is called with the data pointer - /// and must return back a (potentially fat)-pointer for the `RcBox`. - #[inline] - #[cfg(not(no_global_oom_handling))] - fn allocate( - alloc: &ArcAllocator, - layout: Layout, - meta: ArcMetadata, - init: AllocInit, - mem_to_ptr: impl FnOnce(NonNull) -> NonNull, - ) -> NonNull { - let ptr = mem_to_ptr(allocate(alloc, layout, init)); - unsafe { Self::metadata_ptr(ptr).as_ptr().write(meta) } - ptr - } - - /// Allocates an `Arc` with sufficient space for - /// a possibly-unsized inner value where the value has the layout provided. 
- /// - /// The function `mem_to_mem` is called with the data pointer - /// and must return back a (potentially fat)-pointer for the `Arc`. - #[inline] - fn try_allocate( - alloc: &ArcAllocator, - layout: Layout, - meta: ArcMetadata, - init: AllocInit, - mem_to_ptr: impl FnOnce(NonNull) -> NonNull, - ) -> Result, AllocError> { - let ptr = mem_to_ptr(try_allocate(alloc, layout, init)?); - unsafe { Self::metadata_ptr(ptr).as_ptr().write(meta) } - Ok(ptr) - } - #[cfg(not(no_global_oom_handling))] fn from_box(v: Box) -> Arc { unsafe { @@ -1157,13 +1075,8 @@ impl Arc { let arc_alloc = ArcAllocator::new(alloc); let layout = Layout::for_value(&*bptr); - let ptr = Self::allocate( - &arc_alloc, - layout, - ArcMetadata::new_strong(), - AllocInit::Uninitialized, - |mem| NonNull::new_unchecked(bptr.set_ptr_value(mem.as_ptr())), - ); + let mem = allocate(&arc_alloc, layout, ArcMetadata::new_strong()).cast(); + let ptr = NonNull::new_unchecked(bptr.set_ptr_value(mem.as_ptr())); // Copy value as bytes ptr::copy_nonoverlapping( @@ -1175,7 +1088,7 @@ impl Arc { // Free the allocation without dropping its contents box_free(box_unique, &arc_alloc.parent); - Self::from_raw_in(ptr.as_ptr(), arc_alloc) + Self::from_non_null(ptr, arc_alloc) } } } @@ -1187,16 +1100,14 @@ impl Arc<[T]> { #[cfg(not(no_global_oom_handling))] unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> { let alloc = ArcAllocator::new(Global); - let ptr = Self::allocate( - &alloc, - Layout::array::(v.len()).unwrap(), - ArcMetadata::new_strong(), - AllocInit::Uninitialized, - |ptr| NonNull::slice_from_raw_parts(ptr.cast(), v.len()), + let ptr = NonNull::slice_from_raw_parts( + allocate(&alloc, Layout::array::(v.len()).unwrap(), ArcMetadata::new_strong()) + .cast(), + v.len(), ); unsafe { ptr::copy_nonoverlapping(v.as_ptr(), ptr.as_non_null_ptr().as_ptr(), v.len()); - Self::from_raw_in(ptr.as_ptr(), alloc) + Self::from_non_null(ptr, alloc) } } @@ -1229,12 +1140,9 @@ impl Arc<[T]> { unsafe { let alloc = ArcAllocator::new(Global); let layout = Layout::array::(len).unwrap(); - let ptr = Self::allocate( - &alloc, - layout, - ArcMetadata::new_strong(), - AllocInit::Uninitialized, - |ptr| NonNull::slice_from_raw_parts(ptr.cast(), len), + let ptr = NonNull::slice_from_raw_parts( + allocate(&alloc, layout, ArcMetadata::new_strong()).cast(), + len, ); let mut guard = @@ -1248,7 +1156,7 @@ impl Arc<[T]> { // All clear. Forget the guard so it doesn't free the new RcBox. mem::forget(guard); - Self::from_raw_in(ptr.as_ptr(), alloc) + Self::from_non_null(ptr, alloc) } } } @@ -1629,7 +1537,7 @@ impl Arc { { if (*self).is::() { let this = mem::ManuallyDrop::new(self); - unsafe { Ok(Arc::from_raw_in(this.ptr.cast().as_ptr(), this.alloc)) } + unsafe { Ok(Arc::from_non_null(this.ptr.cast(), this.alloc)) } } else { Err(self) } @@ -1769,14 +1677,14 @@ impl Weak { #[inline] #[stable(feature = "weak_into_raw", since = "1.45.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - unsafe { Self::from_raw_in(ptr, ArcAllocator::new(Global)) } + unsafe { + Self::from_non_null(NonNull::new_unchecked(ptr as *mut T), ArcAllocator::new(Global)) + } } - /// Constructs a `Weak` from a raw pointer. 
- #[inline] - #[unstable(feature = "allocator_api", issue = "32838")] - pub unsafe fn from_raw_in(ptr: *const T, alloc: ArcAllocator) -> Self { - Self { ptr: unsafe { NonNull::new_unchecked(ptr as *mut T) }, alloc } + #[inline(always)] + unsafe fn from_non_null(ptr: NonNull, alloc: ArcAllocator) -> Self { + Self { ptr, alloc } } } @@ -2492,24 +2400,37 @@ impl> ToArcSlice for I { } /// Dediated function for allocating to prevent generating a function for every `T` -#[inline] #[cfg(not(no_global_oom_handling))] -fn allocate(alloc: &ArcAllocator, layout: Layout, init: AllocInit) -> NonNull { - try_allocate(alloc, layout, init).unwrap_or_else(|_| handle_alloc_error(layout)) +fn allocate(alloc: &ArcAllocator, layout: Layout, metadata: ArcMetadata) -> NonNull { + try_allocate(alloc, layout, metadata).unwrap_or_else(|_| handle_alloc_error(layout)) +} + +/// Dediated function for allocating to prevent generating a function for every `T` +#[cfg(not(no_global_oom_handling))] +fn allocate_zeroed( + alloc: &ArcAllocator, + layout: Layout, + metadata: ArcMetadata, +) -> NonNull { + try_allocate_zeroed(alloc, layout, metadata).unwrap_or_else(|_| handle_alloc_error(layout)) } /// Dediated function for allocating to prevent generating a function for every `T` -#[inline] fn try_allocate( alloc: &ArcAllocator, layout: Layout, - init: AllocInit, + metadata: ArcMetadata, ) -> Result, AllocError> { - let ptr = match init { - AllocInit::Uninitialized => alloc.allocate(layout)?, - AllocInit::Zeroed => alloc.allocate_zeroed(layout)?, - }; - Ok(ptr.as_non_null_ptr()) + alloc.allocate_with_prefix(layout, metadata).map(NonNull::as_non_null_ptr) +} + +/// Dediated function for allocating to prevent generating a function for every `T` +fn try_allocate_zeroed( + alloc: &ArcAllocator, + layout: Layout, + metadata: ArcMetadata, +) -> Result, AllocError> { + alloc.allocate_zeroed_with_prefix(layout, metadata).map(NonNull::as_non_null_ptr) } #[stable(feature = "rust1", since = "1.0.0")] diff --git a/library/core/src/alloc/helper.rs b/library/core/src/alloc/helper.rs index d5051ca7f4287..d7fdd72503a8c 100644 --- a/library/core/src/alloc/helper.rs +++ b/library/core/src/alloc/helper.rs @@ -128,6 +128,42 @@ impl PrefixAllocator { Ok(Self::create_ptr(alloc(layout)?, offset_prefix)) } + + /// Behaves like `allocate` but also writes the `prefix` + pub fn allocate_with_prefix( + &self, + layout: Layout, + prefix: Prefix, + ) -> Result, AllocError> + where + Alloc: Allocator, + { + let (layout, offset_prefix) = + Layout::new::().extend(layout).map_err(|_| AllocError)?; + + let memory = self.parent.allocate(layout)?; + // SAFETY: memory was just allocated with the layout of `Prefix` + unsafe { memory.as_mut_ptr().cast::().write(prefix) }; + Ok(Self::create_ptr(memory, offset_prefix)) + } + + /// Behaves like `allocate_zeroed` but also writes the `prefix` + pub fn allocate_zeroed_with_prefix( + &self, + layout: Layout, + prefix: Prefix, + ) -> Result, AllocError> + where + Alloc: Allocator, + { + let (layout, offset_prefix) = + Layout::new::().extend(layout).map_err(|_| AllocError)?; + + let memory = self.parent.allocate_zeroed(layout)?; + // SAFETY: memory was just allocated with the layout of `Prefix` + unsafe { memory.as_mut_ptr().cast::().write(prefix) }; + Ok(Self::create_ptr(memory, offset_prefix)) + } } unsafe impl Allocator for PrefixAllocator
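To round off the `allocate_with_prefix`/`allocate_zeroed_with_prefix` additions: they extend the requested layout by the prefix, allocate once, write the prefix at the front of the block, and return a pointer to the payload that sits after the (possibly padded) prefix. Below is a self-contained sketch of that flow over the global allocator; `Meta` and the helper name are illustrative stand-ins, and the real implementation lives in `PrefixAllocator` with its parent allocator.

```rust
use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
use std::ptr::NonNull;

// Illustrative stand-in for the metadata prefix stored by `PrefixAllocator`.
struct Meta {
    strong: usize,
    weak: usize,
}

/// Sketch of `allocate_with_prefix`: one allocation holds `Meta` followed by
/// the payload; the returned pointer refers to the payload, `offset` bytes in.
fn allocate_with_prefix(layout: Layout, prefix: Meta) -> (NonNull<u8>, Layout, usize) {
    let (full, offset) = Layout::new::<Meta>().extend(layout).expect("layout overflow");
    // SAFETY: `full` has non-zero size because `Meta` is not zero-sized.
    let raw = unsafe { alloc(full) };
    let base = NonNull::new(raw).unwrap_or_else(|| handle_alloc_error(full));
    // SAFETY: the block starts with space for `Meta`, suitably aligned.
    unsafe { base.as_ptr().cast::<Meta>().write(prefix) };
    // SAFETY: `offset` lies within the allocation made above.
    let data = unsafe { NonNull::new_unchecked(base.as_ptr().add(offset)) };
    (data, full, offset)
}

fn main() {
    let (data, full, offset) =
        allocate_with_prefix(Layout::new::<u64>(), Meta { strong: 1, weak: 1 });
    unsafe {
        data.as_ptr().cast::<u64>().write(7);
        // Walking back by `offset` recovers the prefix, like `prefix()` does.
        let meta = &*data.as_ptr().sub(offset).cast::<Meta>();
        assert_eq!((meta.strong, meta.weak), (1, 1));
        assert_eq!(*data.as_ptr().cast::<u64>(), 7);
        dealloc(data.as_ptr().sub(offset), full);
    }
}
```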