
Commit 7e0034b

Andreas Molzer committed
Add Clone-less converter from {Arc,Rc} to Box
Adds methods try_unwrap_as_box that work similarly to try_unwrap but, instead of reading the value out, move it into a separate allocation. This allows unwrapping unsized values as well. By extension, this can be used to convert the elements of a vector of slices.
1 parent 15d9ba0 commit 7e0034b
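A minimal sketch of the vector-of-slices conversion mentioned in the commit message, assuming a nightly toolchain with the unwrap_rc_as_box feature gate introduced by this commit; the helper into_boxed_slices is illustrative and not part of the change:

    #![feature(unwrap_rc_as_box)]

    use std::rc::Rc;

    // Convert each shared slice into an owned Box<[u8]>, moving the value into its
    // own allocation without requiring Clone when we hold the only strong reference,
    // and falling back to copying the bytes when the slice is still shared.
    fn into_boxed_slices(v: Vec<Rc<[u8]>>) -> Vec<Box<[u8]>> {
        v.into_iter()
            .map(|rc| Rc::try_unwrap_as_box(rc).unwrap_or_else(|shared| Box::from(&shared[..])))
            .collect()
    }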

File tree

  library/alloc/src/boxed.rs
  library/alloc/src/lib.rs
  library/alloc/src/rc.rs
  library/alloc/src/sync.rs

4 files changed: +165 -26 lines

library/alloc/src/boxed.rs

+30
@@ -1077,6 +1077,36 @@ impl<T: ?Sized, A: Allocator> Box<T, A> {
         // additional requirements.
         unsafe { Pin::new_unchecked(boxed) }
     }
+
+    /// Allocates a box with the requested layout, which may be for a possibly-unsized value where
+    /// the layout has been determined from a value.
+    ///
+    /// The function `mem_to_imbued` is then called to turn the raw memory pointer into a pointer
+    /// to the type `T`. It's expected that this will add additional fat-pointer metadata.
+    #[cfg(not(no_global_oom_handling))]
+    pub(crate) unsafe fn allocate_for_layout(
+        allocator: &A,
+        layout: Layout,
+        mem_to_imbued: impl FnOnce(*mut u8) -> *mut T,
+    ) -> *mut T {
+        mem_to_imbued(
+            allocator.allocate(layout).unwrap_or_else(|_| handle_alloc_error(layout)).as_ptr()
+                as *mut u8,
+        )
+    }
+
+    /// Allocates a box with sufficient space for the pointee and copies the metadata.
+    #[unstable(feature = "unwrap_rc_as_box", issue = "none")]
+    #[cfg(not(no_global_oom_handling))]
+    #[doc(hidden)]
+    pub unsafe fn allocate_for_ptr(allocator: &A, ptr: *const T) -> *mut T {
+        // Allocate using the layout of the value behind `ptr`, then re-attach its metadata.
+        unsafe {
+            Self::allocate_for_layout(allocator, Layout::for_value(&*ptr), |mem| {
+                ptr.set_ptr_value(mem) as *mut T
+            })
+        }
+    }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
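For intuition, here is a standalone sketch (not part of the diff) of what allocate_for_ptr does for the special case of a byte slice, using only public std APIs: allocate with the pointee's layout, then rebuild a fat pointer that carries the source pointer's metadata. The real helper relies on the unstable set_ptr_value to do this re-attachment for arbitrary ?Sized types.

    use std::alloc::{alloc, handle_alloc_error, Layout};
    use std::ptr;

    /// Allocate uninitialized memory with the layout of `*src` and return a fat
    /// pointer to it that carries the same length metadata as `src`.
    ///
    /// # Safety
    /// `src` must point to a valid, non-empty slice (a zero-sized layout would need
    /// a dangling pointer instead of an allocation).
    unsafe fn allocate_for_byte_slice(src: *const [u8]) -> *mut [u8] {
        let layout = Layout::for_value(&*src);
        let mem = alloc(layout);
        if mem.is_null() {
            handle_alloc_error(layout);
        }
        // New data pointer, same length metadata as the source fat pointer.
        ptr::slice_from_raw_parts_mut(mem, (*src).len())
    }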

library/alloc/src/lib.rs

+1
@@ -136,6 +136,7 @@
 #![feature(unicode_internals)]
 #![feature(unsize)]
 #![feature(unsized_fn_params)]
+#![cfg_attr(not(no_global_oom_handling), feature(unwrap_rc_as_box))]
 #![feature(allocator_internals)]
 #![feature(slice_partition_dedup)]
 #![feature(maybe_uninit_extra, maybe_uninit_slice, maybe_uninit_uninit_array)]

library/alloc/src/rc.rs

+70 -15
@@ -612,21 +612,10 @@ impl<T> Rc<T> {
     #[inline]
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn try_unwrap(this: Self) -> Result<T, Self> {
-        if Rc::strong_count(&this) == 1 {
-            unsafe {
-                let val = ptr::read(&*this); // copy the contained object
-
-                // Indicate to Weaks that they can't be promoted by decrementing
-                // the strong count, and then remove the implicit "strong weak"
-                // pointer while also handling drop logic by just crafting a
-                // fake Weak.
-                this.inner().dec_strong();
-                let _weak = Weak { ptr: this.ptr };
-                forget(this);
-                Ok(val)
-            }
-        } else {
-            Err(this)
+        let weak = Self::leak_as_owning_weak(this)?;
+        unsafe {
+            let val = ptr::read(weak.as_ptr()); // copy the contained object
+            Ok(val)
         }
     }
 
@@ -997,6 +986,72 @@ impl<T: ?Sized> Rc<T> {
         unsafe { mem::drop(Rc::from_raw(ptr)) };
     }
 
+    /// Reduce the strong count, if the `Rc` has exactly one strong reference.
+    ///
+    /// Otherwise, an [`Err`] is returned with the same `Rc` that was passed in.
+    ///
+    /// This will succeed even if there are outstanding weak references.
+    ///
+    /// After this operation succeeds, no more strong references to the allocation can be created,
+    /// making the caller the owner of the contained value. This returns a `Weak` that manages the
+    /// allocation while the caller can (unsafely) take advantage of their ownership. In contrast
+    /// to `try_unwrap` this also works for unsized pointees.
+    fn leak_as_owning_weak(this: Self) -> Result<Weak<T>, Self> {
+        if Rc::strong_count(&this) == 1 {
+            // Indicate to Weaks that they can't be promoted by decrementing
+            // the strong count, and then produce the implicit "strong weak"
+            // pointer that is still handling dropping of the allocation.
+            this.inner().dec_strong();
+            let this = mem::ManuallyDrop::new(this);
+            let weak = Weak { ptr: this.ptr };
+            // Return the 'fake weak'.
+            Ok(weak)
+        } else {
+            Err(this)
+        }
+    }
+
+    /// Returns the boxed inner value, if the `Rc` has exactly one strong reference.
+    ///
+    /// Otherwise, an [`Err`] is returned with the same `Rc` that was passed in.
+    ///
+    /// This will succeed even if there are outstanding weak references.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(unwrap_rc_as_box)]
+    ///
+    /// use std::rc::Rc;
+    ///
+    /// let x: Rc<str> = Rc::from("Hello, world");
+    /// assert!(matches!(
+    ///     Rc::try_unwrap_as_box(x),
+    ///     Ok(b) if &b[..2] == "He"
+    /// ));
+    /// ```
+    #[cfg(not(no_global_oom_handling))]
+    #[unstable(feature = "unwrap_rc_as_box", reason = "recently added", issue = "none")]
+    pub fn try_unwrap_as_box(this: Self) -> Result<Box<T>, Self> {
+        let owning_weak = Self::leak_as_owning_weak(this)?;
+        let src_ptr = owning_weak.as_ptr();
+
+        unsafe {
+            // We 'own' this value right now so it is still initialized.
+            let size = mem::size_of_val(&*src_ptr);
+            // The raw allocation for our Box. After this point we must not panic, as otherwise
+            // we would leak this memory. We can't use MaybeUninit here as that is only valid for
+            // sized types.
+            let raw_box = Box::<T>::allocate_for_ptr(&Global, src_ptr);
+
+            // This is a new allocation so it can not overlap with the one which `owning_weak` is
+            // still holding onto.
+            ptr::copy_nonoverlapping(src_ptr as *const u8, raw_box as *mut u8, size);
+
+            Ok(Box::from_raw(raw_box))
+        }
+    }
+
     /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
     /// this allocation.
     #[inline]
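Beyond the doc-test above, a usage sketch (again assuming the unstable unwrap_rc_as_box gate from this commit) showing that the Err branch hands the Rc back while other strong references exist, and that outstanding Weak references do not block unwrapping:

    #![feature(unwrap_rc_as_box)]

    use std::rc::Rc;

    fn main() {
        let x: Rc<[i32]> = Rc::from(vec![1, 2, 3]);
        let y = Rc::clone(&x);

        // A second strong reference exists, so unwrapping fails and returns the Rc.
        let x = Rc::try_unwrap_as_box(x).unwrap_err();

        let w = Rc::downgrade(&x);
        drop(y);

        // The strong reference is now unique; the outstanding Weak does not block this.
        let boxed: Box<[i32]> = Rc::try_unwrap_as_box(x).unwrap();
        assert_eq!(&*boxed, &[1, 2, 3]);

        // The strong count was dropped to zero, so the Weak can no longer be upgraded.
        assert!(w.upgrade().is_none());
    }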

library/alloc/src/sync.rs

+64 -11
@@ -617,19 +617,10 @@ impl<T> Arc<T> {
     #[inline]
     #[stable(feature = "arc_unique", since = "1.4.0")]
     pub fn try_unwrap(this: Self) -> Result<T, Self> {
-        if this.inner().strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() {
-            return Err(this);
-        }
-
-        acquire!(this.inner().strong);
+        let weak = Self::leak_as_owning_weak(this)?;
 
         unsafe {
-            let elem = ptr::read(&this.ptr.as_ref().data);
-
-            // Make a weak pointer to clean up the implicit strong-weak reference
-            let _weak = Weak { ptr: this.ptr };
-            mem::forget(this);
-
+            let elem = ptr::read(&weak.ptr.as_ref().data);
             Ok(elem)
         }
     }
@@ -1047,6 +1038,68 @@ impl<T: ?Sized> Arc<T> {
         unsafe { mem::drop(Arc::from_raw(ptr)) };
     }
 
+    /// Reduce the strong count, if this is the last strong reference.
+    ///
+    /// When this operation succeeds, no more strong references to the allocation can be
+    /// created, making this the owner of the contained value. This returns a `Weak` that manages
+    /// the allocation while the caller can (unsafely) take advantage of their ownership. In
+    /// contrast to `try_unwrap` this also works for unsized pointees.
+    fn leak_as_owning_weak(this: Self) -> Result<Weak<T>, Self> {
+        if this.inner().strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() {
+            return Err(this);
+        }
+
+        acquire!(this.inner().strong);
+
+        // At this point we own the pointee. We keep it alive through a Weak reference while the
+        // caller handles ownership. This leaks the value but not the allocation, which is
+        // eventually deallocated via the returned `Weak`.
+        // The weak pointer also cleans up the implicit strong-weak reference.
+        let this = mem::ManuallyDrop::new(this);
+        Ok(Weak { ptr: this.ptr })
+    }
+
+    /// Returns the boxed inner value, if the `Arc` has exactly one strong reference.
+    ///
+    /// Otherwise, an [`Err`] is returned with the same `Arc` that was passed in.
+    ///
+    /// This will succeed even if there are outstanding weak references.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(unwrap_rc_as_box)]
+    ///
+    /// use std::sync::Arc;
+    ///
+    /// let x: Arc<str> = Arc::from("Hello, world");
+    /// assert!(matches!(
+    ///     Arc::try_unwrap_as_box(x),
+    ///     Ok(b) if &b[..2] == "He"
+    /// ));
+    /// ```
+    #[cfg(not(no_global_oom_handling))]
+    #[unstable(feature = "unwrap_rc_as_box", reason = "recently added", issue = "none")]
+    pub fn try_unwrap_as_box(this: Self) -> Result<Box<T>, Self> {
+        let owning_weak = Self::leak_as_owning_weak(this)?;
+        let src_ptr = owning_weak.as_ptr();
+
+        unsafe {
+            // We 'own' this value right now so it is still initialized.
+            let size = mem::size_of_val(&*src_ptr);
+            // The raw allocation for our Box. After this point we must not panic, as otherwise
+            // we would leak this memory. We can't use MaybeUninit here as that is only valid for
+            // sized types.
+            let raw_box = Box::<T>::allocate_for_ptr(&Global, src_ptr);
+
+            // This is a new allocation so it can not overlap with the one which `owning_weak` is
+            // still holding onto.
+            ptr::copy_nonoverlapping(src_ptr as *const u8, raw_box as *mut u8, size);
+
+            Ok(Box::from_raw(raw_box))
+        }
+    }
+
     #[inline]
     fn inner(&self) -> &ArcInner<T> {
         // This unsafety is ok because while this arc is alive we're guaranteed
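The same pattern works for Arc with a non-slice unsized pointee; a hedged sketch, again assuming the unstable unwrap_rc_as_box gate from this commit:

    #![feature(unwrap_rc_as_box)]

    use std::fmt::Display;
    use std::sync::Arc;

    fn main() {
        let a: Arc<dyn Display> = Arc::new(42_i32);

        // We hold the only strong reference, so the trait object is moved into its
        // own Box allocation without cloning or naming the concrete type.
        let b: Box<dyn Display> = match Arc::try_unwrap_as_box(a) {
            Ok(boxed) => boxed,
            Err(_) => unreachable!("only one strong reference exists"),
        };
        assert_eq!(b.to_string(), "42");
    }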
