Relax Allocator bounds into pin-safe trait #94114

Closed · wants to merge 2 commits · changes shown from all commits
7 changes: 7 additions & 0 deletions library/alloc/src/alloc.rs
@@ -44,6 +44,8 @@ extern "Rust" {
/// to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This type is guaranteed to implement [`PinSafeAllocator`].
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](self#functions).
#[unstable(feature = "allocator_api", issue = "32838")]
@@ -314,6 +316,11 @@ unsafe impl Allocator for Global {
}
}

// SAFETY: memory blocks allocated by `Global` are not invalidated when `Global` is dropped.
#[unstable(feature = "allocator_api", issue = "32838")]
#[cfg(not(test))]
unsafe impl PinSafeAllocator for Global {}

/// The allocator for unique pointers.
#[cfg(all(not(no_global_oom_handling), not(test)))]
#[lang = "exchange_malloc"]
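
A minimal sketch of what this impl buys (the function name is illustrative, not part of the PR): because `Global` is pin-safe, the safe `Box::pin` constructor, which allocates from `Global`, stays sound even for `!Unpin` values.

use std::marker::PhantomPinned;

fn pin_in_global() {
    // `Global: PinSafeAllocator` guarantees the allocation stays valid even if
    // the (zero-sized) allocator handle is dropped or forgotten, so the pinned
    // value can never be invalidated by the allocator going away.
    let pinned = Box::pin(PhantomPinned);
    let _ = pinned;
}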
43 changes: 21 additions & 22 deletions library/alloc/src/boxed.rs
@@ -168,7 +168,7 @@ use core::task::{Context, Poll};

#[cfg(not(no_global_oom_handling))]
use crate::alloc::{handle_alloc_error, WriteCloneIntoRaw};
use crate::alloc::{AllocError, Allocator, Global, Layout};
use crate::alloc::{AllocError, Allocator, Global, Layout, PinSafeAllocator};
#[cfg(not(no_global_oom_handling))]
use crate::borrow::Cow;
use crate::raw_vec::RawVec;
@@ -575,7 +575,7 @@ impl<T, A: Allocator> Box<T, A> {
#[inline(always)]
pub const fn pin_in(x: T, alloc: A) -> Pin<Self>
where
A: 'static + ~const Allocator + ~const Destruct,
A: ~const Allocator + ~const PinSafeAllocator + ~const Destruct,
{
Self::into_pin(Self::new_in(x, alloc))
}
@@ -1123,8 +1123,12 @@ impl<T: ?Sized, A: Allocator> Box<T, A> {
// recognized as "releasing" the unique pointer to permit aliased raw accesses,
// so all raw pointer methods have to go through `Box::leak`. Turning *that* into a raw pointer
// behaves correctly.
let alloc = unsafe { ptr::read(&b.1) };
(Unique::from(Box::leak(b)), alloc)
let manually_drop = mem::ManuallyDrop::new(b);
// SAFETY: unique ownership of the memory block moves into `ptr`
let ptr = unsafe { &mut *manually_drop.0.as_ptr() };
// SAFETY: moving the allocator will not invalidate `ptr`
let alloc = unsafe { ptr::read(&manually_drop.1) };
(Unique::from(ptr), alloc)
}

/// Returns a reference to the underlying allocator.
@@ -1179,9 +1183,13 @@ impl<T: ?Sized, A: Allocator> Box<T, A> {
#[inline]
pub const fn leak<'a>(b: Self) -> &'a mut T
where
A: 'a,
A: ~const PinSafeAllocator,
{
unsafe { &mut *mem::ManuallyDrop::new(b).0.as_ptr() }
let (ptr, alloc) = Box::into_unique(b);
mem::forget(alloc);
// SAFETY: ptr will remain valid for any lifetime since `alloc` is never
// dropped
unsafe { &mut *ptr.as_ptr() }
}

/// Converts a `Box<T>` into a `Pin<Box<T>>`. If `T` does not implement [`Unpin`], then
@@ -1218,7 +1226,7 @@ impl<T: ?Sized, A: Allocator> Box<T, A> {
#[rustc_const_unstable(feature = "const_box", issue = "92521")]
pub const fn into_pin(boxed: Self) -> Pin<Self>
where
A: 'static,
A: ~const PinSafeAllocator,
{
// It's not possible to move or replace the insides of a `Pin<Box<T>>`
// when `T: !Unpin`, so it's safe to pin it directly without any
@@ -1454,9 +1462,9 @@ impl<T> From<T> for Box<T> {

#[stable(feature = "pin", since = "1.33.0")]
#[rustc_const_unstable(feature = "const_box", issue = "92521")]
impl<T: ?Sized, A: Allocator> const From<Box<T, A>> for Pin<Box<T, A>>
impl<T: ?Sized, A> const From<Box<T, A>> for Pin<Box<T, A>>
where
A: 'static,
A: ~const PinSafeAllocator,
{
/// Converts a `Box<T>` into a `Pin<Box<T>>`. If `T` does not implement [`Unpin`], then
/// `*boxed` will be pinned in memory and unable to be moved.
@@ -2033,13 +2041,10 @@ impl<T: ?Sized, A: Allocator> AsMut<T> for Box<T, A> {
*/
#[stable(feature = "pin", since = "1.33.0")]
#[rustc_const_unstable(feature = "const_box", issue = "92521")]
impl<T: ?Sized, A: Allocator> const Unpin for Box<T, A> where A: 'static {}
impl<T: ?Sized, A: Allocator> const Unpin for Box<T, A> {}

#[unstable(feature = "generator_trait", issue = "43122")]
impl<G: ?Sized + Generator<R> + Unpin, R, A: Allocator> Generator<R> for Box<G, A>
where
A: 'static,
{
impl<G: ?Sized + Generator<R> + Unpin, R, A: Allocator> Generator<R> for Box<G, A> {
type Yield = G::Yield;
type Return = G::Return;

@@ -2049,10 +2054,7 @@ where
}

#[unstable(feature = "generator_trait", issue = "43122")]
impl<G: ?Sized + Generator<R>, R, A: Allocator> Generator<R> for Pin<Box<G, A>>
where
A: 'static,
{
impl<G: ?Sized + Generator<R>, R, A: Allocator> Generator<R> for Pin<Box<G, A>> {
type Yield = G::Yield;
type Return = G::Return;

@@ -2062,10 +2064,7 @@ where
}

#[stable(feature = "futures_api", since = "1.36.0")]
impl<F: ?Sized + Future + Unpin, A: Allocator> Future for Box<F, A>
where
A: 'static,
{
impl<F: ?Sized + Future + Unpin, A: Allocator> Future for Box<F, A> {
type Output = F::Output;

fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
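
A hedged sketch of the net effect on `Box::leak` (names illustrative): leaking through a pin-safe allocator still yields a `'static` reference, while leaking through a borrowed local allocator is now rejected, as the updated `leak-alloc` test further down shows.

fn leak_with_global() -> &'static mut u32 {
    // `Global: PinSafeAllocator`, so the memory behind the leaked reference can
    // never be invalidated and the `'static` lifetime is justified.
    Box::leak(Box::new(10))
}

// By contrast, for a local `alloc` that only implements `Allocator`:
//     let boxed = Box::new_in(10, alloc.by_ref());
//     let theref = Box::leak(boxed);
// only `&'static A` is `PinSafeAllocator`, so the borrow of `alloc` would have
// to be `'static` and this fails to compile (see leak-alloc.stderr below).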
36 changes: 29 additions & 7 deletions library/alloc/src/collections/btree/append.rs
@@ -15,7 +15,11 @@ impl<K, V> Root<K, V> {
/// a `BTreeMap`, both iterators should produce keys in strictly ascending
/// order, each greater than all keys in the tree, including any keys
/// already in the tree upon entry.
pub fn append_from_sorted_iters<I, A: Allocator + Clone>(
///
/// # Safety
///
/// `alloc` must be the allocator for the owning `BTreeMap`.
pub unsafe fn append_from_sorted_iters<I, A: Allocator + Clone>(
&mut self,
left: I,
right: I,
@@ -29,14 +33,25 @@ impl<K, V> Root<K, V> {
let iter = MergeIter(MergeIterInner::new(left, right));

// Meanwhile, we build a tree from the sorted sequence in linear time.
self.bulk_push(iter, length, alloc)

// SAFETY: The caller has guaranteed that `alloc` is the allocator for the owning
// `BTreeMap`.
unsafe { self.bulk_push(iter, length, alloc) }
}

/// Pushes all key-value pairs to the end of the tree, incrementing a
/// `length` variable along the way. The latter makes it easier for the
/// caller to avoid a leak when the iterator panics.
pub fn bulk_push<I, A: Allocator + Clone>(&mut self, iter: I, length: &mut usize, alloc: A)
where
///
/// # Safety
///
/// `alloc` must be the allocator for the owning `BTreeMap`.
pub unsafe fn bulk_push<I, A: Allocator + Clone>(
&mut self,
iter: I,
length: &mut usize,
alloc: A,
) where
I: Iterator<Item = (K, V)>,
{
let mut cur_node = self.borrow_mut().last_leaf_edge().into_node();
@@ -64,17 +79,24 @@ impl<K, V> Root<K, V> {
}
Err(_) => {
// We are at the top, create a new root node and push there.
open_node = self.push_internal_level(alloc.clone());

// SAFETY: The caller has guaranteed that `alloc` is the allocator for
// the owning `BTreeMap`.
open_node = unsafe { self.push_internal_level(alloc.clone()) };
break;
}
}
}

// Push key-value pair and new right subtree.
let tree_height = open_node.height() - 1;
let mut right_tree = Root::new(alloc.clone());
// SAFETY: The caller has guaranteed that `alloc` is the allocator for the owning
// `BTreeMap`.
let mut right_tree = unsafe { Root::new(alloc.clone()) };
for _ in 0..tree_height {
right_tree.push_internal_level(alloc.clone());
// SAFETY: The caller has guaranteed that `alloc` is the allocator for the
// owning `BTreeMap`.
unsafe { right_tree.push_internal_level(alloc.clone()) };
}
open_node.push(key, value, right_tree);
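
The reason the new `# Safety` contract matters: every node the tree creates is later deallocated through the owning map's allocator, so allocating a node from any other allocator would eventually free memory through an allocator that never allocated it. A self-contained illustration of that general rule, with `System` standing in for any allocator and the function name purely illustrative:

#![feature(allocator_api)]

use std::alloc::{Allocator, Layout, System};

fn matched_alloc_dealloc() {
    let layout = Layout::new::<u64>();
    let block = System.allocate(layout).expect("allocation failed");
    // SOUND: the allocator that allocated the block (or a clone of it) frees it.
    unsafe { System.deallocate(block.cast(), layout) };
    // Freeing `block` through an unrelated allocator instead would be undefined
    // behavior, which is exactly what a mismatched `alloc` argument could cause.
}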

40 changes: 27 additions & 13 deletions library/alloc/src/collections/btree/map.rs
@@ -216,7 +216,9 @@ impl<K: Clone, V: Clone, A: Allocator + Clone> Clone for BTreeMap<K, V, A> {
match node.force() {
Leaf(leaf) => {
let mut out_tree = BTreeMap {
root: Some(Root::new(alloc.clone())),
// SAFETY: `alloc` is the allocator for both the original and the cloned
// `BTreeMap`.
root: unsafe { Some(Root::new(alloc.clone())) },
length: 0,
alloc: ManuallyDrop::new(alloc),
_marker: PhantomData,
@@ -247,7 +249,9 @@ impl<K: Clone, V: Clone, A: Allocator + Clone> Clone for BTreeMap<K, V, A> {

{
let out_root = out_tree.root.as_mut().unwrap();
let mut out_node = out_root.push_internal_level(alloc.clone());
// SAFETY: `alloc` is the allocator for both the original and the cloned
// `BTreeMap`.
let mut out_node = unsafe { out_root.push_internal_level(alloc.clone()) };
let mut in_edge = internal.first_edge();
while let Ok(kv) = in_edge.right_kv() {
let (k, v) = kv.into_kv();
@@ -269,7 +273,9 @@ impl<K: Clone, V: Clone, A: Allocator + Clone> Clone for BTreeMap<K, V, A> {
out_node.push(
k,
v,
subroot.unwrap_or_else(|| Root::new(alloc.clone())),
// SAFETY: `alloc` is the allocator for both the original and cloned
// `BTreeMap`.
subroot.unwrap_or_else(|| unsafe { Root::new(alloc.clone()) }),
);
out_tree.length += 1 + sublength;
}
@@ -323,8 +329,9 @@ where

fn replace(&mut self, key: K) -> Option<K> {
let (map, dormant_map) = DormantMutRef::new(self);
// SAFETY: `alloc` is the allocator for the `BTreeMap`.
let root_node =
map.root.get_or_insert_with(|| Root::new((*map.alloc).clone())).borrow_mut();
map.root.get_or_insert_with(|| unsafe { Root::new((*map.alloc).clone()) }).borrow_mut();
match root_node.search_tree::<K>(&key) {
Found(mut kv) => Some(mem::replace(kv.key_mut(), key)),
GoDown(handle) => {
@@ -1144,13 +1151,16 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {

let self_iter = mem::replace(self, Self::new_in((*self.alloc).clone())).into_iter();
let other_iter = mem::replace(other, Self::new_in((*self.alloc).clone())).into_iter();
let root = self.root.get_or_insert_with(|| Root::new((*self.alloc).clone()));
root.append_from_sorted_iters(
self_iter,
other_iter,
&mut self.length,
(*self.alloc).clone(),
)
let root = self.root.get_or_insert_with(|| unsafe { Root::new((*self.alloc).clone()) });
// SAFETY: `self.alloc` is the allocator for the `BTreeMap`.
unsafe {
root.append_from_sorted_iters(
self_iter,
other_iter,
&mut self.length,
(*self.alloc).clone(),
)
}
}

/// Constructs a double-ended iterator over a sub-range of elements in the map.
@@ -1464,9 +1474,13 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
K: Ord,
I: IntoIterator<Item = (K, V)>,
{
let mut root = Root::new(alloc.clone());
// SAFETY: `alloc` is the allocator for the returned `BTreeMap`.
let mut root = unsafe { Root::new(alloc.clone()) };
let mut length = 0;
root.bulk_push(DedupSortedIter::new(iter.into_iter()), &mut length, alloc.clone());
// SAFETY: `alloc` is the allocator for the returned `BTreeMap`.
unsafe {
root.bulk_push(DedupSortedIter::new(iter.into_iter()), &mut length, alloc.clone());
}
BTreeMap { root: Some(root), length, alloc: ManuallyDrop::new(alloc), _marker: PhantomData }
}
}
45 changes: 27 additions & 18 deletions library/alloc/src/collections/btree/map/entry.rs
@@ -342,30 +342,39 @@ impl<'a, K: Ord, V, A: Allocator + Clone> VacantEntry<'a, K, V, A> {
None => {
// SAFETY: There is no tree yet so no reference to it exists.
let map = unsafe { self.dormant_map.awaken() };
let mut root = NodeRef::new_leaf(self.alloc.clone());
// SAFETY: `self.alloc` is the allocator for the owning `BTreeMap`.
let mut root = unsafe { NodeRef::new_leaf(self.alloc.clone()) };
let val_ptr = root.borrow_mut().push(self.key, value) as *mut V;
map.root = Some(root.forget_type());
map.length = 1;
val_ptr
}
Some(handle) => match handle.insert_recursing(self.key, value, self.alloc.clone()) {
(None, val_ptr) => {
// SAFETY: We have consumed self.handle.
let map = unsafe { self.dormant_map.awaken() };
map.length += 1;
val_ptr
Some(handle) => {
// SAFETY: `self.alloc` is the allocator for the owning `BTreeMap`.
let insert_result =
unsafe { handle.insert_recursing(self.key, value, self.alloc.clone()) };
match insert_result {
(None, val_ptr) => {
// SAFETY: We have consumed self.handle.
let map = unsafe { self.dormant_map.awaken() };
map.length += 1;
val_ptr
}
(Some(ins), val_ptr) => {
drop(ins.left);
// SAFETY: We have consumed self.handle and dropped the
// remaining reference to the tree, ins.left.
let map = unsafe { self.dormant_map.awaken() };
let root = map.root.as_mut().unwrap(); // same as ins.left
// SAFETY: `self.alloc` is the allocator for the owning `BTreeMap`.
unsafe {
root.push_internal_level(self.alloc).push(ins.kv.0, ins.kv.1, ins.right)
};
map.length += 1;
val_ptr
}
}
(Some(ins), val_ptr) => {
drop(ins.left);
// SAFETY: We have consumed self.handle and dropped the
// remaining reference to the tree, ins.left.
let map = unsafe { self.dormant_map.awaken() };
let root = map.root.as_mut().unwrap(); // same as ins.left
root.push_internal_level(self.alloc).push(ins.kv.0, ins.kv.1, ins.right);
map.length += 1;
val_ptr
}
},
}
};
// Now that we have finished growing the tree using borrowed references,
// dereference the pointer to a part of it, that we picked up along the way.
9 changes: 8 additions & 1 deletion library/alloc/src/collections/btree/map/tests.rs
@@ -116,7 +116,14 @@ impl<K, V> BTreeMap<K, V> {
{
let iter = mem::take(self).into_iter();
if !iter.is_empty() {
self.root.insert(Root::new(*self.alloc)).bulk_push(iter, &mut self.length, *self.alloc);
// SAFETY: `self.alloc` is the allocator for this `BTreeMap`.
unsafe {
self.root.insert(Root::new(*self.alloc)).bulk_push(
iter,
&mut self.length,
*self.alloc,
);
}
}
}
}
101 changes: 83 additions & 18 deletions library/alloc/src/collections/btree/node.rs
@@ -213,31 +213,61 @@ unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type>
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Dying, K, V, Type> {}

impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
pub fn new_leaf<A: Allocator + Clone>(alloc: A) -> Self {
Self::from_new_leaf(LeafNode::new(alloc))
/// # Safety
///
/// `alloc` must be the allocator for the owning `BTreeMap`.
pub unsafe fn new_leaf<A: Allocator + Clone>(alloc: A) -> Self {
// SAFETY: The caller has guaranteed that the allocator of the provided `Box` is the
// allocator for the owning `BTreeMap`.
unsafe { Self::from_new_leaf(LeafNode::new(alloc)) }
}

fn from_new_leaf<A: Allocator + Clone>(leaf: Box<LeafNode<K, V>, A>) -> Self {
NodeRef { height: 0, node: NonNull::from(Box::leak(leaf)), _marker: PhantomData }
/// # Safety
///
/// The allocator of the `Box` must be the allocator for the owning `BTreeMap`.
unsafe fn from_new_leaf<A: Allocator + Clone>(leaf: Box<LeafNode<K, V>, A>) -> Self {
// We're dropping the `alloc` part of the box here, but our safety condition guarantees that
// a clone of that allocator will outlive the returned `NodeRef` in the owning `BTreeMap`.
// This prevents the memory of the box from being invalidated.
let ptr = Box::into_raw(leaf);
// SAFETY: The pointer returned from `Box::into_raw` is guaranteed to be non-null.
let node = unsafe { NonNull::new_unchecked(ptr) };
NodeRef { height: 0, node, _marker: PhantomData }
}
}

impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
fn new_internal<A: Allocator + Clone>(child: Root<K, V>, alloc: A) -> Self {
/// # Safety
///
/// `alloc` must be the allocator for the owning `BTreeMap`.
unsafe fn new_internal<A: Allocator + Clone>(child: Root<K, V>, alloc: A) -> Self {
let mut new_node = unsafe { InternalNode::new(alloc) };
new_node.edges[0].write(child.node);

// SAFETY:
// - `child.height + 1` is always nonzero.
// - The caller has guaranteed that the allocator of the provided `Box` is the allocator for
// the owning `BTreeMap`.
unsafe { NodeRef::from_new_internal(new_node, child.height + 1) }
}

/// # Safety
/// `height` must not be zero.
///
/// - `height` must not be zero.
/// - The allocator of the `Box` must be the allocator for the owning `BTreeMap`.
unsafe fn from_new_internal<A: Allocator + Clone>(
internal: Box<InternalNode<K, V>, A>,
height: usize,
) -> Self {
debug_assert!(height > 0);
let node = NonNull::from(Box::leak(internal)).cast();
let mut this = NodeRef { height, node, _marker: PhantomData };
// We're dropping the `alloc` part of the box here, but our safety condition guarantees that
// a clone of that allocator will outlive the returned `NodeRef` in the owning `BTreeMap`.
// This prevents the memory of the box from being invalidated.
let ptr = Box::into_raw(internal);
// SAFETY: The pointer returned from `Box::into_raw` is guaranteed to be non-null.
let node = unsafe { NonNull::new_unchecked(ptr) };

let mut this = NodeRef { height, node: node.cast(), _marker: PhantomData };
this.borrow_mut().correct_all_childrens_parent_links();
this
}
@@ -559,18 +589,32 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {

impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
/// Returns a new owned tree, with its own root node that is initially empty.
pub fn new<A: Allocator + Clone>(alloc: A) -> Self {
NodeRef::new_leaf(alloc).forget_type()
///
/// # Safety
///
/// `alloc` must be the allocator for the owning `BTreeMap`.
pub unsafe fn new<A: Allocator + Clone>(alloc: A) -> Self {
// SAFETY: The caller has guaranteed that `alloc` is the allocator for the owning
// `BTreeMap`.
unsafe { NodeRef::new_leaf(alloc).forget_type() }
}

/// Adds a new internal node with a single edge pointing to the previous root node,
/// makes that new node the root node, and returns it. This increases the height by 1
/// and is the opposite of `pop_internal_level`.
pub fn push_internal_level<A: Allocator + Clone>(
///
/// # Safety
///
/// `alloc` must be the allocator for the owning `BTreeMap`.
pub unsafe fn push_internal_level<A: Allocator + Clone>(
&mut self,
alloc: A,
) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
super::mem::take_mut(self, |old_root| NodeRef::new_internal(old_root, alloc).forget_type());
// SAFETY: The caller has guaranteed that `alloc` is the allocator for the owning
// `BTreeMap`.
super::mem::take_mut(self, |old_root| unsafe {
NodeRef::new_internal(old_root, alloc).forget_type()
});

// `self.borrow_mut()`, except that we just forgot we're internal now:
NodeRef { height: self.height, node: self.node, _marker: PhantomData }
@@ -869,7 +913,11 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
/// this edge. This method splits the node if there isn't enough room.
///
/// The returned pointer points to the inserted value.
fn insert<A: Allocator + Clone>(
///
/// # Safety
///
/// `alloc` must be the allocator for the owning `BTreeMap`.
unsafe fn insert<A: Allocator + Clone>(
mut self,
key: K,
val: V,
@@ -881,7 +929,9 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
} else {
let (middle_kv_idx, insertion) = splitpoint(self.idx);
let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) };
let mut result = middle.split(alloc);
// SAFETY: The caller has guaranteed that `alloc` is the allocator of the owning
// `BTreeMap`.
let mut result = unsafe { middle.split(alloc) };
let mut insertion_edge = match insertion {
LeftOrRight::Left(insert_idx) => unsafe {
Handle::new_edge(result.left.reborrow_mut(), insert_idx)
@@ -968,13 +1018,19 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
/// If the returned result is some `SplitResult`, the `left` field will be the root node.
/// The returned pointer points to the inserted value, which in the case of `SplitResult`
/// is in the `left` or `right` tree.
pub fn insert_recursing<A: Allocator + Clone>(
///
/// # Safety
///
/// `alloc` must be the allocator for the owning `BTreeMap`.
pub unsafe fn insert_recursing<A: Allocator + Clone>(
self,
key: K,
value: V,
alloc: A,
) -> (Option<SplitResult<'a, K, V, marker::LeafOrInternal>>, *mut V) {
let (mut split, val_ptr) = match self.insert(key, value, alloc.clone()) {
// SAFETY: The caller has guaranteed that `alloc` is the allocator for the owning
// `BTreeMap`.
let (mut split, val_ptr) = match unsafe { self.insert(key, value, alloc.clone()) } {
(None, val_ptr) => return (None, val_ptr),
(Some(split), val_ptr) => (split.forget_node_type(), val_ptr),
};
@@ -1128,12 +1184,21 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
/// - The key and value pointed to by this handle are extracted.
/// - All the key-value pairs to the right of this handle are put into a newly
/// allocated node.
pub fn split<A: Allocator + Clone>(mut self, alloc: A) -> SplitResult<'a, K, V, marker::Leaf> {
///
/// # Safety
///
/// `alloc` must be the allocator for the owning `BTreeMap`.
pub unsafe fn split<A: Allocator + Clone>(
mut self,
alloc: A,
) -> SplitResult<'a, K, V, marker::Leaf> {
let mut new_node = LeafNode::new(alloc);

let kv = self.split_leaf_data(&mut new_node);

let right = NodeRef::from_new_leaf(new_node);
// SAFETY: The caller has guaranteed that `alloc` is the allocator for the owning
// `BTreeMap`.
let right = unsafe { NodeRef::from_new_leaf(new_node) };
SplitResult { left: self.node, kv, right }
}
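
Note that `from_new_leaf` and `from_new_internal` switch from `Box::leak` to `Box::into_raw`: `leak` now requires `A: PinSafeAllocator`, which an arbitrary `BTreeMap` allocator does not satisfy, while `into_raw` merely hands the caller the obligation to free the memory through the same allocator later. A hedged round-trip sketch of that pattern (illustrative only):

#![feature(allocator_api)]

use std::alloc::Global;

fn into_raw_round_trip() {
    let boxed = Box::new_in(7u32, Global);
    // No `PinSafeAllocator` bound needed here, unlike `Box::leak`.
    let raw = Box::into_raw(boxed);
    // SAFETY: `raw` came from `Box::into_raw` on a box allocated in `Global`.
    let boxed = unsafe { Box::from_raw_in(raw, Global) };
    assert_eq!(*boxed, 7);
}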

9 changes: 6 additions & 3 deletions library/alloc/src/collections/btree/node/tests.rs
@@ -68,10 +68,13 @@ fn test_splitpoint() {

#[test]
fn test_partial_eq() {
let mut root1 = NodeRef::new_leaf(Global);
// SAFETY: `Global` is the allocator for the `BTreeMap` we're testing.
let mut root1 = unsafe { NodeRef::new_leaf(Global) };
root1.borrow_mut().push(1, ());
let mut root1 = NodeRef::new_internal(root1.forget_type(), Global).forget_type();
let root2 = Root::new(Global);
// SAFETY: `Global` is the allocator for the `BTreeMap` we're testing.
let mut root1 = unsafe { NodeRef::new_internal(root1.forget_type(), Global).forget_type() };
// SAFETY: `Global` is the allocator for the `BTreeMap` we're testing.
let root2 = unsafe { Root::new(Global) };
root1.reborrow().assert_back_pointers();
root2.reborrow().assert_back_pointers();

14 changes: 12 additions & 2 deletions library/alloc/src/collections/btree/split.rs
@@ -63,10 +63,20 @@ impl<K, V> Root<K, V> {
}

/// Creates a tree consisting of empty nodes.
///
/// # Safety
///
/// `alloc` must be the allocator for the owning `BTreeMap`.
fn new_pillar<A: Allocator + Clone>(height: usize, alloc: A) -> Self {
let mut root = Root::new(alloc.clone());
// SAFETY: The caller has guaranteed that `alloc` is the allocator for the owning
// `BTreeMap`.
let mut root = unsafe { Root::new(alloc.clone()) };
for _ in 0..height {
root.push_internal_level(alloc.clone());
// SAFETY: The caller has guaranteed that `alloc` is the allocator for the owning
// `BTreeMap`.
unsafe {
root.push_internal_level(alloc.clone());
}
}
root
}
1 change: 1 addition & 0 deletions library/alloc/src/rc.rs
@@ -630,6 +630,7 @@ impl<T> Rc<T> {
#[stable(feature = "pin", since = "1.33.0")]
#[must_use]
pub fn pin(value: T) -> Pin<Rc<T>> {
// SAFETY: Global is a pin-safe allocator.
unsafe { Pin::new_unchecked(Rc::new(value)) }
}

2 changes: 2 additions & 0 deletions library/alloc/src/sync.rs
@@ -530,13 +530,15 @@ impl<T> Arc<T> {
#[stable(feature = "pin", since = "1.33.0")]
#[must_use]
pub fn pin(data: T) -> Pin<Arc<T>> {
// SAFETY: Global is a pin-safe allocator.
unsafe { Pin::new_unchecked(Arc::new(data)) }
}

/// Constructs a new `Pin<Arc<T>>`, returning an error if allocation fails.
#[unstable(feature = "allocator_api", issue = "32838")]
#[inline]
pub fn try_pin(data: T) -> Result<Pin<Arc<T>>, AllocError> {
// SAFETY: Global is a pin-safe allocator.
unsafe { Ok(Pin::new_unchecked(Arc::try_new(data)?)) }
}

5 changes: 4 additions & 1 deletion library/alloc/tests/boxed.rs
@@ -1,4 +1,4 @@
use core::alloc::{AllocError, Allocator, Layout};
use core::alloc::{AllocError, Allocator, Layout, PinSafeAllocator};
use core::cell::Cell;
use core::mem::MaybeUninit;
use core::ptr::NonNull;
@@ -151,6 +151,9 @@ unsafe impl const Allocator for ConstAllocator {
}
}

// SAFETY: Memory allocated by `ConstAllocator` is never invalidated.
unsafe impl const PinSafeAllocator for ConstAllocator {}

#[test]
fn const_box() {
const VALUE: u32 = {
28 changes: 24 additions & 4 deletions library/core/src/alloc/mod.rs
@@ -87,12 +87,14 @@ impl fmt::Display for AllocError {
/// # Safety
///
/// * Memory blocks returned from an allocator must point to valid memory and retain their validity
/// until the instance and all of its clones are dropped,
/// until the instance and all of its clones are dropped, forgotten, or otherwise rendered
/// inaccessible. The validity of a memory block is tied directly to the validity of the allocator
/// group that it is allocated from.
///
/// * cloning or moving the allocator must not invalidate memory blocks returned from this
/// allocator. A cloned allocator must behave like the same allocator, and
/// * Cloning or moving the allocator must not invalidate memory blocks returned from this
/// allocator. A cloned allocator must behave like the same allocator.
///
/// * any pointer to a memory block which is [*currently allocated*] may be passed to any other
/// * Any pointer to a memory block which is [*currently allocated*] may be passed to any other
/// method of the allocator.
///
/// [*currently allocated*]: #currently-allocated-memory
@@ -408,3 +410,21 @@ where
unsafe { (**self).shrink(ptr, old_layout, new_layout) }
}
}

/// An [`Allocator`] which returns memory blocks that can safely be pinned.
///
/// Unlike `Allocator`, `PinSafeAllocator` guarantees that its allocated memory remains valid
/// indefinitely if an instance is forgotten rather than dropped.
///
/// # Safety
///
/// In addition to the safety guarantees of `Allocator`, memory blocks returned from a
/// `PinSafeAllocator` must retain their validity until the instance and all of its clones are
/// dropped.
#[unstable(feature = "allocator_api", issue = "32838")]
pub unsafe trait PinSafeAllocator: Allocator {}

#[unstable(feature = "allocator_api", issue = "32838")]
// SAFETY: Allocators that live forever never become unreachable, and so never invalidate their
// allocated memory blocks.
unsafe impl<A: Allocator + ?Sized> PinSafeAllocator for &'static A {}
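
A minimal sketch of what the blanket impl permits, with `System` assumed purely for illustration: a `'static` reference to an allocator satisfies the pinning bounds, so `Box::pin_in` accepts it even though the referenced allocator type never opts in itself.

#![feature(allocator_api)]

use std::alloc::System;
use std::marker::PhantomPinned;

static ALLOC: System = System;

fn pin_with_static_ref() {
    // `&'static System: PinSafeAllocator` via the blanket impl above, and
    // `&System: Allocator` via the by-reference impl, so `pin_in` is satisfied.
    let pinned = Box::pin_in(PhantomPinned, &ALLOC);
    let _ = pinned;
}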
1 change: 1 addition & 0 deletions src/test/ui/box/leak-alloc.rs
@@ -22,6 +22,7 @@ fn use_value(_: u32) {}
fn main() {
let alloc = Alloc {};
let boxed = Box::new_in(10, alloc.by_ref());
//~^ ERROR `alloc` does not live long enough
let theref = Box::leak(boxed);
drop(alloc);
//~^ ERROR cannot move out of `alloc` because it is borrowed
29 changes: 21 additions & 8 deletions src/test/ui/box/leak-alloc.stderr
@@ -1,15 +1,28 @@
error[E0597]: `alloc` does not live long enough
--> $DIR/leak-alloc.rs:24:33
|
LL | let boxed = Box::new_in(10, alloc.by_ref());
| ----------------^^^^^^^^^^^^^^-
| | |
| | borrowed value does not live long enough
| argument requires that `alloc` is borrowed for `'static`
...
LL | }
| - `alloc` dropped here while still borrowed

error[E0505]: cannot move out of `alloc` because it is borrowed
--> $DIR/leak-alloc.rs:26:10
--> $DIR/leak-alloc.rs:27:10
|
LL | let boxed = Box::new_in(10, alloc.by_ref());
| -------------- borrow of `alloc` occurs here
LL | let theref = Box::leak(boxed);
| -------------------------------
| | |
| | borrow of `alloc` occurs here
| argument requires that `alloc` is borrowed for `'static`
...
LL | drop(alloc);
| ^^^^^ move out of `alloc` occurs here
LL |
LL | use_value(*theref)
| ------- borrow later used here

error: aborting due to previous error
error: aborting due to 2 previous errors

For more information about this error, try `rustc --explain E0505`.
Some errors have detailed explanations: E0505, E0597.
For more information about an error, try `rustc --explain E0505`.
35 changes: 35 additions & 0 deletions src/test/ui/box/pin-safe-alloc.rs
@@ -0,0 +1,35 @@
// run-pass
#![feature(allocator_api)]

use std::alloc::{AllocError, Allocator, Layout, PinSafeAllocator, System};
use std::ptr::NonNull;
use std::marker::PhantomPinned;
use std::boxed::Box;

struct Alloc {}

unsafe impl Allocator for Alloc {
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
System.allocate(layout)
}

unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
System.deallocate(ptr, layout)
}
}

unsafe impl PinSafeAllocator for Alloc {}

fn main() {
struct MyPinned {
_value: u32,
_pinned: PhantomPinned,
}

let value = MyPinned {
_value: 0,
_pinned: PhantomPinned,
};
let alloc = Alloc {};
let _ = Box::pin_in(value, alloc);
}
33 changes: 33 additions & 0 deletions src/test/ui/box/pin-unsafe-alloc.rs
@@ -0,0 +1,33 @@
#![feature(allocator_api)]

use std::alloc::{AllocError, Allocator, Layout, System};
use std::ptr::NonNull;
use std::marker::PhantomPinned;
use std::boxed::Box;

struct Alloc {}

unsafe impl Allocator for Alloc {
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
System.allocate(layout)
}

unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
System.deallocate(ptr, layout)
}
}

fn main() {
struct MyPinned {
_value: u32,
_pinned: PhantomPinned,
}

let value = MyPinned {
_value: 0,
_pinned: PhantomPinned,
};
let alloc = Alloc {};
let _ = Box::pin_in(value, alloc);
//~^ ERROR the trait bound `Alloc: PinSafeAllocator` is not satisfied
}
21 changes: 21 additions & 0 deletions src/test/ui/box/pin-unsafe-alloc.stderr
@@ -0,0 +1,21 @@
error[E0277]: the trait bound `Alloc: PinSafeAllocator` is not satisfied
--> $DIR/pin-unsafe-alloc.rs:31:32
|
LL | let _ = Box::pin_in(value, alloc);
| ----------- ^^^^^ expected an implementor of trait `PinSafeAllocator`
| |
| required by a bound introduced by this call
|
note: required by a bound in `Box::<T, A>::pin_in`
--> $SRC_DIR/alloc/src/boxed.rs:LL:COL
|
LL | A: ~const Allocator + ~const PinSafeAllocator + ~const Destruct,
| ^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `Box::<T, A>::pin_in`
help: consider borrowing here
|
LL | let _ = Box::pin_in(value, &alloc);
| +

error: aborting due to previous error

For more information about this error, try `rustc --explain E0277`.