Relax Allocator bounds into pin-safe trait #94114

Closed · wants to merge 2 commits
7 changes: 7 additions & 0 deletions library/alloc/src/alloc.rs
@@ -44,6 +44,8 @@ extern "Rust" {
/// to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This type is always guaranteed to implement [`PinSafeAllocator`].
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](self#functions).
#[unstable(feature = "allocator_api", issue = "32838")]
@@ -314,6 +316,11 @@ unsafe impl Allocator for Global {
}
}

// SAFETY: memory blocks allocated by `Global` are not invalidated when `Global` is dropped.
#[unstable(feature = "allocator_api", issue = "32838")]
#[cfg(not(test))]
unsafe impl PinSafeAllocator for Global {}

/// The allocator for unique pointers.
#[cfg(all(not(no_global_oom_handling), not(test)))]
#[lang = "exchange_malloc"]
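
For context: `PinSafeAllocator` is the marker trait this PR introduces, and its definition is not part of the hunks shown here. The sketch below is only an illustration of what such a marker trait plausibly looks like; its module location and the doc text (paraphrased from the SAFETY comment on the `Global` impl above) are assumptions, not the PR's actual definition.

```rust
// Sketch only: not taken from this diff. Outside the standard library this
// needs a nightly toolchain with the `allocator_api` feature enabled.
#![feature(allocator_api)]

use core::alloc::Allocator;

/// Marker for allocators whose allocations outlive the allocator value itself.
///
/// # Safety
///
/// Implementors promise that memory blocks they hand out stay valid even if
/// the allocator value is moved, forgotten, or dropped, which is what makes
/// it sound to pin allocations backed by it (as `Global` asserts above).
pub unsafe trait PinSafeAllocator: Allocator {}
```
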
43 changes: 21 additions & 22 deletions library/alloc/src/boxed.rs
@@ -168,7 +168,7 @@ use core::task::{Context, Poll};

#[cfg(not(no_global_oom_handling))]
use crate::alloc::{handle_alloc_error, WriteCloneIntoRaw};
use crate::alloc::{AllocError, Allocator, Global, Layout};
use crate::alloc::{AllocError, Allocator, Global, Layout, PinSafeAllocator};
#[cfg(not(no_global_oom_handling))]
use crate::borrow::Cow;
use crate::raw_vec::RawVec;
@@ -575,7 +575,7 @@ impl<T, A: Allocator> Box<T, A> {
#[inline(always)]
pub const fn pin_in(x: T, alloc: A) -> Pin<Self>
where
A: 'static + ~const Allocator + ~const Destruct,
A: ~const Allocator + ~const PinSafeAllocator + ~const Destruct,
{
Self::into_pin(Self::new_in(x, alloc))
}
@@ -1123,8 +1123,12 @@ impl<T: ?Sized, A: Allocator> Box<T, A> {
// recognized as "releasing" the unique pointer to permit aliased raw accesses,
// so all raw pointer methods have to go through `Box::leak`. Turning *that* to a raw pointer
// behaves correctly.
let alloc = unsafe { ptr::read(&b.1) };
(Unique::from(Box::leak(b)), alloc)
let manually_drop = mem::ManuallyDrop::new(b);
// SAFETY: unique ownership of the memory block moves into `ptr`
let ptr = unsafe { &mut *manually_drop.0.as_ptr() };
// SAFETY: moving the allocator will not invalidate `ptr`
let alloc = unsafe { ptr::read(&manually_drop.1) };
(Unique::from(ptr), alloc)
}

/// Returns a reference to the underlying allocator.
@@ -1179,9 +1183,13 @@ impl<T: ?Sized, A: Allocator> Box<T, A> {
#[inline]
pub const fn leak<'a>(b: Self) -> &'a mut T
where
A: 'a,
A: ~const PinSafeAllocator,
{
unsafe { &mut *mem::ManuallyDrop::new(b).0.as_ptr() }
let (ptr, alloc) = Box::into_unique(b);
mem::forget(alloc);
// SAFETY: ptr will remain valid for any lifetime since `alloc` is never
// dropped
unsafe { &mut *ptr.as_ptr() }
}

/// Converts a `Box<T>` into a `Pin<Box<T>>`. If `T` does not implement [`Unpin`], then
@@ -1218,7 +1226,7 @@ impl<T: ?Sized, A: Allocator> Box<T, A> {
#[rustc_const_unstable(feature = "const_box", issue = "92521")]
pub const fn into_pin(boxed: Self) -> Pin<Self>
where
A: 'static,
A: ~const PinSafeAllocator,
{
// It's not possible to move or replace the insides of a `Pin<Box<T>>`
// when `T: !Unpin`, so it's safe to pin it directly without any
@@ -1454,9 +1462,9 @@ impl<T> From<T> for Box<T> {

#[stable(feature = "pin", since = "1.33.0")]
#[rustc_const_unstable(feature = "const_box", issue = "92521")]
impl<T: ?Sized, A: Allocator> const From<Box<T, A>> for Pin<Box<T, A>>
impl<T: ?Sized, A> const From<Box<T, A>> for Pin<Box<T, A>>
where
A: 'static,
A: ~const PinSafeAllocator,
{
/// Converts a `Box<T>` into a `Pin<Box<T>>`. If `T` does not implement [`Unpin`], then
/// `*boxed` will be pinned in memory and unable to be moved.
@@ -2033,13 +2041,10 @@ impl<T: ?Sized, A: Allocator> AsMut<T> for Box<T, A> {
*/
#[stable(feature = "pin", since = "1.33.0")]
#[rustc_const_unstable(feature = "const_box", issue = "92521")]
impl<T: ?Sized, A: Allocator> const Unpin for Box<T, A> where A: 'static {}
impl<T: ?Sized, A: Allocator> const Unpin for Box<T, A> {}

#[unstable(feature = "generator_trait", issue = "43122")]
impl<G: ?Sized + Generator<R> + Unpin, R, A: Allocator> Generator<R> for Box<G, A>
where
A: 'static,
{
impl<G: ?Sized + Generator<R> + Unpin, R, A: Allocator> Generator<R> for Box<G, A> {
type Yield = G::Yield;
type Return = G::Return;

@@ -2049,10 +2054,7 @@ where
}

#[unstable(feature = "generator_trait", issue = "43122")]
impl<G: ?Sized + Generator<R>, R, A: Allocator> Generator<R> for Pin<Box<G, A>>
where
A: 'static,
{
impl<G: ?Sized + Generator<R>, R, A: Allocator> Generator<R> for Pin<Box<G, A>> {
type Yield = G::Yield;
type Return = G::Return;

@@ -2062,10 +2064,7 @@ where
}

#[stable(feature = "futures_api", since = "1.36.0")]
impl<F: ?Sized + Future + Unpin, A: Allocator> Future for Box<F, A>
where
A: 'static,
{
impl<F: ?Sized + Future + Unpin, A: Allocator> Future for Box<F, A> {
type Output = F::Output;

fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
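
A minimal usage sketch of the call sites in `boxed.rs` whose bounds change, assuming a nightly toolchain with `allocator_api` and this PR applied. `Global` is pin-safe, so these calls compile both before and after the change; the relaxed bounds only become visible with custom allocators.

```rust
#![feature(allocator_api)]

use std::alloc::Global;

fn main() {
    // Pinning now asks the allocator to be pin-safe rather than `'static`.
    let pinned = Box::pin_in(7_i32, Global);
    assert_eq!(*pinned, 7);

    // `Box::leak` now forgets the allocator explicitly instead of leaning on
    // `A: 'a`; for a zero-sized, pin-safe allocator like `Global` the
    // observable behaviour is unchanged.
    let s: &'static mut String = Box::leak(Box::new_in(String::from("pin"), Global));
    s.push_str("ned");
    assert_eq!(s, "pinned");
}
```
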
36 changes: 29 additions & 7 deletions library/alloc/src/collections/btree/append.rs
@@ -15,7 +15,11 @@ impl<K, V> Root<K, V> {
/// a `BTreeMap`, both iterators should produce keys in strictly ascending
/// order, each greater than all keys in the tree, including any keys
/// already in the tree upon entry.
pub fn append_from_sorted_iters<I, A: Allocator + Clone>(
///
/// # Safety
///
/// `alloc` must be the allocator for the owning `BTreeMap`.
pub unsafe fn append_from_sorted_iters<I, A: Allocator + Clone>(
&mut self,
left: I,
right: I,
@@ -29,14 +33,25 @@ impl<K, V> Root<K, V> {
let iter = MergeIter(MergeIterInner::new(left, right));

// Meanwhile, we build a tree from the sorted sequence in linear time.
self.bulk_push(iter, length, alloc)

// SAFETY: The caller has guaranteed that `alloc` is the allocator for the owning
// `BTreeMap`.
unsafe { self.bulk_push(iter, length, alloc) }
}

/// Pushes all key-value pairs to the end of the tree, incrementing a
/// `length` variable along the way. The latter makes it easier for the
/// caller to avoid a leak when the iterator panics.
pub fn bulk_push<I, A: Allocator + Clone>(&mut self, iter: I, length: &mut usize, alloc: A)
where
///
/// # Safety
///
/// `alloc` must be the allocator for the owning `BTreeMap`.
pub unsafe fn bulk_push<I, A: Allocator + Clone>(
&mut self,
iter: I,
length: &mut usize,
alloc: A,
) where
I: Iterator<Item = (K, V)>,
{
let mut cur_node = self.borrow_mut().last_leaf_edge().into_node();
@@ -64,17 +79,24 @@ impl<K, V> Root<K, V> {
}
Err(_) => {
// We are at the top, create a new root node and push there.
open_node = self.push_internal_level(alloc.clone());

// SAFETY: The caller has guaranteed that `alloc` is the allocator for
// the owning `BTreeMap`.
open_node = unsafe { self.push_internal_level(alloc.clone()) };
break;
}
}
}

// Push key-value pair and new right subtree.
let tree_height = open_node.height() - 1;
let mut right_tree = Root::new(alloc.clone());
// SAFETY: The caller has guaranteed that `alloc` is the allocator for the owning
// `BTreeMap`.
let mut right_tree = unsafe { Root::new(alloc.clone()) };
for _ in 0..tree_height {
right_tree.push_internal_level(alloc.clone());
// SAFETY: The caller has guaranteed that `alloc` is the allocator for the
// owning `BTreeMap`.
unsafe { right_tree.push_internal_level(alloc.clone()) };
}
open_node.push(key, value, right_tree);

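
The new `# Safety` sections in this file all encode the same requirement: the allocator passed in must be the one the owning `BTreeMap` will eventually free the nodes with. Below is a small self-contained sketch of that allocate-with-A, deallocate-with-A invariant using the raw `Allocator` API, with `Global` standing in for the map's allocator; the BTree internals themselves are not reproduced here.

```rust
#![feature(allocator_api)]

use std::alloc::{Allocator, Global, Layout};

fn main() {
    let layout = Layout::new::<u64>();

    // Allocate a block with one allocator...
    let block = Global.allocate(layout).expect("allocation failed");

    // ...and return it to the *same* allocator with the same layout.
    // Freeing it through a different allocator would be undefined behaviour,
    // which is exactly the mismatch the `alloc` safety contracts above rule out.
    unsafe { Global.deallocate(block.cast(), layout) };
}
```
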
40 changes: 27 additions & 13 deletions library/alloc/src/collections/btree/map.rs
@@ -216,7 +216,9 @@ impl<K: Clone, V: Clone, A: Allocator + Clone> Clone for BTreeMap<K, V, A> {
match node.force() {
Leaf(leaf) => {
let mut out_tree = BTreeMap {
root: Some(Root::new(alloc.clone())),
// SAFETY: `alloc` is the allocator for both the original and the cloned
// `BTreeMap`.
root: unsafe { Some(Root::new(alloc.clone())) },
length: 0,
alloc: ManuallyDrop::new(alloc),
_marker: PhantomData,
@@ -247,7 +249,9 @@ impl<K: Clone, V: Clone, A: Allocator + Clone> Clone for BTreeMap<K, V, A> {

{
let out_root = out_tree.root.as_mut().unwrap();
let mut out_node = out_root.push_internal_level(alloc.clone());
// SAFETY: `alloc` is the allocator for both the original and the cloned
// `BTreeMap`.
let mut out_node = unsafe { out_root.push_internal_level(alloc.clone()) };
let mut in_edge = internal.first_edge();
while let Ok(kv) = in_edge.right_kv() {
let (k, v) = kv.into_kv();
@@ -269,7 +273,9 @@ impl<K: Clone, V: Clone, A: Allocator + Clone> Clone for BTreeMap<K, V, A> {
out_node.push(
k,
v,
subroot.unwrap_or_else(|| Root::new(alloc.clone())),
// SAFETY: `alloc` is the allocator for both the original and cloned
// `BTreeMap`.
subroot.unwrap_or_else(|| unsafe { Root::new(alloc.clone()) }),
);
out_tree.length += 1 + sublength;
}
@@ -323,8 +329,9 @@ where

fn replace(&mut self, key: K) -> Option<K> {
let (map, dormant_map) = DormantMutRef::new(self);
// SAFETY: `alloc` is the allocator for the `BTreeMap`.
let root_node =
map.root.get_or_insert_with(|| Root::new((*map.alloc).clone())).borrow_mut();
map.root.get_or_insert_with(|| unsafe { Root::new((*map.alloc).clone()) }).borrow_mut();
match root_node.search_tree::<K>(&key) {
Found(mut kv) => Some(mem::replace(kv.key_mut(), key)),
GoDown(handle) => {
@@ -1144,13 +1151,16 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {

let self_iter = mem::replace(self, Self::new_in((*self.alloc).clone())).into_iter();
let other_iter = mem::replace(other, Self::new_in((*self.alloc).clone())).into_iter();
let root = self.root.get_or_insert_with(|| Root::new((*self.alloc).clone()));
root.append_from_sorted_iters(
self_iter,
other_iter,
&mut self.length,
(*self.alloc).clone(),
)
let root = self.root.get_or_insert_with(|| unsafe { Root::new((*self.alloc).clone()) });
// SAFETY: `self.alloc` is the allocator for the `BTreeMap`.
unsafe {
root.append_from_sorted_iters(
self_iter,
other_iter,
&mut self.length,
(*self.alloc).clone(),
)
}
}

/// Constructs a double-ended iterator over a sub-range of elements in the map.
@@ -1464,9 +1474,13 @@ impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
K: Ord,
I: IntoIterator<Item = (K, V)>,
{
let mut root = Root::new(alloc.clone());
// SAFETY: `alloc` is the allocator for the returned `BTreeMap`.
let mut root = unsafe { Root::new(alloc.clone()) };
let mut length = 0;
root.bulk_push(DedupSortedIter::new(iter.into_iter()), &mut length, alloc.clone());
// SAFETY: `alloc` is the allocator for the returned `BTreeMap`.
unsafe {
root.bulk_push(DedupSortedIter::new(iter.into_iter()), &mut length, alloc.clone());
}
BTreeMap { root: Some(root), length, alloc: ManuallyDrop::new(alloc), _marker: PhantomData }
}
}
45 changes: 27 additions & 18 deletions library/alloc/src/collections/btree/map/entry.rs
@@ -342,30 +342,39 @@ impl<'a, K: Ord, V, A: Allocator + Clone> VacantEntry<'a, K, V, A> {
None => {
// SAFETY: There is no tree yet so no reference to it exists.
let map = unsafe { self.dormant_map.awaken() };
let mut root = NodeRef::new_leaf(self.alloc.clone());
// SAFETY: `self.alloc` is the allocator for the owning `BTreeMap`.
let mut root = unsafe { NodeRef::new_leaf(self.alloc.clone()) };
let val_ptr = root.borrow_mut().push(self.key, value) as *mut V;
map.root = Some(root.forget_type());
map.length = 1;
val_ptr
}
Some(handle) => match handle.insert_recursing(self.key, value, self.alloc.clone()) {
(None, val_ptr) => {
// SAFETY: We have consumed self.handle.
let map = unsafe { self.dormant_map.awaken() };
map.length += 1;
val_ptr
Some(handle) => {
// SAFETY: `self.alloc` is the allocator for the owning `BTreeMap`.
let insert_result =
unsafe { handle.insert_recursing(self.key, value, self.alloc.clone()) };
match insert_result {
(None, val_ptr) => {
// SAFETY: We have consumed self.handle.
let map = unsafe { self.dormant_map.awaken() };
map.length += 1;
val_ptr
}
(Some(ins), val_ptr) => {
drop(ins.left);
// SAFETY: We have consumed self.handle and dropped the
// remaining reference to the tree, ins.left.
let map = unsafe { self.dormant_map.awaken() };
let root = map.root.as_mut().unwrap(); // same as ins.left
// SAFETY: `self.alloc` is the allocator for the owning `BTreeMap`.
unsafe {
root.push_internal_level(self.alloc).push(ins.kv.0, ins.kv.1, ins.right)
};
map.length += 1;
val_ptr
}
}
(Some(ins), val_ptr) => {
drop(ins.left);
// SAFETY: We have consumed self.handle and dropped the
// remaining reference to the tree, ins.left.
let map = unsafe { self.dormant_map.awaken() };
let root = map.root.as_mut().unwrap(); // same as ins.left
root.push_internal_level(self.alloc).push(ins.kv.0, ins.kv.1, ins.right);
map.length += 1;
val_ptr
}
},
}
};
// Now that we have finished growing the tree using borrowed references,
// dereference the pointer to a part of it, that we picked up along the way.
9 changes: 8 additions & 1 deletion library/alloc/src/collections/btree/map/tests.rs
@@ -116,7 +116,14 @@ impl<K, V> BTreeMap<K, V> {
{
let iter = mem::take(self).into_iter();
if !iter.is_empty() {
self.root.insert(Root::new(*self.alloc)).bulk_push(iter, &mut self.length, *self.alloc);
// SAFETY: `self.alloc` is the allocator for this `BTreeMap`.
unsafe {
self.root.insert(Root::new(*self.alloc)).bulk_push(
iter,
&mut self.length,
*self.alloc,
);
}
}
}
}