Use NonNull in the allocator API
As discussed in #32838 (comment)
and following, instead of *mut pointers, use NonNull for the allocator
API.
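
As a minimal, self-contained illustration of what NonNull buys over *mut (our example, not part of the commit): the compiler knows the null bit pattern is never used, so Result/Option wrappers around the pointer cost nothing extra.

    use std::mem::size_of;
    use std::ptr::NonNull;

    fn main() {
        // The "null" representation is free to encode None,
        // so the Option adds no space over the raw pointer:
        assert_eq!(size_of::<Option<NonNull<u8>>>(), size_of::<*mut u8>());
    }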

One issue is that older rustc versions, used to bootstrap the compiler,
expand #[global_allocator], used in various places including libstd or
librustc_[almt]san, to code that uses the Alloc trait, so changes to
that trait make bootstrapping fail. Thankfully, they do so through the
location the Alloc trait had before 94d1970, so we can use that to our
advantage by making stage0 expose the old API as alloc::heap::Alloc.

At the same time, we change the expansion for #[global_allocator] to use
the new trait location under core, which will allow newer versions of
rustc to bootstrap stage0 as well, despite the workaround described above.
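
A sketch of what such a stage-dependent re-export could look like (illustrative only: cfg(stage0) is the bootstrap cfg, but the exact module layout and source path here are assumptions, not the commit's code):

    // In liballoc: keep the pre-94d1970 path alive so the stage0
    // compiler's expansion of #[global_allocator] still resolves.
    #[cfg(stage0)]
    pub mod heap {
        pub use core::heap::Alloc; // hypothetical source location
    }
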
glandium committed Apr 4, 2018
1 parent 199b7e2 commit 8f924a3
Showing 20 changed files with 281 additions and 235 deletions.
2 changes: 1 addition & 1 deletion src/dlmalloc
Submodule dlmalloc updated 2 files
+17 −16 src/global.rs
+21 −29 src/lib.rs
@@ -30,15 +30,16 @@ looks like:
 #![feature(global_allocator, allocator_api, heap_api)]
 
 use std::heap::{Alloc, System, Layout, AllocErr};
+use std::ptr::NonNull;
 
 struct MyAllocator;
 
 unsafe impl<'a> Alloc for &'a MyAllocator {
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
         System.alloc(layout)
     }
 
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
         System.dealloc(ptr, layout)
     }
 }
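
For context, a caller of the updated example receives a NonNull<u8> and hands the same value back to dealloc. A hedged sketch against the same 2018-era unstable API (the demo function is ours, not part of the diff):

    unsafe fn demo() -> Result<(), AllocErr> {
        let layout = Layout::new::<[u8; 64]>();
        let mut a = &MyAllocator;
        // A successful alloc is non-null by construction of the type.
        let ptr: NonNull<u8> = a.alloc(layout.clone())?;
        a.dealloc(ptr, layout);
        Ok(())
    }
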
14 changes: 5 additions & 9 deletions src/liballoc/arc.rs
@@ -513,15 +513,13 @@ impl<T: ?Sized> Arc<T> {
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
-        let ptr = self.ptr.as_ptr();
-
         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
         ptr::drop_in_place(&mut self.ptr.as_mut().data);
 
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+            Heap.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
         }
     }
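
The hunk above swaps the `ptr as *mut u8` cast for NonNull::cast and reads the value through as_ref. The same two calls on today's stable NonNull, as a stand-alone sketch (our example):

    use std::ptr::NonNull;

    fn main() {
        let mut value = 5u32;
        let typed: NonNull<u32> = NonNull::from(&mut value);
        // cast() changes the pointee type without leaving NonNull:
        let bytes: NonNull<u8> = typed.cast();
        assert_eq!(bytes.as_ptr() as usize, typed.as_ptr() as usize);
    }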

@@ -559,7 +557,7 @@ impl<T: ?Sized> Arc<T> {
             .unwrap_or_else(|e| Heap.oom(e));
 
         // Initialize the real ArcInner
-        let inner = set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>;
+        let inner = set_data_ptr(ptr as *mut T, mem.as_ptr()) as *mut ArcInner<T>;
 
         ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
         ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
@@ -626,7 +624,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
         // In the event of a panic, elements that have been written
         // into the new ArcInner will be dropped, then the memory freed.
         struct Guard<T> {
-            mem: *mut u8,
+            mem: NonNull<u8>,
             elems: *mut T,
             layout: Layout,
             n_elems: usize,
@@ -656,7 +654,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
             let elems = &mut (*ptr).data as *mut [T] as *mut T;
 
             let mut guard = Guard{
-                mem: mem,
+                mem: NonNull::new_unchecked(mem),
                 elems: elems,
                 layout: layout,
                 n_elems: 0,
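
NonNull::new_unchecked is justified in the hunk above because `mem` comes from an allocation that already succeeded. When nullness is not guaranteed, the checked constructor is the safer pattern (a generic sketch, not from this commit):

    use std::ptr::NonNull;

    fn wrap(ptr: *mut u8) -> NonNull<u8> {
        // NonNull::new returns None for null instead of risking UB:
        NonNull::new(ptr).expect("unexpected null pointer")
    }
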
@@ -1148,8 +1146,6 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// assert!(other_weak_foo.upgrade().is_none());
     /// ```
     fn drop(&mut self) {
-        let ptr = self.ptr.as_ptr();
-
         // If we find out that we were the last weak pointer, then its time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
         // the memory orderings
@@ -1161,7 +1157,7 @@ impl<T: ?Sized> Drop for Weak<T> {
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe {
-                Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+                Heap.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
             }
         }
     }
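
Layout::for_value reads size and alignment through the reference, which is what lets these call sites handle `T: ?Sized`. A stand-alone sketch with a slice (our example, on today's stable std):

    use std::alloc::Layout;

    fn main() {
        let slice: &[u32] = &[1, 2, 3];
        // The size comes from the fat pointer's length: 3 * 4 bytes.
        let layout = Layout::for_value(slice);
        assert_eq!(layout.size(), 12);
        assert_eq!(layout.align(), 4);
    }
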
16 changes: 8 additions & 8 deletions src/liballoc/btree/node.rs
@@ -237,7 +237,7 @@ impl<K, V> Root<K, V> {
     pub fn pop_level(&mut self) {
         debug_assert!(self.height > 0);
 
-        let top = self.node.ptr.as_ptr() as *mut u8;
+        let top = self.node.ptr;
 
         self.node = unsafe {
             BoxedNode::from_ptr(self.as_mut()
@@ -250,7 +250,7 @@ impl<K, V> Root<K, V> {
         self.as_mut().as_leaf_mut().parent = ptr::null();
 
         unsafe {
-            Heap.dealloc(top, Layout::new::<InternalNode<K, V>>());
+            Heap.dealloc(NonNull::from(top).cast(), Layout::new::<InternalNode<K, V>>());
         }
     }
 }
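
NonNull::from(top).cast() goes from the typed node pointer straight to NonNull<u8> for dealloc. The same shape from a reference source, as a stand-alone sketch (our example):

    use std::ptr::NonNull;

    fn erase<T>(value: &mut T) -> NonNull<u8> {
        // Typed pointer in, untyped non-null byte pointer out.
        NonNull::from(value).cast()
    }

    fn main() {
        let mut x = 42u64;
        let p = erase(&mut x);
        assert_eq!(p.as_ptr() as usize, &mut x as *mut u64 as usize);
    }
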
@@ -434,9 +434,9 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
             marker::Edge
         >
     > {
-        let ptr = self.as_leaf() as *const LeafNode<K, V> as *const u8 as *mut u8;
+        let node = self.node;
         let ret = self.ascend().ok();
-        Heap.dealloc(ptr, Layout::new::<LeafNode<K, V>>());
+        Heap.dealloc(node.cast(), Layout::new::<LeafNode<K, V>>());
         ret
     }
 }
@@ -455,9 +455,9 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
             marker::Edge
         >
     > {
-        let ptr = self.as_internal() as *const InternalNode<K, V> as *const u8 as *mut u8;
+        let node = self.node;
         let ret = self.ascend().ok();
-        Heap.dealloc(ptr, Layout::new::<InternalNode<K, V>>());
+        Heap.dealloc(node.cast(), Layout::new::<InternalNode<K, V>>());
         ret
     }
 }
@@ -1240,12 +1240,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
             }
 
             Heap.dealloc(
-                right_node.node.as_ptr() as *mut u8,
+                right_node.node.cast(),
                 Layout::new::<InternalNode<K, V>>(),
             );
         } else {
             Heap.dealloc(
-                right_node.node.as_ptr() as *mut u8,
+                right_node.node.cast(),
                 Layout::new::<LeafNode<K, V>>(),
             );
         }