Add GlobalAlloc trait + tweaks for initial stabilization #49669

Merged · 27 commits · Apr 13, 2018
Commits
1569f8f
Inline docs for the heap module’s reexports
SimonSapin Apr 2, 2018
9b06886
Add a core::heap::Void extern type.
SimonSapin Apr 2, 2018
c660ced
Add a GlobalAlloc trait
SimonSapin Apr 3, 2018
1b895d8
Import the `alloc` crate as `alloc_crate` in std
SimonSapin Apr 3, 2018
09e8db1
Rename `heap` modules in the core, alloc, and std crates to `alloc`
SimonSapin Apr 3, 2018
743c29b
Actually deprecate heap modules.
SimonSapin Apr 3, 2018
88ebd2d
Rename the Heap type to Global
SimonSapin Apr 3, 2018
e521b8b
Actually deprecate the Heap type
SimonSapin Apr 3, 2018
5e5a0c2
Separate alloc::heap::Alloc trait for stage0 #[global_allocator]
SimonSapin Apr 3, 2018
a4caac5
Update to most recent version of dlmalloc
alexcrichton Apr 9, 2018
ba7081a
Make AllocErr a zero-size unit struct
SimonSapin Apr 3, 2018
eb69593
Implement GlobalAlloc for System
SimonSapin Apr 3, 2018
86753ce
Use the GlobalAlloc trait for #[global_allocator]
SimonSapin Apr 3, 2018
157ff8c
Remove the now-unit-struct AllocErr parameter of oom()
SimonSapin Apr 3, 2018
93a9ad4
Remove the now-unit-struct AllocErr field inside CollectionAllocErr
SimonSapin Apr 3, 2018
f9c96d7
Add FIXME comments for Void::null_mut usage
SimonSapin Apr 4, 2018
b017742
Return Result instead of Option in alloc::Layout constructors
SimonSapin Apr 4, 2018
c957e99
realloc with a new size only, not a full new layout.
SimonSapin Apr 4, 2018
747cc74
Conversions between Result<*mut u8, AllocErr> and *mut Void
SimonSapin Apr 4, 2018
c033f1f
Move platform-specific OOM handling to functions
SimonSapin Apr 4, 2018
96c9d22
Remove `impl Alloc for &'a System`
SimonSapin Apr 4, 2018
eae0d46
Restore Global.oom() functionality
SimonSapin Apr 4, 2018
fd242ee
impl GlobalAlloc for Global
SimonSapin Apr 4, 2018
fddf51e
Use NonNull<Void> instead of *mut u8 in the Alloc trait
glandium Apr 2, 2018
ed29777
Remove conversions for allocated pointers
SimonSapin Apr 11, 2018
f607a38
Rename alloc::Void to alloc::Opaque
SimonSapin Apr 11, 2018
c5ffdd7
Initial docs for the GlobalAlloc trait
SimonSapin Apr 11, 2018
3 changes: 0 additions & 3 deletions src/Cargo.lock


2 changes: 1 addition & 1 deletion src/dlmalloc
Submodule dlmalloc updated 4 files
+1 −0 Cargo.toml
+4 −4 src/global.rs
+70 −96 src/lib.rs
+1 −1 tests/smoke.rs
2 changes: 1 addition & 1 deletion src/doc/nomicon
@@ -29,16 +29,17 @@ looks like:
 ```rust
 #![feature(global_allocator, allocator_api, heap_api)]

-use std::heap::{Alloc, System, Layout, AllocErr};
+use std::alloc::{GlobalAlloc, System, Layout, Opaque};
+use std::ptr::NonNull;

 struct MyAllocator;

-unsafe impl<'a> Alloc for &'a MyAllocator {
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+unsafe impl GlobalAlloc for MyAllocator {
+    unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
         System.alloc(layout)
     }

-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+    unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
         System.dealloc(ptr, layout)
     }
 }
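For reference, a minimal sketch of how the allocator above would be registered after this change; the `#[global_allocator]` attribute itself is unchanged by this PR, only the trait it requires moves from `Alloc for &'a T` to `GlobalAlloc for T`. The `GLOBAL` name and the `main` body are illustrative, not part of the diff:

```rust
// Hypothetical registration of the MyAllocator type from the example
// above; GLOBAL and main are illustrative, not part of the diff.
#[global_allocator]
static GLOBAL: MyAllocator = MyAllocator;

fn main() {
    // Every heap allocation in this program, including the Vec buffer
    // below, now goes through MyAllocator (which forwards to System).
    let v = vec![1, 2, 3];
    assert_eq!(v.len(), 3);
}
```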
215 changes: 215 additions & 0 deletions src/liballoc/alloc.rs
@@ -0,0 +1,215 @@
// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![unstable(feature = "allocator_api",
            reason = "the precise API and guarantees it provides may be tweaked \
                      slightly, especially to possibly take into account the \
                      types being stored to make room for a future \
                      tracing garbage collector",
            issue = "32838")]

use core::intrinsics::{min_align_of_val, size_of_val};
use core::ptr::NonNull;
use core::usize;

#[doc(inline)]
pub use core::alloc::*;

#[cfg(stage0)]
extern "Rust" {
    #[allocator]
    #[rustc_allocator_nounwind]
    fn __rust_alloc(size: usize, align: usize, err: *mut u8) -> *mut u8;
    #[cold]
    #[rustc_allocator_nounwind]
    fn __rust_oom(err: *const u8) -> !;
    #[rustc_allocator_nounwind]
    fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
    #[rustc_allocator_nounwind]
    fn __rust_realloc(ptr: *mut u8,
                      old_size: usize,
                      old_align: usize,
                      new_size: usize,
                      new_align: usize,
                      err: *mut u8) -> *mut u8;
    #[rustc_allocator_nounwind]
    fn __rust_alloc_zeroed(size: usize, align: usize, err: *mut u8) -> *mut u8;
}

#[cfg(not(stage0))]
extern "Rust" {
    #[allocator]
    #[rustc_allocator_nounwind]
    fn __rust_alloc(size: usize, align: usize) -> *mut u8;
    #[cold]
    #[rustc_allocator_nounwind]
    fn __rust_oom() -> !;
    #[rustc_allocator_nounwind]
    fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
    #[rustc_allocator_nounwind]
    fn __rust_realloc(ptr: *mut u8,
                      old_size: usize,
                      align: usize,
                      new_size: usize) -> *mut u8;
    #[rustc_allocator_nounwind]
    fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;
}

#[derive(Copy, Clone, Default, Debug)]
pub struct Global;

#[unstable(feature = "allocator_api", issue = "32838")]
#[rustc_deprecated(since = "1.27.0", reason = "type renamed to `Global`")]
pub type Heap = Global;

#[unstable(feature = "allocator_api", issue = "32838")]
#[rustc_deprecated(since = "1.27.0", reason = "type renamed to `Global`")]
#[allow(non_upper_case_globals)]
pub const Heap: Global = Global;

unsafe impl GlobalAlloc for Global {
    #[inline]
    unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
        #[cfg(not(stage0))]
        let ptr = __rust_alloc(layout.size(), layout.align());
        #[cfg(stage0)]
        let ptr = __rust_alloc(layout.size(), layout.align(), &mut 0);
        ptr as *mut Opaque
    }

    #[inline]
    unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
        __rust_dealloc(ptr as *mut u8, layout.size(), layout.align())
    }

    #[inline]
    unsafe fn realloc(&self, ptr: *mut Opaque, layout: Layout, new_size: usize) -> *mut Opaque {
        #[cfg(not(stage0))]
        let ptr = __rust_realloc(ptr as *mut u8, layout.size(), layout.align(), new_size);
        #[cfg(stage0)]
        let ptr = __rust_realloc(ptr as *mut u8, layout.size(), layout.align(),
                                 new_size, layout.align(), &mut 0);
        ptr as *mut Opaque
    }

    #[inline]
    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque {
        #[cfg(not(stage0))]
        let ptr = __rust_alloc_zeroed(layout.size(), layout.align());
        #[cfg(stage0)]
        let ptr = __rust_alloc_zeroed(layout.size(), layout.align(), &mut 0);
        ptr as *mut Opaque
    }

    #[inline]
    fn oom(&self) -> ! {
        unsafe {
            #[cfg(not(stage0))]
            __rust_oom();
            #[cfg(stage0)]
            __rust_oom(&mut 0);
        }
    }
}

unsafe impl Alloc for Global {
    #[inline]
    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
        NonNull::new(GlobalAlloc::alloc(self, layout)).ok_or(AllocErr)
    }

    #[inline]
    unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
        GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
    }

    #[inline]
    unsafe fn realloc(&mut self,
                      ptr: NonNull<Opaque>,
                      layout: Layout,
                      new_size: usize)
                      -> Result<NonNull<Opaque>, AllocErr>
    {
        NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
    }

    #[inline]
    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
        NonNull::new(GlobalAlloc::alloc_zeroed(self, layout)).ok_or(AllocErr)
    }

    #[inline]
    fn oom(&mut self) -> ! {
        GlobalAlloc::oom(self)
    }
}

/// The allocator for unique pointers.
// This function must not unwind. If it does, MIR trans will fail.
#[cfg(not(test))]
#[lang = "exchange_malloc"]
#[inline]
unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
    if size == 0 {
        align as *mut u8
    } else {
        let layout = Layout::from_size_align_unchecked(size, align);
        let ptr = Global.alloc(layout);
        if !ptr.is_null() {
            ptr as *mut u8
        } else {
            Global.oom()
        }
    }
}

#[cfg_attr(not(test), lang = "box_free")]
#[inline]
pub(crate) unsafe fn box_free<T: ?Sized>(ptr: *mut T) {
    let size = size_of_val(&*ptr);
    let align = min_align_of_val(&*ptr);
    // We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
    if size != 0 {
        let layout = Layout::from_size_align_unchecked(size, align);
        Global.dealloc(ptr as *mut Opaque, layout);
    }
}

#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use boxed::Box;
    use alloc::{Global, Alloc, Layout};

    #[test]
    fn allocate_zeroed() {
        unsafe {
            let layout = Layout::from_size_align(1024, 1).unwrap();
            let ptr = Global.alloc_zeroed(layout.clone())
                .unwrap_or_else(|_| Global.oom());

            let mut i = ptr.cast::<u8>().as_ptr();
            let end = i.offset(layout.size() as isize);
            while i < end {
                assert_eq!(*i, 0);
                i = i.offset(1);
            }
            Global.dealloc(ptr, layout);
        }
    }

    #[bench]
    fn alloc_owned_small(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = box 10;
        })
    }
}
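For orientation, a minimal sketch of driving the `Alloc for Global` impl above from user code, mirroring the `allocate_zeroed` test; it assumes the renamed `std::alloc` module reexports `Alloc`, `Global`, and `Layout` as of this PR, and needs a contemporary nightly:

```rust
#![feature(allocator_api)]

use std::alloc::{Alloc, Global, Layout};

fn main() {
    unsafe {
        let layout = Layout::from_size_align(64, 8).unwrap();
        // Alloc::alloc returns Result<NonNull<Opaque>, AllocErr>, so failure
        // is an explicit Err rather than a raw null pointer.
        let ptr = Global.alloc(layout).unwrap_or_else(|_| Global.oom());
        Global.dealloc(ptr, layout);
    }
}
```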
23 changes: 9 additions & 14 deletions src/liballoc/arc.rs
@@ -21,7 +21,6 @@ use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst}
 use core::borrow;
 use core::fmt;
 use core::cmp::Ordering;
-use core::heap::{Alloc, Layout};
 use core::intrinsics::abort;
 use core::mem::{self, align_of_val, size_of_val, uninitialized};
 use core::ops::Deref;
@@ -32,7 +31,7 @@ use core::hash::{Hash, Hasher};
 use core::{isize, usize};
 use core::convert::From;

-use heap::{Heap, box_free};
+use alloc::{Global, Alloc, Layout, box_free};
 use boxed::Box;
 use string::String;
 use vec::Vec;
@@ -513,15 +512,13 @@ impl<T: ?Sized> Arc<T> {
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
-        let ptr = self.ptr.as_ptr();
-
         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
         ptr::drop_in_place(&mut self.ptr.as_mut().data);

         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+            Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()))
         }
     }

@@ -555,11 +552,11 @@

         let layout = Layout::for_value(&*fake_ptr);

-        let mem = Heap.alloc(layout)
-            .unwrap_or_else(|e| Heap.oom(e));
+        let mem = Global.alloc(layout)
+            .unwrap_or_else(|_| Global.oom());

         // Initialize the real ArcInner
-        let inner = set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>;
+        let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut ArcInner<T>;

         ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
         ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
@@ -626,7 +623,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
         // In the event of a panic, elements that have been written
         // into the new ArcInner will be dropped, then the memory freed.
         struct Guard<T> {
-            mem: *mut u8,
+            mem: NonNull<u8>,
             elems: *mut T,
             layout: Layout,
             n_elems: usize,
@@ -640,7 +637,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
                     let slice = from_raw_parts_mut(self.elems, self.n_elems);
                     ptr::drop_in_place(slice);

-                    Heap.dealloc(self.mem, self.layout.clone());
+                    Global.dealloc(self.mem.as_opaque(), self.layout.clone());
                 }
             }
         }
@@ -656,7 +653,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
             let elems = &mut (*ptr).data as *mut [T] as *mut T;

             let mut guard = Guard{
-                mem: mem,
+                mem: NonNull::new_unchecked(mem),
                 elems: elems,
                 layout: layout,
                 n_elems: 0,
@@ -1148,8 +1145,6 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// assert!(other_weak_foo.upgrade().is_none());
     /// ```
     fn drop(&mut self) {
-        let ptr = self.ptr.as_ptr();
-
         // If we find out that we were the last weak pointer, then its time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
         // the memory orderings
@@ -1161,7 +1156,7 @@ impl<T: ?Sized> Drop for Weak<T> {
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe {
-                Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+                Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()))
             }
         }
     }
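The same migration pattern recurs throughout this file, so a condensed sketch may help; it assumes this PR's unstable `NonNull::as_opaque` helper that the diff relies on, plus the `std::alloc` reexports:

```rust
#![feature(allocator_api)]

use std::alloc::{Alloc, Global, Layout};
use std::ptr::NonNull;

// Condensed form of the dealloc pattern in drop_slow and Weak::drop above:
// recompute the layout from the live value, then hand the allocation back
// to the global allocator as an untyped NonNull<Opaque>.
unsafe fn dealloc_value<T: ?Sized>(ptr: NonNull<T>) {
    let layout = Layout::for_value(ptr.as_ref());
    Global.dealloc(ptr.as_opaque(), layout);
}
```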