Rework system allocator so we can use a static instance #151

Merged 1 commit on May 20, 2024
14 changes: 12 additions & 2 deletions .github/workflows/ci.yml
@@ -32,6 +32,16 @@ jobs:
- run: cargo test --doc --features test
if: matrix.rust == 'stable'

loom:
runs-on: ubuntu-latest
needs: features
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@stable
- run: cargo test -p musli --release -- tests::loom
env:
RUSTFLAGS: --cfg loom

unexpected_lints:
runs-on: ubuntu-latest
steps:
@@ -40,7 +50,7 @@ jobs:
- run: sed -i '/#!\[allow(unexpected_cfgs)\]/d' crates/*/src/lib.rs tests/src/lib.rs
- run: cargo check --all-features --all-targets
env:
RUSTFLAGS: -D warnings --check-cfg=cfg(rune_nightly,doc_cfg) -F unexpected_cfgs
RUSTFLAGS: -D warnings --check-cfg=cfg(loom,rune_nightly,doc_cfg) -F unexpected_cfgs

tests_clippy:
runs-on: ubuntu-latest
@@ -64,7 +74,7 @@ jobs:
- run: cargo +nightly run -p no-std --example ${{matrix.example}}

fuzz:
needs: [test, tests_clippy, features, recursive]
needs: [test, loom, tests_clippy, features, recursive]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
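The new `loom` job runs only the loom-gated tests in the `musli` crate, with `RUSTFLAGS: --cfg loom` enabling both the `loom` dependency and the alternate code paths below. Those tests are not included in this diff; the following is only a rough sketch of the shape such a test might take, and its name and body are assumptions rather than code from this PR:

```
// Hypothetical loom test exercising the allocator from two threads; the real
// tests live under `tests::loom` in the musli crate and are not shown here.
#[cfg(loom)]
#[test]
fn concurrent_alloc() {
    use loom::sync::Arc;
    use loom::thread;

    use musli::allocator::System;
    use musli::{Allocator, Buf};

    // Explore the possible interleavings of the two threads.
    loom::model(|| {
        let allocator = Arc::new(System::new());
        let other = allocator.clone();

        let handle = thread::spawn(move || {
            let mut buf = other.alloc().expect("allocation failed");
            assert!(buf.write(b"thread"));
        });

        let mut buf = allocator.alloc().expect("allocation failed");
        assert!(buf.write(b"main"));
        drop(buf);

        handle.join().unwrap();
    });
}
```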
4 changes: 2 additions & 2 deletions crates/musli-zerocopy/src/pointer/ref.rs
@@ -758,9 +758,9 @@ where
}

#[cfg(test)]
pub(crate) fn cast<U: ?Sized>(self) -> Ref<U, E, O>
pub(crate) fn cast<U>(self) -> Ref<U, E, O>
where
U: Pointee<Metadata = T::Metadata>,
U: ?Sized + Pointee<Metadata = T::Metadata>,
{
Ref {
offset: self.offset,
3 changes: 3 additions & 0 deletions crates/musli/Cargo.toml
@@ -42,6 +42,9 @@ itoa = { version = "1.0.10", optional = true }
ryu = { version = "1.0.17", optional = true }
serde = { version = "1.0.198", optional = true, default-features = false}

[target.'cfg(loom)'.dependencies]
loom = "0.7.2"

[dev-dependencies]
musli = { path = ".", features = ["test"] }
tests = { path = "../../tests" }
17 changes: 16 additions & 1 deletion crates/musli/src/allocator/mod.rs
@@ -50,6 +50,10 @@ mod system;
#[cfg_attr(doc_cfg, doc(cfg(feature = "alloc")))]
pub use self::system::System;

/// The static system allocator instance.
#[cfg(all(feature = "alloc", not(loom)))]
pub static SYSTEM: System = System::new();

mod disabled;
pub use self::disabled::Disabled;

@@ -109,12 +113,23 @@ macro_rules! __default {
#[doc(inline)]
pub use __default as default;

#[cfg(feature = "alloc")]
#[cfg(all(feature = "alloc", not(loom)))]
#[macro_export]
#[doc(hidden)]
macro_rules! __default_allocator_impl {
(|$alloc:ident| $body:block) => {{
let $alloc = &$crate::allocator::SYSTEM;
$body
}};
}

#[cfg(all(feature = "alloc", loom))]
#[macro_export]
#[doc(hidden)]
macro_rules! __default_allocator_impl {
(|$alloc:ident| $body:block) => {{
let $alloc = $crate::allocator::System::new();
let $alloc = &$alloc;
$body
}};
}
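With the macro change above, non-loom builds that enable the `alloc` feature hand out a reference to the shared `SYSTEM` static as the default allocator, while loom builds still construct a local `System` (presumably because loom's primitives cannot be created in a `const` context). A minimal usage sketch of the new static, mirroring the `System` example further down in this diff rather than code taken from the PR:

```
use musli::allocator::SYSTEM;
use musli::{Allocator, Buf};

fn main() {
    // The static instance added by this PR; available with the `alloc`
    // feature when not building under `--cfg loom`.
    let mut buf = SYSTEM.alloc().expect("allocation failed");
    assert!(buf.write(b"Hello, world!"));
    assert_eq!(buf.as_slice(), b"Hello, world!");
}
```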
218 changes: 176 additions & 42 deletions crates/musli/src/allocator/system.rs
@@ -1,32 +1,137 @@
use core::cell::UnsafeCell;
use core::fmt::{self, Arguments};
use core::ptr::NonNull;

use alloc::boxed::Box;
use alloc::vec::Vec;

use crate::buf::Error;
use crate::loom::cell::UnsafeCell;
use crate::loom::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use crate::loom::sync::with_mut_usize;
use crate::{Allocator, Buf};

/// The initial capacity of an allocated region.
#[cfg(not(loom))]
const INITIAL_CAPACITY: usize = 128;
#[cfg(loom)]
const INITIAL_CAPACITY: usize = 4;
/// The max capacity of an allocated region as it's being handed back.
#[cfg(not(loom))]
const MAX_CAPACITY: usize = 4096;
#[cfg(loom)]
const MAX_CAPACITY: usize = 12;
/// The maximum number of regions we hold onto to avoid leaking too much memory.
#[cfg(not(loom))]
const MAX_REGIONS: usize = 64;
#[cfg(loom)]
const MAX_REGIONS: usize = 2;

/// System buffer that can be used in combination with an [`Allocator`].
///
/// This uses the [`System` allocator].
///
/// [`System` allocator]: https://doc.rust-lang.org/std/alloc/struct.System.html
///
/// # Examples
///
/// ```
/// use musli::allocator::System;
/// use musli::{Allocator, Buf};
///
/// let allocator = System::new();
///
/// let mut buf1 = allocator.alloc().expect("allocation failed");
/// let mut buf2 = allocator.alloc().expect("allocation failed");
///
/// assert!(buf1.write(b"Hello, "));
/// assert!(buf2.write(b"world!"));
///
/// assert_eq!(buf1.as_slice(), b"Hello, ");
/// assert_eq!(buf2.as_slice(), b"world!");
///
/// buf1.write_buffer(buf2);
/// assert_eq!(buf1.as_slice(), b"Hello, world!");
/// ```
pub struct System {
internal: UnsafeCell<Internal>,
root: Root,
}

impl System {
/// Construct a new allocator.
#[inline]
#[cfg(not(loom))]
pub const fn new() -> Self {
Self {
internal: UnsafeCell::new(Internal { head: None }),
root: Root {
locked: AtomicBool::new(false),
head: UnsafeCell::new(None),
regions: AtomicUsize::new(0),
},
}
}

/// Construct a new allocator.
#[cfg(loom)]
pub fn new() -> Self {
Self {
root: Root {
locked: AtomicBool::new(false),
head: UnsafeCell::new(None),
regions: AtomicUsize::new(0),
},
}
}
}

impl Root {
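/// Busy-wait until the lock flag reads as unlocked.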
fn spin(&self) {
while self.locked.load(Ordering::Relaxed) {
crate::loom::spin_loop();
}
}

fn lock(&self) -> Guard<'_> {
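// Retry the spin + compare-exchange until this thread wins the lock; the
// returned guard releases it again on drop.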
loop {
self.spin();

match self
.locked
.compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed)
{
Ok(_) => break,
Err(_) => continue,
}
}

Guard { root: self }
}

fn unlock(&self) {
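// Release the lock flag so other threads can acquire it.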
self.locked.store(false, Ordering::SeqCst);
}
}

struct Guard<'a> {
root: &'a Root,
}

impl Guard<'_> {
#[inline]
fn with_mut<'this, O>(
&'this mut self,
f: impl FnOnce(&'this mut Option<NonNull<Region>>) -> O,
) -> O {
// SAFETY: We have access to the inner root under a lock guard.
self.root.head.with_mut(|inner| f(unsafe { &mut *inner }))
}
}

impl Drop for Guard<'_> {
fn drop(&mut self) {
self.root.unlock();
}
}

impl Default for System {
#[inline]
fn default() -> Self {
@@ -39,32 +144,35 @@ impl Allocator for System {

#[inline(always)]
fn alloc(&self) -> Option<Self::Buf<'_>> {
let region = self.root.alloc();

Some(SystemBuf {
region: Internal::alloc(&self.internal),
internal: &self.internal,
root: &self.root,
region,
})
}
}

impl Drop for System {
fn drop(&mut self) {
let internal = unsafe { &mut *self.internal.get() };
let mut current = self.root.lock().with_mut(|current| current.take());

while let Some(mut head) = internal.head.take() {
// SAFETY: This collection has exclusive access to any heads it
// contain.
unsafe {
internal.head = head.as_mut().next.take();
drop(Box::from_raw(head.as_ptr()));
}
with_mut_usize(&mut self.root.regions, |v| *v = 0);

while let Some(this) = current.take() {
// SAFETY: While the system allocator is being dropped we hold a
// mutable reference to it, which ensures exclusive access to all
// allocated regions.
let b = unsafe { Box::from_raw(this.as_ptr()) };
current = b.next;
}
}
}

/// A vector-backed allocation.
pub struct SystemBuf<'a> {
root: &'a Root,
region: &'a mut Region,
internal: &'a UnsafeCell<Internal>,
}

impl<'a> Buf for SystemBuf<'a> {
@@ -100,55 +208,81 @@ impl fmt::Write for SystemBuf<'_> {

impl<'a> Drop for SystemBuf<'a> {
fn drop(&mut self) {
Internal::free(self.internal, self.region);
self.root.free(self.region);
}
}

/// An allocated region.
#[repr(C)]
struct Region {
data: Vec<u8>,
// Pointer to the next free region.
next: Option<NonNull<Region>>,
}

/// Internals of the allocator.
struct Internal {
// Regions of re-usable allocations we can hand out.
head: Option<NonNull<Region>>,
struct Root {
/// Spin lock used for root.
locked: AtomicBool,
/// Regions of re-usable allocations we can hand out.
head: UnsafeCell<Option<NonNull<Region>>>,
/// The number of allocated regions.
regions: AtomicUsize,
}

impl Internal {
unsafe impl Send for Root {}
unsafe impl Sync for Root {}

impl Root {
/// Allocate a new region.
///
/// Note that this will return a leaked memory region, so the unbound
/// lifetime is intentional.
fn alloc<'a>(this: &UnsafeCell<Self>) -> &'a mut Region {
// SAFETY: We take care to only access internals in a single-threaded
// mutable fashion.
let internal = unsafe { &mut *this.get() };
///
/// Clippy note: We know that we are correctly returning mutable references
/// to different mutable regions.
#[allow(clippy::mut_from_ref)]
fn alloc(&self) -> &mut Region {
self.regions.fetch_add(1, Ordering::SeqCst);

if let Some(mut head) = internal.head.take() {
// SAFETY: This collection has exclusive access to any heads it contain.
unsafe {
let head = head.as_mut();
internal.head = head.next.take();
head
}
} else {
Box::leak(Box::new(Region {
data: Vec::new(),
next: None,
}))
// SAFETY: We have exclusive access to all regions.
let this = self.lock().with_mut(|current| unsafe {
let mut this = current.take()?;
let this = this.as_mut();
*current = this.next.take();
Some(this)
});

if let Some(this) = this {
return this;
}

Box::leak(Box::new(Region {
data: Vec::with_capacity(INITIAL_CAPACITY),
next: None,
}))
}

fn free(this: &UnsafeCell<Self>, region: &mut Region) {
unsafe {
let this = &mut *this.get();
region.data.clear();
region.next = this.head;
this.head = Some(NonNull::from(region));
fn free<'a>(&'a self, region: &'a mut Region) {
let regions = self.regions.fetch_sub(1, Ordering::SeqCst);

if regions >= MAX_REGIONS {
// SAFETY: We have exclusive access to the region, which we
// previously leaked.
unsafe {
drop(Box::from_raw(region));
}

return;
}

region.data.clear();
region.data.shrink_to(MAX_CAPACITY);

let mut current = self.lock();

current.with_mut(|current| {
region.next = current.take();
*current = Some(NonNull::from(region));
});
}
}
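The rewritten allocator pulls `UnsafeCell`, the atomics, `with_mut_usize`, and `spin_loop` from a `crate::loom` shim module that is not part of the hunks shown here; under `--cfg loom` those names presumably map to loom's checked primitives, and otherwise to plain `core` equivalents. Purely as an illustration, here is a sketch of what the non-loom side of such a shim could look like, with every item inferred from the call sites above and therefore an assumption rather than code from this PR:

```
// Hypothetical non-loom half of a `crate::loom` shim; names and signatures
// are inferred from how the new allocator uses them.
#[cfg(not(loom))]
pub(crate) mod loom {
    pub(crate) use core::hint::spin_loop;

    pub(crate) mod sync {
        pub(crate) mod atomic {
            pub(crate) use core::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
        }

        /// Stand-in for loom's mutable access to an atomic counter.
        pub(crate) fn with_mut_usize<O>(
            atomic: &mut core::sync::atomic::AtomicUsize,
            f: impl FnOnce(&mut usize) -> O,
        ) -> O {
            f(atomic.get_mut())
        }
    }

    pub(crate) mod cell {
        /// Thin wrapper exposing loom's `with_mut` API over `core`'s cell.
        pub(crate) struct UnsafeCell<T>(core::cell::UnsafeCell<T>);

        impl<T> UnsafeCell<T> {
            pub(crate) const fn new(value: T) -> Self {
                Self(core::cell::UnsafeCell::new(value))
            }

            pub(crate) fn with_mut<O>(&self, f: impl FnOnce(*mut T) -> O) -> O {
                f(self.0.get())
            }
        }
    }
}
```

The loom half would instead re-export loom's own cell and sync primitives and provide an equivalent `with_mut_usize` built on loom's mutable-access hooks.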