Tighten the bounds on atomic Ordering in std::sys::unix::weak
thomcc committed Nov 14, 2020
1 parent 89bce3e commit 55d7f73
Showing 1 changed file with 40 additions and 6 deletions.
46 changes: 40 additions & 6 deletions library/std/src/sys/unix/weak.rs
@@ -24,7 +24,7 @@
 use crate::ffi::CStr;
 use crate::marker;
 use crate::mem;
-use crate::sync::atomic::{AtomicUsize, Ordering};
+use crate::sync::atomic::{self, AtomicUsize, Ordering};
 
 macro_rules! weak {
     (fn $name:ident($($t:ty),*) -> $ret:ty) => (
@@ -47,15 +47,49 @@ impl<F> Weak<F> {
     pub fn get(&self) -> Option<F> {
         assert_eq!(mem::size_of::<F>(), mem::size_of::<usize>());
         unsafe {
-            if self.addr.load(Ordering::SeqCst) == 1 {
-                self.addr.store(fetch(self.name), Ordering::SeqCst);
-            }
-            match self.addr.load(Ordering::SeqCst) {
+            // Relaxed is fine here because we fence before reading through the
+            // pointer (see the comment below).
+            match self.addr.load(Ordering::Relaxed) {
+                1 => self.initialize(),
                 0 => None,
-                addr => Some(mem::transmute_copy::<usize, F>(&addr)),
+                addr => {
+                    let func = mem::transmute_copy::<usize, F>(&addr);
+                    // The caller is presumably going to read through this value
+                    // (by calling the function we've dlsymed). This means we'd
+                    // need to have loaded it with at least C11's consume
+                    // ordering in order to be guaranteed that the data we read
+                    // from the pointer isn't from before the pointer was
+                    // stored. Rust has no equivalent to memory_order_consume,
+                    // so we use an acquire fence (sorry, ARM).
+                    //
+                    // Now, in practice this likely isn't needed even on CPUs
+                    // where relaxed and consume mean different things. The
+                    // symbols we're loading are probably present (or not) at
+                    // init, and even if they aren't the runtime dynamic loader
+                    // is extremely likely to have sufficient barriers internally
+                    // (possibly implicitly, for example the ones provided by
+                    // invoking `mprotect`).
+                    //
+                    // That said, none of that's *guaranteed*, and so we fence.
+                    atomic::fence(Ordering::Acquire);
+                    Some(func)
+                }
             }
         }
     }
 
+
+    // Cold because it should only happen during first-time initialization.
+    #[cold]
+    unsafe fn initialize(&self) -> Option<F> {
+        let val = fetch(self.name);
+        // This synchronizes with the acquire fence in `get`.
+        self.addr.store(val, Ordering::Release);
+
+        match val {
+            0 => None,
+            addr => Some(mem::transmute_copy::<usize, F>(&addr)),
+        }
+    }
 }
 
 unsafe fn fetch(name: &str) -> usize {
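As a companion to the long comment in `get`, here is a minimal, self-contained sketch (not part of the commit) of the ordering pattern the new code relies on: the initializer publishes the looked-up address with a Release store, and readers pair a Relaxed load with an Acquire fence before the address is used. The names `WeakCache` and `fake_lookup` are illustrative stand-ins for `Weak` and `fetch`/`dlsym`.

use std::sync::atomic::{self, AtomicUsize, Ordering};

struct WeakCache {
    // 1 = not yet looked up, 0 = symbol absent, anything else = address.
    addr: AtomicUsize,
}

impl WeakCache {
    const fn new() -> Self {
        WeakCache { addr: AtomicUsize::new(1) }
    }

    fn get(&self) -> Option<usize> {
        // Relaxed is enough for the load itself; the fence below orders any
        // reads made through the returned address after this load.
        match self.addr.load(Ordering::Relaxed) {
            1 => self.initialize(),
            0 => None,
            addr => {
                // Pairs with the Release store in `initialize`; this plays the
                // role of the consume ordering Rust cannot express directly.
                atomic::fence(Ordering::Acquire);
                Some(addr)
            }
        }
    }

    #[cold]
    fn initialize(&self) -> Option<usize> {
        let val = fake_lookup();
        // Release makes the looked-up value (and anything written before it)
        // visible to threads that later observe this store.
        self.addr.store(val, Ordering::Release);
        match val {
            0 => None,
            addr => Some(addr),
        }
    }
}

// Stand-in for `fetch`/`dlsym`: pretend the symbol resolved to address 0x1000.
fn fake_lookup() -> usize {
    0x1000
}

fn main() {
    static CACHE: WeakCache = WeakCache::new();
    assert_eq!(CACHE.get(), Some(0x1000)); // first call initializes
    assert_eq!(CACHE.get(), Some(0x1000)); // later calls take the fast path
}

In this protocol the Acquire fence is what makes it sound for the caller to read through the published address; with only the Relaxed load, nothing would order those later reads after the initializer's writes.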
