Rollup merge of rust-lang#65778 - bdonlan:stable_weak_count, r=dtolnay
Stabilize `std::{rc,sync}::Weak::{weak_count, strong_count}`

* Original PR: rust-lang#56696
* Tracking issue: rust-lang#57977

Closes: rust-lang#57977

Supporting comments:

> Although these were added for testing, it is occasionally useful to have a way to probe optimistically for whether a weak pointer has become dangling, without actually taking the overhead of manipulating atomics. Are there any plans to stabilize this?

_Originally posted by @bdonlan in rust-lang#57977 (comment)_

> Having this stabilized would help. Currently, the only way to check if a weak pointer has become dangling is to call `upgrade`, which is far more expensive.

_Originally posted by @glebpom in rust-lang#57977 (comment)_
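
The kind of check described in these comments can be sketched against the stabilized API roughly as follows (a minimal illustration, not part of this PR; the `is_dangling` helper is hypothetical):

```rust
use std::rc::{Rc, Weak};

/// Returns true if the value behind `weak` has already been dropped.
/// With the stabilized accessor this is a plain counter read; previously the
/// only stable option was `weak.upgrade().is_none()`, which has to bump the
/// strong count and then drop it again.
fn is_dangling<T>(weak: &Weak<T>) -> bool {
    Weak::strong_count(weak) == 0
}

fn main() {
    let strong = Rc::new("cached value");
    let weak = Rc::downgrade(&strong);
    assert!(!is_dangling(&weak));

    drop(strong);
    assert!(is_dangling(&weak));
}
```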

Not sure if stabilizing these warrants a full RFC, so throwing this out here as a start for now.

Note: per CONTRIBUTING.md, I ran the tidy checks, but they seem to be failing on unchanged files (primarily in `src/stdsimd`).
Centril authored Dec 16, 2019
2 parents a605441 + 9778e03 commit c34ea91
Showing 4 changed files with 35 additions and 49 deletions.

src/liballoc/rc.rs: 6 additions & 10 deletions

@@ -1836,7 +1836,7 @@ impl<T: ?Sized> Weak<T> {
     /// If `self` was created using [`Weak::new`], this will return 0.
     ///
     /// [`Weak::new`]: #method.new
-    #[unstable(feature = "weak_counts", issue = "57977")]
+    #[stable(feature = "weak_counts", since = "1.41.0")]
     pub fn strong_count(&self) -> usize {
         if let Some(inner) = self.inner() {
             inner.strong()
@@ -1847,20 +1847,16 @@ impl<T: ?Sized> Weak<T> {

     /// Gets the number of `Weak` pointers pointing to this allocation.
     ///
-    /// If `self` was created using [`Weak::new`], this will return `None`. If
-    /// not, the returned value is at least 1, since `self` still points to the
-    /// allocation.
-    ///
-    /// [`Weak::new`]: #method.new
-    #[unstable(feature = "weak_counts", issue = "57977")]
-    pub fn weak_count(&self) -> Option<usize> {
+    /// If no strong pointers remain, this will return zero.
+    #[stable(feature = "weak_counts", since = "1.41.0")]
+    pub fn weak_count(&self) -> usize {
         self.inner().map(|inner| {
             if inner.strong() > 0 {
                 inner.weak() - 1 // subtract the implicit weak ptr
             } else {
-                inner.weak()
+                0
             }
-        })
+        }).unwrap_or(0)
     }

     /// Returns `None` when the pointer is dangling and there is no allocated `RcBox`
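
The `- 1` in the hunk above exists because all of the strong handles collectively own a single implicit weak reference, which neither counter accessor reports. A short sketch of the behaviour observable once this change lands (assumes a toolchain with this stabilization, i.e. Rust 1.41+; not code from the PR):

```rust
use std::rc::Rc;

fn main() {
    let a = Rc::new(5);
    // The implicit weak reference held on behalf of the strong handles is
    // never reported, so a fresh `Rc` shows zero weak pointers.
    assert_eq!(Rc::weak_count(&a), 0);

    let w = Rc::downgrade(&a);
    assert_eq!(Rc::weak_count(&a), 1);
    assert_eq!(w.strong_count(), 1);
    // With this PR the return type is `usize` rather than `Option<usize>`.
    assert_eq!(w.weak_count(), 1);

    drop(a);
    // Once the last strong handle is gone, the new implementation reports 0,
    // even though `w` itself is still alive.
    assert_eq!(w.strong_count(), 0);
    assert_eq!(w.weak_count(), 0);
}
```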

src/liballoc/rc/tests.rs: 7 additions & 7 deletions

@@ -114,28 +114,28 @@ fn test_weak_count() {

 #[test]
 fn weak_counts() {
-    assert_eq!(Weak::weak_count(&Weak::<u64>::new()), None);
+    assert_eq!(Weak::weak_count(&Weak::<u64>::new()), 0);
     assert_eq!(Weak::strong_count(&Weak::<u64>::new()), 0);

     let a = Rc::new(0);
     let w = Rc::downgrade(&a);
     assert_eq!(Weak::strong_count(&w), 1);
-    assert_eq!(Weak::weak_count(&w), Some(1));
+    assert_eq!(Weak::weak_count(&w), 1);
     let w2 = w.clone();
     assert_eq!(Weak::strong_count(&w), 1);
-    assert_eq!(Weak::weak_count(&w), Some(2));
+    assert_eq!(Weak::weak_count(&w), 2);
     assert_eq!(Weak::strong_count(&w2), 1);
-    assert_eq!(Weak::weak_count(&w2), Some(2));
+    assert_eq!(Weak::weak_count(&w2), 2);
     drop(w);
     assert_eq!(Weak::strong_count(&w2), 1);
-    assert_eq!(Weak::weak_count(&w2), Some(1));
+    assert_eq!(Weak::weak_count(&w2), 1);
     let a2 = a.clone();
     assert_eq!(Weak::strong_count(&w2), 2);
-    assert_eq!(Weak::weak_count(&w2), Some(1));
+    assert_eq!(Weak::weak_count(&w2), 1);
     drop(a2);
     drop(a);
     assert_eq!(Weak::strong_count(&w2), 0);
-    assert_eq!(Weak::weak_count(&w2), Some(1));
+    assert_eq!(Weak::weak_count(&w2), 0);
     drop(w2);
 }


src/liballoc/sync.rs: 15 additions & 25 deletions

@@ -12,7 +12,7 @@ use core::sync::atomic
 use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
 use core::borrow;
 use core::fmt;
-use core::cmp::{self, Ordering};
+use core::cmp::Ordering;
 use core::iter;
 use core::intrinsics::abort;
 use core::mem::{self, align_of, align_of_val, size_of_val};
@@ -1529,7 +1529,7 @@ impl<T: ?Sized> Weak<T> {
     /// If `self` was created using [`Weak::new`], this will return 0.
     ///
     /// [`Weak::new`]: #method.new
-    #[unstable(feature = "weak_counts", issue = "57977")]
+    #[stable(feature = "weak_counts", since = "1.41.0")]
     pub fn strong_count(&self) -> usize {
         if let Some(inner) = self.inner() {
             inner.strong.load(SeqCst)
@@ -1541,9 +1541,8 @@ impl<T: ?Sized> Weak<T> {
     /// Gets an approximation of the number of `Weak` pointers pointing to this
     /// allocation.
     ///
-    /// If `self` was created using [`Weak::new`], this will return 0. If not,
-    /// the returned value is at least 1, since `self` still points to the
-    /// allocation.
+    /// If `self` was created using [`Weak::new`], or if there are no remaining
+    /// strong pointers, this will return 0.
     ///
     /// # Accuracy
     ///
@@ -1552,31 +1551,22 @@ impl<T: ?Sized> Weak<T> {
     /// `Weak`s pointing to the same allocation.
     ///
     /// [`Weak::new`]: #method.new
-    #[unstable(feature = "weak_counts", issue = "57977")]
-    pub fn weak_count(&self) -> Option<usize> {
-        // Due to the implicit weak pointer added when any strong pointers are
-        // around, we cannot implement `weak_count` correctly since it
-        // necessarily requires accessing the strong count and weak count in an
-        // unsynchronized fashion. So this version is a bit racy.
+    #[stable(feature = "weak_counts", since = "1.41.0")]
+    pub fn weak_count(&self) -> usize {
         self.inner().map(|inner| {
-            let strong = inner.strong.load(SeqCst);
             let weak = inner.weak.load(SeqCst);
+            let strong = inner.strong.load(SeqCst);
             if strong == 0 {
-                // If the last `Arc` has *just* been dropped, it might not yet
-                // have removed the implicit weak count, so the value we get
-                // here might be 1 too high.
-                weak
+                0
             } else {
-                // As long as there's still at least 1 `Arc` around, subtract
-                // the implicit weak pointer.
-                // Note that the last `Arc` might get dropped between the 2
-                // loads we do above, removing the implicit weak pointer. This
-                // means that the value might be 1 too low here. In order to not
-                // return 0 here (which would happen if we're the only weak
-                // pointer), we guard against that specifically.
-                cmp::max(1, weak - 1)
+                // Since we observed that there was at least one strong pointer
+                // after reading the weak count, we know that the implicit weak
+                // reference (present whenever any strong references are alive)
+                // was still around when we observed the weak count, and can
+                // therefore safely subtract it.
+                weak - 1
             }
-        })
+        }).unwrap_or(0)
     }

     /// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`,
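
To make the `# Accuracy` caveat concrete, here is a hedged sketch (not from the PR; the thread and iteration counts are arbitrary) that reads the counters while other threads churn `Weak`s, so the reported weak count is only ever an approximation:

```rust
use std::sync::Arc;
use std::thread;

fn main() {
    let strong = Arc::new(0u64);
    let observer = Arc::downgrade(&strong);

    // Threads that repeatedly create and drop extra `Weak`s while we read.
    let churn: Vec<_> = (0..4)
        .map(|_| {
            let strong = Arc::clone(&strong);
            thread::spawn(move || {
                for _ in 0..10_000 {
                    let _w = Arc::downgrade(&strong);
                }
            })
        })
        .collect();

    // Per the doc comment above, concurrent updates mean the observed weak
    // count can be off by one in either direction, so we only print it.
    for _ in 0..10 {
        println!(
            "approx strong = {}, approx weak = {}",
            observer.strong_count(),
            observer.weak_count(),
        );
    }

    for handle in churn {
        handle.join().unwrap();
    }
}
```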

src/liballoc/sync/tests.rs: 7 additions & 7 deletions

@@ -62,28 +62,28 @@ fn test_arc_get_mut() {

 #[test]
 fn weak_counts() {
-    assert_eq!(Weak::weak_count(&Weak::<u64>::new()), None);
+    assert_eq!(Weak::weak_count(&Weak::<u64>::new()), 0);
     assert_eq!(Weak::strong_count(&Weak::<u64>::new()), 0);

     let a = Arc::new(0);
     let w = Arc::downgrade(&a);
     assert_eq!(Weak::strong_count(&w), 1);
-    assert_eq!(Weak::weak_count(&w), Some(1));
+    assert_eq!(Weak::weak_count(&w), 1);
     let w2 = w.clone();
     assert_eq!(Weak::strong_count(&w), 1);
-    assert_eq!(Weak::weak_count(&w), Some(2));
+    assert_eq!(Weak::weak_count(&w), 2);
     assert_eq!(Weak::strong_count(&w2), 1);
-    assert_eq!(Weak::weak_count(&w2), Some(2));
+    assert_eq!(Weak::weak_count(&w2), 2);
     drop(w);
     assert_eq!(Weak::strong_count(&w2), 1);
-    assert_eq!(Weak::weak_count(&w2), Some(1));
+    assert_eq!(Weak::weak_count(&w2), 1);
     let a2 = a.clone();
     assert_eq!(Weak::strong_count(&w2), 2);
-    assert_eq!(Weak::weak_count(&w2), Some(1));
+    assert_eq!(Weak::weak_count(&w2), 1);
     drop(a2);
     drop(a);
     assert_eq!(Weak::strong_count(&w2), 0);
-    assert_eq!(Weak::weak_count(&w2), Some(1));
+    assert_eq!(Weak::weak_count(&w2), 0);
     drop(w2);
 }
