diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 1ad79072e75ab..a408bf8e284e7 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -54,7 +54,9 @@ use heap::deallocate;
 /// ```
 #[unsafe_no_drop_flag]
 pub struct Arc<T> {
-    x: *mut ArcInner<T>,
+    // FIXME #12808: strange name to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _ptr: *mut ArcInner<T>,
 }
 
 /// A weak pointer to an `Arc`.
@@ -63,7 +65,9 @@ pub struct Arc<T> {
 /// used to break cycles between `Arc` pointers.
 #[unsafe_no_drop_flag]
 pub struct Weak<T> {
-    x: *mut ArcInner<T>,
+    // FIXME #12808: strange name to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _ptr: *mut ArcInner<T>,
 }
 
 struct ArcInner<T> {
@@ -83,7 +87,7 @@ impl<T: Share + Send> Arc<T> {
             weak: atomics::AtomicUint::new(1),
             data: data,
         };
-        Arc { x: unsafe { mem::transmute(x) } }
+        Arc { _ptr: unsafe { mem::transmute(x) } }
     }
 
     #[inline]
@@ -93,7 +97,7 @@ impl<T: Share + Send> Arc<T> {
         // `ArcInner` structure itself is `Share` because the inner data is
         // `Share` as well, so we're ok loaning out an immutable pointer to
         // these contents.
-        unsafe { &*self.x }
+        unsafe { &*self._ptr }
     }
 
     /// Downgrades a strong pointer to a weak pointer
@@ -104,7 +108,7 @@ impl<T: Share + Send> Arc<T> {
     pub fn downgrade(&self) -> Weak<T> {
         // See the clone() impl for why this is relaxed
         self.inner().weak.fetch_add(1, atomics::Relaxed);
-        Weak { x: self.x }
+        Weak { _ptr: self._ptr }
     }
 }
 
@@ -128,7 +132,7 @@ impl<T: Share + Send> Clone for Arc<T> {
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
         self.inner().strong.fetch_add(1, atomics::Relaxed);
-        Arc { x: self.x }
+        Arc { _ptr: self._ptr }
     }
 }
 
@@ -166,7 +170,7 @@ impl<T: Share + Send> Drop for Arc<T> {
         // This structure has #[unsafe_no_drop_flag], so this drop glue may run
        // more than once (but it is guaranteed to be zeroed after the first if
        // it's run more than once)
-        if self.x.is_null() { return }
+        if self._ptr.is_null() { return }
 
        // Because `fetch_sub` is already atomic, we do not need to synchronize
        // with other threads unless we are going to delete the object. This
@@ -198,7 +202,7 @@ impl<T: Share + Send> Drop for Arc<T> {
 
         if self.inner().weak.fetch_sub(1, atomics::Release) == 1 {
             atomics::fence(atomics::Acquire);
-            unsafe { deallocate(self.x as *mut u8, size_of::<ArcInner<T>>(),
+            unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
                                 min_align_of::<ArcInner<T>>()) }
         }
     }
@@ -218,14 +222,14 @@ impl<T: Share + Send> Weak<T> {
             let n = inner.strong.load(atomics::SeqCst);
             if n == 0 { return None }
             let old = inner.strong.compare_and_swap(n, n + 1, atomics::SeqCst);
-            if old == n { return Some(Arc { x: self.x }) }
+            if old == n { return Some(Arc { _ptr: self._ptr }) }
         }
     }
 
     #[inline]
     fn inner<'a>(&'a self) -> &'a ArcInner<T> {
         // See comments above for why this is "safe"
-        unsafe { &*self.x }
+        unsafe { &*self._ptr }
     }
 }
 
@@ -234,7 +238,7 @@ impl<T: Share + Send> Clone for Weak<T> {
     fn clone(&self) -> Weak<T> {
         // See comments in Arc::clone() for why this is relaxed
         self.inner().weak.fetch_add(1, atomics::Relaxed);
-        Weak { x: self.x }
+        Weak { _ptr: self._ptr }
     }
 }
 
@@ -242,14 +246,14 @@ impl<T: Share + Send> Drop for Weak<T> {
     fn drop(&mut self) {
         // see comments above for why this check is here
-        if self.x.is_null() { return }
+        if self._ptr.is_null() { return }
 
         // If we find out that we were the last weak pointer, then its time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
         // the memory orderings
         if self.inner().weak.fetch_sub(1, atomics::Release) == 1 {
             atomics::fence(atomics::Acquire);
-            unsafe { deallocate(self.x as *mut u8, size_of::<ArcInner<T>>(),
+            unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
                                 min_align_of::<ArcInner<T>>()) }
         }
     }
@@ -261,7 +265,7 @@ mod tests {
     use std::clone::Clone;
     use std::comm::channel;
     use std::mem::drop;
-    use std::ops::{Drop, Deref, DerefMut};
+    use std::ops::Drop;
     use std::option::{Option, Some, None};
     use std::sync::atomics;
     use std::task;
@@ -374,7 +378,7 @@ mod tests {
         let a = Arc::new(Cycle { x: Mutex::new(None) });
         let b = a.clone().downgrade();
-        *a.deref().x.lock().deref_mut() = Some(b);
+        *a.x.lock() = Some(b);
 
         // hopefully we don't double-free (or leak)...
     }
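Why the rename matters: when a type implements Deref, `value.field` resolves against the wrapper's own fields first and only falls back to auto-deref into the contained type, so a field named `x` on `Arc` can shadow a field named `x` on the pointee. The underscore-prefixed names make such collisions unlikely, which is what lets the test above write `*a.x.lock()` instead of `*a.deref().x.lock().deref_mut()`. A minimal sketch of the shadowing in present-day Rust syntax, with hypothetical `Handle`/`Inner` types (illustrative only, not part of this patch):

use std::ops::Deref;

struct Inner { count: usize }

struct Handle {
    count: usize,      // same name as a field of `Inner`
    inner: Box<Inner>,
}

impl Deref for Handle {
    type Target = Inner;
    fn deref(&self) -> &Inner { &self.inner }
}

fn main() {
    let h = Handle { count: 1, inner: Box::new(Inner { count: 99 }) };
    // The wrapper's own field wins; auto-deref never fires:
    assert_eq!(h.count, 1);
    // Renaming the wrapper's field (e.g. to `_count`) would let `h.count`
    // reach `Inner::count` through Deref instead.
    assert_eq!(h.inner.count, 99);
}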
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 5a877d9362e40..8ded3c431d4e2 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -45,9 +45,11 @@ struct RcBox<T> {
 /// Immutable reference counted pointer type
 #[unsafe_no_drop_flag]
 pub struct Rc<T> {
-    ptr: *mut RcBox<T>,
-    nosend: marker::NoSend,
-    noshare: marker::NoShare
+    // FIXME #12808: strange names to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _ptr: *mut RcBox<T>,
+    _nosend: marker::NoSend,
+    _noshare: marker::NoShare
 }
 
 impl<T> Rc<T> {
@@ -60,13 +62,13 @@ impl<T> Rc<T> {
                 // destructor never frees the allocation while the
                 // strong destructor is running, even if the weak
                 // pointer is stored inside the strong one.
-                ptr: transmute(box RcBox {
+                _ptr: transmute(box RcBox {
                     value: value,
                     strong: Cell::new(1),
                     weak: Cell::new(1)
                 }),
-                nosend: marker::NoSend,
-                noshare: marker::NoShare
+                _nosend: marker::NoSend,
+                _noshare: marker::NoShare
             }
         }
     }
@@ -77,9 +79,9 @@ impl<T> Rc<T> {
     pub fn downgrade(&self) -> Weak<T> {
         self.inc_weak();
         Weak {
-            ptr: self.ptr,
-            nosend: marker::NoSend,
-            noshare: marker::NoShare
+            _ptr: self._ptr,
+            _nosend: marker::NoSend,
+            _noshare: marker::NoShare
         }
     }
 }
@@ -96,7 +98,7 @@ impl<T> Drop for Rc<T> {
     fn drop(&mut self) {
         unsafe {
-            if !self.ptr.is_null() {
+            if !self._ptr.is_null() {
                 self.dec_strong();
                 if self.strong() == 0 {
                     ptr::read(self.deref()); // destroy the contained object
@@ -106,7 +108,7 @@ impl<T> Drop for Rc<T> {
 
                 self.dec_weak();
                 if self.weak() == 0 {
-                    deallocate(self.ptr as *mut u8, size_of::<RcBox<T>>(),
+                    deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
                                min_align_of::<RcBox<T>>())
                 }
             }
@@ -119,7 +121,7 @@ impl<T> Clone for Rc<T> {
     #[inline]
     fn clone(&self) -> Rc<T> {
         self.inc_strong();
-        Rc { ptr: self.ptr, nosend: marker::NoSend, noshare: marker::NoShare }
+        Rc { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare }
     }
 }
 
@@ -154,9 +156,11 @@ impl<T: TotalOrd> TotalOrd for Rc<T> {
 /// Weak reference to a reference-counted box
 #[unsafe_no_drop_flag]
 pub struct Weak<T> {
-    ptr: *mut RcBox<T>,
-    nosend: marker::NoSend,
-    noshare: marker::NoShare
+    // FIXME #12808: strange names to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _ptr: *mut RcBox<T>,
+    _nosend: marker::NoSend,
+    _noshare: marker::NoShare
 }
 
 impl<T> Weak<T> {
@@ -166,7 +170,7 @@ impl<T> Weak<T> {
             None
         } else {
             self.inc_strong();
-            Some(Rc { ptr: self.ptr, nosend: marker::NoSend, noshare: marker::NoShare })
+            Some(Rc { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare })
         }
     }
 }
 
@@ -175,12 +179,12 @@ impl<T> Drop for Weak<T> {
     fn drop(&mut self) {
         unsafe {
-            if !self.ptr.is_null() {
+            if !self._ptr.is_null() {
                 self.dec_weak();
                 // the weak count starts at 1, and will only go to
                 // zero if all the strong pointers have disappeared.
                 if self.weak() == 0 {
-                    deallocate(self.ptr as *mut u8, size_of::<RcBox<T>>(),
+                    deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
                                min_align_of::<RcBox<T>>())
                 }
             }
@@ -192,7 +196,7 @@ impl<T> Clone for Weak<T> {
     #[inline]
     fn clone(&self) -> Weak<T> {
         self.inc_weak();
-        Weak { ptr: self.ptr, nosend: marker::NoSend, noshare: marker::NoShare }
+        Weak { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare }
     }
 }
 
@@ -221,12 +225,12 @@ trait RcBoxPtr<T> {
 
 impl<T> RcBoxPtr<T> for Rc<T> {
     #[inline(always)]
-    fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self.ptr) } }
+    fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self._ptr) } }
 }
 
 impl<T> RcBoxPtr<T> for Weak<T> {
     #[inline(always)]
-    fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self.ptr) } }
+    fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self._ptr) } }
 }
 
 #[cfg(test)]
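The drop logic above leans on an invariant worth spelling out: the weak count starts at 1, with that initial reference held collectively by all the strong pointers, so the allocation is freed only after both the last Rc and the last Weak are gone. Today's std::rc keeps the same scheme; a small demonstration using the modern API (the count accessors are current names, not from this patch):

use std::rc::Rc;

fn main() {
    let strong = Rc::new(5i32);
    let weak = Rc::downgrade(&strong);
    assert_eq!(Rc::strong_count(&strong), 1);
    assert_eq!(Rc::weak_count(&strong), 1);
    drop(strong);                       // destroys the value...
    assert!(weak.upgrade().is_none());  // ...so the weak pointer cannot revive it
}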
diff --git a/src/libcore/cell.rs b/src/libcore/cell.rs
index 4d630db898bc3..3e4c67c4d1ff5 100644
--- a/src/libcore/cell.rs
+++ b/src/libcore/cell.rs
@@ -250,7 +250,7 @@ impl<T> RefCell<T> {
             WRITING => None,
             borrow => {
                 self.borrow.set(borrow + 1);
-                Some(Ref { parent: self })
+                Some(Ref { _parent: self })
             }
         }
     }
@@ -280,7 +280,7 @@ impl<T> RefCell<T> {
         match self.borrow.get() {
             UNUSED => {
                 self.borrow.set(WRITING);
-                Some(RefMut { parent: self })
+                Some(RefMut { _parent: self })
             },
             _ => None
         }
@@ -316,22 +316,24 @@ impl<T: Eq> Eq for RefCell<T> {
 
 /// Wraps a borrowed reference to a value in a `RefCell` box.
 pub struct Ref<'b, T> {
-    parent: &'b RefCell<T>
+    // FIXME #12808: strange name to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _parent: &'b RefCell<T>
 }
 
 #[unsafe_destructor]
 impl<'b, T> Drop for Ref<'b, T> {
     fn drop(&mut self) {
-        let borrow = self.parent.borrow.get();
+        let borrow = self._parent.borrow.get();
         debug_assert!(borrow != WRITING && borrow != UNUSED);
-        self.parent.borrow.set(borrow - 1);
+        self._parent.borrow.set(borrow - 1);
     }
 }
 
 impl<'b, T> Deref<T> for Ref<'b, T> {
     #[inline]
     fn deref<'a>(&'a self) -> &'a T {
-        unsafe { &*self.parent.value.get() }
+        unsafe { &*self._parent.value.get() }
     }
 }
 
@@ -345,40 +347,42 @@ impl<'b, T> Deref<T> for Ref<'b, T> {
 pub fn clone_ref<'b, T>(orig: &Ref<'b, T>) -> Ref<'b, T> {
     // Since this Ref exists, we know the borrow flag
     // is not set to WRITING.
-    let borrow = orig.parent.borrow.get();
+    let borrow = orig._parent.borrow.get();
     debug_assert!(borrow != WRITING && borrow != UNUSED);
-    orig.parent.borrow.set(borrow + 1);
+    orig._parent.borrow.set(borrow + 1);
 
     Ref {
-        parent: orig.parent,
+        _parent: orig._parent,
     }
 }
 
 /// Wraps a mutable borrowed reference to a value in a `RefCell` box.
 pub struct RefMut<'b, T> {
-    parent: &'b RefCell<T>
+    // FIXME #12808: strange name to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _parent: &'b RefCell<T>
 }
 
 #[unsafe_destructor]
 impl<'b, T> Drop for RefMut<'b, T> {
     fn drop(&mut self) {
-        let borrow = self.parent.borrow.get();
+        let borrow = self._parent.borrow.get();
         debug_assert!(borrow == WRITING);
-        self.parent.borrow.set(UNUSED);
+        self._parent.borrow.set(UNUSED);
     }
 }
 
 impl<'b, T> Deref<T> for RefMut<'b, T> {
     #[inline]
     fn deref<'a>(&'a self) -> &'a T {
-        unsafe { &*self.parent.value.get() }
+        unsafe { &*self._parent.value.get() }
     }
 }
 
 impl<'b, T> DerefMut<T> for RefMut<'b, T> {
     #[inline]
     fn deref_mut<'a>(&'a mut self) -> &'a mut T {
-        unsafe { &mut *self.parent.value.get() }
+        unsafe { &mut *self._parent.value.get() }
     }
 }
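For context, `borrow` here is a flag cell: UNUSED when free, a positive reader count while `Ref` guards exist, and the WRITING sentinel while a `RefMut` is live; the Drop impls above walk the flag back down. The same state machine survives in today's RefCell. A sketch with modern method names (`try_borrow_mut` returns a Result today, rather than the Option used in this patch):

use std::cell::RefCell;

fn main() {
    let cell = RefCell::new(0i32);
    {
        let a = cell.borrow();                    // flag: 1 reader
        let b = cell.borrow();                    // flag: 2 readers
        assert!(cell.try_borrow_mut().is_err());  // writer refused while readers live
        assert_eq!(*a + *b, 0);
    }                                             // guards drop; flag back to UNUSED
    *cell.borrow_mut() += 1;                      // exclusive borrow now succeeds
    assert_eq!(*cell.borrow(), 1);
}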
diff --git a/src/libstd/local_data.rs b/src/libstd/local_data.rs
index 8798c035fcaf1..2c7e16cf18bc1 100644
--- a/src/libstd/local_data.rs
+++ b/src/libstd/local_data.rs
@@ -127,10 +127,12 @@ fn key_to_key_value<T: 'static>(key: Key<T>) -> *u8 {
 /// The task-local data can be accessed through this value, and when this
 /// structure is dropped it will return the borrow on the data.
 pub struct Ref<T: 'static> {
-    ptr: &'static T,
-    key: Key<T>,
-    index: uint,
-    nosend: marker::NoSend,
+    // FIXME #12808: strange names to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _ptr: &'static T,
+    _key: Key<T>,
+    _index: uint,
+    _nosend: marker::NoSend,
 }
 
 impl<T: 'static> KeyValue<T> {
@@ -233,7 +235,7 @@ impl<T: 'static> KeyValue<T> {
             let data = data as *Box<LocalData> as *raw::TraitObject;
             &mut *((*data).data as *mut T)
         };
-        Ref { ptr: ptr, index: pos, nosend: marker::NoSend, key: self }
+        Ref { _ptr: ptr, _index: pos, _nosend: marker::NoSend, _key: self }
     })
 }
@@ -252,7 +254,7 @@ impl<T: 'static> KeyValue<T> {
 }
 
 impl<T: 'static> Deref<T> for Ref<T> {
-    fn deref<'a>(&'a self) -> &'a T { self.ptr }
+    fn deref<'a>(&'a self) -> &'a T { self._ptr }
 }
 
 #[unsafe_destructor]
@@ -260,7 +262,7 @@ impl<T: 'static> Drop for Ref<T> {
     fn drop(&mut self) {
         let map = unsafe { get_local_map() };
 
-        let (_, _, ref mut loan) = *map.get_mut(self.index).get_mut_ref();
+        let (_, _, ref mut loan) = *map.get_mut(self._index).get_mut_ref();
         *loan -= 1;
     }
 }
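This `Ref` lends out a task-local value and decrements the loan count in the task's map when dropped. The `local_data` module is long gone; the closest modern counterpart is `thread_local!`, which hands out access through a closure instead of a Deref-ing guard value, sidestepping the field-shadowing issue entirely. A rough analogue, not equivalent code:

use std::cell::Cell;

thread_local! {
    // Per-thread slot, reached via an accessor closure rather than
    // a guard value that derefs to the data.
    static COUNTER: Cell<u32> = Cell::new(0);
}

fn main() {
    COUNTER.with(|c| c.set(c.get() + 1));
    COUNTER.with(|c| assert_eq!(c.get(), 1));
}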
diff --git a/src/libsync/lock.rs b/src/libsync/lock.rs
index 9f59f587770ea..1b620185610e4 100644
--- a/src/libsync/lock.rs
+++ b/src/libsync/lock.rs
@@ -174,7 +174,9 @@ pub struct Mutex<T> {
 /// An guard which is created by locking a mutex. Through this guard the
 /// underlying data can be accessed.
 pub struct MutexGuard<'a, T> {
-    data: &'a mut T,
+    // FIXME #12808: strange name to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _data: &'a mut T,
     /// Inner condition variable connected to the locked mutex that this guard
     /// was created from. This can be used for atomic-unlock-and-deschedule.
     pub cond: Condvar<'a>,
@@ -221,7 +223,7 @@ impl<T: Send> Mutex<T> {
         let data = unsafe { &mut *self.data.get() };
 
         MutexGuard {
-            data: data,
+            _data: data,
             cond: Condvar {
                 name: "Mutex",
                 poison: PoisonOnFail::new(poison, "Mutex"),
@@ -232,10 +234,10 @@ impl<T: Send> Mutex<T> {
     }
 }
 
 impl<'a, T: Send> Deref<T> for MutexGuard<'a, T> {
-    fn deref<'a>(&'a self) -> &'a T { &*self.data }
+    fn deref<'a>(&'a self) -> &'a T { &*self._data }
 }
 impl<'a, T: Send> DerefMut<T> for MutexGuard<'a, T> {
-    fn deref_mut<'a>(&'a mut self) -> &'a mut T { &mut *self.data }
+    fn deref_mut<'a>(&'a mut self) -> &'a mut T { &mut *self._data }
 }
 
 /****************************************************************************
@@ -272,7 +274,9 @@ pub struct RWLock<T> {
 /// A guard which is created by locking an rwlock in write mode. Through this
 /// guard the underlying data can be accessed.
 pub struct RWLockWriteGuard<'a, T> {
-    data: &'a mut T,
+    // FIXME #12808: strange name to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _data: &'a mut T,
     /// Inner condition variable that can be used to sleep on the write mode of
     /// this rwlock.
     pub cond: Condvar<'a>,
@@ -281,8 +285,10 @@ pub struct RWLockWriteGuard<'a, T> {
 /// A guard which is created by locking an rwlock in read mode. Through this
 /// guard the underlying data can be accessed.
 pub struct RWLockReadGuard<'a, T> {
-    data: &'a T,
-    guard: raw::RWLockReadGuard<'a>,
+    // FIXME #12808: strange names to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _data: &'a T,
+    _guard: raw::RWLockReadGuard<'a>,
 }
 
 impl<T: Send + Share> RWLock<T> {
@@ -320,7 +326,7 @@ impl<T: Send + Share> RWLock<T> {
         let data = unsafe { &mut *self.data.get() };
 
         RWLockWriteGuard {
-            data: data,
+            _data: data,
             cond: Condvar {
                 name: "RWLock",
                 poison: PoisonOnFail::new(poison, "RWLock"),
@@ -340,8 +346,8 @@ impl<T: Send + Share> RWLock<T> {
         let guard = self.lock.read();
         PoisonOnFail::check(unsafe { *self.failed.get() }, "RWLock");
         RWLockReadGuard {
-            guard: guard,
-            data: unsafe { &*self.data.get() },
+            _guard: guard,
+            _data: unsafe { &*self.data.get() },
         }
     }
 }
 
@@ -351,25 +357,25 @@ impl<'a, T: Send + Share> RWLockWriteGuard<'a, T> {
     ///
     /// This will allow pending readers to come into the lock.
     pub fn downgrade(self) -> RWLockReadGuard<'a, T> {
-        let RWLockWriteGuard { data, cond } = self;
+        let RWLockWriteGuard { _data, cond } = self;
         // convert the data to read-only explicitly
-        let data = &*data;
+        let data = &*_data;
         let guard = match cond.inner {
             InnerMutex(..) => unreachable!(),
             InnerRWLock(guard) => guard.downgrade()
        };
-        RWLockReadGuard { guard: guard, data: data }
+        RWLockReadGuard { _guard: guard, _data: data }
     }
 }
 
 impl<'a, T: Send + Share> Deref<T> for RWLockReadGuard<'a, T> {
-    fn deref<'a>(&'a self) -> &'a T { self.data }
+    fn deref<'a>(&'a self) -> &'a T { self._data }
 }
 impl<'a, T: Send + Share> Deref<T> for RWLockWriteGuard<'a, T> {
-    fn deref<'a>(&'a self) -> &'a T { &*self.data }
+    fn deref<'a>(&'a self) -> &'a T { &*self._data }
 }
 impl<'a, T: Send + Share> DerefMut<T> for RWLockWriteGuard<'a, T> {
-    fn deref_mut<'a>(&'a mut self) -> &'a mut T { &mut *self.data }
+    fn deref_mut<'a>(&'a mut self) -> &'a mut T { &mut *self._data }
 }
 
 /****************************************************************************
@@ -812,4 +818,3 @@ mod tests {
         }
     }
 }
-
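The guard renames matter most here: the whole point of Deref/DerefMut on `MutexGuard` and the rwlock guards is that callers reach the protected value's fields directly through the guard, which only works cleanly when the guard exposes no colliding field of its own. A sketch of that usage with today's `std::sync::Mutex` (hypothetical `Config` type; the `cond`/poison plumbing above has no direct modern equivalent on the guard):

use std::sync::Mutex;

struct Config { retries: u32 }

fn main() {
    let m = Mutex::new(Config { retries: 3 });
    let mut guard = m.lock().unwrap();
    // Field access goes through DerefMut to the protected Config:
    guard.retries += 1;
    assert_eq!(guard.retries, 4);
}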