
Commit bbb70cd

auto merge of #14402 : huonw/rust/arc-field-rename, r=alexcrichton
Paper over privacy issues with Deref by changing field names.

Types that implement Deref can produce confusing error messages because their private fields can conflict with a field of the type they deref to. For example, previously

    struct Foo { x: int }

    let a: Arc<Foo> = ...;
    println!("{}", a.x);

would complain that the `x` field of `Arc` was private (since `Arc` has a private field called `x`) rather than just ignoring it. This patch doesn't fix that issue, but it does mean one would have to write `a._ptr` to hit the same error message, which seems far less common. (This patch `_`-prefixes all private fields of `Deref`-implementing types.)

cc #12808
2 parents: 07563be + 9698221 · commit bbb70cd
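
To make the failure mode above concrete, here is a minimal, self-contained sketch. It is written in present-day Rust (associated `Target` type on `Deref`) rather than the pre-1.0 dialect used in this tree, and `Wrapper`/`Foo` are hypothetical stand-ins rather than the real `Arc`:

mod wrapper {
    use std::ops::Deref;

    // Stand-in for Arc: a Deref-implementing smart pointer with one
    // private field. Before this commit the real field was called `x`;
    // afterwards it is `_ptr`, a name user types rarely share.
    pub struct Wrapper<T> {
        _ptr: Box<T>,
    }

    impl<T> Wrapper<T> {
        pub fn new(value: T) -> Wrapper<T> {
            Wrapper { _ptr: Box::new(value) }
        }
    }

    impl<T> Deref for Wrapper<T> {
        type Target = T;
        fn deref(&self) -> &T {
            &*self._ptr
        }
    }
}

struct Foo {
    x: i32,
}

fn main() {
    let a = wrapper::Wrapper::new(Foo { x: 3 });
    // `Wrapper` has no field named `x`, so this access derefs through
    // to `Foo::x`, which is what the user meant.
    println!("{}", a.x);
    // Accessing the pointer's own private field (e.g. `a._ptr`) is still
    // rejected outside the `wrapper` module, but that name is far less
    // likely to collide with a field of `Foo` than `x` was.
}

The commit applies the same idea to the real types: the private fields of `Arc`, `Rc`, `Weak`, `Ref` and `RefMut` keep their roles but gain `_`-prefixed names that contained types are unlikely to use.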

File tree

5 files changed, +93 -74 lines changed


src/liballoc/arc.rs

Lines changed: 19 additions & 15 deletions

@@ -54,7 +54,9 @@ use heap::deallocate;
 /// ```
 #[unsafe_no_drop_flag]
 pub struct Arc<T> {
-    x: *mut ArcInner<T>,
+    // FIXME #12808: strange name to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _ptr: *mut ArcInner<T>,
 }

 /// A weak pointer to an `Arc`.
@@ -63,7 +65,9 @@ pub struct Arc<T> {
 /// used to break cycles between `Arc` pointers.
 #[unsafe_no_drop_flag]
 pub struct Weak<T> {
-    x: *mut ArcInner<T>,
+    // FIXME #12808: strange name to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _ptr: *mut ArcInner<T>,
 }

 struct ArcInner<T> {
@@ -83,7 +87,7 @@ impl<T: Share + Send> Arc<T> {
             weak: atomics::AtomicUint::new(1),
             data: data,
         };
-        Arc { x: unsafe { mem::transmute(x) } }
+        Arc { _ptr: unsafe { mem::transmute(x) } }
     }

     #[inline]
@@ -93,7 +97,7 @@ impl<T: Share + Send> Arc<T> {
         // `ArcInner` structure itself is `Share` because the inner data is
         // `Share` as well, so we're ok loaning out an immutable pointer to
         // these contents.
-        unsafe { &*self.x }
+        unsafe { &*self._ptr }
     }

     /// Downgrades a strong pointer to a weak pointer
@@ -104,7 +108,7 @@ impl<T: Share + Send> Arc<T> {
     pub fn downgrade(&self) -> Weak<T> {
         // See the clone() impl for why this is relaxed
         self.inner().weak.fetch_add(1, atomics::Relaxed);
-        Weak { x: self.x }
+        Weak { _ptr: self._ptr }
     }
 }

@@ -128,7 +132,7 @@ impl<T: Share + Send> Clone for Arc<T> {
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
         self.inner().strong.fetch_add(1, atomics::Relaxed);
-        Arc { x: self.x }
+        Arc { _ptr: self._ptr }
     }
 }

@@ -166,7 +170,7 @@ impl<T: Share + Send> Drop for Arc<T> {
         // This structure has #[unsafe_no_drop_flag], so this drop glue may run
         // more than once (but it is guaranteed to be zeroed after the first if
         // it's run more than once)
-        if self.x.is_null() { return }
+        if self._ptr.is_null() { return }

         // Because `fetch_sub` is already atomic, we do not need to synchronize
         // with other threads unless we are going to delete the object. This
@@ -198,7 +202,7 @@ impl<T: Share + Send> Drop for Arc<T> {

         if self.inner().weak.fetch_sub(1, atomics::Release) == 1 {
             atomics::fence(atomics::Acquire);
-            unsafe { deallocate(self.x as *mut u8, size_of::<ArcInner<T>>(),
+            unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
                                 min_align_of::<ArcInner<T>>()) }
         }
     }
@@ -218,14 +222,14 @@ impl<T: Share + Send> Weak<T> {
             let n = inner.strong.load(atomics::SeqCst);
             if n == 0 { return None }
             let old = inner.strong.compare_and_swap(n, n + 1, atomics::SeqCst);
-            if old == n { return Some(Arc { x: self.x }) }
+            if old == n { return Some(Arc { _ptr: self._ptr }) }
         }
     }

     #[inline]
     fn inner<'a>(&'a self) -> &'a ArcInner<T> {
         // See comments above for why this is "safe"
-        unsafe { &*self.x }
+        unsafe { &*self._ptr }
     }
 }

@@ -234,22 +238,22 @@ impl<T: Share + Send> Clone for Weak<T> {
     fn clone(&self) -> Weak<T> {
         // See comments in Arc::clone() for why this is relaxed
         self.inner().weak.fetch_add(1, atomics::Relaxed);
-        Weak { x: self.x }
+        Weak { _ptr: self._ptr }
     }
 }

 #[unsafe_destructor]
 impl<T: Share + Send> Drop for Weak<T> {
     fn drop(&mut self) {
         // see comments above for why this check is here
-        if self.x.is_null() { return }
+        if self._ptr.is_null() { return }

         // If we find out that we were the last weak pointer, then its time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
         // the memory orderings
         if self.inner().weak.fetch_sub(1, atomics::Release) == 1 {
             atomics::fence(atomics::Acquire);
-            unsafe { deallocate(self.x as *mut u8, size_of::<ArcInner<T>>(),
+            unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
                                 min_align_of::<ArcInner<T>>()) }
         }
     }
@@ -261,7 +265,7 @@ mod tests {
     use std::clone::Clone;
     use std::comm::channel;
     use std::mem::drop;
-    use std::ops::{Drop, Deref, DerefMut};
+    use std::ops::Drop;
     use std::option::{Option, Some, None};
     use std::sync::atomics;
     use std::task;
@@ -374,7 +378,7 @@ mod tests {

         let a = Arc::new(Cycle { x: Mutex::new(None) });
         let b = a.clone().downgrade();
-        *a.deref().x.lock().deref_mut() = Some(b);
+        *a.x.lock() = Some(b);

         // hopefully we don't double-free (or leak)...
     }
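
The test tweak at the end of this file shows the practical effect: with `Arc`'s private field renamed, the cycle test can write `a.x` and reach `Cycle::x` through `Deref` instead of spelling out `a.deref().x.lock().deref_mut()`. For comparison, a rough present-day rendering of that test (the modern API obtains weak pointers via `Arc::downgrade`, and `Mutex::lock` now returns a `Result`):

use std::sync::{Arc, Mutex, Weak};

struct Cycle {
    x: Mutex<Option<Weak<Cycle>>>,
}

fn main() {
    let a = Arc::new(Cycle { x: Mutex::new(None) });
    let b = Arc::downgrade(&a);
    // Field access through Arc's Deref impl: `a.x` reaches Cycle::x
    // because Arc keeps no field of that name.
    *a.x.lock().unwrap() = Some(b);
    // Dropping `a` releases the last strong reference; the Weak stored
    // inside does not keep the value alive, so nothing leaks or is
    // freed twice.
}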

src/liballoc/rc.rs

Lines changed: 25 additions & 21 deletions

@@ -45,9 +45,11 @@ struct RcBox<T> {
 /// Immutable reference counted pointer type
 #[unsafe_no_drop_flag]
 pub struct Rc<T> {
-    ptr: *mut RcBox<T>,
-    nosend: marker::NoSend,
-    noshare: marker::NoShare
+    // FIXME #12808: strange names to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _ptr: *mut RcBox<T>,
+    _nosend: marker::NoSend,
+    _noshare: marker::NoShare
 }

 impl<T> Rc<T> {
@@ -60,13 +62,13 @@ impl<T> Rc<T> {
                 // destructor never frees the allocation while the
                 // strong destructor is running, even if the weak
                 // pointer is stored inside the strong one.
-                ptr: transmute(box RcBox {
+                _ptr: transmute(box RcBox {
                     value: value,
                     strong: Cell::new(1),
                     weak: Cell::new(1)
                 }),
-                nosend: marker::NoSend,
-                noshare: marker::NoShare
+                _nosend: marker::NoSend,
+                _noshare: marker::NoShare
             }
         }
     }
@@ -77,9 +79,9 @@ impl<T> Rc<T> {
     pub fn downgrade(&self) -> Weak<T> {
         self.inc_weak();
         Weak {
-            ptr: self.ptr,
-            nosend: marker::NoSend,
-            noshare: marker::NoShare
+            _ptr: self._ptr,
+            _nosend: marker::NoSend,
+            _noshare: marker::NoShare
         }
     }
 }
@@ -96,7 +98,7 @@ impl<T> Deref<T> for Rc<T> {
 impl<T> Drop for Rc<T> {
     fn drop(&mut self) {
         unsafe {
-            if !self.ptr.is_null() {
+            if !self._ptr.is_null() {
                 self.dec_strong();
                 if self.strong() == 0 {
                     ptr::read(self.deref()); // destroy the contained object
@@ -106,7 +108,7 @@ impl<T> Drop for Rc<T> {
                     self.dec_weak();

                     if self.weak() == 0 {
-                        deallocate(self.ptr as *mut u8, size_of::<RcBox<T>>(),
+                        deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
                                    min_align_of::<RcBox<T>>())
                     }
                 }
@@ -119,7 +121,7 @@ impl<T> Clone for Rc<T> {
     #[inline]
     fn clone(&self) -> Rc<T> {
         self.inc_strong();
-        Rc { ptr: self.ptr, nosend: marker::NoSend, noshare: marker::NoShare }
+        Rc { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare }
     }
 }

@@ -154,9 +156,11 @@ impl<T: TotalOrd> TotalOrd for Rc<T> {
 /// Weak reference to a reference-counted box
 #[unsafe_no_drop_flag]
 pub struct Weak<T> {
-    ptr: *mut RcBox<T>,
-    nosend: marker::NoSend,
-    noshare: marker::NoShare
+    // FIXME #12808: strange names to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _ptr: *mut RcBox<T>,
+    _nosend: marker::NoSend,
+    _noshare: marker::NoShare
 }

 impl<T> Weak<T> {
@@ -166,7 +170,7 @@ impl<T> Weak<T> {
             None
         } else {
             self.inc_strong();
-            Some(Rc { ptr: self.ptr, nosend: marker::NoSend, noshare: marker::NoShare })
+            Some(Rc { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare })
         }
     }
 }
@@ -175,12 +179,12 @@ impl<T> Weak<T> {
 impl<T> Drop for Weak<T> {
     fn drop(&mut self) {
         unsafe {
-            if !self.ptr.is_null() {
+            if !self._ptr.is_null() {
                 self.dec_weak();
                 // the weak count starts at 1, and will only go to
                 // zero if all the strong pointers have disappeared.
                 if self.weak() == 0 {
-                    deallocate(self.ptr as *mut u8, size_of::<RcBox<T>>(),
+                    deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
                                min_align_of::<RcBox<T>>())
                 }
             }
@@ -192,7 +196,7 @@ impl<T> Clone for Weak<T> {
     #[inline]
     fn clone(&self) -> Weak<T> {
         self.inc_weak();
-        Weak { ptr: self.ptr, nosend: marker::NoSend, noshare: marker::NoShare }
+        Weak { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare }
     }
 }

@@ -221,12 +225,12 @@ trait RcBoxPtr<T> {

 impl<T> RcBoxPtr<T> for Rc<T> {
     #[inline(always)]
-    fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self.ptr) } }
+    fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self._ptr) } }
 }

 impl<T> RcBoxPtr<T> for Weak<T> {
     #[inline(always)]
-    fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self.ptr) } }
+    fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self._ptr) } }
 }

 #[cfg(test)]
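
As background for the `downgrade`/`upgrade` pair being touched throughout this file, a short present-day usage sketch (modern `Rc` exposes downgrade as the associated function `Rc::downgrade` rather than the inherent method shown in the diff; the value `42u32` is arbitrary):

use std::rc::{Rc, Weak};

fn main() {
    let strong = Rc::new(42u32);
    let weak: Weak<u32> = Rc::downgrade(&strong);

    // While a strong pointer exists, upgrade() bumps the strong count and
    // hands back a new Rc, as in the `Some(Rc { _ptr: ... })` arm above.
    assert!(weak.upgrade().is_some());

    drop(strong);
    // With the last strong pointer gone, upgrade() returns None.
    assert!(weak.upgrade().is_none());
}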

src/libcore/cell.rs

Lines changed: 18 additions & 14 deletions

@@ -250,7 +250,7 @@ impl<T> RefCell<T> {
             WRITING => None,
             borrow => {
                 self.borrow.set(borrow + 1);
-                Some(Ref { parent: self })
+                Some(Ref { _parent: self })
             }
         }
     }
@@ -280,7 +280,7 @@ impl<T> RefCell<T> {
         match self.borrow.get() {
             UNUSED => {
                 self.borrow.set(WRITING);
-                Some(RefMut { parent: self })
+                Some(RefMut { _parent: self })
             },
             _ => None
         }
@@ -316,22 +316,24 @@ impl<T: Eq> Eq for RefCell<T> {

 /// Wraps a borrowed reference to a value in a `RefCell` box.
 pub struct Ref<'b, T> {
-    parent: &'b RefCell<T>
+    // FIXME #12808: strange name to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _parent: &'b RefCell<T>
 }

 #[unsafe_destructor]
 impl<'b, T> Drop for Ref<'b, T> {
     fn drop(&mut self) {
-        let borrow = self.parent.borrow.get();
+        let borrow = self._parent.borrow.get();
         debug_assert!(borrow != WRITING && borrow != UNUSED);
-        self.parent.borrow.set(borrow - 1);
+        self._parent.borrow.set(borrow - 1);
     }
 }

 impl<'b, T> Deref<T> for Ref<'b, T> {
     #[inline]
     fn deref<'a>(&'a self) -> &'a T {
-        unsafe { &*self.parent.value.get() }
+        unsafe { &*self._parent.value.get() }
     }
 }

@@ -345,40 +347,42 @@ impl<'b, T> Deref<T> for Ref<'b, T> {
 pub fn clone_ref<'b, T>(orig: &Ref<'b, T>) -> Ref<'b, T> {
     // Since this Ref exists, we know the borrow flag
     // is not set to WRITING.
-    let borrow = orig.parent.borrow.get();
+    let borrow = orig._parent.borrow.get();
     debug_assert!(borrow != WRITING && borrow != UNUSED);
-    orig.parent.borrow.set(borrow + 1);
+    orig._parent.borrow.set(borrow + 1);

     Ref {
-        parent: orig.parent,
+        _parent: orig._parent,
     }
 }

 /// Wraps a mutable borrowed reference to a value in a `RefCell` box.
 pub struct RefMut<'b, T> {
-    parent: &'b RefCell<T>
+    // FIXME #12808: strange name to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _parent: &'b RefCell<T>
 }

 #[unsafe_destructor]
 impl<'b, T> Drop for RefMut<'b, T> {
     fn drop(&mut self) {
-        let borrow = self.parent.borrow.get();
+        let borrow = self._parent.borrow.get();
         debug_assert!(borrow == WRITING);
-        self.parent.borrow.set(UNUSED);
+        self._parent.borrow.set(UNUSED);
     }
 }

 impl<'b, T> Deref<T> for RefMut<'b, T> {
     #[inline]
     fn deref<'a>(&'a self) -> &'a T {
-        unsafe { &*self.parent.value.get() }
+        unsafe { &*self._parent.value.get() }
     }
 }

 impl<'b, T> DerefMut<T> for RefMut<'b, T> {
     #[inline]
     fn deref_mut<'a>(&'a mut self) -> &'a mut T {
-        unsafe { &mut *self.parent.value.get() }
+        unsafe { &mut *self._parent.value.get() }
     }
 }
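
`Ref` and `RefMut` get the same treatment as `Arc` and `Rc`: their `parent` field becomes `_parent`, so a borrowed value whose own type happens to have a `parent` field can still be reached through `Deref`. A small present-day sketch of that access pattern; the `Node` type and its `parent`/`data` fields are invented for illustration:

use std::cell::RefCell;

struct Node {
    parent: Option<usize>, // same name as Ref's old private field
    data: i32,
}

fn main() {
    let cell = RefCell::new(Node { parent: None, data: 7 });

    let r = cell.borrow(); // r: Ref<'_, Node>
    // Both accesses go through Ref's Deref impl to reach Node's fields;
    // per the commit message, a `parent` field on Ref itself used to get
    // in the way of the first one.
    assert!(r.parent.is_none());
    assert_eq!(r.data, 7);
    drop(r);

    let mut m = cell.borrow_mut(); // RefMut<'_, Node>, uses DerefMut
    m.data += 1;
    m.parent = Some(0);
    assert_eq!(m.data, 8);
}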
