@@ -79,8 +79,7 @@ use core::mem;
 use core::ops::{Drop, Deref};
 use core::option::Option;
 use core::option::Option::{Some, None};
-use core::ptr::RawPtr;
-use core::ptr;
+use core::ptr::{mod, NonZero, RawPtr};
 use heap::deallocate;
 
 /// An atomically reference counted wrapper for shared state.
@@ -114,7 +113,7 @@ use heap::deallocate;
 pub struct Arc<T> {
     // FIXME #12808: strange name to try to avoid interfering with
     // field accesses of the contained type via Deref
-    _ptr: *mut ArcInner<T>,
+    _ptr: NonZero<*mut ArcInner<T>>,
 }
 
 unsafe impl<T: Sync + Send> Send for Arc<T> { }
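Note (not part of the patch): `NonZero` is the then-unstable `core` wrapper asserting the value is never zero, which lets the compiler use the forbidden zero value as `Option`'s `None` discriminant, keeping `Option<Arc<T>>` pointer-sized. A minimal sketch of the effect, written against today's Rust where `core::ptr::NonNull` plays the same role:

```rust
use std::mem::size_of;
use std::ptr::NonNull;

fn main() {
    // The non-zero guarantee lets Option reuse the null value as None,
    // so the wrapper adds no space overhead.
    assert_eq!(size_of::<Option<NonNull<u8>>>(), size_of::<*mut u8>());
    // A plain raw pointer gives no such guarantee, so Option needs a tag.
    assert!(size_of::<Option<*mut u8>>() > size_of::<*mut u8>());
}
```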
@@ -130,7 +129,7 @@ unsafe impl<T: Sync + Send> Sync for Arc<T> { }
 pub struct Weak<T> {
     // FIXME #12808: strange name to try to avoid interfering with
     // field accesses of the contained type via Deref
-    _ptr: *mut ArcInner<T>,
+    _ptr: NonZero<*mut ArcInner<T>>,
 }
 
 unsafe impl<T: Sync + Send> Send for Weak<T> { }
@@ -165,7 +164,7 @@ impl<T> Arc<T> {
             weak: atomic::AtomicUint::new(1),
             data: data,
         };
-        Arc { _ptr: unsafe { mem::transmute(x) } }
+        Arc { _ptr: NonZero(unsafe { mem::transmute(x) }) }
     }
 
     /// Downgrades the `Arc<T>` to a `Weak<T>` reference.
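Note (not part of the patch): the `mem::transmute(x)` above turns the freshly allocated box into a raw pointer without running its destructor; a live heap allocation is never at address zero, so wrapping the result in `NonZero(...)` is sound. A hedged sketch of the same move in today's Rust, using `Box::into_raw`/`Box::from_raw` instead of `transmute`:

```rust
fn main() {
    // Turn an owning Box into a raw pointer; the allocation is leaked
    // until reclaimed below, and the pointer is guaranteed non-null.
    let raw: *mut u32 = Box::into_raw(Box::new(42));
    assert!(!raw.is_null());
    // Safety: `raw` came from Box::into_raw and is reclaimed exactly once.
    let back = unsafe { Box::from_raw(raw) };
    assert_eq!(*back, 42);
}
```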
@@ -194,7 +193,8 @@ impl<T> Arc<T> {
         // pointer is valid. Furthermore, we know that the `ArcInner` structure itself is `Sync`
         // because the inner data is `Sync` as well, so we're ok loaning out an immutable pointer
         // to these contents.
-        unsafe { &*self._ptr }
+        let NonZero(ptr) = self._ptr;
+        unsafe { &*ptr }
     }
 }
 
@@ -281,7 +281,8 @@ impl<T: Send + Sync + Clone> Arc<T> {
         // pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
         // this point, and we required the Arc itself to be `mut`, so we're returning the only
         // possible reference to the inner data.
-        let inner = unsafe { &mut *self._ptr };
+        let NonZero(ptr) = self._ptr;
+        let inner = unsafe { &mut *ptr };
         &mut inner.data
     }
 }
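Note (not part of the patch): the uniqueness argument in the comment above is the same one today's `Arc::get_mut` and `Arc::make_mut` rely on: a strong count of exactly one means no other handle can observe the mutation. A small demonstration against modern std:

```rust
use std::sync::Arc;

fn main() {
    let mut a = Arc::new(5);
    // Refcount is 1, so handing out &mut to the inner data is sound.
    *Arc::get_mut(&mut a).unwrap() += 1;
    assert_eq!(*a, 6);

    // With a second strong reference alive, a unique &mut would alias.
    let b = a.clone();
    assert!(Arc::get_mut(&mut a).is_none());
    drop(b);
}
```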
@@ -316,7 +317,8 @@ impl<T: Sync + Send> Drop for Arc<T> {
     fn drop(&mut self) {
         // This structure has #[unsafe_no_drop_flag], so this drop glue may run more than once (but
         // it is guaranteed to be zeroed after the first if it's run more than once)
-        if self._ptr.is_null() { return }
+        let NonZero(ptr) = self._ptr;
+        if ptr.is_null() { return }
 
         // Because `fetch_sub` is already atomic, we do not need to synchronize with other threads
         // unless we are going to delete the object. This same logic applies to the below
@@ -346,7 +348,7 @@ impl<T: Sync + Send> Drop for Arc<T> {
 
         if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
             atomic::fence(atomic::Acquire);
-            unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
+            unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
                                 min_align_of::<ArcInner<T>>()) }
         }
     }
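Note (not part of the patch): the `fetch_sub(1, Release)` + `fence(Acquire)` pairing above is the standard refcount teardown protocol: each owner's Release decrement publishes its writes, and the Acquire fence in the one thread that saw the count hit zero synchronizes with all of them before the memory is freed. A minimal stand-alone sketch, assuming a simplified single counter rather than Arc's strong/weak pair:

```rust
use std::sync::atomic::{fence, AtomicUsize, Ordering};

// Simplified stand-in for ArcInner's counter.
struct Inner {
    count: AtomicUsize,
}

fn release(inner: &Inner) -> bool {
    // Release: our writes to the shared data happen-before the decrement.
    if inner.count.fetch_sub(1, Ordering::Release) == 1 {
        // Acquire: synchronize with every other owner's Release decrement,
        // so the destroying thread sees all of their writes.
        fence(Ordering::Acquire);
        return true; // caller may now free the allocation
    }
    false
}

fn main() {
    let inner = Inner { count: AtomicUsize::new(2) };
    assert!(!release(&inner)); // first owner drops: count 2 -> 1
    assert!(release(&inner));  // last owner drops: count 1 -> 0, may free
}
```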
@@ -386,7 +388,8 @@ impl<T: Sync + Send> Weak<T> {
     #[inline]
     fn inner(&self) -> &ArcInner<T> {
         // See comments above for why this is "safe"
-        unsafe { &*self._ptr }
+        let NonZero(ptr) = self._ptr;
+        unsafe { &*ptr }
     }
 }
 
@@ -442,14 +445,16 @@ impl<T: Sync + Send> Drop for Weak<T> {
     /// } // implicit drop
     /// ```
     fn drop(&mut self) {
+        let NonZero(ptr) = self._ptr;
+
         // see comments above for why this check is here
-        if self._ptr.is_null() { return }
+        if ptr.is_null() { return }
 
         // If we find out that we were the last weak pointer, then it's time to deallocate the data
         // entirely. See the discussion in Arc::drop() about the memory orderings
         if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
             atomic::fence(atomic::Acquire);
-            unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
+            unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
                                 min_align_of::<ArcInner<T>>()) }
         }
     }
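Note (not part of the patch): `let NonZero(ptr) = self._ptr;` is an irrefutable tuple-struct pattern, the idiomatic way to unwrap the field now that `_ptr` is no longer a bare pointer. The subsequent `is_null` check still works because, per the comment in `Arc::drop` above, `#[unsafe_no_drop_flag]` zeroes the whole struct, wrapper included, after the first drop. A tiny sketch of the destructuring idiom, using a hypothetical stand-in type (the real `NonZero` was an unstable lang item):

```rust
// Hypothetical stand-in for the old NonZero tuple struct.
struct NonZero<T>(T);

fn main() {
    let wrapped = NonZero(0x1000 as *mut u8);
    // Irrefutable pattern: binds the wrapped pointer in one step,
    // mirroring the `let NonZero(ptr) = self._ptr;` lines in this patch.
    let NonZero(ptr) = wrapped;
    assert_eq!(ptr as usize, 0x1000);
}
```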