 //! Concurrency-enabled mechanisms for sharing mutable and/or immutable state
 //! between tasks.

-use core::atomics;
+use core::atomic;
 use core::clone::Clone;
 use core::kinds::{Share, Send};
 use core::mem::{min_align_of, size_of, drop};
@@ -71,8 +71,8 @@ pub struct Weak<T> {
 }

 struct ArcInner<T> {
-    strong: atomics::AtomicUint,
-    weak: atomics::AtomicUint,
+    strong: atomic::AtomicUint,
+    weak: atomic::AtomicUint,
     data: T,
 }

@@ -84,8 +84,8 @@ impl<T: Share + Send> Arc<T> {
         // Start the weak pointer count as 1 which is the weak pointer that's
         // held by all the strong pointers (kinda), see std/rc.rs for more info
         let x = box ArcInner {
-            strong: atomics::AtomicUint::new(1),
-            weak: atomics::AtomicUint::new(1),
+            strong: atomic::AtomicUint::new(1),
+            weak: atomic::AtomicUint::new(1),
             data: data,
         };
         Arc { _ptr: unsafe { mem::transmute(x) } }
@@ -109,7 +109,7 @@ impl<T: Share + Send> Arc<T> {
     #[experimental = "Weak pointers may not belong in this module."]
     pub fn downgrade(&self) -> Weak<T> {
         // See the clone() impl for why this is relaxed
-        self.inner().weak.fetch_add(1, atomics::Relaxed);
+        self.inner().weak.fetch_add(1, atomic::Relaxed);
         Weak { _ptr: self._ptr }
     }
 }
@@ -134,7 +134,7 @@ impl<T: Share + Send> Clone for Arc<T> {
         // another must already provide any required synchronization.
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
-        self.inner().strong.fetch_add(1, atomics::Relaxed);
+        self.inner().strong.fetch_add(1, atomic::Relaxed);
         Arc { _ptr: self._ptr }
     }
 }
@@ -159,8 +159,8 @@ impl<T: Send + Share + Clone> Arc<T> {
         // Note that we hold a strong reference, which also counts as
         // a weak reference, so we only clone if there is an
         // additional reference of either kind.
-        if self.inner().strong.load(atomics::SeqCst) != 1 ||
-           self.inner().weak.load(atomics::SeqCst) != 1 {
+        if self.inner().strong.load(atomic::SeqCst) != 1 ||
+           self.inner().weak.load(atomic::SeqCst) != 1 {
             *self = Arc::new(self.deref().clone())
         }
         // This unsafety is ok because we're guaranteed that the pointer
@@ -185,7 +185,7 @@ impl<T: Share + Send> Drop for Arc<T> {
         // Because `fetch_sub` is already atomic, we do not need to synchronize
         // with other threads unless we are going to delete the object. This
         // same logic applies to the below `fetch_sub` to the `weak` count.
-        if self.inner().strong.fetch_sub(1, atomics::Release) != 1 { return }
+        if self.inner().strong.fetch_sub(1, atomic::Release) != 1 { return }

         // This fence is needed to prevent reordering of use of the data and
         // deletion of the data. Because it is marked `Release`, the
@@ -204,14 +204,14 @@ impl<T: Share + Send> Drop for Arc<T> {
         // and an "acquire" operation before deleting the object.
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
-        atomics::fence(atomics::Acquire);
+        atomic::fence(atomic::Acquire);

         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
         unsafe { drop(ptr::read(&self.inner().data)); }

-        if self.inner().weak.fetch_sub(1, atomics::Release) == 1 {
-            atomics::fence(atomics::Acquire);
+        if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
+            atomic::fence(atomic::Acquire);
             unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
                                 min_align_of::<ArcInner<T>>()) }
         }
@@ -230,9 +230,9 @@ impl<T: Share + Send> Weak<T> {
         // fetch_add because once the count hits 0 it must never be above 0.
         let inner = self.inner();
         loop {
-            let n = inner.strong.load(atomics::SeqCst);
+            let n = inner.strong.load(atomic::SeqCst);
             if n == 0 { return None }
-            let old = inner.strong.compare_and_swap(n, n + 1, atomics::SeqCst);
+            let old = inner.strong.compare_and_swap(n, n + 1, atomic::SeqCst);
             if old == n { return Some(Arc { _ptr: self._ptr }) }
         }
     }
@@ -249,7 +249,7 @@ impl<T: Share + Send> Clone for Weak<T> {
     #[inline]
     fn clone(&self) -> Weak<T> {
         // See comments in Arc::clone() for why this is relaxed
-        self.inner().weak.fetch_add(1, atomics::Relaxed);
+        self.inner().weak.fetch_add(1, atomic::Relaxed);
         Weak { _ptr: self._ptr }
     }
 }
@@ -264,8 +264,8 @@ impl<T: Share + Send> Drop for Weak<T> {
         // If we find out that we were the last weak pointer, then it's time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
         // the memory orderings
-        if self.inner().weak.fetch_sub(1, atomics::Release) == 1 {
-            atomics::fence(atomics::Acquire);
+        if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
+            atomic::fence(atomic::Acquire);
             unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
                                 min_align_of::<ArcInner<T>>()) }
         }
@@ -281,21 +281,21 @@ mod tests {
     use std::mem::drop;
     use std::ops::Drop;
     use std::option::{Option, Some, None};
-    use std::sync::atomics;
+    use std::sync::atomic;
     use std::task;
     use std::vec::Vec;
     use super::{Arc, Weak};
     use std::sync::Mutex;

-    struct Canary(*mut atomics::AtomicUint);
+    struct Canary(*mut atomic::AtomicUint);

     impl Drop for Canary
     {
         fn drop(&mut self) {
             unsafe {
                 match *self {
                     Canary(c) => {
-                        (*c).fetch_add(1, atomics::SeqCst);
+                        (*c).fetch_add(1, atomic::SeqCst);
                     }
                 }
             }
@@ -413,20 +413,20 @@ mod tests {

     #[test]
     fn drop_arc() {
-        let mut canary = atomics::AtomicUint::new(0);
-        let x = Arc::new(Canary(&mut canary as *mut atomics::AtomicUint));
+        let mut canary = atomic::AtomicUint::new(0);
+        let x = Arc::new(Canary(&mut canary as *mut atomic::AtomicUint));
         drop(x);
-        assert!(canary.load(atomics::Acquire) == 1);
+        assert!(canary.load(atomic::Acquire) == 1);
     }

     #[test]
     fn drop_arc_weak() {
-        let mut canary = atomics::AtomicUint::new(0);
-        let arc = Arc::new(Canary(&mut canary as *mut atomics::AtomicUint));
+        let mut canary = atomic::AtomicUint::new(0);
+        let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUint));
         let arc_weak = arc.downgrade();
-        assert!(canary.load(atomics::Acquire) == 0);
+        assert!(canary.load(atomic::Acquire) == 0);
         drop(arc);
-        assert!(canary.load(atomics::Acquire) == 1);
+        assert!(canary.load(atomic::Acquire) == 1);
         drop(arc_weak);
     }
 }
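
For context, here is a minimal sketch of how the strong/weak counts manipulated in this diff behave from the caller's side. It is written against today's std::sync::Arc API rather than the pre-1.0 code shown above (Arc::downgrade, Weak::upgrade, Arc::strong_count and Arc::weak_count are the modern equivalents of the methods in this file); it is illustrative only, not part of the change.

use std::sync::{Arc, Weak};

fn main() {
    // Creating an Arc sets strong = 1 and weak = 1 (the implicit weak
    // reference collectively held by the strong pointers, as the comment
    // in Arc::new explains).
    let a = Arc::new(5u32);

    // clone() only bumps the strong count; since an existing handle is
    // required to create a new one, a relaxed increment is enough.
    let b = Arc::clone(&a);
    assert_eq!(Arc::strong_count(&a), 2);

    // downgrade() bumps the weak count and returns a Weak<T>.
    let w: Weak<u32> = Arc::downgrade(&a);
    assert_eq!(Arc::weak_count(&a), 1);

    // upgrade() succeeds only while at least one strong pointer is alive;
    // internally this is the compare-and-swap loop from Weak::upgrade above.
    assert!(w.upgrade().is_some());

    // Dropping the last strong pointer destroys the data (the Release
    // decrement plus Acquire fence in Drop), but the allocation itself
    // is not freed until the last weak pointer is also gone.
    drop(b);
    drop(a);
    assert!(w.upgrade().is_none());
}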