@@ -18,6 +18,17 @@ use core::ops::Deref;
 use core::ptr::{self, NonNull};
 use core::slice::{self};
 
+macro non_null {
+    (mut $place:expr, $t:ident) => {{
+        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
+        unsafe { &mut *(ptr::addr_of_mut!($place) as *mut NonNull<$t>) }
+    }},
+    ($place:expr, $t:ident) => {{
+        #![allow(unused_unsafe)] // we're sometimes used within an unsafe block
+        unsafe { *(ptr::addr_of!($place) as *const NonNull<$t>) }
+    }},
+}
+
 /// An iterator that moves out of a vector.
 ///
 /// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
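The `non_null!` macro above uses the nightly declarative-macros-2.0 (`macro`) syntax. As a rough standalone sketch, the same reinterpretation can be written with stable `macro_rules!` (the `non_null_sketch` name and the demo below are illustrative, not from this patch); it is sound because `NonNull<T>` is guaranteed to have the same layout as `*mut T`:

    use core::ptr::{self, NonNull};

    // Stable macro_rules! approximation of the nightly `macro non_null`:
    // view the storage of a `*const T` place as `NonNull<T>` without
    // changing the field's declared type. Valid only while non-null.
    macro_rules! non_null_sketch {
        (mut $place:expr, $t:ident) => {{
            unsafe { &mut *(ptr::addr_of_mut!($place) as *mut NonNull<$t>) }
        }};
        ($place:expr, $t:ident) => {{
            unsafe { *(ptr::addr_of!($place) as *const NonNull<$t>) }
        }};
    }

    fn main() {
        let x = 5i32;
        let mut raw: *const i32 = &x;
        // Read the raw field as NonNull...
        let nn: NonNull<i32> = non_null_sketch!(raw, i32);
        assert_eq!(unsafe { nn.as_ptr().read() }, 5);
        // ...and write a new pointer back through the `mut` arm.
        let y = 7i32;
        *non_null_sketch!(mut raw, i32) = NonNull::from(&y);
        assert_eq!(unsafe { *raw }, 7);
    }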
@@ -41,10 +52,12 @@ pub struct IntoIter<
     // the drop impl reconstructs a RawVec from buf, cap and alloc
     // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop
     pub(super) alloc: ManuallyDrop<A>,
-    pub(super) ptr: *const T,
-    pub(super) end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
-                              // ptr == end is a quick test for the Iterator being empty, that works
-                              // for both ZST and non-ZST.
+    pub(super) ptr: NonNull<T>,
+    /// If T is a ZST, this is actually ptr+len. This encoding is picked so that
+    /// ptr == end is a quick test for the Iterator being empty, that works
+    /// for both ZST and non-ZST.
+    /// For non-ZSTs the pointer is treated as `NonNull<T>`
+    pub(super) end: *const T,
 }
 
 #[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
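The promoted doc comment preserves the long-standing `end` encoding: for zero-sized `T`, `end` is `ptr` with the remaining length added to its address, so `ptr == end` tests emptiness uniformly for ZST and non-ZST. A small self-contained illustration of that encoding (example mine, not code from the patch):

    use core::ptr::NonNull;

    fn main() {
        // A ZST occupies zero bytes, so the remaining length is carried
        // in the address gap itself: end = ptr + len.
        let ptr: *const () = NonNull::<()>::dangling().as_ptr();
        let len = 3usize;
        let end = ptr.wrapping_byte_add(len);
        // Recover the length by address subtraction, as `size_hint` does.
        assert_eq!(end.addr().wrapping_sub(ptr.addr()), len);
        // Consuming one element shrinks `end`; `ptr` never moves, so it
        // stays aligned while `end` may become unaligned.
        let end = end.wrapping_byte_sub(1);
        assert_eq!(end.addr().wrapping_sub(ptr.addr()), len - 1);
    }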
@@ -68,7 +81,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
     /// ```
     #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
     pub fn as_slice(&self) -> &[T] {
-        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
+        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len()) }
     }
 
     /// Returns the remaining items of this iterator as a mutable slice.
@@ -97,7 +110,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
     }
 
     fn as_raw_mut_slice(&mut self) -> *mut [T] {
-        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
+        ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len())
     }
 
     /// Drops remaining elements and relinquishes the backing allocation.
@@ -124,7 +137,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
         // this creates less assembly
         self.cap = 0;
         self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
-        self.ptr = self.buf.as_ptr();
+        self.ptr = self.buf;
         self.end = self.buf.as_ptr();
 
         // Dropping the remaining elements can panic, so this needs to be
@@ -138,7 +151,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
     pub(crate) fn forget_remaining_elements(&mut self) {
         // For the ZST case, it is crucial that we mutate `end` here, not `ptr`.
         // `ptr` must stay aligned, while `end` may be unaligned.
-        self.end = self.ptr;
+        self.end = self.ptr.as_ptr();
     }
 
     #[cfg(not(no_global_oom_handling))]
@@ -160,7 +173,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
             // say that they're all at the beginning of the "allocation".
             0..this.len()
         } else {
-            this.ptr.sub_ptr(buf)..this.end.sub_ptr(buf)
+            this.ptr.sub_ptr(this.buf)..this.end.sub_ptr(buf)
         };
         let cap = this.cap;
         let alloc = ManuallyDrop::take(&mut this.alloc);
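Only the left operand changes here: `this.ptr` is now `NonNull<T>`, and `NonNull::sub_ptr` takes a `NonNull<T>` origin, so the raw `buf` local no longer fits on that side; `this.end` is still `*const T` and keeps the raw-pointer `sub_ptr`. A sketch of the distinction, using the stable signed `offset_from` as a stand-in for the unstable `sub_ptr` (example mine):

    use core::ptr::NonNull;

    fn main() {
        let data = [1u32, 2, 3, 4];
        let buf: NonNull<u32> = NonNull::from(&data[0]);
        let cursor: NonNull<u32> =
            unsafe { NonNull::new_unchecked(buf.as_ptr().add(2)) };
        // NonNull-to-NonNull distance, the shape of `this.ptr.sub_ptr(this.buf)`:
        let a = unsafe { cursor.as_ptr().offset_from(buf.as_ptr()) };
        // Raw-to-raw distance, the shape of `this.end.sub_ptr(buf)`:
        let end: *const u32 = data.as_ptr().wrapping_add(4);
        let b = unsafe { end.offset_from(data.as_ptr()) };
        assert_eq!((a, b), (2, 4));
    }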
@@ -187,37 +200,43 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
 
     #[inline]
     fn next(&mut self) -> Option<T> {
-        if self.ptr == self.end {
-            None
-        } else if T::IS_ZST {
-            // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
-            // reducing the `end`.
-            self.end = self.end.wrapping_byte_sub(1);
-
-            // Make up a value of this ZST.
-            Some(unsafe { mem::zeroed() })
+        if T::IS_ZST {
+            if self.ptr.as_ptr() == self.end as *mut _ {
+                None
+            } else {
+                // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
+                // reducing the `end`.
+                self.end = self.end.wrapping_byte_sub(1);
+
+                // Make up a value of this ZST.
+                Some(unsafe { mem::zeroed() })
+            }
         } else {
-            let old = self.ptr;
-            self.ptr = unsafe { self.ptr.add(1) };
+            if self.ptr == non_null!(self.end, T) {
+                None
+            } else {
+                let old = self.ptr;
+                self.ptr = unsafe { old.add(1) };
 
-            Some(unsafe { ptr::read(old) })
+                Some(unsafe { ptr::read(old.as_ptr()) })
+            }
         }
     }
 
     #[inline]
     fn size_hint(&self) -> (usize, Option<usize>) {
         let exact = if T::IS_ZST {
-            self.end.addr().wrapping_sub(self.ptr.addr())
+            self.end.addr().wrapping_sub(self.ptr.as_ptr().addr())
         } else {
-            unsafe { self.end.sub_ptr(self.ptr) }
+            unsafe { non_null!(self.end, T).sub_ptr(self.ptr) }
         };
         (exact, Some(exact))
     }
 
     #[inline]
     fn advance_by(&mut self, n: usize) -> Result<(), NonZeroUsize> {
         let step_size = self.len().min(n);
-        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size);
+        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), step_size);
         if T::IS_ZST {
             // See `next` for why we sub `end` here.
             self.end = self.end.wrapping_byte_sub(step_size);
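The emptiness check in `next` now branches on `T::IS_ZST` first: the ZST arm compares raw addresses (where `end` carries `ptr+len`), while the non-ZST arm compares two `NonNull`s via the macro and advances the `NonNull` cursor. A standalone sketch of the non-ZST step (names mine; the patch uses the nightly `NonNull::add`, replaced here with the stable `as_ptr().add(1)`):

    use core::ptr::NonNull;

    // Sketch of the non-ZST `next` step: compare cursor to end, advance,
    // and read the element the cursor just passed over.
    unsafe fn next_non_zst<T>(cursor: &mut NonNull<T>, end: NonNull<T>) -> Option<T> {
        if *cursor == end {
            None
        } else {
            let old = *cursor;
            *cursor = unsafe { NonNull::new_unchecked(old.as_ptr().add(1)) };
            Some(unsafe { old.as_ptr().read() })
        }
    }

    fn main() {
        let data = [10u32, 20];
        let mut cursor = NonNull::from(&data[0]);
        let end = unsafe { NonNull::new_unchecked(data.as_ptr().add(2) as *mut u32) };
        let mut out = Vec::new();
        while let Some(v) = unsafe { next_non_zst(&mut cursor, end) } {
            out.push(v);
        }
        assert_eq!(out, [10, 20]);
    }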
@@ -259,7 +278,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         // Safety: `len` indicates that this many elements are available and we just checked that
         // it fits into the array.
         unsafe {
-            ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, len);
+            ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, len);
             self.forget_remaining_elements();
             return Err(array::IntoIter::new_unchecked(raw_ary, 0..len));
         }
@@ -268,7 +287,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         // Safety: `len` is larger than the array size. Copy a fixed amount here to fully initialize
         // the array.
         return unsafe {
-            ptr::copy_nonoverlapping(self.ptr, raw_ary.as_mut_ptr() as *mut T, N);
+            ptr::copy_nonoverlapping(self.ptr.as_ptr(), raw_ary.as_mut_ptr() as *mut T, N);
             self.ptr = self.ptr.add(N);
             Ok(raw_ary.transpose().assume_init())
         };
@@ -286,26 +305,33 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         // Also note the implementation of `Self: TrustedRandomAccess` requires
         // that `T: Copy` so reading elements from the buffer doesn't invalidate
         // them for `Drop`.
-        unsafe { if T::IS_ZST { mem::zeroed() } else { ptr::read(self.ptr.add(i)) } }
+        unsafe { if T::IS_ZST { mem::zeroed() } else { self.ptr.add(i).read() } }
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
     #[inline]
     fn next_back(&mut self) -> Option<T> {
-        if self.end == self.ptr {
-            None
-        } else if T::IS_ZST {
-            // See above for why 'ptr.offset' isn't used
-            self.end = self.end.wrapping_byte_sub(1);
-
-            // Make up a value of this ZST.
-            Some(unsafe { mem::zeroed() })
+        if T::IS_ZST {
+            if self.end as *mut _ == self.ptr.as_ptr() {
+                None
+            } else {
+                // See above for why 'ptr.offset' isn't used
+                self.end = self.end.wrapping_byte_sub(1);
+
+                // Make up a value of this ZST.
+                Some(unsafe { mem::zeroed() })
+            }
         } else {
-            self.end = unsafe { self.end.sub(1) };
+            if non_null!(self.end, T) == self.ptr {
+                None
+            } else {
+                let new_end = unsafe { non_null!(self.end, T).sub(1) };
+                *non_null!(mut self.end, T) = new_end;
 
-            Some(unsafe { ptr::read(self.end) })
+                Some(unsafe { ptr::read(new_end.as_ptr()) })
+            }
         }
     }
 
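In the non-ZST arm of `next_back`, the decremented pointer is computed as a `NonNull`, written back through the macro's `mut` arm, and then read from. This keeps `end` declared as `*const T` (which the ZST byte arithmetic relies on) while the hot path works entirely in `NonNull`. A hypothetical standalone version of that pointer step (types and names mine):

    use core::ptr::{self, NonNull};

    struct Cursor<T> {
        end: *const T, // stays raw, as in the patch, for the ZST byte trick
    }

    impl<T> Cursor<T> {
        // Reinterpret `end` as NonNull, step it back one element, store it
        // back through the same reinterpretation, and read that element.
        unsafe fn step_back(&mut self) -> T {
            let end = unsafe { *(ptr::addr_of!(self.end) as *const NonNull<T>) };
            let new_end = unsafe { NonNull::new_unchecked(end.as_ptr().sub(1)) };
            unsafe { *(ptr::addr_of_mut!(self.end) as *mut NonNull<T>) = new_end };
            unsafe { new_end.as_ptr().read() }
        }
    }

    fn main() {
        let data = [1u8, 2, 3];
        let mut c = Cursor { end: data.as_ptr().wrapping_add(3) };
        assert_eq!(unsafe { c.step_back() }, 3);
        assert_eq!(unsafe { c.step_back() }, 2);
    }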
@@ -331,7 +357,11 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
     fn is_empty(&self) -> bool {
-        self.ptr == self.end
+        if T::IS_ZST {
+            self.ptr.as_ptr() == self.end as *mut _
+        } else {
+            self.ptr == non_null!(self.end, T)
+        }
     }
 }
 
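`is_empty` mirrors the split in `next` and `next_back`: only for non-ZSTs is `end` treated as `NonNull<T>` (as the new field docs state), while the ZST arm compares raw pointers, since there `end` encodes a remaining length rather than a real element address. Either way the test is still a single `ptr == end` comparison, e.g. (illustration mine):

    use core::ptr::NonNull;

    fn main() {
        // Non-ZST: empty exactly when the cursor has reached `end`.
        let data = [7u16; 2];
        let ptr = NonNull::from(&data[0]);
        let end: *const u16 = data.as_ptr().wrapping_add(2);
        assert!(ptr.as_ptr().cast_const() != end); // two elements remain

        // ZST: empty exactly when no length is left in the address gap.
        let zst_ptr: *const () = NonNull::<()>::dangling().as_ptr();
        let zst_end = zst_ptr.wrapping_byte_add(0); // len == 0
        assert!(zst_ptr == zst_end);
    }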