@@ -29,7 +29,7 @@ use parse::Directory;
 use parse::token::{self, Token};
 use print::pprust;
 use serialize::{Decoder, Decodable, Encoder, Encodable};
-use util::RcSlice;
+use util::RcVec;

 use std::borrow::Cow;
 use std::{fmt, iter, mem};
@@ -221,7 +221,7 @@ impl TokenStream {
                 new_slice.extend_from_slice(parts.0);
                 new_slice.push(comma);
                 new_slice.extend_from_slice(parts.1);
-                let slice = RcSlice::new(new_slice);
+                let slice = RcVec::new(new_slice);
                 return Some((TokenStream { kind: TokenStreamKind::Stream(slice) }, sp));
             }
         }
@@ -234,7 +234,7 @@ enum TokenStreamKind {
     Empty,
     Tree(TokenTree),
     JointTree(TokenTree),
-    Stream(RcSlice<TokenStream>),
+    Stream(RcVec<TokenStream>),
 }

 impl From<TokenTree> for TokenStream {
@@ -255,6 +255,60 @@ impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
     }
 }

+impl Extend<TokenStream> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, iter: I) {
+        let iter = iter.into_iter();
+        let kind = mem::replace(&mut self.kind, TokenStreamKind::Empty);
+
+        // Vector of token streams originally in self.
+        let tts: Vec<TokenStream> = match kind {
+            TokenStreamKind::Empty => {
+                let mut vec = Vec::new();
+                vec.reserve(iter.size_hint().0);
+                vec
+            }
+            TokenStreamKind::Tree(_) | TokenStreamKind::JointTree(_) => {
+                let mut vec = Vec::new();
+                vec.reserve(1 + iter.size_hint().0);
+                vec.push(TokenStream { kind });
+                vec
+            }
+            TokenStreamKind::Stream(rc_vec) => match RcVec::try_unwrap(rc_vec) {
+                Ok(mut vec) => {
+                    // Extend in place using the existing capacity if possible.
+                    // This is the fast path for libraries like `quote` that
+                    // build a token stream.
+                    vec.reserve(iter.size_hint().0);
+                    vec
+                }
+                Err(rc_vec) => {
+                    // Self is shared so we need to copy and extend that.
+                    let mut vec = Vec::new();
+                    vec.reserve(rc_vec.len() + iter.size_hint().0);
+                    vec.extend_from_slice(&rc_vec);
+                    vec
+                }
+            }
+        };
+
+        // Perform the extend, joining tokens as needed along the way.
+        let mut builder = TokenStreamBuilder(tts);
+        for stream in iter {
+            builder.push(stream);
+        }
+
+        // Build the resulting token stream. If it contains more than one token,
+        // preserve capacity in the vector in anticipation of the caller
+        // performing additional calls to extend.
+        let mut tts = builder.0;
+        *self = match tts.len() {
+            0 => TokenStream::empty(),
+            1 => tts.pop().unwrap(),
+            _ => TokenStream::concat_rc_vec(RcVec::new_preserving_capacity(tts)),
+        };
+    }
+}
+
 impl Eq for TokenStream {}

 impl PartialEq<TokenStream> for TokenStream {
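Note on the `RcVec::try_unwrap` branch above: it follows the common "extend in place when uniquely owned, otherwise copy" pattern. A minimal, self-contained sketch of the same idea, using `std::rc::Rc<Vec<T>>` as a stand-in for the compiler-internal `RcVec` (the function name `extend_shared` and the element type are illustrative only, not part of this diff):

    use std::rc::Rc;

    // Uniquely-owned fast path vs. shared copy path.
    fn extend_shared(stream: Rc<Vec<u32>>, extra: &[u32]) -> Rc<Vec<u32>> {
        let mut vec = match Rc::try_unwrap(stream) {
            // Refcount is 1: take ownership and reuse the existing allocation.
            Ok(vec) => vec,
            // Shared with other owners: clone the contents and extend the copy.
            Err(rc) => (*rc).clone(),
        };
        vec.extend_from_slice(extra);
        Rc::new(vec)
    }

    fn main() {
        let unique = Rc::new(vec![1, 2]);
        assert_eq!(*extend_shared(unique, &[3]), [1, 2, 3]);

        let shared = Rc::new(vec![1, 2]);
        let alias = Rc::clone(&shared);
        assert_eq!(*extend_shared(shared, &[3]), [1, 2, 3]);
        assert_eq!(*alias, [1, 2]); // the aliased original is left untouched
    }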
@@ -287,11 +341,11 @@ impl TokenStream {
         match streams.len() {
             0 => TokenStream::empty(),
             1 => streams.pop().unwrap(),
-            _ => TokenStream::concat_rc_slice(RcSlice::new(streams)),
+            _ => TokenStream::concat_rc_vec(RcVec::new(streams)),
         }
     }

-    fn concat_rc_slice(streams: RcSlice<TokenStream>) -> TokenStream {
+    fn concat_rc_vec(streams: RcVec<TokenStream>) -> TokenStream {
         TokenStream { kind: TokenStreamKind::Stream(streams) }
     }

@@ -434,7 +488,7 @@ impl TokenStreamBuilder {
             match len {
                 1 => {}
                 2 => self.0.push(streams[0].clone().into()),
-                _ => self.0.push(TokenStream::concat_rc_slice(streams.sub_slice(0 .. len - 1))),
+                _ => self.0.push(TokenStream::concat_rc_vec(streams.sub_slice(0 .. len - 1))),
             }
             self.push_all_but_last_tree(&streams[len - 1])
         }
@@ -446,7 +500,7 @@ impl TokenStreamBuilder {
             match len {
                 1 => {}
                 2 => self.0.push(streams[1].clone().into()),
-                _ => self.0.push(TokenStream::concat_rc_slice(streams.sub_slice(1 .. len))),
+                _ => self.0.push(TokenStream::concat_rc_vec(streams.sub_slice(1 .. len))),
             }
             self.push_all_but_first_tree(&streams[0])
         }
@@ -466,13 +520,13 @@ enum CursorKind {

 #[derive(Clone)]
 struct StreamCursor {
-    stream: RcSlice<TokenStream>,
+    stream: RcVec<TokenStream>,
     index: usize,
-    stack: Vec<(RcSlice<TokenStream>, usize)>,
+    stack: Vec<(RcVec<TokenStream>, usize)>,
 }

 impl StreamCursor {
-    fn new(stream: RcSlice<TokenStream>) -> Self {
+    fn new(stream: RcVec<TokenStream>) -> Self {
         StreamCursor { stream: stream, index: 0, stack: Vec::new() }
     }

@@ -495,7 +549,7 @@ impl StreamCursor {
         }
     }

-    fn insert(&mut self, stream: RcSlice<TokenStream>) {
+    fn insert(&mut self, stream: RcVec<TokenStream>) {
         self.stack.push((mem::replace(&mut self.stream, stream),
                          mem::replace(&mut self.index, 0)));
     }
@@ -557,7 +611,7 @@ impl Cursor {
             CursorKind::Empty => TokenStream::empty(),
             CursorKind::Tree(ref tree, _) => tree.clone().into(),
             CursorKind::JointTree(ref tree, _) => tree.clone().joint(),
-            CursorKind::Stream(ref cursor) => TokenStream::concat_rc_slice({
+            CursorKind::Stream(ref cursor) => TokenStream::concat_rc_vec({
                 cursor.stack.get(0).cloned().map(|(stream, _)| stream)
                     .unwrap_or(cursor.stream.clone())
             }),
@@ -607,22 +661,22 @@ impl Cursor {
 /// `ThinTokenStream` is smaller, but needs to allocate to represent a single `TokenTree`.
 /// We must use `ThinTokenStream` in `TokenTree::Delimited` to avoid infinite size due to recursion.
 #[derive(Debug, Clone)]
-pub struct ThinTokenStream(Option<RcSlice<TokenStream>>);
+pub struct ThinTokenStream(Option<RcVec<TokenStream>>);

 impl From<TokenStream> for ThinTokenStream {
     fn from(stream: TokenStream) -> ThinTokenStream {
         ThinTokenStream(match stream.kind {
             TokenStreamKind::Empty => None,
-            TokenStreamKind::Tree(tree) => Some(RcSlice::new(vec![tree.into()])),
-            TokenStreamKind::JointTree(tree) => Some(RcSlice::new(vec![tree.joint()])),
+            TokenStreamKind::Tree(tree) => Some(RcVec::new(vec![tree.into()])),
+            TokenStreamKind::JointTree(tree) => Some(RcVec::new(vec![tree.joint()])),
             TokenStreamKind::Stream(stream) => Some(stream),
         })
     }
 }

 impl From<ThinTokenStream> for TokenStream {
     fn from(stream: ThinTokenStream) -> TokenStream {
-        stream.0.map(TokenStream::concat_rc_slice).unwrap_or_else(TokenStream::empty)
+        stream.0.map(TokenStream::concat_rc_vec).unwrap_or_else(TokenStream::empty)
     }
 }

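The size tradeoff described in the doc comment above comes from Rust's niche (null-pointer) optimization: wrapping a non-null pointer type in `Option` costs no extra space, while an enum that stores a tree inline must be at least as large as that tree. A rough illustration with stand-in types (the real `TokenStream`, `ThinTokenStream`, and `RcVec` layouts may differ, so the exact numbers are only indicative):

    use std::mem::size_of;
    use std::rc::Rc;

    // Stand-ins: `FatStream` mimics an enum carrying a tree's payload inline,
    // `ThinStream` mimics the Option-of-pointer representation.
    enum FatStream {
        Empty,
        Tree([usize; 3]), // pretend a token tree is three words
        Stream(Rc<Vec<u64>>),
    }
    struct ThinStream(Option<Rc<Vec<u64>>>);

    fn main() {
        // Option<Rc<_>> reuses the pointer's null niche, so the wrapper is one word.
        assert_eq!(size_of::<ThinStream>(), size_of::<usize>());
        // The inline-tree enum must hold the tree payload plus a discriminant.
        assert!(size_of::<FatStream>() > size_of::<ThinStream>());
        println!("fat = {} bytes, thin = {} bytes",
                 size_of::<FatStream>(), size_of::<ThinStream>());
    }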
@@ -773,4 +827,106 @@ mod tests {
         assert_eq!(stream.trees().count(), 1);
     }

+    #[test]
+    fn test_extend_empty() {
+        with_globals(|| {
+            // Append a token onto an empty token stream.
+            let mut stream = TokenStream::empty();
+            stream.extend(vec![string_to_ts("t")]);
+
+            let expected = string_to_ts("t");
+            assert!(stream.eq_unspanned(&expected));
+        });
+    }
+
+    #[test]
+    fn test_extend_nothing() {
+        with_globals(|| {
+            // Append nothing onto a token stream containing one token.
+            let mut stream = string_to_ts("t");
+            stream.extend(vec![]);
+
+            let expected = string_to_ts("t");
+            assert!(stream.eq_unspanned(&expected));
+        });
+    }
+
+    #[test]
+    fn test_extend_single() {
+        with_globals(|| {
+            // Append a token onto token stream containing a single token.
+            let mut stream = string_to_ts("t1");
+            stream.extend(vec![string_to_ts("t2")]);
+
+            let expected = string_to_ts("t1 t2");
+            assert!(stream.eq_unspanned(&expected));
+        });
+    }
+
+    #[test]
+    fn test_extend_in_place() {
+        with_globals(|| {
+            // Append a token onto token stream containing a reference counted
+            // vec of tokens. The token stream has a reference count of 1 so
+            // this can happen in place.
+            let mut stream = string_to_ts("t1 t2");
+            stream.extend(vec![string_to_ts("t3")]);
+
+            let expected = string_to_ts("t1 t2 t3");
+            assert!(stream.eq_unspanned(&expected));
+        });
+    }
+
+    #[test]
+    fn test_extend_copy() {
+        with_globals(|| {
+            // Append a token onto token stream containing a reference counted
+            // vec of tokens. The token stream is shared so the extend takes
+            // place on a copy.
+            let mut stream = string_to_ts("t1 t2");
+            let _incref = stream.clone();
+            stream.extend(vec![string_to_ts("t3")]);
+
+            let expected = string_to_ts("t1 t2 t3");
+            assert!(stream.eq_unspanned(&expected));
+        });
+    }
+
+    #[test]
+    fn test_extend_no_join() {
+        with_globals(|| {
+            let first = TokenTree::Token(DUMMY_SP, Token::Dot);
+            let second = TokenTree::Token(DUMMY_SP, Token::Dot);
+
+            // Append a dot onto a token stream containing a dot, but do not
+            // join them.
+            let mut stream = TokenStream::from(first);
+            stream.extend(vec![TokenStream::from(second)]);
+
+            let expected = string_to_ts(". .");
+            assert!(stream.eq_unspanned(&expected));
+
+            let unexpected = string_to_ts("..");
+            assert!(!stream.eq_unspanned(&unexpected));
+        });
+    }
+
+    #[test]
+    fn test_extend_join() {
+        with_globals(|| {
+            let first = TokenTree::Token(DUMMY_SP, Token::Dot).joint();
+            let second = TokenTree::Token(DUMMY_SP, Token::Dot);
+
+            // Append a dot onto a token stream containing a dot, forming a
+            // dotdot.
+            let mut stream = first;
+            stream.extend(vec![TokenStream::from(second)]);
+
+            let expected = string_to_ts("..");
+            assert!(stream.eq_unspanned(&expected));
+
+            let unexpected = string_to_ts(". .");
+            assert!(!stream.eq_unspanned(&unexpected));
+        });
+    }
 }