git.lizzy.rs Git - rust.git/commitdiff
Rename `TokenStream::concat` and remove `TokenStream::concat_rc_vec`.
author: Nicholas Nethercote <nnethercote@mozilla.com>
Tue, 11 Dec 2018 23:01:08 +0000 (10:01 +1100)
committer: Nicholas Nethercote <nnethercote@mozilla.com>
Wed, 12 Dec 2018 09:36:00 +0000 (20:36 +1100)
`TokenStream::new` is a better name for the former, and the latter is
now just equivalent to `TokenStream::Stream`.

src/libsyntax/attr/mod.rs
src/libsyntax/ext/quote.rs
src/libsyntax/ext/tt/transcribe.rs
src/libsyntax/parse/attr.rs
src/libsyntax/parse/lexer/tokentrees.rs
src/libsyntax/parse/mod.rs
src/libsyntax/parse/parser.rs
src/libsyntax/tokenstream.rs

index 7723c15a266f197ea3ce411f440bbb3af63091fc..73cbe49f43b049ee955b140d0e3e547e3ed1eb2f 100644 (file)
@@ -483,7 +483,7 @@ fn tokens(&self) -> TokenStream {
             last_pos = segment.ident.span.hi();
         }
         idents.push(self.node.tokens(self.span));
-        TokenStream::concat(idents)
+        TokenStream::new(idents)
     }
 
     fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
@@ -539,7 +539,7 @@ pub fn tokens(&self, span: Span) -> TokenStream {
         match *self {
             MetaItemKind::Word => TokenStream::empty(),
             MetaItemKind::NameValue(ref lit) => {
-                TokenStream::concat(vec![TokenTree::Token(span, Token::Eq).into(), lit.tokens()])
+                TokenStream::new(vec![TokenTree::Token(span, Token::Eq).into(), lit.tokens()])
             }
             MetaItemKind::List(ref list) => {
                 let mut tokens = Vec::new();
@@ -552,7 +552,7 @@ pub fn tokens(&self, span: Span) -> TokenStream {
                 TokenTree::Delimited(
                     DelimSpan::from_single(span),
                     token::Paren,
-                    TokenStream::concat(tokens).into(),
+                    TokenStream::new(tokens).into(),
                 ).into()
             }
         }
index c3497a17806b1797ed86b02801331852b8ab9633..5820b49ab621636bb19735b3e4ae045121ad6198 100644 (file)
@@ -247,7 +247,7 @@ fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
 
             let delim_span = DelimSpan::from_single(self.span);
             r.push(TokenTree::Delimited(
-                delim_span, token::Bracket, TokenStream::concat(inner).into()
+                delim_span, token::Bracket, TokenStream::new(inner).into()
             ));
             r
         }
index a76779ffebdc003dd39f420920527220f0e59705..a63abd40495136e457d055dca0fb088837973488 100644 (file)
@@ -103,12 +103,12 @@ pub fn transcribe(cx: &ExtCtxt,
                 }
                 Frame::Delimited { forest, span, .. } => {
                     if result_stack.is_empty() {
-                        return TokenStream::concat(result);
+                        return TokenStream::new(result);
                     }
                     let tree = TokenTree::Delimited(
                         span,
                         forest.delim,
-                        TokenStream::concat(result).into(),
+                        TokenStream::new(result).into(),
                     );
                     result = result_stack.pop().unwrap();
                     result.push(tree.into());
index 4ff6048e821787693269ef053f496fe4ad50ef44..1bd0656846bcea90ebddb47a26b56e74e9ff06d5 100644 (file)
@@ -170,7 +170,7 @@ fn parse_attribute_with_inner_parse_policy(&mut self,
                     token::CloseDelim(_) | token::Eof => self.unexpected()?,
                     _ => self.parse_token_tree(),
                 };
-                TokenStream::concat(vec![eq.into(), tree.into()])
+                TokenStream::new(vec![eq.into(), tree.into()])
             } else {
                 TokenStream::empty()
             };
index 86c87cf898d09615eabcf1c990e6f508648aa98f..0906c25cab36103e9d38ec30541d6eaabba1ad3b 100644 (file)
@@ -22,7 +22,7 @@ impl<'a> StringReader<'a> {
             tts.push(self.parse_token_tree()?);
         }
 
-        Ok(TokenStream::concat(tts))
+        Ok(TokenStream::new(tts))
     }
 
     // Parse a stream of tokens into a list of `TokenTree`s, up to a `CloseDelim`.
@@ -30,14 +30,14 @@ fn parse_token_trees_until_close_delim(&mut self) -> TokenStream {
         let mut tts = vec![];
         loop {
             if let token::CloseDelim(..) = self.token {
-                return TokenStream::concat(tts);
+                return TokenStream::new(tts);
             }
 
             match self.parse_token_tree() {
                 Ok(tree) => tts.push(tree),
                 Err(mut e) => {
                     e.emit();
-                    return TokenStream::concat(tts);
+                    return TokenStream::new(tts);
                 }
             }
         }
index eb71003d3d0cfbf27f90dc992e6a1a41d520f7c5..a1685d537c8bb04b5571a784cafb6118e6a0f6a9 100644 (file)
@@ -842,13 +842,13 @@ fn string_to_tts_1() {
         with_globals(|| {
             let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
 
-            let expected = TokenStream::concat(vec![
+            let expected = TokenStream::new(vec![
                 TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
                 TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
                     token::DelimToken::Paren,
-                    TokenStream::concat(vec![
+                    TokenStream::new(vec![
                         TokenTree::Token(sp(6, 7),
                                          token::Ident(Ident::from_str("b"), false)).into(),
                         TokenTree::Token(sp(8, 9), token::Colon).into(),
@@ -859,7 +859,7 @@ fn string_to_tts_1() {
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
                     token::DelimToken::Brace,
-                    TokenStream::concat(vec![
+                    TokenStream::new(vec![
                         TokenTree::Token(sp(17, 18),
                                          token::Ident(Ident::from_str("b"), false)).into(),
                         TokenTree::Token(sp(18, 19), token::Semi).into(),
index ded6da9f3adb8845e00a132e043b1731c613aba7..4e209f580248d7f38ee00303eae03a0240f9a9d0 100644 (file)
@@ -2928,7 +2928,7 @@ pub fn parse_tokens(&mut self) -> TokenStream {
                 _ => result.push(self.parse_token_tree().into()),
             }
         }
-        TokenStream::concat(result)
+        TokenStream::new(result)
     }
 
     /// Parse a prefix-unary-operator expr
@@ -4624,7 +4624,7 @@ fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span)
                         self.unexpected()?;
                         unreachable!()
                     };
-                    TokenStream::concat(vec![
+                    TokenStream::new(vec![
                         args.into(),
                         TokenTree::Token(token_lo.to(self.prev_span), token::FatArrow).into(),
                         body.into(),
index e31bde4082dad0897fcc46c4a159ca617d7dc4b2..c11ef33f931d8ab94a82ba8ceca62795333595f1 100644 (file)
@@ -195,7 +195,7 @@ pub(crate) fn add_comma(&self) -> Option<(TokenStream, Span)> {
                 new_stream.extend_from_slice(parts.0);
                 new_stream.push(comma);
                 new_stream.extend_from_slice(parts.1);
-                return Some((TokenStream::concat(new_stream), sp));
+                return Some((TokenStream::new(new_stream), sp));
             }
         }
         None
@@ -216,7 +216,7 @@ fn from(token: Token) -> TokenStream {
 
 impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
     fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
-        TokenStream::concat(iter.into_iter().map(Into::into).collect::<Vec<_>>())
+        TokenStream::new(iter.into_iter().map(Into::into).collect::<Vec<_>>())
     }
 }
 
@@ -265,7 +265,7 @@ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, iter: I) {
         // Build the resulting token stream. If it contains more than one token,
         // preserve capacity in the vector in anticipation of the caller
         // performing additional calls to extend.
-        *self = TokenStream::concat(builder.0);
+        *self = TokenStream::new(builder.0);
     }
 }
 
@@ -297,18 +297,14 @@ pub fn is_empty(&self) -> bool {
         }
     }
 
-    pub fn concat(mut streams: Vec<TokenStream>) -> TokenStream {
+    pub fn new(mut streams: Vec<TokenStream>) -> TokenStream {
         match streams.len() {
             0 => TokenStream::empty(),
             1 => streams.pop().unwrap(),
-            _ => TokenStream::concat_rc_vec(Lrc::new(streams)),
+            _ => TokenStream::Stream(Lrc::new(streams)),
         }
     }
 
-    fn concat_rc_vec(streams: Lrc<Vec<TokenStream>>) -> TokenStream {
-        TokenStream::Stream(streams)
-    }
-
     pub fn trees(&self) -> Cursor {
         self.clone().into_trees()
     }
@@ -389,7 +385,7 @@ pub fn map_enumerated<F: FnMut(usize, TokenTree) -> TokenTree>(self, mut f: F) -
             });
             i += 1;
         }
-        TokenStream::concat(result)
+        TokenStream::new(result)
     }
 
     pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
@@ -402,7 +398,7 @@ pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
                 _ => unreachable!()
             });
         }
-        TokenStream::concat(result)
+        TokenStream::new(result)
     }
 
     fn first_tree_and_joint(&self) -> Option<(TokenTree, bool)> {
@@ -461,7 +457,7 @@ pub fn add<T: Into<TokenStream>>(mut self, stream: T) -> Self {
     }
 
     pub fn build(self) -> TokenStream {
-        TokenStream::concat(self.0)
+        TokenStream::new(self.0)
     }
 
     fn push_all_but_last_tree(&mut self, stream: &TokenStream) {
@@ -470,7 +466,7 @@ fn push_all_but_last_tree(&mut self, stream: &TokenStream) {
             match len {
                 1 => {}
                 2 => self.0.push(streams[0].clone().into()),
-                _ => self.0.push(TokenStream::concat(streams[0 .. len - 1].to_vec())),
+                _ => self.0.push(TokenStream::new(streams[0 .. len - 1].to_vec())),
             }
             self.push_all_but_last_tree(&streams[len - 1])
         }
@@ -482,7 +478,7 @@ fn push_all_but_first_tree(&mut self, stream: &TokenStream) {
             match len {
                 1 => {}
                 2 => self.0.push(streams[1].clone().into()),
-                _ => self.0.push(TokenStream::concat(streams[1 .. len].to_vec())),
+                _ => self.0.push(TokenStream::new(streams[1 .. len].to_vec())),
             }
             self.push_all_but_first_tree(&streams[0])
         }
@@ -577,7 +573,7 @@ pub fn insert(&mut self, stream: TokenStream) {
             _ if stream.is_empty() => return,
             CursorKind::Empty => *self = stream.trees(),
             CursorKind::Tree(_, consumed) | CursorKind::JointTree(_, consumed) => {
-                *self = TokenStream::concat(vec![self.original_stream(), stream]).trees();
+                *self = TokenStream::new(vec![self.original_stream(), stream]).trees();
                 if consumed {
                     self.next();
                 }
@@ -593,10 +589,10 @@ pub fn original_stream(&self) -> TokenStream {
             CursorKind::Empty => TokenStream::empty(),
             CursorKind::Tree(ref tree, _) => tree.clone().into(),
             CursorKind::JointTree(ref tree, _) => tree.clone().joint(),
-            CursorKind::Stream(ref cursor) => TokenStream::concat_rc_vec({
+            CursorKind::Stream(ref cursor) => TokenStream::Stream(
                 cursor.stack.get(0).cloned().map(|(stream, _)| stream)
                     .unwrap_or_else(|| cursor.stream.clone())
-            }),
+            ),
         }
     }
 
@@ -664,7 +660,7 @@ fn from(stream: TokenStream) -> ThinTokenStream {
 
 impl From<ThinTokenStream> for TokenStream {
     fn from(stream: ThinTokenStream) -> TokenStream {
-        stream.0.map(TokenStream::concat_rc_vec).unwrap_or_else(TokenStream::empty)
+        stream.0.map(TokenStream::Stream).unwrap_or_else(TokenStream::empty)
     }
 }
 
@@ -763,7 +759,7 @@ fn test_concat() {
             let test_res = string_to_ts("foo::bar::baz");
             let test_fst = string_to_ts("foo::bar");
             let test_snd = string_to_ts("::baz");
-            let eq_res = TokenStream::concat(vec![test_fst, test_snd]);
+            let eq_res = TokenStream::new(vec![test_fst, test_snd]);
             assert_eq!(test_res.trees().count(), 5);
             assert_eq!(eq_res.trees().count(), 5);
             assert_eq!(test_res.eq_unspanned(&eq_res), true);