git.lizzy.rs Git - rust.git/commitdiff
syntax: Switch function parameter order in `TokenTree::token`
authorVadim Petrochenkov <vadim.petrochenkov@gmail.com>
Wed, 5 Jun 2019 10:25:26 +0000 (13:25 +0300)
committerVadim Petrochenkov <vadim.petrochenkov@gmail.com>
Thu, 6 Jun 2019 11:04:02 +0000 (14:04 +0300)
15 files changed:
src/libsyntax/attr/mod.rs
src/libsyntax/ext/base.rs
src/libsyntax/ext/expand.rs
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/ext/tt/quoted.rs
src/libsyntax/ext/tt/transcribe.rs
src/libsyntax/parse/attr.rs
src/libsyntax/parse/literal.rs
src/libsyntax/parse/mod.rs
src/libsyntax/parse/parser.rs
src/libsyntax/parse/token.rs
src/libsyntax/tokenstream.rs
src/libsyntax_ext/assert.rs
src/libsyntax_ext/deriving/custom.rs
src/libsyntax_ext/proc_macro_server.rs

index cc16bac320d0ea66d64248e07a11809689ce57fa..8c9bed57bfdfd4ee0a98430c4a01b2665001c003 100644 (file)
@@ -465,10 +465,10 @@ fn tokens(&self) -> TokenStream {
                 let mod_sep_span = Span::new(last_pos,
                                              segment.ident.span.lo(),
                                              segment.ident.span.ctxt());
-                idents.push(TokenTree::token(mod_sep_span, token::ModSep).into());
+                idents.push(TokenTree::token(token::ModSep, mod_sep_span).into());
             }
-            idents.push(TokenTree::token(segment.ident.span,
-                                         TokenKind::from_ast_ident(segment.ident)).into());
+            idents.push(TokenTree::token(TokenKind::from_ast_ident(segment.ident),
+                                         segment.ident.span).into());
             last_pos = segment.ident.span.hi();
         }
         self.node.tokens(self.span).append_to_tree_and_joint_vec(&mut idents);
@@ -532,7 +532,7 @@ pub fn tokens(&self, span: Span) -> TokenStream {
         match *self {
             MetaItemKind::Word => TokenStream::empty(),
             MetaItemKind::NameValue(ref lit) => {
-                let mut vec = vec![TokenTree::token(span, token::Eq).into()];
+                let mut vec = vec![TokenTree::token(token::Eq, span).into()];
                 lit.tokens().append_to_tree_and_joint_vec(&mut vec);
                 TokenStream::new(vec)
             }
@@ -540,7 +540,7 @@ pub fn tokens(&self, span: Span) -> TokenStream {
                 let mut tokens = Vec::new();
                 for (i, item) in list.iter().enumerate() {
                     if i > 0 {
-                        tokens.push(TokenTree::token(span, token::Comma).into());
+                        tokens.push(TokenTree::token(token::Comma, span).into());
                     }
                     item.tokens().append_to_tree_and_joint_vec(&mut tokens);
                 }
index 3b24837e365986e91777017def8db928411784d8..61c736662c71e3d09db17455f3044a9e8ac0c3ea 100644 (file)
@@ -268,8 +268,9 @@ fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) {
                 if let tokenstream::TokenTree::Token(token) = tt {
                     if let token::Interpolated(nt) = &token.kind {
                         if let token::NtIdent(ident, is_raw) = **nt {
-                            *tt = tokenstream::TokenTree::token(ident.span,
-                                                                token::Ident(ident.name, is_raw));
+                            *tt = tokenstream::TokenTree::token(
+                                token::Ident(ident.name, is_raw), ident.span
+                            );
                         }
                     }
                 }
index 4396b9be9bbb096b59bd812cfad01426c4e54607..7cd847eac469039e7e4b0c1c7157db09b673645a 100644 (file)
@@ -585,14 +585,14 @@ fn expand_attr_invoc(&mut self,
             }
             AttrProcMacro(ref mac, ..) => {
                 self.gate_proc_macro_attr_item(attr.span, &item);
-                let item_tok = TokenTree::token(DUMMY_SP, token::Interpolated(Lrc::new(match item {
+                let item_tok = TokenTree::token(token::Interpolated(Lrc::new(match item {
                     Annotatable::Item(item) => token::NtItem(item),
                     Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
                     Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),
                     Annotatable::ForeignItem(item) => token::NtForeignItem(item.into_inner()),
                     Annotatable::Stmt(stmt) => token::NtStmt(stmt.into_inner()),
                     Annotatable::Expr(expr) => token::NtExpr(expr),
-                }))).into();
+                })), DUMMY_SP).into();
                 let input = self.extract_proc_macro_attr_input(attr.tokens, attr.span);
                 let tok_result = mac.expand(self.cx, attr.span, input, item_tok);
                 let res = self.parse_ast_fragment(tok_result, invoc.fragment_kind,
index 77f53c35b0b5ed09d370a4596c29c38691aa592a..d25339a78f43c1f56bd243b6dabf6326c264f4e0 100644 (file)
@@ -270,7 +270,7 @@ pub fn compile(
         quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
             tts: vec![
                 quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
-                quoted::TokenTree::token(DUMMY_SP, token::FatArrow),
+                quoted::TokenTree::token(token::FatArrow, DUMMY_SP),
                 quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
             ],
             separator: Some(if body.legacy { token::Semi } else { token::Comma }),
@@ -279,7 +279,7 @@ pub fn compile(
         })),
         // to phase into semicolon-termination instead of semicolon-separation
         quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
-            tts: vec![quoted::TokenTree::token(DUMMY_SP, token::Semi)],
+            tts: vec![quoted::TokenTree::token(token::Semi, DUMMY_SP)],
             separator: None,
             op: quoted::KleeneOp::ZeroOrMore,
             num_captures: 0
@@ -613,7 +613,7 @@ fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
 
                         if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                         subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
+                            first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
                         }
 
                         // Reverse scan: Sequence comes before `first`.
@@ -663,7 +663,7 @@ fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet {
 
                             if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                             subfirst.maybe_empty) {
-                                first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
+                                first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
                             }
 
                             assert!(first.maybe_empty);
@@ -869,7 +869,7 @@ fn check_matcher_core(sess: &ParseSess,
                 let mut new;
                 let my_suffix = if let Some(ref u) = seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TokenTree::token(sp.entire(), u.clone()));
+                    new.add_one_maybe(TokenTree::token(u.clone(), sp.entire()));
                     &new
                 } else {
                     &suffix_first
index 582d87b911dbcbf7f94a45287cd815537944c4f1..b4672fb4a58b74534bdd1b13f56657895b3820e3 100644 (file)
@@ -40,7 +40,7 @@ pub fn open_tt(&self, span: Span) -> TokenTree {
         } else {
             span.with_lo(span.lo() + BytePos(self.delim.len() as u32))
         };
-        TokenTree::token(open_span, self.open_token())
+        TokenTree::token(self.open_token(), open_span)
     }
 
     /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
@@ -50,7 +50,7 @@ pub fn close_tt(&self, span: Span) -> TokenTree {
         } else {
             span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
         };
-        TokenTree::token(close_span, self.close_token())
+        TokenTree::token(self.close_token(), close_span)
     }
 }
 
@@ -153,7 +153,7 @@ pub fn span(&self) -> Span {
         }
     }
 
-    crate fn token(span: Span, kind: TokenKind) -> TokenTree {
+    crate fn token(kind: TokenKind, span: Span) -> TokenTree {
         TokenTree::Token(Token::new(kind, span))
     }
 }
@@ -325,7 +325,7 @@ fn parse_tree<I>(
                 let (ident, is_raw) = token.ident().unwrap();
                 let span = ident.span.with_lo(span.lo());
                 if ident.name == kw::Crate && !is_raw {
-                    TokenTree::token(span, token::Ident(kw::DollarCrate, is_raw))
+                    TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
                 } else {
                     TokenTree::MetaVar(span, ident)
                 }
@@ -342,7 +342,7 @@ fn parse_tree<I>(
             }
 
             // There are no more tokens. Just return the `$` we already have.
-            None => TokenTree::token(span, token::Dollar),
+            None => TokenTree::token(token::Dollar, span),
         },
 
         // `tree` is an arbitrary token. Keep it.
index 1dbb0638df195968f7045218697c8a38d35abc41..b382893ce4ece0aff8ee02cf00064944c5379357 100644 (file)
@@ -119,7 +119,7 @@ pub fn transcribe(
                             Some((tt, _)) => tt.span(),
                             None => DUMMY_SP,
                         };
-                        result.push(TokenTree::token(prev_span, sep).into());
+                        result.push(TokenTree::token(sep, prev_span).into());
                     }
                     continue;
                 }
@@ -225,7 +225,7 @@ pub fn transcribe(
                             result.push(tt.clone().into());
                         } else {
                             sp = sp.apply_mark(cx.current_expansion.mark);
-                            let token = TokenTree::token(sp, token::Interpolated(nt.clone()));
+                            let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
                             result.push(token.into());
                         }
                     } else {
@@ -241,8 +241,8 @@ pub fn transcribe(
                     let ident =
                         Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
                     sp = sp.apply_mark(cx.current_expansion.mark);
-                    result.push(TokenTree::token(sp, token::Dollar).into());
-                    result.push(TokenTree::token(sp, token::TokenKind::from_ast_ident(ident)).into());
+                    result.push(TokenTree::token(token::Dollar, sp).into());
+                    result.push(TokenTree::token(token::TokenKind::from_ast_ident(ident), sp).into());
                 }
             }
 
index 8040168a67ec3bf4ba9c7abab063dab79e524c7d..d83b76f4d236610be39f7408e1bea25d4dece762 100644 (file)
@@ -157,7 +157,7 @@ fn parse_attribute_with_inner_parse_policy(&mut self,
                self.check(&token::OpenDelim(DelimToken::Brace)) {
                    self.parse_token_tree().into()
             } else if self.eat(&token::Eq) {
-                let eq = TokenTree::token(self.prev_span, token::Eq);
+                let eq = TokenTree::token(token::Eq, self.prev_span);
                 let mut is_interpolated_expr = false;
                 if let token::Interpolated(nt) = &self.token.kind {
                     if let token::NtExpr(..) = **nt {
index 7b213655abdbf35eca8184280800e735acb20b44..4979a4dd27f4a39fecda7c18a9511b88f6dc38c5 100644 (file)
@@ -261,7 +261,7 @@ pub fn from_lit_kind(node: LitKind, span: Span) -> Lit {
             token::Bool => token::Ident(self.token.symbol, false),
             _ => token::Literal(self.token),
         };
-        TokenTree::token(self.span, token).into()
+        TokenTree::token(token, self.span).into()
     }
 }
 
index 2b82767d7e91df7164c5daa381e1a5e99332d1ac..8d3518d0373686b8eb28b0c0e25234803e670cbf 100644 (file)
@@ -476,23 +476,23 @@ fn string_to_tts_1() {
             let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
 
             let expected = TokenStream::new(vec![
-                TokenTree::token(sp(0, 2), token::Ident(kw::Fn, false)).into(),
-                TokenTree::token(sp(3, 4), token::Ident(Name::intern("a"), false)).into(),
+                TokenTree::token(token::Ident(kw::Fn, false), sp(0, 2)).into(),
+                TokenTree::token(token::Ident(Name::intern("a"), false), sp(3, 4)).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
                     token::DelimToken::Paren,
                     TokenStream::new(vec![
-                        TokenTree::token(sp(6, 7), token::Ident(Name::intern("b"), false)).into(),
-                        TokenTree::token(sp(8, 9), token::Colon).into(),
-                        TokenTree::token(sp(10, 13), token::Ident(sym::i32, false)).into(),
+                        TokenTree::token(token::Ident(Name::intern("b"), false), sp(6, 7)).into(),
+                        TokenTree::token(token::Colon, sp(8, 9)).into(),
+                        TokenTree::token(token::Ident(sym::i32, false), sp(10, 13)).into(),
                     ]).into(),
                 ).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
                     token::DelimToken::Brace,
                     TokenStream::new(vec![
-                        TokenTree::token(sp(17, 18), token::Ident(Name::intern("b"), false)).into(),
-                        TokenTree::token(sp(18, 19), token::Semi).into(),
+                        TokenTree::token(token::Ident(Name::intern("b"), false), sp(17, 18)).into(),
+                        TokenTree::token(token::Semi, sp(18, 19)).into(),
                     ]).into(),
                 ).into()
             ]);
index 57a49d1524d81cc5a412db136f971ed2a5f72db3..e9e908eb858c876f3a8fdd0f271e734354999a28 100644 (file)
@@ -362,11 +362,11 @@ fn next_desugared(&mut self) -> Token {
             delim_span,
             token::Bracket,
             [
-                TokenTree::token(sp, token::Ident(sym::doc, false)),
-                TokenTree::token(sp, token::Eq),
-                TokenTree::token(sp, token::TokenKind::lit(
+                TokenTree::token(token::Ident(sym::doc, false), sp),
+                TokenTree::token(token::Eq, sp),
+                TokenTree::token(token::TokenKind::lit(
                     token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
-                )),
+                ), sp),
             ]
             .iter().cloned().collect::<TokenStream>().into(),
         );
@@ -375,10 +375,10 @@ fn next_desugared(&mut self) -> Token {
             delim_span,
             token::NoDelim,
             &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
-                [TokenTree::token(sp, token::Pound), TokenTree::token(sp, token::Not), body]
+                [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body]
                     .iter().cloned().collect::<TokenStream>().into()
             } else {
-                [TokenTree::token(sp, token::Pound), body]
+                [TokenTree::token(token::Pound, sp), body]
                     .iter().cloned().collect::<TokenStream>().into()
             },
         )));
@@ -4344,7 +4344,7 @@ fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span)
                     };
                     TokenStream::new(vec![
                         args.into(),
-                        TokenTree::token(token_lo.to(self.prev_span), token::FatArrow).into(),
+                        TokenTree::token(token::FatArrow, token_lo.to(self.prev_span)).into(),
                         body.into(),
                     ])
                 } else {
index ba7c88e700074fdea4d1f6939826a9a5540665ee..58c30a07e3e1bc7f21dad99dcbc35a5f38cc925d 100644 (file)
@@ -763,10 +763,10 @@ pub fn to_tokenstream(&self, sess: &ParseSess, span: Span) -> TokenStream {
                 prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
             }
             Nonterminal::NtIdent(ident, is_raw) => {
-                Some(TokenTree::token(ident.span, Ident(ident.name, is_raw)).into())
+                Some(TokenTree::token(Ident(ident.name, is_raw), ident.span).into())
             }
             Nonterminal::NtLifetime(ident) => {
-                Some(TokenTree::token(ident.span, Lifetime(ident.name)).into())
+                Some(TokenTree::token(Lifetime(ident.name), ident.span).into())
             }
             Nonterminal::NtTT(ref tt) => {
                 Some(tt.clone().into())
@@ -852,7 +852,7 @@ fn prepend_attrs(sess: &ParseSess,
         if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
             let ident = attr.path.segments[0].ident;
             let token = Ident(ident.name, ident.as_str().starts_with("r#"));
-            brackets.push(tokenstream::TokenTree::token(ident.span, token));
+            brackets.push(tokenstream::TokenTree::token(token, ident.span));
 
         // ... and for more complicated paths, fall back to a reparse hack that
         // should eventually be removed.
@@ -866,7 +866,7 @@ fn prepend_attrs(sess: &ParseSess,
         // The span we list here for `#` and for `[ ... ]` are both wrong in
         // that it encompasses more than each token, but it hopefully is "good
         // enough" for now at least.
-        builder.push(tokenstream::TokenTree::token(attr.span, Pound));
+        builder.push(tokenstream::TokenTree::token(Pound, attr.span));
         let delim_span = DelimSpan::from_single(attr.span);
         builder.push(tokenstream::TokenTree::Delimited(
             delim_span, DelimToken::Bracket, brackets.build().into()));
index bb80c1a1b3f33353c6373bddacd93bb6cc7ba198..b4643229285cde97a070333c5b723e2e9f1cdaf9 100644 (file)
@@ -138,7 +138,7 @@ pub fn joint(self) -> TokenStream {
         TokenStream::new(vec![(self, Joint)])
     }
 
-    pub fn token(span: Span, kind: TokenKind) -> TokenTree {
+    pub fn token(kind: TokenKind, span: Span) -> TokenTree {
         TokenTree::Token(Token::new(kind, span))
     }
 
@@ -149,7 +149,7 @@ pub fn open_tt(span: Span, delim: DelimToken) -> TokenTree {
         } else {
             span.with_hi(span.lo() + BytePos(delim.len() as u32))
         };
-        TokenTree::token(open_span, token::OpenDelim(delim))
+        TokenTree::token(token::OpenDelim(delim), open_span)
     }
 
     /// Returns the closing delimiter as a token tree.
@@ -159,7 +159,7 @@ pub fn close_tt(span: Span, delim: DelimToken) -> TokenTree {
         } else {
             span.with_lo(span.hi() - BytePos(delim.len() as u32))
         };
-        TokenTree::token(close_span, token::CloseDelim(delim))
+        TokenTree::token(token::CloseDelim(delim), close_span)
     }
 }
 
@@ -212,7 +212,7 @@ pub(crate) fn add_comma(&self) -> Option<(TokenStream, Span)> {
                         _ => continue,
                     };
                     let sp = sp.shrink_to_hi();
-                    let comma = (TokenTree::token(sp, token::Comma), NonJoint);
+                    let comma = (TokenTree::token(token::Comma, sp), NonJoint);
                     suggestion = Some((pos, comma, sp));
                 }
             }
@@ -433,7 +433,7 @@ pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
                     let last_stream = self.0.pop().unwrap();
                     self.push_all_but_last_tree(&last_stream);
                     let glued_span = last_token.span.to(token.span);
-                    let glued_tt = TokenTree::token(glued_span, glued_tok);
+                    let glued_tt = TokenTree::token(glued_tok, glued_span);
                     let glued_tokenstream = TokenStream::new(vec![(glued_tt, is_joint)]);
                     self.0.push(glued_tokenstream);
                     self.push_all_but_first_tree(&stream);
@@ -660,7 +660,7 @@ fn test_is_empty() {
         with_default_globals(|| {
             let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
             let test1: TokenStream =
-                TokenTree::token(sp(0, 1), token::Ident(Name::intern("a"), false)).into();
+                TokenTree::token(token::Ident(Name::intern("a"), false), sp(0, 1)).into();
             let test2 = string_to_ts("foo(bar::baz)");
 
             assert_eq!(test0.is_empty(), true);
@@ -673,9 +673,9 @@ fn test_is_empty() {
     fn test_dotdotdot() {
         with_default_globals(|| {
             let mut builder = TokenStreamBuilder::new();
-            builder.push(TokenTree::token(sp(0, 1), token::Dot).joint());
-            builder.push(TokenTree::token(sp(1, 2), token::Dot).joint());
-            builder.push(TokenTree::token(sp(2, 3), token::Dot));
+            builder.push(TokenTree::token(token::Dot, sp(0, 1)).joint());
+            builder.push(TokenTree::token(token::Dot, sp(1, 2)).joint());
+            builder.push(TokenTree::token(token::Dot, sp(2, 3)));
             let stream = builder.build();
             assert!(stream.eq_unspanned(&string_to_ts("...")));
             assert_eq!(stream.trees().count(), 1);
index e5e422c4d9c778fd8607da94d2956b97ce7eeed9..ce1e3276af39bacc453d6da2d18301fb8cea7d5f 100644 (file)
@@ -30,11 +30,11 @@ pub fn expand_assert<'cx>(
         path: Path::from_ident(Ident::new(sym::panic, sp)),
         tts: custom_message.unwrap_or_else(|| {
             TokenStream::from(TokenTree::token(
-                DUMMY_SP,
                 TokenKind::lit(token::Str, Symbol::intern(&format!(
                     "assertion failed: {}",
                     pprust::expr_to_string(&cond_expr).escape_debug()
                 )), None),
+                DUMMY_SP,
             ))
         }).into(),
         delim: MacDelimiter::Parenthesis,
index 3deab97db88c0fe28e255e59412f3724c90e1d7e..98465d75e4680e9d31beb6665f871e22e0ade22b 100644 (file)
@@ -69,7 +69,7 @@ fn expand(&self,
         MarkAttrs(&self.attrs).visit_item(&item);
 
         let token = token::Interpolated(Lrc::new(token::NtItem(item)));
-        let input = tokenstream::TokenTree::token(DUMMY_SP, token).into();
+        let input = tokenstream::TokenTree::token(token, DUMMY_SP).into();
 
         let server = proc_macro_server::Rustc::new(ecx);
         let stream = match self.client.run(&EXEC_STRATEGY, server, input) {
index ff2835c70f75e84df1aa5d11b03f8dbedaa83fb1..00a420d3fa89922c9cd1758a5a99c9163e3700ac 100644 (file)
@@ -163,7 +163,7 @@ macro_rules! op {
                     TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
                 ]
                 .into_iter()
-                .map(|kind| tokenstream::TokenTree::token(span, kind))
+                .map(|kind| tokenstream::TokenTree::token(kind, span))
                 .collect();
                 stack.push(TokenTree::Group(Group {
                     delimiter: Delimiter::Bracket,
@@ -210,7 +210,7 @@ fn to_internal(self) -> TokenStream {
                 .into();
             }
             TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
-                return tokenstream::TokenTree::token(span, Ident(sym, is_raw)).into();
+                return tokenstream::TokenTree::token(Ident(sym, is_raw), span).into();
             }
             TokenTree::Literal(self::Literal {
                 lit: token::Lit { kind: token::Integer, symbol, suffix },
@@ -219,8 +219,8 @@ fn to_internal(self) -> TokenStream {
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let integer = TokenKind::lit(token::Integer, symbol, suffix);
-                let a = tokenstream::TokenTree::token(span, minus);
-                let b = tokenstream::TokenTree::token(span, integer);
+                let a = tokenstream::TokenTree::token(minus, span);
+                let b = tokenstream::TokenTree::token(integer, span);
                 return vec![a, b].into_iter().collect();
             }
             TokenTree::Literal(self::Literal {
@@ -230,12 +230,12 @@ fn to_internal(self) -> TokenStream {
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let float = TokenKind::lit(token::Float, symbol, suffix);
-                let a = tokenstream::TokenTree::token(span, minus);
-                let b = tokenstream::TokenTree::token(span, float);
+                let a = tokenstream::TokenTree::token(minus, span);
+                let b = tokenstream::TokenTree::token(float, span);
                 return vec![a, b].into_iter().collect();
             }
             TokenTree::Literal(self::Literal { lit, span }) => {
-                return tokenstream::TokenTree::token(span, Literal(lit)).into()
+                return tokenstream::TokenTree::token(Literal(lit), span).into()
             }
         };
 
@@ -265,7 +265,7 @@ fn to_internal(self) -> TokenStream {
             _ => unreachable!(),
         };
 
-        let tree = tokenstream::TokenTree::token(span, kind);
+        let tree = tokenstream::TokenTree::token(kind, span);
         TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
     }
 }