extern crate rustc;
use syntax::codemap::Span;
-use syntax::parse::token::{IDENT, get_ident};
+use syntax::parse::token;
use syntax::ast::{TokenTree, TtToken};
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr};
use syntax::ext::build::AstBuilder; // trait for expr_uint
("I", 1)];
let text = match args {
- [TtToken(_, IDENT(s, _))] => get_ident(s).to_string(),
+ [TtToken(_, token::Ident(s, _))] => token::get_ident(s).to_string(),
_ => {
cx.span_err(sp, "argument should be a single identifier");
return DummyResult::any(sp);
use syntax::ast;
use syntax::ast::Name;
-use syntax::parse::token::*;
+use syntax::parse::token;
use syntax::parse::lexer::TokenAndSpan;
fn parse_token_list(file: &str) -> HashMap<String, Token> {
fn id() -> Token {
- IDENT(ast::Ident { name: Name(0), ctxt: 0, }, false)
+ token::Ident(ast::Ident { name: Name(0), ctxt: 0, }, false)
}
let mut res = HashMap::new();
let num = line.slice_from(eq + 1);
let tok = match val {
- "SHR" => BINOP(SHR),
- "DOLLAR" => DOLLAR,
- "LT" => LT,
- "STAR" => BINOP(STAR),
- "FLOAT_SUFFIX" => id(),
- "INT_SUFFIX" => id(),
- "SHL" => BINOP(SHL),
- "LBRACE" => LBRACE,
- "RARROW" => RARROW,
- "LIT_STR" => LIT_STR(Name(0)),
- "DOTDOT" => DOTDOT,
- "MOD_SEP" => MOD_SEP,
- "DOTDOTDOT" => DOTDOTDOT,
- "NOT" => NOT,
- "AND" => BINOP(AND),
- "LPAREN" => LPAREN,
- "ANDAND" => ANDAND,
- "AT" => AT,
- "LBRACKET" => LBRACKET,
- "LIT_STR_RAW" => LIT_STR_RAW(Name(0), 0),
- "RPAREN" => RPAREN,
- "SLASH" => BINOP(SLASH),
- "COMMA" => COMMA,
- "LIFETIME" => LIFETIME(ast::Ident { name: Name(0), ctxt: 0 }),
- "CARET" => BINOP(CARET),
- "TILDE" => TILDE,
- "IDENT" => id(),
- "PLUS" => BINOP(PLUS),
- "LIT_CHAR" => LIT_CHAR(Name(0)),
- "LIT_BYTE" => LIT_BYTE(Name(0)),
- "EQ" => EQ,
- "RBRACKET" => RBRACKET,
- "COMMENT" => COMMENT,
- "DOC_COMMENT" => DOC_COMMENT(Name(0)),
- "DOT" => DOT,
- "EQEQ" => EQEQ,
- "NE" => NE,
- "GE" => GE,
- "PERCENT" => BINOP(PERCENT),
- "RBRACE" => RBRACE,
- "BINOP" => BINOP(PLUS),
- "POUND" => POUND,
- "OROR" => OROR,
- "LIT_INTEGER" => LIT_INTEGER(Name(0)),
- "BINOPEQ" => BINOPEQ(PLUS),
- "LIT_FLOAT" => LIT_FLOAT(Name(0)),
- "WHITESPACE" => WS,
- "UNDERSCORE" => UNDERSCORE,
- "MINUS" => BINOP(MINUS),
- "SEMI" => SEMI,
- "COLON" => COLON,
- "FAT_ARROW" => FAT_ARROW,
- "OR" => BINOP(OR),
- "GT" => GT,
- "LE" => LE,
- "LIT_BINARY" => LIT_BINARY(Name(0)),
- "LIT_BINARY_RAW" => LIT_BINARY_RAW(Name(0), 0),
- _ => continue
+ "SHR" => token::BinOp(token::Shr),
+ "DOLLAR" => token::Dollar,
+ "LT" => token::Lt,
+ "STAR" => token::BinOp(token::Star),
+ "FLOAT_SUFFIX" => id(),
+ "INT_SUFFIX" => id(),
+ "SHL" => token::BinOp(token::Shl),
+ "LBRACE" => token::LBrace,
+ "RARROW" => token::RArrow,
+ "LIT_STR" => token::LitStr(Name(0)),
+ "DOTDOT" => token::DotDot,
+ "MOD_SEP" => token::ModSep,
+ "DOTDOTDOT" => token::DotDotDot,
+ "NOT" => token::Not,
+ "AND" => token::BinOp(token::And),
+ "LPAREN" => token::LParen,
+ "ANDAND" => token::AndAnd,
+ "AT" => token::At,
+ "LBRACKET" => token::LBracket,
+ "LIT_STR_RAW" => token::LitStrRaw(Name(0), 0),
+ "RPAREN" => token::RParen,
+ "SLASH" => token::BinOp(token::Slash),
+ "COMMA" => token::Comma,
+ "LIFETIME" => token::Lifetime(ast::Ident { name: Name(0), ctxt: 0 }),
+ "CARET" => token::BinOp(token::Caret),
+ "TILDE" => token::Tilde,
+ "IDENT" => id(),
+ "PLUS" => token::BinOp(token::Plus),
+ "LIT_CHAR" => token::LitChar(Name(0)),
+ "LIT_BYTE" => token::LitByte(Name(0)),
+ "EQ" => token::Eq,
+ "RBRACKET" => token::RBracket,
+ "COMMENT" => token::Comment,
+ "DOC_COMMENT" => token::DocComment(Name(0)),
+ "DOT" => token::Dot,
+ "EQEQ" => token::EqEq,
+ "NE" => token::Ne,
+ "GE" => token::Ge,
+ "PERCENT" => token::BinOp(token::Percent),
+ "RBRACE" => token::RBrace,
+ "BINOP" => token::BinOp(token::Plus),
+ "POUND" => token::Pound,
+ "OROR" => token::OrOr,
+ "LIT_INTEGER" => token::LitInteger(Name(0)),
+ "BINOPEQ" => token::BinOpEq(token::Plus),
+ "LIT_FLOAT" => token::LitFloat(Name(0)),
+ "WHITESPACE" => token::Whitespace,
+ "UNDERSCORE" => token::Underscore,
+ "MINUS" => token::BinOp(token::Minus),
+ "SEMI" => token::Semi,
+ "COLON" => token::Colon,
+ "FAT_ARROW" => token::FatArrow,
+ "OR" => token::BinOp(token::Or),
+ "GT" => token::Gt,
+ "LE" => token::Le,
+ "LIT_BINARY" => token::LitBinary(Name(0)),
+ "LIT_BINARY_RAW" => token::LitBinaryRaw(Name(0), 0),
+ _ => continue,
};
res.insert(num.to_string(), tok);
res
}
-fn str_to_binop(s: &str) -> BinOp {
+fn str_to_binop(s: &str) -> token::BinOpToken {
match s {
- "+" => PLUS,
- "/" => SLASH,
- "-" => MINUS,
- "*" => STAR,
- "%" => PERCENT,
- "^" => CARET,
- "&" => AND,
- "|" => OR,
- "<<" => SHL,
- ">>" => SHR,
- _ => fail!("Bad binop str `{}`", s)
+ "+" => token::Plus,
+ "/" => token::Slash,
+ "-" => token::Minus,
+ "*" => token::Star,
+ "%" => token::Percent,
+ "^" => token::Caret,
+ "&" => token::And,
+ "|" => token::Or,
+ "<<" => token::Shl,
+ ">>" => token::Shr,
+ _ => fail!("Bad binop str `{}`", s),
}
}
debug!("What we got: content (`{}`), proto: {}", content, proto_tok);
let real_tok = match *proto_tok {
- BINOP(..) => BINOP(str_to_binop(content)),
- BINOPEQ(..) => BINOPEQ(str_to_binop(content.slice_to(content.len() - 1))),
- LIT_STR(..) => LIT_STR(fix(content)),
- LIT_STR_RAW(..) => LIT_STR_RAW(fix(content), count(content)),
- LIT_CHAR(..) => LIT_CHAR(fixchar(content)),
- LIT_BYTE(..) => LIT_BYTE(fixchar(content)),
- DOC_COMMENT(..) => DOC_COMMENT(nm),
- LIT_INTEGER(..) => LIT_INTEGER(nm),
- LIT_FLOAT(..) => LIT_FLOAT(nm),
- LIT_BINARY(..) => LIT_BINARY(nm),
- LIT_BINARY_RAW(..) => LIT_BINARY_RAW(fix(content), count(content)),
- IDENT(..) => IDENT(ast::Ident { name: nm, ctxt: 0 }, true),
- LIFETIME(..) => LIFETIME(ast::Ident { name: nm, ctxt: 0 }),
+ token::BinOp(..) => token::BinOp(str_to_binop(content)),
+ token::BinOpEq(..) => token::BinOpEq(str_to_binop(content.slice_to(
+ content.len() - 1))),
+ token::LitStr(..) => token::LitStr(fix(content)),
+ token::LitStrRaw(..) => token::LitStrRaw(fix(content), count(content)),
+ token::LitChar(..) => token::LitChar(fixchar(content)),
+ token::LitByte(..) => token::LitByte(fixchar(content)),
+ token::DocComment(..) => token::DocComment(nm),
+ token::LitInteger(..) => token::LitInteger(nm),
+ token::LitFloat(..) => token::LitFloat(nm),
+ token::LitBinary(..) => token::LitBinary(nm),
+ token::LitBinaryRaw(..) => token::LitBinaryRaw(fix(content), count(content)),
+ token::Ident(..) => token::Ident(ast::Ident { name: nm, ctxt: 0 }, true),
+ token::Lifetime(..) => token::Lifetime(ast::Ident { name: nm, ctxt: 0 }),
ref t => t.clone()
};
fn tok_cmp(a: &Token, b: &Token) -> bool {
match a {
- &IDENT(id, _) => match b {
- &IDENT(id2, _) => id == id2,
+ &token::Ident(id, _) => match b {
+ &token::Ident(id2, _) => id == id2,
_ => false
},
_ => a == b
)
)
- matches!(LIT_BYTE(..),
- LIT_CHAR(..),
- LIT_INTEGER(..),
- LIT_FLOAT(..),
- LIT_STR(..),
- LIT_STR_RAW(..),
- LIT_BINARY(..),
- LIT_BINARY_RAW(..),
- IDENT(..),
- LIFETIME(..),
- INTERPOLATED(..),
- DOC_COMMENT(..),
- SHEBANG(..)
+ matches!(
+ token::LitByte(..),
+ token::LitChar(..),
+ token::LitInteger(..),
+ token::LitFloat(..),
+ token::LitStr(..),
+ token::LitStrRaw(..),
+ token::LitBinary(..),
+ token::LitBinaryRaw(..),
+ token::Ident(..),
+ token::Lifetime(..),
+ token::Interpolated(..),
+ token::DocComment(..),
+ token::Shebang(..)
);
}
}
return None
}
};
- if !parser.eat(&token::EOF) {
+ if !parser.eat(&token::Eof) {
cx.span_err(parser.span, "only one string literal allowed");
return None;
}
let qualname = format!("{}::{}", qualname, name);
let typ = ppaux::ty_to_string(&self.analysis.ty_cx,
(*self.analysis.ty_cx.node_types.borrow())[field.node.id as uint]);
- match self.span.sub_span_before_token(field.span, token::COLON) {
+ match self.span.sub_span_before_token(field.span, token::Colon) {
Some(sub_span) => self.fmt.field_str(field.span,
Some(sub_span),
field.node.id,
// 'use' always introduces an alias, if there is not an explicit
// one, there is an implicit one.
let sub_span =
- match self.span.sub_span_before_token(path.span, token::EQ) {
+ match self.span.sub_span_before_token(path.span, token::Eq) {
Some(sub_span) => Some(sub_span),
None => sub_span,
};
let mut bracket_count = 0u;
loop {
let ts = toks.next_token();
- if ts.tok == token::EOF {
+ if ts.tok == token::Eof {
return self.make_sub_span(span, result)
}
if bracket_count == 0 &&
}
bracket_count += match ts.tok {
- token::LT => 1,
- token::GT => -1,
- token::BINOP(token::SHR) => -2,
+ token::Lt => 1,
+ token::Gt => -1,
+ token::BinOp(token::Shr) => -2,
_ => 0
}
}
let mut bracket_count = 0u;
loop {
let ts = toks.next_token();
- if ts.tok == token::EOF {
+ if ts.tok == token::Eof {
return None;
}
if bracket_count == 0 &&
}
bracket_count += match ts.tok {
- token::LT => 1,
- token::GT => -1,
- token::BINOP(token::SHR) => -2,
+ token::Lt => 1,
+ token::Gt => -1,
+ token::BinOp(token::Shr) => -2,
_ => 0
}
}
let mut result = None;
let mut bracket_count = 0u;
let mut last_span = None;
- while prev.tok != token::EOF {
+ while prev.tok != token::Eof {
last_span = None;
let mut next = toks.next_token();
- if (next.tok == token::LPAREN ||
- next.tok == token::LT) &&
+ if (next.tok == token::LParen ||
+ next.tok == token::Lt) &&
bracket_count == 0 &&
is_ident(&prev.tok) {
result = Some(prev.sp);
}
if bracket_count == 0 &&
- next.tok == token::MOD_SEP {
+ next.tok == token::ModSep {
let old = prev;
prev = next;
next = toks.next_token();
- if next.tok == token::LT &&
+ if next.tok == token::Lt &&
is_ident(&old.tok) {
result = Some(old.sp);
}
}
bracket_count += match prev.tok {
- token::LPAREN | token::LT => 1,
- token::RPAREN | token::GT => -1,
- token::BINOP(token::SHR) => -2,
+ token::LParen | token::Lt => 1,
+ token::RParen | token::Gt => -1,
+ token::BinOp(token::Shr) => -2,
_ => 0
};
loop {
let next = toks.next_token();
- if (next.tok == token::LT ||
- next.tok == token::COLON) &&
+ if (next.tok == token::Lt ||
+ next.tok == token::Colon) &&
bracket_count == 0 &&
is_ident(&prev.tok) {
result = Some(prev.sp);
}
bracket_count += match prev.tok {
- token::LT => 1,
- token::GT => -1,
- token::BINOP(token::SHR) => -2,
+ token::Lt => 1,
+ token::Gt => -1,
+ token::BinOp(token::Shr) => -2,
_ => 0
};
- if next.tok == token::EOF {
+ if next.tok == token::Eof {
break;
}
prev = next;
let mut bracket_count = 0i;
loop {
let ts = toks.next_token();
- if ts.tok == token::EOF {
+ if ts.tok == token::Eof {
if bracket_count != 0 {
let loc = self.sess.codemap().lookup_char_pos(span.lo);
self.sess.span_bug(span, format!(
return result;
}
bracket_count += match ts.tok {
- token::LT => 1,
- token::GT => -1,
- token::BINOP(token::SHL) => 2,
- token::BINOP(token::SHR) => -2,
+ token::Lt => 1,
+ token::Gt => -1,
+ token::BinOp(token::Shl) => 2,
+ token::BinOp(token::Shr) => -2,
_ => 0
};
if is_ident(&ts.tok) &&
let mut toks = self.retokenise_span(span);
let mut prev = toks.next_token();
loop {
- if prev.tok == token::EOF {
+ if prev.tok == token::Eof {
return None;
}
let next = toks.next_token();
let mut toks = self.retokenise_span(span);
loop {
let ts = toks.next_token();
- if ts.tok == token::EOF {
+ if ts.tok == token::Eof {
return None;
}
if is_keyword(keyword, &ts.tok) {
let ts = toks.next_token();
- if ts.tok == token::EOF {
+ if ts.tok == token::Eof {
return None
} else {
return self.make_sub_span(span, Some(ts.sp));
use std::io;
use syntax::parse::lexer;
-use syntax::parse::token as t;
+use syntax::parse::token;
use syntax::parse;
/// Highlights some source code, returning the HTML output.
let snip = |sp| sess.span_diagnostic.cm.span_to_snippet(sp).unwrap();
- if next.tok == t::EOF { break }
+ if next.tok == token::Eof { break }
let klass = match next.tok {
- t::WS => {
+ token::Whitespace => {
try!(write!(out, "{}", Escape(snip(next.sp).as_slice())));
continue
},
- t::COMMENT => {
+ token::Comment => {
try!(write!(out, "<span class='comment'>{}</span>",
Escape(snip(next.sp).as_slice())));
continue
},
- t::SHEBANG(s) => {
+ token::Shebang(s) => {
try!(write!(out, "{}", Escape(s.as_str())));
continue
},
// that it's the address-of operator instead of the and-operator.
// This allows us to give all pointers their own class (`Box` and
// `@` are below).
- t::BINOP(t::AND) if lexer.peek().sp.lo == next.sp.hi => "kw-2",
- t::AT | t::TILDE => "kw-2",
+ token::BinOp(token::And) if lexer.peek().sp.lo == next.sp.hi => "kw-2",
+ token::At | token::Tilde => "kw-2",
// consider this as part of a macro invocation if there was a
// leading identifier
- t::NOT if is_macro => { is_macro = false; "macro" }
+ token::Not if is_macro => { is_macro = false; "macro" }
// operators
- t::EQ | t::LT | t::LE | t::EQEQ | t::NE | t::GE | t::GT |
- t::ANDAND | t::OROR | t::NOT | t::BINOP(..) | t::RARROW |
- t::BINOPEQ(..) | t::FAT_ARROW => "op",
+ token::Eq | token::Lt | token::Le | token::EqEq | token::Ne | token::Ge | token::Gt |
+ token::AndAnd | token::OrOr | token::Not | token::BinOp(..) | token::RArrow |
+ token::BinOpEq(..) | token::FatArrow => "op",
// miscellaneous, no highlighting
- t::DOT | t::DOTDOT | t::DOTDOTDOT | t::COMMA | t::SEMI |
- t::COLON | t::MOD_SEP | t::LARROW | t::LPAREN |
- t::RPAREN | t::LBRACKET | t::LBRACE | t::RBRACE | t::QUESTION => "",
- t::DOLLAR => {
- if t::is_ident(&lexer.peek().tok) {
+ token::Dot | token::DotDot | token::DotDotDot | token::Comma | token::Semi |
+ token::Colon | token::ModSep | token::LArrow | token::LParen |
+ token::RParen | token::LBracket | token::LBrace | token::RBrace |
+ token::Question => "",
+ token::Dollar => {
+ if token::is_ident(&lexer.peek().tok) {
is_macro_nonterminal = true;
"macro-nonterminal"
} else {
// continue highlighting it as an attribute until the ending ']' is
// seen, so skip out early. Down below we terminate the attribute
// span when we see the ']'.
- t::POUND => {
+ token::Pound => {
is_attribute = true;
try!(write!(out, r"<span class='attribute'>#"));
continue
}
- t::RBRACKET => {
+ token::RBracket => {
if is_attribute {
is_attribute = false;
try!(write!(out, "]</span>"));
}
// text literals
- t::LIT_BYTE(..) | t::LIT_BINARY(..) | t::LIT_BINARY_RAW(..) |
- t::LIT_CHAR(..) | t::LIT_STR(..) | t::LIT_STR_RAW(..) => "string",
+ token::LitByte(..) | token::LitBinary(..) | token::LitBinaryRaw(..) |
+ token::LitChar(..) | token::LitStr(..) | token::LitStrRaw(..) => "string",
// number literals
- t::LIT_INTEGER(..) | t::LIT_FLOAT(..) => "number",
+ token::LitInteger(..) | token::LitFloat(..) => "number",
// keywords are also included in the identifier set
- t::IDENT(ident, _is_mod_sep) => {
- match t::get_ident(ident).get() {
+ token::Ident(ident, _is_mod_sep) => {
+ match token::get_ident(ident).get() {
"ref" | "mut" => "kw-2",
"self" => "self",
"Option" | "Result" => "prelude-ty",
"Some" | "None" | "Ok" | "Err" => "prelude-val",
- _ if t::is_any_keyword(&next.tok) => "kw",
+ _ if token::is_any_keyword(&next.tok) => "kw",
_ => {
if is_macro_nonterminal {
is_macro_nonterminal = false;
"macro-nonterminal"
- } else if lexer.peek().tok == t::NOT {
+ } else if lexer.peek().tok == token::Not {
is_macro = true;
"macro"
} else {
}
}
- t::LIFETIME(..) => "lifetime",
- t::DOC_COMMENT(..) => "doccomment",
- t::UNDERSCORE | t::EOF | t::INTERPOLATED(..) => "",
+ token::Lifetime(..) => "lifetime",
+ token::DocComment(..) => "doccomment",
+ token::Underscore | token::Eof | token::Interpolated(..) => "",
};
// as mentioned above, use the original source code instead of
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let code = match token_tree {
- [ast::TtToken(_, token::IDENT(code, _))] => code,
+ [ast::TtToken(_, token::Ident(code, _))] => code,
_ => unreachable!()
};
with_registered_diagnostics(|diagnostics| {
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let (code, description) = match token_tree {
- [ast::TtToken(_, token::IDENT(ref code, _))] => {
+ [ast::TtToken(_, token::Ident(ref code, _))] => {
(code, None)
},
- [ast::TtToken(_, token::IDENT(ref code, _)),
- ast::TtToken(_, token::COMMA),
- ast::TtToken(_, token::LIT_STR_RAW(description, _))] => {
+ [ast::TtToken(_, token::Ident(ref code, _)),
+ ast::TtToken(_, token::Comma),
+ ast::TtToken(_, token::LitStrRaw(description, _))] => {
(code, Some(description))
}
_ => unreachable!()
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let name = match token_tree {
- [ast::TtToken(_, token::IDENT(ref name, _))] => name,
+ [ast::TtToken(_, token::Ident(ref name, _))] => name,
_ => unreachable!()
};
asm_str_style = Some(style);
}
Outputs => {
- while p.token != token::EOF &&
- p.token != token::COLON &&
- p.token != token::MOD_SEP {
+ while p.token != token::Eof &&
+ p.token != token::Colon &&
+ p.token != token::ModSep {
if outputs.len() != 0 {
- p.eat(&token::COMMA);
+ p.eat(&token::Comma);
}
let (constraint, _str_style) = p.parse_str();
let span = p.last_span;
- p.expect(&token::LPAREN);
+ p.expect(&token::LParen);
let out = p.parse_expr();
- p.expect(&token::RPAREN);
+ p.expect(&token::RParen);
// Expands a read+write operand into two operands.
//
}
}
Inputs => {
- while p.token != token::EOF &&
- p.token != token::COLON &&
- p.token != token::MOD_SEP {
+ while p.token != token::Eof &&
+ p.token != token::Colon &&
+ p.token != token::ModSep {
if inputs.len() != 0 {
- p.eat(&token::COMMA);
+ p.eat(&token::Comma);
}
let (constraint, _str_style) = p.parse_str();
cx.span_err(p.last_span, "input operand constraint contains '+'");
}
- p.expect(&token::LPAREN);
+ p.expect(&token::LParen);
let input = p.parse_expr();
- p.expect(&token::RPAREN);
+ p.expect(&token::RParen);
inputs.push((constraint, input));
}
}
Clobbers => {
let mut clobs = Vec::new();
- while p.token != token::EOF &&
- p.token != token::COLON &&
- p.token != token::MOD_SEP {
+ while p.token != token::Eof &&
+ p.token != token::Colon &&
+ p.token != token::ModSep {
if clobs.len() != 0 {
- p.eat(&token::COMMA);
+ p.eat(&token::Comma);
}
let (s, _str_style) = p.parse_str();
cx.span_warn(p.last_span, "unrecognized option");
}
- if p.token == token::COMMA {
- p.eat(&token::COMMA);
+ if p.token == token::Comma {
+ p.eat(&token::Comma);
}
}
StateNone => ()
// MOD_SEP is a double colon '::' without space in between.
// When encountered, the state must be advanced twice.
match (&p.token, state.next(), state.next().next()) {
- (&token::COLON, StateNone, _) |
- (&token::MOD_SEP, _, StateNone) => {
+ (&token::Colon, StateNone, _) |
+ (&token::ModSep, _, StateNone) => {
p.bump();
break 'statement;
}
- (&token::COLON, st, _) |
- (&token::MOD_SEP, _, st) => {
+ (&token::Colon, st, _) |
+ (&token::ModSep, _, st) => {
p.bump();
state = st;
}
- (&token::EOF, _, _) => break 'statement,
+ (&token::Eof, _, _) => break 'statement,
_ => break
}
}
cx.span_err(sp, format!("{} takes 1 argument.", name).as_slice());
} else {
match tts[0] {
- ast::TtToken(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())),
- ast::TtToken(_, token::LIT_STR_RAW(ident, _)) => {
+ ast::TtToken(_, token::LitStr(ident)) => return Some(parse::str_lit(ident.as_str())),
+ ast::TtToken(_, token::LitStrRaw(ident, _)) => {
return Some(parse::raw_str_lit(ident.as_str()))
}
_ => {
tts: &[ast::TokenTree]) -> Option<Vec<P<ast::Expr>>> {
let mut p = cx.new_parser_from_tts(tts);
let mut es = Vec::new();
- while p.token != token::EOF {
+ while p.token != token::Eof {
es.push(cx.expander().fold_expr(p.parse_expr()));
- if p.eat(&token::COMMA) {
+ if p.eat(&token::Comma) {
continue;
}
- if p.token != token::EOF {
+ if p.token != token::Eof {
cx.span_err(sp, "expected token: `,`");
return None;
}
let mut p = cx.new_parser_from_tts(tts);
let cfg = p.parse_meta_item();
- if !p.eat(&token::EOF) {
+ if !p.eat(&token::Eof) {
cx.span_err(sp, "expected 1 cfg-pattern");
return DummyResult::expr(sp);
}
for (i, e) in tts.iter().enumerate() {
if i & 1 == 1 {
match *e {
- ast::TtToken(_, token::COMMA) => (),
+ ast::TtToken(_, token::Comma) => {},
_ => {
cx.span_err(sp, "concat_idents! expecting comma.");
return DummyResult::expr(sp);
- }
+ },
}
} else {
match *e {
- ast::TtToken(_, token::IDENT(ident,_)) => {
+ ast::TtToken(_, token::Ident(ident, _)) => {
res_str.push_str(token::get_ident(ident).get())
- }
+ },
_ => {
cx.span_err(sp, "concat_idents! requires ident args.");
return DummyResult::expr(sp);
- }
+ },
}
}
}
// Parse the leading function expression (maybe a block, maybe a path)
let invocation = if allow_method {
let e = p.parse_expr();
- if !p.eat(&token::COMMA) {
+ if !p.eat(&token::Comma) {
ecx.span_err(sp, "expected token: `,`");
return (Call(e), None);
}
} else {
Call(p.parse_expr())
};
- if !p.eat(&token::COMMA) {
+ if !p.eat(&token::Comma) {
ecx.span_err(sp, "expected token: `,`");
return (invocation, None);
}
- if p.token == token::EOF {
+ if p.token == token::Eof {
ecx.span_err(sp, "requires at least a format string argument");
return (invocation, None);
}
let fmtstr = p.parse_expr();
let mut named = false;
- while p.token != token::EOF {
- if !p.eat(&token::COMMA) {
+ while p.token != token::Eof {
+ if !p.eat(&token::Comma) {
ecx.span_err(sp, "expected token: `,`");
return (invocation, None);
}
- if p.token == token::EOF { break } // accept trailing commas
+ if p.token == token::Eof { break } // accept trailing commas
if named || (token::is_ident(&p.token) &&
- p.look_ahead(1, |t| *t == token::EQ)) {
+ p.look_ahead(1, |t| *t == token::Eq)) {
named = true;
let ident = match p.token {
- token::IDENT(i, _) => {
+ token::Ident(i, _) => {
p.bump();
i
}
};
let interned_name = token::get_ident(ident);
let name = interned_name.get();
- p.expect(&token::EQ);
+ p.expect(&token::Eq);
let e = p.parse_expr();
match names.find_equiv(&name) {
None => {}
cx.expr_path(cx.path_global(sp, idents))
}
-fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOp) -> P<ast::Expr> {
+fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOpToken) -> P<ast::Expr> {
let name = match bop {
- PLUS => "PLUS",
- MINUS => "MINUS",
- STAR => "STAR",
- SLASH => "SLASH",
- PERCENT => "PERCENT",
- CARET => "CARET",
- AND => "AND",
- OR => "OR",
- SHL => "SHL",
- SHR => "SHR"
+ token::Plus => "Plus",
+ token::Minus => "Minus",
+ token::Star => "Star",
+ token::Slash => "Slash",
+ token::Percent => "Percent",
+ token::Caret => "Caret",
+ token::And => "And",
+ token::Or => "Or",
+ token::Shl => "Shl",
+ token::Shr => "Shr"
};
mk_token_path(cx, sp, name)
}
fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
-
match *tok {
- BINOP(binop) => {
- return cx.expr_call(sp, mk_token_path(cx, sp, "BINOP"), vec!(mk_binop(cx, sp, binop)));
+ token::BinOp(binop) => {
+ return cx.expr_call(sp, mk_token_path(cx, sp, "BinOp"), vec!(mk_binop(cx, sp, binop)));
}
- BINOPEQ(binop) => {
- return cx.expr_call(sp, mk_token_path(cx, sp, "BINOPEQ"),
+ token::BinOpEq(binop) => {
+ return cx.expr_call(sp, mk_token_path(cx, sp, "BinOpEq"),
vec!(mk_binop(cx, sp, binop)));
}
- LIT_BYTE(i) => {
+ token::LitByte(i) => {
let e_byte = mk_name(cx, sp, i.ident());
- return cx.expr_call(sp, mk_token_path(cx, sp, "LIT_BYTE"), vec!(e_byte));
+ return cx.expr_call(sp, mk_token_path(cx, sp, "LitByte"), vec!(e_byte));
}
- LIT_CHAR(i) => {
+ token::LitChar(i) => {
let e_char = mk_name(cx, sp, i.ident());
- return cx.expr_call(sp, mk_token_path(cx, sp, "LIT_CHAR"), vec!(e_char));
+ return cx.expr_call(sp, mk_token_path(cx, sp, "LitChar"), vec!(e_char));
}
- LIT_INTEGER(i) => {
+ token::LitInteger(i) => {
let e_int = mk_name(cx, sp, i.ident());
- return cx.expr_call(sp, mk_token_path(cx, sp, "LIT_INTEGER"), vec!(e_int));
+ return cx.expr_call(sp, mk_token_path(cx, sp, "LitInteger"), vec!(e_int));
}
- LIT_FLOAT(fident) => {
+ token::LitFloat(fident) => {
let e_fident = mk_name(cx, sp, fident.ident());
- return cx.expr_call(sp, mk_token_path(cx, sp, "LIT_FLOAT"), vec!(e_fident));
+ return cx.expr_call(sp, mk_token_path(cx, sp, "LitFloat"), vec!(e_fident));
}
- LIT_STR(ident) => {
+ token::LitStr(ident) => {
return cx.expr_call(sp,
- mk_token_path(cx, sp, "LIT_STR"),
+ mk_token_path(cx, sp, "LitStr"),
vec!(mk_name(cx, sp, ident.ident())));
}
- LIT_STR_RAW(ident, n) => {
+ token::LitStrRaw(ident, n) => {
return cx.expr_call(sp,
- mk_token_path(cx, sp, "LIT_STR_RAW"),
+ mk_token_path(cx, sp, "LitStrRaw"),
vec!(mk_name(cx, sp, ident.ident()), cx.expr_uint(sp, n)));
}
- IDENT(ident, b) => {
+ token::Ident(ident, b) => {
return cx.expr_call(sp,
- mk_token_path(cx, sp, "IDENT"),
+ mk_token_path(cx, sp, "Ident"),
vec!(mk_ident(cx, sp, ident), cx.expr_bool(sp, b)));
}
- LIFETIME(ident) => {
+ token::Lifetime(ident) => {
return cx.expr_call(sp,
- mk_token_path(cx, sp, "LIFETIME"),
+ mk_token_path(cx, sp, "Lifetime"),
vec!(mk_ident(cx, sp, ident)));
}
- DOC_COMMENT(ident) => {
+ token::DocComment(ident) => {
return cx.expr_call(sp,
- mk_token_path(cx, sp, "DOC_COMMENT"),
+ mk_token_path(cx, sp, "DocComment"),
vec!(mk_name(cx, sp, ident.ident())));
}
- INTERPOLATED(_) => fail!("quote! with interpolated token"),
+ token::Interpolated(_) => fail!("quote! with interpolated token"),
_ => ()
}
let name = match *tok {
- EQ => "EQ",
- LT => "LT",
- LE => "LE",
- EQEQ => "EQEQ",
- NE => "NE",
- GE => "GE",
- GT => "GT",
- ANDAND => "ANDAND",
- OROR => "OROR",
- NOT => "NOT",
- TILDE => "TILDE",
- AT => "AT",
- DOT => "DOT",
- DOTDOT => "DOTDOT",
- COMMA => "COMMA",
- SEMI => "SEMI",
- COLON => "COLON",
- MOD_SEP => "MOD_SEP",
- RARROW => "RARROW",
- LARROW => "LARROW",
- FAT_ARROW => "FAT_ARROW",
- LPAREN => "LPAREN",
- RPAREN => "RPAREN",
- LBRACKET => "LBRACKET",
- RBRACKET => "RBRACKET",
- LBRACE => "LBRACE",
- RBRACE => "RBRACE",
- POUND => "POUND",
- DOLLAR => "DOLLAR",
- UNDERSCORE => "UNDERSCORE",
- EOF => "EOF",
- _ => fail!()
+ token::Eq => "Eq",
+ token::Lt => "Lt",
+ token::Le => "Le",
+ token::EqEq => "EqEq",
+ token::Ne => "Ne",
+ token::Ge => "Ge",
+ token::Gt => "Gt",
+ token::AndAnd => "AndAnd",
+ token::OrOr => "OrOr",
+ token::Not => "Not",
+ token::Tilde => "Tilde",
+ token::At => "At",
+ token::Dot => "Dot",
+ token::DotDot => "DotDot",
+ token::Comma => "Comma",
+ token::Semi => "Semi",
+ token::Colon => "Colon",
+ token::ModSep => "ModSep",
+ token::RArrow => "RArrow",
+ token::LArrow => "LArrow",
+ token::FatArrow => "FatArrow",
+ token::LParen => "LParen",
+ token::RParen => "RParen",
+ token::LBracket => "LBracket",
+ token::RBracket => "RBracket",
+ token::LBrace => "LBrace",
+ token::RBrace => "RBrace",
+ token::Pound => "Pound",
+ token::Dollar => "Dollar",
+ token::Underscore => "Underscore",
+ token::Eof => "Eof",
+ _ => fail!(),
};
mk_token_path(cx, sp, name)
}
p.quote_depth += 1u;
let cx_expr = p.parse_expr();
- if !p.eat(&token::COMMA) {
+ if !p.eat(&token::Comma) {
p.fatal("expected token `,`");
}
use parse::ParseSess;
use parse::attr::ParserAttr;
use parse::parser::{LifetimeAndTypesWithoutColons, Parser};
-use parse::token::{Token, EOF, Nonterminal};
+use parse::token::{Token, Nonterminal};
use parse::token;
use ptr::P;
/// unhygienic comparison)
pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
match (t1,t2) {
- (&token::IDENT(id1,_),&token::IDENT(id2,_))
- | (&token::LIFETIME(id1),&token::LIFETIME(id2)) =>
+ (&token::Ident(id1,_),&token::Ident(id2,_))
+ | (&token::Lifetime(id1),&token::Lifetime(id2)) =>
id1.name == id2.name,
_ => *t1 == *t2
}
// Built-in nonterminals never start with these tokens,
// so we can eliminate them from consideration.
match tok {
- token::RPAREN |
- token::RBRACE |
- token::RBRACKET => {},
+ token::RParen |
+ token::RBrace |
+ token::RBracket => {},
_ => bb_eis.push(ei)
}
}
}
/* error messages here could be improved with links to orig. rules */
- if token_name_eq(&tok, &EOF) {
+ if token_name_eq(&tok, &token::Eof) {
if eof_eis.len() == 1u {
let mut v = Vec::new();
for dv in eof_eis.get_mut(0).matches.iter_mut() {
"ty" => token::NtTy(p.parse_ty(false /* no need to disambiguate*/)),
// this could be handled like a token, since it is one
"ident" => match p.token {
- token::IDENT(sn,b) => { p.bump(); token::NtIdent(box sn,b) }
+ token::Ident(sn,b) => { p.bump(); token::NtIdent(box sn,b) }
_ => {
let token_str = token::to_string(&p.token);
p.fatal((format!("expected ident, found {}",
use parse::parser::Parser;
use parse::attr::ParserAttr;
use parse::token::{special_idents, gensym_ident};
-use parse::token::{FAT_ARROW, SEMI, NtMatchers, NtTT, EOF};
+use parse::token::{NtMatchers, NtTT};
use parse::token;
use print;
use ptr::P;
/// allowed to be there.
fn ensure_complete_parse(&self, allow_semi: bool) {
let mut parser = self.parser.borrow_mut();
- if allow_semi && parser.token == SEMI {
+ if allow_semi && parser.token == token::Semi {
parser.bump()
}
- if parser.token != EOF {
+ if parser.token != token::Eof {
let token_str = parser.this_token_to_string();
let msg = format!("macro expansion ignores token `{}` and any \
following",
loop {
let mut parser = self.parser.borrow_mut();
match parser.token {
- EOF => break,
+ token::Eof => break,
_ => {
let attrs = parser.parse_outer_attributes();
ret.push(parser.parse_method(attrs, ast::Inherited))
let argument_gram = vec!(
ms(MatchSeq(vec!(
ms(MatchNonterminal(lhs_nm, special_idents::matchers, 0u)),
- ms(MatchTok(FAT_ARROW)),
- ms(MatchNonterminal(rhs_nm, special_idents::tt, 1u))), Some(SEMI),
- ast::OneOrMore, 0u, 2u)),
+ ms(MatchTok(token::FatArrow)),
+ ms(MatchNonterminal(rhs_nm, special_idents::tt, 1u))),
+ Some(token::Semi), ast::OneOrMore, 0u, 2u)),
//to phase into semicolon-termination instead of
//semicolon-separation
- ms(MatchSeq(vec!(ms(MatchTok(SEMI))), None, ast::ZeroOrMore, 2u, 2u)));
+ ms(MatchSeq(vec!(ms(MatchTok(token::Semi))), None,
+ ast::ZeroOrMore, 2u, 2u)));
// Parse the macro_rules! invocation (`none` is for no interpolations):
use codemap::{Span, DUMMY_SP};
use diagnostic::SpanHandler;
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
-use parse::token::{EOF, INTERPOLATED, IDENT, Token, NtIdent};
+use parse::token::{Token, NtIdent};
use parse::token;
use parse::lexer::TokenAndSpan;
repeat_idx: Vec::new(),
repeat_len: Vec::new(),
/* dummy values, never read: */
- cur_tok: EOF,
+ cur_tok: token::Eof,
cur_span: DUMMY_SP,
};
tt_next_token(&mut r); /* get cur_tok and cur_span set up */
loop {
let should_pop = match r.stack.last() {
None => {
- assert_eq!(ret_val.tok, EOF);
+ assert_eq!(ret_val.tok, token::Eof);
return ret_val;
}
Some(frame) => {
let prev = r.stack.pop().unwrap();
match r.stack.last_mut() {
None => {
- r.cur_tok = EOF;
+ r.cur_tok = token::Eof;
return ret_val;
}
Some(frame) => {
(b) we actually can, since it's a token. */
MatchedNonterminal(NtIdent(box sn, b)) => {
r.cur_span = sp;
- r.cur_tok = IDENT(sn,b);
+ r.cur_tok = token::Ident(sn,b);
return ret_val;
}
MatchedNonterminal(ref other_whole_nt) => {
// FIXME(pcwalton): Bad copy.
r.cur_span = sp;
- r.cur_tok = INTERPOLATED((*other_whole_nt).clone());
+ r.cur_tok = token::Interpolated((*other_whole_nt).clone());
return ret_val;
}
MatchedSeq(..) => {
// apply ident folder if it's an ident, apply other folds to interpolated nodes
pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token {
match t {
- token::IDENT(id, followed_by_colons) => {
- token::IDENT(fld.fold_ident(id), followed_by_colons)
+ token::Ident(id, followed_by_colons) => {
+ token::Ident(fld.fold_ident(id), followed_by_colons)
}
- token::LIFETIME(id) => token::LIFETIME(fld.fold_ident(id)),
- token::INTERPOLATED(nt) => token::INTERPOLATED(fld.fold_interpolated(nt)),
+ token::Lifetime(id) => token::Lifetime(fld.fold_ident(id)),
+ token::Interpolated(nt) => token::Interpolated(fld.fold_interpolated(nt)),
_ => t
}
}
use parse::common::*; //resolve bug?
use parse::token;
use parse::parser::Parser;
-use parse::token::INTERPOLATED;
use ptr::P;
/// A parser that can parse attributes.
debug!("parse_outer_attributes: self.token={}",
self.token);
match self.token {
- token::POUND => {
+ token::Pound => {
attrs.push(self.parse_attribute(false));
}
- token::DOC_COMMENT(s) => {
+ token::DocComment(s) => {
let attr = ::attr::mk_sugared_doc_attr(
attr::mk_attr_id(),
self.id_to_interned_str(s.ident()),
debug!("parse_attributes: permit_inner={} self.token={}",
permit_inner, self.token);
let (span, value, mut style) = match self.token {
- token::POUND => {
+ token::Pound => {
let lo = self.span.lo;
self.bump();
- let style = if self.eat(&token::NOT) {
+ let style = if self.eat(&token::Not) {
if !permit_inner {
let span = self.span;
self.span_err(span,
ast::AttrOuter
};
- self.expect(&token::LBRACKET);
+ self.expect(&token::LBracket);
let meta_item = self.parse_meta_item();
let hi = self.span.hi;
- self.expect(&token::RBRACKET);
+ self.expect(&token::RBracket);
(mk_sp(lo, hi), meta_item, style)
}
}
};
- if permit_inner && self.eat(&token::SEMI) {
+ if permit_inner && self.eat(&token::Semi) {
self.span_warn(span, "this inner attribute syntax is deprecated. \
The new syntax is `#![foo]`, with a bang and no semicolon.");
style = ast::AttrInner;
let mut next_outer_attrs: Vec<ast::Attribute> = Vec::new();
loop {
let attr = match self.token {
- token::POUND => {
+ token::Pound => {
self.parse_attribute(true)
}
- token::DOC_COMMENT(s) => {
+ token::DocComment(s) => {
// we need to get the position of this token before we bump.
let Span { lo, hi, .. } = self.span;
self.bump();
/// | IDENT meta_seq
fn parse_meta_item(&mut self) -> P<ast::MetaItem> {
let nt_meta = match self.token {
- token::INTERPOLATED(token::NtMeta(ref e)) => {
+ token::Interpolated(token::NtMeta(ref e)) => {
Some(e.clone())
}
_ => None
let ident = self.parse_ident();
let name = self.id_to_interned_str(ident);
match self.token {
- token::EQ => {
+ token::Eq => {
self.bump();
let lit = self.parse_lit();
// FIXME #623 Non-string meta items are not serialized correctly;
let hi = self.span.hi;
P(spanned(lo, hi, ast::MetaNameValue(name, lit)))
}
- token::LPAREN => {
+ token::LParen => {
let inner_items = self.parse_meta_seq();
let hi = self.span.hi;
P(spanned(lo, hi, ast::MetaList(name, inner_items)))
/// matches meta_seq = ( COMMASEP(meta_item) )
fn parse_meta_seq(&mut self) -> Vec<P<ast::MetaItem>> {
- self.parse_seq(&token::LPAREN,
- &token::RPAREN,
- seq_sep_trailing_disallowed(token::COMMA),
+ self.parse_seq(&token::LParen,
+ &token::RParen,
+ seq_sep_trailing_disallowed(token::Comma),
|p| p.parse_meta_item()).node
}
fn parse_optional_meta(&mut self) -> Vec<P<ast::MetaItem>> {
match self.token {
- token::LPAREN => self.parse_meta_seq(),
+ token::LParen => self.parse_meta_seq(),
_ => Vec::new()
}
}
/// Return the next token. EFFECT: advances the string_reader.
fn next_token(&mut self) -> TokenAndSpan {
let ret_val = TokenAndSpan {
- tok: replace(&mut self.peek_tok, token::UNDERSCORE),
+ tok: replace(&mut self.peek_tok, token::Underscore),
sp: self.peek_span,
};
self.advance_token();
impl<'a> Reader for TtReader<'a> {
fn is_eof(&self) -> bool {
- self.cur_tok == token::EOF
+ self.cur_tok == token::Eof
}
fn next_token(&mut self) -> TokenAndSpan {
let r = tt_next_token(self);
curr: Some('\n'),
filemap: filemap,
/* dummy values; not read */
- peek_tok: token::EOF,
+ peek_tok: token::Eof,
peek_span: codemap::DUMMY_SP,
read_embedded_ident: false,
};
},
None => {
if self.is_eof() {
- self.peek_tok = token::EOF;
+ self.peek_tok = token::Eof;
} else {
let start_bytepos = self.last_pos;
self.peek_tok = self.next_token_inner();
return self.with_str_from(start_bpos, |string| {
// but comments with only more "/"s are not
let tok = if is_doc_comment(string) {
- token::DOC_COMMENT(token::intern(string))
+ token::DocComment(token::intern(string))
} else {
- token::COMMENT
+ token::Comment
};
return Some(TokenAndSpan{
let start_bpos = self.last_pos - BytePos(2);
while !self.curr_is('\n') && !self.is_eof() { self.bump(); }
return Some(TokenAndSpan {
- tok: token::COMMENT,
+ tok: token::Comment,
sp: codemap::mk_sp(start_bpos, self.last_pos)
});
}
let start = self.last_pos;
while !self.curr_is('\n') && !self.is_eof() { self.bump(); }
return Some(TokenAndSpan {
- tok: token::SHEBANG(self.name_from(start)),
+ tok: token::Shebang(self.name_from(start)),
sp: codemap::mk_sp(start, self.last_pos)
});
}
let start_bpos = self.last_pos;
while is_whitespace(self.curr) { self.bump(); }
let c = Some(TokenAndSpan {
- tok: token::WS,
+ tok: token::Whitespace,
sp: codemap::mk_sp(start_bpos, self.last_pos)
});
debug!("scanning whitespace: {}", c);
self.translate_crlf(start_bpos, string,
"bare CR not allowed in block doc-comment")
} else { string.into_maybe_owned() };
- token::DOC_COMMENT(token::intern(string.as_slice()))
+ token::DocComment(token::intern(string.as_slice()))
} else {
- token::COMMENT
+ token::Comment
};
Some(TokenAndSpan{
}
'u' | 'i' => {
self.scan_int_suffix();
- return token::LIT_INTEGER(self.name_from(start_bpos));
+ return token::LitInteger(self.name_from(start_bpos));
},
'f' => {
let last_pos = self.last_pos;
self.scan_float_suffix();
self.check_float_base(start_bpos, last_pos, base);
- return token::LIT_FLOAT(self.name_from(start_bpos));
+ return token::LitFloat(self.name_from(start_bpos));
}
_ => {
// just a 0
- return token::LIT_INTEGER(self.name_from(start_bpos));
+ return token::LitInteger(self.name_from(start_bpos));
}
}
} else if c.is_digit_radix(10) {
self.err_span_(start_bpos, self.last_pos, "no valid digits found for number");
// eat any suffix
self.scan_int_suffix();
- return token::LIT_INTEGER(token::intern("0"));
+ return token::LitInteger(token::intern("0"));
}
// might be a float, but don't be greedy if this is actually an
}
let last_pos = self.last_pos;
self.check_float_base(start_bpos, last_pos, base);
- return token::LIT_FLOAT(self.name_from(start_bpos));
+ return token::LitFloat(self.name_from(start_bpos));
} else if self.curr_is('f') {
// or it might be an integer literal suffixed as a float
self.scan_float_suffix();
let last_pos = self.last_pos;
self.check_float_base(start_bpos, last_pos, base);
- return token::LIT_FLOAT(self.name_from(start_bpos));
+ return token::LitFloat(self.name_from(start_bpos));
} else {
// it might be a float if it has an exponent
if self.curr_is('e') || self.curr_is('E') {
self.scan_float_suffix();
let last_pos = self.last_pos;
self.check_float_base(start_bpos, last_pos, base);
- return token::LIT_FLOAT(self.name_from(start_bpos));
+ return token::LitFloat(self.name_from(start_bpos));
}
// but we certainly have an integer!
self.scan_int_suffix();
- return token::LIT_INTEGER(self.name_from(start_bpos));
+ return token::LitInteger(self.name_from(start_bpos));
}
}
}
}
- fn binop(&mut self, op: token::BinOp) -> token::Token {
+ fn binop(&mut self, op: token::BinOpToken) -> token::Token {
self.bump();
if self.curr_is('=') {
self.bump();
- return token::BINOPEQ(op);
+ return token::BinOpEq(op);
} else {
- return token::BINOP(op);
+ return token::BinOp(op);
}
}
return self.with_str_from(start, |string| {
if string == "_" {
- token::UNDERSCORE
+ token::Underscore
} else {
let is_mod_name = self.curr_is(':') && self.nextch_is(':');
// FIXME: perform NFKC normalization here. (Issue #2253)
- token::IDENT(str_to_ident(string), is_mod_name)
+ token::Ident(str_to_ident(string), is_mod_name)
}
})
}
('\x00', Some('n'), Some('a')) => {
let ast_ident = self.scan_embedded_hygienic_ident();
let is_mod_name = self.curr_is(':') && self.nextch_is(':');
- return token::IDENT(ast_ident, is_mod_name);
+ return token::Ident(ast_ident, is_mod_name);
}
_ => {}
}
match c.expect("next_token_inner called at EOF") {
// One-byte tokens.
- ';' => { self.bump(); return token::SEMI; }
- ',' => { self.bump(); return token::COMMA; }
+ ';' => { self.bump(); return token::Semi; }
+ ',' => { self.bump(); return token::Comma; }
'.' => {
self.bump();
return if self.curr_is('.') {
self.bump();
if self.curr_is('.') {
self.bump();
- token::DOTDOTDOT
+ token::DotDotDot
} else {
- token::DOTDOT
+ token::DotDot
}
} else {
- token::DOT
+ token::Dot
};
}
- '(' => { self.bump(); return token::LPAREN; }
- ')' => { self.bump(); return token::RPAREN; }
- '{' => { self.bump(); return token::LBRACE; }
- '}' => { self.bump(); return token::RBRACE; }
- '[' => { self.bump(); return token::LBRACKET; }
- ']' => { self.bump(); return token::RBRACKET; }
- '@' => { self.bump(); return token::AT; }
- '#' => { self.bump(); return token::POUND; }
- '~' => { self.bump(); return token::TILDE; }
- '?' => { self.bump(); return token::QUESTION; }
+ '(' => { self.bump(); return token::LParen; }
+ ')' => { self.bump(); return token::RParen; }
+ '{' => { self.bump(); return token::LBrace; }
+ '}' => { self.bump(); return token::RBrace; }
+ '[' => { self.bump(); return token::LBracket; }
+ ']' => { self.bump(); return token::RBracket; }
+ '@' => { self.bump(); return token::At; }
+ '#' => { self.bump(); return token::Pound; }
+ '~' => { self.bump(); return token::Tilde; }
+ '?' => { self.bump(); return token::Question; }
':' => {
self.bump();
if self.curr_is(':') {
self.bump();
- return token::MOD_SEP;
+ return token::ModSep;
} else {
- return token::COLON;
+ return token::Colon;
}
}
- '$' => { self.bump(); return token::DOLLAR; }
+ '$' => { self.bump(); return token::Dollar; }
// Multi-byte tokens.
'=' => {
self.bump();
if self.curr_is('=') {
self.bump();
- return token::EQEQ;
+ return token::EqEq;
} else if self.curr_is('>') {
self.bump();
- return token::FAT_ARROW;
+ return token::FatArrow;
} else {
- return token::EQ;
+ return token::Eq;
}
}
'!' => {
self.bump();
if self.curr_is('=') {
self.bump();
- return token::NE;
- } else { return token::NOT; }
+ return token::Ne;
+ } else { return token::Not; }
}
'<' => {
self.bump();
match self.curr.unwrap_or('\x00') {
- '=' => { self.bump(); return token::LE; }
- '<' => { return self.binop(token::SHL); }
+ '=' => { self.bump(); return token::Le; }
+ '<' => { return self.binop(token::Shl); }
'-' => {
self.bump();
match self.curr.unwrap_or('\x00') {
- _ => { return token::LARROW; }
+ _ => { return token::LArrow; }
}
}
- _ => { return token::LT; }
+ _ => { return token::Lt; }
}
}
'>' => {
self.bump();
match self.curr.unwrap_or('\x00') {
- '=' => { self.bump(); return token::GE; }
- '>' => { return self.binop(token::SHR); }
- _ => { return token::GT; }
+ '=' => { self.bump(); return token::Ge; }
+ '>' => { return self.binop(token::Shr); }
+ _ => { return token::Gt; }
}
}
'\'' => {
str_to_ident(lifetime_name)
});
let keyword_checking_token =
- &token::IDENT(keyword_checking_ident, false);
+ &token::Ident(keyword_checking_ident, false);
let last_bpos = self.last_pos;
if token::is_keyword(token::keywords::Self,
keyword_checking_token) {
last_bpos,
"invalid lifetime name");
}
- return token::LIFETIME(ident);
+ return token::Lifetime(ident);
}
// Otherwise it is a character constant:
}
let id = if valid { self.name_from(start) } else { token::intern("0") };
self.bump(); // advance curr past token
- return token::LIT_CHAR(id);
+ return token::LitChar(id);
}
'b' => {
self.bump();
Some('\'') => self.scan_byte(),
Some('"') => self.scan_byte_string(),
Some('r') => self.scan_raw_byte_string(),
- _ => unreachable!() // Should have been a token::IDENT above.
+ _ => unreachable!() // Should have been a token::Ident above.
};
}
let id = if valid { self.name_from(start_bpos + BytePos(1)) }
else { token::intern("??") };
self.bump();
- return token::LIT_STR(id);
+ return token::LitStr(id);
}
'r' => {
let start_bpos = self.last_pos;
} else {
token::intern("??")
};
- return token::LIT_STR_RAW(id, hash_count);
+ return token::LitStrRaw(id, hash_count);
}
'-' => {
if self.nextch_is('>') {
self.bump();
self.bump();
- return token::RARROW;
- } else { return self.binop(token::MINUS); }
+ return token::RArrow;
+ } else { return self.binop(token::Minus); }
}
'&' => {
if self.nextch_is('&') {
self.bump();
self.bump();
- return token::ANDAND;
- } else { return self.binop(token::AND); }
+ return token::AndAnd;
+ } else { return self.binop(token::And); }
}
'|' => {
match self.nextch() {
- Some('|') => { self.bump(); self.bump(); return token::OROR; }
- _ => { return self.binop(token::OR); }
+ Some('|') => { self.bump(); self.bump(); return token::OrOr; }
+ _ => { return self.binop(token::Or); }
}
}
- '+' => { return self.binop(token::PLUS); }
- '*' => { return self.binop(token::STAR); }
- '/' => { return self.binop(token::SLASH); }
- '^' => { return self.binop(token::CARET); }
- '%' => { return self.binop(token::PERCENT); }
+ '+' => { return self.binop(token::Plus); }
+ '*' => { return self.binop(token::Star); }
+ '/' => { return self.binop(token::Slash); }
+ '^' => { return self.binop(token::Caret); }
+ '%' => { return self.binop(token::Percent); }
c => {
let last_bpos = self.last_pos;
let bpos = self.pos;
let id = if valid { self.name_from(start) } else { token::intern("??") };
self.bump(); // advance curr past token
- return token::LIT_BYTE(id);
+ return token::LitByte(id);
}
fn scan_byte_string(&mut self) -> token::Token {
}
let id = if valid { self.name_from(start) } else { token::intern("??") };
self.bump();
- return token::LIT_BINARY(id);
+ return token::LitBinary(id);
}
fn scan_raw_byte_string(&mut self) -> token::Token {
self.bump();
}
self.bump();
- return token::LIT_BINARY_RAW(self.name_from_to(content_start_bpos, content_end_bpos),
+ return token::LitBinaryRaw(self.name_from_to(content_start_bpos, content_end_bpos),
hash_count);
}
}
"/* my source file */ \
fn main() { println!(\"zebra\"); }\n".to_string());
let id = str_to_ident("fn");
- assert_eq!(string_reader.next_token().tok, token::COMMENT);
- assert_eq!(string_reader.next_token().tok, token::WS);
+ assert_eq!(string_reader.next_token().tok, token::Comment);
+ assert_eq!(string_reader.next_token().tok, token::Whitespace);
let tok1 = string_reader.next_token();
let tok2 = TokenAndSpan{
- tok:token::IDENT(id, false),
+ tok:token::Ident(id, false),
sp:Span {lo:BytePos(21),hi:BytePos(23),expn_id: NO_EXPANSION}};
assert_eq!(tok1,tok2);
- assert_eq!(string_reader.next_token().tok, token::WS);
+ assert_eq!(string_reader.next_token().tok, token::Whitespace);
// the 'main' id is already read:
assert_eq!(string_reader.last_pos.clone(), BytePos(28));
// read another token:
let tok3 = string_reader.next_token();
let tok4 = TokenAndSpan{
- tok:token::IDENT(str_to_ident("main"), false),
+ tok:token::Ident(str_to_ident("main"), false),
sp:Span {lo:BytePos(24),hi:BytePos(28),expn_id: NO_EXPANSION}};
assert_eq!(tok3,tok4);
// the lparen is already read:
// make the identifier by looking up the string in the interner
fn mk_ident (id: &str, is_mod_name: bool) -> token::Token {
- token::IDENT (str_to_ident(id),is_mod_name)
+ token::Ident (str_to_ident(id),is_mod_name)
}
#[test] fn doublecolonparsing () {
check_tokenization(setup(&mk_sh(), "a b".to_string()),
vec!(mk_ident("a",false),
- token::WS,
+ token::Whitespace,
mk_ident("b",false)));
}
#[test] fn dcparsing_2 () {
check_tokenization(setup(&mk_sh(), "a::b".to_string()),
vec!(mk_ident("a",true),
- token::MOD_SEP,
+ token::ModSep,
mk_ident("b",false)));
}
#[test] fn dcparsing_3 () {
check_tokenization(setup(&mk_sh(), "a ::b".to_string()),
vec!(mk_ident("a",false),
- token::WS,
- token::MOD_SEP,
+ token::Whitespace,
+ token::ModSep,
mk_ident("b",false)));
}
#[test] fn dcparsing_4 () {
check_tokenization(setup(&mk_sh(), "a:: b".to_string()),
vec!(mk_ident("a",true),
- token::MOD_SEP,
- token::WS,
+ token::ModSep,
+ token::Whitespace,
mk_ident("b",false)));
}
#[test] fn character_a() {
assert_eq!(setup(&mk_sh(), "'a'".to_string()).next_token().tok,
- token::LIT_CHAR(token::intern("a")));
+ token::LitChar(token::intern("a")));
}
#[test] fn character_space() {
assert_eq!(setup(&mk_sh(), "' '".to_string()).next_token().tok,
- token::LIT_CHAR(token::intern(" ")));
+ token::LitChar(token::intern(" ")));
}
#[test] fn character_escaped() {
assert_eq!(setup(&mk_sh(), "'\\n'".to_string()).next_token().tok,
- token::LIT_CHAR(token::intern("\\n")));
+ token::LitChar(token::intern("\\n")));
}
#[test] fn lifetime_name() {
assert_eq!(setup(&mk_sh(), "'abc".to_string()).next_token().tok,
- token::LIFETIME(token::str_to_ident("'abc")));
+ token::Lifetime(token::str_to_ident("'abc")));
}
#[test] fn raw_string() {
assert_eq!(setup(&mk_sh(),
"r###\"\"#a\\b\x00c\"\"###".to_string()).next_token()
.tok,
- token::LIT_STR_RAW(token::intern("\"#a\\b\x00c\""), 3));
+ token::LitStrRaw(token::intern("\"#a\\b\x00c\""), 3));
}
#[test] fn line_doc_comments() {
let sh = mk_sh();
let mut lexer = setup(&sh, "/* /* */ */'a'".to_string());
match lexer.next_token().tok {
- token::COMMENT => { },
+ token::Comment => { },
_ => fail!("expected a comment!")
}
- assert_eq!(lexer.next_token().tok, token::LIT_CHAR(token::intern("a")));
+ assert_eq!(lexer.next_token().tok, token::LitChar(token::intern("a")));
}
}
let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
let tts: &[ast::TokenTree] = tts.as_slice();
match tts {
- [ast::TtToken(_, token::IDENT(name_macro_rules, false)),
- ast::TtToken(_, token::NOT),
- ast::TtToken(_, token::IDENT(name_zip, false)),
+ [ast::TtToken(_, token::Ident(name_macro_rules, false)),
+ ast::TtToken(_, token::Not),
+ ast::TtToken(_, token::Ident(name_zip, false)),
ast::TtDelimited(_, ref macro_delimed)]
if name_macro_rules.as_str() == "macro_rules"
&& name_zip.as_str() == "zip" => {
let (ref macro_open, ref macro_tts, ref macro_close) = **macro_delimed;
match (macro_open, macro_tts.as_slice(), macro_close) {
- (&ast::Delimiter { token: token::LPAREN, .. },
+ (&ast::Delimiter { token: token::LParen, .. },
[ast::TtDelimited(_, ref first_delimed),
- ast::TtToken(_, token::FAT_ARROW),
+ ast::TtToken(_, token::FatArrow),
ast::TtDelimited(_, ref second_delimed)],
- &ast::Delimiter { token: token::RPAREN, .. }) => {
+ &ast::Delimiter { token: token::RParen, .. }) => {
let (ref first_open, ref first_tts, ref first_close) = **first_delimed;
match (first_open, first_tts.as_slice(), first_close) {
- (&ast::Delimiter { token: token::LPAREN, .. },
- [ast::TtToken(_, token::DOLLAR),
- ast::TtToken(_, token::IDENT(name, false))],
- &ast::Delimiter { token: token::RPAREN, .. })
+ (&ast::Delimiter { token: token::LParen, .. },
+ [ast::TtToken(_, token::Dollar),
+ ast::TtToken(_, token::Ident(name, false))],
+ &ast::Delimiter { token: token::RParen, .. })
if name.as_str() == "a" => {},
_ => fail!("value 3: {}", **first_delimed),
}
let (ref second_open, ref second_tts, ref second_close) = **second_delimed;
match (second_open, second_tts.as_slice(), second_close) {
- (&ast::Delimiter { token: token::LPAREN, .. },
- [ast::TtToken(_, token::DOLLAR),
- ast::TtToken(_, token::IDENT(name, false))],
- &ast::Delimiter { token: token::RPAREN, .. })
+ (&ast::Delimiter { token: token::LParen, .. },
+ [ast::TtToken(_, token::Dollar),
+ ast::TtToken(_, token::Ident(name, false))],
+ &ast::Delimiter { token: token::RParen, .. })
if name.as_str() == "a" => {},
_ => fail!("value 4: {}", **second_delimed),
}
\"fields\":[\
null,\
{\
- \"variant\":\"IDENT\",\
+ \"variant\":\"Ident\",\
\"fields\":[\
\"fn\",\
false\
\"fields\":[\
null,\
{\
- \"variant\":\"IDENT\",\
+ \"variant\":\"Ident\",\
\"fields\":[\
\"a\",\
false\
[\
{\
\"span\":null,\
- \"token\":\"LPAREN\"\
+ \"token\":\"LParen\"\
},\
[\
{\
\"fields\":[\
null,\
{\
- \"variant\":\"IDENT\",\
+ \"variant\":\"Ident\",\
\"fields\":[\
\"b\",\
false\
\"variant\":\"TtToken\",\
\"fields\":[\
null,\
- \"COLON\"\
+ \"Colon\"\
]\
},\
{\
\"fields\":[\
null,\
{\
- \"variant\":\"IDENT\",\
+ \"variant\":\"Ident\",\
\"fields\":[\
\"int\",\
false\
],\
{\
\"span\":null,\
- \"token\":\"RPAREN\"\
+ \"token\":\"RParen\"\
}\
]\
]\
[\
{\
\"span\":null,\
- \"token\":\"LBRACE\"\
+ \"token\":\"LBrace\"\
},\
[\
{\
\"fields\":[\
null,\
{\
- \"variant\":\"IDENT\",\
+ \"variant\":\"Ident\",\
\"fields\":[\
\"b\",\
false\
\"variant\":\"TtToken\",\
\"fields\":[\
null,\
- \"SEMI\"\
+ \"Semi\"\
]\
}\
],\
{\
\"span\":null,\
- \"token\":\"RBRACE\"\
+ \"token\":\"RBrace\"\
}\
]\
]\
}
fn parser_done(p: Parser){
- assert_eq!(p.token.clone(), token::EOF);
+ assert_eq!(p.token.clone(), token::Eof);
}
#[test] fn parse_ident_pat () {
fn is_obsolete_ident(&mut self, ident: &str) -> bool {
match self.token {
- token::IDENT(sid, _) => {
+ token::Ident(sid, _) => {
token::get_ident(sid).equiv(&ident)
}
_ => false
use parse::lexer::Reader;
use parse::lexer::TokenAndSpan;
use parse::obsolete::*;
-use parse::token::{INTERPOLATED, InternedString, can_begin_expr};
+use parse::token::{InternedString, can_begin_expr};
use parse::token::{is_ident, is_ident_or_path, is_plain_ident};
use parse::token::{keywords, special_idents, token_to_binop};
use parse::token;
}
-/// Possibly accept an `INTERPOLATED` expression (a pre-parsed expression
-/// dropped into the token stream, which happens while parsing the
-/// result of macro expansion)
-/// Placement of these is not as complex as I feared it would be.
-/// The important thing is to make sure that lookahead doesn't balk
-/// at INTERPOLATED tokens
+/// Possibly accept a `token::Interpolated` expression (a pre-parsed expression
+/// dropped into the token stream, which happens while parsing the result of
+/// macro expansion). Placement of these is not as complex as I feared it would
+/// be. The important thing is to make sure that lookahead doesn't balk at
+/// `token::Interpolated` tokens.
macro_rules! maybe_whole_expr (
($p:expr) => (
{
let found = match $p.token {
- INTERPOLATED(token::NtExpr(ref e)) => {
+ token::Interpolated(token::NtExpr(ref e)) => {
Some((*e).clone())
}
- INTERPOLATED(token::NtPath(_)) => {
+ token::Interpolated(token::NtPath(_)) => {
// FIXME: The following avoids an issue with lexical borrowck scopes,
// but the clone is unfortunate.
let pt = match $p.token {
- INTERPOLATED(token::NtPath(ref pt)) => (**pt).clone(),
+ token::Interpolated(token::NtPath(ref pt)) => (**pt).clone(),
_ => unreachable!()
};
let span = $p.span;
Some($p.mk_expr(span.lo, span.hi, ExprPath(pt)))
}
- INTERPOLATED(token::NtBlock(_)) => {
+ token::Interpolated(token::NtBlock(_)) => {
// FIXME: The following avoids an issue with lexical borrowck scopes,
// but the clone is unfortunate.
let b = match $p.token {
- INTERPOLATED(token::NtBlock(ref b)) => (*b).clone(),
+ token::Interpolated(token::NtBlock(ref b)) => (*b).clone(),
_ => unreachable!()
};
let span = $p.span;
($p:expr, $constructor:ident) => (
{
let found = match ($p).token {
- INTERPOLATED(token::$constructor(_)) => {
+ token::Interpolated(token::$constructor(_)) => {
Some(($p).bump_and_get())
}
_ => None
};
match found {
- Some(INTERPOLATED(token::$constructor(x))) => {
+ Some(token::Interpolated(token::$constructor(x))) => {
return x.clone()
}
_ => {}
(no_clone $p:expr, $constructor:ident) => (
{
let found = match ($p).token {
- INTERPOLATED(token::$constructor(_)) => {
+ token::Interpolated(token::$constructor(_)) => {
Some(($p).bump_and_get())
}
_ => None
};
match found {
- Some(INTERPOLATED(token::$constructor(x))) => {
+ Some(token::Interpolated(token::$constructor(x))) => {
return x
}
_ => {}
(deref $p:expr, $constructor:ident) => (
{
let found = match ($p).token {
- INTERPOLATED(token::$constructor(_)) => {
+ token::Interpolated(token::$constructor(_)) => {
Some(($p).bump_and_get())
}
_ => None
};
match found {
- Some(INTERPOLATED(token::$constructor(x))) => {
+ Some(token::Interpolated(token::$constructor(x))) => {
return (*x).clone()
}
_ => {}
(Some $p:expr, $constructor:ident) => (
{
let found = match ($p).token {
- INTERPOLATED(token::$constructor(_)) => {
+ token::Interpolated(token::$constructor(_)) => {
Some(($p).bump_and_get())
}
_ => None
};
match found {
- Some(INTERPOLATED(token::$constructor(x))) => {
+ Some(token::Interpolated(token::$constructor(x))) => {
return Some(x.clone())
}
_ => {}
(iovi $p:expr, $constructor:ident) => (
{
let found = match ($p).token {
- INTERPOLATED(token::$constructor(_)) => {
+ token::Interpolated(token::$constructor(_)) => {
Some(($p).bump_and_get())
}
_ => None
};
match found {
- Some(INTERPOLATED(token::$constructor(x))) => {
+ Some(token::Interpolated(token::$constructor(x))) => {
return IoviItem(x.clone())
}
_ => {}
(pair_empty $p:expr, $constructor:ident) => (
{
let found = match ($p).token {
- INTERPOLATED(token::$constructor(_)) => {
+ token::Interpolated(token::$constructor(_)) => {
Some(($p).bump_and_get())
}
_ => None
};
match found {
- Some(INTERPOLATED(token::$constructor(x))) => {
+ Some(token::Interpolated(token::$constructor(x))) => {
return (Vec::new(), x)
}
_ => {}
}
fn is_plain_ident_or_underscore(t: &token::Token) -> bool {
- is_plain_ident(t) || *t == token::UNDERSCORE
+ is_plain_ident(t) || *t == token::Underscore
}
/// Get a token the parser cares about
let mut t = rdr.next_token();
loop {
match t.tok {
- token::WS | token::COMMENT | token::SHEBANG(_) => {
+ token::Whitespace | token::Comment | token::Shebang(_) => {
t = rdr.next_token();
},
_ => break
let tok0 = real_token(&mut *rdr);
let span = tok0.sp;
let placeholder = TokenAndSpan {
- tok: token::UNDERSCORE,
+ tok: token::Underscore,
sp: span,
};
/// recover (without consuming any expected input token). Returns
/// true if and only if input was consumed for recovery.
pub fn check_for_erroneous_unit_struct_expecting(&mut self, expected: &[token::Token]) -> bool {
- if self.token == token::LBRACE
- && expected.iter().all(|t| *t != token::LBRACE)
- && self.look_ahead(1, |t| *t == token::RBRACE) {
+ if self.token == token::LBrace
+ && expected.iter().all(|t| *t != token::LBrace)
+ && self.look_ahead(1, |t| *t == token::RBrace) {
// matched; signal non-fatal error and recover.
let span = self.span;
self.span_err(span,
"unit-like struct construction is written with no trailing `{ }`");
- self.eat(&token::LBRACE);
- self.eat(&token::RBRACE);
+ self.eat(&token::LBrace);
+ self.eat(&token::RBrace);
true
} else {
false
self.check_strict_keywords();
self.check_reserved_keywords();
match self.token {
- token::IDENT(i, _) => {
+ token::Ident(i, _) => {
self.bump();
i
}
- token::INTERPOLATED(token::NtIdent(..)) => {
+ token::Interpolated(token::NtIdent(..)) => {
self.bug("ident interpolation not converted to real token");
}
_ => {
/// `&` and continue. If an `&` is not seen, signal an error.
fn expect_and(&mut self) {
match self.token {
- token::BINOP(token::AND) => self.bump(),
- token::ANDAND => {
+ token::BinOp(token::And) => self.bump(),
+ token::AndAnd => {
let span = self.span;
let lo = span.lo + BytePos(1);
- self.replace_token(token::BINOP(token::AND), lo, span.hi)
+ self.replace_token(token::BinOp(token::And), lo, span.hi)
}
_ => {
let token_str = self.this_token_to_string();
let found_token =
- Parser::token_to_string(&token::BINOP(token::AND));
+ Parser::token_to_string(&token::BinOp(token::And));
self.fatal(format!("expected `{}`, found `{}`",
found_token,
token_str).as_slice())
/// `|` and continue. If a `|` is not seen, signal an error.
fn expect_or(&mut self) {
match self.token {
- token::BINOP(token::OR) => self.bump(),
- token::OROR => {
+ token::BinOp(token::Or) => self.bump(),
+ token::OrOr => {
let span = self.span;
let lo = span.lo + BytePos(1);
- self.replace_token(token::BINOP(token::OR), lo, span.hi)
+ self.replace_token(token::BinOp(token::Or), lo, span.hi)
}
_ => {
let found_token = self.this_token_to_string();
let token_str =
- Parser::token_to_string(&token::BINOP(token::OR));
+ Parser::token_to_string(&token::BinOp(token::Or));
self.fatal(format!("expected `{}`, found `{}`",
token_str,
found_token).as_slice())
/// impl Foo<<'a> ||>() { ... }
fn eat_lt(&mut self, force: bool) -> bool {
match self.token {
- token::LT => { self.bump(); true }
- token::BINOP(token::SHL) => {
+ token::Lt => { self.bump(); true }
+ token::BinOp(token::Shl) => {
let next_lifetime = self.look_ahead(1, |t| match *t {
- token::LIFETIME(..) => true,
+ token::Lifetime(..) => true,
_ => false,
});
if force || next_lifetime {
let span = self.span;
let lo = span.lo + BytePos(1);
- self.replace_token(token::LT, lo, span.hi);
+ self.replace_token(token::Lt, lo, span.hi);
true
} else {
false
fn expect_lt(&mut self) {
if !self.eat_lt(true) {
let found_token = self.this_token_to_string();
- let token_str = Parser::token_to_string(&token::LT);
+ let token_str = Parser::token_to_string(&token::Lt);
self.fatal(format!("expected `{}`, found `{}`",
token_str,
found_token).as_slice())
-> Vec<T> {
let mut first = true;
let mut vector = Vec::new();
- while self.token != token::BINOP(token::OR) &&
- self.token != token::OROR {
+ while self.token != token::BinOp(token::Or) &&
+ self.token != token::OrOr {
if first {
first = false
} else {
/// signal an error.
pub fn expect_gt(&mut self) {
match self.token {
- token::GT => self.bump(),
- token::BINOP(token::SHR) => {
+ token::Gt => self.bump(),
+ token::BinOp(token::Shr) => {
let span = self.span;
let lo = span.lo + BytePos(1);
- self.replace_token(token::GT, lo, span.hi)
+ self.replace_token(token::Gt, lo, span.hi)
}
- token::BINOPEQ(token::SHR) => {
+ token::BinOpEq(token::Shr) => {
let span = self.span;
let lo = span.lo + BytePos(1);
- self.replace_token(token::GE, lo, span.hi)
+ self.replace_token(token::Ge, lo, span.hi)
}
- token::GE => {
+ token::Ge => {
let span = self.span;
let lo = span.lo + BytePos(1);
- self.replace_token(token::EQ, lo, span.hi)
+ self.replace_token(token::Eq, lo, span.hi)
}
_ => {
- let gt_str = Parser::token_to_string(&token::GT);
+ let gt_str = Parser::token_to_string(&token::Gt);
let this_token_str = self.this_token_to_string();
self.fatal(format!("expected `{}`, found `{}`",
gt_str,
// commas in generic parameters, because it can stop either after
// parsing a type or after parsing a comma.
for i in iter::count(0u, 1) {
- if self.token == token::GT
- || self.token == token::BINOP(token::SHR)
- || self.token == token::GE
- || self.token == token::BINOPEQ(token::SHR) {
+ if self.token == token::Gt
+ || self.token == token::BinOp(token::Shr)
+ || self.token == token::Ge
+ || self.token == token::BinOpEq(token::Shr) {
break;
}
self.buffer_start = next_index as int;
let placeholder = TokenAndSpan {
- tok: token::UNDERSCORE,
+ tok: token::Underscore,
sp: self.span,
};
replace(&mut self.buffer[buffer_start], placeholder)
/// Advance the parser by one token and return the bumped token.
pub fn bump_and_get(&mut self) -> token::Token {
- let old_token = replace(&mut self.token, token::UNDERSCORE);
+ let old_token = replace(&mut self.token, token::Underscore);
self.bump();
old_token
}
pub fn token_is_lifetime(tok: &token::Token) -> bool {
match *tok {
- token::LIFETIME(..) => true,
+ token::Lifetime(..) => true,
_ => false,
}
}
pub fn get_lifetime(&mut self) -> ast::Ident {
match self.token {
- token::LIFETIME(ref ident) => *ident,
+ token::Lifetime(ref ident) => *ident,
_ => self.bug("not a lifetime"),
}
}
*/
- let lifetime_defs = if self.eat(&token::LT) {
+ let lifetime_defs = if self.eat(&token::Lt) {
let lifetime_defs = self.parse_lifetime_defs();
self.expect_gt();
lifetime_defs
/// Parses an optional unboxed closure kind (`&:`, `&mut:`, or `:`).
pub fn parse_optional_unboxed_closure_kind(&mut self)
-> Option<UnboxedClosureKind> {
- if self.token == token::BINOP(token::AND) &&
+ if self.token == token::BinOp(token::And) &&
self.look_ahead(1, |t| {
token::is_keyword(keywords::Mut, t)
}) &&
- self.look_ahead(2, |t| *t == token::COLON) {
+ self.look_ahead(2, |t| *t == token::Colon) {
self.bump();
self.bump();
self.bump();
return Some(FnMutUnboxedClosureKind)
}
- if self.token == token::BINOP(token::AND) &&
- self.look_ahead(1, |t| *t == token::COLON) {
+ if self.token == token::BinOp(token::And) &&
+ self.look_ahead(1, |t| *t == token::Colon) {
self.bump();
self.bump();
return Some(FnUnboxedClosureKind)
}
- if self.eat(&token::COLON) {
+ if self.eat(&token::Colon) {
return Some(FnOnceUnboxedClosureKind)
}
let fn_style = self.parse_unsafety();
let onceness = if self.eat_keyword(keywords::Once) {Once} else {Many};
- let lifetime_defs = if self.eat(&token::LT) {
+ let lifetime_defs = if self.eat(&token::Lt) {
let lifetime_defs = self.parse_lifetime_defs();
self.expect_gt();
Vec::new()
};
- let (optional_unboxed_closure_kind, inputs) = if self.eat(&token::OROR) {
+ let (optional_unboxed_closure_kind, inputs) = if self.eat(&token::OrOr) {
(None, Vec::new())
} else {
self.expect_or();
self.parse_optional_unboxed_closure_kind();
let inputs = self.parse_seq_to_before_or(
- &token::COMMA,
+ &token::Comma,
|p| p.parse_arg_general(false));
self.expect_or();
(optional_unboxed_closure_kind, inputs)
Lifetime_defs
*/
- let lifetime_defs = if self.eat(&token::LT) {
+ let lifetime_defs = if self.eat(&token::Lt) {
let lifetime_defs = self.parse_lifetime_defs();
self.expect_gt();
lifetime_defs
let lo = self.span.lo;
let ident = self.parse_ident();
let hi = self.span.hi;
- self.expect(&token::SEMI);
+ self.expect(&token::Semi);
AssociatedType {
id: ast::DUMMY_NODE_ID,
span: mk_sp(lo, hi),
-> Typedef {
let lo = self.span.lo;
let ident = self.parse_ident();
- self.expect(&token::EQ);
+ self.expect(&token::Eq);
let typ = self.parse_ty(true);
let hi = self.span.hi;
- self.expect(&token::SEMI);
+ self.expect(&token::Semi);
Typedef {
id: ast::DUMMY_NODE_ID,
span: mk_sp(lo, hi),
/// Parse the items in a trait declaration
pub fn parse_trait_items(&mut self) -> Vec<TraitItem> {
self.parse_unspanned_seq(
- &token::LBRACE,
- &token::RBRACE,
+ &token::LBrace,
+ &token::RBrace,
seq_sep_none(),
|p| {
let attrs = p.parse_outer_attributes();
let hi = p.last_span.hi;
match p.token {
- token::SEMI => {
+ token::Semi => {
p.bump();
debug!("parse_trait_methods(): parsing required method");
RequiredMethod(TypeMethod {
vis: vis,
})
}
- token::LBRACE => {
+ token::LBrace => {
debug!("parse_trait_methods(): parsing provided method");
let (inner_attrs, body) =
p.parse_inner_attrs_and_block();
let lo = self.span.lo;
let mutbl = self.parse_mutability();
let id = self.parse_ident();
- self.expect(&token::COLON);
+ self.expect(&token::Colon);
let ty = self.parse_ty(true);
let hi = ty.span.hi;
ast::TypeField {
/// Parse optional return type [ -> TY ] in function decl
pub fn parse_ret_ty(&mut self) -> (RetStyle, P<Ty>) {
- return if self.eat(&token::RARROW) {
+ return if self.eat(&token::RArrow) {
let lo = self.span.lo;
- if self.eat(&token::NOT) {
+ if self.eat(&token::Not) {
(
NoReturn,
P(Ty {
let lo = self.span.lo;
- let t = if self.token == token::LPAREN {
+ let t = if self.token == token::LParen {
self.bump();
- if self.token == token::RPAREN {
+ if self.token == token::RParen {
self.bump();
TyNil
} else {
// of type t
let mut ts = vec!(self.parse_ty(true));
let mut one_tuple = false;
- while self.token == token::COMMA {
+ while self.token == token::Comma {
self.bump();
- if self.token != token::RPAREN {
+ if self.token != token::RParen {
ts.push(self.parse_ty(true));
}
else {
}
if ts.len() == 1 && !one_tuple {
- self.expect(&token::RPAREN);
+ self.expect(&token::RParen);
TyParen(ts.into_iter().nth(0).unwrap())
} else {
let t = TyTup(ts);
- self.expect(&token::RPAREN);
+ self.expect(&token::RParen);
t
}
}
- } else if self.token == token::TILDE {
+ } else if self.token == token::Tilde {
// OWNED POINTER
self.bump();
let last_span = self.last_span;
match self.token {
- token::LBRACKET => self.obsolete(last_span, ObsoleteOwnedVector),
+ token::LBracket => self.obsolete(last_span, ObsoleteOwnedVector),
_ => self.obsolete(last_span, ObsoleteOwnedType)
}
TyUniq(self.parse_ty(false))
- } else if self.token == token::BINOP(token::STAR) {
+ } else if self.token == token::BinOp(token::Star) {
// STAR POINTER (bare pointer?)
self.bump();
TyPtr(self.parse_ptr())
- } else if self.token == token::LBRACKET {
+ } else if self.token == token::LBracket {
// VECTOR
- self.expect(&token::LBRACKET);
+ self.expect(&token::LBracket);
let t = self.parse_ty(true);
// Parse the `, ..e` in `[ int, ..e ]`
None => TyVec(t),
Some(suffix) => TyFixedLengthVec(t, suffix)
};
- self.expect(&token::RBRACKET);
+ self.expect(&token::RBracket);
t
- } else if self.token == token::BINOP(token::AND) ||
- self.token == token::ANDAND {
+ } else if self.token == token::BinOp(token::And) ||
+ self.token == token::AndAnd {
// BORROWED POINTER
self.expect_and();
self.parse_borrowed_pointee()
// BARE FUNCTION
self.parse_ty_bare_fn()
} else if self.token_is_closure_keyword() ||
- self.token == token::BINOP(token::OR) ||
- self.token == token::OROR ||
- (self.token == token::LT &&
+ self.token == token::BinOp(token::Or) ||
+ self.token == token::OrOr ||
+ (self.token == token::Lt &&
self.look_ahead(1, |t| {
- *t == token::GT || Parser::token_is_lifetime(t)
+ *t == token::Gt || Parser::token_is_lifetime(t)
})) {
// CLOSURE
} else if self.eat_keyword(keywords::Typeof) {
// TYPEOF
// In order to not be ambiguous, the type must be surrounded by parens.
- self.expect(&token::LPAREN);
+ self.expect(&token::LParen);
let e = self.parse_expr();
- self.expect(&token::RPAREN);
+ self.expect(&token::RParen);
TyTypeof(e)
} else if self.eat_keyword(keywords::Proc) {
self.parse_proc_type()
- } else if self.token == token::LT {
+ } else if self.token == token::Lt {
// QUALIFIED PATH
self.bump();
let for_type = self.parse_ty(true);
self.expect_keyword(keywords::As);
let trait_name = self.parse_path(LifetimeAndTypesWithoutColons);
- self.expect(&token::GT);
- self.expect(&token::MOD_SEP);
+ self.expect(&token::Gt);
+ self.expect(&token::ModSep);
let item_name = self.parse_ident();
TyQPath(P(QPath {
for_type: for_type,
trait_name: trait_name.path,
item_name: item_name,
}))
- } else if self.token == token::MOD_SEP
+ } else if self.token == token::ModSep
|| is_ident_or_path(&self.token) {
// NAMED TYPE
let mode = if plus_allowed {
bounds
} = self.parse_path(mode);
TyPath(path, bounds, ast::DUMMY_NODE_ID)
- } else if self.eat(&token::UNDERSCORE) {
+ } else if self.eat(&token::Underscore) {
// TYPE TO BE INFERRED
TyInfer
} else {
pub fn is_named_argument(&mut self) -> bool {
let offset = match self.token {
- token::BINOP(token::AND) => 1,
- token::ANDAND => 1,
+ token::BinOp(token::And) => 1,
+ token::AndAnd => 1,
_ if token::is_keyword(keywords::Mut, &self.token) => 1,
_ => 0
};
if offset == 0 {
is_plain_ident_or_underscore(&self.token)
- && self.look_ahead(1, |t| *t == token::COLON)
+ && self.look_ahead(1, |t| *t == token::Colon)
} else {
self.look_ahead(offset, |t| is_plain_ident_or_underscore(t))
- && self.look_ahead(offset + 1, |t| *t == token::COLON)
+ && self.look_ahead(offset + 1, |t| *t == token::Colon)
}
}
require_name);
let pat = self.parse_pat();
- self.expect(&token::COLON);
+ self.expect(&token::Colon);
pat
} else {
debug!("parse_arg_general ident_to_pat");
/// Parse an argument in a lambda header e.g. |arg, arg|
pub fn parse_fn_block_arg(&mut self) -> Arg {
let pat = self.parse_pat();
- let t = if self.eat(&token::COLON) {
+ let t = if self.eat(&token::Colon) {
self.parse_ty(true)
} else {
P(Ty {
}
pub fn maybe_parse_fixed_vstore(&mut self) -> Option<P<ast::Expr>> {
- if self.token == token::COMMA &&
- self.look_ahead(1, |t| *t == token::DOTDOT) {
+ if self.token == token::Comma &&
+ self.look_ahead(1, |t| *t == token::DotDot) {
self.bump();
self.bump();
Some(self.parse_expr())
/// Matches token_lit = LIT_INTEGER | ...
pub fn lit_from_token(&mut self, tok: &token::Token) -> Lit_ {
match *tok {
- token::LIT_BYTE(i) => LitByte(parse::byte_lit(i.as_str()).val0()),
- token::LIT_CHAR(i) => LitChar(parse::char_lit(i.as_str()).val0()),
- token::LIT_INTEGER(s) => parse::integer_lit(s.as_str(),
+ token::LitByte(i) => LitByte(parse::byte_lit(i.as_str()).val0()),
+ token::LitChar(i) => LitChar(parse::char_lit(i.as_str()).val0()),
+ token::LitInteger(s) => parse::integer_lit(s.as_str(),
&self.sess.span_diagnostic, self.span),
- token::LIT_FLOAT(s) => parse::float_lit(s.as_str()),
- token::LIT_STR(s) => {
+ token::LitFloat(s) => parse::float_lit(s.as_str()),
+ token::LitStr(s) => {
LitStr(token::intern_and_get_ident(parse::str_lit(s.as_str()).as_slice()),
ast::CookedStr)
}
- token::LIT_STR_RAW(s, n) => {
+ token::LitStrRaw(s, n) => {
LitStr(token::intern_and_get_ident(parse::raw_str_lit(s.as_str()).as_slice()),
ast::RawStr(n))
}
- token::LIT_BINARY(i) =>
+ token::LitBinary(i) =>
LitBinary(parse::binary_lit(i.as_str())),
- token::LIT_BINARY_RAW(i, _) =>
+ token::LitBinaryRaw(i, _) =>
LitBinary(Rc::new(i.as_str().as_bytes().iter().map(|&x| x).collect())),
- token::LPAREN => { self.expect(&token::RPAREN); LitNil },
+ token::LParen => { self.expect(&token::RParen); LitNil },
_ => { self.unexpected_last(tok); }
}
}
/// matches '-' lit | lit
pub fn parse_literal_maybe_minus(&mut self) -> P<Expr> {
let minus_lo = self.span.lo;
- let minus_present = self.eat(&token::BINOP(token::MINUS));
+ let minus_present = self.eat(&token::BinOp(token::Minus));
let lo = self.span.lo;
let literal = P(self.parse_lit());
pub fn parse_path(&mut self, mode: PathParsingMode) -> PathAndBounds {
// Check for a whole path...
let found = match self.token {
- INTERPOLATED(token::NtPath(_)) => Some(self.bump_and_get()),
+ token::Interpolated(token::NtPath(_)) => Some(self.bump_and_get()),
_ => None,
};
match found {
- Some(INTERPOLATED(token::NtPath(box path))) => {
+ Some(token::Interpolated(token::NtPath(box path))) => {
return PathAndBounds {
path: path,
bounds: None
}
let lo = self.span.lo;
- let is_global = self.eat(&token::MOD_SEP);
+ let is_global = self.eat(&token::ModSep);
// Parse any number of segments and bound sets. A segment is an
// identifier followed by an optional lifetime and a set of types.
// Parse the '::' before type parameters if it's required. If
// it is required and wasn't present, then we're done.
if mode == LifetimeAndTypesWithColons &&
- !self.eat(&token::MOD_SEP) {
+ !self.eat(&token::ModSep) {
segments.push(ast::PathSegment {
identifier: identifier,
lifetimes: Vec::new(),
// a double colon to get here in the first place.
if !(mode == LifetimeAndTypesWithColons &&
!any_lifetime_or_types) {
- if !self.eat(&token::MOD_SEP) {
+ if !self.eat(&token::ModSep) {
break
}
}
// error.
let opt_bounds = {
if mode == LifetimeAndTypesAndBounds &&
- self.eat(&token::BINOP(token::PLUS))
+ self.eat(&token::BinOp(token::Plus))
{
let bounds = self.parse_ty_param_bounds();
/// parses 0 or 1 lifetime
pub fn parse_opt_lifetime(&mut self) -> Option<ast::Lifetime> {
match self.token {
- token::LIFETIME(..) => {
+ token::Lifetime(..) => {
Some(self.parse_lifetime())
}
_ => {
/// Matches lifetime = LIFETIME
pub fn parse_lifetime(&mut self) -> ast::Lifetime {
match self.token {
- token::LIFETIME(i) => {
+ token::Lifetime(i) => {
let span = self.span;
self.bump();
return ast::Lifetime {
let mut res = Vec::new();
loop {
match self.token {
- token::LIFETIME(_) => {
+ token::Lifetime(_) => {
let lifetime = self.parse_lifetime();
let bounds =
- if self.eat(&token::COLON) {
- self.parse_lifetimes(token::BINOP(token::PLUS))
+ if self.eat(&token::Colon) {
+ self.parse_lifetimes(token::BinOp(token::Plus))
} else {
Vec::new()
};
}
match self.token {
- token::COMMA => { self.bump(); }
- token::GT => { return res; }
- token::BINOP(token::SHR) => { return res; }
+ token::Comma => { self.bump(); }
+ token::Gt => { return res; }
+ token::BinOp(token::Shr) => { return res; }
_ => {
let msg = format!("expected `,` or `>` after lifetime \
name, got: {}",
let mut res = Vec::new();
loop {
match self.token {
- token::LIFETIME(_) => {
+ token::Lifetime(_) => {
res.push(self.parse_lifetime());
}
_ => {
let lo = self.span.lo;
let i = self.parse_ident();
let hi = self.last_span.hi;
- self.expect(&token::COLON);
+ self.expect(&token::Colon);
let e = self.parse_expr();
ast::Field {
ident: spanned(lo, hi, i),
let ex: Expr_;
match self.token {
- token::LPAREN => {
+ token::LParen => {
self.bump();
// (e) is parenthesized e
// (e,) is a tuple with only one field, e
let mut trailing_comma = false;
- if self.token == token::RPAREN {
+ if self.token == token::RParen {
hi = self.span.hi;
self.bump();
let lit = P(spanned(lo, hi, LitNil));
return self.mk_expr(lo, hi, ExprLit(lit));
}
let mut es = vec!(self.parse_expr());
- self.commit_expr(&**es.last().unwrap(), &[], &[token::COMMA, token::RPAREN]);
- while self.token == token::COMMA {
+ self.commit_expr(&**es.last().unwrap(), &[], &[token::Comma, token::RParen]);
+ while self.token == token::Comma {
self.bump();
- if self.token != token::RPAREN {
+ if self.token != token::RParen {
es.push(self.parse_expr());
self.commit_expr(&**es.last().unwrap(), &[],
- &[token::COMMA, token::RPAREN]);
+ &[token::Comma, token::RParen]);
} else {
trailing_comma = true;
}
}
hi = self.span.hi;
- self.commit_expr_expecting(&**es.last().unwrap(), token::RPAREN);
+ self.commit_expr_expecting(&**es.last().unwrap(), token::RParen);
return if es.len() == 1 && !trailing_comma {
self.mk_expr(lo, hi, ExprParen(es.into_iter().nth(0).unwrap()))
self.mk_expr(lo, hi, ExprTup(es))
}
},
- token::LBRACE => {
+ token::LBrace => {
self.bump();
let blk = self.parse_block_tail(lo, DefaultBlock);
return self.mk_expr(blk.span.lo, blk.span.hi,
ExprBlock(blk));
},
- token::BINOP(token::OR) | token::OROR => {
+ token::BinOp(token::Or) | token::OrOr => {
return self.parse_lambda_expr(CaptureByRef);
},
// FIXME #13626: Should be able to stick in
// token::SELF_KEYWORD_NAME
- token::IDENT(id @ ast::Ident{
+ token::Ident(id @ ast::Ident{
name: ast::Name(token::SELF_KEYWORD_NAME_NUM),
ctxt: _
} ,false) => {
ex = ExprPath(path);
hi = self.last_span.hi;
}
- token::LBRACKET => {
+ token::LBracket => {
self.bump();
- if self.token == token::RBRACKET {
+ if self.token == token::RBracket {
// Empty vector.
self.bump();
ex = ExprVec(Vec::new());
} else {
// Nonempty vector.
let first_expr = self.parse_expr();
- if self.token == token::COMMA &&
- self.look_ahead(1, |t| *t == token::DOTDOT) {
+ if self.token == token::Comma &&
+ self.look_ahead(1, |t| *t == token::DotDot) {
// Repeating vector syntax: [ 0, ..512 ]
self.bump();
self.bump();
let count = self.parse_expr();
- self.expect(&token::RBRACKET);
+ self.expect(&token::RBracket);
ex = ExprRepeat(first_expr, count);
- } else if self.token == token::COMMA {
+ } else if self.token == token::Comma {
// Vector with two or more elements.
self.bump();
let remaining_exprs = self.parse_seq_to_end(
- &token::RBRACKET,
- seq_sep_trailing_allowed(token::COMMA),
+ &token::RBracket,
+ seq_sep_trailing_allowed(token::Comma),
|p| p.parse_expr()
);
let mut exprs = vec!(first_expr);
ex = ExprVec(exprs);
} else {
// Vector with one element.
- self.expect(&token::RBRACKET);
+ self.expect(&token::RBracket);
ex = ExprVec(vec!(first_expr));
}
}
if Parser::token_is_lifetime(&self.token) {
let lifetime = self.get_lifetime();
self.bump();
- self.expect(&token::COLON);
+ self.expect(&token::Colon);
if self.eat_keyword(keywords::While) {
return self.parse_while_expr(Some(lifetime))
}
ex = ExprBreak(None);
}
hi = self.span.hi;
- } else if self.token == token::MOD_SEP ||
+ } else if self.token == token::ModSep ||
is_ident(&self.token) &&
!self.is_keyword(keywords::True) &&
!self.is_keyword(keywords::False) {
self.parse_path(LifetimeAndTypesWithColons).path;
// `!`, as an operator, is prefix, so we know this isn't that
- if self.token == token::NOT {
+ if self.token == token::Not {
// MACRO INVOCATION expression
self.bump();
tts,
EMPTY_CTXT));
}
- if self.token == token::LBRACE {
+ if self.token == token::LBrace {
// This is a struct literal, unless we're prohibited
// from parsing struct literals here.
if !self.restrictions.contains(RESTRICTION_NO_STRUCT_LITERAL) {
let mut fields = Vec::new();
let mut base = None;
- while self.token != token::RBRACE {
- if self.eat(&token::DOTDOT) {
+ while self.token != token::RBrace {
+ if self.eat(&token::DotDot) {
base = Some(self.parse_expr());
break;
}
fields.push(self.parse_field());
self.commit_expr(&*fields.last().unwrap().expr,
- &[token::COMMA],
- &[token::RBRACE]);
+ &[token::Comma],
+ &[token::RBrace]);
}
if fields.len() == 0 && base.is_none() {
}
hi = self.span.hi;
- self.expect(&token::RBRACE);
+ self.expect(&token::RBrace);
ex = ExprStruct(pth, fields, base);
return self.mk_expr(lo, hi, ex);
}
/// Parse a block or unsafe block
pub fn parse_block_expr(&mut self, lo: BytePos, blk_mode: BlockCheckMode)
-> P<Expr> {
- self.expect(&token::LBRACE);
+ self.expect(&token::LBrace);
let blk = self.parse_block_tail(lo, blk_mode);
return self.mk_expr(blk.span.lo, blk.span.hi, ExprBlock(blk));
}
let mut hi;
loop {
// expr.f
- if self.eat(&token::DOT) {
+ if self.eat(&token::Dot) {
match self.token {
- token::IDENT(i, _) => {
+ token::Ident(i, _) => {
let dot = self.last_span.hi;
hi = self.span.hi;
self.bump();
- let (_, tys) = if self.eat(&token::MOD_SEP) {
+ let (_, tys) = if self.eat(&token::ModSep) {
self.expect_lt();
self.parse_generic_values_after_lt()
} else {
// expr.f() method call
match self.token {
- token::LPAREN => {
+ token::LParen => {
let mut es = self.parse_unspanned_seq(
- &token::LPAREN,
- &token::RPAREN,
- seq_sep_trailing_allowed(token::COMMA),
+ &token::LParen,
+ &token::RParen,
+ seq_sep_trailing_allowed(token::Comma),
|p| p.parse_expr()
);
hi = self.last_span.hi;
}
}
}
- token::LIT_INTEGER(n) => {
+ token::LitInteger(n) => {
let index = n.as_str();
let dot = self.last_span.hi;
hi = self.span.hi;
self.bump();
- let (_, tys) = if self.eat(&token::MOD_SEP) {
+ let (_, tys) = if self.eat(&token::ModSep) {
self.expect_lt();
self.parse_generic_values_after_lt()
} else {
}
}
}
- token::LIT_FLOAT(n) => {
+ token::LitFloat(n) => {
self.bump();
let last_span = self.last_span;
self.span_err(last_span,
if self.expr_is_complete(&*e) { break; }
match self.token {
// expr(...)
- token::LPAREN => {
+ token::LParen => {
let es = self.parse_unspanned_seq(
- &token::LPAREN,
- &token::RPAREN,
- seq_sep_trailing_allowed(token::COMMA),
+ &token::LParen,
+ &token::RParen,
+ seq_sep_trailing_allowed(token::Comma),
|p| p.parse_expr()
);
hi = self.last_span.hi;
// Could be either an index expression or a slicing expression.
// Any slicing non-terminal can have a mutable version with `mut`
// after the opening square bracket.
- token::LBRACKET => {
+ token::LBracket => {
self.bump();
let mutbl = if self.eat_keyword(keywords::Mut) {
MutMutable
};
match self.token {
// e[]
- token::RBRACKET => {
+ token::RBracket => {
self.bump();
hi = self.span.hi;
let slice = self.mk_slice(e, None, None, mutbl);
e = self.mk_expr(lo, hi, slice)
}
// e[..e]
- token::DOTDOT => {
+ token::DotDot => {
self.bump();
match self.token {
// e[..]
- token::RBRACKET => {
+ token::RBracket => {
self.bump();
hi = self.span.hi;
let slice = self.mk_slice(e, None, None, mutbl);
_ => {
hi = self.span.hi;
let e2 = self.parse_expr();
- self.commit_expr_expecting(&*e2, token::RBRACKET);
+ self.commit_expr_expecting(&*e2, token::RBracket);
let slice = self.mk_slice(e, None, Some(e2), mutbl);
e = self.mk_expr(lo, hi, slice)
}
let ix = self.parse_expr();
match self.token {
// e[e..] | e[e..e]
- token::DOTDOT => {
+ token::DotDot => {
self.bump();
let e2 = match self.token {
// e[e..]
- token::RBRACKET => {
+ token::RBracket => {
self.bump();
None
}
// e[e..e]
_ => {
let e2 = self.parse_expr();
- self.commit_expr_expecting(&*e2, token::RBRACKET);
+ self.commit_expr_expecting(&*e2, token::RBracket);
Some(e2)
}
};
"`mut` keyword is invalid in index expressions");
}
hi = self.span.hi;
- self.commit_expr_expecting(&*ix, token::RBRACKET);
+ self.commit_expr_expecting(&*ix, token::RBracket);
let index = self.mk_index(e, ix);
e = self.mk_expr(lo, hi, index)
}
pub fn parse_sep_and_kleene_op(&mut self) -> (Option<token::Token>, ast::KleeneOp) {
fn parse_kleene_op(parser: &mut Parser) -> Option<ast::KleeneOp> {
match parser.token {
- token::BINOP(token::STAR) => {
+ token::BinOp(token::Star) => {
parser.bump();
Some(ast::ZeroOrMore)
},
- token::BINOP(token::PLUS) => {
+ token::BinOp(token::Plus) => {
parser.bump();
Some(ast::OneOrMore)
},
fn parse_non_delim_tt_tok(p: &mut Parser) -> TokenTree {
maybe_whole!(deref p, NtTT);
match p.token {
- token::RPAREN | token::RBRACE | token::RBRACKET => {
+ token::RParen | token::RBrace | token::RBracket => {
// This is a conservative error: only report the last unclosed delimiter. The
// previous unclosed delimiters could actually be closed! The parser just hasn't
// gotten to them yet.
token_str).as_slice())
},
/* we ought to allow different depths of unquotation */
- token::DOLLAR if p.quote_depth > 0u => {
+ token::Dollar if p.quote_depth > 0u => {
p.bump();
let sp = p.span;
- if p.token == token::LPAREN {
+ if p.token == token::LParen {
let seq = p.parse_seq(
- &token::LPAREN,
- &token::RPAREN,
+ &token::LParen,
+ &token::RParen,
seq_sep_none(),
|p| p.parse_token_tree()
);
}
match (&self.token, token::close_delimiter_for(&self.token)) {
- (&token::EOF, _) => {
+ (&token::Eof, _) => {
let open_braces = self.open_braces.clone();
for sp in open_braces.iter() {
self.span_note(*sp, "Did you mean to close this delimiter?");
// up to EOF.
pub fn parse_all_token_trees(&mut self) -> Vec<TokenTree> {
let mut tts = Vec::new();
- while self.token != token::EOF {
+ while self.token != token::Eof {
tts.push(self.parse_token_tree());
}
tts
let mut lparens = 0u;
while self.token != *ket || lparens > 0u {
- if self.token == token::LPAREN { lparens += 1u; }
- if self.token == token::RPAREN { lparens -= 1u; }
+ if self.token == token::LParen { lparens += 1u; }
+ if self.token == token::RParen { lparens -= 1u; }
ret_val.push(self.parse_matcher(name_idx));
}
pub fn parse_matcher(&mut self, name_idx: &mut uint) -> Matcher {
let lo = self.span.lo;
- let m = if self.token == token::DOLLAR {
+ let m = if self.token == token::Dollar {
self.bump();
- if self.token == token::LPAREN {
+ if self.token == token::LParen {
let name_idx_lo = *name_idx;
self.bump();
let ms = self.parse_matcher_subseq_upto(name_idx,
- &token::RPAREN);
+ &token::RParen);
if ms.len() == 0u {
self.fatal("repetition body must be nonempty");
}
MatchSeq(ms, sep, kleene_op, name_idx_lo, *name_idx)
} else {
let bound_to = self.parse_ident();
- self.expect(&token::COLON);
+ self.expect(&token::Colon);
let nt_name = self.parse_ident();
let m = MatchNonterminal(bound_to, nt_name, *name_idx);
*name_idx += 1;
let ex;
match self.token {
- token::NOT => {
+ token::Not => {
self.bump();
let e = self.parse_prefix_expr();
hi = e.span.hi;
ex = self.mk_unary(UnNot, e);
}
- token::BINOP(token::MINUS) => {
+ token::BinOp(token::Minus) => {
self.bump();
let e = self.parse_prefix_expr();
hi = e.span.hi;
ex = self.mk_unary(UnNeg, e);
}
- token::BINOP(token::STAR) => {
+ token::BinOp(token::Star) => {
self.bump();
let e = self.parse_prefix_expr();
hi = e.span.hi;
ex = self.mk_unary(UnDeref, e);
}
- token::BINOP(token::AND) | token::ANDAND => {
+ token::BinOp(token::And) | token::AndAnd => {
self.expect_and();
let m = self.parse_mutability();
let e = self.parse_prefix_expr();
hi = e.span.hi;
ex = ExprAddrOf(m, e);
}
- token::TILDE => {
+ token::Tilde => {
self.bump();
let last_span = self.last_span;
match self.token {
- token::LBRACKET => self.obsolete(last_span, ObsoleteOwnedVector),
+ token::LBracket => self.obsolete(last_span, ObsoleteOwnedVector),
_ => self.obsolete(last_span, ObsoleteOwnedExpr)
}
hi = e.span.hi;
ex = self.mk_unary(UnUniq, e);
}
- token::IDENT(_, _) => {
+ token::Ident(_, _) => {
if !self.is_keyword(keywords::Box) {
return self.parse_dot_or_call_expr();
}
self.bump();
// Check for a place: `box(PLACE) EXPR`.
- if self.eat(&token::LPAREN) {
+ if self.eat(&token::LParen) {
// Support `box() EXPR` as the default.
- if !self.eat(&token::RPAREN) {
+ if !self.eat(&token::RParen) {
let place = self.parse_expr();
- self.expect(&token::RPAREN);
+ self.expect(&token::RParen);
let subexpression = self.parse_prefix_expr();
hi = subexpression.span.hi;
ex = ExprBox(place, subexpression);
// Prevent dynamic borrow errors later on by limiting the
// scope of the borrows.
- if self.token == token::BINOP(token::OR) &&
+ if self.token == token::BinOp(token::Or) &&
self.restrictions.contains(RESTRICTION_NO_BAR_OP) {
return lhs;
}
let lhs = self.parse_binops();
let restrictions = self.restrictions & RESTRICTION_NO_STRUCT_LITERAL;
match self.token {
- token::EQ => {
+ token::Eq => {
self.bump();
let rhs = self.parse_expr_res(restrictions);
self.mk_expr(lo, rhs.span.hi, ExprAssign(lhs, rhs))
}
- token::BINOPEQ(op) => {
+ token::BinOpEq(op) => {
self.bump();
let rhs = self.parse_expr_res(restrictions);
let aop = match op {
- token::PLUS => BiAdd,
- token::MINUS => BiSub,
- token::STAR => BiMul,
- token::SLASH => BiDiv,
- token::PERCENT => BiRem,
- token::CARET => BiBitXor,
- token::AND => BiBitAnd,
- token::OR => BiBitOr,
- token::SHL => BiShl,
- token::SHR => BiShr
+ token::Plus => BiAdd,
+ token::Minus => BiSub,
+ token::Star => BiMul,
+ token::Slash => BiDiv,
+ token::Percent => BiRem,
+ token::Caret => BiBitXor,
+ token::And => BiBitAnd,
+ token::Or => BiBitOr,
+ token::Shl => BiShl,
+ token::Shr => BiShr
};
let rhs_span = rhs.span;
let assign_op = self.mk_assign_op(aop, lhs, rhs);
let lo = self.last_span.lo;
self.expect_keyword(keywords::Let);
let pat = self.parse_pat();
- self.expect(&token::EQ);
+ self.expect(&token::Eq);
let expr = self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL);
let thn = self.parse_block();
let (hi, els) = if self.eat_keyword(keywords::Else) {
let lo = self.last_span.lo;
self.expect_keyword(keywords::Let);
let pat = self.parse_pat();
- self.expect(&token::EQ);
+ self.expect(&token::Eq);
let expr = self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL);
let body = self.parse_block();
let hi = body.span.hi;
fn parse_match_expr(&mut self) -> P<Expr> {
let lo = self.last_span.lo;
let discriminant = self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL);
- self.commit_expr_expecting(&*discriminant, token::LBRACE);
+ self.commit_expr_expecting(&*discriminant, token::LBrace);
let mut arms: Vec<Arm> = Vec::new();
- while self.token != token::RBRACE {
+ while self.token != token::RBrace {
arms.push(self.parse_arm());
}
let hi = self.span.hi;
if self.eat_keyword(keywords::If) {
guard = Some(self.parse_expr());
}
- self.expect(&token::FAT_ARROW);
+ self.expect(&token::FatArrow);
let expr = self.parse_expr_res(RESTRICTION_STMT_EXPR);
let require_comma =
!classify::expr_is_simple_block(&*expr)
- && self.token != token::RBRACE;
+ && self.token != token::RBrace;
if require_comma {
- self.commit_expr(&*expr, &[token::COMMA], &[token::RBRACE]);
+ self.commit_expr(&*expr, &[token::Comma], &[token::RBrace]);
} else {
- self.eat(&token::COMMA);
+ self.eat(&token::Comma);
}
ast::Arm {
/// Parse the RHS of a local variable declaration (e.g. '= 14;')
fn parse_initializer(&mut self) -> Option<P<Expr>> {
- if self.token == token::EQ {
+ if self.token == token::Eq {
self.bump();
Some(self.parse_expr())
} else {
let mut pats = Vec::new();
loop {
pats.push(self.parse_pat());
- if self.token == token::BINOP(token::OR) { self.bump(); }
+ if self.token == token::BinOp(token::Or) { self.bump(); }
else { return pats; }
};
}
let mut first = true;
let mut before_slice = true;
- while self.token != token::RBRACKET {
+ while self.token != token::RBracket {
if first {
first = false;
} else {
- self.expect(&token::COMMA);
+ self.expect(&token::Comma);
}
if before_slice {
- if self.token == token::DOTDOT {
+ if self.token == token::DotDot {
self.bump();
- if self.token == token::COMMA ||
- self.token == token::RBRACKET {
+ if self.token == token::Comma ||
+ self.token == token::RBracket {
slice = Some(P(ast::Pat {
id: ast::DUMMY_NODE_ID,
node: PatWild(PatWildMulti),
}
let subpat = self.parse_pat();
- if before_slice && self.token == token::DOTDOT {
+ if before_slice && self.token == token::DotDot {
self.bump();
slice = Some(subpat);
before_slice = false;
let mut fields = Vec::new();
let mut etc = false;
let mut first = true;
- while self.token != token::RBRACE {
+ while self.token != token::RBrace {
if first {
first = false;
} else {
- self.expect(&token::COMMA);
+ self.expect(&token::Comma);
// accept trailing commas
- if self.token == token::RBRACE { break }
+ if self.token == token::RBrace { break }
}
let lo = self.span.lo;
let hi;
- if self.token == token::DOTDOT {
+ if self.token == token::DotDot {
self.bump();
- if self.token != token::RBRACE {
+ if self.token != token::RBrace {
let token_str = self.this_token_to_string();
self.fatal(format!("expected `{}`, found `{}`", "}",
token_str).as_slice())
let fieldname = self.parse_ident();
- let (subpat, is_shorthand) = if self.token == token::COLON {
+ let (subpat, is_shorthand) = if self.token == token::Colon {
match bind_type {
BindByRef(..) | BindByValue(MutMutable) => {
let token_str = self.this_token_to_string();
let pat;
match self.token {
// parse _
- token::UNDERSCORE => {
+ token::Underscore => {
self.bump();
pat = PatWild(PatWildSingle);
hi = self.last_span.hi;
span: mk_sp(lo, hi)
})
}
- token::TILDE => {
+ token::Tilde => {
// parse ~pat
self.bump();
let sub = self.parse_pat();
span: mk_sp(lo, hi)
})
}
- token::BINOP(token::AND) | token::ANDAND => {
+ token::BinOp(token::And) | token::AndAnd => {
// parse &pat
let lo = self.span.lo;
self.expect_and();
span: mk_sp(lo, hi)
})
}
- token::LPAREN => {
+ token::LParen => {
// parse (pat,pat,pat,...) as tuple
self.bump();
- if self.token == token::RPAREN {
+ if self.token == token::RParen {
hi = self.span.hi;
self.bump();
let lit = P(codemap::Spanned {
pat = PatLit(expr);
} else {
let mut fields = vec!(self.parse_pat());
- if self.look_ahead(1, |t| *t != token::RPAREN) {
- while self.token == token::COMMA {
+ if self.look_ahead(1, |t| *t != token::RParen) {
+ while self.token == token::Comma {
self.bump();
- if self.token == token::RPAREN { break; }
+ if self.token == token::RParen { break; }
fields.push(self.parse_pat());
}
}
- if fields.len() == 1 { self.expect(&token::COMMA); }
- self.expect(&token::RPAREN);
+ if fields.len() == 1 { self.expect(&token::Comma); }
+ self.expect(&token::RParen);
pat = PatTup(fields);
}
hi = self.last_span.hi;
span: mk_sp(lo, hi)
})
}
- token::LBRACKET => {
+ token::LBracket => {
// parse [pat,pat,...] as vector pattern
self.bump();
let (before, slice, after) =
self.parse_pat_vec_elements();
- self.expect(&token::RBRACKET);
+ self.expect(&token::RBracket);
pat = ast::PatVec(before, slice, after);
hi = self.last_span.hi;
return P(ast::Pat {
}
// at this point, token != _, ~, &, &&, (, [
- if (!is_ident_or_path(&self.token) && self.token != token::MOD_SEP)
+ if (!is_ident_or_path(&self.token) && self.token != token::ModSep)
|| self.is_keyword(keywords::True)
|| self.is_keyword(keywords::False) {
// Parse an expression pattern or exp .. exp.
// These expressions are limited to literals (possibly
// preceded by unary-minus) or identifiers.
let val = self.parse_literal_maybe_minus();
- if (self.token == token::DOTDOTDOT) &&
+ if (self.token == token::DotDotDot) &&
self.look_ahead(1, |t| {
- *t != token::COMMA && *t != token::RBRACKET
+ *t != token::Comma && *t != token::RBracket
}) {
self.bump();
let end = if is_ident_or_path(&self.token) {
} else {
let can_be_enum_or_struct = self.look_ahead(1, |t| {
match *t {
- token::LPAREN | token::LBRACKET | token::LT |
- token::LBRACE | token::MOD_SEP => true,
+ token::LParen | token::LBracket | token::Lt |
+ token::LBrace | token::ModSep => true,
_ => false,
}
});
- if self.look_ahead(1, |t| *t == token::DOTDOTDOT) &&
+ if self.look_ahead(1, |t| *t == token::DotDotDot) &&
self.look_ahead(2, |t| {
- *t != token::COMMA && *t != token::RBRACKET
+ *t != token::Comma && *t != token::RBracket
}) {
let start = self.parse_expr_res(RESTRICTION_NO_BAR_OP);
- self.eat(&token::DOTDOTDOT);
+ self.eat(&token::DotDotDot);
let end = self.parse_expr_res(RESTRICTION_NO_BAR_OP);
pat = PatRange(start, end);
} else if is_plain_ident(&self.token) && !can_be_enum_or_struct {
let id = self.parse_ident();
let id_span = self.last_span;
let pth1 = codemap::Spanned{span:id_span, node: id};
- if self.eat(&token::NOT) {
+ if self.eat(&token::Not) {
// macro invocation
let ket = token::close_delimiter_for(&self.token)
.unwrap_or_else(|| self.fatal("expected open delimiter"));
let mac = MacInvocTT(ident_to_path(id_span,id), tts, EMPTY_CTXT);
pat = ast::PatMac(codemap::Spanned {node: mac, span: self.span});
} else {
- let sub = if self.eat(&token::AT) {
+ let sub = if self.eat(&token::At) {
// parse foo @ pat
Some(self.parse_pat())
} else {
let enum_path = self.parse_path(LifetimeAndTypesWithColons)
.path;
match self.token {
- token::LBRACE => {
+ token::LBrace => {
self.bump();
let (fields, etc) =
self.parse_pat_fields();
_ => {
let mut args: Vec<P<Pat>> = Vec::new();
match self.token {
- token::LPAREN => {
+ token::LParen => {
let is_dotdot = self.look_ahead(1, |t| {
match *t {
- token::DOTDOT => true,
+ token::DotDot => true,
_ => false,
}
});
// This is a "top constructor only" pat
self.bump();
self.bump();
- self.expect(&token::RPAREN);
+ self.expect(&token::RParen);
pat = PatEnum(enum_path, None);
} else {
args = self.parse_enum_variant_seq(
- &token::LPAREN,
- &token::RPAREN,
- seq_sep_trailing_allowed(token::COMMA),
+ &token::LParen,
+ &token::RParen,
+ seq_sep_trailing_allowed(token::Comma),
|p| p.parse_pat()
);
pat = PatEnum(enum_path, Some(args));
let ident = self.parse_ident();
let last_span = self.last_span;
let name = codemap::Spanned{span: last_span, node: ident};
- let sub = if self.eat(&token::AT) {
+ let sub = if self.eat(&token::At) {
Some(self.parse_pat())
} else {
None
// leads to a parse error. Note that if there is no explicit
// binding mode then we do not end up here, because the lookahead
// will direct us over to parse_enum_variant()
- if self.token == token::LPAREN {
+ if self.token == token::LParen {
let last_span = self.last_span;
self.span_fatal(
last_span,
node: TyInfer,
span: mk_sp(lo, lo),
});
- if self.eat(&token::COLON) {
+ if self.eat(&token::Colon) {
ty = self.parse_ty(true);
}
let init = self.parse_initializer();
self.fatal("expected ident");
}
let name = self.parse_ident();
- self.expect(&token::COLON);
+ self.expect(&token::Colon);
let ty = self.parse_ty(true);
spanned(lo, self.last_span.hi, ast::StructField_ {
kind: NamedField(name, pr),
P(spanned(lo, decl.span.hi, StmtDecl(decl, ast::DUMMY_NODE_ID)))
} else if is_ident(&self.token)
&& !token::is_any_keyword(&self.token)
- && self.look_ahead(1, |t| *t == token::NOT) {
+ && self.look_ahead(1, |t| *t == token::Not) {
// it's a macro invocation:
check_expected_item(self, item_attrs.as_slice());
maybe_whole!(no_clone self, NtBlock);
let lo = self.span.lo;
- self.expect(&token::LBRACE);
+ self.expect(&token::LBrace);
return self.parse_block_tail_(lo, DefaultBlock, Vec::new());
}
maybe_whole!(pair_empty self, NtBlock);
let lo = self.span.lo;
- self.expect(&token::LBRACE);
+ self.expect(&token::LBrace);
let (inner, next) = self.parse_inner_attrs_and_next();
(inner, self.parse_block_tail_(lo, DefaultBlock, next))
let mut attributes_box = attrs_remaining;
- while self.token != token::RBRACE {
+ while self.token != token::RBrace {
// parsing items even when they're not allowed lets us give
// better error messages and recover more gracefully.
attributes_box.push_all(self.parse_outer_attributes().as_slice());
match self.token {
- token::SEMI => {
+ token::Semi => {
if !attributes_box.is_empty() {
let last_span = self.last_span;
self.span_err(last_span,
}
self.bump(); // empty
}
- token::RBRACE => {
+ token::RBrace => {
// fall through and out.
}
_ => {
// expression without semicolon
if classify::expr_requires_semi_to_be_stmt(&*e) {
// Just check for errors and recover; do not eat semicolon yet.
- self.commit_stmt(&[], &[token::SEMI, token::RBRACE]);
+ self.commit_stmt(&[], &[token::Semi, token::RBrace]);
}
match self.token {
- token::SEMI => {
+ token::Semi => {
self.bump();
let span_with_semi = Span {
lo: span.lo,
span: span_with_semi,
}));
}
- token::RBRACE => {
+ token::RBrace => {
expr = Some(e);
}
_ => {
StmtMac(m, semi) => {
// statement macro; might be an expr
match self.token {
- token::SEMI => {
+ token::Semi => {
stmts.push(P(Spanned {
node: StmtMac(m, true),
span: span,
}));
self.bump();
}
- token::RBRACE => {
+ token::RBrace => {
// if a block ends in `m!(arg)` without
// a `;`, it must be an expr
expr = Some(
}
_ => { // all other kinds of statements:
if classify::stmt_ends_with_semi(&node) {
- self.commit_stmt_expecting(token::SEMI);
+ self.commit_stmt_expecting(token::Semi);
}
stmts.push(P(Spanned {
fn parse_colon_then_ty_param_bounds(&mut self)
-> OwnedSlice<TyParamBound>
{
- if !self.eat(&token::COLON) {
+ if !self.eat(&token::Colon) {
OwnedSlice::empty()
} else {
self.parse_ty_param_bounds()
{
let mut result = vec!();
loop {
- let lifetime_defs = if self.eat(&token::LT) {
+ let lifetime_defs = if self.eat(&token::Lt) {
let lifetime_defs = self.parse_lifetime_defs();
self.expect_gt();
lifetime_defs
Vec::new()
};
match self.token {
- token::LIFETIME(lifetime) => {
+ token::Lifetime(lifetime) => {
if lifetime_defs.len() > 0 {
let span = self.last_span;
self.span_err(span, "lifetime declarations are not \
}));
self.bump();
}
- token::MOD_SEP | token::IDENT(..) => {
+ token::ModSep | token::Ident(..) => {
let path =
self.parse_path(LifetimeAndTypesWithoutColons).path;
- if self.token == token::LPAREN {
+ if self.token == token::LParen {
self.bump();
let inputs = self.parse_seq_to_end(
- &token::RPAREN,
- seq_sep_trailing_allowed(token::COMMA),
+ &token::RParen,
+ seq_sep_trailing_allowed(token::Comma),
|p| p.parse_arg_general(false));
let (return_style, output) = self.parse_ret_ty();
result.push(UnboxedFnTyParamBound(P(UnboxedFnBound {
_ => break,
}
- if !self.eat(&token::BINOP(token::PLUS)) {
+ if !self.eat(&token::BinOp(token::Plus)) {
break;
}
}
let mut span = self.span;
let mut ident = self.parse_ident();
let mut unbound = None;
- if self.eat(&token::QUESTION) {
+ if self.eat(&token::Question) {
let tref = Parser::trait_ref_from_ident(ident, span);
unbound = Some(TraitTyParamBound(tref));
span = self.span;
let bounds = self.parse_colon_then_ty_param_bounds();
- let default = if self.token == token::EQ {
+ let default = if self.token == token::Eq {
self.bump();
Some(self.parse_ty(true))
}
/// | ( < lifetimes , typaramseq ( , )? > )
/// where typaramseq = ( typaram ) | ( typaram , typaramseq )
pub fn parse_generics(&mut self) -> ast::Generics {
- if self.eat(&token::LT) {
+ if self.eat(&token::Lt) {
let lifetime_defs = self.parse_lifetime_defs();
let mut seen_default = false;
- let ty_params = self.parse_seq_to_gt(Some(token::COMMA), |p| {
+ let ty_params = self.parse_seq_to_gt(Some(token::Comma), |p| {
p.forbid_lifetime();
let ty_param = p.parse_ty_param();
if ty_param.default.is_some() {
}
fn parse_generic_values_after_lt(&mut self) -> (Vec<ast::Lifetime>, Vec<P<Ty>> ) {
- let lifetimes = self.parse_lifetimes(token::COMMA);
+ let lifetimes = self.parse_lifetimes(token::Comma);
let result = self.parse_seq_to_gt(
- Some(token::COMMA),
+ Some(token::Comma),
|p| {
p.forbid_lifetime();
p.parse_ty(true)
loop {
let lo = self.span.lo;
let ident = match self.token {
- token::IDENT(..) => self.parse_ident(),
+ token::Ident(..) => self.parse_ident(),
_ => break,
};
- self.expect(&token::COLON);
+ self.expect(&token::Colon);
let bounds = self.parse_ty_param_bounds();
let hi = self.span.hi;
});
parsed_something = true;
- if !self.eat(&token::COMMA) {
+ if !self.eat(&token::Comma) {
break
}
}
let sp = self.span;
let mut args: Vec<Option<Arg>> =
self.parse_unspanned_seq(
- &token::LPAREN,
- &token::RPAREN,
- seq_sep_trailing_allowed(token::COMMA),
+ &token::LParen,
+ &token::RParen,
+ seq_sep_trailing_allowed(token::Comma),
|p| {
- if p.token == token::DOTDOTDOT {
+ if p.token == token::DotDotDot {
p.bump();
if allow_variadic {
- if p.token != token::RPAREN {
+ if p.token != token::RParen {
let span = p.span;
p.span_fatal(span,
"`...` must be last in argument list for variadic function");
fn is_self_ident(&mut self) -> bool {
match self.token {
- token::IDENT(id, false) => id.name == special_idents::self_.name,
+ token::Ident(id, false) => id.name == special_idents::self_.name,
_ => false
}
}
fn expect_self_ident(&mut self) -> ast::Ident {
match self.token {
- token::IDENT(id, false) if id.name == special_idents::self_.name => {
+ token::Ident(id, false) if id.name == special_idents::self_.name => {
self.bump();
id
},
}
}
- self.expect(&token::LPAREN);
+ self.expect(&token::LParen);
// A bit of complexity and lookahead is needed here in order to be
// backwards compatible.
let mut mutbl_self = MutImmutable;
let explicit_self = match self.token {
- token::BINOP(token::AND) => {
+ token::BinOp(token::And) => {
let eself = maybe_parse_borrowed_explicit_self(self);
self_ident_lo = self.last_span.lo;
self_ident_hi = self.last_span.hi;
eself
}
- token::TILDE => {
+ token::Tilde => {
// We need to make sure it isn't a type
if self.look_ahead(1, |t| token::is_keyword(keywords::Self, t)) {
self.bump();
}
SelfStatic
}
- token::BINOP(token::STAR) => {
+ token::BinOp(token::Star) => {
// Possibly "*self" or "*mut self" -- not supported. Try to avoid
// emitting cryptic "unexpected token" errors.
self.bump();
// error case, making bogus self ident:
SelfValue(special_idents::self_)
}
- token::IDENT(..) => {
+ token::Ident(..) => {
if self.is_self_ident() {
let self_ident = self.expect_self_ident();
// Determine whether this is the fully explicit form, `self:
// TYPE`.
- if self.eat(&token::COLON) {
+ if self.eat(&token::Colon) {
SelfExplicit(self.parse_ty(false), self_ident)
} else {
SelfValue(self_ident)
// Determine whether this is the fully explicit form,
// `self: TYPE`.
- if self.eat(&token::COLON) {
+ if self.eat(&token::Colon) {
SelfExplicit(self.parse_ty(false), self_ident)
} else {
SelfValue(self_ident)
}
} else if Parser::token_is_mutability(&self.token) &&
- self.look_ahead(1, |t| *t == token::TILDE) &&
+ self.look_ahead(1, |t| *t == token::Tilde) &&
self.look_ahead(2, |t| {
token::is_keyword(keywords::Self, t)
}) {
{
// If we parsed a self type, expect a comma before the argument list.
match self.token {
- token::COMMA => {
+ token::Comma => {
self.bump();
- let sep = seq_sep_trailing_allowed(token::COMMA);
+ let sep = seq_sep_trailing_allowed(token::Comma);
let mut fn_inputs = self.parse_seq_to_before_end(
- &token::RPAREN,
+ &token::RParen,
sep,
parse_arg_fn
);
fn_inputs.insert(0, Arg::new_self(explicit_self_sp, mutbl_self, $self_id));
fn_inputs
}
- token::RPAREN => {
+ token::RParen => {
vec!(Arg::new_self(explicit_self_sp, mutbl_self, $self_id))
}
_ => {
let fn_inputs = match explicit_self {
SelfStatic => {
- let sep = seq_sep_trailing_allowed(token::COMMA);
- self.parse_seq_to_before_end(&token::RPAREN, sep, parse_arg_fn)
+ let sep = seq_sep_trailing_allowed(token::Comma);
+ self.parse_seq_to_before_end(&token::RParen, sep, parse_arg_fn)
}
SelfValue(id) => parse_remaining_arguments!(id),
SelfRegion(_,_,id) => parse_remaining_arguments!(id),
};
- self.expect(&token::RPAREN);
+ self.expect(&token::RParen);
let hi = self.span.hi;
fn parse_fn_block_decl(&mut self)
-> (P<FnDecl>, Option<UnboxedClosureKind>) {
let (optional_unboxed_closure_kind, inputs_captures) = {
- if self.eat(&token::OROR) {
+ if self.eat(&token::OrOr) {
(None, Vec::new())
} else {
- self.expect(&token::BINOP(token::OR));
+ self.expect(&token::BinOp(token::Or));
let optional_unboxed_closure_kind =
self.parse_optional_unboxed_closure_kind();
let args = self.parse_seq_to_before_end(
- &token::BINOP(token::OR),
- seq_sep_trailing_allowed(token::COMMA),
+ &token::BinOp(token::Or),
+ seq_sep_trailing_allowed(token::Comma),
|p| p.parse_fn_block_arg()
);
self.bump();
(optional_unboxed_closure_kind, args)
}
};
- let (style, output) = if self.token == token::RARROW {
+ let (style, output) = if self.token == token::RArrow {
self.parse_ret_ty()
} else {
(Return, P(Ty {
/// Parses the `(arg, arg) -> return_type` header on a procedure.
fn parse_proc_decl(&mut self) -> P<FnDecl> {
let inputs =
- self.parse_unspanned_seq(&token::LPAREN,
- &token::RPAREN,
- seq_sep_trailing_allowed(token::COMMA),
+ self.parse_unspanned_seq(&token::LParen,
+ &token::RParen,
+ seq_sep_trailing_allowed(token::Comma),
|p| p.parse_fn_block_arg());
- let (style, output) = if self.token == token::RARROW {
+ let (style, output) = if self.token == token::RArrow {
self.parse_ret_ty()
} else {
(Return, P(Ty {
// code copied from parse_macro_use_or_failure... abstraction!
let (method_, hi, new_attrs) = {
if !token::is_any_keyword(&self.token)
- && self.look_ahead(1, |t| *t == token::NOT)
- && (self.look_ahead(2, |t| *t == token::LPAREN)
- || self.look_ahead(2, |t| *t == token::LBRACE)) {
+ && self.look_ahead(1, |t| *t == token::Not)
+ && (self.look_ahead(2, |t| *t == token::LParen)
+ || self.look_ahead(2, |t| *t == token::LBrace)) {
// method macro.
let pth = self.parse_path(NoTypesAllowed).path;
- self.expect(&token::NOT);
+ self.expect(&token::Not);
// eat a matched-delimiter token tree:
let tts = match token::close_delimiter_for(&self.token) {
fn parse_impl_items(&mut self) -> (Vec<ImplItem>, Vec<Attribute>) {
let mut impl_items = Vec::new();
- self.expect(&token::LBRACE);
+ self.expect(&token::LBrace);
let (inner_attrs, mut method_attrs) =
self.parse_inner_attrs_and_next();
- while !self.eat(&token::RBRACE) {
+ while !self.eat(&token::RBrace) {
method_attrs.extend(self.parse_outer_attributes().into_iter());
let vis = self.parse_visibility();
if self.eat_keyword(keywords::Type) {
// Special case: if the next identifier that follows is '(', don't
// allow this to be parsed as a trait.
- let could_be_trait = self.token != token::LPAREN;
+ let could_be_trait = self.token != token::LParen;
// Parse the trait.
let mut ty = self.parse_ty(true);
let class_name = self.parse_ident();
let mut generics = self.parse_generics();
- if self.eat(&token::COLON) {
+ if self.eat(&token::Colon) {
let ty = self.parse_ty(true);
self.span_err(ty.span, "`virtual` structs have been removed from the language");
}
let mut fields: Vec<StructField>;
let is_tuple_like;
- if self.eat(&token::LBRACE) {
+ if self.eat(&token::LBrace) {
// It's a record-like struct.
is_tuple_like = false;
fields = Vec::new();
- while self.token != token::RBRACE {
+ while self.token != token::RBrace {
fields.push(self.parse_struct_decl_field());
}
if fields.len() == 0 {
token::get_ident(class_name)).as_slice());
}
self.bump();
- } else if self.token == token::LPAREN {
+ } else if self.token == token::LParen {
// It's a tuple-like struct.
is_tuple_like = true;
fields = self.parse_unspanned_seq(
- &token::LPAREN,
- &token::RPAREN,
- seq_sep_trailing_allowed(token::COMMA),
+ &token::LParen,
+ &token::RParen,
+ seq_sep_trailing_allowed(token::Comma),
|p| {
let attrs = p.parse_outer_attributes();
let lo = p.span.lo;
written as `struct {};`",
token::get_ident(class_name)).as_slice());
}
- self.expect(&token::SEMI);
- } else if self.eat(&token::SEMI) {
+ self.expect(&token::Semi);
+ } else if self.eat(&token::Semi) {
// It's a unit-like struct.
is_tuple_like = true;
fields = Vec::new();
-> StructField {
let a_var = self.parse_name_and_ty(vis, attrs);
match self.token {
- token::COMMA => {
+ token::Comma => {
self.bump();
}
- token::RBRACE => {}
+ token::RBrace => {}
_ => {
let span = self.span;
let token_str = self.this_token_to_string();
if self.eat_keyword(keywords::For) {
let span = self.span;
let ident = self.parse_ident();
- if !self.eat(&token::QUESTION) {
+ if !self.eat(&token::Question) {
self.span_err(span,
"expected 'Sized?' after `for` in trait item");
return None;
fn parse_item_const(&mut self, m: Option<Mutability>) -> ItemInfo {
let id = self.parse_ident();
- self.expect(&token::COLON);
+ self.expect(&token::Colon);
let ty = self.parse_ty(true);
- self.expect(&token::EQ);
+ self.expect(&token::Eq);
let e = self.parse_expr();
- self.commit_expr_expecting(&*e, token::SEMI);
+ self.commit_expr_expecting(&*e, token::Semi);
let item = match m {
Some(m) => ItemStatic(ty, m, e),
None => ItemConst(ty, e),
fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> ItemInfo {
let id_span = self.span;
let id = self.parse_ident();
- if self.token == token::SEMI {
+ if self.token == token::Semi {
self.bump();
// This mod is in an external file. Let's go get it!
let (m, attrs) = self.eval_src_mod(id, outer_attrs, id_span);
(id, m, Some(attrs))
} else {
self.push_mod_path(id, outer_attrs);
- self.expect(&token::LBRACE);
+ self.expect(&token::LBrace);
let mod_inner_lo = self.span.lo;
let old_owns_directory = self.owns_directory;
self.owns_directory = true;
let (inner, next) = self.parse_inner_attrs_and_next();
- let m = self.parse_mod_items(token::RBRACE, next, mod_inner_lo);
- self.expect(&token::RBRACE);
+ let m = self.parse_mod_items(token::RBrace, next, mod_inner_lo);
+ self.expect(&token::RBrace);
self.owns_directory = old_owns_directory;
self.pop_mod_path();
(id, ItemMod(m), Some(inner))
let mod_inner_lo = p0.span.lo;
let (mod_attrs, next) = p0.parse_inner_attrs_and_next();
let first_item_outer_attrs = next;
- let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs, mod_inner_lo);
+ let m0 = p0.parse_mod_items(token::Eof, first_item_outer_attrs, mod_inner_lo);
self.sess.included_mod_stack.borrow_mut().pop();
return (ast::ItemMod(m0), mod_attrs);
}
let decl = self.parse_fn_decl(true);
self.parse_where_clause(&mut generics);
let hi = self.span.hi;
- self.expect(&token::SEMI);
+ self.expect(&token::Semi);
P(ast::ForeignItem {
ident: ident,
attrs: attrs,
let mutbl = self.eat_keyword(keywords::Mut);
let ident = self.parse_ident();
- self.expect(&token::COLON);
+ self.expect(&token::Colon);
let ty = self.parse_ty(true);
let hi = self.span.hi;
- self.expect(&token::SEMI);
+ self.expect(&token::Semi);
P(ForeignItem {
ident: ident,
attrs: attrs,
self.span_err(last_span,
Parser::expected_item_err(attrs_remaining.as_slice()));
}
- assert!(self.token == token::RBRACE);
+ assert!(self.token == token::RBrace);
ast::ForeignMod {
abi: abi,
view_items: view_items,
let span = self.span;
let (maybe_path, ident) = match self.token {
- token::IDENT(..) => {
+ token::Ident(..) => {
let the_ident = self.parse_ident();
- let path = if self.eat(&token::EQ) {
+ let path = if self.eat(&token::Eq) {
let path = self.parse_str();
let span = self.span;
self.obsolete(span, ObsoleteExternCrateRenaming);
} else {
None
};
- self.expect(&token::SEMI);
+ self.expect(&token::Semi);
(path, the_ident)
},
- token::LIT_STR(..) | token::LIT_STR_RAW(..) => {
+ token::LitStr(..) | token::LitStrRaw(..) => {
let path = self.parse_str();
self.expect_keyword(keywords::As);
let the_ident = self.parse_ident();
- self.expect(&token::SEMI);
+ self.expect(&token::Semi);
(Some(path), the_ident)
},
_ => {
attrs: Vec<Attribute> )
-> ItemOrViewItem {
- self.expect(&token::LBRACE);
+ self.expect(&token::LBrace);
let abi = opt_abi.unwrap_or(abi::C);
let (inner, next) = self.parse_inner_attrs_and_next();
let m = self.parse_foreign_mod_items(abi, next);
- self.expect(&token::RBRACE);
+ self.expect(&token::RBrace);
let last_span = self.last_span;
let item = self.mk_item(lo,
let ident = self.parse_ident();
let mut tps = self.parse_generics();
self.parse_where_clause(&mut tps);
- self.expect(&token::EQ);
+ self.expect(&token::Eq);
let ty = self.parse_ty(true);
- self.expect(&token::SEMI);
+ self.expect(&token::Semi);
(ident, ItemTy(ty, tps), None)
}
/// this should probably be renamed or refactored...
fn parse_struct_def(&mut self) -> P<StructDef> {
let mut fields: Vec<StructField> = Vec::new();
- while self.token != token::RBRACE {
+ while self.token != token::RBrace {
fields.push(self.parse_struct_decl_field());
}
self.bump();
let mut variants = Vec::new();
let mut all_nullary = true;
let mut any_disr = None;
- while self.token != token::RBRACE {
+ while self.token != token::RBrace {
let variant_attrs = self.parse_outer_attributes();
let vlo = self.span.lo;
let mut args = Vec::new();
let mut disr_expr = None;
ident = self.parse_ident();
- if self.eat(&token::LBRACE) {
+ if self.eat(&token::LBrace) {
// Parse a struct variant.
all_nullary = false;
kind = StructVariantKind(self.parse_struct_def());
- } else if self.token == token::LPAREN {
+ } else if self.token == token::LParen {
all_nullary = false;
let arg_tys = self.parse_enum_variant_seq(
- &token::LPAREN,
- &token::RPAREN,
- seq_sep_trailing_allowed(token::COMMA),
+ &token::LParen,
+ &token::RParen,
+ seq_sep_trailing_allowed(token::Comma),
|p| p.parse_ty(true)
);
for ty in arg_tys.into_iter() {
});
}
kind = TupleVariantKind(args);
- } else if self.eat(&token::EQ) {
+ } else if self.eat(&token::Eq) {
disr_expr = Some(self.parse_expr());
any_disr = disr_expr.as_ref().map(|expr| expr.span);
kind = TupleVariantKind(args);
};
variants.push(P(spanned(vlo, self.last_span.hi, vr)));
- if !self.eat(&token::COMMA) { break; }
+ if !self.eat(&token::Comma) { break; }
}
- self.expect(&token::RBRACE);
+ self.expect(&token::RBrace);
match any_disr {
Some(disr_span) if !all_nullary =>
self.span_err(disr_span,
let id = self.parse_ident();
let mut generics = self.parse_generics();
self.parse_where_clause(&mut generics);
- self.expect(&token::LBRACE);
+ self.expect(&token::LBrace);
let enum_definition = self.parse_enum_def(&generics);
(id, ItemEnum(enum_definition, generics), None)
fn fn_expr_lookahead(tok: &token::Token) -> bool {
match *tok {
- token::LPAREN | token::AT | token::TILDE | token::BINOP(_) => true,
+ token::LParen | token::At | token::Tilde | token::BinOp(_) => true,
_ => false
}
}
/// the `extern` keyword, if one is found.
fn parse_opt_abi(&mut self) -> Option<abi::Abi> {
match self.token {
- token::LIT_STR(s) | token::LIT_STR_RAW(s, _) => {
+ token::LitStr(s) | token::LitStrRaw(s, _) => {
self.bump();
let the_string = s.as_str();
match abi::lookup(the_string) {
macros_allowed: bool)
-> ItemOrViewItem {
let nt_item = match self.token {
- INTERPOLATED(token::NtItem(ref item)) => {
+ token::Interpolated(token::NtItem(ref item)) => {
Some((**item).clone())
}
_ => None
if self.eat_keyword(keywords::Use) {
// USE ITEM (IoviViewItem)
let view_item = self.parse_use();
- self.expect(&token::SEMI);
+ self.expect(&token::Semi);
return IoviViewItem(ast::ViewItem {
node: view_item,
attrs: attrs,
visibility,
maybe_append(attrs, extra_attrs));
return IoviItem(item);
- } else if self.token == token::LBRACE {
+ } else if self.token == token::LBrace {
return self.parse_item_foreign_mod(lo, opt_abi, visibility, attrs);
}
return IoviItem(item);
}
if self.is_keyword(keywords::Unsafe)
- && self.look_ahead(1u, |t| *t != token::LBRACE) {
+ && self.look_ahead(1u, |t| *t != token::LBrace) {
// UNSAFE FUNCTION ITEM
self.bump();
let abi = if self.eat_keyword(keywords::Extern) {
visibility: Visibility
) -> ItemOrViewItem {
if macros_allowed && !token::is_any_keyword(&self.token)
- && self.look_ahead(1, |t| *t == token::NOT)
+ && self.look_ahead(1, |t| *t == token::Not)
&& (self.look_ahead(2, |t| is_plain_ident(t))
- || self.look_ahead(2, |t| *t == token::LPAREN)
- || self.look_ahead(2, |t| *t == token::LBRACE)) {
+ || self.look_ahead(2, |t| *t == token::LParen)
+ || self.look_ahead(2, |t| *t == token::LBrace)) {
// MACRO INVOCATION ITEM
// item macro.
let pth = self.parse_path(NoTypesAllowed).path;
- self.expect(&token::NOT);
+ self.expect(&token::Not);
// a 'special' identifier (like what `macro_rules!` uses)
// is optional. We should eventually unify invoc syntax
fn parse_view_path(&mut self) -> P<ViewPath> {
let lo = self.span.lo;
- if self.token == token::LBRACE {
+ if self.token == token::LBrace {
// use {foo,bar}
let idents = self.parse_unspanned_seq(
- &token::LBRACE, &token::RBRACE,
- seq_sep_trailing_allowed(token::COMMA),
+ &token::LBrace, &token::RBrace,
+ seq_sep_trailing_allowed(token::Comma),
|p| p.parse_path_list_item());
let path = ast::Path {
span: mk_sp(lo, self.span.hi),
let first_ident = self.parse_ident();
let mut path = vec!(first_ident);
match self.token {
- token::EQ => {
+ token::Eq => {
// x = foo::bar
self.bump();
let path_lo = self.span.lo;
path = vec!(self.parse_ident());
- while self.token == token::MOD_SEP {
+ while self.token == token::ModSep {
self.bump();
let id = self.parse_ident();
path.push(id);
ast::DUMMY_NODE_ID)));
}
- token::MOD_SEP => {
+ token::ModSep => {
// foo::bar or foo::{a,b,c} or foo::*
- while self.token == token::MOD_SEP {
+ while self.token == token::ModSep {
self.bump();
match self.token {
- token::IDENT(i, _) => {
+ token::Ident(i, _) => {
self.bump();
path.push(i);
}
// foo::bar::{a,b,c}
- token::LBRACE => {
+ token::LBrace => {
let idents = self.parse_unspanned_seq(
- &token::LBRACE,
- &token::RBRACE,
- seq_sep_trailing_allowed(token::COMMA),
+ &token::LBrace,
+ &token::RBrace,
+ seq_sep_trailing_allowed(token::Comma),
|p| p.parse_path_list_item()
);
let path = ast::Path {
}
// foo::bar::*
- token::BINOP(token::STAR) => {
+ token::BinOp(token::Star) => {
self.bump();
let path = ast::Path {
span: mk_sp(lo, self.span.hi),
loop {
match self.parse_foreign_item(attrs, macros_allowed) {
IoviNone(returned_attrs) => {
- if self.token == token::RBRACE {
+ if self.token == token::RBrace {
attrs = returned_attrs;
break
}
let (inner, next) = self.parse_inner_attrs_and_next();
let first_item_outer_attrs = next;
// parse the items inside the crate:
- let m = self.parse_mod_items(token::EOF, first_item_outer_attrs, lo);
+ let m = self.parse_mod_items(token::Eof, first_item_outer_attrs, lo);
ast::Crate {
module: m,
pub fn parse_optional_str(&mut self)
-> Option<(InternedString, ast::StrStyle)> {
let (s, style) = match self.token {
- token::LIT_STR(s) => (self.id_to_interned_str(s.ident()), ast::CookedStr),
- token::LIT_STR_RAW(s, n) => {
+ token::LitStr(s) => (self.id_to_interned_str(s.ident()), ast::CookedStr),
+ token::LitStrRaw(s, n) => {
(self.id_to_interned_str(s.ident()), ast::RawStr(n))
}
_ => return None
// except according to those terms.
use ast;
-use ast::{Ident, Name, Mrk};
use ext::mtwt;
-use parse::token;
use ptr::P;
use util::interner::{RcStr, StrInterner};
use util::interner;
use std::path::BytesContainer;
use std::rc::Rc;
+// NOTE(stage0): remove these re-exports after the next snapshot
+// (needed to allow quotations to pass stage0)
+#[cfg(stage0)] pub use self::Plus as PLUS;
+#[cfg(stage0)] pub use self::Minus as MINUS;
+#[cfg(stage0)] pub use self::Star as STAR;
+#[cfg(stage0)] pub use self::Slash as SLASH;
+#[cfg(stage0)] pub use self::Percent as PERCENT;
+#[cfg(stage0)] pub use self::Caret as CARET;
+#[cfg(stage0)] pub use self::And as AND;
+#[cfg(stage0)] pub use self::Or as OR;
+#[cfg(stage0)] pub use self::Shl as SHL;
+#[cfg(stage0)] pub use self::Shr as SHR;
+#[cfg(stage0)] pub use self::Eq as EQ;
+#[cfg(stage0)] pub use self::Lt as LT;
+#[cfg(stage0)] pub use self::Le as LE;
+#[cfg(stage0)] pub use self::EqEq as EQEQ;
+#[cfg(stage0)] pub use self::Ne as NE;
+#[cfg(stage0)] pub use self::Ge as GE;
+#[cfg(stage0)] pub use self::Gt as GT;
+#[cfg(stage0)] pub use self::AndAnd as ANDAND;
+#[cfg(stage0)] pub use self::OrOr as OROR;
+#[cfg(stage0)] pub use self::Not as NOT;
+#[cfg(stage0)] pub use self::Tilde as TILDE;
+#[cfg(stage0)] pub use self::BinOp as BINOP;
+#[cfg(stage0)] pub use self::BinOpEq as BINOPEQ;
+#[cfg(stage0)] pub use self::At as AT;
+#[cfg(stage0)] pub use self::Dot as DOT;
+#[cfg(stage0)] pub use self::DotDot as DOTDOT;
+#[cfg(stage0)] pub use self::DotDotDot as DOTDOTDOT;
+#[cfg(stage0)] pub use self::Comma as COMMA;
+#[cfg(stage0)] pub use self::Semi as SEMI;
+#[cfg(stage0)] pub use self::Colon as COLON;
+#[cfg(stage0)] pub use self::ModSep as MOD_SEP;
+#[cfg(stage0)] pub use self::RArrow as RARROW;
+#[cfg(stage0)] pub use self::LArrow as LARROW;
+#[cfg(stage0)] pub use self::FatArrow as FAT_ARROW;
+#[cfg(stage0)] pub use self::LParen as LPAREN;
+#[cfg(stage0)] pub use self::RParen as RPAREN;
+#[cfg(stage0)] pub use self::LBracket as LBRACKET;
+#[cfg(stage0)] pub use self::RBracket as RBRACKET;
+#[cfg(stage0)] pub use self::LBrace as LBRACE;
+#[cfg(stage0)] pub use self::RBrace as RBRACE;
+#[cfg(stage0)] pub use self::Pound as POUND;
+#[cfg(stage0)] pub use self::Dollar as DOLLAR;
+#[cfg(stage0)] pub use self::Question as QUESTION;
+#[cfg(stage0)] pub use self::LitByte as LIT_BYTE;
+#[cfg(stage0)] pub use self::LitChar as LIT_CHAR;
+#[cfg(stage0)] pub use self::LitInteger as LIT_INTEGER;
+#[cfg(stage0)] pub use self::LitFloat as LIT_FLOAT;
+#[cfg(stage0)] pub use self::LitStr as LIT_STR;
+#[cfg(stage0)] pub use self::LitStrRaw as LIT_STR_RAW;
+#[cfg(stage0)] pub use self::LitBinary as LIT_BINARY;
+#[cfg(stage0)] pub use self::LitBinaryRaw as LIT_BINARY_RAW;
+#[cfg(stage0)] pub use self::Ident as IDENT;
+#[cfg(stage0)] pub use self::Underscore as UNDERSCORE;
+#[cfg(stage0)] pub use self::Lifetime as LIFETIME;
+#[cfg(stage0)] pub use self::Interpolated as INTERPOLATED;
+#[cfg(stage0)] pub use self::DocComment as DOC_COMMENT;
+#[cfg(stage0)] pub use self::Whitespace as WS;
+#[cfg(stage0)] pub use self::Comment as COMMENT;
+#[cfg(stage0)] pub use self::Shebang as SHEBANG;
+#[cfg(stage0)] pub use self::Eof as EOF;
+
#[allow(non_camel_case_types)]
#[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash, Show)]
-pub enum BinOp {
- PLUS,
- MINUS,
- STAR,
- SLASH,
- PERCENT,
- CARET,
- AND,
- OR,
- SHL,
- SHR,
+pub enum BinOpToken {
+ Plus,
+ Minus,
+ Star,
+ Slash,
+ Percent,
+ Caret,
+ And,
+ Or,
+ Shl,
+ Shr,
}
#[allow(non_camel_case_types)]
#[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash, Show)]
pub enum Token {
/* Expression-operator symbols. */
- EQ,
- LT,
- LE,
- EQEQ,
- NE,
- GE,
- GT,
- ANDAND,
- OROR,
- NOT,
- TILDE,
- BINOP(BinOp),
- BINOPEQ(BinOp),
+ Eq,
+ Lt,
+ Le,
+ EqEq,
+ Ne,
+ Ge,
+ Gt,
+ AndAnd,
+ OrOr,
+ Not,
+ Tilde,
+ BinOp(BinOpToken),
+ BinOpEq(BinOpToken),
/* Structural symbols */
- AT,
- DOT,
- DOTDOT,
- DOTDOTDOT,
- COMMA,
- SEMI,
- COLON,
- MOD_SEP,
- RARROW,
- LARROW,
- FAT_ARROW,
- LPAREN,
- RPAREN,
- LBRACKET,
- RBRACKET,
- LBRACE,
- RBRACE,
- POUND,
- DOLLAR,
- QUESTION,
+ At,
+ Dot,
+ DotDot,
+ DotDotDot,
+ Comma,
+ Semi,
+ Colon,
+ ModSep,
+ RArrow,
+ LArrow,
+ FatArrow,
+ LParen,
+ RParen,
+ LBracket,
+ RBracket,
+ LBrace,
+ RBrace,
+ Pound,
+ Dollar,
+ Question,
/* Literals */
- LIT_BYTE(Name),
- LIT_CHAR(Name),
- LIT_INTEGER(Name),
- LIT_FLOAT(Name),
- LIT_STR(Name),
- LIT_STR_RAW(Name, uint), /* raw str delimited by n hash symbols */
- LIT_BINARY(Name),
- LIT_BINARY_RAW(Name, uint), /* raw binary str delimited by n hash symbols */
+ LitByte(ast::Name),
+ LitChar(ast::Name),
+ LitInteger(ast::Name),
+ LitFloat(ast::Name),
+ LitStr(ast::Name),
+ LitStrRaw(ast::Name, uint), /* raw str delimited by n hash symbols */
+ LitBinary(ast::Name),
+ LitBinaryRaw(ast::Name, uint), /* raw binary str delimited by n hash symbols */
/* Name components */
/// An identifier contains an "is_mod_name" boolean,
/// indicating whether :: follows this token with no
/// whitespace in between.
- IDENT(Ident, bool),
- UNDERSCORE,
- LIFETIME(Ident),
+ Ident(ast::Ident, bool),
+ Underscore,
+ Lifetime(ast::Ident),
/* For interpolation */
- INTERPOLATED(Nonterminal),
- DOC_COMMENT(Name),
+ Interpolated(Nonterminal),
+ DocComment(ast::Name),
// Junk. These carry no data because we don't really care about the data
// they *would* carry, and don't really want to allocate a new ident for
// them. Instead, users could extract that from the associated span.
/// Whitespace
- WS,
+ Whitespace,
/// Comment
- COMMENT,
- SHEBANG(Name),
+ Comment,
+ Shebang(ast::Name),
- EOF,
+ Eof,
}
#[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash)]
NtExpr( P<ast::Expr>),
NtTy( P<ast::Ty>),
/// See IDENT, above, for meaning of bool in NtIdent:
- NtIdent(Box<Ident>, bool),
+ NtIdent(Box<ast::Ident>, bool),
/// Stuff inside brackets for attributes
NtMeta( P<ast::MetaItem>),
NtPath(Box<ast::Path>),
}
}
-pub fn binop_to_string(o: BinOp) -> &'static str {
+pub fn binop_to_string(o: BinOpToken) -> &'static str {
match o {
- PLUS => "+",
- MINUS => "-",
- STAR => "*",
- SLASH => "/",
- PERCENT => "%",
- CARET => "^",
- AND => "&",
- OR => "|",
- SHL => "<<",
- SHR => ">>"
+ Plus => "+",
+ Minus => "-",
+ Star => "*",
+ Slash => "/",
+ Percent => "%",
+ Caret => "^",
+ And => "&",
+ Or => "|",
+ Shl => "<<",
+ Shr => ">>",
}
}
pub fn to_string(t: &Token) -> String {
match *t {
- EQ => "=".into_string(),
- LT => "<".into_string(),
- LE => "<=".into_string(),
- EQEQ => "==".into_string(),
- NE => "!=".into_string(),
- GE => ">=".into_string(),
- GT => ">".into_string(),
- NOT => "!".into_string(),
- TILDE => "~".into_string(),
- OROR => "||".into_string(),
- ANDAND => "&&".into_string(),
- BINOP(op) => binop_to_string(op).into_string(),
- BINOPEQ(op) => {
- let mut s = binop_to_string(op).into_string();
- s.push_str("=");
- s
- }
-
- /* Structural symbols */
- AT => "@".into_string(),
- DOT => ".".into_string(),
- DOTDOT => "..".into_string(),
- DOTDOTDOT => "...".into_string(),
- COMMA => ",".into_string(),
- SEMI => ";".into_string(),
- COLON => ":".into_string(),
- MOD_SEP => "::".into_string(),
- RARROW => "->".into_string(),
- LARROW => "<-".into_string(),
- FAT_ARROW => "=>".into_string(),
- LPAREN => "(".into_string(),
- RPAREN => ")".into_string(),
- LBRACKET => "[".into_string(),
- RBRACKET => "]".into_string(),
- LBRACE => "{".into_string(),
- RBRACE => "}".into_string(),
- POUND => "#".into_string(),
- DOLLAR => "$".into_string(),
- QUESTION => "?".into_string(),
-
- /* Literals */
- LIT_BYTE(b) => {
- format!("b'{}'", b.as_str())
- }
- LIT_CHAR(c) => {
- format!("'{}'", c.as_str())
- }
- LIT_INTEGER(c) | LIT_FLOAT(c) => {
- c.as_str().into_string()
- }
-
- LIT_STR(s) => {
- format!("\"{}\"", s.as_str())
- }
- LIT_STR_RAW(s, n) => {
- format!("r{delim}\"{string}\"{delim}",
- delim="#".repeat(n), string=s.as_str())
- }
- LIT_BINARY(v) => {
- format!("b\"{}\"", v.as_str())
- }
- LIT_BINARY_RAW(s, n) => {
- format!("br{delim}\"{string}\"{delim}",
- delim="#".repeat(n), string=s.as_str())
- }
-
- /* Name components */
- IDENT(s, _) => get_ident(s).get().into_string(),
- LIFETIME(s) => {
- format!("{}", get_ident(s))
- }
- UNDERSCORE => "_".into_string(),
-
- /* Other */
- DOC_COMMENT(s) => s.as_str().into_string(),
- EOF => "<eof>".into_string(),
- WS => " ".into_string(),
- COMMENT => "/* */".into_string(),
- SHEBANG(s) => format!("/* shebang: {}*/", s.as_str()),
-
- INTERPOLATED(ref nt) => {
- match nt {
- &NtExpr(ref e) => ::print::pprust::expr_to_string(&**e),
- &NtMeta(ref e) => ::print::pprust::meta_item_to_string(&**e),
- &NtTy(ref e) => ::print::pprust::ty_to_string(&**e),
- &NtPath(ref e) => ::print::pprust::path_to_string(&**e),
- _ => {
- let mut s = "an interpolated ".into_string();
- match *nt {
- NtItem(..) => s.push_str("item"),
- NtBlock(..) => s.push_str("block"),
- NtStmt(..) => s.push_str("statement"),
- NtPat(..) => s.push_str("pattern"),
- NtMeta(..) => fail!("should have been handled"),
- NtExpr(..) => fail!("should have been handled"),
- NtTy(..) => fail!("should have been handled"),
- NtIdent(..) => s.push_str("identifier"),
- NtPath(..) => fail!("should have been handled"),
- NtTT(..) => s.push_str("tt"),
- NtMatchers(..) => s.push_str("matcher sequence")
- };
- s
- }
+ Eq => "=".into_string(),
+ Lt => "<".into_string(),
+ Le => "<=".into_string(),
+ EqEq => "==".into_string(),
+ Ne => "!=".into_string(),
+ Ge => ">=".into_string(),
+ Gt => ">".into_string(),
+ Not => "!".into_string(),
+ Tilde => "~".into_string(),
+ OrOr => "||".into_string(),
+ AndAnd => "&&".into_string(),
+ BinOp(op) => binop_to_string(op).into_string(),
+ BinOpEq(op) => format!("{}=", binop_to_string(op)),
+
+ /* Structural symbols */
+ At => "@".into_string(),
+ Dot => ".".into_string(),
+ DotDot => "..".into_string(),
+ DotDotDot => "...".into_string(),
+ Comma => ",".into_string(),
+ Semi => ";".into_string(),
+ Colon => ":".into_string(),
+ ModSep => "::".into_string(),
+ RArrow => "->".into_string(),
+ LArrow => "<-".into_string(),
+ FatArrow => "=>".into_string(),
+ LParen => "(".into_string(),
+ RParen => ")".into_string(),
+ LBracket => "[".into_string(),
+ RBracket => "]".into_string(),
+ LBrace => "{".into_string(),
+ RBrace => "}".into_string(),
+ Pound => "#".into_string(),
+ Dollar => "$".into_string(),
+ Question => "?".into_string(),
+
+ /* Literals */
+ LitByte(b) => format!("b'{}'", b.as_str()),
+ LitChar(c) => format!("'{}'", c.as_str()),
+ LitFloat(c) => c.as_str().into_string(),
+ LitInteger(c) => c.as_str().into_string(),
+ LitStr(s) => format!("\"{}\"", s.as_str()),
+ LitStrRaw(s, n) => format!("r{delim}\"{string}\"{delim}",
+ delim="#".repeat(n),
+ string=s.as_str()),
+ LitBinary(v) => format!("b\"{}\"", v.as_str()),
+ LitBinaryRaw(s, n) => format!("br{delim}\"{string}\"{delim}",
+ delim="#".repeat(n),
+ string=s.as_str()),
+
+ /* Name components */
+ Ident(s, _) => get_ident(s).get().into_string(),
+ Lifetime(s) => format!("{}", get_ident(s)),
+ Underscore => "_".into_string(),
+
+ /* Other */
+ DocComment(s) => s.as_str().into_string(),
+ Eof => "<eof>".into_string(),
+ Whitespace => " ".into_string(),
+ Comment => "/* */".into_string(),
+ Shebang(s) => format!("/* shebang: {}*/", s.as_str()),
+
+ Interpolated(ref nt) => match *nt {
+ NtExpr(ref e) => ::print::pprust::expr_to_string(&**e),
+ NtMeta(ref e) => ::print::pprust::meta_item_to_string(&**e),
+ NtTy(ref e) => ::print::pprust::ty_to_string(&**e),
+ NtPath(ref e) => ::print::pprust::path_to_string(&**e),
+ NtItem(..) => "an interpolated item".into_string(),
+ NtBlock(..) => "an interpolated block".into_string(),
+ NtStmt(..) => "an interpolated statement".into_string(),
+ NtPat(..) => "an interpolated pattern".into_string(),
+ NtIdent(..) => "an interpolated identifier".into_string(),
+ NtTT(..) => "an interpolated tt".into_string(),
+ NtMatchers(..) => "an interpolated matcher sequence".into_string(),
}
- }
}
}
pub fn can_begin_expr(t: &Token) -> bool {
match *t {
- LPAREN => true,
- LBRACE => true,
- LBRACKET => true,
- IDENT(_, _) => true,
- UNDERSCORE => true,
- TILDE => true,
- LIT_BYTE(_) => true,
- LIT_CHAR(_) => true,
- LIT_INTEGER(_) => true,
- LIT_FLOAT(_) => true,
- LIT_STR(_) => true,
- LIT_STR_RAW(_, _) => true,
- LIT_BINARY(_) => true,
- LIT_BINARY_RAW(_, _) => true,
- POUND => true,
- AT => true,
- NOT => true,
- BINOP(MINUS) => true,
- BINOP(STAR) => true,
- BINOP(AND) => true,
- BINOP(OR) => true, // in lambda syntax
- OROR => true, // in lambda syntax
- MOD_SEP => true,
- INTERPOLATED(NtExpr(..))
- | INTERPOLATED(NtIdent(..))
- | INTERPOLATED(NtBlock(..))
- | INTERPOLATED(NtPath(..)) => true,
- _ => false
+ LParen => true,
+ LBrace => true,
+ LBracket => true,
+ Ident(_, _) => true,
+ Underscore => true,
+ Tilde => true,
+ LitByte(_) => true,
+ LitChar(_) => true,
+ LitInteger(_) => true,
+ LitFloat(_) => true,
+ LitStr(_) => true,
+ LitStrRaw(_, _) => true,
+ LitBinary(_) => true,
+ LitBinaryRaw(_, _) => true,
+ Pound => true,
+ At => true,
+ Not => true,
+ BinOp(Minus) => true,
+ BinOp(Star) => true,
+ BinOp(And) => true,
+ BinOp(Or) => true, // in lambda syntax
+ OrOr => true, // in lambda syntax
+ ModSep => true,
+ Interpolated(NtExpr(..)) => true,
+ Interpolated(NtIdent(..)) => true,
+ Interpolated(NtBlock(..)) => true,
+ Interpolated(NtPath(..)) => true,
+ _ => false,
}
}
/// otherwise `None`.
pub fn close_delimiter_for(t: &Token) -> Option<Token> {
match *t {
- LPAREN => Some(RPAREN),
- LBRACE => Some(RBRACE),
- LBRACKET => Some(RBRACKET),
- _ => None
+ LParen => Some(RParen),
+ LBrace => Some(RBrace),
+ LBracket => Some(RBracket),
+ _ => None,
}
}
pub fn is_lit(t: &Token) -> bool {
match *t {
- LIT_BYTE(_) => true,
- LIT_CHAR(_) => true,
- LIT_INTEGER(_) => true,
- LIT_FLOAT(_) => true,
- LIT_STR(_) => true,
- LIT_STR_RAW(_, _) => true,
- LIT_BINARY(_) => true,
- LIT_BINARY_RAW(_, _) => true,
- _ => false
+ LitByte(_) => true,
+ LitChar(_) => true,
+ LitInteger(_) => true,
+ LitFloat(_) => true,
+ LitStr(_) => true,
+ LitStrRaw(_, _) => true,
+ LitBinary(_) => true,
+ LitBinaryRaw(_, _) => true,
+ _ => false,
}
}
pub fn is_ident(t: &Token) -> bool {
- match *t { IDENT(_, _) => true, _ => false }
+ match *t {
+ Ident(_, _) => true,
+ _ => false,
+ }
}
pub fn is_ident_or_path(t: &Token) -> bool {
match *t {
- IDENT(_, _) | INTERPOLATED(NtPath(..)) => true,
- _ => false
+ Ident(_, _) => true,
+ Interpolated(NtPath(..)) => true,
+ _ => false,
}
}
pub fn is_plain_ident(t: &Token) -> bool {
- match *t { IDENT(_, false) => true, _ => false }
+ match *t {
+ Ident(_, false) => true,
+ _ => false,
+ }
}
// Get the first "argument"
$( ($rk_name:expr, $rk_variant:ident, $rk_str:expr); )*
}
) => {
- static STRICT_KEYWORD_START: Name = first!($( Name($sk_name), )*);
- static STRICT_KEYWORD_FINAL: Name = last!($( Name($sk_name), )*);
- static RESERVED_KEYWORD_START: Name = first!($( Name($rk_name), )*);
- static RESERVED_KEYWORD_FINAL: Name = last!($( Name($rk_name), )*);
+ static STRICT_KEYWORD_START: ast::Name = first!($( ast::Name($sk_name), )*);
+ static STRICT_KEYWORD_FINAL: ast::Name = last!($( ast::Name($sk_name), )*);
+ static RESERVED_KEYWORD_START: ast::Name = first!($( ast::Name($rk_name), )*);
+ static RESERVED_KEYWORD_FINAL: ast::Name = last!($( ast::Name($rk_name), )*);
pub mod special_idents {
- use ast::{Ident, Name};
+ use ast;
$(
#[allow(non_uppercase_statics)]
- pub const $si_static: Ident = Ident { name: Name($si_name), ctxt: 0 };
+ pub const $si_static: ast::Ident = ast::Ident {
+ name: ast::Name($si_name),
+ ctxt: 0,
+ };
)*
}
pub mod special_names {
- use ast::Name;
- $( #[allow(non_uppercase_statics)] pub const $si_static: Name = Name($si_name); )*
+ use ast;
+ $(
+ #[allow(non_uppercase_statics)]
+ pub const $si_static: ast::Name = ast::Name($si_name);
+ )*
}
/**
* the language and may not appear as identifiers.
*/
pub mod keywords {
- use ast::Name;
+ use ast;
pub enum Keyword {
$( $sk_variant, )*
}
impl Keyword {
- pub fn to_name(&self) -> Name {
+ pub fn to_name(&self) -> ast::Name {
match *self {
- $( $sk_variant => Name($sk_name), )*
- $( $rk_variant => Name($rk_name), )*
+ $( $sk_variant => ast::Name($sk_name), )*
+ $( $rk_variant => ast::Name($rk_name), )*
}
}
}
}}
// If the special idents get renumbered, remember to modify these two as appropriate
-pub const SELF_KEYWORD_NAME: Name = Name(SELF_KEYWORD_NAME_NUM);
-const STATIC_KEYWORD_NAME: Name = Name(STATIC_KEYWORD_NAME_NUM);
-const SUPER_KEYWORD_NAME: Name = Name(SUPER_KEYWORD_NAME_NUM);
+pub const SELF_KEYWORD_NAME: ast::Name = ast::Name(SELF_KEYWORD_NAME_NUM);
+const STATIC_KEYWORD_NAME: ast::Name = ast::Name(STATIC_KEYWORD_NAME_NUM);
+const SUPER_KEYWORD_NAME: ast::Name = ast::Name(SUPER_KEYWORD_NAME_NUM);
pub const SELF_KEYWORD_NAME_NUM: u32 = 1;
const STATIC_KEYWORD_NAME_NUM: u32 = 2;
* operator
*/
pub fn token_to_binop(tok: &Token) -> Option<ast::BinOp> {
- match *tok {
- BINOP(STAR) => Some(ast::BiMul),
- BINOP(SLASH) => Some(ast::BiDiv),
- BINOP(PERCENT) => Some(ast::BiRem),
- BINOP(PLUS) => Some(ast::BiAdd),
- BINOP(MINUS) => Some(ast::BiSub),
- BINOP(SHL) => Some(ast::BiShl),
- BINOP(SHR) => Some(ast::BiShr),
- BINOP(AND) => Some(ast::BiBitAnd),
- BINOP(CARET) => Some(ast::BiBitXor),
- BINOP(OR) => Some(ast::BiBitOr),
- LT => Some(ast::BiLt),
- LE => Some(ast::BiLe),
- GE => Some(ast::BiGe),
- GT => Some(ast::BiGt),
- EQEQ => Some(ast::BiEq),
- NE => Some(ast::BiNe),
- ANDAND => Some(ast::BiAnd),
- OROR => Some(ast::BiOr),
- _ => None
- }
+ match *tok {
+ BinOp(Star) => Some(ast::BiMul),
+ BinOp(Slash) => Some(ast::BiDiv),
+ BinOp(Percent) => Some(ast::BiRem),
+ BinOp(Plus) => Some(ast::BiAdd),
+ BinOp(Minus) => Some(ast::BiSub),
+ BinOp(Shl) => Some(ast::BiShl),
+ BinOp(Shr) => Some(ast::BiShr),
+ BinOp(And) => Some(ast::BiBitAnd),
+ BinOp(Caret) => Some(ast::BiBitXor),
+ BinOp(Or) => Some(ast::BiBitOr),
+ Lt => Some(ast::BiLt),
+ Le => Some(ast::BiLe),
+ Ge => Some(ast::BiGe),
+ Gt => Some(ast::BiGt),
+ EqEq => Some(ast::BiEq),
+ Ne => Some(ast::BiNe),
+ AndAnd => Some(ast::BiAnd),
+ OrOr => Some(ast::BiOr),
+ _ => None,
+ }
}
// looks like we can get rid of this completely...
/// Returns the string contents of a name, using the task-local interner.
#[inline]
-pub fn get_name(name: Name) -> InternedString {
+pub fn get_name(name: ast::Name) -> InternedString {
let interner = get_ident_interner();
InternedString::new_from_rc_str(interner.get(name))
}
/// Returns the string contents of an identifier, using the task-local
/// interner.
#[inline]
-pub fn get_ident(ident: Ident) -> InternedString {
+pub fn get_ident(ident: ast::Ident) -> InternedString {
get_name(ident.name)
}
/// Maps a string to its interned representation.
#[inline]
-pub fn intern(s: &str) -> Name {
+pub fn intern(s: &str) -> ast::Name {
get_ident_interner().intern(s)
}
/// gensym's a new uint, using the current interner.
#[inline]
-pub fn gensym(s: &str) -> Name {
+pub fn gensym(s: &str) -> ast::Name {
get_ident_interner().gensym(s)
}
/// Maps a string to an identifier with an empty syntax context.
#[inline]
-pub fn str_to_ident(s: &str) -> Ident {
- Ident::new(intern(s))
+pub fn str_to_ident(s: &str) -> ast::Ident {
+ ast::Ident::new(intern(s))
}
/// Maps a string to a gensym'ed identifier.
#[inline]
-pub fn gensym_ident(s: &str) -> Ident {
- Ident::new(gensym(s))
+pub fn gensym_ident(s: &str) -> ast::Ident {
+ ast::Ident::new(gensym(s))
}
// create a fresh name that maps to the same string as the old one.
// note that this guarantees that str_ptr_eq(ident_to_string(src),interner_get(fresh_name(src)));
// that is, that the new name and the old one are connected to ptr_eq strings.
-pub fn fresh_name(src: &Ident) -> Name {
+pub fn fresh_name(src: &ast::Ident) -> ast::Name {
let interner = get_ident_interner();
interner.gensym_copy(src.name)
// following: debug version. Could work in final except that it's incompatible with
}
// create a fresh mark.
-pub fn fresh_mark() -> Mrk {
+pub fn fresh_mark() -> ast::Mrk {
gensym("mark").uint() as u32
}
pub fn is_keyword(kw: keywords::Keyword, tok: &Token) -> bool {
match *tok {
- token::IDENT(sid, false) => { kw.to_name() == sid.name }
+ Ident(sid, false) => { kw.to_name() == sid.name }
_ => { false }
}
}
pub fn is_any_keyword(tok: &Token) -> bool {
match *tok {
- token::IDENT(sid, false) => {
+ Ident(sid, false) => {
let n = sid.name;
n == SELF_KEYWORD_NAME
pub fn is_strict_keyword(tok: &Token) -> bool {
match *tok {
- token::IDENT(sid, false) => {
+ Ident(sid, false) => {
let n = sid.name;
n == SELF_KEYWORD_NAME
|| STRICT_KEYWORD_START <= n
&& n <= STRICT_KEYWORD_FINAL
},
- token::IDENT(sid, true) => {
+ Ident(sid, true) => {
let n = sid.name;
n != SELF_KEYWORD_NAME
pub fn is_reserved_keyword(tok: &Token) -> bool {
match *tok {
- token::IDENT(sid, false) => {
+ Ident(sid, false) => {
let n = sid.name;
RESERVED_KEYWORD_START <= n
pub fn mtwt_token_eq(t1 : &Token, t2 : &Token) -> bool {
match (t1,t2) {
- (&IDENT(id1,_),&IDENT(id2,_)) | (&LIFETIME(id1),&LIFETIME(id2)) =>
+ (&Ident(id1,_),&Ident(id2,_)) | (&Lifetime(id1),&Lifetime(id2)) =>
mtwt::resolve(id1) == mtwt::resolve(id2),
_ => *t1 == *t2
}
}
#[test] fn mtwt_token_eq_test() {
- assert!(mtwt_token_eq(&GT,&GT));
+ assert!(mtwt_token_eq(&Gt,&Gt));
let a = str_to_ident("bac");
let a1 = mark_ident(a,92);
- assert!(mtwt_token_eq(&IDENT(a,true),&IDENT(a1,false)));
+ assert!(mtwt_token_eq(&Ident(a,true),&Ident(a1,false)));
}
}
ast::TtToken(_, ref tk) => {
try!(word(&mut self.s, parse::token::to_string(tk).as_slice()));
match *tk {
- parse::token::DOC_COMMENT(..) => {
+ parse::token::DocComment(..) => {
hardbreak(&mut self.s)
}
_ => Ok(())
extern crate rustc;
use syntax::codemap::Span;
-use syntax::parse::token::{IDENT, get_ident};
+use syntax::parse::token;
use syntax::ast::{TokenTree, TtToken};
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr};
use syntax::ext::build::AstBuilder; // trait for expr_uint
("I", 1)];
let text = match args {
- [TtToken(_, IDENT(s, _))] => get_ident(s).to_string(),
+ [TtToken(_, token::Ident(s, _))] => token::get_ident(s).to_string(),
_ => {
cx.span_err(sp, "argument should be a single identifier");
return DummyResult::any(sp);
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-type t = { f: () }; //~ ERROR expected type, found token LBRACE
+type t = { f: () }; //~ ERROR expected type, found token LBrace