name,
source
);
- p.quote_depth += 1u;
+ p.quote_depth += 1us;
// right now this is re-creating the token trees from ... token trees.
maybe_aborted(p.parse_all_token_trees(),p)
}
name,
source
);
- p.quote_depth += 1u;
+ p.quote_depth += 1us;
// right now this is re-creating the token trees from ... token trees.
maybe_aborted(p.parse_all_token_trees(),p)
}
}
/// Abort if necessary
// Diff hunk: `mut` is dropped from `p` because the body only calls
// `abort_if_errors`, which this same patch relaxes to take `&self`
// (see the `abort_if_errors` hunk further down in this excerpt).
- pub fn maybe_aborted<T>(result: T, mut p: Parser) -> T {
+ pub fn maybe_aborted<T>(result: T, p: Parser) -> T {
p.abort_if_errors();
result
}
/// Rather than just accepting/rejecting a given literal, unescapes it as
/// well. Can take any slice prefixed by a character escape. Returns the
/// character and the number of characters consumed.
-pub fn char_lit(lit: &str) -> (char, int) {
+pub fn char_lit(lit: &str) -> (char, isize) {
use std::{num, char};
let mut chars = lit.chars();
let msg = format!("lexer should have rejected a bad character escape {}", lit);
let msg2 = &msg[];
- fn esc(len: uint, lit: &str) -> Option<(char, int)> {
+ fn esc(len: usize, lit: &str) -> Option<(char, isize)> {
num::from_str_radix(&lit[2..len], 16)
.and_then(char::from_u32)
- .map(|x| (x, len as int))
+ .map(|x| (x, len as isize))
}
- let unicode_escape = |&: | -> Option<(char, int)>
+ let unicode_escape = |&: | -> Option<(char, isize)>
if lit.as_bytes()[2] == b'{' {
let idx = lit.find('}').expect(msg2);
let subslice = &lit[3..idx];
num::from_str_radix(subslice, 16)
.and_then(char::from_u32)
- .map(|x| (x, subslice.chars().count() as int + 4))
+ .map(|x| (x, subslice.chars().count() as isize + 4))
} else {
esc(6, lit)
};
let error = |&: i| format!("lexer should have rejected {} at {}", lit, i);
/// Eat everything up to a non-whitespace
- fn eat<'a>(it: &mut iter::Peekable<(uint, char), str::CharIndices<'a>>) {
+ fn eat<'a>(it: &mut iter::Peekable<(usize, char), str::CharIndices<'a>>) {
loop {
match it.peek().map(|x| x.1) {
Some(' ') | Some('\n') | Some('\r') | Some('\t') => {
}
/// Parse a string representing a byte literal into its final form. Similar to `char_lit`
-pub fn byte_lit(lit: &str) -> (u8, uint) {
+pub fn byte_lit(lit: &str) -> (u8, usize) {
let err = |&: i| format!("lexer accepted invalid byte literal {} step {}", lit, i);
if lit.len() == 1 {
(lit.as_bytes()[0], 1)
} else {
- assert!(lit.as_bytes()[0] == b'\\', err(0i));
+ assert!(lit.as_bytes()[0] == b'\\', err(0is));
let b = match lit.as_bytes()[1] {
b'"' => b'"',
b'n' => b'\n',
let error = |&: i| format!("lexer should have rejected {} at {}", lit, i);
/// Eat everything up to a non-whitespace
- fn eat<'a, I: Iterator<Item=(uint, u8)>>(it: &mut iter::Peekable<(uint, u8), I>) {
+ fn eat<'a, I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<(usize, u8), I>) {
loop {
match it.peek().map(|x| x.1) {
Some(b' ') | Some(b'\n') | Some(b'\r') | Some(b'\t') => {
match suffix {
Some(suf) if looks_like_width_suffix(&['f'], suf) => {
match base {
- 16u => sd.span_err(sp, "hexadecimal float literal is not supported"),
- 8u => sd.span_err(sp, "octal float literal is not supported"),
- 2u => sd.span_err(sp, "binary float literal is not supported"),
+ 16us => sd.span_err(sp, "hexadecimal float literal is not supported"),
+ 8us => sd.span_err(sp, "octal float literal is not supported"),
+ 2us => sd.span_err(sp, "binary float literal is not supported"),
_ => ()
}
let ident = token::intern_and_get_ident(&*s);
#[test]
fn string_to_tts_1 () {
- let tts = string_to_tts("fn a (b : int) { b; }".to_string());
+ let tts = string_to_tts("fn a (b : i32) { b; }".to_string());
assert_eq!(json::encode(&tts),
"[\
{\
{\
\"variant\":\"Ident\",\
\"fields\":[\
- \"int\",\
+ \"i32\",\
\"Plain\"\
]\
}\
// check the contents of the tt manually:
#[test] fn parse_fundecl () {
- // this test depends on the intern order of "fn" and "int"
- assert!(string_to_item("fn a (b : int) { b; }".to_string()) ==
+ // this test depends on the intern order of "fn" and "i32"
+ assert_eq!(string_to_item("fn a (b : i32) { b; }".to_string()),
Some(
P(ast::Item{ident:str_to_ident("a"),
attrs:Vec::new(),
segments: vec!(
ast::PathSegment {
identifier:
- str_to_ident("int"),
+ str_to_ident("i32"),
parameters: ast::PathParameters::none(),
}
),
#[test] fn span_of_self_arg_pat_idents_are_correct() {
// Diff hunk: test fixture source strings migrate `int` -> `i32`
// (the fixtures are parser *inputs*, so the rename is part of the
// migration, not a cosmetic message edit), and the BytePos accessor
// is renamed `to_uint` -> `to_usize` to match the new usize type.
- let srcs = ["impl z { fn a (&self, &myarg: int) {} }",
- "impl z { fn a (&mut self, &myarg: int) {} }",
- "impl z { fn a (&'a self, &myarg: int) {} }",
- "impl z { fn a (self, &myarg: int) {} }",
- "impl z { fn a (self: Foo, &myarg: int) {} }",
+ let srcs = ["impl z { fn a (&self, &myarg: i32) {} }",
+ "impl z { fn a (&mut self, &myarg: i32) {} }",
+ "impl z { fn a (&'a self, &myarg: i32) {} }",
+ "impl z { fn a (self, &myarg: i32) {} }",
+ "impl z { fn a (self: Foo, &myarg: i32) {} }",
];
for &src in srcs.iter() {
let spans = get_spans_of_pat_idents(src);
// Only the first pattern ident (the `self` arg) is checked here.
let Span{ lo, hi, .. } = spans[0];
- assert!("self" == &src[lo.to_uint()..hi.to_uint()],
+ assert!("self" == &src[lo.to_usize()..hi.to_usize()],
"\"{}\" != \"self\". src=\"{}\"",
- &src[lo.to_uint()..hi.to_uint()], src)
+ &src[lo.to_usize()..hi.to_usize()], src)
}
}
/// the previous token or None (only stashed sometimes).
pub last_token: Option<Box<token::Token>>,
pub buffer: [TokenAndSpan; 4],
- pub buffer_start: int,
- pub buffer_end: int,
- pub tokens_consumed: uint,
+ pub buffer_start: isize,
+ pub buffer_end: isize,
+ pub tokens_consumed: usize,
pub restrictions: Restrictions,
- pub quote_depth: uint, // not (yet) related to the quasiquoter
+ pub quote_depth: usize, // not (yet) related to the quasiquoter
pub reader: Box<Reader+'a>,
pub interner: Rc<token::IdentInterner>,
/// The set of seen errors about obsolete syntax. Used to suppress
}
/// Convert the current token to a string using self's reader
// Diff hunk: receiver relaxed `&mut self` -> `&self`; the body only
// reads `self.token`, so a shared borrow suffices.
- pub fn this_token_to_string(&mut self) -> String {
+ pub fn this_token_to_string(&self) -> String {
Parser::token_to_string(&self.token)
}
// Diff hunk: `&mut self` -> `&self`. The body reads `self.last_span`
// and forwards to `span_fatal` (also made `&self` in this patch),
// then diverges (`-> !`), so no mutable access is needed.
- pub fn unexpected_last(&mut self, t: &token::Token) -> ! {
+ pub fn unexpected_last(&self, t: &token::Token) -> ! {
let token_str = Parser::token_to_string(t);
let last_span = self.last_span;
self.span_fatal(last_span, &format!("unexpected token: `{}`",
token_str)[]);
}
// Diff hunk: `&mut self` -> `&self`, enabled by the matching
// relaxation of `this_token_to_string` and `fatal` in this patch.
- pub fn unexpected(&mut self) -> ! {
+ pub fn unexpected(&self) -> ! {
let this_token = self.this_token_to_string();
self.fatal(&format!("unexpected token: `{}`", this_token)[]);
}
}
}
- pub fn expect_no_suffix(&mut self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
+ pub fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
match suffix {
None => {/* everything ok */}
Some(suf) => {
// would encounter a `>` and stop. This lets the parser handle trailing
// commas in generic parameters, because it can stop either after
// parsing a type or after parsing a comma.
- for i in iter::count(0u, 1) {
+ for i in iter::count(0us, 1) {
if self.check(&token::Gt)
|| self.token == token::BinOp(token::Shr)
|| self.token == token::Ge
self.reader.real_token()
} else {
// Avoid token copies with `replace`.
- let buffer_start = self.buffer_start as uint;
- let next_index = (buffer_start + 1) & 3 as uint;
- self.buffer_start = next_index as int;
+ let buffer_start = self.buffer_start as usize;
+ let next_index = (buffer_start + 1) & 3 as usize;
+ self.buffer_start = next_index as isize;
let placeholder = TokenAndSpan {
tok: token::Underscore,
};
self.span = next.sp;
self.token = next.tok;
- self.tokens_consumed += 1u;
+ self.tokens_consumed += 1us;
self.expected_tokens.clear();
// check after each token
self.check_unknown_macro_variable();
self.token = next;
self.span = mk_sp(lo, hi);
}
// Diff hunk: return type migrated `int` -> `isize`, matching the
// `buffer_start`/`buffer_end: isize` field hunk earlier in this
// patch. The second return handles ring-buffer wrap-around in the
// 4-slot token lookahead buffer.
// NOTE(review): receiver stays `&mut self` although the body only
// reads fields — could have been relaxed to `&self` like the
// diagnostic methods below; presumably left for a later pass.
- pub fn buffer_length(&mut self) -> int {
+ pub fn buffer_length(&mut self) -> isize {
if self.buffer_start <= self.buffer_end {
return self.buffer_end - self.buffer_start;
}
return (4 - self.buffer_start) + self.buffer_end;
}
// Diff hunk: the public `distance` parameter becomes `usize`
// (an unsigned count), while the internal ring-buffer arithmetic
// keeps signed `isize` (cast at the boundary) because
// `buffer_start`/`buffer_end` are `isize` fields. `& 3` wraps
// indices into the 4-element `buffer` array.
- pub fn look_ahead<R, F>(&mut self, distance: uint, f: F) -> R where
+ pub fn look_ahead<R, F>(&mut self, distance: usize, f: F) -> R where
F: FnOnce(&token::Token) -> R,
{
- let dist = distance as int;
+ let dist = distance as isize;
while self.buffer_length() < dist {
- self.buffer[self.buffer_end as uint] = self.reader.real_token();
+ self.buffer[self.buffer_end as usize] = self.reader.real_token();
self.buffer_end = (self.buffer_end + 1) & 3;
}
- f(&self.buffer[((self.buffer_start + dist - 1) & 3) as uint].tok)
+ f(&self.buffer[((self.buffer_start + dist - 1) & 3) as usize].tok)
}
// Diff hunks: every diagnostic forwarder below is relaxed from
// `&mut self` to `&self`. Each body only reads `self.sess` /
// `self.span` and forwards to `self.sess.span_diagnostic`, so a
// shared borrow suffices. This group of changes is what allows
// `maybe_aborted` (top of this excerpt) to drop `mut p`, and lets
// callers report errors without holding a mutable parser borrow.
- pub fn fatal(&mut self, m: &str) -> ! {
+ pub fn fatal(&self, m: &str) -> ! {
self.sess.span_diagnostic.span_fatal(self.span, m)
}
- pub fn span_fatal(&mut self, sp: Span, m: &str) -> ! {
+ pub fn span_fatal(&self, sp: Span, m: &str) -> ! {
self.sess.span_diagnostic.span_fatal(sp, m)
}
- pub fn span_fatal_help(&mut self, sp: Span, m: &str, help: &str) -> ! {
+ pub fn span_fatal_help(&self, sp: Span, m: &str, help: &str) -> ! {
self.span_err(sp, m);
self.span_help(sp, help);
panic!(diagnostic::FatalError);
}
- pub fn span_note(&mut self, sp: Span, m: &str) {
+ pub fn span_note(&self, sp: Span, m: &str) {
self.sess.span_diagnostic.span_note(sp, m)
}
- pub fn span_help(&mut self, sp: Span, m: &str) {
+ pub fn span_help(&self, sp: Span, m: &str) {
self.sess.span_diagnostic.span_help(sp, m)
}
- pub fn bug(&mut self, m: &str) -> ! {
+ pub fn bug(&self, m: &str) -> ! {
self.sess.span_diagnostic.span_bug(self.span, m)
}
- pub fn warn(&mut self, m: &str) {
+ pub fn warn(&self, m: &str) {
self.sess.span_diagnostic.span_warn(self.span, m)
}
- pub fn span_warn(&mut self, sp: Span, m: &str) {
+ pub fn span_warn(&self, sp: Span, m: &str) {
self.sess.span_diagnostic.span_warn(sp, m)
}
- pub fn span_err(&mut self, sp: Span, m: &str) {
+ pub fn span_err(&self, sp: Span, m: &str) {
self.sess.span_diagnostic.span_err(sp, m)
}
- pub fn span_bug(&mut self, sp: Span, m: &str) -> ! {
+ pub fn span_bug(&self, sp: Span, m: &str) -> ! {
self.sess.span_diagnostic.span_bug(sp, m)
}
- pub fn abort_if_errors(&mut self) {
+ pub fn abort_if_errors(&self) {
self.sess.span_diagnostic.handler().abort_if_errors();
}
self.expect(&token::OpenDelim(token::Bracket));
let t = self.parse_ty_sum();
- // Parse the `; e` in `[ int; e ]`
+ // Parse the `; e` in `[ i32; e ]`
// where `e` is a const expression
let t = match self.maybe_parse_fixed_length_of_vec() {
None => TyVec(t),
}
/// Matches token_lit = LIT_INTEGER | ...
- pub fn lit_from_token(&mut self, tok: &token::Token) -> Lit_ {
+ pub fn lit_from_token(&self, tok: &token::Token) -> Lit_ {
match *tok {
token::Interpolated(token::NtExpr(ref v)) => {
match v.node {
ExprField(expr, ident)
}
// Diff hunk: the tuple-field index wrapper type migrates
// `codemap::Spanned<uint>` -> `codemap::Spanned<usize>`.
- pub fn mk_tup_field(&mut self, expr: P<Expr>, idx: codemap::Spanned<uint>) -> ast::Expr_ {
+ pub fn mk_tup_field(&mut self, expr: P<Expr>, idx: codemap::Spanned<usize>) -> ast::Expr_ {
ExprTupField(expr, idx)
}
hi = self.span.hi;
self.bump();
- let index = n.as_str().parse::<uint>();
+ let index = n.as_str().parse::<usize>();
match index {
Some(n) => {
let id = spanned(dot, hi, n);
};
self.span_help(last_span,
&format!("try parenthesizing the first index; e.g., `(foo.{}){}`",
- float.trunc() as uint,
+ float.trunc() as usize,
&float.fract().to_string()[1..])[]);
}
self.abort_if_errors();
}
pub fn check_unknown_macro_variable(&mut self) {
- if self.quote_depth == 0u {
+ if self.quote_depth == 0us {
match self.token {
token::SubstNt(name, _) =>
self.fatal(&format!("unknown macro variable `{}`",
token_str)[])
},
/* we ought to allow different depths of unquotation */
- token::Dollar | token::SubstNt(..) if p.quote_depth > 0u => {
+ token::Dollar | token::SubstNt(..) if p.quote_depth > 0us => {
p.parse_unquoted()
}
_ => {
}
/// Parse an expression of binops of at least min_prec precedence
- pub fn parse_more_binops(&mut self, lhs: P<Expr>, min_prec: uint) -> P<Expr> {
+ pub fn parse_more_binops(&mut self, lhs: P<Expr>, min_prec: usize) -> P<Expr> {
if self.expr_is_complete(&*lhs) { return lhs; }
// Prevent dynamic borrow errors later on by limiting the
"Chained comparison operators require parentheses");
if op == BiLt && outer_op == BiGt {
self.span_help(op_span,
- "Use ::< instead of < if you meant to specify type arguments.");
+ "use ::< instead of < if you meant to specify type arguments");
}
}
_ => {}
Some(attrs))
}
- /// Parse a::B<String,int>
+ /// Parse a::B<String,i32>
// Diff hunk: only the doc-comment example changes (`int` -> `i32`);
// the function body is untouched context.
// NOTE(review): the `ast::TraitRef` literal shows only a `path`
// field here — the excerpt may have elided other fields; confirm
// against the full file before relying on this hunk's context lines.
fn parse_trait_ref(&mut self) -> TraitRef {
ast::TraitRef {
path: self.parse_path(LifetimeAndTypesWithoutColons),
}
}
- /// Parse for<'l> a::B<String,int>
+ /// Parse for<'l> a::B<String,i32>
fn parse_poly_trait_ref(&mut self) -> PolyTraitRef {
let lifetime_defs = self.parse_late_bound_lifetime_defs();
}
}
- if first && attrs_remaining_len > 0u {
+ if first && attrs_remaining_len > 0us {
// We parsed attributes for the first item but didn't find it
let last_span = self.last_span;
self.span_err(last_span,
return IoviItem(item);
}
if self.token.is_keyword(keywords::Unsafe) &&
- self.look_ahead(1u, |t| t.is_keyword(keywords::Trait))
+ self.look_ahead(1us, |t| t.is_keyword(keywords::Trait))
{
// UNSAFE TRAIT ITEM
self.expect_keyword(keywords::Unsafe);
return IoviItem(item);
}
if self.token.is_keyword(keywords::Unsafe) &&
- self.look_ahead(1u, |t| t.is_keyword(keywords::Impl))
+ self.look_ahead(1us, |t| t.is_keyword(keywords::Impl))
{
// IMPL ITEM
self.expect_keyword(keywords::Unsafe);
return IoviItem(item);
}
if self.token.is_keyword(keywords::Unsafe)
- && self.look_ahead(1u, |t| *t != token::OpenDelim(token::Brace)) {
+ && self.look_ahead(1us, |t| *t != token::OpenDelim(token::Brace)) {
// UNSAFE FUNCTION ITEM
self.bump();
let abi = if self.eat_keyword(keywords::Extern) {
}
}
}
- let mut rename_to = path[path.len() - 1u];
+ let mut rename_to = path[path.len() - 1us];
let path = ast::Path {
span: mk_sp(lo, self.last_span.hi),
global: false,