impl Abi {
#[inline]
- pub fn index(&self) -> uint {
- *self as uint
+ pub fn index(&self) -> usize {
+ *self as usize
}
#[inline]
/// A SyntaxContext represents a chain of macro-expandings
/// and renamings. Each macro expansion corresponds to
-/// a fresh uint
+/// a fresh usize
// I'm representing this syntax context as an index into
// a table, in order to work around a compiler bug
}
}
- pub fn uint(&self) -> uint {
+ pub fn uint(&self) -> usize {
let Name(nm) = *self;
- nm as uint
+ nm as usize
}
pub fn ident(&self) -> Ident {
ExprAssign(P<Expr>, P<Expr>),
ExprAssignOp(BinOp, P<Expr>, P<Expr>),
ExprField(P<Expr>, SpannedIdent),
- ExprTupField(P<Expr>, Spanned<uint>),
+ ExprTupField(P<Expr>, Spanned<usize>),
ExprIndex(P<Expr>, P<Expr>),
ExprRange(Option<P<Expr>>, Option<P<Expr>>),
/// Whether the sequence can be repeated zero (*), or one or more times (+)
pub op: KleeneOp,
/// The number of `MatchNt`s that appear in the sequence (and subsequences)
- pub num_captures: uint,
+ pub num_captures: usize,
}
/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
}
impl TokenTree {
- pub fn len(&self) -> uint {
+ pub fn len(&self) -> usize {
match *self {
TtToken(_, token::DocComment(_)) => 2,
TtToken(_, token::SpecialVarNt(..)) => 2,
}
}
- pub fn get_tt(&self, index: uint) -> TokenTree {
+ pub fn get_tt(&self, index: usize) -> TokenTree {
match (self, index) {
(&TtToken(sp, token::DocComment(_)), 0) => {
TtToken(sp, token::Pound)
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)]
pub enum StrStyle {
CookedStr,
- RawStr(uint)
+ RawStr(usize)
}
pub type Lit = Spanned<Lit_>;
}
impl LitIntType {
- pub fn suffix_len(&self) -> uint {
+ pub fn suffix_len(&self) -> usize {
match *self {
UnsuffixedIntLit(_) => 0,
SignedIntLit(s, _) => s.suffix_len(),
}
impl IntTy {
- pub fn suffix_len(&self) -> uint {
+ pub fn suffix_len(&self) -> usize {
match *self {
TyIs(true) /* i */ => 1,
TyIs(false) /* is */ | TyI8 => 2,
}
impl UintTy {
- pub fn suffix_len(&self) -> uint {
+ pub fn suffix_len(&self) -> usize {
match *self {
TyUs(true) /* u */ => 1,
TyUs(false) /* us */ | TyU8 => 2,
}
impl FloatTy {
- pub fn suffix_len(&self) -> uint {
+ pub fn suffix_len(&self) -> usize {
match *self {
TyF32 | TyF64 => 3, // add F128 handling here
}
TyPtr(MutTy),
/// A reference (`&'a T` or `&'a mut T`)
TyRptr(Option<Lifetime>, MutTy),
- /// A bare function (e.g. `fn(uint) -> bool`)
+ /// A bare function (e.g. `fn(usize) -> bool`)
TyBareFn(P<BareFnTy>),
/// A tuple (`(A, B, C, D,...)`)
TyTup(Vec<P<Ty>> ),
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)]
-pub struct AttrId(pub uint);
+pub struct AttrId(pub usize);
/// Doc-comments are promoted to attributes that have is_sugared_doc = true
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)]
}
impl<'ast> Map<'ast> {
- fn entry_count(&self) -> uint {
+ fn entry_count(&self) -> usize {
self.map.borrow().len()
}
fn find_entry(&self, id: NodeId) -> Option<MapEntry<'ast>> {
- self.map.borrow().get(id as uint).map(|e| *e)
+ self.map.borrow().get(id as usize).map(|e| *e)
}
pub fn krate(&self) -> &'ast Crate {
fn next(&mut self) -> Option<NodeId> {
loop {
let idx = self.idx;
- if idx as uint >= self.map.entry_count() {
+ if idx as usize >= self.map.entry_count() {
return None;
}
self.idx += 1;
fn insert_entry(&mut self, id: NodeId, entry: MapEntry<'ast>) {
debug!("ast_map: {:?} => {:?}", id, entry);
let len = self.map.len();
- if id as uint >= len {
- self.map.extend(repeat(NotPresent).take(id as uint - len + 1));
+ if id as usize >= len {
+ self.map.extend(repeat(NotPresent).take(id as usize - len + 1));
}
- self.map[id as uint] = entry;
+ self.map[id as usize] = entry;
}
fn insert(&mut self, id: NodeId, node: Node<'ast>) {
}
/// Get a string representation of an unsigned int type, with its value.
-/// We want to avoid "42uint" in favor of "42u"
+/// We want to avoid "42u" in favor of "42us". "42uint" is right out.
pub fn uint_ty_to_string(t: UintTy, val: Option<u64>) -> String {
let s = match t {
TyUs(true) if val.is_some() => "u",
}
/// Maps a binary operator to its precedence
-pub fn operator_prec(op: ast::BinOp) -> uint {
+pub fn operator_prec(op: ast::BinOp) -> usize {
match op {
// 'as' sits here with 12
BiMul | BiDiv | BiRem => 11u,
/// Precedence of the `as` operator, which is a binary operator
/// not appearing in the prior table.
#[allow(non_upper_case_globals)]
-pub static as_prec: uint = 12u;
+pub static as_prec: usize = 12us;
pub fn empty_generics() -> Generics {
Generics {
P(dummy_spanned(MetaWord(name)))
}
-thread_local! { static NEXT_ATTR_ID: Cell<uint> = Cell::new(0) }
+thread_local! { static NEXT_ATTR_ID: Cell<usize> = Cell::new(0) }
pub fn mk_attr_id() -> AttrId {
let id = NEXT_ATTR_ID.with(|slot| {
use serialize::{Encodable, Decodable, Encoder, Decoder};
pub trait Pos {
- fn from_uint(n: uint) -> Self;
- fn to_uint(&self) -> uint;
+ fn from_uint(n: usize) -> Self;
+ fn to_uint(&self) -> usize;
}
/// A byte offset. Keep this small (currently 32-bits), as AST contains
/// is not equivalent to a character offset. The CodeMap will convert BytePos
/// values to CharPos values as necessary.
#[derive(Copy, PartialEq, Hash, PartialOrd, Show)]
-pub struct CharPos(pub uint);
+pub struct CharPos(pub usize);
// FIXME: Lots of boilerplate in these impls, but so far my attempts to fix
// have been unsuccessful
impl Pos for BytePos {
- fn from_uint(n: uint) -> BytePos { BytePos(n as u32) }
- fn to_uint(&self) -> uint { let BytePos(n) = *self; n as uint }
+ fn from_uint(n: usize) -> BytePos { BytePos(n as u32) }
+ fn to_uint(&self) -> usize { let BytePos(n) = *self; n as usize }
}
impl Add for BytePos {
}
impl Pos for CharPos {
- fn from_uint(n: uint) -> CharPos { CharPos(n) }
- fn to_uint(&self) -> uint { let CharPos(n) = *self; n }
+ fn from_uint(n: usize) -> CharPos { CharPos(n) }
+ fn to_uint(&self) -> usize { let CharPos(n) = *self; n }
}
impl Add for CharPos {
/// Information about the original source
pub file: Rc<FileMap>,
/// The (1-based) line number
- pub line: uint,
+ pub line: usize,
/// The (0-based) column offset
pub col: CharPos
}
// perhaps they should just be removed.
pub struct LocWithOpt {
pub filename: FileName,
- pub line: uint,
+ pub line: usize,
pub col: CharPos,
pub file: Option<Rc<FileMap>>,
}
// used to be structural records. Better names, anyone?
-pub struct FileMapAndLine { pub fm: Rc<FileMap>, pub line: uint }
+pub struct FileMapAndLine { pub fm: Rc<FileMap>, pub line: usize }
pub struct FileMapAndBytePos { pub fm: Rc<FileMap>, pub pos: BytePos }
/// The syntax with which a macro was invoked.
pub struct FileLines {
pub file: Rc<FileMap>,
- pub lines: Vec<uint>
+ pub lines: Vec<usize>
}
/// Identifies an offset of a multi-byte character in a FileMap
/// The absolute offset of the character in the CodeMap
pub pos: BytePos,
/// The number of bytes, >=2
- pub bytes: uint,
+ pub bytes: usize,
}
/// A single source in the CodeMap
/// get a line from the list of pre-computed line-beginnings
///
- pub fn get_line(&self, line_number: uint) -> Option<String> {
+ pub fn get_line(&self, line_number: usize) -> Option<String> {
let lines = self.lines.borrow();
lines.get(line_number).map(|&line| {
let begin: BytePos = line - self.start_pos;
})
}
- pub fn record_multibyte_char(&self, pos: BytePos, bytes: uint) {
+ pub fn record_multibyte_char(&self, pos: BytePos, bytes: usize) {
assert!(bytes >=2 && bytes <= 4);
let mbc = MultiByteChar {
pos: pos,
let lo = self.lookup_char_pos(sp.lo);
let hi = self.lookup_char_pos(sp.hi);
let mut lines = Vec::new();
- for i in range(lo.line - 1u, hi.line as uint) {
+ for i in range(lo.line - 1us, hi.line as usize) {
lines.push(i);
};
FileLines {file: lo.file, lines: lines}
CharPos(bpos.to_uint() - map.start_pos.to_uint() - total_extra_bytes)
}
- fn lookup_filemap_idx(&self, pos: BytePos) -> uint {
+ fn lookup_filemap_idx(&self, pos: BytePos) -> usize {
let files = self.files.borrow();
let files = &*files;
let len = files.len();
{
match id {
NO_EXPANSION => f(None),
- ExpnId(i) => f(Some(&(*self.expansions.borrow())[i as uint]))
+ ExpnId(i) => f(Some(&(*self.expansions.borrow())[i as usize]))
}
}
use term;
/// maximum number of lines we will print for each error; arbitrary.
-static MAX_LINES: uint = 6u;
+static MAX_LINES: usize = 6us;
#[derive(Clone, Copy)]
pub enum RenderSpan {
/// (fatal, bug, unimpl) may cause immediate exit,
/// others log errors for later reporting.
pub struct Handler {
- err_count: Cell<uint>,
+ err_count: Cell<usize>,
emit: RefCell<Box<Emitter + Send>>,
}
pub fn bump_err_count(&self) {
self.err_count.set(self.err_count.get() + 1u);
}
- pub fn err_count(&self) -> uint {
+ pub fn err_count(&self) -> usize {
self.err_count.get()
}
pub fn has_errors(&self) -> bool {
pub exported_macros: Vec<ast::MacroDef>,
pub syntax_env: SyntaxEnv,
- pub recursion_count: uint,
+ pub recursion_count: usize,
}
impl<'a> ExtCtxt<'a> {
fn expr_mut_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr>;
fn expr_field_access(&self, span: Span, expr: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr>;
fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>,
- idx: uint) -> P<ast::Expr>;
+ idx: usize) -> P<ast::Expr>;
fn expr_call(&self, span: Span, expr: P<ast::Expr>, args: Vec<P<ast::Expr>>) -> P<ast::Expr>;
fn expr_call_ident(&self, span: Span, id: ast::Ident, args: Vec<P<ast::Expr>>) -> P<ast::Expr>;
fn expr_call_global(&self, sp: Span, fn_path: Vec<ast::Ident>,
fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> P<ast::Expr>;
- fn expr_uint(&self, span: Span, i: uint) -> P<ast::Expr>;
+ fn expr_uint(&self, span: Span, i: usize) -> P<ast::Expr>;
fn expr_int(&self, sp: Span, i: int) -> P<ast::Expr>;
fn expr_u8(&self, sp: Span, u: u8) -> P<ast::Expr>;
fn expr_bool(&self, sp: Span, value: bool) -> P<ast::Expr>;
let id = Spanned { node: ident, span: field_span };
self.expr(sp, ast::ExprField(expr, id))
}
- fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>, idx: uint) -> P<ast::Expr> {
+ fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>, idx: usize) -> P<ast::Expr> {
let field_span = Span {
lo: sp.lo - Pos::from_uint(idx.to_string().len()),
hi: sp.hi,
fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> P<ast::Expr> {
self.expr(sp, ast::ExprLit(P(respan(sp, lit))))
}
- fn expr_uint(&self, span: Span, i: uint) -> P<ast::Expr> {
+ fn expr_uint(&self, span: Span, i: usize) -> P<ast::Expr> {
self.expr_lit(span, ast::LitInt(i as u64, ast::UnsignedIntLit(ast::TyUs(false))))
}
fn expr_int(&self, sp: Span, i: int) -> P<ast::Expr> {
/// Create a decoder for a single enum variant/struct:
/// - `outer_pat_path` is the path to this enum variant/struct
-/// - `getarg` should retrieve the `uint`-th field with name `@str`.
+/// - `getarg` should retrieve the `usize`-th field with name `@str`.
fn decode_static_fields<F>(cx: &mut ExtCtxt,
trait_span: Span,
outer_pat_path: ast::Path,
fields: &StaticFields,
mut getarg: F)
-> P<Expr> where
- F: FnMut(&mut ExtCtxt, Span, InternedString, uint) -> P<Expr>,
+ F: FnMut(&mut ExtCtxt, Span, InternedString, usize) -> P<Expr>,
{
match *fields {
Unnamed(ref fields) => {
//!
//! ```ignore
//! #[derive(Encodable, Decodable)]
-//! struct Node { id: uint }
+//! struct Node { id: usize }
//! ```
//!
//! would generate two implementations like:
/// Matching variants of the enum: variant index, ast::Variant,
/// fields: the field name is only non-`None` in the case of a struct
/// variant.
- EnumMatching(uint, &'a ast::Variant, Vec<FieldInfo>),
+ EnumMatching(usize, &'a ast::Variant, Vec<FieldInfo>),
/// Non-matching variants of the enum, but with all state hidden from
/// the consequent code. The first component holds `Ident`s for all of
.collect::<Vec<ast::Ident>>();
// The `vi_idents` will be bound, solely in the catch-all, to
- // a series of let statements mapping each self_arg to a uint
+ // a series of let statements mapping each self_arg to a usize
// corresponding to its variant index.
let vi_idents: Vec<ast::Ident> = self_arg_names.iter()
.map(|name| { let vi_suffix = format!("{}_vi", &name[]);
}).collect();
// Build a series of let statements mapping each self_arg
- // to a uint corresponding to its variant index.
+ // to a usize corresponding to its variant index.
// i.e. for `enum E<T> { A, B(1), C(T, T) }`, and a deriving
// with three Self args, builds three statements:
//
pub struct ExpansionConfig {
pub crate_name: String,
pub enable_quotes: bool,
- pub recursion_limit: uint,
+ pub recursion_limit: usize,
}
impl ExpansionConfig {
// in principle, you might want to control this boolean on a per-varref basis,
// but that would make things even harder to understand, and might not be
// necessary for thorough testing.
- type RenamingTest = (&'static str, Vec<Vec<uint>>, bool);
+ type RenamingTest = (&'static str, Vec<Vec<usize>>, bool);
#[test]
fn automatic_renaming () {
}
// run one of the renaming tests
- fn run_renaming_test(t: &RenamingTest, test_idx: uint) {
+ fn run_renaming_test(t: &RenamingTest, test_idx: usize) {
let invalid_name = token::special_idents::invalid.name;
let (teststr, bound_connections, bound_ident_check) = match *t {
(ref str,ref conns, bic) => (str.to_string(), conns.clone(), bic)
}
enum Position {
- Exact(uint),
+ Exact(usize),
Named(String),
}
/// Stays `true` if all formatting parameters are default (as in "{}{}").
all_pieces_simple: bool,
- name_positions: HashMap<String, uint>,
+ name_positions: HashMap<String, usize>,
/// Updated as arguments are consumed or methods are entered
- nest_level: uint,
- next_arg: uint,
+ nest_level: usize,
+ next_arg: usize,
}
/// Parses the arguments from the given list of tokens, returning None
}
let resolved = {
- let result = (*table.table.borrow())[id.ctxt as uint];
+ let result = (*table.table.borrow())[id.ctxt as usize];
match result {
EmptyCtxt => id.name,
// ignore marks here:
let mut result = Vec::new();
let mut loopvar = ctxt;
loop {
- let table_entry = (*table.table.borrow())[loopvar as uint];
+ let table_entry = (*table.table.borrow())[loopvar as usize];
match table_entry {
EmptyCtxt => {
return result;
/// FAILS when outside is not a mark.
pub fn outer_mark(ctxt: SyntaxContext) -> Mrk {
with_sctable(|sctable| {
- match (*sctable.table.borrow())[ctxt as uint] {
+ match (*sctable.table.borrow())[ctxt as usize] {
Mark(mrk, _) => mrk,
_ => panic!("can't retrieve outer mark when outside is not a mark")
}
let mut result = Vec::new();
loop {
let table = table.table.borrow();
- match (*table)[sc as uint] {
+ match (*table)[sc as usize] {
EmptyCtxt => {return result;},
Mark(mrk,tail) => {
result.push(M(mrk));
}
impl TokenTreeOrTokenTreeVec {
- fn len(&self) -> uint {
+ fn len(&self) -> usize {
match self {
&TtSeq(ref v) => v.len(),
&Tt(ref tt) => tt.len(),
}
}
- fn get_tt(&self, index: uint) -> TokenTree {
+ fn get_tt(&self, index: usize) -> TokenTree {
match self {
&TtSeq(ref v) => v[index].clone(),
&Tt(ref tt) => tt.get_tt(index),
#[derive(Clone)]
struct MatcherTtFrame {
elts: TokenTreeOrTokenTreeVec,
- idx: uint,
+ idx: usize,
}
#[derive(Clone)]
stack: Vec<MatcherTtFrame>,
top_elts: TokenTreeOrTokenTreeVec,
sep: Option<Token>,
- idx: uint,
+ idx: usize,
up: Option<Box<MatcherPos>>,
matches: Vec<Vec<Rc<NamedMatch>>>,
- match_lo: uint,
- match_cur: uint,
- match_hi: uint,
+ match_lo: usize,
+ match_cur: usize,
+ match_hi: usize,
sp_lo: BytePos,
}
-pub fn count_names(ms: &[TokenTree]) -> uint {
+pub fn count_names(ms: &[TokenTree]) -> usize {
ms.iter().fold(0, |count, elt| {
count + match elt {
&TtSequence(_, ref seq) => {
pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
-> HashMap<Ident, Rc<NamedMatch>> {
fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
- ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut uint) {
+ ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut usize) {
match m {
&TtSequence(_, ref seq) => {
for next_m in seq.tts.iter() {
#[derive(Clone)]
struct TtFrame {
forest: TokenTree,
- idx: uint,
+ idx: usize,
dotdotdoted: bool,
sep: Option<Token>,
}
// Some => return imported_from as the next token
crate_name_next: Option<Span>,
- repeat_idx: Vec<uint>,
- repeat_len: Vec<uint>,
+ repeat_idx: Vec<usize>,
+ repeat_len: Vec<usize>,
/* cached: */
pub cur_tok: Token,
pub cur_span: Span,
#[derive(Clone)]
enum LockstepIterSize {
LisUnconstrained,
- LisConstraint(uint, Ident),
+ LisConstraint(usize, Ident),
LisContradiction(String),
}
noop_fold_ident(i, self)
}
- fn fold_uint(&mut self, i: uint) -> uint {
+ fn fold_uint(&mut self, i: usize) -> usize {
noop_fold_uint(i, self)
}
i
}
-pub fn noop_fold_uint<T: Folder>(i: uint, _: &mut T) -> uint {
+pub fn noop_fold_uint<T: Folder>(i: usize, _: &mut T) -> usize {
i
}
use std::io;
use std::str;
use std::string::String;
-use std::uint;
+use std::usize;
#[derive(Clone, Copy, PartialEq)]
pub enum CommentStyle {
/// remove a "[ \t]*\*" block from each line, if possible
fn horizontal_trim(lines: Vec<String> ) -> Vec<String> {
- let mut i = uint::MAX;
+ let mut i = usize::MAX;
let mut can_trim = true;
let mut first = true;
for line in lines.iter() {
/// Returns None if the first col chars of s contain a non-whitespace char.
/// Otherwise returns Some(k) where k is first char offset after that leading
/// whitespace. Note k may be outside bounds of s.
-fn all_whitespace(s: &str, col: CharPos) -> Option<uint> {
+fn all_whitespace(s: &str, col: CharPos) -> Option<usize> {
let len = s.len();
let mut col = col.to_uint();
- let mut cursor: uint = 0;
+ let mut cursor: usize = 0;
while col > 0 && cursor < len {
let r: str::CharRange = s.char_range_at(cursor);
if !r.ch.is_whitespace() {
return s.into_cow();
fn translate_crlf_(rdr: &StringReader, start: BytePos,
- s: &str, errmsg: &str, mut i: uint) -> String {
+ s: &str, errmsg: &str, mut i: usize) -> String {
let mut buf = String::with_capacity(s.len());
let mut j = 0;
while i < s.len() {
/// Scan through any digits (base `radix`) or underscores, and return how
/// many digits there were.
- fn scan_digits(&mut self, radix: uint) -> uint {
+ fn scan_digits(&mut self, radix: usize) -> usize {
let mut len = 0u;
loop {
let c = self.curr;
/// Scan over `n_digits` hex digits, stopping at `delim`, reporting an
/// error if too many or too few digits are encountered.
fn scan_hex_digits(&mut self,
- n_digits: uint,
+ n_digits: usize,
delim: char,
below_0x7f_only: bool)
-> bool {
fn scan_unicode_escape(&mut self, delim: char) -> bool {
self.bump(); // past the {
let start_bpos = self.last_pos;
- let mut count: uint = 0;
+ let mut count = 0us;
let mut accum_int = 0;
while !self.curr_is('}') && count <= 6 {
/// Check that a base is valid for a floating literal, emitting a nice
/// error if it isn't.
- fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: uint) {
+ fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: usize) {
match base {
16u => self.err_span_(start_bpos, last_bpos, "hexadecimal float literal is not \
supported"),
/// Rather than just accepting/rejecting a given literal, unescapes it as
/// well. Can take any slice prefixed by a character escape. Returns the
/// character and the number of characters consumed.
-pub fn char_lit(lit: &str) -> (char, int) {
+pub fn char_lit(lit: &str) -> (char, isize) {
use std::{num, char};
let mut chars = lit.chars();
let msg = format!("lexer should have rejected a bad character escape {}", lit);
let msg2 = &msg[];
- fn esc(len: uint, lit: &str) -> Option<(char, int)> {
+ fn esc(len: usize, lit: &str) -> Option<(char, isize)> {
num::from_str_radix(&lit[2..len], 16)
.and_then(char::from_u32)
- .map(|x| (x, len as int))
+ .map(|x| (x, len as isize))
}
- let unicode_escape = |&: | -> Option<(char, int)>
+ let unicode_escape = |&: | -> Option<(char, isize)>
if lit.as_bytes()[2] == b'{' {
let idx = lit.find('}').expect(msg2);
let subslice = &lit[3..idx];
num::from_str_radix(subslice, 16)
.and_then(char::from_u32)
- .map(|x| (x, subslice.chars().count() as int + 4))
+ .map(|x| (x, subslice.chars().count() as isize + 4))
} else {
esc(6, lit)
};
let error = |&: i| format!("lexer should have rejected {} at {}", lit, i);
/// Eat everything up to a non-whitespace
- fn eat<'a>(it: &mut iter::Peekable<(uint, char), str::CharIndices<'a>>) {
+ fn eat<'a>(it: &mut iter::Peekable<(usize, char), str::CharIndices<'a>>) {
loop {
match it.peek().map(|x| x.1) {
Some(' ') | Some('\n') | Some('\r') | Some('\t') => {
}
/// Parse a string representing a byte literal into its final form. Similar to `char_lit`
-pub fn byte_lit(lit: &str) -> (u8, uint) {
+pub fn byte_lit(lit: &str) -> (u8, usize) {
let err = |&: i| format!("lexer accepted invalid byte literal {} step {}", lit, i);
if lit.len() == 1 {
let error = |&: i| format!("lexer should have rejected {} at {}", lit, i);
/// Eat everything up to a non-whitespace
- fn eat<'a, I: Iterator<Item=(uint, u8)>>(it: &mut iter::Peekable<(uint, u8), I>) {
+ fn eat<'a, I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<(usize, u8), I>) {
loop {
match it.peek().map(|x| x.1) {
Some(b' ') | Some(b'\n') | Some(b'\r') | Some(b'\t') => {
#[test] fn span_of_self_arg_pat_idents_are_correct() {
- let srcs = ["impl z { fn a (&self, &myarg: int) {} }",
- "impl z { fn a (&mut self, &myarg: int) {} }",
- "impl z { fn a (&'a self, &myarg: int) {} }",
- "impl z { fn a (self, &myarg: int) {} }",
- "impl z { fn a (self: Foo, &myarg: int) {} }",
+ let srcs = ["impl z { fn a (&self, &myarg: i32) {} }",
+ "impl z { fn a (&mut self, &myarg: i32) {} }",
+ "impl z { fn a (&'a self, &myarg: i32) {} }",
+ "impl z { fn a (self, &myarg: i32) {} }",
+ "impl z { fn a (self: Foo, &myarg: i32) {} }",
];
for &src in srcs.iter() {
"use a `move ||` expression instead",
),
ObsoleteSyntax::ClosureType => (
- "`|uint| -> bool` closure type syntax",
+ "`|usize| -> bool` closure type syntax",
"use unboxed closures instead, no type annotation needed"
),
ObsoleteSyntax::Sized => (
pub buffer: [TokenAndSpan; 4],
pub buffer_start: int,
pub buffer_end: int,
- pub tokens_consumed: uint,
+ pub tokens_consumed: usize,
pub restrictions: Restrictions,
- pub quote_depth: uint, // not (yet) related to the quasiquoter
+ pub quote_depth: usize, // not (yet) related to the quasiquoter
pub reader: Box<Reader+'a>,
pub interner: Rc<token::IdentInterner>,
/// The set of seen errors about obsolete syntax. Used to suppress
self.reader.real_token()
} else {
// Avoid token copies with `replace`.
- let buffer_start = self.buffer_start as uint;
- let next_index = (buffer_start + 1) & 3 as uint;
+ let buffer_start = self.buffer_start as usize;
+ let next_index = (buffer_start + 1) & 3 as usize;
self.buffer_start = next_index as int;
let placeholder = TokenAndSpan {
}
return (4 - self.buffer_start) + self.buffer_end;
}
- pub fn look_ahead<R, F>(&mut self, distance: uint, f: F) -> R where
+ pub fn look_ahead<R, F>(&mut self, distance: usize, f: F) -> R where
F: FnOnce(&token::Token) -> R,
{
let dist = distance as int;
while self.buffer_length() < dist {
- self.buffer[self.buffer_end as uint] = self.reader.real_token();
+ self.buffer[self.buffer_end as usize] = self.reader.real_token();
self.buffer_end = (self.buffer_end + 1) & 3;
}
- f(&self.buffer[((self.buffer_start + dist - 1) & 3) as uint].tok)
+ f(&self.buffer[((self.buffer_start + dist - 1) & 3) as usize].tok)
}
pub fn fatal(&mut self, m: &str) -> ! {
self.sess.span_diagnostic.span_fatal(self.span, m)
ExprField(expr, ident)
}
- pub fn mk_tup_field(&mut self, expr: P<Expr>, idx: codemap::Spanned<uint>) -> ast::Expr_ {
+ pub fn mk_tup_field(&mut self, expr: P<Expr>, idx: codemap::Spanned<usize>) -> ast::Expr_ {
ExprTupField(expr, idx)
}
hi = self.span.hi;
self.bump();
- let index = n.as_str().parse::<uint>();
+ let index = n.as_str().parse::<usize>();
match index {
Some(n) => {
let id = spanned(dot, hi, n);
};
self.span_help(last_span,
&format!("try parenthesizing the first index; e.g., `(foo.{}){}`",
- float.trunc() as uint,
+ float.trunc() as usize,
&float.fract().to_string()[1..])[]);
}
self.abort_if_errors();
}
/// Parse an expression of binops of at least min_prec precedence
- pub fn parse_more_binops(&mut self, lhs: P<Expr>, min_prec: uint) -> P<Expr> {
+ pub fn parse_more_binops(&mut self, lhs: P<Expr>, min_prec: usize) -> P<Expr> {
if self.expr_is_complete(&*lhs) { return lhs; }
// Prevent dynamic borrow errors later on by limiting the
Integer(ast::Name),
Float(ast::Name),
Str_(ast::Name),
- StrRaw(ast::Name, uint), /* raw str delimited by n hash symbols */
+ StrRaw(ast::Name, usize), /* raw str delimited by n hash symbols */
Binary(ast::Name),
- BinaryRaw(ast::Name, uint), /* raw binary str delimited by n hash symbols */
+ BinaryRaw(ast::Name, usize), /* raw binary str delimited by n hash symbols */
}
impl Lit {
get_ident_interner().intern(s)
}
-/// gensym's a new uint, using the current interner.
+/// gensym's a new usize, using the current interner.
#[inline]
pub fn gensym(s: &str) -> ast::Name {
get_ident_interner().gensym(s)
//!
//! In particular you'll see a certain amount of churn related to INTEGER vs.
//! CARDINAL in the Mesa implementation. Mesa apparently interconverts the two
-//! somewhat readily? In any case, I've used uint for indices-in-buffers and
+//! somewhat readily? In any case, I've used usize for indices-in-buffers and
//! ints for character-sizes-and-indentation-offsets. This respects the need
//! for ints to "go negative" while carrying a pending-calculation balance, and
//! helps differentiate all the numbers flying around internally (slightly).
pub fn buf_str(toks: &[Token],
szs: &[int],
- left: uint,
- right: uint,
- lim: uint)
+ left: usize,
+ right: usize,
+ lim: usize)
-> String {
let n = toks.len();
assert_eq!(n, szs.len());
static SIZE_INFINITY: int = 0xffff;
-pub fn mk_printer(out: Box<io::Writer+'static>, linewidth: uint) -> Printer {
+pub fn mk_printer(out: Box<io::Writer+'static>, linewidth: usize) -> Printer {
// Yes 3, it makes the ring buffers big enough to never
// fall behind.
- let n: uint = 3 * linewidth;
+ let n: usize = 3 * linewidth;
debug!("mk_printer {}", linewidth);
let token: Vec<Token> = repeat(Token::Eof).take(n).collect();
let size: Vec<int> = repeat(0i).take(n).collect();
- let scan_stack: Vec<uint> = repeat(0u).take(n).collect();
+ let scan_stack: Vec<usize> = repeat(0us).take(n).collect();
Printer {
out: out,
buf_len: n,
/// called 'print'.
pub struct Printer {
pub out: Box<io::Writer+'static>,
- buf_len: uint,
+ buf_len: usize,
/// Width of lines we're constrained to
margin: int,
/// Number of spaces left on line
space: int,
/// Index of left side of input stream
- left: uint,
+ left: usize,
/// Index of right side of input stream
- right: uint,
+ right: usize,
/// Ring-buffer stream goes through
token: Vec<Token> ,
/// Ring-buffer of calculated sizes
/// Begin (if there is any) on top of it. Stuff is flushed off the
/// bottom as it becomes irrelevant due to the primary ring-buffer
/// advancing.
- scan_stack: Vec<uint> ,
+ scan_stack: Vec<usize> ,
/// Top==bottom disambiguator
scan_stack_empty: bool,
/// Index of top of scan_stack
- top: uint,
+ top: usize,
/// Index of bottom of scan_stack
- bottom: uint,
+ bottom: usize,
/// Stack of blocks-in-progress being flushed by print
print_stack: Vec<PrintStackElem> ,
/// Buffered indentation to avoid writing trailing whitespace
}
Ok(())
}
- pub fn scan_push(&mut self, x: uint) {
+ pub fn scan_push(&mut self, x: usize) {
debug!("scan_push {}", x);
if self.scan_stack_empty {
self.scan_stack_empty = false;
}
self.scan_stack[self.top] = x;
}
- pub fn scan_pop(&mut self) -> uint {
+ pub fn scan_pop(&mut self) -> usize {
assert!((!self.scan_stack_empty));
let x = self.scan_stack[self.top];
if self.top == self.bottom {
}
return x;
}
- pub fn scan_top(&mut self) -> uint {
+ pub fn scan_top(&mut self) -> usize {
assert!((!self.scan_stack_empty));
return self.scan_stack[self.top];
}
- pub fn scan_pop_bottom(&mut self) -> uint {
+ pub fn scan_pop_bottom(&mut self) -> usize {
assert!((!self.scan_stack_empty));
let x = self.scan_stack[self.bottom];
if self.top == self.bottom {
// Convenience functions to talk to the printer.
//
// "raw box"
-pub fn rbox(p: &mut Printer, indent: uint, b: Breaks) -> io::IoResult<()> {
+pub fn rbox(p: &mut Printer, indent: usize, b: Breaks) -> io::IoResult<()> {
p.pretty_print(Token::Begin(BeginToken {
offset: indent as int,
breaks: b
}))
}
-pub fn ibox(p: &mut Printer, indent: uint) -> io::IoResult<()> {
+pub fn ibox(p: &mut Printer, indent: usize) -> io::IoResult<()> {
rbox(p, indent, Breaks::Inconsistent)
}
-pub fn cbox(p: &mut Printer, indent: uint) -> io::IoResult<()> {
+pub fn cbox(p: &mut Printer, indent: usize) -> io::IoResult<()> {
rbox(p, indent, Breaks::Consistent)
}
-pub fn break_offset(p: &mut Printer, n: uint, off: int) -> io::IoResult<()> {
+pub fn break_offset(p: &mut Printer, n: usize, off: int) -> io::IoResult<()> {
p.pretty_print(Token::Break(BreakToken {
offset: off,
blank_space: n as int
p.pretty_print(Token::String(/* bad */ wrd.to_string(), 0))
}
-pub fn spaces(p: &mut Printer, n: uint) -> io::IoResult<()> {
+pub fn spaces(p: &mut Printer, n: usize) -> io::IoResult<()> {
break_offset(p, n, 0)
}
}
pub fn hardbreak(p: &mut Printer) -> io::IoResult<()> {
- spaces(p, SIZE_INFINITY as uint)
+ spaces(p, SIZE_INFINITY as usize)
}
pub fn hardbreak_tok_offset(off: int) -> Token {
#[derive(Copy)]
pub struct CurrentCommentAndLiteral {
- cur_cmnt: uint,
- cur_lit: uint,
+ cur_cmnt: usize,
+ cur_lit: usize,
}
pub struct State<'a> {
}
#[allow(non_upper_case_globals)]
-pub const indent_unit: uint = 4u;
+pub const indent_unit: usize = 4us;
#[allow(non_upper_case_globals)]
-pub const default_columns: uint = 78u;
+pub const default_columns: usize = 78us;
/// Requires you to pass an input filename and reader so that
/// it can scan the input text for comments and literals to
}
impl<'a> State<'a> {
- pub fn ibox(&mut self, u: uint) -> IoResult<()> {
+ pub fn ibox(&mut self, u: usize) -> IoResult<()> {
self.boxes.push(pp::Breaks::Inconsistent);
pp::ibox(&mut self.s, u)
}
pp::end(&mut self.s)
}
- pub fn cbox(&mut self, u: uint) -> IoResult<()> {
+ pub fn cbox(&mut self, u: usize) -> IoResult<()> {
self.boxes.push(pp::Breaks::Consistent);
pp::cbox(&mut self.s, u)
}
// "raw box"
- pub fn rbox(&mut self, u: uint, b: pp::Breaks) -> IoResult<()> {
+ pub fn rbox(&mut self, u: usize, b: pp::Breaks) -> IoResult<()> {
self.boxes.push(b);
pp::rbox(&mut self.s, u, b)
}
}
pub fn bclose_(&mut self, span: codemap::Span,
- indented: uint) -> IoResult<()> {
+ indented: usize) -> IoResult<()> {
self.bclose_maybe_open(span, indented, true)
}
pub fn bclose_maybe_open (&mut self, span: codemap::Span,
- indented: uint, close_box: bool) -> IoResult<()> {
+ indented: usize, close_box: bool) -> IoResult<()> {
try!(self.maybe_print_comment(span.hi));
try!(self.break_offset_if_not_bol(1u, -(indented as int)));
try!(word(&mut self.s, "}"));
if !self.is_bol() { try!(space(&mut self.s)); }
Ok(())
}
- pub fn break_offset_if_not_bol(&mut self, n: uint,
+ pub fn break_offset_if_not_bol(&mut self, n: usize,
off: int) -> IoResult<()> {
if !self.is_bol() {
break_offset(&mut self.s, n, off)
}
pub fn print_block_unclosed_indent(&mut self, blk: &ast::Block,
- indented: uint) -> IoResult<()> {
+ indented: usize) -> IoResult<()> {
self.print_block_maybe_unclosed(blk, indented, &[], false)
}
pub fn print_block_maybe_unclosed(&mut self,
blk: &ast::Block,
- indented: uint,
+ indented: usize,
attrs: &[ast::Attribute],
close_box: bool) -> IoResult<()> {
match blk.rules {
self.ann.post(self, NodeIdent(&ident))
}
- pub fn print_uint(&mut self, i: uint) -> IoResult<()> {
+ pub fn print_uint(&mut self, i: usize) -> IoResult<()> {
word(&mut self.s, &i.to_string()[])
}
}
}
-fn repeat(s: &str, n: uint) -> String { iter::repeat(s).take(n).collect() }
+fn repeat(s: &str, n: usize) -> String { iter::repeat(s).take(n).collect() }
#[cfg(test)]
mod test {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//! An "interner" is a data structure that associates values with uint tags and
+//! An "interner" is a data structure that associates values with usize tags and
//! allows bidirectional lookup; i.e. given a value, one can easily find the
//! type, and vice versa.
(*vect)[idx.uint()].clone()
}
- pub fn len(&self) -> uint {
+ pub fn len(&self) -> usize {
let vect = self.vect.borrow();
(*vect).len()
}
(*self.vect.borrow())[idx.uint()].clone()
}
- pub fn len(&self) -> uint {
+ pub fn len(&self) -> usize {
self.vect.borrow().len()
}
}
}
-/// Given a string and an index, return the first uint >= idx
+/// Given a string and an index, return the first usize >= idx
/// that is a non-ws-char or is outside of the legal range of
/// the string.
-fn scan_for_non_ws_or_end(a : &str, idx: uint) -> uint {
+fn scan_for_non_ws_or_end(a : &str, idx: usize) -> usize {
let mut i = idx;
let len = a.len();
while (i < len) && (is_whitespace(a.char_at(i))) {
}
}
- pub fn get<'a>(&'a self, idx: uint) -> &'a T {
+ pub fn get<'a>(&'a self, idx: usize) -> &'a T {
match self.repr {
One(ref v) if idx == 0 => v,
Many(ref vs) => &vs[idx],
IntoIter { repr: repr }
}
- pub fn len(&self) -> uint {
+ pub fn len(&self) -> usize {
match self.repr {
Zero => 0,
One(..) => 1,
}
}
- fn size_hint(&self) -> (uint, Option<uint>) {
+ fn size_hint(&self) -> (usize, Option<usize>) {
match self.repr {
ZeroIterator => (0, Some(0)),
OneIterator(..) => (1, Some(1)),