git.lizzy.rs Git - rust.git/commitdiff
libsyntax: Fix errors arising from the automated `~[T]` conversion
author Patrick Walton <pcwalton@mimiga.net>
Fri, 28 Feb 2014 20:54:01 +0000 (12:54 -0800)
committer Patrick Walton <pcwalton@mimiga.net>
Sun, 2 Mar 2014 06:40:52 +0000 (22:40 -0800)
54 files changed:
src/libstd/vec_ng.rs
src/libsyntax/abi.rs
src/libsyntax/ast.rs
src/libsyntax/ast_map.rs
src/libsyntax/ast_util.rs
src/libsyntax/attr.rs
src/libsyntax/codemap.rs
src/libsyntax/crateid.rs
src/libsyntax/diagnostic.rs
src/libsyntax/ext/asm.rs
src/libsyntax/ext/base.rs
src/libsyntax/ext/build.rs
src/libsyntax/ext/bytes.rs
src/libsyntax/ext/cfg.rs
src/libsyntax/ext/deriving/clone.rs
src/libsyntax/ext/deriving/cmp/eq.rs
src/libsyntax/ext/deriving/cmp/ord.rs
src/libsyntax/ext/deriving/cmp/totaleq.rs
src/libsyntax/ext/deriving/cmp/totalord.rs
src/libsyntax/ext/deriving/decodable.rs
src/libsyntax/ext/deriving/default.rs
src/libsyntax/ext/deriving/encodable.rs
src/libsyntax/ext/deriving/generic.rs
src/libsyntax/ext/deriving/hash.rs
src/libsyntax/ext/deriving/primitive.rs
src/libsyntax/ext/deriving/rand.rs
src/libsyntax/ext/deriving/show.rs
src/libsyntax/ext/deriving/ty.rs
src/libsyntax/ext/deriving/zero.rs
src/libsyntax/ext/env.rs
src/libsyntax/ext/expand.rs
src/libsyntax/ext/format.rs
src/libsyntax/ext/log_syntax.rs
src/libsyntax/ext/quote.rs
src/libsyntax/ext/registrar.rs
src/libsyntax/ext/source_util.rs
src/libsyntax/ext/trace_macros.rs
src/libsyntax/ext/tt/macro_parser.rs
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/ext/tt/transcribe.rs
src/libsyntax/fold.rs
src/libsyntax/opt_vec.rs
src/libsyntax/parse/attr.rs
src/libsyntax/parse/comments.rs
src/libsyntax/parse/lexer.rs
src/libsyntax/parse/mod.rs
src/libsyntax/parse/parser.rs
src/libsyntax/parse/token.rs
src/libsyntax/print/pp.rs
src/libsyntax/print/pprust.rs
src/libsyntax/util/interner.rs
src/libsyntax/util/parser_testing.rs
src/libsyntax/util/small_vector.rs
src/libsyntax/visit.rs

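Note: the fixes below follow a few recurring patterns. The new `Vec<T>` (from `std::vec_ng`) did not yet support indexing or implicit coercion to `&[T]`, so code left behind by the automated `~[T]` conversion — `v[i]`, passing `*items` where a slice is expected, `tts.to_owned()` — no longer type-checks; the commit swaps in `.get()`, `.as_slice()`, and explicit clone-and-collect. A minimal sketch of those idioms, written against the pre-1.0 `std::vec_ng::Vec` API this commit targets (the `demo` and `takes_slice` functions and their locals are illustrative only, not part of the commit, and this dialect does not compile under modern Rust):

    use std::vec_ng::Vec;

    fn demo(names: &[~str]) {
        // Build a Vec from a slice by cloning each element (replaces `to_owned()`).
        let v: Vec<~str> = names.iter().map(|x| (*x).clone()).collect();

        // Vec has no indexing yet: `v[0]` becomes `v.get(0)`, which returns a reference.
        let first: &~str = v.get(0);
        let _ = first;

        // Functions that take &[T] now need an explicit slice view.
        takes_slice(v.as_slice());
    }

    fn takes_slice(_xs: &[~str]) {}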
index 6cc3ccc345237f800f63a06ae38bf0f0f9585a4c..9b6acdd9b9ee554d670f02e5682ec7c12bc30e67 100644 (file)
@@ -16,6 +16,7 @@
 use cmp::{Eq, Ordering, TotalEq, TotalOrd};
 use container::Container;
 use default::Default;
+use fmt;
 use iter::{DoubleEndedIterator, FromIterator, Iterator};
 use libc::{free, c_void};
 use mem::{size_of, move_val_init};
@@ -82,6 +83,26 @@ pub fn push_all(&mut self, other: &[T]) {
             self.push((*element).clone())
         }
     }
+
+
+    pub fn grow(&mut self, n: uint, initval: &T) {
+        let new_len = self.len() + n;
+        self.reserve(new_len);
+        let mut i: uint = 0u;
+
+        while i < n {
+            self.push((*initval).clone());
+            i += 1u;
+        }
+    }
+
+    pub fn grow_set(&mut self, index: uint, initval: &T, val: T) {
+        let l = self.len();
+        if index >= l {
+            self.grow(index - l + 1u, initval);
+        }
+        *self.get_mut(index) = val;
+    }
 }
 
 impl<T:Clone> Clone for Vec<T> {
@@ -388,6 +409,12 @@ fn default() -> Vec<T> {
     }
 }
 
+impl<T:fmt::Show> fmt::Show for Vec<T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.as_slice().fmt(f)
+    }
+}
+
 pub struct MoveItems<T> {
     priv allocation: *mut c_void, // the block of memory allocated for the vector
     priv iter: Items<'static, T>
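For reference, the `grow` and `grow_set` methods added above restore the old `~[T]` behaviour: `grow` appends `n` clones of `initval`, and `grow_set` grows the vector as needed before writing `val` at `index`; the new `fmt::Show` impl simply formats the vector via `as_slice()`. A hedged usage sketch against the same pre-1.0 API (the `main` body and concrete values are illustrative, not from the commit):

    use std::vec_ng::Vec;

    fn main() {
        let mut v: Vec<uint> = Vec::new();
        v.grow(3u, &0u);            // appends three clones of 0: [0, 0, 0]
        v.grow_set(5u, &0u, 7u);    // pads with 0 up to index 5, then writes 7
        assert_eq!(v.len(), 6);
        assert_eq!(*v.get(5), 7u);
    }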
index eda71552765cc710f4fa9878ae37f085b61e6e62..a06415bc083a8ab33e992f62a6f2f8093e94f932 100644 (file)
@@ -119,7 +119,7 @@ pub fn lookup(name: &str) -> Option<Abi> {
 }
 
 pub fn all_names() -> Vec<&'static str> {
-    AbiDatas.map(|d| d.name)
+    AbiDatas.iter().map(|d| d.name).collect()
 }
 
 impl Abi {
index deb125a935d9f33c96bbc9b548685d129e93fdf4..947463d8f47b116ba93de9f6ff26859690068ba2 100644 (file)
@@ -23,6 +23,7 @@
 use collections::HashMap;
 use std::option::Option;
 use std::rc::Rc;
+use std::vec_ng::Vec;
 use serialize::{Encodable, Decodable, Encoder, Decoder};
 
 /// A pointer abstraction. FIXME(eddyb) #10676 use Rc<T> in the future.
@@ -1193,6 +1194,8 @@ mod test {
     use codemap::*;
     use super::*;
 
+    use std::vec_ng::Vec;
+
     fn is_freeze<T: Freeze>() {}
 
     // Assert that the AST remains Freeze (#10693).
index 2c2b4a6535d4bf199153812cd9924adae2b80673..56a99736866f6a037fdc48413c36ad3ac702d48a 100644 (file)
@@ -23,6 +23,7 @@
 use std::iter;
 use std::vec;
 use std::fmt;
+use std::vec_ng::Vec;
 
 #[deriving(Clone, Eq)]
 pub enum PathElem {
@@ -191,7 +192,11 @@ pub struct Map {
 impl Map {
     fn find_entry(&self, id: NodeId) -> Option<MapEntry> {
         let map = self.map.borrow();
-        map.get().get(id as uint).map(|x| *x)
+        if map.get().len() > id as uint {
+            Some(*map.get().get(id as uint))
+        } else {
+            None
+        }
     }
 
     /// Retrieve the Node corresponding to `id`, failing if it cannot
index 1386ac5404b0ace3effd341cd9b888624880c6b2..db9ea480e96206fb877393f2566bacab40a3d1b5 100644 (file)
@@ -23,6 +23,7 @@
 use collections::HashMap;
 use std::u32;
 use std::local_data;
+use std::vec_ng::Vec;
 
 pub fn path_name_i(idents: &[Ident]) -> ~str {
     // FIXME: Bad copies (#2543 -- same for everything else that says "bad")
@@ -795,7 +796,7 @@ pub fn resolve_internal(id : Ident,
             let resolved = {
                 let result = {
                     let table = table.table.borrow();
-                    table.get()[id.ctxt]
+                    *table.get().get(id.ctxt as uint)
                 };
                 match result {
                     EmptyCtxt => id.name,
@@ -844,7 +845,7 @@ pub fn marksof(ctxt: SyntaxContext, stopname: Name, table: &SCTable) -> Vec<Mrk>
     loop {
         let table_entry = {
             let table = table.table.borrow();
-            table.get()[loopvar]
+            *table.get().get(loopvar as uint)
         };
         match table_entry {
             EmptyCtxt => {
@@ -873,7 +874,7 @@ pub fn marksof(ctxt: SyntaxContext, stopname: Name, table: &SCTable) -> Vec<Mrk>
 pub fn mtwt_outer_mark(ctxt: SyntaxContext) -> Mrk {
     let sctable = get_sctable();
     let table = sctable.table.borrow();
-    match table.get()[ctxt] {
+    match *table.get().get(ctxt as uint) {
         ast::Mark(mrk,_) => mrk,
         _ => fail!("can't retrieve outer mark when outside is not a mark")
     }
@@ -901,7 +902,7 @@ pub fn getLast(arr: &Vec<Mrk> ) -> Mrk {
 pub fn path_name_eq(a : &ast::Path, b : &ast::Path) -> bool {
     (a.span == b.span)
     && (a.global == b.global)
-    && (segments_name_eq(a.segments, b.segments))
+    && (segments_name_eq(a.segments.as_slice(), b.segments.as_slice()))
 }
 
 // are two arrays of segments equal when compared unhygienically?
@@ -938,6 +939,8 @@ mod test {
     use opt_vec;
     use collections::HashMap;
 
+    use std::vec_ng::Vec;
+
     fn ident_to_segment(id : &Ident) -> PathSegment {
         PathSegment {identifier:id.clone(),
                      lifetimes: opt_vec::Empty,
@@ -1000,7 +1003,7 @@ fn refold_test_sc(mut sc: SyntaxContext, table : &SCTable) -> Vec<TestSC> {
         let mut result = Vec::new();
         loop {
             let table = table.table.borrow();
-            match table.get()[sc] {
+            match *table.get().get(sc as uint) {
                 EmptyCtxt => {return result;},
                 Mark(mrk,tail) => {
                     result.push(M(mrk));
@@ -1024,9 +1027,9 @@ fn refold_test_sc(mut sc: SyntaxContext, table : &SCTable) -> Vec<TestSC> {
         assert_eq!(unfold_test_sc(test_sc.clone(),EMPTY_CTXT,&mut t),4);
         {
             let table = t.table.borrow();
-            assert!(table.get()[2] == Mark(9,0));
-            assert!(table.get()[3] == Rename(id(101,0),14,2));
-            assert!(table.get()[4] == Mark(3,3));
+            assert!(*table.get().get(2) == Mark(9,0));
+            assert!(*table.get().get(3) == Rename(id(101,0),14,2));
+            assert!(*table.get().get(4) == Mark(3,3));
         }
         assert_eq!(refold_test_sc(4,&t),test_sc);
     }
@@ -1045,8 +1048,8 @@ fn unfold_marks(mrks: Vec<Mrk> , tail: SyntaxContext, table: &SCTable)
         assert_eq!(unfold_marks(vec!(3,7),EMPTY_CTXT,&mut t),3);
         {
             let table = t.table.borrow();
-            assert!(table.get()[2] == Mark(7,0));
-            assert!(table.get()[3] == Mark(3,2));
+            assert!(*table.get().get(2) == Mark(7,0));
+            assert!(*table.get().get(3) == Mark(3,2));
         }
     }
 
index df1ad6381deb1661d2c5b79cac43859cfc5ba0b4..ed56ef15a1c8b7ceee5846d4c275189924ff6208 100644 (file)
@@ -21,6 +21,7 @@
 use crateid::CrateId;
 
 use collections::HashSet;
+use std::vec_ng::Vec;
 
 pub trait AttrMetaMethods {
     // This could be changed to `fn check_name(&self, name: InternedString) ->
@@ -226,7 +227,8 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> Vec<@MetaItem> {
         match m.node {
             MetaList(ref n, ref mis) => {
                 @Spanned {
-                    node: MetaList((*n).clone(), sort_meta_items(*mis)),
+                    node: MetaList((*n).clone(),
+                                   sort_meta_items(mis.as_slice())),
                     .. /*bad*/ (*m).clone()
                 }
             }
@@ -243,7 +245,7 @@ pub fn find_linkage_metas(attrs: &[Attribute]) -> Vec<@MetaItem> {
     let mut result = Vec::new();
     for attr in attrs.iter().filter(|at| at.name().equiv(&("link"))) {
         match attr.meta().node {
-            MetaList(_, ref items) => result.push_all(*items),
+            MetaList(_, ref items) => result.push_all(items.as_slice()),
             _ => ()
         }
     }
@@ -272,9 +274,9 @@ pub fn find_inline_attr(attrs: &[Attribute]) -> InlineAttr {
         match attr.node.value.node {
           MetaWord(ref n) if n.equiv(&("inline")) => InlineHint,
           MetaList(ref n, ref items) if n.equiv(&("inline")) => {
-            if contains_name(*items, "always") {
+            if contains_name(items.as_slice(), "always") {
                 InlineAlways
-            } else if contains_name(*items, "never") {
+            } else if contains_name(items.as_slice(), "never") {
                 InlineNever
             } else {
                 InlineHint
index 8cea8f7278680c1c4d88a901c3c78cebb104a8a4..6f17505c902279ee302d0bb241e7d180f10a81c1 100644 (file)
@@ -23,6 +23,7 @@
 
 use std::cell::RefCell;
 use std::cmp;
+use std::vec_ng::Vec;
 use serialize::{Encodable, Decodable, Encoder, Decoder};
 
 pub trait Pos {
@@ -224,14 +225,14 @@ pub fn next_line(&self, pos: BytePos) {
         // the new charpos must be > the last one (or it's the first one).
         let mut lines = self.lines.borrow_mut();;
         let line_len = lines.get().len();
-        assert!(line_len == 0 || (lines.get()[line_len - 1] < pos))
+        assert!(line_len == 0 || (*lines.get().get(line_len - 1) < pos))
         lines.get().push(pos);
     }
 
     // get a line from the list of pre-computed line-beginnings
     pub fn get_line(&self, line: int) -> ~str {
         let mut lines = self.lines.borrow_mut();
-        let begin: BytePos = lines.get()[line] - self.start_pos;
+        let begin: BytePos = *lines.get().get(line as uint) - self.start_pos;
         let begin = begin.to_uint();
         let slice = self.src.slice_from(begin);
         match slice.find('\n') {
@@ -373,7 +374,7 @@ fn lookup_filemap_idx(&self, pos: BytePos) -> uint {
         let mut b = len;
         while b - a > 1u {
             let m = (a + b) / 2u;
-            if files[m].start_pos > pos {
+            if files.get(m).start_pos > pos {
                 b = m;
             } else {
                 a = m;
@@ -383,7 +384,7 @@ fn lookup_filemap_idx(&self, pos: BytePos) -> uint {
         // filemap, but are not the filemaps we want (because they are length 0, they cannot
         // contain what we are looking for). So, rewind until we find a useful filemap.
         loop {
-            let lines = files[a].lines.borrow();
+            let lines = files.get(a).lines.borrow();
             let lines = lines.get();
             if lines.len() > 0 {
                 break;
@@ -405,13 +406,13 @@ fn lookup_line(&self, pos: BytePos) -> FileMapAndLine
         let idx = self.lookup_filemap_idx(pos);
 
         let files = self.files.borrow();
-        let f = files.get()[idx];
+        let f = *files.get().get(idx);
         let mut a = 0u;
         let mut lines = f.lines.borrow_mut();
         let mut b = lines.get().len();
         while b - a > 1u {
             let m = (a + b) / 2u;
-            if lines.get()[m] > pos { b = m; } else { a = m; }
+            if *lines.get().get(m) > pos { b = m; } else { a = m; }
         }
         return FileMapAndLine {fm: f, line: a};
     }
@@ -421,7 +422,7 @@ fn lookup_pos(&self, pos: BytePos) -> Loc {
         let line = a + 1u; // Line numbers start at 1
         let chpos = self.bytepos_to_file_charpos(pos);
         let lines = f.lines.borrow();
-        let linebpos = lines.get()[a];
+        let linebpos = *lines.get().get(a);
         let linechpos = self.bytepos_to_file_charpos(linebpos);
         debug!("codemap: byte pos {:?} is on the line at byte pos {:?}",
                pos, linebpos);
@@ -440,7 +441,7 @@ fn lookup_byte_offset(&self, bpos: BytePos)
         -> FileMapAndBytePos {
         let idx = self.lookup_filemap_idx(bpos);
         let files = self.files.borrow();
-        let fm = files.get()[idx];
+        let fm = *files.get().get(idx);
         let offset = bpos - fm.start_pos;
         return FileMapAndBytePos {fm: fm, pos: offset};
     }
@@ -450,7 +451,7 @@ fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
         debug!("codemap: converting {:?} to char pos", bpos);
         let idx = self.lookup_filemap_idx(bpos);
         let files = self.files.borrow();
-        let map = files.get()[idx];
+        let map = files.get().get(idx);
 
         // The number of extra bytes due to multibyte chars in the FileMap
         let mut total_extra_bytes = 0;
index f3dcd61f2405795c4f48c16f5c4bce66ae32f6c8..e5136b7081b336e940bc1b1c02fd3cac2b096c0d 100644 (file)
@@ -19,6 +19,7 @@
 /// to be `0.0`.
 
 use std::from_str::FromStr;
+use std::vec_ng::Vec;
 
 #[deriving(Clone, Eq)]
 pub struct CrateId {
@@ -49,7 +50,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 impl FromStr for CrateId {
     fn from_str(s: &str) -> Option<CrateId> {
         let pieces: Vec<&str> = s.splitn('#', 1).collect();
-        let path = pieces[0].to_owned();
+        let path = pieces.get(0).to_owned();
 
         if path.starts_with("/") || path.ends_with("/") ||
             path.starts_with(".") || path.is_empty() {
@@ -57,16 +58,18 @@ fn from_str(s: &str) -> Option<CrateId> {
         }
 
         let path_pieces: Vec<&str> = path.rsplitn('/', 1).collect();
-        let inferred_name = path_pieces[0];
+        let inferred_name = *path_pieces.get(0);
 
         let (name, version) = if pieces.len() == 1 {
             (inferred_name.to_owned(), None)
         } else {
-            let hash_pieces: Vec<&str> = pieces[1].splitn(':', 1).collect();
+            let hash_pieces: Vec<&str> = pieces.get(1)
+                                               .splitn(':', 1)
+                                               .collect();
             let (hash_name, hash_version) = if hash_pieces.len() == 1 {
-                ("", hash_pieces[0])
+                ("", *hash_pieces.get(0))
             } else {
-                (hash_pieces[0], hash_pieces[1])
+                (*hash_pieces.get(0), *hash_pieces.get(1))
             };
 
             let name = if !hash_name.is_empty() {
@@ -89,7 +92,7 @@ fn from_str(s: &str) -> Option<CrateId> {
         };
 
         Some(CrateId {
-            path: path,
+            path: path.clone(),
             name: name,
             version: version,
         })
index cb7034a375dd04f579a815a32757af5c7587ea39..c0c64d6fd60b293770670a3479ac3ee3a0d56dfa 100644 (file)
@@ -325,7 +325,7 @@ fn highlight_lines(err: &mut EmitterWriter,
     if lines.lines.len() == 1u {
         let lo = cm.lookup_char_pos(sp.lo);
         let mut digits = 0u;
-        let mut num = (lines.lines[0] + 1u) / 10u;
+        let mut num = (*lines.lines.get(0) + 1u) / 10u;
 
         // how many digits must be indent past?
         while num > 0u { num /= 10u; digits += 1u; }
@@ -337,7 +337,7 @@ fn highlight_lines(err: &mut EmitterWriter,
         // part of the 'filename:line ' part of the previous line.
         let skip = fm.name.len() + digits + 3u;
         for _ in range(0, skip) { s.push_char(' '); }
-        let orig = fm.get_line(lines.lines[0] as int);
+        let orig = fm.get_line(*lines.lines.get(0) as int);
         for pos in range(0u, left-skip) {
             let curChar = orig[pos] as char;
             // Whenever a tab occurs on the previous line, we insert one on
index c2d16d8330157512f8f310aefd88d1c79ff4666d..6080613460da219c0e3976de88bb1df44677bfd3 100644 (file)
@@ -20,6 +20,8 @@
 use parse::token::InternedString;
 use parse::token;
 
+use std::vec_ng::Vec;
+
 enum State {
     Asm,
     Outputs,
@@ -42,7 +44,9 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                -> base::MacResult {
     let mut p = parse::new_parser_from_tts(cx.parse_sess(),
                                            cx.cfg(),
-                                           tts.to_owned());
+                                           tts.iter()
+                                              .map(|x| (*x).clone())
+                                              .collect());
 
     let mut asm = InternedString::new("");
     let mut asm_str_style = None;
index 85a8be1c87697c7abba72cf796adff81b99378ef..e9fe21eded60c87ab17f43a55c42eea7cd1b9d31 100644 (file)
@@ -20,6 +20,7 @@
 use util::small_vector::SmallVector;
 
 use collections::HashMap;
+use std::vec_ng::Vec;
 
 // new-style macro! tt code:
 //
@@ -461,7 +462,9 @@ pub fn get_exprs_from_tts(cx: &ExtCtxt,
                           tts: &[ast::TokenTree]) -> Option<Vec<@ast::Expr> > {
     let mut p = parse::new_parser_from_tts(cx.parse_sess(),
                                            cx.cfg(),
-                                           tts.to_owned());
+                                           tts.iter()
+                                              .map(|x| (*x).clone())
+                                              .collect());
     let mut es = Vec::new();
     while p.token != token::EOF {
         if es.len() != 0 && !p.eat(&token::COMMA) {
@@ -553,6 +556,7 @@ pub fn insert(&mut self, k: Name, v: SyntaxExtension) {
     }
 
     pub fn info<'a>(&'a mut self) -> &'a mut BlockInfo {
-        &mut self.chain[self.chain.len()-1].info
+        let last_chain_index = self.chain.len() - 1;
+        &mut self.chain.get_mut(last_chain_index).info
     }
 }
index 2dc94253a35ff1899f893bb8be91221c9f51a1e8..34625923ea1f6fb4ba0c4b1d37d8f38e1e6c4db4 100644 (file)
@@ -21,6 +21,8 @@
 use parse::token::special_idents;
 use parse::token;
 
+use std::vec_ng::Vec;
+
 pub struct Field {
     ident: ast::Ident,
     ex: @ast::Expr
@@ -132,7 +134,7 @@ fn expr_method_call(&self, span: Span,
 
     fn expr_vstore(&self, sp: Span, expr: @ast::Expr, vst: ast::ExprVstore) -> @ast::Expr;
     fn expr_vec(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr;
-    fn expr_vec_uniq(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr;
+    fn expr_vec_ng(&self, sp: Span) -> @ast::Expr;
     fn expr_vec_slice(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr;
     fn expr_str(&self, sp: Span, s: InternedString) -> @ast::Expr;
     fn expr_str_uniq(&self, sp: Span, s: InternedString) -> @ast::Expr;
@@ -580,8 +582,13 @@ fn expr_vstore(&self, sp: Span, expr: @ast::Expr, vst: ast::ExprVstore) -> @ast:
     fn expr_vec(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr {
         self.expr(sp, ast::ExprVec(exprs, ast::MutImmutable))
     }
-    fn expr_vec_uniq(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr {
-        self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::ExprVstoreUniq)
+    fn expr_vec_ng(&self, sp: Span) -> @ast::Expr {
+        self.expr_call_global(sp,
+                              vec!(self.ident_of("std"),
+                                   self.ident_of("vec_ng"),
+                                   self.ident_of("Vec"),
+                                   self.ident_of("new")),
+                              Vec::new())
     }
     fn expr_vec_slice(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr {
         self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::ExprVstoreSlice)
@@ -701,14 +708,12 @@ fn lambda(&self, span: Span, ids: Vec<ast::Ident> , blk: P<ast::Block>) -> @ast:
 
         self.expr(span, ast::ExprFnBlock(fn_decl, blk))
     }
-    fn lambda0(&self, _span: Span, blk: P<ast::Block>) -> @ast::Expr {
-        let blk_e = self.expr(blk.span, ast::ExprBlock(blk));
-        quote_expr!(self, || $blk_e )
+    fn lambda0(&self, span: Span, blk: P<ast::Block>) -> @ast::Expr {
+        self.lambda(span, Vec::new(), blk)
     }
 
-    fn lambda1(&self, _span: Span, blk: P<ast::Block>, ident: ast::Ident) -> @ast::Expr {
-        let blk_e = self.expr(blk.span, ast::ExprBlock(blk));
-        quote_expr!(self, |$ident| $blk_e )
+    fn lambda1(&self, span: Span, blk: P<ast::Block>, ident: ast::Ident) -> @ast::Expr {
+        self.lambda(span, vec!(ident), blk)
     }
 
     fn lambda_expr(&self, span: Span, ids: Vec<ast::Ident> , expr: @ast::Expr) -> @ast::Expr {
@@ -721,7 +726,11 @@ fn lambda_expr_1(&self, span: Span, expr: @ast::Expr, ident: ast::Ident) -> @ast
         self.lambda1(span, self.block_expr(expr), ident)
     }
 
-    fn lambda_stmts(&self, span: Span, ids: Vec<ast::Ident> , stmts: Vec<@ast::Stmt> ) -> @ast::Expr {
+    fn lambda_stmts(&self,
+                    span: Span,
+                    ids: Vec<ast::Ident>,
+                    stmts: Vec<@ast::Stmt>)
+                    -> @ast::Expr {
         self.lambda(span, ids, self.block(span, stmts, None))
     }
     fn lambda_stmts_0(&self, span: Span, stmts: Vec<@ast::Stmt> ) -> @ast::Expr {
@@ -921,7 +930,9 @@ fn view_use_list(&self, sp: Span, vis: ast::Visibility,
         self.view_use(sp, vis,
                       vec!(@respan(sp,
                                 ast::ViewPathList(self.path(sp, path),
-                                                  imports,
+                                                  imports.iter()
+                                                         .map(|x| *x)
+                                                         .collect(),
                                                   ast::DUMMY_NODE_ID))))
     }
 
index ba6ad4888e29b09f14ac9126b6ced283b771325e..6123fd4d3d4907d1ca516e2ce49886d268889f30 100644 (file)
@@ -17,6 +17,7 @@
 use ext::build::AstBuilder;
 
 use std::char;
+use std::vec_ng::Vec;
 
 pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult {
     // Gather all argument expressions
index e1f487a95bb2bf9f2b7372edcdc2c1a89487f56d..5d11a0d1e2ff23d8dbe2d843f1dd866bdd5b91f0 100644 (file)
 use parse::token;
 use parse;
 
+use std::vec_ng::Vec;
+
 pub fn expand_cfg(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult {
     let mut p = parse::new_parser_from_tts(cx.parse_sess(),
                                            cx.cfg(),
-                                           tts.to_owned());
+                                           tts.iter()
+                                              .map(|x| (*x).clone())
+                                              .collect());
 
     let mut cfgs = Vec::new();
     // parse `cfg!(meta_item, meta_item(x,y), meta_item="foo", ...)`
@@ -42,7 +46,8 @@ pub fn expand_cfg(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::M
     // test_cfg searches for meta items looking like `cfg(foo, ...)`
     let in_cfg = &[cx.meta_list(sp, InternedString::new("cfg"), cfgs)];
 
-    let matches_cfg = attr::test_cfg(cx.cfg(), in_cfg.iter().map(|&x| x));
+    let matches_cfg = attr::test_cfg(cx.cfg().as_slice(),
+                                     in_cfg.iter().map(|&x| x));
     let e = cx.expr_bool(sp, matches_cfg);
     MRExpr(e)
 }
index 3f8fb3ae4f57fda32f1d2a825e6b93ecfdd11722..feda1694ff1936eb359928a3b801ea10d30e65ad 100644 (file)
@@ -14,6 +14,8 @@
 use ext::build::AstBuilder;
 use ext::deriving::generic::*;
 
+use std::vec_ng::Vec;
+
 pub fn expand_deriving_clone(cx: &mut ExtCtxt,
                              span: Span,
                              mitem: @MetaItem,
@@ -99,7 +101,7 @@ fn cs_clone(
                                                                  name))
     }
 
-    if all_fields.len() >= 1 && all_fields[0].name.is_none() {
+    if all_fields.len() >= 1 && all_fields.get(0).name.is_none() {
         // enum-like
         let subcalls = all_fields.map(subcall);
         cx.expr_call_ident(trait_span, ctor_ident, subcalls)
index 975b8885de7348deee1541467544970ef83a3f4b..1e7199ccc9557ef7428138e9489853c0f58fa3f6 100644 (file)
@@ -14,6 +14,8 @@
 use ext::build::AstBuilder;
 use ext::deriving::generic::*;
 
+use std::vec_ng::Vec;
+
 pub fn expand_deriving_eq(cx: &mut ExtCtxt,
                           span: Span,
                           mitem: @MetaItem,
index 5605c0b61071b78563dea401f33ab2e426cea325..66f459882397c929a1cf888f7ddbfc95e2359d3f 100644 (file)
@@ -15,6 +15,8 @@
 use ext::build::AstBuilder;
 use ext::deriving::generic::*;
 
+use std::vec_ng::Vec;
+
 pub fn expand_deriving_ord(cx: &mut ExtCtxt,
                            span: Span,
                            mitem: @MetaItem,
index ffabed95db5240056618b9e47dce21e9909592b8..2b3c0b9ea69153b1e0f8f81300329608e329a200 100644 (file)
@@ -14,6 +14,8 @@
 use ext::build::AstBuilder;
 use ext::deriving::generic::*;
 
+use std::vec_ng::Vec;
+
 pub fn expand_deriving_totaleq(cx: &mut ExtCtxt,
                                span: Span,
                                mitem: @MetaItem,
index 13f72f5fc2f8aab1a51e6ce79cea4c115dc26e2f..89a344bdb7b3c13d62d657dc95f1e215fc009663 100644 (file)
@@ -14,7 +14,9 @@
 use ext::base::ExtCtxt;
 use ext::build::AstBuilder;
 use ext::deriving::generic::*;
+
 use std::cmp::{Ordering, Equal, Less, Greater};
+use std::vec_ng::Vec;
 
 pub fn expand_deriving_totalord(cx: &mut ExtCtxt,
                                 span: Span,
index 4c9a58c46f733270232df7b94900672ef8617329..bc6d69c7ccabe8b4570b7391b9570fb39c414912 100644 (file)
@@ -21,6 +21,8 @@
 use parse::token::InternedString;
 use parse::token;
 
+use std::vec_ng::Vec;
+
 pub fn expand_deriving_decodable(cx: &mut ExtCtxt,
                                  span: Span,
                                  mitem: @MetaItem,
index 46e9dfb89abac3b2e608f3b7d59de1edc2ed3baf..8259459f57ab60305d8a17e322d2f5f1c4b61952 100644 (file)
@@ -14,6 +14,8 @@
 use ext::build::AstBuilder;
 use ext::deriving::generic::*;
 
+use std::vec_ng::Vec;
+
 pub fn expand_deriving_default(cx: &mut ExtCtxt,
                             span: Span,
                             mitem: @MetaItem,
index 2fa6ec6888b6b41d189f2ad145e9564f352b70b2..091ff7b9c90bd360f1e4ddfdd9d5bae934591ebe 100644 (file)
@@ -89,6 +89,8 @@ fn decode(d: &D) -> spanned<T> {
 use ext::deriving::generic::*;
 use parse::token;
 
+use std::vec_ng::Vec;
+
 pub fn expand_deriving_encodable(cx: &mut ExtCtxt,
                                  span: Span,
                                  mitem: @MetaItem,
index 6869ff2fd0dfe59777a06f28345aeeb9d7f10f2e..1dc474551cf7c03127c73dbdd8740fa6f2e8dd19 100644 (file)
@@ -188,7 +188,8 @@ fn eq(&self, other: &int) -> bool {
 use parse::token::InternedString;
 use parse::token;
 
-use std::vec;
+use std::vec_ng::Vec;
+use std::vec_ng;
 
 pub use self::ty::*;
 mod ty;
@@ -410,7 +411,7 @@ fn create_derived_impl(&self,
         cx.item(
             self.span,
             ident,
-            vec_ng::append(vec!(doc_attr), self.attributes),
+            vec_ng::append(vec!(doc_attr), self.attributes.as_slice()),
             ast::ItemImpl(trait_generics, opt_trait_ref,
                           self_type, methods.map(|x| *x)))
     }
@@ -431,13 +432,15 @@ fn expand_struct_def(&self,
                     self,
                     struct_def,
                     type_ident,
-                    self_args, nonself_args)
+                    self_args.as_slice(),
+                    nonself_args.as_slice())
             } else {
                 method_def.expand_struct_method_body(cx,
                                                      self,
                                                      struct_def,
                                                      type_ident,
-                                                     self_args, nonself_args)
+                                                     self_args.as_slice(),
+                                                     nonself_args.as_slice())
             };
 
             method_def.create_method(cx, self,
@@ -465,13 +468,15 @@ fn expand_enum_def(&self,
                     self,
                     enum_def,
                     type_ident,
-                    self_args, nonself_args)
+                    self_args.as_slice(),
+                    nonself_args.as_slice())
             } else {
                 method_def.expand_enum_method_body(cx,
                                                    self,
                                                    enum_def,
                                                    type_ident,
-                                                   self_args, nonself_args)
+                                                   self_args.as_slice(),
+                                                   nonself_args.as_slice())
             };
 
             method_def.create_method(cx, self,
@@ -666,14 +671,15 @@ fn expand_struct_method_body(&self,
 
         // transpose raw_fields
         let fields = if raw_fields.len() > 0 {
-            raw_fields[0].iter()
-                         .enumerate()
-                         .map(|(i, &(span, opt_id, field))| {
-                let other_fields = raw_fields.tail().map(|l| {
-                    match &l[i] {
+            raw_fields.get(0)
+                      .iter()
+                      .enumerate()
+                      .map(|(i, &(span, opt_id, field))| {
+                let other_fields = raw_fields.tail().iter().map(|l| {
+                    match l.get(i) {
                         &(_, _, ex) => ex
                     }
-                });
+                }).collect();
                 FieldInfo {
                     span: span,
                     name: opt_id,
@@ -820,17 +826,17 @@ fn build_enum_match(&self,
                 Some(variant_index) => {
                     // `ref` inside let matches is buggy. Causes havoc wih rusc.
                     // let (variant_index, ref self_vec) = matches_so_far[0];
-                    let (variant, self_vec) = match matches_so_far[0] {
-                        (_, v, ref s) => (v, s)
+                    let (variant, self_vec) = match matches_so_far.get(0) {
+                        &(_, v, ref s) => (v, s)
                     };
 
-                    let mut enum_matching_fields = vec::from_elem(self_vec.len(), Vec::new());
+                    let mut enum_matching_fields = Vec::from_elem(self_vec.len(), Vec::new());
 
                     for triple in matches_so_far.tail().iter() {
                         match triple {
                             &(_, _, ref other_fields) => {
                                 for (i, &(_, _, e)) in other_fields.iter().enumerate() {
-                                    enum_matching_fields[i].push(e);
+                                    enum_matching_fields.get_mut(i).push(e);
                                 }
                             }
                         }
@@ -849,7 +855,7 @@ fn build_enum_match(&self,
                     substructure = EnumMatching(variant_index, variant, field_tuples);
                 }
                 None => {
-                    substructure = EnumNonMatching(*matches_so_far);
+                    substructure = EnumNonMatching(matches_so_far.as_slice());
                 }
             }
             self.call_substructure_method(cx, trait_, type_ident,
@@ -877,7 +883,7 @@ fn build_enum_match(&self,
                 };
 
                 // matching-variant match
-                let variant = enum_def.variants[index];
+                let variant = *enum_def.variants.get(index);
                 let (pattern, idents) = trait_.create_enum_variant_pattern(cx,
                                                                            variant,
                                                                            current_match_str,
@@ -1149,11 +1155,19 @@ pub fn cs_fold(use_foldl: bool,
         EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => {
             if use_foldl {
                 all_fields.iter().fold(base, |old, field| {
-                    f(cx, field.span, old, field.self_, field.other)
+                    f(cx,
+                      field.span,
+                      old,
+                      field.self_,
+                      field.other.as_slice())
                 })
             } else {
                 all_fields.rev_iter().fold(base, |old, field| {
-                    f(cx, field.span, old, field.self_, field.other)
+                    f(cx,
+                      field.span,
+                      old,
+                      field.self_,
+                      field.other.as_slice())
                 })
             }
         },
index ed7f61d5e94218a6ffcac03fd5c83b06a4215366..1d6cfab120d2de2087a1ef8909518950fd924b31 100644 (file)
@@ -14,6 +14,8 @@
 use ext::build::AstBuilder;
 use ext::deriving::generic::*;
 
+use std::vec_ng::Vec;
+
 pub fn expand_deriving_hash(cx: &mut ExtCtxt,
                             span: Span,
                             mitem: @MetaItem,
index 2615479fa72aa498c8dc62d9bd9842132a0f622f..ecd042eb172ef251fefb239e1dcf60a0c031e6eb 100644 (file)
@@ -16,6 +16,8 @@
 use ext::deriving::generic::*;
 use parse::token::InternedString;
 
+use std::vec_ng::Vec;
+
 pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt,
                                       span: Span,
                                       mitem: @MetaItem,
index 203e5aae567cf48590fd1da7928de02786ac03c2..da9679eb65578a1aef250e66b5d9970f9f65be38 100644 (file)
@@ -16,6 +16,8 @@
 use ext::deriving::generic::*;
 use opt_vec;
 
+use std::vec_ng::Vec;
+
 pub fn expand_deriving_rand(cx: &mut ExtCtxt,
                             span: Span,
                             mitem: @MetaItem,
@@ -64,7 +66,7 @@ fn rand_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure)
     let rand_call = |cx: &mut ExtCtxt, span| {
         cx.expr_call_global(span,
                             rand_ident.clone(),
-                            vec!( rng[0] ))
+                            vec!( *rng.get(0) ))
     };
 
     return match *substr.fields {
@@ -90,7 +92,7 @@ fn rand_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure)
             // ::std::rand::Rand::rand(rng)
             let rv_call = cx.expr_call(trait_span,
                                        rand_name,
-                                       vec!( rng[0] ));
+                                       vec!( *rng.get(0) ));
 
             // need to specify the uint-ness of the random number
             let uint_ty = cx.ty_ident(trait_span, cx.ident_of("uint"));
index 0622588be8ee4a8a4f392a003962fde700c3bc6f..51399d8efabe97c2a0df4c73a713a32cce7b4f4c 100644 (file)
@@ -19,6 +19,7 @@
 use parse::token;
 
 use collections::HashMap;
+use std::vec_ng::Vec;
 
 pub fn expand_deriving_show(cx: &mut ExtCtxt,
                             span: Span,
@@ -79,7 +80,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,
         EnumMatching(_, _, ref fields) if fields.len() == 0 => {}
 
         Struct(ref fields) | EnumMatching(_, _, ref fields) => {
-            if fields[0].name.is_none() {
+            if fields.get(0).name.is_none() {
                 // tuple struct/"normal" variant
 
                 format_string.push_str("(");
@@ -135,6 +136,6 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,
     // phew, not our responsibility any more!
     format::expand_preparsed_format_args(cx, span,
                                          format_closure,
-                                         format_string, exprs, ~[],
+                                         format_string, exprs, Vec::new(),
                                          HashMap::new())
 }
index f7019d65058b93595f507c6bcb4246c8f3e8fba0..b88cd117911c70896d08bbb2e1f9fc8ab858d52a 100644 (file)
@@ -21,6 +21,8 @@
 use opt_vec;
 use opt_vec::OptVec;
 
+use std::vec_ng::Vec;
+
 /// The types of pointers
 pub enum PtrTy<'a> {
     Send, // ~
@@ -188,10 +190,10 @@ pub fn to_path(&self,
 fn mk_ty_param(cx: &ExtCtxt, span: Span, name: &str, bounds: &[Path],
                self_ident: Ident, self_generics: &Generics) -> ast::TyParam {
     let bounds = opt_vec::from(
-        bounds.map(|b| {
+        bounds.iter().map(|b| {
             let path = b.to_path(cx, span, self_ident, self_generics);
             cx.typarambound(path)
-        }));
+        }).collect());
     cx.typaram(cx.ident_of(name), bounds, None)
 }
 
@@ -204,8 +206,8 @@ fn mk_generics(lifetimes: Vec<ast::Lifetime> ,  ty_params: Vec<ast::TyParam> ) -
 
 /// Lifetimes and bounds on type parameters
 pub struct LifetimeBounds<'a> {
-    lifetimes: Vec<&'a str> ,
-    bounds: vec!((&'a str, Vec<Path<'a>> ))
+    lifetimes: Vec<&'a str>,
+    bounds: Vec<(&'a str, Vec<Path<'a>>)>,
 }
 
 impl<'a> LifetimeBounds<'a> {
@@ -226,7 +228,12 @@ pub fn to_generics(&self,
         let ty_params = self.bounds.map(|t| {
             match t {
                 &(ref name, ref bounds) => {
-                    mk_ty_param(cx, span, *name, *bounds, self_ty, self_generics)
+                    mk_ty_param(cx,
+                                span,
+                                *name,
+                                bounds.as_slice(),
+                                self_ty,
+                                self_generics)
                 }
             }
         });
index 9feae18689416c6c215a4a8fe5796669a94eddae..98c0ec9d07238502ce8c6b7d31b7a75eaf785564 100644 (file)
@@ -14,6 +14,8 @@
 use ext::build::AstBuilder;
 use ext::deriving::generic::*;
 
+use std::vec_ng::Vec;
+
 pub fn expand_deriving_zero(cx: &mut ExtCtxt,
                             span: Span,
                             mitem: @MetaItem,
index aacb2a7408738fd3d7c0eb4261324cd52dbee5b1..b0b5fa26015ccd6f5ee124ba75e44e93d7f500fb 100644 (file)
@@ -19,6 +19,7 @@
 use ext::base::*;
 use ext::base;
 use ext::build::AstBuilder;
+use opt_vec;
 use parse::token;
 
 use std::os;
@@ -31,8 +32,30 @@ pub fn expand_option_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     };
 
     let e = match os::getenv(var) {
-      None => quote_expr!(cx, ::std::option::None::<&'static str>),
-      Some(s) => quote_expr!(cx, ::std::option::Some($s))
+      None => {
+          cx.expr_path(cx.path_all(sp,
+                                   true,
+                                   vec!(cx.ident_of("std"),
+                                        cx.ident_of("option"),
+                                        cx.ident_of("None")),
+                                   opt_vec::Empty,
+                                   vec!(cx.ty_rptr(sp,
+                                                   cx.ty_ident(sp,
+                                                        cx.ident_of("str")),
+                                                   Some(cx.lifetime(sp,
+                                                        cx.ident_of(
+                                                            "static").name)),
+                                                   ast::MutImmutable))))
+      }
+      Some(s) => {
+          cx.expr_call_global(sp,
+                              vec!(cx.ident_of("std"),
+                                   cx.ident_of("option"),
+                                   cx.ident_of("Some")),
+                              vec!(cx.expr_str(sp,
+                                               token::intern_and_get_ident(
+                                          s))))
+      }
     };
     MRExpr(e)
 }
@@ -48,7 +71,9 @@ pub fn expand_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
         Some(exprs) => exprs
     };
 
-    let var = match expr_to_str(cx, exprs[0], "expected string literal") {
+    let var = match expr_to_str(cx,
+                                *exprs.get(0),
+                                "expected string literal") {
         None => return MacResult::dummy_expr(sp),
         Some((v, _style)) => v
     };
@@ -59,7 +84,7 @@ pub fn expand_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                                                 var))
         }
         2 => {
-            match expr_to_str(cx, exprs[1], "expected string literal") {
+            match expr_to_str(cx, *exprs.get(1), "expected string literal") {
                 None => return MacResult::dummy_expr(sp),
                 Some((s, _style)) => s
             }
index 08d407458941a2f934b5bb2021d02baca54833d5..b162e17f53de1d55223129a031208b7cd2b43469 100644 (file)
@@ -31,6 +31,7 @@
 use std::cast;
 use std::unstable::dynamic_lib::DynamicLibrary;
 use std::os;
+use std::vec_ng::Vec;
 
 pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr {
     match e.node {
@@ -53,7 +54,7 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr {
                         // let compilation continue
                         return MacResult::raw_dummy_expr(e.span);
                     }
-                    let extname = pth.segments[0].identifier;
+                    let extname = pth.segments.get(0).identifier;
                     let extnamestr = token::get_ident(extname);
                     // leaving explicit deref here to highlight unbox op:
                     let marked_after = match fld.extsbox.find(&extname.name) {
@@ -77,7 +78,7 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr {
                             });
                             let fm = fresh_mark();
                             // mark before:
-                            let marked_before = mark_tts(*tts,fm);
+                            let marked_before = mark_tts(tts.as_slice(), fm);
 
                             // The span that we pass to the expanders we want to
                             // be the root of the call stack. That's the most
@@ -87,7 +88,7 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr {
 
                             let expanded = match expandfun.expand(fld.cx,
                                                    mac_span.call_site,
-                                                   marked_before) {
+                                                   marked_before.as_slice()) {
                                 MRExpr(e) => e,
                                 MRAny(any_macro) => any_macro.make_expr(),
                                 _ => {
@@ -181,7 +182,10 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr {
             // `match i.next() { ... }`
             let match_expr = {
                 let next_call_expr =
-                    fld.cx.expr_method_call(span, fld.cx.expr_path(local_path), next_ident, Vec::new());
+                    fld.cx.expr_method_call(span,
+                                            fld.cx.expr_path(local_path),
+                                            next_ident,
+                                            Vec::new());
 
                 fld.cx.expr_match(span, next_call_expr, vec!(none_arm, some_arm))
             };
@@ -276,7 +280,7 @@ pub fn expand_item(it: @ast::Item, fld: &mut MacroExpander)
         ast::ItemMac(..) => expand_item_mac(it, fld),
         ast::ItemMod(_) | ast::ItemForeignMod(_) => {
             fld.cx.mod_push(it.ident);
-            let macro_escape = contains_macro_escape(it.attrs);
+            let macro_escape = contains_macro_escape(it.attrs.as_slice());
             let result = with_exts_frame!(fld.extsbox,
                                           macro_escape,
                                           noop_fold_item(it, fld));
@@ -309,7 +313,7 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander)
         _ => fld.cx.span_bug(it.span, "invalid item macro invocation")
     };
 
-    let extname = pth.segments[0].identifier;
+    let extname = pth.segments.get(0).identifier;
     let extnamestr = token::get_ident(extname);
     let fm = fresh_mark();
     let expanded = match fld.extsbox.find(&extname.name) {
@@ -339,8 +343,8 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander)
                 }
             });
             // mark before expansion:
-            let marked_before = mark_tts(tts,fm);
-            expander.expand(fld.cx, it.span, marked_before)
+            let marked_before = mark_tts(tts.as_slice(), fm);
+            expander.expand(fld.cx, it.span, marked_before.as_slice())
         }
         Some(&IdentTT(ref expander, span)) => {
             if it.ident.name == parse::token::special_idents::invalid.name {
@@ -358,7 +362,7 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander)
                 }
             });
             // mark before expansion:
-            let marked_tts = mark_tts(tts,fm);
+            let marked_tts = mark_tts(tts.as_slice(), fm);
             expander.expand(fld.cx, it.span, it.ident, marked_tts)
         }
         _ => {
@@ -391,7 +395,7 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander)
             // yikes... no idea how to apply the mark to this. I'm afraid
             // we're going to have to wait-and-see on this one.
             fld.extsbox.insert(intern(name), ext);
-            if attr::contains_name(it.attrs, "macro_export") {
+            if attr::contains_name(it.attrs.as_slice(), "macro_export") {
                 SmallVector::one(it)
             } else {
                 SmallVector::zero()
@@ -504,7 +508,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> {
         fld.cx.span_err(pth.span, "expected macro name without module separators");
         return SmallVector::zero();
     }
-    let extname = pth.segments[0].identifier;
+    let extname = pth.segments.get(0).identifier;
     let extnamestr = token::get_ident(extname);
     let marked_after = match fld.extsbox.find(&extname.name) {
         None => {
@@ -523,7 +527,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> {
             });
             let fm = fresh_mark();
             // mark before expansion:
-            let marked_tts = mark_tts(tts,fm);
+            let marked_tts = mark_tts(tts.as_slice(), fm);
 
             // See the comment in expand_expr for why we want the original span,
             // not the current mac.span.
@@ -531,7 +535,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> {
 
             let expanded = match expandfun.expand(fld.cx,
                                                   mac_span.call_site,
-                                                  marked_tts) {
+                                                  marked_tts.as_slice()) {
                 MRExpr(e) => {
                     @codemap::Spanned {
                         node: StmtExpr(e, ast::DUMMY_NODE_ID),
@@ -676,7 +680,8 @@ fn visit_pat(&mut self, pattern: &ast::Pat, _: ()) {
                         span: _,
                         segments: ref segments
                     } if segments.len() == 1 => {
-                        self.ident_accumulator.push(segments[0].identifier)
+                        self.ident_accumulator.push(segments.get(0)
+                                                            .identifier)
                     }
                     // I believe these must be enums...
                     _ => ()
@@ -843,7 +848,7 @@ fn fold_mac(&mut self, m: &ast::Mac) -> ast::Mac {
         let macro = match m.node {
             MacInvocTT(ref path, ref tts, ctxt) => {
                 MacInvocTT(self.fold_path(path),
-                           fold_tts(*tts, self),
+                           fold_tts(tts.as_slice(), self),
                            new_mark(self.mark, ctxt))
             }
         };
@@ -912,6 +917,8 @@ mod test {
     use visit;
     use visit::Visitor;
 
+    use std::vec_ng::Vec;
+
     // a visitor that extracts the paths
     // from a given thingy and puts them in a mutable
     // array (passed in to the traversal)
@@ -1015,9 +1022,9 @@ fn get_registrar_symbol(&mut self, _: ast::CrateNum) -> Option<~str> {
         let attr2 = make_dummy_attr ("bar");
         let escape_attr = make_dummy_attr ("macro_escape");
         let attrs1 = vec!(attr1, escape_attr, attr2);
-        assert_eq!(contains_macro_escape (attrs1),true);
+        assert_eq!(contains_macro_escape(attrs1.as_slice()),true);
         let attrs2 = vec!(attr1,attr2);
-        assert_eq!(contains_macro_escape (attrs2),false);
+        assert_eq!(contains_macro_escape(attrs2.as_slice()),false);
     }
 
     // make a MetaWord outer attribute with the given name
@@ -1082,7 +1089,7 @@ fn expand_crate_str(crate_str: ~str) -> ast::Crate {
     // in principle, you might want to control this boolean on a per-varref basis,
     // but that would make things even harder to understand, and might not be
     // necessary for thorough testing.
-    type RenamingTest = (&'static str, vec!(Vec<uint> ), bool);
+    type RenamingTest = (&'static str, Vec<Vec<uint>>, bool);
 
     #[test]
     fn automatic_renaming () {
@@ -1131,8 +1138,8 @@ fn run_renaming_test(t: &RenamingTest, test_idx: uint) {
         // must be one check clause for each binding:
         assert_eq!(bindings.len(),bound_connections.len());
         for (binding_idx,shouldmatch) in bound_connections.iter().enumerate() {
-            let binding_name = mtwt_resolve(bindings[binding_idx]);
-            let binding_marks = mtwt_marksof(bindings[binding_idx].ctxt,invalid_name);
+            let binding_name = mtwt_resolve(*bindings.get(binding_idx));
+            let binding_marks = mtwt_marksof(bindings.get(binding_idx).ctxt,invalid_name);
             // shouldmatch can't name varrefs that don't exist:
             assert!((shouldmatch.len() == 0) ||
                     (varrefs.len() > *shouldmatch.iter().max().unwrap()));
@@ -1141,13 +1148,18 @@ fn run_renaming_test(t: &RenamingTest, test_idx: uint) {
                     // it should be a path of length 1, and it should
                     // be free-identifier=? or bound-identifier=? to the given binding
                     assert_eq!(varref.segments.len(),1);
-                    let varref_name = mtwt_resolve(varref.segments[0].identifier);
-                    let varref_marks = mtwt_marksof(varref.segments[0].identifier.ctxt,
+                    let varref_name = mtwt_resolve(varref.segments
+                                                         .get(0)
+                                                         .identifier);
+                    let varref_marks = mtwt_marksof(varref.segments
+                                                          .get(0)
+                                                          .identifier
+                                                          .ctxt,
                                                     invalid_name);
                     if !(varref_name==binding_name) {
                         println!("uh oh, should match but doesn't:");
                         println!("varref: {:?}",varref);
-                        println!("binding: {:?}", bindings[binding_idx]);
+                        println!("binding: {:?}", *bindings.get(binding_idx));
                         ast_util::display_sctable(get_sctable());
                     }
                     assert_eq!(varref_name,binding_name);
@@ -1158,7 +1170,8 @@ fn run_renaming_test(t: &RenamingTest, test_idx: uint) {
                     }
                 } else {
                     let fail = (varref.segments.len() == 1)
-                        && (mtwt_resolve(varref.segments[0].identifier) == binding_name);
+                        && (mtwt_resolve(varref.segments.get(0).identifier) ==
+                                         binding_name);
                     // temp debugging:
                     if fail {
                         println!("failure on test {}",test_idx);
@@ -1167,11 +1180,13 @@ fn run_renaming_test(t: &RenamingTest, test_idx: uint) {
                         println!("uh oh, matches but shouldn't:");
                         println!("varref: {:?}",varref);
                         // good lord, you can't make a path with 0 segments, can you?
-                        let string = token::get_ident(varref.segments[0].identifier);
+                        let string = token::get_ident(varref.segments
+                                                            .get(0)
+                                                            .identifier);
                         println!("varref's first segment's uint: {}, and string: \"{}\"",
-                                 varref.segments[0].identifier.name,
+                                 varref.segments.get(0).identifier.name,
                                  string.get());
-                        println!("binding: {:?}", bindings[binding_idx]);
+                        println!("binding: {:?}", *bindings.get(binding_idx));
                         ast_util::display_sctable(get_sctable());
                     }
                     assert!(!fail);
@@ -1197,7 +1212,7 @@ fn run_renaming_test(t: &RenamingTest, test_idx: uint) {
                 let string = ident.get();
                 "xx" == string
             }).collect();
-        let cxbinds: &[&ast::Ident] = cxbinds;
+        let cxbinds: &[&ast::Ident] = cxbinds.as_slice();
         let cxbind = match cxbinds {
             [b] => b,
             _ => fail!("expected just one binding for ext_cx")
@@ -1211,16 +1226,17 @@ fn run_renaming_test(t: &RenamingTest, test_idx: uint) {
         // the xx binding should bind all of the xx varrefs:
         for (idx,v) in varrefs.iter().filter(|p| {
             p.segments.len() == 1
-            && "xx" == token::get_ident(p.segments[0].identifier).get()
+            && "xx" == token::get_ident(p.segments.get(0).identifier).get()
         }).enumerate() {
-            if mtwt_resolve(v.segments[0].identifier) != resolved_binding {
+            if mtwt_resolve(v.segments.get(0).identifier) !=
+                    resolved_binding {
                 println!("uh oh, xx binding didn't match xx varref:");
                 println!("this is xx varref \\# {:?}",idx);
                 println!("binding: {:?}",cxbind);
                 println!("resolves to: {:?}",resolved_binding);
-                println!("varref: {:?}",v.segments[0].identifier);
+                println!("varref: {:?}",v.segments.get(0).identifier);
                 println!("resolves to: {:?}",
-                         mtwt_resolve(v.segments[0].identifier));
+                         mtwt_resolve(v.segments.get(0).identifier));
                 let table = get_sctable();
                 println!("SC table:");
 
@@ -1231,7 +1247,8 @@ fn run_renaming_test(t: &RenamingTest, test_idx: uint) {
                     }
                 }
             }
-            assert_eq!(mtwt_resolve(v.segments[0].identifier),resolved_binding);
+            assert_eq!(mtwt_resolve(v.segments.get(0).identifier),
+                       resolved_binding);
         };
     }
 
index 4b27951aa507a9e8ae358f3e909d312a2b8ae426..7752d88596820bd5d66dee1d3da13217d9e0e6d8 100644 (file)
@@ -22,6 +22,7 @@
 use std::fmt::parse;
 use collections::{HashMap, HashSet};
 use std::vec;
+use std::vec_ng::Vec;
 
 #[deriving(Eq)]
 enum ArgumentType {
@@ -49,7 +50,7 @@ struct Context<'a> {
     // were declared in.
     names: HashMap<~str, @ast::Expr>,
     name_types: HashMap<~str, ArgumentType>,
-    name_ordering: ~[~str],
+    name_ordering: Vec<~str>,
 
     // Collection of the compiled `rt::Piece` structures
     pieces: Vec<@ast::Expr> ,
@@ -70,15 +71,17 @@ struct Context<'a> {
 ///     Some((fmtstr, unnamed arguments, ordering of named arguments,
 ///           named arguments))
 fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-    -> (@ast::Expr, Option<(@ast::Expr, Vec<@ast::Expr>, ~[~str],
+    -> (@ast::Expr, Option<(@ast::Expr, Vec<@ast::Expr>, Vec<~str>,
                             HashMap<~str, @ast::Expr>)>) {
     let mut args = Vec::new();
     let mut names = HashMap::<~str, @ast::Expr>::new();
-    let mut order = ~[];
+    let mut order = Vec::new();
 
     let mut p = rsparse::new_parser_from_tts(ecx.parse_sess(),
                                              ecx.cfg(),
-                                             tts.to_owned());
+                                             tts.iter()
+                                                .map(|x| (*x).clone())
+                                                .collect());
     // Parse the leading function expression (maybe a block, maybe a path)
     let extra = p.parse_expr();
     if !p.eat(&token::COMMA) {
@@ -275,14 +278,14 @@ fn verify_arg_type(&mut self, arg: Position, ty: ArgumentType) {
                     return;
                 }
                 {
-                    let arg_type = match self.arg_types[arg] {
-                        None => None,
-                        Some(ref x) => Some(x)
+                    let arg_type = match self.arg_types.get(arg) {
+                        &None => None,
+                        &Some(ref x) => Some(x)
                     };
-                    self.verify_same(self.args[arg].span, &ty, arg_type);
+                    self.verify_same(self.args.get(arg).span, &ty, arg_type);
                 }
-                if self.arg_types[arg].is_none() {
-                    self.arg_types[arg] = Some(ty);
+                if self.arg_types.get(arg).is_none() {
+                    *self.arg_types.get_mut(arg) = Some(ty);
                 }
             }
 
@@ -653,7 +656,9 @@ fn to_expr(&self, extra: @ast::Expr) -> @ast::Expr {
         // of each variable because we don't want to move out of the arguments
         // passed to this function.
         for (i, &e) in self.args.iter().enumerate() {
-            if self.arg_types[i].is_none() { continue } // error already generated
+            if self.arg_types.get(i).is_none() {
+                continue // error already generated
+            }
 
             let name = self.ecx.ident_of(format!("__arg{}", i));
             pats.push(self.ecx.pat_ident(e.span, name));
@@ -748,7 +753,7 @@ fn to_expr(&self, extra: @ast::Expr) -> @ast::Expr {
     fn format_arg(&self, sp: Span, argno: Position, arg: @ast::Expr)
                   -> @ast::Expr {
         let ty = match argno {
-            Exact(ref i) => self.arg_types[*i].get_ref(),
+            Exact(ref i) => self.arg_types.get(*i).get_ref(),
             Named(ref s) => self.name_types.get(s)
         };
 
@@ -822,7 +827,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,
                                     efmt: @ast::Expr, args: Vec<@ast::Expr>,
                                     name_ordering: Vec<~str>,
                                     names: HashMap<~str, @ast::Expr>) -> @ast::Expr {
-    let arg_types = vec::from_fn(args.len(), |_| None);
+    let arg_types = Vec::from_fn(args.len(), |_| None);
     let mut cx = Context {
         ecx: ecx,
         args: args,
@@ -871,7 +876,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,
     // Make sure that all arguments were used and all arguments have types.
     for (i, ty) in cx.arg_types.iter().enumerate() {
         if ty.is_none() {
-            cx.ecx.span_err(cx.args[i].span, "argument never used");
+            cx.ecx.span_err(cx.args.get(i).span, "argument never used");
         }
     }
     for (name, e) in cx.names.iter() {
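
In format.rs the per-argument type table becomes a `Vec<Option<ArgumentType>>`: it is preallocated with `Vec::from_fn(args.len(), |_| None)`, read through `get`, and filled in place through `get_mut`. A rough present-day sketch of that fill-on-first-use table (the `ArgKind` enum and `Context` shape here are simplified placeholders, not the patch's types):

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum ArgKind { Display, Debug }

    struct Context { arg_types: Vec<Option<ArgKind>> }

    impl Context {
        fn new(n_args: usize) -> Context {
            // 2014's `Vec::from_fn(n, |_| None)` is roughly `vec![None; n]` today.
            Context { arg_types: vec![None; n_args] }
        }

        fn verify_arg_type(&mut self, arg: usize, ty: ArgKind) -> Result<(), String> {
            match self.arg_types[arg] {
                Some(prev) if prev != ty => {
                    Err(format!("argument {} used as {:?} and {:?}", arg, prev, ty))
                }
                Some(_) => Ok(()),
                None => {
                    // First use of this argument: record its type.
                    self.arg_types[arg] = Some(ty);
                    Ok(())
                }
            }
        }
    }

    fn main() {
        let mut cx = Context::new(2);
        assert!(cx.verify_arg_type(0, ArgKind::Display).is_ok());
        assert!(cx.verify_arg_type(0, ArgKind::Debug).is_err());
    }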
index 5ee4084d207ba36dcc942d98933435d7757305a0..b94928238e9bb215540349e5f60d61bf75c40f6f 100644 (file)
@@ -20,7 +20,8 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt,
                       -> base::MacResult {
 
     cx.print_backtrace();
-    println!("{}", print::pprust::tt_to_str(&ast::TTDelim(@tt.to_owned())));
+    println!("{}", print::pprust::tt_to_str(&ast::TTDelim(
+                @tt.iter().map(|x| (*x).clone()).collect())));
 
     //trivial expression
     MRExpr(@ast::Expr {
index 3b8df84acc3237f57097ebacb0432534c4612973..e96597d41594b94d423c674e5991f15b66806b23 100644 (file)
@@ -17,6 +17,8 @@
 use parse::token;
 use parse;
 
+use std::vec_ng::Vec;
+
 /**
 *
 * Quasiquoting works via token trees.
@@ -35,6 +37,8 @@ pub mod rt {
     use parse;
     use print::pprust;
 
+    use std::vec_ng::Vec;
+
     pub use ast::*;
     pub use parse::token::*;
     pub use parse::new_parser_from_tts;
@@ -305,7 +309,7 @@ pub fn expand_quote_expr(cx: &mut ExtCtxt,
 pub fn expand_quote_item(cx: &mut ExtCtxt,
                          sp: Span,
                          tts: &[ast::TokenTree]) -> base::MacResult {
-    let e_attrs = cx.expr_vec_uniq(sp, Vec::new());
+    let e_attrs = cx.expr_vec_ng(sp);
     let expanded = expand_parse_call(cx, sp, "parse_item",
                                     vec!(e_attrs), tts);
     base::MRExpr(expanded)
@@ -332,7 +336,7 @@ pub fn expand_quote_ty(cx: &mut ExtCtxt,
 pub fn expand_quote_stmt(cx: &mut ExtCtxt,
                          sp: Span,
                          tts: &[ast::TokenTree]) -> base::MacResult {
-    let e_attrs = cx.expr_vec_uniq(sp, Vec::new());
+    let e_attrs = cx.expr_vec_ng(sp);
     let expanded = expand_parse_call(cx, sp, "parse_stmt",
                                     vec!(e_attrs), tts);
     base::MRExpr(expanded)
@@ -540,7 +544,7 @@ fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec<@ast::Stmt> {
             vec!(cx.stmt_expr(e_push))
         }
 
-        ast::TTDelim(ref tts) => mk_tts(cx, sp, **tts),
+        ast::TTDelim(ref tts) => mk_tts(cx, sp, tts.as_slice()),
         ast::TTSeq(..) => fail!("TTSeq in quote!"),
 
         ast::TTNonterminal(sp, ident) => {
@@ -583,7 +587,9 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 
     let mut p = parse::new_parser_from_tts(cx.parse_sess(),
                                            cx.cfg(),
-                                           tts.to_owned());
+                                           tts.iter()
+                                              .map(|x| (*x).clone())
+                                              .collect());
     p.quote_depth += 1u;
 
     let cx_expr = p.parse_expr();
@@ -629,14 +635,14 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                                   id_ext("_sp"),
                                   e_sp);
 
-    let stmt_let_tt = cx.stmt_let(sp, true,
-                                  id_ext("tt"),
-                                  cx.expr_vec_uniq(sp, Vec::new()));
+    let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));
 
+    let mut vector = vec!(stmt_let_sp, stmt_let_tt);
+    vector.push_all_move(mk_tts(cx, sp, tts.as_slice()));
     let block = cx.expr_block(
         cx.block_all(sp,
                      Vec::new(),
-                     vec!(stmt_let_sp, stmt_let_tt) + mk_tts(cx, sp, tts),
+                     vector,
                      Some(cx.expr_ident(sp, id_ext("tt")))));
 
     (cx_expr, block)
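
In quote.rs the statement list for the generated block can no longer be built with `+` on vectors; the patch creates the vector first and moves the token-tree statements in with `push_all_move`. In current Rust the same shape is usually written with `Vec::extend`; a small sketch with `Stmt` as an illustrative stand-in for the AST statement type:

    #[derive(Debug)]
    struct Stmt(&'static str);

    fn mk_tts() -> Vec<Stmt> {
        // Stand-in for the statements produced by `mk_tts(cx, sp, tts)`.
        vec![Stmt("tt.push(..)"), Stmt("tt.push(..)")]
    }

    fn main() {
        // 2014: `vec!(stmt_let_sp, stmt_let_tt) + mk_tts(..)` no longer works for the
        // new Vec, so the patch uses `push_all_move`; `extend` is the modern equivalent.
        let mut stmts = vec![Stmt("let _sp = ..."), Stmt("let mut tt = Vec::new()")];
        stmts.extend(mk_tts());
        println!("{:?}", stmts);
    }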
index 15e753ee49f7093d75c6cbe8aa8c04d488ed88fd..4c18eb83afceec6e1cd162840ac1a9cda1dfbfac 100644 (file)
@@ -15,6 +15,8 @@
 use visit;
 use visit::Visitor;
 
+use std::vec_ng::Vec;
+
 struct MacroRegistrarContext {
     registrars: Vec<(ast::NodeId, Span)> ,
 }
@@ -23,7 +25,8 @@ impl Visitor<()> for MacroRegistrarContext {
     fn visit_item(&mut self, item: &ast::Item, _: ()) {
         match item.node {
             ast::ItemFn(..) => {
-                if attr::contains_name(item.attrs, "macro_registrar") {
+                if attr::contains_name(item.attrs.as_slice(),
+                                       "macro_registrar") {
                     self.registrars.push((item.id, item.span));
                 }
             }
index c81ee55c23768aa75d1ec0eaf8a5f27432ee17fc..b31388f58eb9fdc653529d9d904706227afdea1e 100644 (file)
@@ -142,6 +142,7 @@ pub fn expand_include_bin(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
             return MacResult::dummy_expr(sp);
         }
         Ok(bytes) => {
+            let bytes = bytes.iter().map(|x| *x).collect();
             base::MRExpr(cx.expr_lit(sp, ast::LitBinary(Rc::new(bytes))))
         }
     }
index db2c9dcddb6dcdee3c439dc839c082a5d0ccc60f..183cccde18e86c487b2fc39824ead710ca6fcea9 100644 (file)
@@ -24,7 +24,7 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt,
     let cfg = cx.cfg();
     let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic,
                                None,
-                               tt.to_owned());
+                               tt.iter().map(|x| (*x).clone()).collect());
     let mut rust_parser = Parser(sess, cfg.clone(), tt_rdr.dup());
 
     if rust_parser.is_keyword(keywords::True) {
index cb86f2cecaa6682a6107743dda0e61d15023abe2..c9d3150c2cd417b630bd6b45e0a3b07060b960db 100644 (file)
@@ -22,7 +22,7 @@
 use parse::token;
 
 use collections::HashMap;
-use std::vec;
+use std::vec_ng::Vec;
 
 /* This is an Earley-like parser, without support for in-grammar nonterminals,
 only by calling out to the main rust parser for named nonterminals (which it
@@ -103,7 +103,7 @@ pub struct MatcherPos {
     sep: Option<Token>,
     idx: uint,
     up: Option<~MatcherPos>,
-    matches: vec!(Vec<@NamedMatch> ),
+    matches: Vec<Vec<@NamedMatch>>,
     match_lo: uint, match_hi: uint,
     sp_lo: BytePos,
 }
@@ -112,7 +112,9 @@ pub fn count_names(ms: &[Matcher]) -> uint {
     ms.iter().fold(0, |ct, m| {
         ct + match m.node {
             MatchTok(_) => 0u,
-            MatchSeq(ref more_ms, _, _, _, _) => count_names((*more_ms)),
+            MatchSeq(ref more_ms, _, _, _, _) => {
+                count_names(more_ms.as_slice())
+            }
             MatchNonterminal(_, _, _) => 1u
         }})
 }
@@ -131,7 +133,7 @@ pub fn initial_matcher_pos(ms: Vec<Matcher> , sep: Option<Token>, lo: BytePos)
             }
         }
     }
-    let matches = vec::from_fn(count_names(ms), |_i| Vec::new());
+    let matches = Vec::from_fn(count_names(ms.as_slice()), |_i| Vec::new());
     ~MatcherPos {
         elts: ms,
         sep: sep,
@@ -208,7 +210,7 @@ pub fn parse_or_else<R: Reader>(sess: @ParseSess,
                                 rdr: R,
                                 ms: Vec<Matcher> )
                                 -> HashMap<Ident, @NamedMatch> {
-    match parse(sess, cfg, rdr, ms) {
+    match parse(sess, cfg, rdr, ms.as_slice()) {
         Success(m) => m,
         Failure(sp, str) => sess.span_diagnostic.span_fatal(sp, str),
         Error(sp, str) => sess.span_diagnostic.span_fatal(sp, str)
@@ -231,7 +233,11 @@ pub fn parse<R: Reader>(sess: @ParseSess,
                         ms: &[Matcher])
                         -> ParseResult {
     let mut cur_eis = Vec::new();
-    cur_eis.push(initial_matcher_pos(ms.to_owned(), None, rdr.peek().sp.lo));
+    cur_eis.push(initial_matcher_pos(ms.iter()
+                                       .map(|x| (*x).clone())
+                                       .collect(),
+                                     None,
+                                     rdr.peek().sp.lo));
 
     loop {
         let mut bb_eis = Vec::new(); // black-box parsed by parser.rs
@@ -274,8 +280,9 @@ pub fn parse<R: Reader>(sess: @ParseSess,
 
                         // Only touch the binders we have actually bound
                         for idx in range(ei.match_lo, ei.match_hi) {
-                            let sub = ei.matches[idx].clone();
-                            new_pos.matches[idx]
+                            let sub = (*ei.matches.get(idx)).clone();
+                            new_pos.matches
+                                   .get_mut(idx)
                                    .push(@MatchedSeq(sub, mk_sp(ei.sp_lo,
                                                                 sp.hi)));
                         }
@@ -308,7 +315,7 @@ pub fn parse<R: Reader>(sess: @ParseSess,
                     eof_eis.push(ei);
                 }
             } else {
-                match ei.elts[idx].node.clone() {
+                match ei.elts.get(idx).node.clone() {
                   /* need to descend into sequence */
                   MatchSeq(ref matchers, ref sep, zero_ok,
                            match_idx_lo, match_idx_hi) => {
@@ -317,13 +324,15 @@ pub fn parse<R: Reader>(sess: @ParseSess,
                         new_ei.idx += 1u;
                         //we specifically matched zero repeats.
                         for idx in range(match_idx_lo, match_idx_hi) {
-                            new_ei.matches[idx].push(@MatchedSeq(Vec::new(), sp));
+                            new_ei.matches
+                                  .get_mut(idx)
+                                  .push(@MatchedSeq(Vec::new(), sp));
                         }
 
                         cur_eis.push(new_ei);
                     }
 
-                    let matches = vec::from_elem(ei.matches.len(), Vec::new());
+                    let matches = Vec::from_elem(ei.matches.len(), Vec::new());
                     let ei_t = ei;
                     cur_eis.push(~MatcherPos {
                         elts: (*matchers).clone(),
@@ -352,10 +361,10 @@ pub fn parse<R: Reader>(sess: @ParseSess,
         if token_name_eq(&tok, &EOF) {
             if eof_eis.len() == 1u {
                 let mut v = Vec::new();
-                for dv in eof_eis[0u].matches.mut_iter() {
+                for dv in eof_eis.get_mut(0).matches.mut_iter() {
                     v.push(dv.pop().unwrap());
                 }
-                return Success(nameize(sess, ms, v));
+                return Success(nameize(sess, ms, v.as_slice()));
             } else if eof_eis.len() > 1u {
                 return Error(sp, ~"ambiguity: multiple successful parses");
             } else {
@@ -365,7 +374,7 @@ pub fn parse<R: Reader>(sess: @ParseSess,
             if (bb_eis.len() > 0u && next_eis.len() > 0u)
                 || bb_eis.len() > 1u {
                 let nts = bb_eis.map(|ei| {
-                    match ei.elts[ei.idx].node {
+                    match ei.elts.get(ei.idx).node {
                       MatchNonterminal(bind, name, _) => {
                         format!("{} ('{}')",
                                 token::get_ident(name),
@@ -390,10 +399,10 @@ pub fn parse<R: Reader>(sess: @ParseSess,
                 let mut rust_parser = Parser(sess, cfg.clone(), rdr.dup());
 
                 let mut ei = bb_eis.pop().unwrap();
-                match ei.elts[ei.idx].node {
+                match ei.elts.get(ei.idx).node {
                   MatchNonterminal(_, name, idx) => {
                     let name_string = token::get_ident(name);
-                    ei.matches[idx].push(@MatchedNonterminal(
+                    ei.matches.get_mut(idx).push(@MatchedNonterminal(
                         parse_nt(&mut rust_parser, name_string.get())));
                     ei.idx += 1u;
                   }
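
The matcher keeps one list of matches per binder: the field's type is fixed to `Vec<Vec<@NamedMatch>>`, the table is preallocated with `Vec::from_elem(count_names(..), Vec::new())`, and each binder's list grows through `get_mut(idx).push(..)`. A compact modern sketch of that per-binder table, with plain strings standing in for `@NamedMatch`:

    fn main() {
        let binder_count = 3;
        // 2014's `Vec::from_elem(n, Vec::new())`; `vec![Vec::new(); n]` today.
        let mut matches: Vec<Vec<String>> = vec![Vec::new(); binder_count];

        // `ei.matches.get_mut(idx).push(..)` in the patch: append to one binder's list.
        matches[1].push("MatchedSeq".to_string());
        matches[1].push("MatchedNonterminal".to_string());

        assert_eq!(matches[0].len(), 0);
        assert_eq!(matches[1].len(), 2);
        println!("{:?}", matches);
    }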
index ed127c431176f8ea74f836e2ed76f8a25806efff..712d5f6bd27dac80b4a8ed525883f13bc47e482f 100644 (file)
 use parse::token::{FAT_ARROW, SEMI, NtMatchers, NtTT, EOF};
 use parse::token;
 use print;
-use std::cell::RefCell;
 use util::small_vector::SmallVector;
 
+use std::cell::RefCell;
+use std::vec_ng::Vec;
+
 struct ParserAnyMacro {
     parser: RefCell<Parser>,
 }
@@ -100,7 +102,12 @@ fn expand(&self,
               sp: Span,
               arg: &[ast::TokenTree])
               -> MacResult {
-        generic_extension(cx, sp, self.name, arg, *self.lhses, *self.rhses)
+        generic_extension(cx,
+                          sp,
+                          self.name,
+                          arg,
+                          self.lhses.as_slice(),
+                          self.rhses.as_slice())
     }
 }
 
@@ -115,7 +122,9 @@ fn generic_extension(cx: &ExtCtxt,
     if cx.trace_macros() {
         println!("{}! \\{ {} \\}",
                  token::get_ident(name),
-                 print::pprust::tt_to_str(&TTDelim(@arg.to_owned())));
+                 print::pprust::tt_to_str(&TTDelim(@arg.iter()
+                                                       .map(|x| (*x).clone())
+                                                       .collect())));
     }
 
     // Which arm's failure should we report? (the one furthest along)
@@ -128,8 +137,12 @@ fn generic_extension(cx: &ExtCtxt,
         match **lhs {
           MatchedNonterminal(NtMatchers(ref mtcs)) => {
             // `None` is because we're not interpolating
-            let arg_rdr = new_tt_reader(s_d, None, arg.to_owned());
-            match parse(cx.parse_sess(), cx.cfg(), arg_rdr, *mtcs) {
+            let arg_rdr = new_tt_reader(s_d,
+                                        None,
+                                        arg.iter()
+                                           .map(|x| (*x).clone())
+                                           .collect());
+            match parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtcs.as_slice()) {
               Success(named_matches) => {
                 let rhs = match *rhses[i] {
                     // okay, what's your transcriber?
@@ -137,7 +150,10 @@ fn generic_extension(cx: &ExtCtxt,
                         match *tt {
                             // cut off delimiters; don't parse 'em
                             TTDelim(ref tts) => {
-                                (*tts).slice(1u,(*tts).len()-1u).to_owned()
+                                (*tts).slice(1u,(*tts).len()-1u)
+                                      .iter()
+                                      .map(|x| (*x).clone())
+                                      .collect()
                             }
                             _ => cx.span_fatal(
                                 sp, "macro rhs must be delimited")
index 690ae82741cbaaa0361d23c226eaeaa7187456f8..a3f179e851ad321127f9e95c130d803b8ded428e 100644 (file)
@@ -18,6 +18,7 @@
 use parse::lexer::TokenAndSpan;
 
 use std::cell::{Cell, RefCell};
+use std::vec_ng::Vec;
 use collections::HashMap;
 
 ///an unzipping of `TokenTree`s
@@ -106,7 +107,7 @@ fn red(ad: @NamedMatch, idx: &uint) -> @NamedMatch {
                 // end of the line; duplicate henceforth
                 ad
             }
-            MatchedSeq(ref ads, _) => ads[*idx]
+            MatchedSeq(ref ads, _) => *ads.get(*idx)
         }
     }
     let repeat_idx = r.repeat_idx.borrow();
@@ -217,7 +218,8 @@ pub fn tt_next_token(r: &TtReader) -> TokenAndSpan {
             r.stack.get().idx.set(0u);
             {
                 let mut repeat_idx = r.repeat_idx.borrow_mut();
-                repeat_idx.get()[repeat_idx.get().len() - 1u] += 1u;
+                let last_repeat_idx = repeat_idx.get().len() - 1u;
+                *repeat_idx.get().get_mut(last_repeat_idx) += 1u;
             }
             match r.stack.get().sep.clone() {
               Some(tk) => {
@@ -231,7 +233,7 @@ pub fn tt_next_token(r: &TtReader) -> TokenAndSpan {
     loop { /* because it's easiest, this handles `TTDelim` not starting
     with a `TTTok`, even though it won't happen */
         // FIXME(pcwalton): Bad copy.
-        match r.stack.get().forest[r.stack.get().idx.get()].clone() {
+        match (*r.stack.get().forest.get(r.stack.get().idx.get())).clone() {
           TTDelim(tts) => {
             r.stack.set(@TtFrame {
                 forest: tts,
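
When a repetition finishes a pass, the transcriber bumps the innermost repeat counter; with the new `Vec` the patch first computes the last index and then increments through `get_mut`, since `repeat_idx.get()[len - 1] += 1` is no longer available. In present-day Rust `last_mut` expresses the same step; a one-line sketch:

    fn main() {
        let mut repeat_idx: Vec<usize> = vec![0, 0, 2];
        // Patch: compute `len() - 1`, then `*get_mut(last) += 1`. Modern form: `last_mut`.
        if let Some(last) = repeat_idx.last_mut() {
            *last += 1;
        }
        assert_eq!(repeat_idx, vec![0, 0, 3]);
    }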
index cb6bc3c1b27711683a8691958d3decd0dfbd8144..b01ba7718ba58b4013634048adf396ffd4d410a9 100644 (file)
@@ -16,6 +16,8 @@
 use opt_vec::OptVec;
 use util::small_vector::SmallVector;
 
+use std::vec_ng::Vec;
+
 // We may eventually want to be able to fold over type parameters, too.
 pub trait Folder {
     fn fold_crate(&mut self, c: Crate) -> Crate {
@@ -23,11 +25,11 @@ fn fold_crate(&mut self, c: Crate) -> Crate {
     }
 
     fn fold_meta_items(&mut self, meta_items: &[@MetaItem]) -> Vec<@MetaItem> {
-        meta_items.map(|x| fold_meta_item_(*x, self))
+        meta_items.iter().map(|x| fold_meta_item_(*x, self)).collect()
     }
 
     fn fold_view_paths(&mut self, view_paths: &[@ViewPath]) -> Vec<@ViewPath> {
-        view_paths.map(|view_path| {
+        view_paths.iter().map(|view_path| {
             let inner_view_path = match view_path.node {
                 ViewPathSimple(ref ident, ref path, node_id) => {
                     ViewPathSimple(ident.clone(),
@@ -60,7 +62,7 @@ fn fold_view_paths(&mut self, view_paths: &[@ViewPath]) -> Vec<@ViewPath> {
                 node: inner_view_path,
                 span: self.new_span(view_path.span),
             }
-        })
+        }).collect()
     }
 
     fn fold_view_item(&mut self, vi: &ViewItem) -> ViewItem {
@@ -275,7 +277,7 @@ fn fold_mac(&mut self, macro: &Mac) -> Mac {
             node: match macro.node {
                 MacInvocTT(ref p, ref tts, ctxt) => {
                     MacInvocTT(self.fold_path(p),
-                               fold_tts(*tts, self),
+                               fold_tts(tts.as_slice(), self),
                                ctxt)
                 }
             },
@@ -284,7 +286,7 @@ fn fold_mac(&mut self, macro: &Mac) -> Mac {
     }
 
     fn map_exprs(&self, f: |@Expr| -> @Expr, es: &[@Expr]) -> Vec<@Expr> {
-        es.map(|x| f(*x))
+        es.iter().map(|x| f(*x)).collect()
     }
 
     fn new_id(&mut self, i: NodeId) -> NodeId {
@@ -371,20 +373,20 @@ fn fold_arg_<T: Folder>(a: &Arg, fld: &mut T) -> Arg {
 // token::LIFETIME are certainly not loop labels. But we can't tell in their
 // token form. So this is less ideal and hacky but it works.
 pub fn fold_tts<T: Folder>(tts: &[TokenTree], fld: &mut T) -> Vec<TokenTree> {
-    tts.map(|tt| {
+    tts.iter().map(|tt| {
         match *tt {
             TTTok(span, ref tok) =>
             TTTok(span,maybe_fold_ident(tok,fld)),
-            TTDelim(tts) => TTDelim(@fold_tts(*tts, fld)),
+            TTDelim(tts) => TTDelim(@fold_tts(tts.as_slice(), fld)),
             TTSeq(span, pattern, ref sep, is_optional) =>
             TTSeq(span,
-                  @fold_tts(*pattern, fld),
+                  @fold_tts(pattern.as_slice(), fld),
                   sep.as_ref().map(|tok|maybe_fold_ident(tok,fld)),
                   is_optional),
             TTNonterminal(sp,ref ident) =>
             TTNonterminal(sp,fld.fold_ident(*ident))
         }
-    })
+    }).collect()
 }
 
 // apply ident folder if it's an ident, otherwise leave it alone
@@ -518,7 +520,7 @@ pub fn noop_fold_view_item<T: Folder>(vi: &ViewItem, folder: &mut T)
                               folder.new_id(node_id))
         }
         ViewItemUse(ref view_paths) => {
-            ViewItemUse(folder.fold_view_paths(*view_paths))
+            ViewItemUse(folder.fold_view_paths(view_paths.as_slice()))
         }
     };
     ViewItem {
@@ -881,7 +883,7 @@ mod test {
     // this version doesn't care about getting comments or docstrings in.
     fn fake_print_crate(s: &mut pprust::State,
                         krate: &ast::Crate) -> io::IoResult<()> {
-        pprust::print_mod(s, &krate.module, krate.attrs)
+        pprust::print_mod(s, &krate.module, krate.attrs.as_slice())
     }
 
     // change every identifier to "zz"
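
Throughout fold.rs the old slice method `.map(..)`, which produced a `~[T]`, is rewritten as the iterator chain `.iter().map(..).collect()` so the result lands in the new `Vec<T>`. The same idiom is still current; a tiny sketch (the `folded(..)` formatting is only illustrative):

    fn main() {
        let metas = ["cfg", "deriving", "inline"];
        // Old: `metas.map(|x| fold_meta_item_(*x, self))` returning ~[T].
        // New (and modern): iterate, map, and collect into a Vec.
        let folded: Vec<String> = metas.iter().map(|m| format!("folded({})", m)).collect();
        assert_eq!(folded.len(), 3);
        println!("{:?}", folded);
    }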
index 5524fdf1caf9c719e1ba6e28b14678ca2d18a588..ec81fff51c791ae7f82bc88047ad80e909c67bf9 100644 (file)
@@ -15,8 +15,9 @@
  * other useful things like `push()` and `len()`.
  */
 
-use std::vec;
 use std::default::Default;
+use std::vec;
+use std::vec_ng::Vec;
 
 #[deriving(Clone, Encodable, Decodable, Hash)]
 pub enum OptVec<T> {
@@ -87,7 +88,7 @@ pub fn map_move<U>(self, op: |T| -> U) -> OptVec<U> {
     pub fn get<'a>(&'a self, i: uint) -> &'a T {
         match *self {
             Empty => fail!("invalid index {}", i),
-            Vec(ref v) => &v[i]
+            Vec(ref v) => v.get(i)
         }
     }
 
@@ -147,7 +148,7 @@ pub fn prepend(&self, t: T) -> OptVec<T> {
         let mut v0 = vec!(t);
         match *self {
             Empty => {}
-            Vec(ref v1) => { v0.push_all(*v1); }
+            Vec(ref v1) => { v0.push_all(v1.as_slice()); }
         }
         return Vec(v0);
     }
index 25bd051a69d5eb119a3b544867df19d2fa831f33..0a74c7ca8212464cf5f7ca08066a15b7060c1e89 100644 (file)
@@ -15,6 +15,8 @@
 use parse::parser::Parser;
 use parse::token::INTERPOLATED;
 
+use std::vec_ng::Vec;
+
 // a parser that can parse attributes.
 pub trait ParserAttr {
     fn parse_outer_attributes(&mut self) -> Vec<ast::Attribute> ;
index 1c45fe5ee2c5001f4aa96d9e8cf8e9d5ad95ea6c..c2a2097de2442cd3a708cf9ceaedd75e49d1e6bb 100644 (file)
@@ -20,6 +20,7 @@
 use std::io;
 use std::str;
 use std::uint;
+use std::vec_ng::Vec;
 
 #[deriving(Clone, Eq)]
 pub enum CommentStyle {
@@ -58,20 +59,20 @@ fn vertical_trim(lines: Vec<~str> ) -> Vec<~str> {
         let mut i = 0u;
         let mut j = lines.len();
         // first line of all-stars should be omitted
-        if lines.len() > 0 && lines[0].chars().all(|c| c == '*') {
+        if lines.len() > 0 && lines.get(0).chars().all(|c| c == '*') {
             i += 1;
         }
-        while i < j && lines[i].trim().is_empty() {
+        while i < j && lines.get(i).trim().is_empty() {
             i += 1;
         }
         // like the first, a last line of all stars should be omitted
-        if j > i && lines[j - 1].chars().skip(1).all(|c| c == '*') {
+        if j > i && lines.get(j - 1).chars().skip(1).all(|c| c == '*') {
             j -= 1;
         }
-        while j > i && lines[j - 1].trim().is_empty() {
+        while j > i && lines.get(j - 1).trim().is_empty() {
             j -= 1;
         }
-        return lines.slice(i, j).to_owned();
+        return lines.slice(i, j).iter().map(|x| (*x).clone()).collect();
     }
 
     /// remove a "[ \t]*\*" block from each line, if possible
index 677881de684e43d8592422fe27ac398caa0750c9..884fc306f22ea6684c461378b1e206d751adbe17 100644 (file)
@@ -1005,6 +1005,7 @@ mod test {
     use parse::token;
     use parse::token::{str_to_ident};
     use std::io::util;
+    use std::vec_ng::Vec;
 
     // represents a testing reader (incl. both reader and interner)
     struct Env {
index 40c9f346007b4a5056b0c78f4746754506cce125..9e5db1770bf311ecd0c875ec4b806050fdd2f1d4 100644 (file)
@@ -21,6 +21,7 @@
 use std::cell::RefCell;
 use std::io::File;
 use std::str;
+use std::vec_ng::Vec;
 
 pub mod lexer;
 pub mod parser;
@@ -288,6 +289,7 @@ mod test {
     use std::io;
     use std::io::MemWriter;
     use std::str;
+    use std::vec_ng::Vec;
     use codemap::{Span, BytePos, Spanned};
     use opt_vec;
     use ast;
@@ -362,27 +364,28 @@ fn sp(a: u32, b: u32) -> Span {
     // check the token-tree-ization of macros
     #[test] fn string_to_tts_macro () {
         let tts = string_to_tts(~"macro_rules! zip (($a)=>($a))");
-        let tts: &[ast::TokenTree] = tts;
+        let tts: &[ast::TokenTree] = tts.as_slice();
         match tts {
             [ast::TTTok(_,_),
              ast::TTTok(_,token::NOT),
              ast::TTTok(_,_),
              ast::TTDelim(delim_elts)] => {
-                let delim_elts: &[ast::TokenTree] = *delim_elts;
+                let delim_elts: &[ast::TokenTree] = delim_elts.as_slice();
                 match delim_elts {
                     [ast::TTTok(_,token::LPAREN),
                      ast::TTDelim(first_set),
                      ast::TTTok(_,token::FAT_ARROW),
                      ast::TTDelim(second_set),
                      ast::TTTok(_,token::RPAREN)] => {
-                        let first_set: &[ast::TokenTree] = *first_set;
+                        let first_set: &[ast::TokenTree] =
+                            first_set.as_slice();
                         match first_set {
                             [ast::TTTok(_,token::LPAREN),
                              ast::TTTok(_,token::DOLLAR),
                              ast::TTTok(_,_),
                              ast::TTTok(_,token::RPAREN)] => {
                                 let second_set: &[ast::TokenTree] =
-                                    *second_set;
+                                    second_set.as_slice();
                                 match second_set {
                                     [ast::TTTok(_,token::LPAREN),
                                      ast::TTTok(_,token::DOLLAR),
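
The token-tree tests match fixed-length slice patterns against the parsed trees, so each `Vec` is first converted with `as_slice()` before the `match`. Slice patterns still work the same way on a slice today; a small sketch with strings standing in for `ast::TokenTree`:

    fn main() {
        let tts = vec!["macro_rules", "!", "zip", "(delimited body)"];
        // Patch: `let tts: &[ast::TokenTree] = tts.as_slice();` before matching.
        match tts.as_slice() {
            [name, "!", _, body] => println!("macro {} with body {}", name, body),
            _ => panic!("unexpected token-tree shape"),
        }
    }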
index ceafa10cbecf0ada98325f236d1ed9458533be5a..9b209aadf19e50b91b61f6d8817980f461e3db5c 100644 (file)
@@ -82,7 +82,8 @@
 use collections::HashSet;
 use std::kinds::marker;
 use std::mem::replace;
-use std::vec;
+use std::vec_ng::Vec;
+use std::vec_ng;
 
 #[allow(non_camel_case_types)]
 #[deriving(Eq)]
@@ -270,7 +271,7 @@ fn maybe_append(lhs: Vec<Attribute> , rhs: Option<Vec<Attribute> >)
              -> Vec<Attribute> {
     match rhs {
         None => lhs,
-        Some(ref attrs) => vec_ng::append(lhs, (*attrs))
+        Some(ref attrs) => vec_ng::append(lhs, attrs.as_slice())
     }
 }
 
@@ -406,8 +407,11 @@ fn tokens_to_str(tokens: &[token::Token]) -> ~str {
         } else if inedible.contains(&self.token) {
             // leave it in the input
         } else {
-            let expected = vec_ng::append(edible.to_owned(), inedible);
-            let expect = tokens_to_str(expected);
+            let expected = vec_ng::append(edible.iter()
+                                                .map(|x| (*x).clone())
+                                                .collect(),
+                                          inedible);
+            let expect = tokens_to_str(expected.as_slice());
             let actual = self.this_token_to_str();
             self.fatal(
                 if expected.len() != 1 {
@@ -445,8 +449,12 @@ pub fn commit_expr(&mut self, e: @Expr, edible: &[token::Token], inedible: &[tok
         match e.node {
             ExprPath(..) => {
                // might be unit-struct construction; check for recoverable input error.
-                let expected = vec_ng::append(edible.to_owned(), inedible);
-                self.check_for_erroneous_unit_struct_expecting(expected);
+                let expected = vec_ng::append(edible.iter()
+                                                    .map(|x| (*x).clone())
+                                                    .collect(),
+                                              inedible);
+                self.check_for_erroneous_unit_struct_expecting(
+                    expected.as_slice());
             }
             _ => {}
         }
@@ -464,8 +472,12 @@ pub fn commit_stmt(&mut self, s: @Stmt, edible: &[token::Token], inedible: &[tok
         debug!("commit_stmt {:?}", s);
         let _s = s; // unused, but future checks might want to inspect `s`.
         if self.last_token.as_ref().map_or(false, |t| is_ident_or_path(*t)) {
-            let expected = vec_ng::append(edible.to_owned(), inedible);
-            self.check_for_erroneous_unit_struct_expecting(expected);
+            let expected = vec_ng::append(edible.iter()
+                                                .map(|x| (*x).clone())
+                                                .collect(),
+                                          inedible.as_slice());
+            self.check_for_erroneous_unit_struct_expecting(
+                expected.as_slice());
         }
         self.expect_one_of(edible, inedible)
     }
@@ -1082,7 +1094,7 @@ pub fn parse_trait_methods(&mut self) -> Vec<TraitMethod> {
                 debug!("parse_trait_methods(): parsing provided method");
                 let (inner_attrs, body) =
                     p.parse_inner_attrs_and_block();
-                let attrs = vec_ng::append(attrs, inner_attrs);
+                let attrs = vec_ng::append(attrs, inner_attrs.as_slice());
                 Provided(@ast::Method {
                     ident: ident,
                     attrs: attrs,
@@ -1189,7 +1201,7 @@ pub fn parse_ty(&mut self, _: bool) -> P<Ty> {
 
                 if ts.len() == 1 && !one_tuple {
                     self.expect(&token::RPAREN);
-                    return ts[0]
+                    return *ts.get(0)
                 }
 
                 let t = TyTup(ts);
@@ -1769,7 +1781,7 @@ pub fn parse_bottom_expr(&mut self) -> @Expr {
             self.commit_expr_expecting(*es.last().unwrap(), token::RPAREN);
 
             return if es.len() == 1 && !trailing_comma {
-                self.mk_expr(lo, hi, ExprParen(es[0]))
+                self.mk_expr(lo, hi, ExprParen(*es.get(0)))
             }
             else {
                 self.mk_expr(lo, hi, ExprTup(es))
@@ -1859,7 +1871,9 @@ pub fn parse_bottom_expr(&mut self) -> @Expr {
                         seq_sep_trailing_allowed(token::COMMA),
                         |p| p.parse_expr()
                     );
-                    ex = ExprVec(vec!(first_expr) + remaining_exprs, mutbl);
+                    let mut exprs = vec!(first_expr);
+                    exprs.push_all_move(remaining_exprs);
+                    ex = ExprVec(exprs, mutbl);
                 } else {
                     // Vector with one element.
                     self.expect(&token::RBRACKET);
@@ -3327,7 +3341,7 @@ fn parse_block_tail_(&mut self, lo: BytePos, s: BlockCheckMode,
         while self.token != token::RBRACE {
             // parsing items even when they're not allowed lets us give
             // better error messages and recover more gracefully.
-            attributes_box.push_all(self.parse_outer_attributes());
+            attributes_box.push_all(self.parse_outer_attributes().as_slice());
             match self.token {
                 token::SEMI => {
                     if !attributes_box.is_empty() {
@@ -3850,7 +3864,7 @@ fn parse_method(&mut self, already_parsed_attrs: Option<Vec<Attribute> >) -> @Me
 
         let (inner_attrs, body) = self.parse_inner_attrs_and_block();
         let hi = body.span.hi;
-        let attrs = vec_ng::append(attrs, inner_attrs);
+        let attrs = vec_ng::append(attrs, inner_attrs.as_slice());
         @ast::Method {
             ident: ident,
             attrs: attrs,
@@ -4082,7 +4096,8 @@ fn parse_mod_items(&mut self,
         while self.token != term {
             let mut attrs = self.parse_outer_attributes();
             if first {
-                attrs = attrs_remaining + attrs;
+                attrs = vec_ng::append(attrs_remaining.clone(),
+                                       attrs.as_slice());
                 first = false;
             }
             debug!("parse_mod_items: parse_item_or_view_item(attrs={:?})",
@@ -4164,7 +4179,7 @@ fn eval_src_mod(&mut self,
                     -> (ast::Item_, Vec<ast::Attribute> ) {
         let mut prefix = Path::new(self.sess.cm.span_to_filename(self.span));
         prefix.pop();
-        let mod_path = Path::new(".").join_many(self.mod_path_stack);
+        let mod_path = Path::new(".").join_many(self.mod_path_stack.as_slice());
         let dir_path = prefix.join(&mod_path);
         let file_path = match ::attr::first_attr_value_str_by_name(
                 outer_attrs, "path") {
@@ -4194,7 +4209,7 @@ fn eval_src_mod(&mut self,
         };
 
         self.eval_src_mod_from_path(file_path,
-                                    outer_attrs.to_owned(),
+                                    outer_attrs.iter().map(|x| *x).collect(),
                                     id_sp)
     }
 
@@ -4231,7 +4246,7 @@ fn eval_src_mod_from_path(&mut self,
                                      &path,
                                      id_sp);
         let (inner, next) = p0.parse_inner_attrs_and_next();
-        let mod_attrs = vec_ng::append(outer_attrs, inner);
+        let mod_attrs = vec_ng::append(outer_attrs, inner.as_slice());
         let first_item_outer_attrs = next;
         let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs);
         {
@@ -4556,7 +4571,7 @@ fn parse_item_or_view_item(&mut self,
         match self.token {
             INTERPOLATED(token::NtItem(item)) => {
                 self.bump();
-                let new_attrs = vec_ng::append(attrs, item.attrs);
+                let new_attrs = vec_ng::append(attrs, item.attrs.as_slice());
                 return IoviItem(@Item {
                     attrs: new_attrs,
                     ..(*item).clone()
@@ -4662,7 +4677,8 @@ fn parse_item_or_view_item(&mut self,
         }
         if self.eat_keyword(keywords::Mod) {
             // MODULE ITEM
-            let (ident, item_, extra_attrs) = self.parse_item_mod(attrs);
+            let (ident, item_, extra_attrs) =
+                self.parse_item_mod(attrs.as_slice());
             let item = self.mk_item(lo,
                                     self.last_span.hi,
                                     ident,
@@ -4946,7 +4962,7 @@ fn parse_view_path(&mut self) -> @ViewPath {
           }
           _ => ()
         }
-        let last = path[path.len() - 1u];
+        let last = *path.get(path.len() - 1u);
         let path = ast::Path {
             span: mk_sp(lo, self.span.hi),
             global: false,
@@ -4984,7 +5000,8 @@ fn parse_items_and_view_items(&mut self,
                                   macros_allowed: bool)
                                   -> ParsedItemsAndViewItems {
         let mut attrs = vec_ng::append(first_item_attrs,
-                                    self.parse_outer_attributes());
+                                       self.parse_outer_attributes()
+                                           .as_slice());
         // First, parse view items.
         let mut view_items : Vec<ast::ViewItem> = Vec::new();
         let mut items = Vec::new();
@@ -5065,7 +5082,8 @@ fn parse_foreign_items(&mut self, first_item_attrs: Vec<Attribute> ,
                            macros_allowed: bool)
         -> ParsedItemsAndViewItems {
         let mut attrs = vec_ng::append(first_item_attrs,
-                                    self.parse_outer_attributes());
+                                       self.parse_outer_attributes()
+                                           .as_slice());
         let mut foreign_items = Vec::new();
         loop {
             match self.parse_foreign_item(attrs, macros_allowed) {
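
In the parser, attribute lists are now concatenated with `vec_ng::append(lhs, rhs.as_slice())`, and owned copies of token slices are built with `edible.iter().map(|x| (*x).clone()).collect()` instead of `to_owned()`. In present-day Rust the equivalents are `extend_from_slice` (or `extend`) and `to_vec`; a short sketch with a placeholder `Attribute` type:

    #[derive(Clone, Debug)]
    struct Attribute(&'static str);

    // Rough modern analogue of the patch's `vec_ng::append(lhs, rhs)`.
    fn append(mut lhs: Vec<Attribute>, rhs: &[Attribute]) -> Vec<Attribute> {
        lhs.extend_from_slice(rhs);
        lhs
    }

    fn main() {
        let outer = vec![Attribute("doc"), Attribute("inline")];
        let inner = [Attribute("allow")];
        let attrs = append(outer, &inner);
        assert_eq!(attrs.len(), 3);

        // The patch's `edible.iter().map(|x| (*x).clone()).collect()` is `to_vec()` today.
        let tokens = ["COMMA", "RPAREN"];
        let owned: Vec<&str> = tokens.to_vec();
        println!("{:?} {:?}", attrs, owned);
    }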
index d7d8752b0090b477fc6dc76e04aca703dd56e62e..1499a1b4c19be37efb990b1af9b7c0b0d81d36b4 100644 (file)
@@ -21,6 +21,7 @@
 use std::fmt;
 use std::local_data;
 use std::path::BytesContainer;
+use std::vec_ng::Vec;
 
 #[allow(non_camel_case_types)]
 #[deriving(Clone, Encodable, Decodable, Eq, Hash, Show)]
@@ -412,13 +413,11 @@ fn mk_fresh_ident_interner() -> IdentInterner {
         // The indices here must correspond to the numbers in
         // special_idents, in Keyword to_ident(), and in static
         // constants below.
-        let init_vec = vec!(
-            $( $si_str, )*
-            $( $sk_str, )*
-            $( $rk_str, )*
-        );
-
-        interner::StrInterner::prefill(init_vec)
+        let mut init_vec = Vec::new();
+        $(init_vec.push($si_str);)*
+        $(init_vec.push($sk_str);)*
+        $(init_vec.push($rk_str);)*
+        interner::StrInterner::prefill(init_vec.as_slice())
     }
 }}
 
index 151f9c8b327ea23b3e5919461feaacab949caeaf..e9e0e4835933bd8785749573299bd6f778929b1b 100644 (file)
@@ -62,7 +62,7 @@
  */
 
 use std::io;
-use std::vec;
+use std::vec_ng::Vec;
 
 #[deriving(Clone, Eq)]
 pub enum Breaks {
@@ -131,7 +131,7 @@ pub fn buf_str(toks: Vec<Token> , szs: Vec<int> , left: uint, right: uint,
         if i != left {
             s.push_str(", ");
         }
-        s.push_str(format!("{}={}", szs[i], tok_str(toks[i].clone())));
+        s.push_str(format!("{}={}", szs.get(i), tok_str(toks.get(i).clone())));
         i += 1u;
         i %= n;
     }
@@ -156,9 +156,9 @@ pub fn mk_printer(out: ~io::Writer, linewidth: uint) -> Printer {
     // fall behind.
     let n: uint = 3 * linewidth;
     debug!("mk_printer {}", linewidth);
-    let token: Vec<Token> = vec::from_elem(n, Eof);
-    let size: Vec<int> = vec::from_elem(n, 0);
-    let scan_stack: Vec<uint> = vec::from_elem(n, 0u);
+    let token: Vec<Token> = Vec::from_elem(n, Eof);
+    let size: Vec<int> = Vec::from_elem(n, 0);
+    let scan_stack: Vec<uint> = Vec::from_elem(n, 0u);
     Printer {
         out: out,
         buf_len: n,
@@ -286,11 +286,11 @@ pub struct Printer {
 
 impl Printer {
     pub fn last_token(&mut self) -> Token {
-        self.token[self.right].clone()
+        (*self.token.get(self.right)).clone()
     }
     // be very careful with this!
     pub fn replace_last_token(&mut self, t: Token) {
-        self.token[self.right] = t;
+        *self.token.get_mut(self.right) = t;
     }
     pub fn pretty_print(&mut self, t: Token) -> io::IoResult<()> {
         debug!("pp ~[{},{}]", self.left, self.right);
@@ -298,8 +298,9 @@ pub fn pretty_print(&mut self, t: Token) -> io::IoResult<()> {
           Eof => {
             if !self.scan_stack_empty {
                 self.check_stack(0);
-                let left = self.token[self.left].clone();
-                try!(self.advance_left(left, self.size[self.left]));
+                let left = (*self.token.get(self.left)).clone();
+                let left_size = *self.size.get(self.left);
+                try!(self.advance_left(left, left_size));
             }
             self.indent(0);
             Ok(())
@@ -313,8 +314,8 @@ pub fn pretty_print(&mut self, t: Token) -> io::IoResult<()> {
             } else { self.advance_right(); }
             debug!("pp Begin({})/buffer ~[{},{}]",
                    b.offset, self.left, self.right);
-            self.token[self.right] = t;
-            self.size[self.right] = -self.right_total;
+            *self.token.get_mut(self.right) = t;
+            *self.size.get_mut(self.right) = -self.right_total;
             self.scan_push(self.right);
             Ok(())
           }
@@ -325,8 +326,8 @@ pub fn pretty_print(&mut self, t: Token) -> io::IoResult<()> {
             } else {
                 debug!("pp End/buffer ~[{},{}]", self.left, self.right);
                 self.advance_right();
-                self.token[self.right] = t;
-                self.size[self.right] = -1;
+                *self.token.get_mut(self.right) = t;
+                *self.size.get_mut(self.right) = -1;
                 self.scan_push(self.right);
                 Ok(())
             }
@@ -342,8 +343,8 @@ pub fn pretty_print(&mut self, t: Token) -> io::IoResult<()> {
                    b.offset, self.left, self.right);
             self.check_stack(0);
             self.scan_push(self.right);
-            self.token[self.right] = t;
-            self.size[self.right] = -self.right_total;
+            *self.token.get_mut(self.right) = t;
+            *self.size.get_mut(self.right) = -self.right_total;
             self.right_total += b.blank_space;
             Ok(())
           }
@@ -356,8 +357,8 @@ pub fn pretty_print(&mut self, t: Token) -> io::IoResult<()> {
                 debug!("pp String('{}')/buffer ~[{},{}]",
                        *s, self.left, self.right);
                 self.advance_right();
-                self.token[self.right] = t.clone();
-                self.size[self.right] = len;
+                *self.token.get_mut(self.right) = t.clone();
+                *self.size.get_mut(self.right) = len;
                 self.right_total += len;
                 self.check_stream()
             }
@@ -371,13 +372,15 @@ pub fn check_stream(&mut self) -> io::IoResult<()> {
             debug!("scan window is {}, longer than space on line ({})",
                    self.right_total - self.left_total, self.space);
             if !self.scan_stack_empty {
-                if self.left == self.scan_stack[self.bottom] {
+                if self.left == *self.scan_stack.get(self.bottom) {
                     debug!("setting {} to infinity and popping", self.left);
-                    self.size[self.scan_pop_bottom()] = SIZE_INFINITY;
+                    let scanned = self.scan_pop_bottom();
+                    *self.size.get_mut(scanned) = SIZE_INFINITY;
                 }
             }
-            let left = self.token[self.left].clone();
-            try!(self.advance_left(left, self.size[self.left]));
+            let left = (*self.token.get(self.left)).clone();
+            let left_size = *self.size.get(self.left);
+            try!(self.advance_left(left, left_size));
             if self.left != self.right {
                 try!(self.check_stream());
             }
@@ -393,26 +396,30 @@ pub fn scan_push(&mut self, x: uint) {
             self.top %= self.buf_len;
             assert!((self.top != self.bottom));
         }
-        self.scan_stack[self.top] = x;
+        *self.scan_stack.get_mut(self.top) = x;
     }
     pub fn scan_pop(&mut self) -> uint {
         assert!((!self.scan_stack_empty));
-        let x = self.scan_stack[self.top];
+        let x = *self.scan_stack.get(self.top);
         if self.top == self.bottom {
             self.scan_stack_empty = true;
-        } else { self.top += self.buf_len - 1u; self.top %= self.buf_len; }
+        } else {
+            self.top += self.buf_len - 1u; self.top %= self.buf_len;
+        }
         return x;
     }
     pub fn scan_top(&mut self) -> uint {
         assert!((!self.scan_stack_empty));
-        return self.scan_stack[self.top];
+        return *self.scan_stack.get(self.top);
     }
     pub fn scan_pop_bottom(&mut self) -> uint {
         assert!((!self.scan_stack_empty));
-        let x = self.scan_stack[self.bottom];
+        let x = *self.scan_stack.get(self.bottom);
         if self.top == self.bottom {
             self.scan_stack_empty = true;
-        } else { self.bottom += 1u; self.bottom %= self.buf_len; }
+        } else {
+            self.bottom += 1u; self.bottom %= self.buf_len;
+        }
         return x;
     }
     pub fn advance_right(&mut self) {
@@ -435,8 +442,9 @@ pub fn advance_left(&mut self, x: Token, L: int) -> io::IoResult<()> {
             if self.left != self.right {
                 self.left += 1u;
                 self.left %= self.buf_len;
-                let left = self.token[self.left].clone();
-                try!(self.advance_left(left, self.size[self.left]));
+                let left = (*self.token.get(self.left)).clone();
+                let left_size = *self.size.get(self.left);
+                try!(self.advance_left(left, left_size));
             }
             ret
         } else {
@@ -446,22 +454,28 @@ pub fn advance_left(&mut self, x: Token, L: int) -> io::IoResult<()> {
     pub fn check_stack(&mut self, k: int) {
         if !self.scan_stack_empty {
             let x = self.scan_top();
-            match self.token[x] {
-              Begin(_) => {
+            match self.token.get(x) {
+              &Begin(_) => {
                 if k > 0 {
-                    self.size[self.scan_pop()] = self.size[x] +
+                    let popped = self.scan_pop();
+                    *self.size.get_mut(popped) = *self.size.get(x) +
                         self.right_total;
                     self.check_stack(k - 1);
                 }
               }
-              End => {
+              &End => {
                 // paper says + not =, but that makes no sense.
-                self.size[self.scan_pop()] = 1;
+                let popped = self.scan_pop();
+                *self.size.get_mut(popped) = 1;
                 self.check_stack(k + 1);
               }
               _ => {
-                self.size[self.scan_pop()] = self.size[x] + self.right_total;
-                if k > 0 { self.check_stack(k); }
+                let popped = self.scan_pop();
+                *self.size.get_mut(popped) = *self.size.get(x) +
+                    self.right_total;
+                if k > 0 {
+                    self.check_stack(k);
+                }
               }
             }
         }
@@ -481,7 +495,7 @@ pub fn get_top(&mut self) -> PrintStackElem {
         let print_stack = &mut self.print_stack;
         let n = print_stack.len();
         if n != 0u {
-            print_stack[n - 1u]
+            *print_stack.get(n - 1u)
         } else {
             PrintStackElem {
                 offset: 0,
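
The pretty printer's ring buffers (`token`, `size`, `scan_stack`) are preallocated with `Vec::from_elem(n, ..)` and then read and written in place through `get` and `get_mut` rather than `buf[i]`. A minimal sketch of that fixed-capacity ring-buffer write in modern Rust (sizes and values are made up):

    fn main() {
        let n = 4;
        // 2014: `Vec::from_elem(n, 0)`; today: `vec![0; n]`.
        let mut size: Vec<usize> = vec![0; n];
        let mut right = 0;

        // The patch's `*self.size.get_mut(self.right) = len;`: write one slot in place.
        for len in [3, 1, 4, 1, 5] {
            right = (right + 1) % n;   // advance_right(): wrap around the buffer
            size[right] = len;         // get_mut-style in-place write
        }
        println!("{:?}", size);
    }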
index 25ff793d34bcaaf8272ddaec469176eafecfbcce..d027efc1d42f6247735b78b611590d6bf2d327c6 100644 (file)
@@ -33,6 +33,7 @@
 use std::str;
 use std::io;
 use std::io::MemWriter;
+use std::vec_ng::Vec;
 
 // The &mut State is stored here to prevent recursive type.
 pub enum AnnNode<'a, 'b> {
@@ -147,7 +148,7 @@ pub fn print_crate(cm: @CodeMap,
 }
 
 pub fn print_crate_(s: &mut State, krate: &ast::Crate) -> io::IoResult<()> {
-    try!(print_mod(s, &krate.module, krate.attrs));
+    try!(print_mod(s, &krate.module, krate.attrs.as_slice()));
     try!(print_remaining_comments(s));
     try!(eof(&mut s.s));
     Ok(())
@@ -319,7 +320,7 @@ pub fn in_cbox(s: &mut State) -> bool {
     let boxes = s.boxes.borrow();
     let len = boxes.get().len();
     if len == 0u { return false; }
-    return boxes.get()[len - 1u] == pp::Consistent;
+    return *boxes.get().get(len - 1u) == pp::Consistent;
 }
 
 pub fn hardbreak_if_not_bol(s: &mut State) -> io::IoResult<()> {
@@ -463,7 +464,7 @@ pub fn print_type(s: &mut State, ty: &ast::Ty) -> io::IoResult<()> {
         }
         ast::TyTup(ref elts) => {
             try!(popen(s));
-            try!(commasep(s, Inconsistent, *elts, print_type_ref));
+            try!(commasep(s, Inconsistent, elts.as_slice(), print_type_ref));
             if elts.len() == 1 {
                 try!(word(&mut s.s, ","));
             }
@@ -517,7 +518,7 @@ pub fn print_foreign_item(s: &mut State,
                           item: &ast::ForeignItem) -> io::IoResult<()> {
     try!(hardbreak_if_not_bol(s));
     try!(maybe_print_comment(s, item.span.lo));
-    try!(print_outer_attributes(s, item.attrs));
+    try!(print_outer_attributes(s, item.attrs.as_slice()));
     match item.node {
         ast::ForeignItemFn(decl, ref generics) => {
             try!(print_fn(s, decl, None, AbiSet::Rust(), item.ident, generics,
@@ -545,7 +546,7 @@ pub fn print_foreign_item(s: &mut State,
 pub fn print_item(s: &mut State, item: &ast::Item) -> io::IoResult<()> {
     try!(hardbreak_if_not_bol(s));
     try!(maybe_print_comment(s, item.span.lo));
-    try!(print_outer_attributes(s, item.attrs));
+    try!(print_outer_attributes(s, item.attrs.as_slice()));
     {
         let ann_node = NodeItem(s, item);
         try!(s.ann.pre(ann_node));
@@ -580,21 +581,21 @@ pub fn print_item(s: &mut State, item: &ast::Item) -> io::IoResult<()> {
             item.vis
         ));
         try!(word(&mut s.s, " "));
-        try!(print_block_with_attrs(s, body, item.attrs));
+        try!(print_block_with_attrs(s, body, item.attrs.as_slice()));
       }
       ast::ItemMod(ref _mod) => {
         try!(head(s, visibility_qualified(item.vis, "mod")));
         try!(print_ident(s, item.ident));
         try!(nbsp(s));
         try!(bopen(s));
-        try!(print_mod(s, _mod, item.attrs));
+        try!(print_mod(s, _mod, item.attrs.as_slice()));
         try!(bclose(s, item.span));
       }
       ast::ItemForeignMod(ref nmod) => {
         try!(head(s, "extern"));
         try!(word_nbsp(s, nmod.abis.to_str()));
         try!(bopen(s));
-        try!(print_foreign_mod(s, nmod, item.attrs));
+        try!(print_foreign_mod(s, nmod, item.attrs.as_slice()));
         try!(bclose(s, item.span));
       }
       ast::ItemTy(ty, ref params) => {
@@ -646,7 +647,7 @@ pub fn print_item(s: &mut State, item: &ast::Item) -> io::IoResult<()> {
 
         try!(space(&mut s.s));
         try!(bopen(s));
-        try!(print_inner_attributes(s, item.attrs));
+        try!(print_inner_attributes(s, item.attrs.as_slice()));
         for meth in methods.iter() {
            try!(print_method(s, *meth));
         }
@@ -706,7 +707,7 @@ pub fn print_enum_def(s: &mut State, enum_definition: &ast::EnumDef,
     try!(print_ident(s, ident));
     try!(print_generics(s, generics));
     try!(space(&mut s.s));
-    try!(print_variants(s, enum_definition.variants, span));
+    try!(print_variants(s, enum_definition.variants.as_slice(), span));
     Ok(())
 }
 
@@ -717,7 +718,7 @@ pub fn print_variants(s: &mut State,
     for &v in variants.iter() {
         try!(space_if_not_bol(s));
         try!(maybe_print_comment(s, v.span.lo));
-        try!(print_outer_attributes(s, v.node.attrs));
+        try!(print_outer_attributes(s, v.node.attrs.as_slice()));
         try!(ibox(s, indent_unit));
         try!(print_variant(s, v));
         try!(word(&mut s.s, ","));
@@ -761,7 +762,10 @@ pub fn print_struct(s: &mut State,
     if ast_util::struct_def_is_tuple_like(struct_def) {
         if !struct_def.fields.is_empty() {
             try!(popen(s));
-            try!(commasep(s, Inconsistent, struct_def.fields, |s, field| {
+            try!(commasep(s,
+                          Inconsistent,
+                          struct_def.fields.as_slice(),
+                          |s, field| {
                 match field.node.kind {
                     ast::NamedField(..) => fail!("unexpected named field"),
                     ast::UnnamedField => {
@@ -787,7 +791,8 @@ pub fn print_struct(s: &mut State,
                 ast::NamedField(ident, visibility) => {
                     try!(hardbreak_if_not_bol(s));
                     try!(maybe_print_comment(s, field.span.lo));
-                    try!(print_outer_attributes(s, field.node.attrs));
+                    try!(print_outer_attributes(s,
+                                                field.node.attrs.as_slice()));
                     try!(print_visibility(s, visibility));
                     try!(print_ident(s, ident));
                     try!(word_nbsp(s, ":"));
@@ -857,7 +862,10 @@ fn print_variant_arg(s: &mut State,
                                      arg: &ast::VariantArg) -> io::IoResult<()> {
                     print_type(s, arg.ty)
                 }
-                try!(commasep(s, Consistent, *args, print_variant_arg));
+                try!(commasep(s,
+                              Consistent,
+                              args.as_slice(),
+                              print_variant_arg));
                 try!(pclose(s));
             }
         }
@@ -881,7 +889,7 @@ fn print_variant_arg(s: &mut State,
 pub fn print_ty_method(s: &mut State, m: &ast::TypeMethod) -> io::IoResult<()> {
     try!(hardbreak_if_not_bol(s));
     try!(maybe_print_comment(s, m.span.lo));
-    try!(print_outer_attributes(s, m.attrs));
+    try!(print_outer_attributes(s, m.attrs.as_slice()));
     try!(print_ty_fn(s,
                        None,
                        None,
@@ -907,12 +915,12 @@ pub fn print_trait_method(s: &mut State,
 pub fn print_method(s: &mut State, meth: &ast::Method) -> io::IoResult<()> {
     try!(hardbreak_if_not_bol(s));
     try!(maybe_print_comment(s, meth.span.lo));
-    try!(print_outer_attributes(s, meth.attrs));
+    try!(print_outer_attributes(s, meth.attrs.as_slice()));
     try!(print_fn(s, meth.decl, Some(meth.purity), AbiSet::Rust(),
                     meth.ident, &meth.generics, Some(meth.explicit_self.node),
                     meth.vis));
     try!(word(&mut s.s, " "));
-    print_block_with_attrs(s, meth.body, meth.attrs)
+    print_block_with_attrs(s, meth.body, meth.attrs.as_slice())
 }
 
 pub fn print_outer_attributes(s: &mut State,
@@ -1184,7 +1192,7 @@ fn print_field(s: &mut State, field: &ast::Field) -> io::IoResult<()> {
             try!(word(&mut s.s, "mut"));
             if exprs.len() > 0u { try!(nbsp(s)); }
         }
-        try!(commasep_exprs(s, Inconsistent, *exprs));
+        try!(commasep_exprs(s, Inconsistent, exprs.as_slice()));
         try!(word(&mut s.s, "]"));
         try!(end(s));
       }
@@ -1207,7 +1215,11 @@ fn print_field(s: &mut State, field: &ast::Field) -> io::IoResult<()> {
       ast::ExprStruct(ref path, ref fields, wth) => {
         try!(print_path(s, path, true));
         try!(word(&mut s.s, "{"));
-        try!(commasep_cmnt(s, Consistent, (*fields), print_field, get_span));
+        try!(commasep_cmnt(s,
+                           Consistent,
+                           fields.as_slice(),
+                           print_field,
+                           get_span));
         match wth {
             Some(expr) => {
                 try!(ibox(s, indent_unit));
@@ -1225,7 +1237,7 @@ fn print_field(s: &mut State, field: &ast::Field) -> io::IoResult<()> {
       }
       ast::ExprTup(ref exprs) => {
         try!(popen(s));
-        try!(commasep_exprs(s, Inconsistent, *exprs));
+        try!(commasep_exprs(s, Inconsistent, exprs.as_slice()));
         if exprs.len() == 1 {
             try!(word(&mut s.s, ","));
         }
@@ -1233,16 +1245,16 @@ fn print_field(s: &mut State, field: &ast::Field) -> io::IoResult<()> {
       }
       ast::ExprCall(func, ref args) => {
         try!(print_expr(s, func));
-        try!(print_call_post(s, *args));
+        try!(print_call_post(s, args.as_slice()));
       }
       ast::ExprMethodCall(ident, ref tys, ref args) => {
         let base_args = args.slice_from(1);
-        try!(print_expr(s, args[0]));
+        try!(print_expr(s, *args.get(0)));
         try!(word(&mut s.s, "."));
         try!(print_ident(s, ident));
         if tys.len() > 0u {
             try!(word(&mut s.s, "::<"));
-            try!(commasep(s, Inconsistent, *tys, print_type_ref));
+            try!(commasep(s, Inconsistent, tys.as_slice(), print_type_ref));
             try!(word(&mut s.s, ">"));
         }
         try!(print_call_post(s, base_args));
@@ -1455,7 +1467,7 @@ fn print_field(s: &mut State, field: &ast::Field) -> io::IoResult<()> {
         try!(print_ident(s, id));
         if tys.len() > 0u {
             try!(word(&mut s.s, "::<"));
-            try!(commasep(s, Inconsistent, *tys, print_type_ref));
+            try!(commasep(s, Inconsistent, tys.as_slice(), print_type_ref));
             try!(word(&mut s.s, ">"));
         }
       }
@@ -1649,7 +1661,7 @@ fn print_path_(s: &mut State,
                 }
                 try!(commasep(s,
                                 Inconsistent,
-                                segment.types.map_to_vec(|&t| t),
+                                segment.types.map_to_vec(|&t| t).as_slice(),
                                 print_type_ref));
             }
 
@@ -1708,7 +1720,7 @@ pub fn print_pat(s: &mut State, pat: &ast::Pat) -> io::IoResult<()> {
           Some(ref args) => {
             if !args.is_empty() {
               try!(popen(s));
-              try!(commasep(s, Inconsistent, *args,
+              try!(commasep(s, Inconsistent, args.as_slice(),
                               |s, &p| print_pat(s, p)));
               try!(pclose(s));
             } else { }
@@ -1727,7 +1739,7 @@ fn print_field(s: &mut State, f: &ast::FieldPat) -> io::IoResult<()> {
             Ok(())
         }
         fn get_span(f: &ast::FieldPat) -> codemap::Span { return f.pat.span; }
-        try!(commasep_cmnt(s, Consistent, *fields,
+        try!(commasep_cmnt(s, Consistent, fields.as_slice(),
                              |s, f| print_field(s,f),
                              get_span));
         if etc {
@@ -1738,7 +1750,10 @@ fn print_field(s: &mut State, f: &ast::FieldPat) -> io::IoResult<()> {
       }
       ast::PatTup(ref elts) => {
         try!(popen(s));
-        try!(commasep(s, Inconsistent, *elts, |s, &p| print_pat(s, p)));
+        try!(commasep(s,
+                      Inconsistent,
+                      elts.as_slice(),
+                      |s, &p| print_pat(s, p)));
         if elts.len() == 1 {
             try!(word(&mut s.s, ","));
         }
@@ -1761,7 +1776,10 @@ fn print_field(s: &mut State, f: &ast::FieldPat) -> io::IoResult<()> {
       }
       ast::PatVec(ref before, slice, ref after) => {
         try!(word(&mut s.s, "["));
-        try!(commasep(s, Inconsistent, *before, |s, &p| print_pat(s, p)));
+        try!(commasep(s,
+                      Inconsistent,
+                      before.as_slice(),
+                      |s, &p| print_pat(s, p)));
         for &p in slice.iter() {
             if !before.is_empty() { try!(word_space(s, ",")); }
             match *p {
@@ -1773,7 +1791,10 @@ fn print_field(s: &mut State, f: &ast::FieldPat) -> io::IoResult<()> {
             try!(print_pat(s, p));
             if !after.is_empty() { try!(word_space(s, ",")); }
         }
-        try!(commasep(s, Inconsistent, *after, |s, &p| print_pat(s, p)));
+        try!(commasep(s,
+                      Inconsistent,
+                      after.as_slice(),
+                      |s, &p| print_pat(s, p)));
         try!(word(&mut s.s, "]"));
       }
     }
@@ -1842,7 +1863,7 @@ pub fn print_fn_args(s: &mut State, decl: &ast::FnDecl,
     for &explicit_self in opt_explicit_self.iter() {
         let m = match explicit_self {
             ast::SelfStatic => ast::MutImmutable,
-            _ => match decl.inputs[0].pat.node {
+            _ => match decl.inputs.get(0).pat.node {
                 ast::PatIdent(ast::BindByValue(m), _, _) => m,
                 _ => ast::MutImmutable
             }
@@ -1986,7 +2007,7 @@ fn print_item(s: &mut State, generics: &ast::Generics,
             ints.push(i);
         }
 
-        try!(commasep(s, Inconsistent, ints,
+        try!(commasep(s, Inconsistent, ints.as_slice(),
                         |s, &i| print_item(s, generics, i)));
         try!(word(&mut s.s, ">"));
     }
@@ -2041,7 +2062,7 @@ pub fn print_view_path(s: &mut State, vp: &ast::ViewPath) -> io::IoResult<()> {
             try!(print_path(s, path, false));
             try!(word(&mut s.s, "::{"));
         }
-        try!(commasep(s, Inconsistent, (*idents), |s, w| {
+        try!(commasep(s, Inconsistent, idents.as_slice(), |s, w| {
             print_ident(s, w.node.name)
         }));
         word(&mut s.s, "}")
@@ -2057,7 +2078,7 @@ pub fn print_view_paths(s: &mut State,
 pub fn print_view_item(s: &mut State, item: &ast::ViewItem) -> io::IoResult<()> {
     try!(hardbreak_if_not_bol(s));
     try!(maybe_print_comment(s, item.span.lo));
-    try!(print_outer_attributes(s, item.attrs));
+    try!(print_outer_attributes(s, item.attrs.as_slice()));
     try!(print_visibility(s, item.vis));
     match item.node {
         ast::ViewItemExternMod(id, ref optional_path, _) => {
@@ -2073,7 +2094,7 @@ pub fn print_view_item(s: &mut State, item: &ast::ViewItem) -> io::IoResult<()>
 
         ast::ViewItemUse(ref vps) => {
             try!(head(s, "use"));
-            try!(print_view_paths(s, *vps));
+            try!(print_view_paths(s, vps.as_slice()));
         }
     }
     try!(word(&mut s.s, ";"));
@@ -2103,7 +2124,7 @@ pub fn print_arg(s: &mut State, input: &ast::Arg) -> io::IoResult<()> {
             match input.pat.node {
                 ast::PatIdent(_, ref path, _) if
                     path.segments.len() == 1 &&
-                    path.segments[0].identifier.name ==
+                    path.segments.get(0).identifier.name ==
                         parse::token::special_idents::invalid.name => {
                     // Do nothing.
                 }
@@ -2286,7 +2307,7 @@ pub fn print_literal(s: &mut State, lit: &ast::Lit) -> io::IoResult<()> {
       ast::LitBinary(ref arr) => {
         try!(ibox(s, indent_unit));
         try!(word(&mut s.s, "["));
-        try!(commasep_cmnt(s, Inconsistent, *arr.borrow(),
+        try!(commasep_cmnt(s, Inconsistent, arr.borrow().as_slice(),
                              |s, u| word(&mut s.s, format!("{}", *u)),
                              |_| lit.span));
         try!(word(&mut s.s, "]"));
@@ -2303,7 +2324,7 @@ pub fn next_lit(s: &mut State, pos: BytePos) -> Option<comments::Literal> {
     match s.literals {
       Some(ref lits) => {
         while s.cur_cmnt_and_lit.cur_lit < lits.len() {
-            let ltrl = (*lits)[s.cur_cmnt_and_lit.cur_lit].clone();
+            let ltrl = (*(*lits).get(s.cur_cmnt_and_lit.cur_lit)).clone();
             if ltrl.pos > pos { return None; }
             s.cur_cmnt_and_lit.cur_lit += 1u;
             if ltrl.pos == pos { return Some(ltrl); }
@@ -2335,7 +2356,7 @@ pub fn print_comment(s: &mut State,
         comments::Mixed => {
             assert_eq!(cmnt.lines.len(), 1u);
             try!(zerobreak(&mut s.s));
-            try!(word(&mut s.s, cmnt.lines[0]));
+            try!(word(&mut s.s, *cmnt.lines.get(0)));
             try!(zerobreak(&mut s.s));
         }
         comments::Isolated => {
@@ -2352,7 +2373,7 @@ pub fn print_comment(s: &mut State,
         comments::Trailing => {
             try!(word(&mut s.s, " "));
             if cmnt.lines.len() == 1u {
-                try!(word(&mut s.s, cmnt.lines[0]));
+                try!(word(&mut s.s, *cmnt.lines.get(0)));
                 try!(hardbreak(&mut s.s));
             } else {
                 try!(ibox(s, 0u));
@@ -2414,7 +2435,7 @@ pub fn next_comment(s: &mut State) -> Option<comments::Comment> {
     match s.comments {
         Some(ref cmnts) => {
             if s.cur_cmnt_and_lit.cur_cmnt < cmnts.len() {
-                Some(cmnts[s.cur_cmnt_and_lit.cur_cmnt].clone())
+                Some((*cmnts.get(s.cur_cmnt_and_lit.cur_cmnt)).clone())
             } else {
                 None
             }
@@ -2535,6 +2556,8 @@ mod test {
     use codemap;
     use parse::token;
 
+    use std::vec_ng::Vec;
+
     #[test]
     fn test_fun_to_str() {
         let abba_ident = token::str_to_ident("abba");
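
A note on the pattern the pretty-printer hunks above keep repeating: once the attribute, argument, and field lists become `std::vec_ng::Vec<T>`, passing them to a helper that expects a slice goes through `.as_slice()`, indexing goes through `.get(idx)` (which returns a reference, hence the extra `*` and the explicit `.clone()`), and the old `*vec` auto-borrow disappears. Below is a minimal sketch of those three rewrites, using only the `Vec` methods visible in this commit (`as_slice`, `get`, `slice_from`); `print_all`, `demo`, and the data are made up for illustration and are not part of the patch.

    use std::vec_ng::Vec;

    // Helper that, like the printing functions, expects a borrowed slice.
    fn print_all(names: &[~str]) {
        for name in names.iter() {
            println!("{}", *name);
        }
    }

    fn demo(names: Vec<~str>) {
        print_all(names.as_slice());           // was `print_all(*names)`
        let first = (*names.get(0)).clone();   // was `names[0].clone()`
        print_all(names.slice_from(1));        // slice_from already yields &[~str]
        println!("first: {}", first);
    }

Whether this compiles against a modern standard library is beside the point; it mirrors the 2014-era `vec_ng` API exercised by the hunks above.
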
index 7969cacb765a47f11e5566413fd1d01bfbf76fbf..ba154a8d8923c12138a9e72a219e72f98913e22e 100644 (file)
@@ -21,6 +21,7 @@
 use std::fmt;
 use std::hash::Hash;
 use std::rc::Rc;
+use std::vec_ng::Vec;
 
 pub struct Interner<T> {
     priv map: RefCell<HashMap<T, Name>>,
@@ -68,7 +69,7 @@ pub fn gensym(&self, val: T) -> Name {
 
     pub fn get(&self, idx: Name) -> T {
         let vect = self.vect.borrow();
-        vect.get()[idx].clone()
+        (*vect.get().get(idx as uint)).clone()
     }
 
     pub fn len(&self) -> uint {
@@ -189,21 +190,21 @@ pub fn gensym_copy(&self, idx : Name) -> Name {
         let new_idx = self.len() as Name;
         // leave out of map to avoid colliding
         let mut vect = self.vect.borrow_mut();
-        let existing = vect.get()[idx].clone();
+        let existing = (*vect.get().get(idx as uint)).clone();
         vect.get().push(existing);
         new_idx
     }
 
     pub fn get(&self, idx: Name) -> RcStr {
         let vect = self.vect.borrow();
-        vect.get()[idx].clone()
+        (*vect.get().get(idx as uint)).clone()
     }
 
     /// Returns this string with lifetime tied to the interner. Since
     /// strings may never be removed from the interner, this is safe.
     pub fn get_ref<'a>(&'a self, idx: Name) -> &'a str {
         let vect = self.vect.borrow();
-        let s: &str = vect.get()[idx].as_slice();
+        let s: &str = vect.get().get(idx as uint).as_slice();
         unsafe {
             cast::transmute(s)
         }
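
The Interner hunks show the second recurring fix: a `Name` can no longer be used directly as an index, so lookups into the backing vector cast to `uint` and clone through the reference that `get` returns. A small sketch of that shape, assuming a `RefCell<Vec<..>>` store like the one above and using `u32` to stand in for `Name` (the real type is not spelled out in this hunk); `lookup` is an invented helper.

    use std::cell::RefCell;
    use std::vec_ng::Vec;

    fn lookup(vect: &RefCell<Vec<~str>>, idx: u32) -> ~str {
        let v = vect.borrow();
        // borrow() yields a Ref; get() on it gives &Vec, and Vec::get takes a
        // uint and returns a reference, so widen the index and clone through it.
        (*v.get().get(idx as uint)).clone()
    }
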
index 36243350d2161422fc74d8de8fc207f2c270cf70..03fc30e2fd771dcca105928d87b21c5b152315aa 100644 (file)
@@ -15,6 +15,8 @@
 use parse::parser::Parser;
 use parse::token;
 
+use std::vec_ng::Vec;
+
 // map a string to tts, using a made-up filename: return both the TokenTree's
 // and the ParseSess
 pub fn string_to_tts_and_sess (source_str : ~str) -> (Vec<ast::TokenTree> , @ParseSess) {
index 22bf0f0a53f38e18005a922a0aee3c48bdaae417..9eb9871bb21414206e4ba7cdbd4c071e9d21fc37 100644 (file)
@@ -7,8 +7,10 @@
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
+
 use std::mem;
-use std::vec;
+use std::vec_ng::Vec;
+use std::vec_ng;
 
 /// A vector type optimized for cases where the size is almost always 0 or 1
 pub enum SmallVector<T> {
@@ -73,7 +75,7 @@ pub fn push_all(&mut self, other: SmallVector<T>) {
     pub fn get<'a>(&'a self, idx: uint) -> &'a T {
         match *self {
             One(ref v) if idx == 0 => v,
-            Many(ref vs) => &vs[idx],
+            Many(ref vs) => vs.get(idx),
             _ => fail!("out of bounds access")
         }
     }
@@ -104,7 +106,7 @@ pub fn move_iter(self) -> MoveItems<T> {
 pub enum MoveItems<T> {
     priv ZeroIterator,
     priv OneIterator(T),
-    priv ManyIterator(vec::MoveItems<T>),
+    priv ManyIterator(vec_ng::MoveItems<T>),
 }
 
 impl<T> Iterator<T> for MoveItems<T> {
@@ -136,6 +138,8 @@ fn size_hint(&self) -> (uint, Option<uint>) {
 mod test {
     use super::*;
 
+    use std::vec_ng::Vec;
+
     #[test]
     fn test_len() {
         let v: SmallVector<int> = SmallVector::zero();
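
For SmallVector the change is structural rather than call-site by call-site: the `Many` variant and the matching arm of the move iterator now sit on top of `std::vec_ng`, so element access uses `Vec::get` instead of slice indexing and the iterator type comes from `vec_ng::MoveItems`. Here is a stripped-down sketch of that layout; `Tiny` and `TinyItems` are invented names, and `Vec::move_iter` is assumed from the `ManyIterator(vec_ng::MoveItems<T>)` variant above.

    use std::vec_ng;
    use std::vec_ng::Vec;

    enum Tiny<T> {
        One(T),
        Many(Vec<T>),
    }

    enum TinyItems<T> {
        OneItem(Option<T>),
        ManyItems(vec_ng::MoveItems<T>),   // was vec::MoveItems<T>
    }

    impl<T> Tiny<T> {
        fn get<'a>(&'a self, idx: uint) -> &'a T {
            match *self {
                One(ref v) if idx == 0 => v,
                Many(ref vs) => vs.get(idx),   // was &vs[idx]
                _ => fail!("out of bounds access")
            }
        }

        fn move_iter(self) -> TinyItems<T> {
            match self {
                One(v) => OneItem(Some(v)),
                Many(vs) => ManyItems(vs.move_iter()),
            }
        }
    }
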
index 39989977d69faa5e213ced72beeffd4666f24566..2edfd367f4ef2712442920a25ed42d24bae74d1f 100644 (file)
@@ -637,7 +637,7 @@ pub fn walk_expr<E: Clone, V: Visitor<E>>(visitor: &mut V, expression: &Expr, en
             visitor.visit_expr(subexpression, env.clone())
         }
         ExprVec(ref subexpressions, _) => {
-            walk_exprs(visitor, *subexpressions, env.clone())
+            walk_exprs(visitor, subexpressions.as_slice(), env.clone())
         }
         ExprRepeat(element, count, _) => {
             visitor.visit_expr(element, env.clone());
@@ -662,7 +662,7 @@ pub fn walk_expr<E: Clone, V: Visitor<E>>(visitor: &mut V, expression: &Expr, en
             visitor.visit_expr(callee_expression, env.clone())
         }
         ExprMethodCall(_, ref types, ref arguments) => {
-            walk_exprs(visitor, *arguments, env.clone());
+            walk_exprs(visitor, arguments.as_slice(), env.clone());
             for &typ in types.iter() {
                 visitor.visit_ty(typ, env.clone())
             }