git.lizzy.rs Git - rust.git/commitdiff
Removed deprecated functions `map` and `flat_map` for vectors and slices.
author Marvin Löbel <loebel.marvin@gmail.com>
Fri, 28 Mar 2014 19:42:34 +0000 (20:42 +0100)
committer Marvin Löbel <loebel.marvin@gmail.com>
Sun, 30 Mar 2014 01:47:04 +0000 (03:47 +0200)
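
The pattern applied throughout this commit: callers of the removed slice/vector `map` method (and of the free function `flat_map`) switch to the iterator adaptors plus `collect()`. Below is a minimal before/after sketch, not taken from the diff itself, assuming the iterator APIs available around this point in time (`iter`, `map`, `flat_map`, `move_iter`, `collect`):

~~~~
fn main() {
    let xs = [1, 2, 3];

    // Old (removed): let squares = xs.map(|&x| x * x);
    // New: go through the iterator and collect into the desired container.
    let squares: Vec<int> = xs.iter().map(|&x| x * x).collect();
    assert_eq!(squares.len(), 3);

    // Old (removed): let pairs = flat_map(xs, |&x| ~[x, x]);
    // New: flat_map over an iterator of the per-element results.
    let pairs: Vec<int> = xs.iter().flat_map(|&x| vec!(x, x).move_iter()).collect();
    assert_eq!(pairs.len(), 6);
}
~~~~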
58 files changed:
src/doc/tutorial.md
src/libnative/io/process.rs
src/librustc/back/lto.rs
src/librustc/driver/driver.rs
src/librustc/lib/llvm.rs
src/librustc/metadata/filesearch.rs
src/librustc/middle/check_match.rs
src/librustc/middle/resolve.rs
src/librustc/middle/resolve_lifetime.rs
src/librustc/middle/subst.rs
src/librustc/middle/trans/_match.rs
src/librustc/middle/trans/adt.rs
src/librustc/middle/trans/asm.rs
src/librustc/middle/trans/build.rs
src/librustc/middle/trans/builder.rs
src/librustc/middle/trans/common.rs
src/librustc/middle/trans/debuginfo.rs
src/librustc/middle/trans/expr.rs
src/librustc/middle/trans/foreign.rs
src/librustc/middle/trans/llrepr.rs
src/librustc/middle/trans/meth.rs
src/librustc/middle/trans/monomorphize.rs
src/librustc/middle/ty.rs
src/librustc/middle/typeck/astconv.rs
src/librustc/middle/typeck/check/_match.rs
src/librustc/middle/typeck/check/method.rs
src/librustc/middle/typeck/check/mod.rs
src/librustc/middle/typeck/check/writeback.rs
src/librustc/middle/typeck/collect.rs
src/librustc/middle/typeck/infer/mod.rs
src/librustc/middle/typeck/infer/region_inference/mod.rs
src/librustc/middle/typeck/infer/to_str.rs
src/librustc/middle/typeck/mod.rs
src/librustc/util/common.rs
src/librustc/util/ppaux.rs
src/librustdoc/passes.rs
src/libserialize/json.rs
src/libstd/ascii.rs
src/libstd/io/net/addrinfo.rs
src/libstd/slice.rs
src/libstd/vec.rs
src/libsyntax/ast_util.rs
src/libsyntax/ext/build.rs
src/libsyntax/ext/deriving/clone.rs
src/libsyntax/ext/deriving/default.rs
src/libsyntax/ext/deriving/generic.rs
src/libsyntax/ext/deriving/rand.rs
src/libsyntax/ext/deriving/ty.rs
src/libsyntax/ext/deriving/zero.rs
src/libsyntax/ext/expand.rs
src/libsyntax/ext/quote.rs
src/libsyntax/ext/source_util.rs
src/libsyntax/ext/tt/macro_parser.rs
src/libsyntax/fold.rs
src/libsyntax/parse/comments.rs
src/libsyntax/util/parser_testing.rs
src/test/auxiliary/issue_2723_a.rs
src/test/run-pass/issue-3563-3.rs

index b0d9e3e9802f31b36b47979a3a714c1322cf0acb..e15145e5f1d3421fe55dfea317115266e05f6ced 100644 (file)
@@ -1764,7 +1764,10 @@ access local variables in the enclosing scope.
 
 ~~~~
 let mut max = 0;
-[1, 2, 3].map(|x| if *x > max { max = *x });
+let f = |x: int| if x > max { max = x };
+for x in [1, 2, 3].iter() {
+    f(*x);
+}
 ~~~~
 
 Stack closures are very efficient because their environment is
index 463f9f8bedded3e1063e68319a63377860118d19..b0f2495e98c4d86f69346a0a5b938a6bb6480ab0 100644 (file)
@@ -597,7 +597,7 @@ fn with_argv<T>(prog: &str, args: &[~str], cb: proc:(**libc::c_char) -> T) -> T
     // Next, convert each of the byte strings into a pointer. This is
     // technically unsafe as the caller could leak these pointers out of our
     // scope.
-    let mut ptrs = tmps.map(|tmp| tmp.with_ref(|buf| buf));
+    let mut ptrs: Vec<_> = tmps.iter().map(|tmp| tmp.with_ref(|buf| buf)).collect();
 
     // Finally, make sure we add a null pointer.
     ptrs.push(ptr::null());
@@ -622,7 +622,9 @@ fn with_envp<T>(env: Option<~[(~str, ~str)]>, cb: proc:(*c_void) -> T) -> T {
             }
 
             // Once again, this is unsafe.
-            let mut ptrs = tmps.map(|tmp| tmp.with_ref(|buf| buf));
+            let mut ptrs: Vec<*libc::c_char> = tmps.iter()
+                                                   .map(|tmp| tmp.with_ref(|buf| buf))
+                                                   .collect();
             ptrs.push(ptr::null());
 
             cb(ptrs.as_ptr() as *c_void)
index ef3496f113b599fdc8dae389ee93a88bf68bdfc3..674f5e77cc084135b1720426331fb73201437c1b 100644 (file)
@@ -69,8 +69,8 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
     }
 
     // Internalize everything but the reachable symbols of the current module
-    let cstrs = reachable.map(|s| s.to_c_str());
-    let arr = cstrs.map(|c| c.with_ref(|p| p));
+    let cstrs: Vec<::std::c_str::CString> = reachable.iter().map(|s| s.to_c_str()).collect();
+    let arr: Vec<*i8> = cstrs.iter().map(|c| c.with_ref(|p| p)).collect();
     let ptr = arr.as_ptr();
     unsafe {
         llvm::LLVMRustRunRestrictionPass(llmod, ptr as **libc::c_char,
index 7e7ccb8a1c60a4d1f572cb49fd17e68ea2a9e175..8328d5faa543ea9d45066bee2e8888610c19f2ee 100644 (file)
@@ -943,9 +943,9 @@ pub fn build_session_options(matches: &getopts::Matches) -> session::Options {
         NoDebugInfo
     };
 
-    let addl_lib_search_paths = matches.opt_strs("L").map(|s| {
+    let addl_lib_search_paths = matches.opt_strs("L").iter().map(|s| {
         Path::new(s.as_slice())
-    }).move_iter().collect();
+    }).collect();
 
     let cfg = parse_cfgspecs(matches.opt_strs("cfg").move_iter().collect());
     let test = matches.opt_present("test");
index 878082d14c7c54c411ff11ce7e5dc299d6af4837..6f20e46a6effce8d03443fc870ac1596e6de1b05 100644 (file)
@@ -1861,7 +1861,7 @@ pub fn type_to_str(&self, ty: Type) -> ~str {
     }
 
     pub fn types_to_str(&self, tys: &[Type]) -> ~str {
-        let strs = tys.map(|t| self.type_to_str(*t));
+        let strs: Vec<~str> = tys.iter().map(|t| self.type_to_str(*t)).collect();
         format!("[{}]", strs.connect(","))
     }
 
index 8bb4942e06fd5471df2eda95f04f88ebf2b6d4e9..5313473739ff7ce25e02b1c40ff5e65ebdde3d6f 100644 (file)
@@ -200,9 +200,9 @@ pub fn get_rust_path() -> Option<~str> {
 pub fn rust_path() -> Vec<Path> {
     let mut env_rust_path: Vec<Path> = match get_rust_path() {
         Some(env_path) => {
-            let env_path_components: Vec<&str> =
-                env_path.split_str(PATH_ENTRY_SEPARATOR).collect();
-            env_path_components.map(|&s| Path::new(s))
+            let env_path_components =
+                env_path.split_str(PATH_ENTRY_SEPARATOR);
+            env_path_components.map(|s| Path::new(s)).collect()
         }
         None => Vec::new()
     };
index 2c2fe87c9bed1816ae928245a6e97fec3a7a7950..8f1a9270047ad97189f2cac4a63d37831c4ef1a5 100644 (file)
@@ -163,7 +163,7 @@ fn raw_pat(p: @Pat) -> @Pat {
 
 fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: Vec<@Pat> ) {
     assert!((!pats.is_empty()));
-    let ext = match is_useful(cx, &pats.map(|p| vec!(*p)), [wild()]) {
+    let ext = match is_useful(cx, &pats.iter().map(|p| vec!(*p)).collect(), [wild()]) {
         not_useful => {
             // This is good, wildcard pattern isn't reachable
             return;
@@ -692,12 +692,12 @@ fn specialize(cx: &MatchCheckCtxt,
                     DefVariant(_, variant_id, _) => {
                         if variant(variant_id) == *ctor_id {
                             let struct_fields = ty::lookup_struct_fields(cx.tcx, variant_id);
-                            let args = struct_fields.map(|sf| {
+                            let args = struct_fields.iter().map(|sf| {
                                 match pattern_fields.iter().find(|f| f.ident.name == sf.name) {
                                     Some(f) => f.pat,
                                     _ => wild()
                                 }
-                            });
+                            }).collect();
                             Some(vec::append(args, r.tail()))
                         } else {
                             None
index 0f283ca2f897e3aa55dfbb82ca70c0032af624aa..5eb90f770dd5fe5818815d5f3662220d0cd99db7 100644 (file)
@@ -4707,18 +4707,20 @@ fn resolve_module_relative_path(&mut self,
                                         path: &Path,
                                         namespace: Namespace)
                                         -> Option<(Def, LastPrivate)> {
-        let module_path_idents = path.segments.init().map(|ps| ps.identifier);
+        let module_path_idents = path.segments.init().iter()
+                                                     .map(|ps| ps.identifier)
+                                                     .collect::<Vec<_>>();
 
         let containing_module;
         let last_private;
         match self.resolve_module_path(self.current_module,
-                                       module_path_idents,
+                                       module_path_idents.as_slice(),
                                        UseLexicalScope,
                                        path.span,
                                        PathSearch) {
             Failed => {
                 let msg = format!("use of undeclared module `{}`",
-                                  self.idents_to_str(module_path_idents));
+                                  self.idents_to_str(module_path_idents.as_slice()));
                 self.resolve_error(path.span, msg);
                 return None;
             }
@@ -4772,21 +4774,23 @@ fn resolve_crate_relative_path(&mut self,
                                    path: &Path,
                                    namespace: Namespace)
                                        -> Option<(Def, LastPrivate)> {
-        let module_path_idents = path.segments.init().map(|ps| ps.identifier);
+        let module_path_idents = path.segments.init().iter()
+                                                     .map(|ps| ps.identifier)
+                                                     .collect::<Vec<_>>();
 
         let root_module = self.graph_root.get_module();
 
         let containing_module;
         let last_private;
         match self.resolve_module_path_from_root(root_module,
-                                                 module_path_idents,
+                                                 module_path_idents.as_slice(),
                                                  0,
                                                  path.span,
                                                  PathSearch,
                                                  LastMod(AllPublic)) {
             Failed => {
                 let msg = format!("use of undeclared module `::{}`",
-                                  self.idents_to_str(module_path_idents));
+                                  self.idents_to_str(module_path_idents.as_slice()));
                 self.resolve_error(path.span, msg);
                 return None;
             }
index 40a7bdff11f5e776d3699fd26d1f1ffcb97161d9..8f4c8c84b68643220b0769a1642f6150e2a10417 100644 (file)
@@ -216,7 +216,7 @@ fn visit_fn_decl(&mut self,
                referenced_idents={:?} \
                early_count={}",
                n,
-               referenced_idents.map(lifetime_show),
+               referenced_idents.iter().map(lifetime_show).collect::<Vec<token::InternedString>>(),
                early_count);
         if referenced_idents.is_empty() {
             let scope1 = LateScope(n, &generics.lifetimes, scope);
index c6819dbc91946389e6b03bfd6a36e5022e916d0b..7500c23e006a2e69de06860cd56a2182815e68df 100644 (file)
@@ -134,7 +134,7 @@ impl<T:Subst> Subst for Vec<T> {
     fn subst_spanned(&self, tcx: &ty::ctxt,
                      substs: &ty::substs,
                      span: Option<Span>) -> Vec<T> {
-        self.map(|t| t.subst_spanned(tcx, substs, span))
+        self.iter().map(|t| t.subst_spanned(tcx, substs, span)).collect()
     }
 }
 impl<T:Subst> Subst for Rc<T> {
@@ -189,7 +189,7 @@ fn subst_spanned(&self, tcx: &ty::ctxt,
         ty::substs {
             regions: self.regions.subst_spanned(tcx, substs, span),
             self_ty: self.self_ty.map(|typ| typ.subst_spanned(tcx, substs, span)),
-            tps: self.tps.map(|typ| typ.subst_spanned(tcx, substs, span))
+            tps: self.tps.iter().map(|typ| typ.subst_spanned(tcx, substs, span)).collect()
         }
     }
 }
index 6e692443300f5a0251e269495cb2169d117d48fb..b7a8052fdb917ad6dfd3b6725c90dd6da3cb15e3 100644 (file)
@@ -1578,10 +1578,10 @@ fn compile_submatch_continue<'r,
             let pat_ty = node_id_type(bcx, pat_id);
             let pat_repr = adt::represent_type(bcx.ccx(), pat_ty);
             expr::with_field_tys(tcx, pat_ty, Some(pat_id), |discr, field_tys| {
-                let rec_vals = rec_fields.map(|field_name| {
+                let rec_vals = rec_fields.iter().map(|field_name| {
                         let ix = ty::field_idx_strict(tcx, field_name.name, field_tys);
                         adt::trans_field_ptr(bcx, pat_repr, val, discr, ix)
-                        });
+                        }).collect();
                 compile_submatch(
                         bcx,
                         enter_rec_or_struct(bcx,
index 1215eba17d7bbd149e985d45d05cc7c0c8183e97..3b28549ed4cee74865a3026688d3de9e71d26770 100644 (file)
@@ -136,9 +136,9 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
         }
         ty::ty_struct(def_id, ref substs) => {
             let fields = ty::lookup_struct_fields(cx.tcx(), def_id);
-            let mut ftys = fields.map(|field| {
+            let mut ftys = fields.iter().map(|field| {
                 ty::lookup_field_type(cx.tcx(), def_id, field.id, substs)
-            });
+            }).collect::<Vec<_>>();
             let packed = ty::lookup_packed(cx.tcx(), def_id);
             let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag();
             if dtor { ftys.push(ty::mk_bool()); }
@@ -158,7 +158,7 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
 
             if cases.iter().all(|c| c.tys.len() == 0) {
                 // All bodies empty -> intlike
-                let discrs = cases.map(|c| c.discr);
+                let discrs: Vec<u64> = cases.iter().map(|c| c.discr).collect();
                 let bounds = IntBounds {
                     ulo: *discrs.iter().min().unwrap(),
                     uhi: *discrs.iter().max().unwrap(),
@@ -218,12 +218,12 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
             let bounds = IntBounds { ulo: 0, uhi: (cases.len() - 1) as u64,
                                      slo: 0, shi: (cases.len() - 1) as i64 };
             let ity = range_to_inttype(cx, hint, &bounds);
-            return General(ity, cases.map(|c| {
+            return General(ity, cases.iter().map(|c| {
                 let discr = vec!(ty_of_inttype(ity));
                 mk_struct(cx,
                           vec::append(discr, c.tys.as_slice()).as_slice(),
                           false)
-            }))
+            }).collect())
         }
         _ => cx.sess().bug("adt::represent_type called on non-ADT type")
     }
@@ -270,18 +270,18 @@ fn find_ptr(&self) -> Option<uint> {
 }
 
 fn get_cases(tcx: &ty::ctxt, def_id: ast::DefId, substs: &ty::substs) -> Vec<Case> {
-    ty::enum_variants(tcx, def_id).map(|vi| {
-        let arg_tys = vi.args.map(|&raw_ty| {
+    ty::enum_variants(tcx, def_id).iter().map(|vi| {
+        let arg_tys = vi.args.iter().map(|&raw_ty| {
             ty::subst(tcx, substs, raw_ty)
-        });
+        }).collect();
         Case { discr: vi.disr_val, tys: arg_tys }
-    })
+    }).collect()
 }
 
 
 fn mk_struct(cx: &CrateContext, tys: &[ty::t], packed: bool) -> Struct {
-    let lltys = tys.map(|&ty| type_of::sizing_type_of(cx, ty));
-    let llty_rec = Type::struct_(cx, lltys, packed);
+    let lltys = tys.iter().map(|&ty| type_of::sizing_type_of(cx, ty)).collect::<Vec<_>>();
+    let llty_rec = Type::struct_(cx, lltys.as_slice(), packed);
     Struct {
         size: machine::llsize_of_alloc(cx, llty_rec) /*bad*/as u64,
         align: machine::llalign_of_min(cx, llty_rec) /*bad*/as u64,
@@ -464,9 +464,9 @@ fn generic_type_of(cx: &CrateContext, r: &Repr, name: Option<&str>, sizing: bool
 
 fn struct_llfields(cx: &CrateContext, st: &Struct, sizing: bool) -> Vec<Type> {
     if sizing {
-        st.fields.map(|&ty| type_of::sizing_type_of(cx, ty))
+        st.fields.iter().map(|&ty| type_of::sizing_type_of(cx, ty)).collect()
     } else {
-        st.fields.map(|&ty| type_of::type_of(cx, ty))
+        st.fields.iter().map(|&ty| type_of::type_of(cx, ty)).collect()
     }
 }
 
@@ -700,7 +700,7 @@ fn struct_field_ptr(bcx: &Block, st: &Struct, val: ValueRef, ix: uint,
     let ccx = bcx.ccx();
 
     let val = if needs_cast {
-        let fields = st.fields.map(|&ty| type_of::type_of(ccx, ty));
+        let fields = st.fields.iter().map(|&ty| type_of::type_of(ccx, ty)).collect::<Vec<_>>();
         let real_ty = Type::struct_(ccx, fields.as_slice(), st.packed);
         PointerCast(bcx, val, real_ty.ptr_to())
     } else {
@@ -773,11 +773,11 @@ pub fn trans_const(ccx: &CrateContext, r: &Repr, discr: Disr,
                                                  vals).as_slice(),
                          false)
             } else {
-                let vals = nonnull.fields.map(|&ty| {
+                let vals = nonnull.fields.iter().map(|&ty| {
                     // Always use null even if it's not the `ptrfield`th
                     // field; see #8506.
                     C_null(type_of::sizing_type_of(ccx, ty))
-                }).move_iter().collect::<Vec<ValueRef> >();
+                }).collect::<Vec<ValueRef>>();
                 C_struct(ccx, build_const_struct(ccx,
                                                  nonnull,
                                                  vals.as_slice()).as_slice(),
index f4ce7c3c6a6924b9a43bd2cb8f3bd93eb4df5670..5c32b3fba4494955a99381d42c8fe13feaa10314 100644 (file)
@@ -36,17 +36,17 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
     let temp_scope = fcx.push_custom_cleanup_scope();
 
     // Prepare the output operands
-    let outputs = ia.outputs.map(|&(ref c, out)| {
+    let outputs = ia.outputs.iter().map(|&(ref c, out)| {
         constraints.push((*c).clone());
 
         let out_datum = unpack_datum!(bcx, expr::trans(bcx, out));
         output_types.push(type_of::type_of(bcx.ccx(), out_datum.ty));
         out_datum.val
 
-    });
+    }).collect::<Vec<_>>();
 
     // Now the input operands
-    let inputs = ia.inputs.map(|&(ref c, input)| {
+    let inputs = ia.inputs.iter().map(|&(ref c, input)| {
         constraints.push((*c).clone());
 
         let in_datum = unpack_datum!(bcx, expr::trans(bcx, input));
@@ -57,12 +57,15 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
                                    cleanup::CustomScope(temp_scope),
                                    callee::DontAutorefArg)
         })
-    });
+    }).collect::<Vec<_>>();
 
     // no failure occurred preparing operands, no need to cleanup
     fcx.pop_custom_cleanup_scope(temp_scope);
 
-    let mut constraints = constraints.map(|s| s.get().to_str()).connect(",");
+    let mut constraints = constraints.iter()
+                                     .map(|s| s.get().to_str())
+                                     .collect::<Vec<~str>>()
+                                     .connect(",");
 
     let mut clobbers = getClobbers();
     if !ia.clobbers.get().is_empty() && !clobbers.is_empty() {
index 79e22ea345529d3ac6767844da8f6564d9cf0b36..c7c0d51b3abd8d400f332598ec21280e7e62b79d 100644 (file)
@@ -121,7 +121,7 @@ pub fn Invoke(cx: &Block,
     terminate(cx, "Invoke");
     debug!("Invoke({} with arguments ({}))",
            cx.val_to_str(fn_),
-           args.map(|a| cx.val_to_str(*a)).connect(", "));
+           args.iter().map(|a| cx.val_to_str(*a)).collect::<Vec<~str>>().connect(", "));
     B(cx).invoke(fn_, args, then, catch, attributes)
 }
 
index 4526f3505beebacd782b9fe0f707cf979b800974..ff74fac95ff403f6e0d004cafe2e4f506f4f3a04 100644 (file)
@@ -780,13 +780,13 @@ pub fn inline_asm_call(&self, asm: *c_char, cons: *c_char,
         let alignstack = if alignstack { lib::llvm::True }
                          else          { lib::llvm::False };
 
-        let argtys = inputs.map(|v| {
+        let argtys = inputs.iter().map(|v| {
             debug!("Asm Input Type: {:?}", self.ccx.tn.val_to_str(*v));
             val_ty(*v)
-        });
+        }).collect::<Vec<_>>();
 
         debug!("Asm Output Type: {:?}", self.ccx.tn.type_to_str(output));
-        let fty = Type::func(argtys, &output);
+        let fty = Type::func(argtys.as_slice(), &output);
         unsafe {
             let v = llvm::LLVMInlineAsm(
                 fty.to_ref(), asm, cons, volatile, alignstack, dia as c_uint);
@@ -800,7 +800,10 @@ pub fn call(&self, llfn: ValueRef, args: &[ValueRef],
 
         debug!("Call {} with args ({})",
                self.ccx.tn.val_to_str(llfn),
-               args.map(|&v| self.ccx.tn.val_to_str(v)).connect(", "));
+               args.iter()
+                   .map(|&v| self.ccx.tn.val_to_str(v))
+                   .collect::<Vec<~str>>()
+                   .connect(", "));
 
         unsafe {
             let v = llvm::LLVMBuildCall(self.llbuilder, llfn, args.as_ptr(),
index 86413406ba561a339225e340f11fcfd0b442a1ab..81a6b401d7aa9db67ad2ca3630a43b339063c9d3 100644 (file)
@@ -816,7 +816,10 @@ pub fn node_id_type_params(bcx: &Block, node: ExprOrMethodCall) -> Vec<ty::t> {
     if !params.iter().all(|t| !ty::type_needs_infer(*t)) {
         bcx.sess().bug(
             format!("type parameters for node {:?} include inference types: {}",
-                 node, params.map(|t| bcx.ty_to_str(*t)).connect(",")));
+                 node, params.iter()
+                             .map(|t| bcx.ty_to_str(*t))
+                             .collect::<Vec<~str>>()
+                             .connect(",")));
     }
 
     match bcx.fcx.param_substs {
index a75d5237d1f0a0a22729a143f85400177b093ee8..a35dcaf868b2fe74ec7864565e0975df2ebc770e 100644 (file)
@@ -733,7 +733,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
         source_locations_enabled: Cell::new(false),
     };
 
-    let arg_pats = fn_decl.inputs.map(|arg_ref| arg_ref.pat);
+    let arg_pats = fn_decl.inputs.iter().map(|arg_ref| arg_ref.pat).collect::<Vec<_>>();
     populate_scope_map(cx,
                        arg_pats.as_slice(),
                        top_level_block,
@@ -1218,7 +1218,7 @@ struct StructMemberDescriptionFactory {
 impl StructMemberDescriptionFactory {
     fn create_member_descriptions(&self, cx: &CrateContext)
                                   -> Vec<MemberDescription> {
-        self.fields.map(|field| {
+        self.fields.iter().map(|field| {
             let name = if field.ident.name == special_idents::unnamed_field.name {
                 ~""
             } else {
@@ -1231,7 +1231,7 @@ fn create_member_descriptions(&self, cx: &CrateContext)
                 type_metadata: type_metadata(cx, field.mt.ty, self.span),
                 offset: ComputedMemberOffset,
             }
-        })
+        }).collect()
     }
 }
 
@@ -1321,14 +1321,14 @@ struct TupleMemberDescriptionFactory {
 impl TupleMemberDescriptionFactory {
     fn create_member_descriptions(&self, cx: &CrateContext)
                                   -> Vec<MemberDescription> {
-        self.component_types.map(|&component_type| {
+        self.component_types.iter().map(|&component_type| {
             MemberDescription {
                 name: ~"",
                 llvm_type: type_of::type_of(cx, component_type),
                 type_metadata: type_metadata(cx, component_type, self.span),
                 offset: ComputedMemberOffset,
             }
-        })
+        }).collect()
     }
 }
 
@@ -1443,7 +1443,9 @@ fn describe_enum_variant(cx: &CrateContext,
                       -> (DICompositeType, Type, MemberDescriptionFactory) {
     let variant_llvm_type =
         Type::struct_(cx, struct_def.fields
+                                    .iter()
                                     .map(|&t| type_of::type_of(cx, t))
+                                    .collect::<Vec<_>>()
                                     .as_slice(),
                       struct_def.packed);
     // Could some consistency checks here: size, align, field count, discr type
@@ -1464,11 +1466,11 @@ fn describe_enum_variant(cx: &CrateContext,
                                            variant_definition_span);
 
     // Get the argument names from the enum variant info
-    let mut arg_names = match variant_info.arg_names {
+    let mut arg_names: Vec<_> = match variant_info.arg_names {
         Some(ref names) => {
-            names.map(|ident| token::get_ident(*ident).get().to_str())
+            names.iter().map(|ident| token::get_ident(*ident).get().to_str()).collect()
         }
-        None => variant_info.args.map(|_| ~"")
+        None => variant_info.args.iter().map(|_| ~"").collect()
     };
 
     // If this is not a univariant enum, there is also the (unnamed) discriminant field
index f1cd46923d71d433f805a763e145f4a56addf600..bd8c84e85e57be61d39f1898bf71c9d59d861dda 100644 (file)
@@ -994,7 +994,7 @@ fn trans_rec_or_struct<'a>(
     with_field_tys(tcx, ty, Some(id), |discr, field_tys| {
         let mut need_base = slice::from_elem(field_tys.len(), true);
 
-        let numbered_fields = fields.map(|field| {
+        let numbered_fields = fields.iter().map(|field| {
             let opt_pos =
                 field_tys.iter().position(|field_ty|
                                           field_ty.ident.name == field.ident.node.name);
@@ -1008,7 +1008,7 @@ fn trans_rec_or_struct<'a>(
                                       "Couldn't find field in struct type")
                 }
             }
-        });
+        }).collect::<Vec<_>>();
         let optbase = match base {
             Some(base_expr) => {
                 let mut leftovers = Vec::new();
@@ -1029,7 +1029,7 @@ fn trans_rec_or_struct<'a>(
         };
 
         let repr = adt::represent_type(bcx.ccx(), ty);
-        trans_adt(bcx, repr, discr, numbered_fields, optbase, dest)
+        trans_adt(bcx, repr, discr, numbered_fields.as_slice(), optbase, dest)
     })
 }
 
index 4aa272a63a3e740631e937380c3e2604b0b094f3..158589393efd2ac763ade6020df5c8fcfc57e158 100644 (file)
@@ -851,7 +851,7 @@ fn foreign_types_for_fn_ty(ccx: &CrateContext,
            ty.repr(ccx.tcx()),
            ccx.tn.types_to_str(llsig.llarg_tys.as_slice()),
            ccx.tn.type_to_str(llsig.llret_ty),
-           ccx.tn.types_to_str(fn_ty.arg_tys.map(|t| t.ty).as_slice()),
+           ccx.tn.types_to_str(fn_ty.arg_tys.iter().map(|t| t.ty).collect::<Vec<_>>().as_slice()),
            ccx.tn.type_to_str(fn_ty.ret_ty.ty),
            ret_def);
 
index 75908a0d8050624e15fe2cb915ae18f14e8117d1..c84e4db768696156f0d3e6d91e2265a98121d23d 100644 (file)
@@ -18,7 +18,7 @@ pub trait LlvmRepr {
 
 impl<'a, T:LlvmRepr> LlvmRepr for &'a [T] {
     fn llrepr(&self, ccx: &CrateContext) -> ~str {
-        let reprs = self.map(|t| t.llrepr(ccx));
+        let reprs: Vec<~str> = self.iter().map(|t| t.llrepr(ccx)).collect();
         format!("[{}]", reprs.connect(","))
     }
 }
index 4701f38c09a8b330e0dbc7db443bfe5207197da2..ac787a1847871d5f5715502a90ec8ab67eac6d1a 100644 (file)
@@ -541,7 +541,7 @@ fn emit_vtable_methods(bcx: &Block,
     ty::populate_implementations_for_trait_if_necessary(bcx.tcx(), trt_id);
 
     let trait_method_def_ids = ty::trait_method_def_ids(tcx, trt_id);
-    trait_method_def_ids.map(|method_def_id| {
+    trait_method_def_ids.iter().map(|method_def_id| {
         let ident = ty::method(tcx, *method_def_id).ident;
         // The substitutions we have are on the impl, so we grab
         // the method type from the impl to substitute into.
@@ -558,7 +558,7 @@ fn emit_vtable_methods(bcx: &Block,
         } else {
             trans_fn_ref_with_vtables(bcx, m_id, ExprId(0), substs, Some(vtables))
         }
-    })
+    }).collect()
 }
 
 pub fn trans_trait_cast<'a>(bcx: &'a Block<'a>,
index 80a7fe2facfebe2f0a0f89cbc33ee24bb915fe80..179fda7cc4673978a45e2570a92ec13856df2fa4 100644 (file)
@@ -298,7 +298,7 @@ pub fn make_mono_id(ccx: &CrateContext,
                vts.repr(ccx.tcx()), substs.tys.repr(ccx.tcx()));
         let vts_iter = substs.self_vtables.iter().chain(vts.iter());
         vts_iter.zip(substs_iter).map(|(vtable, subst)| {
-            let v = vtable.map(|vt| meth::vtable_id(ccx, vt));
+            let v = vtable.iter().map(|vt| meth::vtable_id(ccx, vt)).collect::<Vec<_>>();
             (*subst, if !v.is_empty() { Some(@v) } else { None })
         }).collect()
       }
index a86348947f1381bafa3d5be0942c35142ffc6747..350ee411029f6f20f8e611a53908aa945380368a 100644 (file)
@@ -1408,14 +1408,14 @@ pub fn mk_ctor_fn(cx: &ctxt,
                   binder_id: ast::NodeId,
                   input_tys: &[ty::t],
                   output: ty::t) -> t {
-    let input_args = input_tys.map(|t| *t);
+    let input_args = input_tys.iter().map(|t| *t).collect();
     mk_bare_fn(cx,
                BareFnTy {
                    purity: ast::ImpureFn,
                    abis: AbiSet::Rust(),
                    sig: FnSig {
                     binder_id: binder_id,
-                    inputs: Vec::from_slice(input_args),
+                    inputs: input_args,
                     output: output,
                     variadic: false
                    }
@@ -2880,7 +2880,7 @@ pub fn replace_closure_return_type(tcx: &ctxt, fn_type: t, ret_type: t) -> t {
 
 // Returns a vec of all the input and output types of fty.
 pub fn tys_in_fn_sig(sig: &FnSig) -> Vec<t> {
-    vec::append_one(sig.inputs.map(|a| *a), sig.output)
+    vec::append_one(sig.inputs.iter().map(|a| *a).collect(), sig.output)
 }
 
 // Type accessors for AST nodes
@@ -3432,7 +3432,7 @@ pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field])
     tcx.sess.bug(format!(
         "no field named `{}` found in the list of fields `{:?}`",
         token::get_name(name),
-        fields.map(|f| token::get_ident(f.ident).get().to_str())));
+        fields.iter().map(|f| token::get_ident(f.ident).get().to_str()).collect::<Vec<~str>>()));
 }
 
 pub fn method_idx(id: ast::Ident, meths: &[@Method]) -> Option<uint> {
@@ -3724,8 +3724,8 @@ pub fn trait_supertraits(cx: &ctxt, id: ast::DefId) -> @Vec<@TraitRef> {
 
 pub fn trait_ref_supertraits(cx: &ctxt, trait_ref: &ty::TraitRef) -> Vec<@TraitRef> {
     let supertrait_refs = trait_supertraits(cx, trait_ref.def_id);
-    supertrait_refs.map(
-        |supertrait_ref| supertrait_ref.subst(cx, &trait_ref.substs))
+    supertrait_refs.iter().map(
+        |supertrait_ref| supertrait_ref.subst(cx, &trait_ref.substs)).collect()
 }
 
 fn lookup_locally_or_in_crate_store<V:Clone>(
@@ -3768,7 +3768,7 @@ pub fn trait_methods(cx: &ctxt, trait_did: ast::DefId) -> @Vec<@Method> {
         Some(&methods) => methods,
         None => {
             let def_ids = ty::trait_method_def_ids(cx, trait_did);
-            let methods = @def_ids.map(|d| ty::method(cx, *d));
+            let methods = @def_ids.iter().map(|d| ty::method(cx, *d)).collect();
             trait_methods.insert(trait_did, methods);
             methods
         }
@@ -3876,7 +3876,7 @@ pub fn from_ast_variant(cx: &ctxt,
         match ast_variant.node.kind {
             ast::TupleVariantKind(ref args) => {
                 let arg_tys = if args.len() > 0 {
-                    ty_fn_args(ctor_ty).map(|a| *a)
+                    ty_fn_args(ctor_ty).iter().map(|a| *a).collect()
                 } else {
                     Vec::new()
                 };
@@ -3897,7 +3897,7 @@ pub fn from_ast_variant(cx: &ctxt,
 
                 assert!(fields.len() > 0);
 
-                let arg_tys = ty_fn_args(ctor_ty).map(|a| *a);
+                let arg_tys = ty_fn_args(ctor_ty).iter().map(|a| *a).collect();
                 let arg_names = fields.iter().map(|field| {
                     match field.node.kind {
                         NamedField(ident, _) => ident,
@@ -4280,7 +4280,7 @@ fn struct_field_tys(fields: &[StructField]) -> Vec<field_ty> {
 // this. Takes a list of substs with which to instantiate field types.
 pub fn struct_fields(cx: &ctxt, did: ast::DefId, substs: &substs)
                      -> Vec<field> {
-    lookup_struct_fields(cx, did).map(|f| {
+    lookup_struct_fields(cx, did).iter().map(|f| {
        field {
             // FIXME #6993: change type of field to Name and get rid of new()
             ident: ast::Ident::new(f.name),
@@ -4289,7 +4289,7 @@ pub fn struct_fields(cx: &ctxt, did: ast::DefId, substs: &substs)
                 mutbl: MutImmutable
             }
         }
-    })
+    }).collect()
 }
 
 pub fn is_binopable(cx: &ctxt, ty: t, op: ast::BinOp) -> bool {
index 54ed9ef6ef6062e2caad8293ad62e126b956bc2b..5d2d221a774e013ba5e50e72332019c614cb8180 100644 (file)
@@ -167,8 +167,8 @@ fn ast_path_substs<AC:AstConv,RS:RegionScope>(
     let expected_num_region_params = decl_generics.region_param_defs().len();
     let supplied_num_region_params = path.segments.last().unwrap().lifetimes.len();
     let regions = if expected_num_region_params == supplied_num_region_params {
-        path.segments.last().unwrap().lifetimes.map(
-            |l| ast_region_to_region(this.tcx(), l))
+        path.segments.last().unwrap().lifetimes.iter().map(
+            |l| ast_region_to_region(this.tcx(), l)).collect::<Vec<_>>()
     } else {
         let anon_regions =
             rscope.anon_regions(path.span, expected_num_region_params);
index a4b78bc3db2cc07b87b2658f372e9e720d22c82d..38cb48a6c7c2449b191785b8a252333fca3d8031 100644 (file)
@@ -147,7 +147,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
                         let vinfo =
                             ty::enum_variant_with_id(tcx, enm, var);
                         let var_tpt = ty::lookup_item_type(tcx, var);
-                        vinfo.args.map(|t| {
+                        vinfo.args.iter().map(|t| {
                             if var_tpt.generics.type_param_defs().len() ==
                                 expected_substs.tps.len()
                             {
@@ -157,7 +157,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
                                 *t // In this case, an error was already signaled
                                     // anyway
                             }
-                        })
+                        }).collect()
                     };
 
                     kind_name = "variant";
@@ -209,7 +209,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
             // Get the expected types of the arguments.
             let class_fields = ty::struct_fields(
                 tcx, struct_def_id, expected_substs);
-            arg_types = class_fields.map(|field| field.mt.ty);
+            arg_types = class_fields.iter().map(|field| field.mt.ty).collect();
 
             kind_name = "structure";
         }
index 9f1a5506b02f2559b42f8a2f8a04be4ece7a0786..5a5e9ee094154fc5f6cb2a09b59a3ded55fa94b9 100644 (file)
@@ -659,7 +659,10 @@ fn push_candidates_from_impl(&mut self,
         debug!("push_candidates_from_impl: {} {} {}",
                token::get_name(self.m_name),
                impl_info.ident.repr(self.tcx()),
-               impl_info.methods.map(|m| m.ident).repr(self.tcx()));
+               impl_info.methods.iter()
+                                .map(|m| m.ident)
+                                .collect::<Vec<ast::Ident>>()
+                                .repr(self.tcx()));
 
         let idx = {
             match impl_info.methods
index 3f1d3e29c54f95a1d5830eaa2c69a3f8da6a2f74..4b611dd5ddfe721c7291341be82bc1e9f42fc9a9 100644 (file)
@@ -449,7 +449,7 @@ fn check_fn<'a>(ccx: &'a CrateCtxt<'a>,
     let ret_ty = fn_sig.output;
 
     debug!("check_fn(arg_tys={:?}, ret_ty={:?})",
-           arg_tys.map(|&a| ppaux::ty_to_str(tcx, a)),
+           arg_tys.iter().map(|&a| ppaux::ty_to_str(tcx, a)).collect::<Vec<~str>>(),
            ppaux::ty_to_str(tcx, ret_ty));
 
     // Create the function context.  This is either derived from scratch or,
@@ -1717,7 +1717,7 @@ fn check_argument_types(fcx: &FnCtxt,
         };
 
         debug!("check_argument_types: formal_tys={:?}",
-               formal_tys.map(|t| fcx.infcx().ty_to_str(*t)));
+               formal_tys.iter().map(|t| fcx.infcx().ty_to_str(*t)).collect::<Vec<~str>>());
 
         // Check the arguments.
         // We do this in a pretty awful way: first we typecheck any arguments
@@ -1886,10 +1886,10 @@ fn check_method_call(fcx: &FnCtxt,
                                                 expr.span,
                                                 fcx.expr_ty(rcvr));
 
-        let tps = tps.map(|&ast_ty| fcx.to_ty(ast_ty));
+        let tps = tps.iter().map(|&ast_ty| fcx.to_ty(ast_ty)).collect::<Vec<_>>();
         let fn_ty = match method::lookup(fcx, expr, rcvr,
                                          method_name.name,
-                                         expr_t, tps,
+                                         expr_t, tps.as_slice(),
                                          DontDerefArgs,
                                          CheckTraitsAndInherentMethods,
                                          AutoderefReceiver) {
@@ -2235,7 +2235,7 @@ fn check_expr_fn(fcx: &FnCtxt,
         let fty = if error_happened {
             fty_sig = FnSig {
                 binder_id: ast::CRATE_NODE_ID,
-                inputs: fn_ty.sig.inputs.map(|_| ty::mk_err()),
+                inputs: fn_ty.sig.inputs.iter().map(|_| ty::mk_err()).collect(),
                 output: ty::mk_err(),
                 variadic: false
             };
@@ -2938,11 +2938,11 @@ fn check_struct_enum_variant(fcx: &FnCtxt,
       }
       ast::ExprMethodCall(ident, ref tps, ref args) => {
         check_method_call(fcx, expr, ident, args.as_slice(), tps.as_slice());
-        let arg_tys = args.map(|a| fcx.expr_ty(*a));
-        let (args_bot, args_err) = arg_tys.iter().fold((false, false),
+        let mut arg_tys = args.iter().map(|a| fcx.expr_ty(*a));
+        let (args_bot, args_err) = arg_tys.fold((false, false),
              |(rest_bot, rest_err), a| {
-              (rest_bot || ty::type_is_bot(*a),
-               rest_err || ty::type_is_error(*a))});
+              (rest_bot || ty::type_is_bot(a),
+               rest_err || ty::type_is_error(a))});
         if args_err {
             fcx.write_error(id);
         } else if args_bot {
@@ -3686,8 +3686,8 @@ pub fn instantiate_path(fcx: &FnCtxt,
     let num_expected_regions = tpt.generics.region_param_defs().len();
     let num_supplied_regions = pth.segments.last().unwrap().lifetimes.len();
     let regions = if num_expected_regions == num_supplied_regions {
-        OwnedSlice::from_vec(pth.segments.last().unwrap().lifetimes.map(
-            |l| ast_region_to_region(fcx.tcx(), l)))
+        OwnedSlice::from_vec(pth.segments.last().unwrap().lifetimes.iter().map(
+            |l| ast_region_to_region(fcx.tcx(), l)).collect())
     } else {
         if num_supplied_regions != 0 {
             fcx.ccx.tcx.sess.span_err(
index 31d74a1e938aa952e0185f4aeb715d21180c408a..7159febc7f1074b402df399d0c30dd6d9de0f147 100644 (file)
@@ -113,7 +113,7 @@ fn resolve_vtable_map_entry(fcx: &FnCtxt, sp: Span, vtable_key: MethodCall) {
 
     fn resolve_origins(fcx: &FnCtxt, sp: Span,
                        vtbls: vtable_res) -> vtable_res {
-        @vtbls.map(|os| @os.map(|origin| {
+        @vtbls.iter().map(|os| @os.iter().map(|origin| {
             match origin {
                 &vtable_static(def_id, ref tys, origins) => {
                     let r_tys = resolve_type_vars_in_types(fcx,
@@ -126,7 +126,7 @@ fn resolve_origins(fcx: &FnCtxt, sp: Span,
                     vtable_param(n, b)
                 }
             }
-        }))
+        }).collect()).collect()
     }
 }
 
index 22449a93357c79eaf4413cd7b9d43722fe9aa726..dfde5b1d874e55494e7ac709fb3c25691f5821e8 100644 (file)
@@ -152,7 +152,7 @@ pub fn get_enum_variant_types(ccx: &CrateCtxt,
         let result_ty = match variant.node.kind {
             ast::TupleVariantKind(ref args) if args.len() > 0 => {
                 let rs = ExplicitRscope;
-                let input_tys = args.map(|va| ccx.to_ty(&rs, va.ty));
+                let input_tys: Vec<_> = args.iter().map(|va| ccx.to_ty(&rs, va.ty)).collect();
                 ty::mk_ctor_fn(tcx, scope, input_tys.as_slice(), enum_ty)
             }
 
@@ -168,8 +168,8 @@ pub fn get_enum_variant_types(ccx: &CrateCtxt,
 
                 convert_struct(ccx, struct_def, tpt, variant.node.id);
 
-                let input_tys = struct_def.fields.map(
-                    |f| ty::node_id_to_type(ccx.tcx, f.node.id));
+                let input_tys: Vec<_> = struct_def.fields.iter().map(
+                    |f| ty::node_id_to_type(ccx.tcx, f.node.id)).collect();
                 ty::mk_ctor_fn(tcx, scope, input_tys.as_slice(), enum_ty)
             }
         };
@@ -222,7 +222,7 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) {
                     }
 
                     // Add an entry mapping
-                    let method_def_ids = @ms.map(|m| {
+                    let method_def_ids = @ms.iter().map(|m| {
                         match m {
                             &ast::Required(ref ty_method) => {
                                 local_def(ty_method.id)
@@ -231,13 +231,11 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) {
                                 local_def(method.id)
                             }
                         }
-                    });
+                    }).collect();
 
                     let trait_def_id = local_def(trait_id);
                     tcx.trait_method_def_ids.borrow_mut()
-                        .insert(trait_def_id, @method_def_ids.iter()
-                                                             .map(|x| *x)
-                                                             .collect());
+                        .insert(trait_def_id, method_def_ids);
                 }
                 _ => {} // Ignore things that aren't traits.
             }
@@ -697,9 +695,9 @@ pub fn convert_struct(ccx: &CrateCtxt,
                 tcx.tcache.borrow_mut().insert(local_def(ctor_id), tpt);
             } else if struct_def.fields.get(0).node.kind.is_unnamed() {
                 // Tuple-like.
-                let inputs = struct_def.fields.map(
+                let inputs: Vec<_> = struct_def.fields.iter().map(
                         |field| tcx.tcache.borrow().get(
-                            &local_def(field.node.id)).ty);
+                            &local_def(field.node.id)).ty).collect();
                 let ctor_fn_ty = ty::mk_ctor_fn(tcx,
                                                 ctor_id,
                                                 inputs.as_slice(),
index 8084b712e5330deaa74d2eb908d69e7dd7e55041..7daf03d8526407ffb03c5cb19cffc4dabdb18e1a 100644 (file)
@@ -683,7 +683,7 @@ pub fn ty_to_str(&self, t: ty::t) -> ~str {
     }
 
     pub fn tys_to_str(&self, ts: &[ty::t]) -> ~str {
-        let tstrs = ts.map(|t| self.ty_to_str(*t));
+        let tstrs: Vec<~str> = ts.iter().map(|t| self.ty_to_str(*t)).collect();
         format!("({})", tstrs.connect(", "))
     }
 
index 927b6be2a041fa8d0223fa1b67f45f12a3628f98..4d6d2da18bf1e6e1b6f07e1f1334e7db2fd2cd87 100644 (file)
@@ -1164,8 +1164,14 @@ fn free_regions_first(a: &RegionAndOrigin,
             format!("collect_error_for_expanding_node() could not find error \
                   for var {:?}, lower_bounds={}, upper_bounds={}",
                  node_idx,
-                 lower_bounds.map(|x| x.region).repr(self.tcx),
-                 upper_bounds.map(|x| x.region).repr(self.tcx)));
+                 lower_bounds.iter()
+                             .map(|x| x.region)
+                             .collect::<Vec<ty::Region>>()
+                             .repr(self.tcx),
+                 upper_bounds.iter()
+                             .map(|x| x.region)
+                             .collect::<Vec<ty::Region>>()
+                             .repr(self.tcx)));
     }
 
     fn collect_error_for_contracting_node(
@@ -1209,7 +1215,10 @@ fn collect_error_for_contracting_node(
             format!("collect_error_for_contracting_node() could not find error \
                   for var {:?}, upper_bounds={}",
                  node_idx,
-                 upper_bounds.map(|x| x.region).repr(self.tcx)));
+                 upper_bounds.iter()
+                             .map(|x| x.region)
+                             .collect::<Vec<ty::Region>>()
+                             .repr(self.tcx)));
     }
 
     fn collect_concrete_regions(&self,
index aa3e3a23182e0ea0d1daa44fdd45cb91d9760e46..aa0b777a0c0f20322d935f2c8a7f1ee49ae0e9aa 100644 (file)
@@ -32,7 +32,7 @@ fn inf_str(&self, cx: &InferCtxt) -> ~str {
 impl InferStr for FnSig {
     fn inf_str(&self, cx: &InferCtxt) -> ~str {
         format!("({}) -> {}",
-             self.inputs.map(|a| a.inf_str(cx)).connect(", "),
+             self.inputs.iter().map(|a| a.inf_str(cx)).collect::<Vec<~str>>().connect(", "),
              self.output.inf_str(cx))
     }
 }
index 47772ca4682c994f1aa7ce98caf45f5ffbdaa787..72cc936f23ee3b43a04d482642871fd376e70b7c 100644 (file)
@@ -258,7 +258,7 @@ pub fn write_substs_to_tcx(tcx: &ty::ctxt,
                            substs: Vec<ty::t> ) {
     if substs.len() > 0u {
         debug!("write_substs_to_tcx({}, {:?})", node_id,
-               substs.map(|t| ppaux::ty_to_str(tcx, *t)));
+               substs.iter().map(|t| ppaux::ty_to_str(tcx, *t)).collect::<Vec<~str>>());
         assert!(substs.iter().all(|t| !ty::type_needs_infer(*t)));
 
         tcx.node_type_substs.borrow_mut().insert(node_id, substs);
index c264916ab42df439e0e25a3f4ca166c1f2d7dd30..bdc27ceab2b2bd9543cda9c5425bdde24a8cd97e 100644 (file)
@@ -67,7 +67,7 @@ pub fn indenter() -> _indenter {
 pub fn field_expr(f: ast::Field) -> @ast::Expr { return f.expr; }
 
 pub fn field_exprs(fields: Vec<ast::Field> ) -> Vec<@ast::Expr> {
-    fields.map(|f| f.expr)
+    fields.move_iter().map(|f| f.expr).collect()
 }
 
 struct LoopQueryVisitor<'a> {
index 42a6438b58cb8d46ec06e55e92cae9eaf038566d..9b76d50a66479f9b2f628588f5b4ff7d710c4ed1 100644 (file)
@@ -276,7 +276,7 @@ pub fn vstore_ty_to_str(cx: &ctxt, mt: &mt, vs: ty::vstore) -> ~str {
 }
 
 pub fn vec_map_to_str<T>(ts: &[T], f: |t: &T| -> ~str) -> ~str {
-    let tstrs = ts.map(f);
+    let tstrs = ts.iter().map(f).collect::<Vec<~str>>();
     format!("[{}]", tstrs.connect(", "))
 }
 
@@ -405,7 +405,7 @@ fn push_sig_to_str(cx: &ctxt,
                        ket: char,
                        sig: &ty::FnSig) {
         s.push_char(bra);
-        let strs = sig.inputs.map(|a| fn_input_to_str(cx, *a));
+        let strs: Vec<~str> = sig.inputs.iter().map(|a| fn_input_to_str(cx, *a)).collect();
         s.push_str(strs.connect(", "));
         if sig.variadic {
             s.push_str(", ...");
@@ -447,7 +447,7 @@ fn push_sig_to_str(cx: &ctxt,
       }
       ty_unboxed_vec(ref tm) => { format!("unboxed_vec<{}>", mt_to_str(cx, tm)) }
       ty_tup(ref elems) => {
-        let strs = elems.map(|elem| ty_to_str(cx, *elem));
+        let strs: Vec<~str> = elems.iter().map(|elem| ty_to_str(cx, *elem)).collect();
         ~"(" + strs.connect(",") + ")"
       }
       ty_closure(ref f) => {
index 114fdc6c5c9c7dd4560e13f010f3c22150868f8e..a08776946bc819ce6b6bdab86bfc956462f4a46a 100644 (file)
@@ -314,14 +314,14 @@ pub fn unindent(s: &str) -> ~str {
 
     if lines.len() >= 1 {
         let mut unindented = vec!( lines.get(0).trim() );
-        unindented.push_all(lines.tail().map(|&line| {
+        unindented.push_all(lines.tail().iter().map(|&line| {
             if line.is_whitespace() {
                 line
             } else {
                 assert!(line.len() >= min_indent);
                 line.slice_from(min_indent)
             }
-        }));
+        }).collect::<Vec<_>>().as_slice());
         unindented.connect("\n")
     } else {
         s.to_owned()
index 59a80f0f43224dece00e447c6e1d61527bc248e0..949275be7cacc29da22f35b83fc03f5a5cc960e0 100644 (file)
@@ -1706,7 +1706,7 @@ fn to_json(&self) -> Json {
 }
 
 impl<A:ToJson> ToJson for ~[A] {
-    fn to_json(&self) -> Json { List(self.map(|elt| elt.to_json())) }
+    fn to_json(&self) -> Json { List(self.iter().map(|elt| elt.to_json()).collect()) }
 }
 
 impl<A:ToJson> ToJson for TreeMap<~str, A> {
index 85df875dc1d62fb1514c299ec3e031e35e52089f..57f7d1834588924e99c9cf4803aac717ff23f5c7 100644 (file)
@@ -285,12 +285,12 @@ fn as_str_ascii<'a>(&'a self) -> &'a str {
 
     #[inline]
     fn to_lower(&self) -> ~[Ascii] {
-        self.map(|a| a.to_lower())
+        self.iter().map(|a| a.to_lower()).collect()
     }
 
     #[inline]
     fn to_upper(&self) -> ~[Ascii] {
-        self.map(|a| a.to_upper())
+        self.iter().map(|a| a.to_upper()).collect()
     }
 
     #[inline]
index f16db6e76c9b9dbc5e18f239ea239521acf8b612..bf573bfaed876d977f4278aa8af44635a9b33738 100644 (file)
 
 #![allow(missing_doc)]
 
+use iter::Iterator;
 use io::IoResult;
 use io::net::ip::{SocketAddr, IpAddr};
 use option::{Option, Some, None};
 use rt::rtio::{IoFactory, LocalIo};
-use slice::ImmutableVector;
+use slice::OwnedVector;
 
 /// Hints to the types of sockets that are desired when looking up hosts
 pub enum SocketType {
@@ -73,7 +74,7 @@ pub struct Info {
 /// Easy name resolution. Given a hostname, returns the list of IP addresses for
 /// that hostname.
 pub fn get_host_addresses(host: &str) -> IoResult<~[IpAddr]> {
-    lookup(Some(host), None, None).map(|a| a.map(|i| i.address.ip))
+    lookup(Some(host), None, None).map(|a| a.move_iter().map(|i| i.address.ip).collect())
 }
 
 /// Full-fleged resolution. This function will perform a synchronous call to
index b5055dfe8b37c036e99fc9a860df7c5a7614eaff..4a720aefa4eb6457ca78ab2fe656bd1e4be4f8f5 100644 (file)
@@ -348,16 +348,6 @@ pub fn append_one<T>(lhs: ~[T], x: T) -> ~[T] {
 
 // Functional utilities
 
-/**
- * Apply a function to each element of a vector and return a concatenation
- * of each result vector
- */
-pub fn flat_map<T, U>(v: &[T], f: |t: &T| -> ~[U]) -> ~[U] {
-    let mut result = ~[];
-    for elem in v.iter() { result.push_all_move(f(elem)); }
-    result
-}
-
 #[allow(missing_doc)]
 pub trait VectorVector<T> {
     // FIXME #5898: calling these .concat and .connect conflicts with
@@ -902,11 +892,7 @@ pub trait ImmutableVector<'a, T> {
     fn initn(&self, n: uint) -> &'a [T];
     /// Returns the last element of a vector, or `None` if it is empty.
     fn last(&self) -> Option<&'a T>;
-    /**
-     * Apply a function to each element of a vector and return a concatenation
-     * of each result vector
-     */
-    fn flat_map<U>(&self, f: |t: &T| -> ~[U]) -> ~[U];
+
     /// Returns a pointer to the element at the given index, without doing
     /// bounds checking.
     unsafe fn unsafe_ref(self, index: uint) -> &'a T;
@@ -935,11 +921,6 @@ pub trait ImmutableVector<'a, T> {
      */
     fn bsearch(&self, f: |&T| -> Ordering) -> Option<uint>;
 
-    /// Deprecated, use iterators where possible
-    /// (`self.iter().map(f)`). Apply a function to each element
-    /// of a vector and return the results.
-    fn map<U>(&self, |t: &T| -> U) -> ~[U];
-
     /**
      * Returns a mutable reference to the first element in this slice
      * and adjusts the slice in place so that it no longer contains
@@ -1094,11 +1075,6 @@ fn last(&self) -> Option<&'a T> {
             if self.len() == 0 { None } else { Some(&self[self.len() - 1]) }
     }
 
-    #[inline]
-    fn flat_map<U>(&self, f: |t: &T| -> ~[U]) -> ~[U] {
-        flat_map(*self, f)
-    }
-
     #[inline]
     unsafe fn unsafe_ref(self, index: uint) -> &'a T {
         transmute(self.repr().data.offset(index as int))
@@ -1129,10 +1105,6 @@ fn bsearch(&self, f: |&T| -> Ordering) -> Option<uint> {
         return None;
     }
 
-    fn map<U>(&self, f: |t: &T| -> U) -> ~[U] {
-        self.iter().map(f).collect()
-    }
-
     fn shift_ref(&mut self) -> Option<&'a T> {
         if self.len() == 0 { return None; }
         unsafe {
@@ -3329,27 +3301,6 @@ fn test_dedup_shared() {
          */
     }
 
-    #[test]
-    fn test_map() {
-        // Test on-stack map.
-        let v = &[1u, 2u, 3u];
-        let mut w = v.map(square_ref);
-        assert_eq!(w.len(), 3u);
-        assert_eq!(w[0], 1u);
-        assert_eq!(w[1], 4u);
-        assert_eq!(w[2], 9u);
-
-        // Test on-heap map.
-        let v = ~[1u, 2u, 3u, 4u, 5u];
-        w = v.map(square_ref);
-        assert_eq!(w.len(), 5u);
-        assert_eq!(w[0], 1u);
-        assert_eq!(w[1], 4u);
-        assert_eq!(w[2], 9u);
-        assert_eq!(w[3], 16u);
-        assert_eq!(w[4], 25u);
-    }
-
     #[test]
     fn test_retain() {
         let mut v = ~[1, 2, 3, 4, 5];
@@ -3730,36 +3681,6 @@ fn test_grow_fn_fail() {
         })
     }
 
-    #[test]
-    #[should_fail]
-    fn test_map_fail() {
-        use rc::Rc;
-        let v = [(~0, Rc::new(0)), (~0, Rc::new(0)), (~0, Rc::new(0)), (~0, Rc::new(0))];
-        let mut i = 0;
-        v.map(|_elt| {
-            if i == 2 {
-                fail!()
-            }
-            i += 1;
-            ~[(~0, Rc::new(0))]
-        });
-    }
-
-    #[test]
-    #[should_fail]
-    fn test_flat_map_fail() {
-        use rc::Rc;
-        let v = [(~0, Rc::new(0)), (~0, Rc::new(0)), (~0, Rc::new(0)), (~0, Rc::new(0))];
-        let mut i = 0;
-        flat_map(v, |_elt| {
-            if i == 2 {
-                fail!()
-            }
-            i += 1;
-            ~[(~0, Rc::new(0))]
-        });
-    }
-
     #[test]
     #[should_fail]
     fn test_permute_fail() {
index 4454801c12946f1d13ccec6353e665a96777743f..11fd2b8ee225de4f4a98988fa295d6ed8d0ced80 100644 (file)
@@ -902,13 +902,6 @@ pub fn remove(&mut self, index: uint) -> Option<T> {
         }
     }
 
-    ///Apply a function to each element of a vector and return the results.
-    #[inline]
-    #[deprecated="Use `xs.iter().map(closure)` instead."]
-    pub fn map<U>(&self, f: |t: &T| -> U) -> Vec<U> {
-        self.iter().map(f).collect()
-    }
-
     /// Takes ownership of the vector `other`, moving all elements into
     /// the current vector. This does not copy any elements, and it is
     /// illegal to use the `other` vector after calling this method
index 3dbd10b625c3d828a26cf1d5f7e0cb9a49098331..1676a130235562071bf158587e8698ecac978314 100644 (file)
@@ -25,9 +25,9 @@
 
 pub fn path_name_i(idents: &[Ident]) -> ~str {
     // FIXME: Bad copies (#2543 -- same for everything else that says "bad")
-    idents.map(|i| {
+    idents.iter().map(|i| {
         token::get_ident(*i).get().to_str()
-    }).connect("::")
+    }).collect::<Vec<~str>>().connect("::")
 }
 
 // totally scary function: ignores all but the last element, should have
@@ -717,13 +717,15 @@ fn ident_to_segment(id : &Ident) -> PathSegment {
     }
 
     #[test] fn idents_name_eq_test() {
-        assert!(segments_name_eq([Ident{name:3,ctxt:4},
-                                   Ident{name:78,ctxt:82}].map(ident_to_segment),
-                                 [Ident{name:3,ctxt:104},
-                                   Ident{name:78,ctxt:182}].map(ident_to_segment)));
-        assert!(!segments_name_eq([Ident{name:3,ctxt:4},
-                                    Ident{name:78,ctxt:82}].map(ident_to_segment),
-                                  [Ident{name:3,ctxt:104},
-                                    Ident{name:77,ctxt:182}].map(ident_to_segment)));
+        assert!(segments_name_eq(
+            [Ident{name:3,ctxt:4}, Ident{name:78,ctxt:82}]
+                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice(),
+            [Ident{name:3,ctxt:104}, Ident{name:78,ctxt:182}]
+                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice()));
+        assert!(!segments_name_eq(
+            [Ident{name:3,ctxt:4}, Ident{name:78,ctxt:82}]
+                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice(),
+            [Ident{name:3,ctxt:104}, Ident{name:77,ctxt:182}]
+                .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice()));
     }
 }
index 1c2c63cd91974c25ba5df368a98c1897d5259f82..489af0fc2d477096ffb4bdc66f005f12ebc73b43 100644 (file)
@@ -746,7 +746,7 @@ fn lambda_fn_decl(&self, span: Span,
     }
     fn lambda(&self, span: Span, ids: Vec<ast::Ident> , blk: P<ast::Block>) -> @ast::Expr {
         let fn_decl = self.fn_decl(
-            ids.map(|id| self.arg(span, *id, self.ty_infer(span))),
+            ids.iter().map(|id| self.arg(span, *id, self.ty_infer(span))).collect(),
             self.ty_infer(span));
 
         self.expr(span, ast::ExprFnBlock(fn_decl, blk))
@@ -966,16 +966,14 @@ fn view_use_simple_(&self, sp: Span, vis: ast::Visibility,
 
     fn view_use_list(&self, sp: Span, vis: ast::Visibility,
                      path: Vec<ast::Ident> , imports: &[ast::Ident]) -> ast::ViewItem {
-        let imports = imports.map(|id| {
+        let imports = imports.iter().map(|id| {
             respan(sp, ast::PathListIdent_ { name: *id, id: ast::DUMMY_NODE_ID })
-        });
+        }).collect();
 
         self.view_use(sp, vis,
                       vec!(@respan(sp,
                                 ast::ViewPathList(self.path(sp, path),
-                                                  imports.iter()
-                                                         .map(|x| *x)
-                                                         .collect(),
+                                                  imports,
                                                   ast::DUMMY_NODE_ID))))
     }
 
index 3cbccae664bec12b34800f803b3ee5b185ece019..367accb4b19d863016b15245b1effcf5b6eb352e 100644 (file)
@@ -71,11 +71,11 @@ fn cs_clone(
 
     if all_fields.len() >= 1 && all_fields.get(0).name.is_none() {
         // enum-like
-        let subcalls = all_fields.map(subcall);
+        let subcalls = all_fields.iter().map(subcall).collect();
         cx.expr_call_ident(trait_span, ctor_ident, subcalls)
     } else {
         // struct-like
-        let fields = all_fields.map(|field| {
+        let fields = all_fields.iter().map(|field| {
             let ident = match field.name {
                 Some(i) => i,
                 None => cx.span_bug(trait_span,
@@ -83,7 +83,7 @@ fn cs_clone(
                                             name))
             };
             cx.field_imm(field.span, ident, subcall(field))
-        });
+        }).collect::<Vec<_>>();
 
         if fields.is_empty() {
             // no fields, so construct like `None`
index 46e9dfb89abac3b2e608f3b7d59de1edc2ed3baf..94675f91e9d6c684d0d5ddf346d332a7b3443eb3 100644 (file)
@@ -56,14 +56,14 @@ fn default_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructur
                     if fields.is_empty() {
                         cx.expr_ident(trait_span, substr.type_ident)
                     } else {
-                        let exprs = fields.map(|sp| default_call(*sp));
+                        let exprs = fields.iter().map(|sp| default_call(*sp)).collect();
                         cx.expr_call_ident(trait_span, substr.type_ident, exprs)
                     }
                 }
                 Named(ref fields) => {
-                    let default_fields = fields.map(|&(ident, span)| {
+                    let default_fields = fields.iter().map(|&(ident, span)| {
                         cx.field_imm(span, ident, default_call(span))
-                    });
+                    }).collect();
                     cx.expr_struct_ident(trait_span, substr.type_ident, default_fields)
                 }
             }
index 5454d8912a52bb750f4b3b42529427192fbe2549..b7b4d3db64a47909d44bff6d2295fa8e1389b05e 100644 (file)
@@ -371,12 +371,12 @@ fn create_derived_impl(&self,
         ty_params.extend(generics.ty_params.iter().map(|ty_param| {
             // I don't think this can be moved out of the loop, since
             // a TyParamBound requires an ast id
-            let mut bounds =
+            let mut bounds: Vec<_> =
                 // extra restrictions on the generics parameters to the type being derived upon
-                self.additional_bounds.map(|p| {
+                self.additional_bounds.iter().map(|p| {
                     cx.typarambound(p.to_path(cx, self.span,
                                                   type_ident, generics))
-                });
+                }).collect();
             // require the current trait
             bounds.push(cx.typarambound(trait_path.clone()));
 
@@ -413,7 +413,7 @@ fn create_derived_impl(&self,
             ident,
             vec::append(vec!(attr), self.attributes.as_slice()),
             ast::ItemImpl(trait_generics, opt_trait_ref,
-                          self_type, methods.map(|x| *x)))
+                          self_type, methods))
     }
 
     fn expand_struct_def(&self,
@@ -421,7 +421,7 @@ fn expand_struct_def(&self,
                          struct_def: &StructDef,
                          type_ident: Ident,
                          generics: &Generics) -> @ast::Item {
-        let methods = self.methods.map(|method_def| {
+        let methods = self.methods.iter().map(|method_def| {
             let (explicit_self, self_args, nonself_args, tys) =
                 method_def.split_self_nonself_args(
                     cx, self, type_ident, generics);
@@ -447,7 +447,7 @@ fn expand_struct_def(&self,
                                      type_ident, generics,
                                      explicit_self, tys,
                                      body)
-        });
+        }).collect();
 
         self.create_derived_impl(cx, type_ident, generics, methods)
     }
@@ -457,7 +457,7 @@ fn expand_enum_def(&self,
                        enum_def: &EnumDef,
                        type_ident: Ident,
                        generics: &Generics) -> @ast::Item {
-        let methods = self.methods.map(|method_def| {
+        let methods = self.methods.iter().map(|method_def| {
             let (explicit_self, self_args, nonself_args, tys) =
                 method_def.split_self_nonself_args(cx, self,
                                                    type_ident, generics);
@@ -483,7 +483,7 @@ fn expand_enum_def(&self,
                                      type_ident, generics,
                                      explicit_self, tys,
                                      body)
-        });
+        }).collect();
 
         self.create_derived_impl(cx, type_ident, generics, methods)
     }
@@ -955,18 +955,18 @@ fn expand_static_enum_method_body(&self,
                                       self_args: &[@Expr],
                                       nonself_args: &[@Expr])
         -> @Expr {
-        let summary = enum_def.variants.map(|v| {
+        let summary = enum_def.variants.iter().map(|v| {
             let ident = v.node.name;
             let summary = match v.node.kind {
                 ast::TupleVariantKind(ref args) => {
-                    Unnamed(args.map(|va| trait_.set_expn_info(cx, va.ty.span)))
+                    Unnamed(args.iter().map(|va| trait_.set_expn_info(cx, va.ty.span)).collect())
                 }
                 ast::StructVariantKind(struct_def) => {
                     trait_.summarise_struct(cx, struct_def)
                 }
             };
             (ident, v.span, summary)
-        });
+        }).collect();
         self.call_substructure_method(cx, trait_, type_ident,
                                       self_args, nonself_args,
                                       &StaticEnum(enum_def, summary))
@@ -1027,10 +1027,10 @@ fn create_subpatterns(&self,
                           field_paths: Vec<ast::Path> ,
                           mutbl: ast::Mutability)
                           -> Vec<@ast::Pat> {
-        field_paths.map(|path| {
+        field_paths.iter().map(|path| {
             cx.pat(path.span,
                         ast::PatIdent(ast::BindByRef(mutbl), (*path).clone(), None))
-            })
+            }).collect()
     }
 
     fn create_struct_pattern(&self,
@@ -1200,12 +1200,14 @@ pub fn cs_same_method(f: |&mut ExtCtxt, Span, Vec<@Expr> | -> @Expr,
     match *substructure.fields {
         EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => {
             // call self_n.method(other_1_n, other_2_n, ...)
-            let called = all_fields.map(|field| {
+            let called = all_fields.iter().map(|field| {
                 cx.expr_method_call(field.span,
                                     field.self_,
                                     substructure.method_ident,
-                                    field.other.map(|e| cx.expr_addr_of(field.span, *e)))
-            });
+                                    field.other.iter()
+                                               .map(|e| cx.expr_addr_of(field.span, *e))
+                                               .collect())
+            }).collect();
 
             f(cx, trait_span, called)
         },
index a31759065ae8d3e30f1c78d663a48ee2894b0af6..6b824e52bb3d702036717b493bc13e15e040ae5b 100644 (file)
@@ -136,15 +136,15 @@ fn rand_thing(cx: &mut ExtCtxt,
                 if fields.is_empty() {
                     cx.expr_ident(trait_span, ctor_ident)
                 } else {
-                    let exprs = fields.map(|span| rand_call(cx, *span));
+                    let exprs = fields.iter().map(|span| rand_call(cx, *span)).collect();
                     cx.expr_call_ident(trait_span, ctor_ident, exprs)
                 }
             }
             Named(ref fields) => {
-                let rand_fields = fields.map(|&(ident, span)| {
+                let rand_fields = fields.iter().map(|&(ident, span)| {
                     let e = rand_call(cx, span);
                     cx.field_imm(span, ident, e)
-                });
+                }).collect();
                 cx.expr_struct_ident(trait_span, ctor_ident, rand_fields)
             }
         }
index bfdfba7ba782d6e5391ccb68d833f6ffa541253e..e58c024fcb01435fbbf6e83d89561a155ca9d53f 100644 (file)
@@ -69,9 +69,9 @@ pub fn to_path(&self,
                    self_ty: Ident,
                    self_generics: &Generics)
                    -> ast::Path {
-        let idents = self.path.map(|s| cx.ident_of(*s) );
+        let idents = self.path.iter().map(|s| cx.ident_of(*s)).collect();
         let lt = mk_lifetimes(cx, span, &self.lifetime);
-        let tys = self.params.map(|t| t.to_ty(cx, span, self_ty, self_generics));
+        let tys = self.params.iter().map(|t| t.to_ty(cx, span, self_ty, self_generics)).collect();
 
         cx.path_all(span, self.global, idents, lt, tys)
     }
@@ -150,7 +150,9 @@ pub fn to_ty(&self,
                 let ty = if fields.is_empty() {
                     ast::TyNil
                 } else {
-                    ast::TyTup(fields.map(|f| f.to_ty(cx, span, self_ty, self_generics)))
+                    ast::TyTup(fields.iter()
+                                     .map(|f| f.to_ty(cx, span, self_ty, self_generics))
+                                     .collect())
                 };
 
                 cx.ty(span, ty)
@@ -219,10 +221,10 @@ pub fn to_generics(&self,
                        self_ty: Ident,
                        self_generics: &Generics)
                        -> Generics {
-        let lifetimes = self.lifetimes.map(|lt| {
+        let lifetimes = self.lifetimes.iter().map(|lt| {
             cx.lifetime(span, cx.ident_of(*lt).name)
-        });
-        let ty_params = self.bounds.map(|t| {
+        }).collect();
+        let ty_params = self.bounds.iter().map(|t| {
             match t {
                 &(ref name, ref bounds) => {
                     mk_ty_param(cx,
@@ -233,7 +235,7 @@ pub fn to_generics(&self,
                                 self_generics)
                 }
             }
-        });
+        }).collect();
         mk_generics(lifetimes, ty_params)
     }
 }
index 9feae18689416c6c215a4a8fe5796669a94eddae..10692bd7f93f6a399786bfbccce84ec538421421 100644 (file)
@@ -73,14 +73,14 @@ fn zero_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure)
                     if fields.is_empty() {
                         cx.expr_ident(trait_span, substr.type_ident)
                     } else {
-                        let exprs = fields.map(|sp| zero_call(*sp));
+                        let exprs = fields.iter().map(|sp| zero_call(*sp)).collect();
                         cx.expr_call_ident(trait_span, substr.type_ident, exprs)
                     }
                 }
                 Named(ref fields) => {
-                    let zero_fields = fields.map(|&(ident, span)| {
+                    let zero_fields = fields.iter().map(|&(ident, span)| {
                         cx.field_imm(span, ident, zero_call(span))
-                    });
+                    }).collect();
                     cx.expr_struct_ident(trait_span, substr.type_ident, zero_fields)
                 }
             }
index e6494bf1aca8a8179d24699c8f26a86a19573a6f..aa9330bf65764ee631e7080e99fdf662dea0dbd2 100644 (file)
@@ -776,7 +776,7 @@ pub fn expand_block(blk: &Block, fld: &mut MacroExpander) -> P<Block> {
 
 // expand the elements of a block.
 pub fn expand_block_elts(b: &Block, fld: &mut MacroExpander) -> P<Block> {
-    let new_view_items = b.view_items.map(|x| fld.fold_view_item(x));
+    let new_view_items = b.view_items.iter().map(|x| fld.fold_view_item(x)).collect();
     let new_stmts =
         b.stmts.iter().flat_map(|x| {
             let renamed_stmt = {
index 14847aee8cf61dca641f4847a0bb2abe6ef06b41..6f8656f494dcb0370d9647e52ee989415038d255 100644 (file)
@@ -83,7 +83,7 @@ fn to_source(&self) -> ~str {
 
     impl<'a> ToSource for &'a [@ast::Item] {
         fn to_source(&self) -> ~str {
-            self.map(|i| i.to_source()).connect("\n\n")
+            self.iter().map(|i| i.to_source()).collect::<Vec<~str>>().connect("\n\n")
         }
     }
 
@@ -95,7 +95,7 @@ fn to_source(&self) -> ~str {
 
     impl<'a> ToSource for &'a [ast::Ty] {
         fn to_source(&self) -> ~str {
-            self.map(|i| i.to_source()).connect(", ")
+            self.iter().map(|i| i.to_source()).collect::<Vec<~str>>().connect(", ")
         }
     }
 
@@ -339,7 +339,7 @@ pub fn expand_quote_stmt(cx: &mut ExtCtxt,
 }
 
 fn ids_ext(strs: Vec<~str> ) -> Vec<ast::Ident> {
-    strs.map(|str| str_to_ident(*str))
+    strs.iter().map(|str| str_to_ident(*str)).collect()
 }
 
 fn id_ext(str: &str) -> ast::Ident {
index 8931fb0f443aa50474cac9635819ff8bb90d6374..4d8d816d225d4670a0e6b4bed76fe54ab6793ba5 100644 (file)
@@ -71,7 +71,9 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     -> base::MacResult {
     base::check_zero_tts(cx, sp, tts, "module_path!");
     let string = cx.mod_path()
+                   .iter()
                    .map(|x| token::get_ident(*x).get().to_str())
+                   .collect::<Vec<~str>>()
                    .connect("::");
     base::MRExpr(cx.expr_str(sp, token::intern_and_get_ident(string)))
 }
index ae537cc47826e90f48e4db867437c5316dbb9fdb..62999fb496afa1347cf76422c35a75a0b24e99b0 100644 (file)
@@ -373,7 +373,7 @@ pub fn parse(sess: &ParseSess,
         } else {
             if (bb_eis.len() > 0u && next_eis.len() > 0u)
                 || bb_eis.len() > 1u {
-                let nts = bb_eis.map(|ei| {
+                let nts = bb_eis.iter().map(|ei| {
                     match ei.elts.get(ei.idx).node {
                       MatchNonterminal(bind, name, _) => {
                         format!("{} ('{}')",
@@ -381,7 +381,7 @@ pub fn parse(sess: &ParseSess,
                                 token::get_ident(bind))
                       }
                       _ => fail!()
-                    } }).connect(" or ");
+                    } }).collect::<Vec<~str>>().connect(" or ");
                 return Error(sp, format!(
                     "local ambiguity: multiple parsing options: \
                      built-in NTs {} or {} other options.",
index 0f8c74f9ee0716be8d12fd5d9235782e3bbac725..03963219d52c403dc7149d142b1060c065942187 100644 (file)
@@ -41,7 +41,7 @@ fn fold_view_paths(&mut self, view_paths: &[@ViewPath]) -> Vec<@ViewPath> {
                 }
                 ViewPathList(ref path, ref path_list_idents, node_id) => {
                     ViewPathList(self.fold_path(path),
-                                 path_list_idents.map(|path_list_ident| {
+                                 path_list_idents.iter().map(|path_list_ident| {
                                     let id = self.new_id(path_list_ident.node
                                                                         .id);
                                     Spanned {
@@ -54,7 +54,7 @@ fn fold_view_paths(&mut self, view_paths: &[@ViewPath]) -> Vec<@ViewPath> {
                                         span: self.new_span(
                                             path_list_ident.span)
                                     }
-                                 }),
+                                 }).collect(),
                                  self.new_id(node_id))
                 }
             };
@@ -83,7 +83,7 @@ fn fold_struct_field(&mut self, sf: &StructField) -> StructField {
                 kind: sf.node.kind,
                 id: self.new_id(sf.node.id),
                 ty: self.fold_ty(sf.node.ty),
-                attrs: sf.node.attrs.map(|e| fold_attribute_(*e, self))
+                attrs: sf.node.attrs.iter().map(|e| fold_attribute_(*e, self)).collect()
             },
             span: self.new_span(sf.span)
         }
@@ -115,7 +115,7 @@ fn fold_stmt(&mut self, s: &Stmt) -> SmallVector<@Stmt> {
 
     fn fold_arm(&mut self, a: &Arm) -> Arm {
         Arm {
-            pats: a.pats.map(|x| self.fold_pat(*x)),
+            pats: a.pats.iter().map(|x| self.fold_pat(*x)).collect(),
             guard: a.guard.map(|x| self.fold_expr(x)),
             body: self.fold_expr(a.body),
         }
@@ -163,18 +163,18 @@ fn fold_ty(&mut self, t: P<Ty>) -> P<Ty> {
                     onceness: f.onceness,
                     bounds: fold_opt_bounds(&f.bounds, self),
                     decl: self.fold_fn_decl(f.decl),
-                    lifetimes: f.lifetimes.map(|l| fold_lifetime(l, self)),
+                    lifetimes: f.lifetimes.iter().map(|l| fold_lifetime(l, self)).collect(),
                 })
             }
             TyBareFn(ref f) => {
                 TyBareFn(@BareFnTy {
-                    lifetimes: f.lifetimes.map(|l| fold_lifetime(l, self)),
+                    lifetimes: f.lifetimes.iter().map(|l| fold_lifetime(l, self)).collect(),
                     purity: f.purity,
                     abis: f.abis,
                     decl: self.fold_fn_decl(f.decl)
                 })
             }
-            TyTup(ref tys) => TyTup(tys.map(|&ty| self.fold_ty(ty))),
+            TyTup(ref tys) => TyTup(tys.iter().map(|&ty| self.fold_ty(ty)).collect()),
             TyPath(ref path, ref bounds, id) => {
                 TyPath(self.fold_path(path),
                        fold_opt_bounds(bounds, self),
@@ -214,8 +214,8 @@ fn fold_variant(&mut self, v: &Variant) -> P<Variant> {
         let kind;
         match v.node.kind {
             TupleVariantKind(ref variant_args) => {
-                kind = TupleVariantKind(variant_args.map(|x|
-                    fold_variant_arg_(x, self)))
+                kind = TupleVariantKind(variant_args.iter().map(|x|
+                    fold_variant_arg_(x, self)).collect())
             }
             StructVariantKind(ref struct_def) => {
                 kind = StructVariantKind(@ast::StructDef {
@@ -226,7 +226,7 @@ fn fold_variant(&mut self, v: &Variant) -> P<Variant> {
             }
         }
 
-        let attrs = v.node.attrs.map(|x| fold_attribute_(*x, self));
+        let attrs = v.node.attrs.iter().map(|x| fold_attribute_(*x, self)).collect();
 
         let de = match v.node.disr_expr {
           Some(e) => Some(self.fold_expr(e)),
@@ -254,11 +254,11 @@ fn fold_path(&mut self, p: &Path) -> Path {
         ast::Path {
             span: self.new_span(p.span),
             global: p.global,
-            segments: p.segments.map(|segment| ast::PathSegment {
+            segments: p.segments.iter().map(|segment| ast::PathSegment {
                 identifier: self.fold_ident(segment.identifier),
-                lifetimes: segment.lifetimes.map(|l| fold_lifetime(l, self)),
-                types: segment.types.map(|&typ| self.fold_ty(typ)),
-            })
+                lifetimes: segment.lifetimes.iter().map(|l| fold_lifetime(l, self)).collect(),
+                types: segment.types.iter().map(|&typ| self.fold_ty(typ)).collect(),
+            }).collect()
         }
     }
 
@@ -323,7 +323,7 @@ fn fold_meta_item_<T: Folder>(mi: @MetaItem, fld: &mut T) -> @MetaItem {
             match mi.node {
                 MetaWord(ref id) => MetaWord((*id).clone()),
                 MetaList(ref id, ref mis) => {
-                    MetaList((*id).clone(), mis.map(|e| fold_meta_item_(*e, fld)))
+                    MetaList((*id).clone(), mis.iter().map(|e| fold_meta_item_(*e, fld)).collect())
                 }
                 MetaNameValue(ref id, ref s) => {
                     MetaNameValue((*id).clone(), (*s).clone())
@@ -402,7 +402,7 @@ fn maybe_fold_ident<T: Folder>(t: &token::Token, fld: &mut T) -> token::Token {
 
 pub fn noop_fold_fn_decl<T: Folder>(decl: &FnDecl, fld: &mut T) -> P<FnDecl> {
     P(FnDecl {
-        inputs: decl.inputs.map(|x| fold_arg_(x, fld)), // bad copy
+        inputs: decl.inputs.iter().map(|x| fold_arg_(x, fld)).collect(), // bad copy
         output: fld.fold_ty(decl.output),
         cf: decl.cf,
         variadic: decl.variadic
@@ -441,7 +441,7 @@ pub fn fold_lifetime<T: Folder>(l: &Lifetime, fld: &mut T) -> Lifetime {
 
 pub fn fold_lifetimes<T: Folder>(lts: &Vec<Lifetime>, fld: &mut T)
                                    -> Vec<Lifetime> {
-    lts.map(|l| fold_lifetime(l, fld))
+    lts.iter().map(|l| fold_lifetime(l, fld)).collect()
 }
 
 pub fn fold_opt_lifetime<T: Folder>(o_lt: &Option<Lifetime>, fld: &mut T)
@@ -456,7 +456,7 @@ pub fn fold_generics<T: Folder>(generics: &Generics, fld: &mut T) -> Generics {
 
 fn fold_struct_def<T: Folder>(struct_def: @StructDef, fld: &mut T) -> @StructDef {
     @ast::StructDef {
-        fields: struct_def.fields.map(|f| fold_struct_field(f, fld)),
+        fields: struct_def.fields.iter().map(|f| fold_struct_field(f, fld)).collect(),
         ctor_id: struct_def.ctor_id.map(|cid| fld.new_id(cid)),
     }
 }
@@ -474,7 +474,7 @@ fn fold_struct_field<T: Folder>(f: &StructField, fld: &mut T) -> StructField {
             kind: f.node.kind,
             id: fld.new_id(f.node.id),
             ty: fld.fold_ty(f.node.ty),
-            attrs: f.node.attrs.map(|a| fold_attribute_(*a, fld)),
+            attrs: f.node.attrs.iter().map(|a| fold_attribute_(*a, fld)).collect(),
         },
         span: fld.new_span(f.span),
     }
@@ -525,14 +525,14 @@ pub fn noop_fold_view_item<T: Folder>(vi: &ViewItem, folder: &mut T)
     };
     ViewItem {
         node: inner_view_item,
-        attrs: vi.attrs.map(|a| fold_attribute_(*a, folder)),
+        attrs: vi.attrs.iter().map(|a| fold_attribute_(*a, folder)).collect(),
         vis: vi.vis,
         span: folder.new_span(vi.span),
     }
 }
 
 pub fn noop_fold_block<T: Folder>(b: P<Block>, folder: &mut T) -> P<Block> {
-    let view_items = b.view_items.map(|x| folder.fold_view_item(x));
+    let view_items = b.view_items.iter().map(|x| folder.fold_view_item(x)).collect();
     let stmts = b.stmts.iter().flat_map(|s| folder.fold_stmt(*s).move_iter()).collect();
     P(Block {
         id: folder.new_id(b.id), // Needs to be first, for ast_map.
@@ -566,9 +566,9 @@ pub fn noop_fold_item_underscore<T: Folder>(i: &Item_, folder: &mut T) -> Item_
         ItemEnum(ref enum_definition, ref generics) => {
             ItemEnum(
                 ast::EnumDef {
-                    variants: enum_definition.variants.map(|&x| {
+                    variants: enum_definition.variants.iter().map(|&x| {
                         folder.fold_variant(x)
-                    }),
+                    }).collect(),
                 },
                 fold_generics(generics, folder))
         }
@@ -580,18 +580,18 @@ pub fn noop_fold_item_underscore<T: Folder>(i: &Item_, folder: &mut T) -> Item_
             ItemImpl(fold_generics(generics, folder),
                      ifce.as_ref().map(|p| fold_trait_ref(p, folder)),
                      folder.fold_ty(ty),
-                     methods.map(|x| folder.fold_method(*x))
+                     methods.iter().map(|x| folder.fold_method(*x)).collect()
             )
         }
         ItemTrait(ref generics, ref traits, ref methods) => {
-            let methods = methods.map(|method| {
+            let methods = methods.iter().map(|method| {
                 match *method {
                     Required(ref m) => Required(folder.fold_type_method(m)),
                     Provided(method) => Provided(folder.fold_method(method))
                 }
-            });
+            }).collect();
             ItemTrait(fold_generics(generics, folder),
-                      traits.map(|p| fold_trait_ref(p, folder)),
+                      traits.iter().map(|p| fold_trait_ref(p, folder)).collect(),
                       methods)
         }
         ItemMac(ref m) => ItemMac(folder.fold_mac(m)),
@@ -602,7 +602,7 @@ pub fn noop_fold_type_method<T: Folder>(m: &TypeMethod, fld: &mut T) -> TypeMeth
     TypeMethod {
         id: fld.new_id(m.id), // Needs to be first, for ast_map.
         ident: fld.fold_ident(m.ident),
-        attrs: m.attrs.map(|a| fold_attribute_(*a, fld)),
+        attrs: m.attrs.iter().map(|a| fold_attribute_(*a, fld)).collect(),
         purity: m.purity,
         decl: fld.fold_fn_decl(m.decl),
         generics: fold_generics(&m.generics, fld),
@@ -623,8 +623,8 @@ pub fn noop_fold_mod<T: Folder>(m: &Mod, folder: &mut T) -> Mod {
 pub fn noop_fold_crate<T: Folder>(c: Crate, folder: &mut T) -> Crate {
     Crate {
         module: folder.fold_mod(&c.module),
-        attrs: c.attrs.map(|x| fold_attribute_(*x, folder)),
-        config: c.config.map(|x| fold_meta_item_(*x, folder)),
+        attrs: c.attrs.iter().map(|x| fold_attribute_(*x, folder)).collect(),
+        config: c.config.iter().map(|x| fold_meta_item_(*x, folder)).collect(),
         span: folder.new_span(c.span),
     }
 }
@@ -643,7 +643,7 @@ pub fn noop_fold_item<T: Folder>(i: &Item, folder: &mut T) -> SmallVector<@Item>
     SmallVector::one(@Item {
         id: id,
         ident: folder.fold_ident(ident),
-        attrs: i.attrs.map(|e| fold_attribute_(*e, folder)),
+        attrs: i.attrs.iter().map(|e| fold_attribute_(*e, folder)).collect(),
         node: node,
         vis: i.vis,
         span: folder.new_span(i.span)
@@ -654,11 +654,11 @@ pub fn noop_fold_foreign_item<T: Folder>(ni: &ForeignItem, folder: &mut T) -> @F
     @ForeignItem {
         id: folder.new_id(ni.id), // Needs to be first, for ast_map.
         ident: folder.fold_ident(ni.ident),
-        attrs: ni.attrs.map(|x| fold_attribute_(*x, folder)),
+        attrs: ni.attrs.iter().map(|x| fold_attribute_(*x, folder)).collect(),
         node: match ni.node {
             ForeignItemFn(ref fdec, ref generics) => {
                 ForeignItemFn(P(FnDecl {
-                    inputs: fdec.inputs.map(|a| fold_arg_(a, folder)),
+                    inputs: fdec.inputs.iter().map(|a| fold_arg_(a, folder)).collect(),
                     output: folder.fold_ty(fdec.output),
                     cf: fdec.cf,
                     variadic: fdec.variadic
@@ -677,7 +677,7 @@ pub fn noop_fold_method<T: Folder>(m: &Method, folder: &mut T) -> @Method {
     @Method {
         id: folder.new_id(m.id), // Needs to be first, for ast_map.
         ident: folder.fold_ident(m.ident),
-        attrs: m.attrs.map(|a| fold_attribute_(*a, folder)),
+        attrs: m.attrs.iter().map(|a| fold_attribute_(*a, folder)).collect(),
         generics: fold_generics(&m.generics, folder),
         explicit_self: folder.fold_explicit_self(&m.explicit_self),
         purity: m.purity,
@@ -700,28 +700,28 @@ pub fn noop_fold_pat<T: Folder>(p: @Pat, folder: &mut T) -> @Pat {
         PatLit(e) => PatLit(folder.fold_expr(e)),
         PatEnum(ref pth, ref pats) => {
             PatEnum(folder.fold_path(pth),
-                    pats.as_ref().map(|pats| pats.map(|x| folder.fold_pat(*x))))
+                    pats.as_ref().map(|pats| pats.iter().map(|x| folder.fold_pat(*x)).collect()))
         }
         PatStruct(ref pth, ref fields, etc) => {
             let pth_ = folder.fold_path(pth);
-            let fs = fields.map(|f| {
+            let fs = fields.iter().map(|f| {
                 ast::FieldPat {
                     ident: f.ident,
                     pat: folder.fold_pat(f.pat)
                 }
-            });
+            }).collect();
             PatStruct(pth_, fs, etc)
         }
-        PatTup(ref elts) => PatTup(elts.map(|x| folder.fold_pat(*x))),
+        PatTup(ref elts) => PatTup(elts.iter().map(|x| folder.fold_pat(*x)).collect()),
         PatUniq(inner) => PatUniq(folder.fold_pat(inner)),
         PatRegion(inner) => PatRegion(folder.fold_pat(inner)),
         PatRange(e1, e2) => {
             PatRange(folder.fold_expr(e1), folder.fold_expr(e2))
         },
         PatVec(ref before, ref slice, ref after) => {
-            PatVec(before.map(|x| folder.fold_pat(*x)),
+            PatVec(before.iter().map(|x| folder.fold_pat(*x)).collect(),
                     slice.map(|x| folder.fold_pat(x)),
-                    after.map(|x| folder.fold_pat(*x)))
+                    after.iter().map(|x| folder.fold_pat(*x)).collect())
         }
     };
 
@@ -741,21 +741,21 @@ pub fn noop_fold_expr<T: Folder>(e: @Expr, folder: &mut T) -> @Expr {
             ExprBox(folder.fold_expr(p), folder.fold_expr(e))
         }
         ExprVec(ref exprs, mutt) => {
-            ExprVec(exprs.map(|&x| folder.fold_expr(x)), mutt)
+            ExprVec(exprs.iter().map(|&x| folder.fold_expr(x)).collect(), mutt)
         }
         ExprRepeat(expr, count, mutt) => {
             ExprRepeat(folder.fold_expr(expr), folder.fold_expr(count), mutt)
         }
-        ExprTup(ref elts) => ExprTup(elts.map(|x| folder.fold_expr(*x))),
+        ExprTup(ref elts) => ExprTup(elts.iter().map(|x| folder.fold_expr(*x)).collect()),
         ExprCall(f, ref args) => {
             ExprCall(folder.fold_expr(f),
-                     args.map(|&x| folder.fold_expr(x)))
+                     args.iter().map(|&x| folder.fold_expr(x)).collect())
         }
         ExprMethodCall(i, ref tps, ref args) => {
             ExprMethodCall(
                 folder.fold_ident(i),
-                tps.map(|&x| folder.fold_ty(x)),
-                args.map(|&x| folder.fold_expr(x)))
+                tps.iter().map(|&x| folder.fold_ty(x)).collect(),
+                args.iter().map(|&x| folder.fold_expr(x)).collect())
         }
         ExprBinary(binop, lhs, rhs) => {
             ExprBinary(binop,
@@ -790,7 +790,7 @@ pub fn noop_fold_expr<T: Folder>(e: @Expr, folder: &mut T) -> @Expr {
         }
         ExprMatch(expr, ref arms) => {
             ExprMatch(folder.fold_expr(expr),
-                      arms.map(|x| folder.fold_arm(x)))
+                      arms.iter().map(|x| folder.fold_arm(x)).collect())
         }
         ExprFnBlock(decl, body) => {
             ExprFnBlock(folder.fold_fn_decl(decl), folder.fold_block(body))
@@ -810,7 +810,7 @@ pub fn noop_fold_expr<T: Folder>(e: @Expr, folder: &mut T) -> @Expr {
         ExprField(el, id, ref tys) => {
             ExprField(folder.fold_expr(el),
                       folder.fold_ident(id),
-                      tys.map(|&x| folder.fold_ty(x)))
+                      tys.iter().map(|&x| folder.fold_ty(x)).collect())
         }
         ExprIndex(el, er) => {
             ExprIndex(folder.fold_expr(el), folder.fold_expr(er))
@@ -823,19 +823,19 @@ pub fn noop_fold_expr<T: Folder>(e: @Expr, folder: &mut T) -> @Expr {
         }
         ExprInlineAsm(ref a) => {
             ExprInlineAsm(InlineAsm {
-                inputs: a.inputs.map(|&(ref c, input)| {
+                inputs: a.inputs.iter().map(|&(ref c, input)| {
                     ((*c).clone(), folder.fold_expr(input))
-                }),
-                outputs: a.outputs.map(|&(ref c, out)| {
+                }).collect(),
+                outputs: a.outputs.iter().map(|&(ref c, out)| {
                     ((*c).clone(), folder.fold_expr(out))
-                }),
+                }).collect(),
                 .. (*a).clone()
             })
         }
         ExprMac(ref mac) => ExprMac(folder.fold_mac(mac)),
         ExprStruct(ref path, ref fields, maybe_expr) => {
             ExprStruct(folder.fold_path(path),
-                       fields.map(|x| fold_field_(*x, folder)),
+                       fields.iter().map(|x| fold_field_(*x, folder)).collect(),
                        maybe_expr.map(|x| folder.fold_expr(x)))
         },
         ExprParen(ex) => ExprParen(folder.fold_expr(ex))
index 53586a665133cf0de64860e9422c8463251db11f..7d337e9c0785209751931b4e6c27b18edb7cf0da 100644 (file)
@@ -104,7 +104,7 @@ fn horizontal_trim(lines: Vec<~str> ) -> Vec<~str> {
         }
 
         if can_trim {
-            lines.map(|line| line.slice(i + 1, line.len()).to_owned())
+            lines.iter().map(|line| line.slice(i + 1, line.len()).to_owned()).collect()
         } else {
             lines
         }
index b62990036e5e3267387db445bc54523dbac99d9c..eb7b3162b52d6f544b4ef3284c9ddf3a7ecf42a0 100644 (file)
@@ -70,7 +70,7 @@ pub fn string_to_pat(source_str: ~str) -> @ast::Pat {
 
 // convert a vector of strings to a vector of ast::Ident's
 pub fn strs_to_idents(ids: Vec<&str> ) -> Vec<ast::Ident> {
-    ids.map(|u| token::str_to_ident(*u))
+    ids.iter().map(|u| token::str_to_ident(*u)).collect()
 }
 
 // does the given string match the pattern? whitespace in the first string
index 2dc88d8ff02242bd39697186d682f6d2268d1b1b..16b743baa3d1323e901e236b7f7fb32916ebd3e0 100644 (file)
@@ -10,5 +10,5 @@
 
 
 pub unsafe fn f(xs: Vec<int> ) {
-    xs.map(|_x| { unsafe fn q() { fail!(); } });
+    xs.iter().map(|_x| { unsafe fn q() { fail!(); } }).collect::<Vec<()>>();
 }
index bb0ea68e757e8a23d94cecc362f757ac18b9a45f..8529947245846caf72da3102b26ea6c67d42e551 100644 (file)
@@ -99,7 +99,9 @@ fn add_pt(&mut self, x: int, y: int) {
 impl fmt::Show for AsciiArt {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         // Convert each line into a string.
-        let lines = self.lines.map(|line| str::from_chars(line.as_slice()));
+        let lines = self.lines.iter()
+                              .map(|line| str::from_chars(line.as_slice()))
+                              .collect::<Vec<~str>>();
 
         // Concatenate the lines together using a new-line.
         write!(f.buf, "{}", lines.connect("\n"))