~~~~
let mut max = 0;
-[1, 2, 3].map(|x| if *x > max { max = *x });
+let f = |x: int| if x > max { max = x };
+for x in [1, 2, 3].iter() {
+ f(*x);
+}
~~~~
Stack closures are very efficient because their environment is allocated on the call stack
// Next, convert each of the byte strings into a pointer. This is
// technically unsafe as the caller could leak these pointers out of our
// scope.
- let mut ptrs = tmps.map(|tmp| tmp.with_ref(|buf| buf));
+ let mut ptrs: Vec<_> = tmps.iter().map(|tmp| tmp.with_ref(|buf| buf)).collect();
// Finally, make sure we add a null pointer.
ptrs.push(ptr::null());
}
// Once again, this is unsafe.
- let mut ptrs = tmps.map(|tmp| tmp.with_ref(|buf| buf));
+ let mut ptrs: Vec<*libc::c_char> = tmps.iter()
+ .map(|tmp| tmp.with_ref(|buf| buf))
+ .collect();
ptrs.push(ptr::null());
cb(ptrs.as_ptr() as *c_void)
}
// Internalize everything but the reachable symbols of the current module
- let cstrs = reachable.map(|s| s.to_c_str());
- let arr = cstrs.map(|c| c.with_ref(|p| p));
+ let cstrs: Vec<::std::c_str::CString> = reachable.iter().map(|s| s.to_c_str()).collect();
+ let arr: Vec<*i8> = cstrs.iter().map(|c| c.with_ref(|p| p)).collect();
let ptr = arr.as_ptr();
unsafe {
llvm::LLVMRustRunRestrictionPass(llmod, ptr as **libc::c_char,
NoDebugInfo
};
- let addl_lib_search_paths = matches.opt_strs("L").map(|s| {
+ let addl_lib_search_paths = matches.opt_strs("L").iter().map(|s| {
Path::new(s.as_slice())
- }).move_iter().collect();
+ }).collect();
let cfg = parse_cfgspecs(matches.opt_strs("cfg").move_iter().collect());
let test = matches.opt_present("test");
}
pub fn types_to_str(&self, tys: &[Type]) -> ~str {
- let strs = tys.map(|t| self.type_to_str(*t));
+ let strs: Vec<~str> = tys.iter().map(|t| self.type_to_str(*t)).collect();
format!("[{}]", strs.connect(","))
}
pub fn rust_path() -> Vec<Path> {
let mut env_rust_path: Vec<Path> = match get_rust_path() {
Some(env_path) => {
- let env_path_components: Vec<&str> =
- env_path.split_str(PATH_ENTRY_SEPARATOR).collect();
- env_path_components.map(|&s| Path::new(s))
+ let env_path_components =
+ env_path.split_str(PATH_ENTRY_SEPARATOR);
+ env_path_components.map(|s| Path::new(s)).collect()
}
None => Vec::new()
};
fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: Vec<@Pat> ) {
assert!((!pats.is_empty()));
- let ext = match is_useful(cx, &pats.map(|p| vec!(*p)), [wild()]) {
+ let ext = match is_useful(cx, &pats.iter().map(|p| vec!(*p)).collect(), [wild()]) {
not_useful => {
// This is good, wildcard pattern isn't reachable
return;
DefVariant(_, variant_id, _) => {
if variant(variant_id) == *ctor_id {
let struct_fields = ty::lookup_struct_fields(cx.tcx, variant_id);
- let args = struct_fields.map(|sf| {
+ let args = struct_fields.iter().map(|sf| {
match pattern_fields.iter().find(|f| f.ident.name == sf.name) {
Some(f) => f.pat,
_ => wild()
}
- });
+ }).collect();
Some(vec::append(args, r.tail()))
} else {
None
path: &Path,
namespace: Namespace)
-> Option<(Def, LastPrivate)> {
- let module_path_idents = path.segments.init().map(|ps| ps.identifier);
+ let module_path_idents = path.segments.init().iter()
+ .map(|ps| ps.identifier)
+ .collect::<Vec<_>>();
let containing_module;
let last_private;
match self.resolve_module_path(self.current_module,
- module_path_idents,
+ module_path_idents.as_slice(),
UseLexicalScope,
path.span,
PathSearch) {
Failed => {
let msg = format!("use of undeclared module `{}`",
- self.idents_to_str(module_path_idents));
+ self.idents_to_str(module_path_idents.as_slice()));
self.resolve_error(path.span, msg);
return None;
}
path: &Path,
namespace: Namespace)
-> Option<(Def, LastPrivate)> {
- let module_path_idents = path.segments.init().map(|ps| ps.identifier);
+ let module_path_idents = path.segments.init().iter()
+ .map(|ps| ps.identifier)
+ .collect::<Vec<_>>();
let root_module = self.graph_root.get_module();
let containing_module;
let last_private;
match self.resolve_module_path_from_root(root_module,
- module_path_idents,
+ module_path_idents.as_slice(),
0,
path.span,
PathSearch,
LastMod(AllPublic)) {
Failed => {
let msg = format!("use of undeclared module `::{}`",
- self.idents_to_str(module_path_idents));
+ self.idents_to_str(module_path_idents.as_slice()));
self.resolve_error(path.span, msg);
return None;
}
referenced_idents={:?} \
early_count={}",
n,
- referenced_idents.map(lifetime_show),
+ referenced_idents.iter().map(lifetime_show).collect::<Vec<token::InternedString>>(),
early_count);
if referenced_idents.is_empty() {
let scope1 = LateScope(n, &generics.lifetimes, scope);
fn subst_spanned(&self, tcx: &ty::ctxt,
substs: &ty::substs,
span: Option<Span>) -> Vec<T> {
- self.map(|t| t.subst_spanned(tcx, substs, span))
+ self.iter().map(|t| t.subst_spanned(tcx, substs, span)).collect()
}
}
impl<T:Subst> Subst for Rc<T> {
ty::substs {
regions: self.regions.subst_spanned(tcx, substs, span),
self_ty: self.self_ty.map(|typ| typ.subst_spanned(tcx, substs, span)),
- tps: self.tps.map(|typ| typ.subst_spanned(tcx, substs, span))
+ tps: self.tps.iter().map(|typ| typ.subst_spanned(tcx, substs, span)).collect()
}
}
}
let pat_ty = node_id_type(bcx, pat_id);
let pat_repr = adt::represent_type(bcx.ccx(), pat_ty);
expr::with_field_tys(tcx, pat_ty, Some(pat_id), |discr, field_tys| {
- let rec_vals = rec_fields.map(|field_name| {
+ let rec_vals = rec_fields.iter().map(|field_name| {
let ix = ty::field_idx_strict(tcx, field_name.name, field_tys);
adt::trans_field_ptr(bcx, pat_repr, val, discr, ix)
- });
+ }).collect();
compile_submatch(
bcx,
enter_rec_or_struct(bcx,
}
ty::ty_struct(def_id, ref substs) => {
let fields = ty::lookup_struct_fields(cx.tcx(), def_id);
- let mut ftys = fields.map(|field| {
+ let mut ftys = fields.iter().map(|field| {
ty::lookup_field_type(cx.tcx(), def_id, field.id, substs)
- });
+ }).collect::<Vec<_>>();
let packed = ty::lookup_packed(cx.tcx(), def_id);
let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag();
if dtor { ftys.push(ty::mk_bool()); }
if cases.iter().all(|c| c.tys.len() == 0) {
// All bodies empty -> intlike
- let discrs = cases.map(|c| c.discr);
+ let discrs: Vec<u64> = cases.iter().map(|c| c.discr).collect();
let bounds = IntBounds {
ulo: *discrs.iter().min().unwrap(),
uhi: *discrs.iter().max().unwrap(),
let bounds = IntBounds { ulo: 0, uhi: (cases.len() - 1) as u64,
slo: 0, shi: (cases.len() - 1) as i64 };
let ity = range_to_inttype(cx, hint, &bounds);
- return General(ity, cases.map(|c| {
+ return General(ity, cases.iter().map(|c| {
let discr = vec!(ty_of_inttype(ity));
mk_struct(cx,
vec::append(discr, c.tys.as_slice()).as_slice(),
false)
- }))
+ }).collect())
}
_ => cx.sess().bug("adt::represent_type called on non-ADT type")
}
}
fn get_cases(tcx: &ty::ctxt, def_id: ast::DefId, substs: &ty::substs) -> Vec<Case> {
- ty::enum_variants(tcx, def_id).map(|vi| {
- let arg_tys = vi.args.map(|&raw_ty| {
+ ty::enum_variants(tcx, def_id).iter().map(|vi| {
+ let arg_tys = vi.args.iter().map(|&raw_ty| {
ty::subst(tcx, substs, raw_ty)
- });
+ }).collect();
Case { discr: vi.disr_val, tys: arg_tys }
- })
+ }).collect()
}
fn mk_struct(cx: &CrateContext, tys: &[ty::t], packed: bool) -> Struct {
- let lltys = tys.map(|&ty| type_of::sizing_type_of(cx, ty));
- let llty_rec = Type::struct_(cx, lltys, packed);
+ let lltys = tys.iter().map(|&ty| type_of::sizing_type_of(cx, ty)).collect::<Vec<_>>();
+ let llty_rec = Type::struct_(cx, lltys.as_slice(), packed);
Struct {
size: machine::llsize_of_alloc(cx, llty_rec) /*bad*/as u64,
align: machine::llalign_of_min(cx, llty_rec) /*bad*/as u64,
fn struct_llfields(cx: &CrateContext, st: &Struct, sizing: bool) -> Vec<Type> {
if sizing {
- st.fields.map(|&ty| type_of::sizing_type_of(cx, ty))
+ st.fields.iter().map(|&ty| type_of::sizing_type_of(cx, ty)).collect()
} else {
- st.fields.map(|&ty| type_of::type_of(cx, ty))
+ st.fields.iter().map(|&ty| type_of::type_of(cx, ty)).collect()
}
}
let ccx = bcx.ccx();
let val = if needs_cast {
- let fields = st.fields.map(|&ty| type_of::type_of(ccx, ty));
+ let fields = st.fields.iter().map(|&ty| type_of::type_of(ccx, ty)).collect::<Vec<_>>();
let real_ty = Type::struct_(ccx, fields.as_slice(), st.packed);
PointerCast(bcx, val, real_ty.ptr_to())
} else {
vals).as_slice(),
false)
} else {
- let vals = nonnull.fields.map(|&ty| {
+ let vals = nonnull.fields.iter().map(|&ty| {
// Always use null even if it's not the `ptrfield`th
// field; see #8506.
C_null(type_of::sizing_type_of(ccx, ty))
- }).move_iter().collect::<Vec<ValueRef> >();
+ }).collect::<Vec<ValueRef>>();
C_struct(ccx, build_const_struct(ccx,
nonnull,
vals.as_slice()).as_slice(),
let temp_scope = fcx.push_custom_cleanup_scope();
// Prepare the output operands
- let outputs = ia.outputs.map(|&(ref c, out)| {
+ let outputs = ia.outputs.iter().map(|&(ref c, out)| {
constraints.push((*c).clone());
let out_datum = unpack_datum!(bcx, expr::trans(bcx, out));
output_types.push(type_of::type_of(bcx.ccx(), out_datum.ty));
out_datum.val
- });
+ }).collect::<Vec<_>>();
// Now the input operands
- let inputs = ia.inputs.map(|&(ref c, input)| {
+ let inputs = ia.inputs.iter().map(|&(ref c, input)| {
constraints.push((*c).clone());
let in_datum = unpack_datum!(bcx, expr::trans(bcx, input));
cleanup::CustomScope(temp_scope),
callee::DontAutorefArg)
})
- });
+ }).collect::<Vec<_>>();
// no failure occurred preparing operands, no need to cleanup
fcx.pop_custom_cleanup_scope(temp_scope);
- let mut constraints = constraints.map(|s| s.get().to_str()).connect(",");
+ let mut constraints = constraints.iter()
+ .map(|s| s.get().to_str())
+ .collect::<Vec<~str>>()
+ .connect(",");
let mut clobbers = getClobbers();
if !ia.clobbers.get().is_empty() && !clobbers.is_empty() {
terminate(cx, "Invoke");
debug!("Invoke({} with arguments ({}))",
cx.val_to_str(fn_),
- args.map(|a| cx.val_to_str(*a)).connect(", "));
+ args.iter().map(|a| cx.val_to_str(*a)).collect::<Vec<~str>>().connect(", "));
B(cx).invoke(fn_, args, then, catch, attributes)
}
let alignstack = if alignstack { lib::llvm::True }
else { lib::llvm::False };
- let argtys = inputs.map(|v| {
+ let argtys = inputs.iter().map(|v| {
debug!("Asm Input Type: {:?}", self.ccx.tn.val_to_str(*v));
val_ty(*v)
- });
+ }).collect::<Vec<_>>();
debug!("Asm Output Type: {:?}", self.ccx.tn.type_to_str(output));
- let fty = Type::func(argtys, &output);
+ let fty = Type::func(argtys.as_slice(), &output);
unsafe {
let v = llvm::LLVMInlineAsm(
fty.to_ref(), asm, cons, volatile, alignstack, dia as c_uint);
debug!("Call {} with args ({})",
self.ccx.tn.val_to_str(llfn),
- args.map(|&v| self.ccx.tn.val_to_str(v)).connect(", "));
+ args.iter()
+ .map(|&v| self.ccx.tn.val_to_str(v))
+ .collect::<Vec<~str>>()
+ .connect(", "));
unsafe {
let v = llvm::LLVMBuildCall(self.llbuilder, llfn, args.as_ptr(),
if !params.iter().all(|t| !ty::type_needs_infer(*t)) {
bcx.sess().bug(
format!("type parameters for node {:?} include inference types: {}",
- node, params.map(|t| bcx.ty_to_str(*t)).connect(",")));
+ node, params.iter()
+ .map(|t| bcx.ty_to_str(*t))
+ .collect::<Vec<~str>>()
+ .connect(",")));
}
match bcx.fcx.param_substs {
source_locations_enabled: Cell::new(false),
};
- let arg_pats = fn_decl.inputs.map(|arg_ref| arg_ref.pat);
+ let arg_pats = fn_decl.inputs.iter().map(|arg_ref| arg_ref.pat).collect::<Vec<_>>();
populate_scope_map(cx,
arg_pats.as_slice(),
top_level_block,
impl StructMemberDescriptionFactory {
fn create_member_descriptions(&self, cx: &CrateContext)
-> Vec<MemberDescription> {
- self.fields.map(|field| {
+ self.fields.iter().map(|field| {
let name = if field.ident.name == special_idents::unnamed_field.name {
~""
} else {
type_metadata: type_metadata(cx, field.mt.ty, self.span),
offset: ComputedMemberOffset,
}
- })
+ }).collect()
}
}
impl TupleMemberDescriptionFactory {
fn create_member_descriptions(&self, cx: &CrateContext)
-> Vec<MemberDescription> {
- self.component_types.map(|&component_type| {
+ self.component_types.iter().map(|&component_type| {
MemberDescription {
name: ~"",
llvm_type: type_of::type_of(cx, component_type),
type_metadata: type_metadata(cx, component_type, self.span),
offset: ComputedMemberOffset,
}
- })
+ }).collect()
}
}
-> (DICompositeType, Type, MemberDescriptionFactory) {
let variant_llvm_type =
Type::struct_(cx, struct_def.fields
+ .iter()
.map(|&t| type_of::type_of(cx, t))
+ .collect::<Vec<_>>()
.as_slice(),
struct_def.packed);
// Could some consistency checks here: size, align, field count, discr type
variant_definition_span);
// Get the argument names from the enum variant info
- let mut arg_names = match variant_info.arg_names {
+ let mut arg_names: Vec<_> = match variant_info.arg_names {
Some(ref names) => {
- names.map(|ident| token::get_ident(*ident).get().to_str())
+ names.iter().map(|ident| token::get_ident(*ident).get().to_str()).collect()
}
- None => variant_info.args.map(|_| ~"")
+ None => variant_info.args.iter().map(|_| ~"").collect()
};
// If this is not a univariant enum, there is also the (unnamed) discriminant field
with_field_tys(tcx, ty, Some(id), |discr, field_tys| {
let mut need_base = slice::from_elem(field_tys.len(), true);
- let numbered_fields = fields.map(|field| {
+ let numbered_fields = fields.iter().map(|field| {
let opt_pos =
field_tys.iter().position(|field_ty|
field_ty.ident.name == field.ident.node.name);
"Couldn't find field in struct type")
}
}
- });
+ }).collect::<Vec<_>>();
let optbase = match base {
Some(base_expr) => {
let mut leftovers = Vec::new();
};
let repr = adt::represent_type(bcx.ccx(), ty);
- trans_adt(bcx, repr, discr, numbered_fields, optbase, dest)
+ trans_adt(bcx, repr, discr, numbered_fields.as_slice(), optbase, dest)
})
}
ty.repr(ccx.tcx()),
ccx.tn.types_to_str(llsig.llarg_tys.as_slice()),
ccx.tn.type_to_str(llsig.llret_ty),
- ccx.tn.types_to_str(fn_ty.arg_tys.map(|t| t.ty).as_slice()),
+ ccx.tn.types_to_str(fn_ty.arg_tys.iter().map(|t| t.ty).collect::<Vec<_>>().as_slice()),
ccx.tn.type_to_str(fn_ty.ret_ty.ty),
ret_def);
impl<'a, T:LlvmRepr> LlvmRepr for &'a [T] {
fn llrepr(&self, ccx: &CrateContext) -> ~str {
- let reprs = self.map(|t| t.llrepr(ccx));
+ let reprs: Vec<~str> = self.iter().map(|t| t.llrepr(ccx)).collect();
format!("[{}]", reprs.connect(","))
}
}
ty::populate_implementations_for_trait_if_necessary(bcx.tcx(), trt_id);
let trait_method_def_ids = ty::trait_method_def_ids(tcx, trt_id);
- trait_method_def_ids.map(|method_def_id| {
+ trait_method_def_ids.iter().map(|method_def_id| {
let ident = ty::method(tcx, *method_def_id).ident;
// The substitutions we have are on the impl, so we grab
// the method type from the impl to substitute into.
} else {
trans_fn_ref_with_vtables(bcx, m_id, ExprId(0), substs, Some(vtables))
}
- })
+ }).collect()
}
pub fn trans_trait_cast<'a>(bcx: &'a Block<'a>,
vts.repr(ccx.tcx()), substs.tys.repr(ccx.tcx()));
let vts_iter = substs.self_vtables.iter().chain(vts.iter());
vts_iter.zip(substs_iter).map(|(vtable, subst)| {
- let v = vtable.map(|vt| meth::vtable_id(ccx, vt));
+ let v = vtable.iter().map(|vt| meth::vtable_id(ccx, vt)).collect::<Vec<_>>();
(*subst, if !v.is_empty() { Some(@v) } else { None })
}).collect()
}
binder_id: ast::NodeId,
input_tys: &[ty::t],
output: ty::t) -> t {
- let input_args = input_tys.map(|t| *t);
+ let input_args = input_tys.iter().map(|t| *t).collect();
mk_bare_fn(cx,
BareFnTy {
purity: ast::ImpureFn,
abis: AbiSet::Rust(),
sig: FnSig {
binder_id: binder_id,
- inputs: Vec::from_slice(input_args),
+ inputs: input_args,
output: output,
variadic: false
}
// Returns a vec of all the input and output types of fty.
pub fn tys_in_fn_sig(sig: &FnSig) -> Vec<t> {
- vec::append_one(sig.inputs.map(|a| *a), sig.output)
+ vec::append_one(sig.inputs.iter().map(|a| *a).collect(), sig.output)
}
// Type accessors for AST nodes
tcx.sess.bug(format!(
"no field named `{}` found in the list of fields `{:?}`",
token::get_name(name),
- fields.map(|f| token::get_ident(f.ident).get().to_str())));
+ fields.iter().map(|f| token::get_ident(f.ident).get().to_str()).collect::<Vec<~str>>()));
}
pub fn method_idx(id: ast::Ident, meths: &[@Method]) -> Option<uint> {
pub fn trait_ref_supertraits(cx: &ctxt, trait_ref: &ty::TraitRef) -> Vec<@TraitRef> {
let supertrait_refs = trait_supertraits(cx, trait_ref.def_id);
- supertrait_refs.map(
- |supertrait_ref| supertrait_ref.subst(cx, &trait_ref.substs))
+ supertrait_refs.iter().map(
+ |supertrait_ref| supertrait_ref.subst(cx, &trait_ref.substs)).collect()
}
fn lookup_locally_or_in_crate_store<V:Clone>(
Some(&methods) => methods,
None => {
let def_ids = ty::trait_method_def_ids(cx, trait_did);
- let methods = @def_ids.map(|d| ty::method(cx, *d));
+ let methods = @def_ids.iter().map(|d| ty::method(cx, *d)).collect();
trait_methods.insert(trait_did, methods);
methods
}
match ast_variant.node.kind {
ast::TupleVariantKind(ref args) => {
let arg_tys = if args.len() > 0 {
- ty_fn_args(ctor_ty).map(|a| *a)
+ ty_fn_args(ctor_ty).iter().map(|a| *a).collect()
} else {
Vec::new()
};
assert!(fields.len() > 0);
- let arg_tys = ty_fn_args(ctor_ty).map(|a| *a);
+ let arg_tys = ty_fn_args(ctor_ty).iter().map(|a| *a).collect();
let arg_names = fields.iter().map(|field| {
match field.node.kind {
NamedField(ident, _) => ident,
// this. Takes a list of substs with which to instantiate field types.
pub fn struct_fields(cx: &ctxt, did: ast::DefId, substs: &substs)
-> Vec<field> {
- lookup_struct_fields(cx, did).map(|f| {
+ lookup_struct_fields(cx, did).iter().map(|f| {
field {
// FIXME #6993: change type of field to Name and get rid of new()
ident: ast::Ident::new(f.name),
mutbl: MutImmutable
}
}
- })
+ }).collect()
}
pub fn is_binopable(cx: &ctxt, ty: t, op: ast::BinOp) -> bool {
let expected_num_region_params = decl_generics.region_param_defs().len();
let supplied_num_region_params = path.segments.last().unwrap().lifetimes.len();
let regions = if expected_num_region_params == supplied_num_region_params {
- path.segments.last().unwrap().lifetimes.map(
- |l| ast_region_to_region(this.tcx(), l))
+ path.segments.last().unwrap().lifetimes.iter().map(
+ |l| ast_region_to_region(this.tcx(), l)).collect::<Vec<_>>()
} else {
let anon_regions =
rscope.anon_regions(path.span, expected_num_region_params);
let vinfo =
ty::enum_variant_with_id(tcx, enm, var);
let var_tpt = ty::lookup_item_type(tcx, var);
- vinfo.args.map(|t| {
+ vinfo.args.iter().map(|t| {
if var_tpt.generics.type_param_defs().len() ==
expected_substs.tps.len()
{
*t // In this case, an error was already signaled
// anyway
}
- })
+ }).collect()
};
kind_name = "variant";
// Get the expected types of the arguments.
let class_fields = ty::struct_fields(
tcx, struct_def_id, expected_substs);
- arg_types = class_fields.map(|field| field.mt.ty);
+ arg_types = class_fields.iter().map(|field| field.mt.ty).collect();
kind_name = "structure";
}
debug!("push_candidates_from_impl: {} {} {}",
token::get_name(self.m_name),
impl_info.ident.repr(self.tcx()),
- impl_info.methods.map(|m| m.ident).repr(self.tcx()));
+ impl_info.methods.iter()
+ .map(|m| m.ident)
+ .collect::<Vec<ast::Ident>>()
+ .repr(self.tcx()));
let idx = {
match impl_info.methods
let ret_ty = fn_sig.output;
debug!("check_fn(arg_tys={:?}, ret_ty={:?})",
- arg_tys.map(|&a| ppaux::ty_to_str(tcx, a)),
+ arg_tys.iter().map(|&a| ppaux::ty_to_str(tcx, a)).collect::<Vec<~str>>(),
ppaux::ty_to_str(tcx, ret_ty));
// Create the function context. This is either derived from scratch or,
};
debug!("check_argument_types: formal_tys={:?}",
- formal_tys.map(|t| fcx.infcx().ty_to_str(*t)));
+ formal_tys.iter().map(|t| fcx.infcx().ty_to_str(*t)).collect::<Vec<~str>>());
// Check the arguments.
// We do this in a pretty awful way: first we typecheck any arguments
expr.span,
fcx.expr_ty(rcvr));
- let tps = tps.map(|&ast_ty| fcx.to_ty(ast_ty));
+ let tps = tps.iter().map(|&ast_ty| fcx.to_ty(ast_ty)).collect::<Vec<_>>();
let fn_ty = match method::lookup(fcx, expr, rcvr,
method_name.name,
- expr_t, tps,
+ expr_t, tps.as_slice(),
DontDerefArgs,
CheckTraitsAndInherentMethods,
AutoderefReceiver) {
let fty = if error_happened {
fty_sig = FnSig {
binder_id: ast::CRATE_NODE_ID,
- inputs: fn_ty.sig.inputs.map(|_| ty::mk_err()),
+ inputs: fn_ty.sig.inputs.iter().map(|_| ty::mk_err()).collect(),
output: ty::mk_err(),
variadic: false
};
}
ast::ExprMethodCall(ident, ref tps, ref args) => {
check_method_call(fcx, expr, ident, args.as_slice(), tps.as_slice());
- let arg_tys = args.map(|a| fcx.expr_ty(*a));
- let (args_bot, args_err) = arg_tys.iter().fold((false, false),
+ let mut arg_tys = args.iter().map(|a| fcx.expr_ty(*a));
+ let (args_bot, args_err) = arg_tys.fold((false, false),
|(rest_bot, rest_err), a| {
- (rest_bot || ty::type_is_bot(*a),
- rest_err || ty::type_is_error(*a))});
+ (rest_bot || ty::type_is_bot(a),
+ rest_err || ty::type_is_error(a))});
if args_err {
fcx.write_error(id);
} else if args_bot {
let num_expected_regions = tpt.generics.region_param_defs().len();
let num_supplied_regions = pth.segments.last().unwrap().lifetimes.len();
let regions = if num_expected_regions == num_supplied_regions {
- OwnedSlice::from_vec(pth.segments.last().unwrap().lifetimes.map(
- |l| ast_region_to_region(fcx.tcx(), l)))
+ OwnedSlice::from_vec(pth.segments.last().unwrap().lifetimes.iter().map(
+ |l| ast_region_to_region(fcx.tcx(), l)).collect())
} else {
if num_supplied_regions != 0 {
fcx.ccx.tcx.sess.span_err(
fn resolve_origins(fcx: &FnCtxt, sp: Span,
vtbls: vtable_res) -> vtable_res {
- @vtbls.map(|os| @os.map(|origin| {
+ @vtbls.iter().map(|os| @os.iter().map(|origin| {
match origin {
&vtable_static(def_id, ref tys, origins) => {
let r_tys = resolve_type_vars_in_types(fcx,
vtable_param(n, b)
}
}
- }))
+ }).collect()).collect()
}
}
let result_ty = match variant.node.kind {
ast::TupleVariantKind(ref args) if args.len() > 0 => {
let rs = ExplicitRscope;
- let input_tys = args.map(|va| ccx.to_ty(&rs, va.ty));
+ let input_tys: Vec<_> = args.iter().map(|va| ccx.to_ty(&rs, va.ty)).collect();
ty::mk_ctor_fn(tcx, scope, input_tys.as_slice(), enum_ty)
}
convert_struct(ccx, struct_def, tpt, variant.node.id);
- let input_tys = struct_def.fields.map(
- |f| ty::node_id_to_type(ccx.tcx, f.node.id));
+ let input_tys: Vec<_> = struct_def.fields.iter().map(
+ |f| ty::node_id_to_type(ccx.tcx, f.node.id)).collect();
ty::mk_ctor_fn(tcx, scope, input_tys.as_slice(), enum_ty)
}
};
}
// Add an entry mapping
- let method_def_ids = @ms.map(|m| {
+ let method_def_ids = @ms.iter().map(|m| {
match m {
&ast::Required(ref ty_method) => {
local_def(ty_method.id)
local_def(method.id)
}
}
- });
+ }).collect();
let trait_def_id = local_def(trait_id);
tcx.trait_method_def_ids.borrow_mut()
- .insert(trait_def_id, @method_def_ids.iter()
- .map(|x| *x)
- .collect());
+ .insert(trait_def_id, method_def_ids);
}
_ => {} // Ignore things that aren't traits.
}
tcx.tcache.borrow_mut().insert(local_def(ctor_id), tpt);
} else if struct_def.fields.get(0).node.kind.is_unnamed() {
// Tuple-like.
- let inputs = struct_def.fields.map(
+ let inputs: Vec<_> = struct_def.fields.iter().map(
|field| tcx.tcache.borrow().get(
- &local_def(field.node.id)).ty);
+ &local_def(field.node.id)).ty).collect();
let ctor_fn_ty = ty::mk_ctor_fn(tcx,
ctor_id,
inputs.as_slice(),
}
pub fn tys_to_str(&self, ts: &[ty::t]) -> ~str {
- let tstrs = ts.map(|t| self.ty_to_str(*t));
+ let tstrs: Vec<~str> = ts.iter().map(|t| self.ty_to_str(*t)).collect();
format!("({})", tstrs.connect(", "))
}
format!("collect_error_for_expanding_node() could not find error \
for var {:?}, lower_bounds={}, upper_bounds={}",
node_idx,
- lower_bounds.map(|x| x.region).repr(self.tcx),
- upper_bounds.map(|x| x.region).repr(self.tcx)));
+ lower_bounds.iter()
+ .map(|x| x.region)
+ .collect::<Vec<ty::Region>>()
+ .repr(self.tcx),
+ upper_bounds.iter()
+ .map(|x| x.region)
+ .collect::<Vec<ty::Region>>()
+ .repr(self.tcx)));
}
fn collect_error_for_contracting_node(
format!("collect_error_for_contracting_node() could not find error \
for var {:?}, upper_bounds={}",
node_idx,
- upper_bounds.map(|x| x.region).repr(self.tcx)));
+ upper_bounds.iter()
+ .map(|x| x.region)
+ .collect::<Vec<ty::Region>>()
+ .repr(self.tcx)));
}
fn collect_concrete_regions(&self,
impl InferStr for FnSig {
fn inf_str(&self, cx: &InferCtxt) -> ~str {
format!("({}) -> {}",
- self.inputs.map(|a| a.inf_str(cx)).connect(", "),
+ self.inputs.iter().map(|a| a.inf_str(cx)).collect::<Vec<~str>>().connect(", "),
self.output.inf_str(cx))
}
}
substs: Vec<ty::t> ) {
if substs.len() > 0u {
debug!("write_substs_to_tcx({}, {:?})", node_id,
- substs.map(|t| ppaux::ty_to_str(tcx, *t)));
+ substs.iter().map(|t| ppaux::ty_to_str(tcx, *t)).collect::<Vec<~str>>());
assert!(substs.iter().all(|t| !ty::type_needs_infer(*t)));
tcx.node_type_substs.borrow_mut().insert(node_id, substs);
pub fn field_expr(f: ast::Field) -> @ast::Expr { return f.expr; }
pub fn field_exprs(fields: Vec<ast::Field> ) -> Vec<@ast::Expr> {
- fields.map(|f| f.expr)
+ fields.move_iter().map(|f| f.expr).collect()
}
struct LoopQueryVisitor<'a> {
}
pub fn vec_map_to_str<T>(ts: &[T], f: |t: &T| -> ~str) -> ~str {
- let tstrs = ts.map(f);
+ let tstrs = ts.iter().map(f).collect::<Vec<~str>>();
format!("[{}]", tstrs.connect(", "))
}
ket: char,
sig: &ty::FnSig) {
s.push_char(bra);
- let strs = sig.inputs.map(|a| fn_input_to_str(cx, *a));
+ let strs: Vec<~str> = sig.inputs.iter().map(|a| fn_input_to_str(cx, *a)).collect();
s.push_str(strs.connect(", "));
if sig.variadic {
s.push_str(", ...");
}
ty_unboxed_vec(ref tm) => { format!("unboxed_vec<{}>", mt_to_str(cx, tm)) }
ty_tup(ref elems) => {
- let strs = elems.map(|elem| ty_to_str(cx, *elem));
+ let strs: Vec<~str> = elems.iter().map(|elem| ty_to_str(cx, *elem)).collect();
~"(" + strs.connect(",") + ")"
}
ty_closure(ref f) => {
if lines.len() >= 1 {
let mut unindented = vec!( lines.get(0).trim() );
- unindented.push_all(lines.tail().map(|&line| {
+ unindented.push_all(lines.tail().iter().map(|&line| {
if line.is_whitespace() {
line
} else {
assert!(line.len() >= min_indent);
line.slice_from(min_indent)
}
- }));
+ }).collect::<Vec<_>>().as_slice());
unindented.connect("\n")
} else {
s.to_owned()
}
impl<A:ToJson> ToJson for ~[A] {
- fn to_json(&self) -> Json { List(self.map(|elt| elt.to_json())) }
+ fn to_json(&self) -> Json { List(self.iter().map(|elt| elt.to_json()).collect()) }
}
impl<A:ToJson> ToJson for TreeMap<~str, A> {
#[inline]
fn to_lower(&self) -> ~[Ascii] {
- self.map(|a| a.to_lower())
+ self.iter().map(|a| a.to_lower()).collect()
}
#[inline]
fn to_upper(&self) -> ~[Ascii] {
- self.map(|a| a.to_upper())
+ self.iter().map(|a| a.to_upper()).collect()
}
#[inline]
#![allow(missing_doc)]
+use iter::Iterator;
use io::IoResult;
use io::net::ip::{SocketAddr, IpAddr};
use option::{Option, Some, None};
use rt::rtio::{IoFactory, LocalIo};
-use slice::ImmutableVector;
+use slice::OwnedVector;
/// Hints to the types of sockets that are desired when looking up hosts
pub enum SocketType {
/// Easy name resolution. Given a hostname, returns the list of IP addresses for
/// that hostname.
pub fn get_host_addresses(host: &str) -> IoResult<~[IpAddr]> {
- lookup(Some(host), None, None).map(|a| a.map(|i| i.address.ip))
+ lookup(Some(host), None, None).map(|a| a.move_iter().map(|i| i.address.ip).collect())
}
/// Full-fleged resolution. This function will perform a synchronous call to
// Functional utilities
-/**
- * Apply a function to each element of a vector and return a concatenation
- * of each result vector
- */
-pub fn flat_map<T, U>(v: &[T], f: |t: &T| -> ~[U]) -> ~[U] {
- let mut result = ~[];
- for elem in v.iter() { result.push_all_move(f(elem)); }
- result
-}
-
#[allow(missing_doc)]
pub trait VectorVector<T> {
// FIXME #5898: calling these .concat and .connect conflicts with
fn initn(&self, n: uint) -> &'a [T];
/// Returns the last element of a vector, or `None` if it is empty.
fn last(&self) -> Option<&'a T>;
- /**
- * Apply a function to each element of a vector and return a concatenation
- * of each result vector
- */
- fn flat_map<U>(&self, f: |t: &T| -> ~[U]) -> ~[U];
+
/// Returns a pointer to the element at the given index, without doing
/// bounds checking.
unsafe fn unsafe_ref(self, index: uint) -> &'a T;
*/
fn bsearch(&self, f: |&T| -> Ordering) -> Option<uint>;
- /// Deprecated, use iterators where possible
- /// (`self.iter().map(f)`). Apply a function to each element
- /// of a vector and return the results.
- fn map<U>(&self, |t: &T| -> U) -> ~[U];
-
/**
* Returns a mutable reference to the first element in this slice
* and adjusts the slice in place so that it no longer contains
if self.len() == 0 { None } else { Some(&self[self.len() - 1]) }
}
- #[inline]
- fn flat_map<U>(&self, f: |t: &T| -> ~[U]) -> ~[U] {
- flat_map(*self, f)
- }
-
#[inline]
unsafe fn unsafe_ref(self, index: uint) -> &'a T {
transmute(self.repr().data.offset(index as int))
return None;
}
- fn map<U>(&self, f: |t: &T| -> U) -> ~[U] {
- self.iter().map(f).collect()
- }
-
fn shift_ref(&mut self) -> Option<&'a T> {
if self.len() == 0 { return None; }
unsafe {
*/
}
- #[test]
- fn test_map() {
- // Test on-stack map.
- let v = &[1u, 2u, 3u];
- let mut w = v.map(square_ref);
- assert_eq!(w.len(), 3u);
- assert_eq!(w[0], 1u);
- assert_eq!(w[1], 4u);
- assert_eq!(w[2], 9u);
-
- // Test on-heap map.
- let v = ~[1u, 2u, 3u, 4u, 5u];
- w = v.map(square_ref);
- assert_eq!(w.len(), 5u);
- assert_eq!(w[0], 1u);
- assert_eq!(w[1], 4u);
- assert_eq!(w[2], 9u);
- assert_eq!(w[3], 16u);
- assert_eq!(w[4], 25u);
- }
-
#[test]
fn test_retain() {
let mut v = ~[1, 2, 3, 4, 5];
})
}
- #[test]
- #[should_fail]
- fn test_map_fail() {
- use rc::Rc;
- let v = [(~0, Rc::new(0)), (~0, Rc::new(0)), (~0, Rc::new(0)), (~0, Rc::new(0))];
- let mut i = 0;
- v.map(|_elt| {
- if i == 2 {
- fail!()
- }
- i += 1;
- ~[(~0, Rc::new(0))]
- });
- }
-
- #[test]
- #[should_fail]
- fn test_flat_map_fail() {
- use rc::Rc;
- let v = [(~0, Rc::new(0)), (~0, Rc::new(0)), (~0, Rc::new(0)), (~0, Rc::new(0))];
- let mut i = 0;
- flat_map(v, |_elt| {
- if i == 2 {
- fail!()
- }
- i += 1;
- ~[(~0, Rc::new(0))]
- });
- }
-
#[test]
#[should_fail]
fn test_permute_fail() {
}
}
- ///Apply a function to each element of a vector and return the results.
- #[inline]
- #[deprecated="Use `xs.iter().map(closure)` instead."]
- pub fn map<U>(&self, f: |t: &T| -> U) -> Vec<U> {
- self.iter().map(f).collect()
- }
-
/// Takes ownership of the vector `other`, moving all elements into
/// the current vector. This does not copy any elements, and it is
/// illegal to use the `other` vector after calling this method
pub fn path_name_i(idents: &[Ident]) -> ~str {
// FIXME: Bad copies (#2543 -- same for everything else that says "bad")
- idents.map(|i| {
+ idents.iter().map(|i| {
token::get_ident(*i).get().to_str()
- }).connect("::")
+ }).collect::<Vec<~str>>().connect("::")
}
// totally scary function: ignores all but the last element, should have
}
#[test] fn idents_name_eq_test() {
- assert!(segments_name_eq([Ident{name:3,ctxt:4},
- Ident{name:78,ctxt:82}].map(ident_to_segment),
- [Ident{name:3,ctxt:104},
- Ident{name:78,ctxt:182}].map(ident_to_segment)));
- assert!(!segments_name_eq([Ident{name:3,ctxt:4},
- Ident{name:78,ctxt:82}].map(ident_to_segment),
- [Ident{name:3,ctxt:104},
- Ident{name:77,ctxt:182}].map(ident_to_segment)));
+ assert!(segments_name_eq(
+ [Ident{name:3,ctxt:4}, Ident{name:78,ctxt:82}]
+ .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice(),
+ [Ident{name:3,ctxt:104}, Ident{name:78,ctxt:182}]
+ .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice()));
+ assert!(!segments_name_eq(
+ [Ident{name:3,ctxt:4}, Ident{name:78,ctxt:82}]
+ .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice(),
+ [Ident{name:3,ctxt:104}, Ident{name:77,ctxt:182}]
+ .iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice()));
}
}
}
fn lambda(&self, span: Span, ids: Vec<ast::Ident> , blk: P<ast::Block>) -> @ast::Expr {
let fn_decl = self.fn_decl(
- ids.map(|id| self.arg(span, *id, self.ty_infer(span))),
+ ids.iter().map(|id| self.arg(span, *id, self.ty_infer(span))).collect(),
self.ty_infer(span));
self.expr(span, ast::ExprFnBlock(fn_decl, blk))
fn view_use_list(&self, sp: Span, vis: ast::Visibility,
path: Vec<ast::Ident> , imports: &[ast::Ident]) -> ast::ViewItem {
- let imports = imports.map(|id| {
+ let imports = imports.iter().map(|id| {
respan(sp, ast::PathListIdent_ { name: *id, id: ast::DUMMY_NODE_ID })
- });
+ }).collect();
self.view_use(sp, vis,
vec!(@respan(sp,
ast::ViewPathList(self.path(sp, path),
- imports.iter()
- .map(|x| *x)
- .collect(),
+ imports,
ast::DUMMY_NODE_ID))))
}
if all_fields.len() >= 1 && all_fields.get(0).name.is_none() {
// enum-like
- let subcalls = all_fields.map(subcall);
+ let subcalls = all_fields.iter().map(subcall).collect();
cx.expr_call_ident(trait_span, ctor_ident, subcalls)
} else {
// struct-like
- let fields = all_fields.map(|field| {
+ let fields = all_fields.iter().map(|field| {
let ident = match field.name {
Some(i) => i,
None => cx.span_bug(trait_span,
name))
};
cx.field_imm(field.span, ident, subcall(field))
- });
+ }).collect::<Vec<_>>();
if fields.is_empty() {
// no fields, so construct like `None`
if fields.is_empty() {
cx.expr_ident(trait_span, substr.type_ident)
} else {
- let exprs = fields.map(|sp| default_call(*sp));
+ let exprs = fields.iter().map(|sp| default_call(*sp)).collect();
cx.expr_call_ident(trait_span, substr.type_ident, exprs)
}
}
Named(ref fields) => {
- let default_fields = fields.map(|&(ident, span)| {
+ let default_fields = fields.iter().map(|&(ident, span)| {
cx.field_imm(span, ident, default_call(span))
- });
+ }).collect();
cx.expr_struct_ident(trait_span, substr.type_ident, default_fields)
}
}
ty_params.extend(generics.ty_params.iter().map(|ty_param| {
// I don't think this can be moved out of the loop, since
// a TyParamBound requires an ast id
- let mut bounds =
+ let mut bounds: Vec<_> =
// extra restrictions on the generics parameters to the type being derived upon
- self.additional_bounds.map(|p| {
+ self.additional_bounds.iter().map(|p| {
cx.typarambound(p.to_path(cx, self.span,
type_ident, generics))
- });
+ }).collect();
// require the current trait
bounds.push(cx.typarambound(trait_path.clone()));
ident,
vec::append(vec!(attr), self.attributes.as_slice()),
ast::ItemImpl(trait_generics, opt_trait_ref,
- self_type, methods.map(|x| *x)))
+ self_type, methods))
}
fn expand_struct_def(&self,
struct_def: &StructDef,
type_ident: Ident,
generics: &Generics) -> @ast::Item {
- let methods = self.methods.map(|method_def| {
+ let methods = self.methods.iter().map(|method_def| {
let (explicit_self, self_args, nonself_args, tys) =
method_def.split_self_nonself_args(
cx, self, type_ident, generics);
type_ident, generics,
explicit_self, tys,
body)
- });
+ }).collect();
self.create_derived_impl(cx, type_ident, generics, methods)
}
enum_def: &EnumDef,
type_ident: Ident,
generics: &Generics) -> @ast::Item {
- let methods = self.methods.map(|method_def| {
+ let methods = self.methods.iter().map(|method_def| {
let (explicit_self, self_args, nonself_args, tys) =
method_def.split_self_nonself_args(cx, self,
type_ident, generics);
type_ident, generics,
explicit_self, tys,
body)
- });
+ }).collect();
self.create_derived_impl(cx, type_ident, generics, methods)
}
self_args: &[@Expr],
nonself_args: &[@Expr])
-> @Expr {
- let summary = enum_def.variants.map(|v| {
+ let summary = enum_def.variants.iter().map(|v| {
let ident = v.node.name;
let summary = match v.node.kind {
ast::TupleVariantKind(ref args) => {
- Unnamed(args.map(|va| trait_.set_expn_info(cx, va.ty.span)))
+ Unnamed(args.iter().map(|va| trait_.set_expn_info(cx, va.ty.span)).collect())
}
ast::StructVariantKind(struct_def) => {
trait_.summarise_struct(cx, struct_def)
}
};
(ident, v.span, summary)
- });
+ }).collect();
self.call_substructure_method(cx, trait_, type_ident,
self_args, nonself_args,
&StaticEnum(enum_def, summary))
field_paths: Vec<ast::Path> ,
mutbl: ast::Mutability)
-> Vec<@ast::Pat> {
- field_paths.map(|path| {
+ field_paths.iter().map(|path| {
cx.pat(path.span,
ast::PatIdent(ast::BindByRef(mutbl), (*path).clone(), None))
- })
+ }).collect()
}
fn create_struct_pattern(&self,
match *substructure.fields {
EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => {
// call self_n.method(other_1_n, other_2_n, ...)
- let called = all_fields.map(|field| {
+ let called = all_fields.iter().map(|field| {
cx.expr_method_call(field.span,
field.self_,
substructure.method_ident,
- field.other.map(|e| cx.expr_addr_of(field.span, *e)))
- });
+ field.other.iter()
+ .map(|e| cx.expr_addr_of(field.span, *e))
+ .collect())
+ }).collect();
f(cx, trait_span, called)
},
if fields.is_empty() {
cx.expr_ident(trait_span, ctor_ident)
} else {
- let exprs = fields.map(|span| rand_call(cx, *span));
+ let exprs = fields.iter().map(|span| rand_call(cx, *span)).collect();
cx.expr_call_ident(trait_span, ctor_ident, exprs)
}
}
Named(ref fields) => {
- let rand_fields = fields.map(|&(ident, span)| {
+ let rand_fields = fields.iter().map(|&(ident, span)| {
let e = rand_call(cx, span);
cx.field_imm(span, ident, e)
- });
+ }).collect();
cx.expr_struct_ident(trait_span, ctor_ident, rand_fields)
}
}
self_ty: Ident,
self_generics: &Generics)
-> ast::Path {
- let idents = self.path.map(|s| cx.ident_of(*s) );
+ let idents = self.path.iter().map(|s| cx.ident_of(*s)).collect();
let lt = mk_lifetimes(cx, span, &self.lifetime);
- let tys = self.params.map(|t| t.to_ty(cx, span, self_ty, self_generics));
+ let tys = self.params.iter().map(|t| t.to_ty(cx, span, self_ty, self_generics)).collect();
cx.path_all(span, self.global, idents, lt, tys)
}
let ty = if fields.is_empty() {
ast::TyNil
} else {
- ast::TyTup(fields.map(|f| f.to_ty(cx, span, self_ty, self_generics)))
+ ast::TyTup(fields.iter()
+ .map(|f| f.to_ty(cx, span, self_ty, self_generics))
+ .collect())
};
cx.ty(span, ty)
self_ty: Ident,
self_generics: &Generics)
-> Generics {
- let lifetimes = self.lifetimes.map(|lt| {
+ let lifetimes = self.lifetimes.iter().map(|lt| {
cx.lifetime(span, cx.ident_of(*lt).name)
- });
- let ty_params = self.bounds.map(|t| {
+ }).collect();
+ let ty_params = self.bounds.iter().map(|t| {
match t {
&(ref name, ref bounds) => {
mk_ty_param(cx,
self_generics)
}
}
- });
+ }).collect();
mk_generics(lifetimes, ty_params)
}
}
if fields.is_empty() {
cx.expr_ident(trait_span, substr.type_ident)
} else {
- let exprs = fields.map(|sp| zero_call(*sp));
+ let exprs = fields.iter().map(|sp| zero_call(*sp)).collect();
cx.expr_call_ident(trait_span, substr.type_ident, exprs)
}
}
Named(ref fields) => {
- let zero_fields = fields.map(|&(ident, span)| {
+ let zero_fields = fields.iter().map(|&(ident, span)| {
cx.field_imm(span, ident, zero_call(span))
- });
+ }).collect();
cx.expr_struct_ident(trait_span, substr.type_ident, zero_fields)
}
}
// expand the elements of a block.
pub fn expand_block_elts(b: &Block, fld: &mut MacroExpander) -> P<Block> {
- let new_view_items = b.view_items.map(|x| fld.fold_view_item(x));
+ let new_view_items = b.view_items.iter().map(|x| fld.fold_view_item(x)).collect();
let new_stmts =
b.stmts.iter().flat_map(|x| {
let renamed_stmt = {
impl<'a> ToSource for &'a [@ast::Item] {
fn to_source(&self) -> ~str {
- self.map(|i| i.to_source()).connect("\n\n")
+ self.iter().map(|i| i.to_source()).collect::<Vec<~str>>().connect("\n\n")
}
}
impl<'a> ToSource for &'a [ast::Ty] {
fn to_source(&self) -> ~str {
- self.map(|i| i.to_source()).connect(", ")
+ self.iter().map(|i| i.to_source()).collect::<Vec<~str>>().connect(", ")
}
}
}
fn ids_ext(strs: Vec<~str> ) -> Vec<ast::Ident> {
- strs.map(|str| str_to_ident(*str))
+ strs.iter().map(|str| str_to_ident(*str)).collect()
}
fn id_ext(str: &str) -> ast::Ident {
-> base::MacResult {
base::check_zero_tts(cx, sp, tts, "module_path!");
let string = cx.mod_path()
+ .iter()
.map(|x| token::get_ident(*x).get().to_str())
+ .collect::<Vec<~str>>()
.connect("::");
base::MRExpr(cx.expr_str(sp, token::intern_and_get_ident(string)))
}
} else {
if (bb_eis.len() > 0u && next_eis.len() > 0u)
|| bb_eis.len() > 1u {
- let nts = bb_eis.map(|ei| {
+ let nts = bb_eis.iter().map(|ei| {
match ei.elts.get(ei.idx).node {
MatchNonterminal(bind, name, _) => {
format!("{} ('{}')",
token::get_ident(bind))
}
_ => fail!()
- } }).connect(" or ");
+ } }).collect::<Vec<~str>>().connect(" or ");
return Error(sp, format!(
"local ambiguity: multiple parsing options: \
built-in NTs {} or {} other options.",
}
ViewPathList(ref path, ref path_list_idents, node_id) => {
ViewPathList(self.fold_path(path),
- path_list_idents.map(|path_list_ident| {
+ path_list_idents.iter().map(|path_list_ident| {
let id = self.new_id(path_list_ident.node
.id);
Spanned {
span: self.new_span(
path_list_ident.span)
}
- }),
+ }).collect(),
self.new_id(node_id))
}
};
kind: sf.node.kind,
id: self.new_id(sf.node.id),
ty: self.fold_ty(sf.node.ty),
- attrs: sf.node.attrs.map(|e| fold_attribute_(*e, self))
+ attrs: sf.node.attrs.iter().map(|e| fold_attribute_(*e, self)).collect()
},
span: self.new_span(sf.span)
}
fn fold_arm(&mut self, a: &Arm) -> Arm {
Arm {
- pats: a.pats.map(|x| self.fold_pat(*x)),
+ pats: a.pats.iter().map(|x| self.fold_pat(*x)).collect(),
guard: a.guard.map(|x| self.fold_expr(x)),
body: self.fold_expr(a.body),
}
onceness: f.onceness,
bounds: fold_opt_bounds(&f.bounds, self),
decl: self.fold_fn_decl(f.decl),
- lifetimes: f.lifetimes.map(|l| fold_lifetime(l, self)),
+ lifetimes: f.lifetimes.iter().map(|l| fold_lifetime(l, self)).collect(),
})
}
TyBareFn(ref f) => {
TyBareFn(@BareFnTy {
- lifetimes: f.lifetimes.map(|l| fold_lifetime(l, self)),
+ lifetimes: f.lifetimes.iter().map(|l| fold_lifetime(l, self)).collect(),
purity: f.purity,
abis: f.abis,
decl: self.fold_fn_decl(f.decl)
})
}
- TyTup(ref tys) => TyTup(tys.map(|&ty| self.fold_ty(ty))),
+ TyTup(ref tys) => TyTup(tys.iter().map(|&ty| self.fold_ty(ty)).collect()),
TyPath(ref path, ref bounds, id) => {
TyPath(self.fold_path(path),
fold_opt_bounds(bounds, self),
let kind;
match v.node.kind {
TupleVariantKind(ref variant_args) => {
- kind = TupleVariantKind(variant_args.map(|x|
- fold_variant_arg_(x, self)))
+ kind = TupleVariantKind(variant_args.iter().map(|x|
+ fold_variant_arg_(x, self)).collect())
}
StructVariantKind(ref struct_def) => {
kind = StructVariantKind(@ast::StructDef {
}
}
- let attrs = v.node.attrs.map(|x| fold_attribute_(*x, self));
+ let attrs = v.node.attrs.iter().map(|x| fold_attribute_(*x, self)).collect();
let de = match v.node.disr_expr {
Some(e) => Some(self.fold_expr(e)),
ast::Path {
span: self.new_span(p.span),
global: p.global,
- segments: p.segments.map(|segment| ast::PathSegment {
+ segments: p.segments.iter().map(|segment| ast::PathSegment {
identifier: self.fold_ident(segment.identifier),
- lifetimes: segment.lifetimes.map(|l| fold_lifetime(l, self)),
- types: segment.types.map(|&typ| self.fold_ty(typ)),
- })
+ lifetimes: segment.lifetimes.iter().map(|l| fold_lifetime(l, self)).collect(),
+ types: segment.types.iter().map(|&typ| self.fold_ty(typ)).collect(),
+ }).collect()
}
}
match mi.node {
MetaWord(ref id) => MetaWord((*id).clone()),
MetaList(ref id, ref mis) => {
- MetaList((*id).clone(), mis.map(|e| fold_meta_item_(*e, fld)))
+ MetaList((*id).clone(), mis.iter().map(|e| fold_meta_item_(*e, fld)).collect())
}
MetaNameValue(ref id, ref s) => {
MetaNameValue((*id).clone(), (*s).clone())
pub fn noop_fold_fn_decl<T: Folder>(decl: &FnDecl, fld: &mut T) -> P<FnDecl> {
P(FnDecl {
- inputs: decl.inputs.map(|x| fold_arg_(x, fld)), // bad copy
+ inputs: decl.inputs.iter().map(|x| fold_arg_(x, fld)).collect(), // bad copy
output: fld.fold_ty(decl.output),
cf: decl.cf,
variadic: decl.variadic
pub fn fold_lifetimes<T: Folder>(lts: &Vec<Lifetime>, fld: &mut T)
-> Vec<Lifetime> {
- lts.map(|l| fold_lifetime(l, fld))
+ lts.iter().map(|l| fold_lifetime(l, fld)).collect()
}
pub fn fold_opt_lifetime<T: Folder>(o_lt: &Option<Lifetime>, fld: &mut T)
fn fold_struct_def<T: Folder>(struct_def: @StructDef, fld: &mut T) -> @StructDef {
@ast::StructDef {
- fields: struct_def.fields.map(|f| fold_struct_field(f, fld)),
+ fields: struct_def.fields.iter().map(|f| fold_struct_field(f, fld)).collect(),
ctor_id: struct_def.ctor_id.map(|cid| fld.new_id(cid)),
}
}
kind: f.node.kind,
id: fld.new_id(f.node.id),
ty: fld.fold_ty(f.node.ty),
- attrs: f.node.attrs.map(|a| fold_attribute_(*a, fld)),
+ attrs: f.node.attrs.iter().map(|a| fold_attribute_(*a, fld)).collect(),
},
span: fld.new_span(f.span),
}
};
ViewItem {
node: inner_view_item,
- attrs: vi.attrs.map(|a| fold_attribute_(*a, folder)),
+ attrs: vi.attrs.iter().map(|a| fold_attribute_(*a, folder)).collect(),
vis: vi.vis,
span: folder.new_span(vi.span),
}
}
pub fn noop_fold_block<T: Folder>(b: P<Block>, folder: &mut T) -> P<Block> {
- let view_items = b.view_items.map(|x| folder.fold_view_item(x));
+ let view_items = b.view_items.iter().map(|x| folder.fold_view_item(x)).collect();
let stmts = b.stmts.iter().flat_map(|s| folder.fold_stmt(*s).move_iter()).collect();
P(Block {
id: folder.new_id(b.id), // Needs to be first, for ast_map.
ItemEnum(ref enum_definition, ref generics) => {
ItemEnum(
ast::EnumDef {
- variants: enum_definition.variants.map(|&x| {
+ variants: enum_definition.variants.iter().map(|&x| {
folder.fold_variant(x)
- }),
+ }).collect(),
},
fold_generics(generics, folder))
}
ItemImpl(fold_generics(generics, folder),
ifce.as_ref().map(|p| fold_trait_ref(p, folder)),
folder.fold_ty(ty),
- methods.map(|x| folder.fold_method(*x))
+ methods.iter().map(|x| folder.fold_method(*x)).collect()
)
}
ItemTrait(ref generics, ref traits, ref methods) => {
- let methods = methods.map(|method| {
+ let methods = methods.iter().map(|method| {
match *method {
Required(ref m) => Required(folder.fold_type_method(m)),
Provided(method) => Provided(folder.fold_method(method))
}
- });
+ }).collect();
ItemTrait(fold_generics(generics, folder),
- traits.map(|p| fold_trait_ref(p, folder)),
+ traits.iter().map(|p| fold_trait_ref(p, folder)).collect(),
methods)
}
ItemMac(ref m) => ItemMac(folder.fold_mac(m)),
TypeMethod {
id: fld.new_id(m.id), // Needs to be first, for ast_map.
ident: fld.fold_ident(m.ident),
- attrs: m.attrs.map(|a| fold_attribute_(*a, fld)),
+ attrs: m.attrs.iter().map(|a| fold_attribute_(*a, fld)).collect(),
purity: m.purity,
decl: fld.fold_fn_decl(m.decl),
generics: fold_generics(&m.generics, fld),
pub fn noop_fold_crate<T: Folder>(c: Crate, folder: &mut T) -> Crate {
Crate {
module: folder.fold_mod(&c.module),
- attrs: c.attrs.map(|x| fold_attribute_(*x, folder)),
- config: c.config.map(|x| fold_meta_item_(*x, folder)),
+ attrs: c.attrs.iter().map(|x| fold_attribute_(*x, folder)).collect(),
+ config: c.config.iter().map(|x| fold_meta_item_(*x, folder)).collect(),
span: folder.new_span(c.span),
}
}
SmallVector::one(@Item {
id: id,
ident: folder.fold_ident(ident),
- attrs: i.attrs.map(|e| fold_attribute_(*e, folder)),
+ attrs: i.attrs.iter().map(|e| fold_attribute_(*e, folder)).collect(),
node: node,
vis: i.vis,
span: folder.new_span(i.span)
@ForeignItem {
id: folder.new_id(ni.id), // Needs to be first, for ast_map.
ident: folder.fold_ident(ni.ident),
- attrs: ni.attrs.map(|x| fold_attribute_(*x, folder)),
+ attrs: ni.attrs.iter().map(|x| fold_attribute_(*x, folder)).collect(),
node: match ni.node {
ForeignItemFn(ref fdec, ref generics) => {
ForeignItemFn(P(FnDecl {
- inputs: fdec.inputs.map(|a| fold_arg_(a, folder)),
+ inputs: fdec.inputs.iter().map(|a| fold_arg_(a, folder)).collect(),
output: folder.fold_ty(fdec.output),
cf: fdec.cf,
variadic: fdec.variadic
@Method {
id: folder.new_id(m.id), // Needs to be first, for ast_map.
ident: folder.fold_ident(m.ident),
- attrs: m.attrs.map(|a| fold_attribute_(*a, folder)),
+ attrs: m.attrs.iter().map(|a| fold_attribute_(*a, folder)).collect(),
generics: fold_generics(&m.generics, folder),
explicit_self: folder.fold_explicit_self(&m.explicit_self),
purity: m.purity,
PatLit(e) => PatLit(folder.fold_expr(e)),
PatEnum(ref pth, ref pats) => {
PatEnum(folder.fold_path(pth),
- pats.as_ref().map(|pats| pats.map(|x| folder.fold_pat(*x))))
+ pats.as_ref().map(|pats| pats.iter().map(|x| folder.fold_pat(*x)).collect()))
}
PatStruct(ref pth, ref fields, etc) => {
let pth_ = folder.fold_path(pth);
- let fs = fields.map(|f| {
+ let fs = fields.iter().map(|f| {
ast::FieldPat {
ident: f.ident,
pat: folder.fold_pat(f.pat)
}
- });
+ }).collect();
PatStruct(pth_, fs, etc)
}
- PatTup(ref elts) => PatTup(elts.map(|x| folder.fold_pat(*x))),
+ PatTup(ref elts) => PatTup(elts.iter().map(|x| folder.fold_pat(*x)).collect()),
PatUniq(inner) => PatUniq(folder.fold_pat(inner)),
PatRegion(inner) => PatRegion(folder.fold_pat(inner)),
PatRange(e1, e2) => {
PatRange(folder.fold_expr(e1), folder.fold_expr(e2))
},
PatVec(ref before, ref slice, ref after) => {
- PatVec(before.map(|x| folder.fold_pat(*x)),
+ PatVec(before.iter().map(|x| folder.fold_pat(*x)).collect(),
slice.map(|x| folder.fold_pat(x)),
- after.map(|x| folder.fold_pat(*x)))
+ after.iter().map(|x| folder.fold_pat(*x)).collect())
}
};
ExprBox(folder.fold_expr(p), folder.fold_expr(e))
}
ExprVec(ref exprs, mutt) => {
- ExprVec(exprs.map(|&x| folder.fold_expr(x)), mutt)
+ ExprVec(exprs.iter().map(|&x| folder.fold_expr(x)).collect(), mutt)
}
ExprRepeat(expr, count, mutt) => {
ExprRepeat(folder.fold_expr(expr), folder.fold_expr(count), mutt)
}
- ExprTup(ref elts) => ExprTup(elts.map(|x| folder.fold_expr(*x))),
+ ExprTup(ref elts) => ExprTup(elts.iter().map(|x| folder.fold_expr(*x)).collect()),
ExprCall(f, ref args) => {
ExprCall(folder.fold_expr(f),
- args.map(|&x| folder.fold_expr(x)))
+ args.iter().map(|&x| folder.fold_expr(x)).collect())
}
ExprMethodCall(i, ref tps, ref args) => {
ExprMethodCall(
folder.fold_ident(i),
- tps.map(|&x| folder.fold_ty(x)),
- args.map(|&x| folder.fold_expr(x)))
+ tps.iter().map(|&x| folder.fold_ty(x)).collect(),
+ args.iter().map(|&x| folder.fold_expr(x)).collect())
}
ExprBinary(binop, lhs, rhs) => {
ExprBinary(binop,
}
ExprMatch(expr, ref arms) => {
ExprMatch(folder.fold_expr(expr),
- arms.map(|x| folder.fold_arm(x)))
+ arms.iter().map(|x| folder.fold_arm(x)).collect())
}
ExprFnBlock(decl, body) => {
ExprFnBlock(folder.fold_fn_decl(decl), folder.fold_block(body))
ExprField(el, id, ref tys) => {
ExprField(folder.fold_expr(el),
folder.fold_ident(id),
- tys.map(|&x| folder.fold_ty(x)))
+ tys.iter().map(|&x| folder.fold_ty(x)).collect())
}
ExprIndex(el, er) => {
ExprIndex(folder.fold_expr(el), folder.fold_expr(er))
}
ExprInlineAsm(ref a) => {
ExprInlineAsm(InlineAsm {
- inputs: a.inputs.map(|&(ref c, input)| {
+ inputs: a.inputs.iter().map(|&(ref c, input)| {
((*c).clone(), folder.fold_expr(input))
- }),
- outputs: a.outputs.map(|&(ref c, out)| {
+ }).collect(),
+ outputs: a.outputs.iter().map(|&(ref c, out)| {
((*c).clone(), folder.fold_expr(out))
- }),
+ }).collect(),
.. (*a).clone()
})
}
ExprMac(ref mac) => ExprMac(folder.fold_mac(mac)),
ExprStruct(ref path, ref fields, maybe_expr) => {
ExprStruct(folder.fold_path(path),
- fields.map(|x| fold_field_(*x, folder)),
+ fields.iter().map(|x| fold_field_(*x, folder)).collect(),
maybe_expr.map(|x| folder.fold_expr(x)))
},
ExprParen(ex) => ExprParen(folder.fold_expr(ex))
}
if can_trim {
- lines.map(|line| line.slice(i + 1, line.len()).to_owned())
+ lines.iter().map(|line| line.slice(i + 1, line.len()).to_owned()).collect()
} else {
lines
}
// convert a vector of strings to a vector of ast::Ident's
pub fn strs_to_idents(ids: Vec<&str> ) -> Vec<ast::Ident> {
- ids.map(|u| token::str_to_ident(*u))
+ ids.iter().map(|u| token::str_to_ident(*u)).collect()
}
// does the given string match the pattern? whitespace in the first string
pub unsafe fn f(xs: Vec<int> ) {
- xs.map(|_x| { unsafe fn q() { fail!(); } });
+ xs.iter().map(|_x| { unsafe fn q() { fail!(); } }).collect::<Vec<()>>();
}
impl fmt::Show for AsciiArt {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Convert each line into a string.
- let lines = self.lines.map(|line| str::from_chars(line.as_slice()));
+ let lines = self.lines.iter()
+ .map(|line| str::from_chars(line.as_slice()))
+ .collect::<Vec<~str>>();
// Concatenate the lines together using a new-line.
write!(f.buf, "{}", lines.connect("\n"))