config.src_base.display());
let mut tests = Vec::new();
let dirs = fs::readdir(&config.src_base).unwrap();
- for file in dirs.iter() {
+ for file in &dirs {
let file = file.clone();
debug!("inspecting file {:?}", file.display());
if is_test(config, &file) {
let mut valid = false;
- for ext in valid_extensions.iter() {
+ for ext in &valid_extensions {
if name.ends_with(ext.as_slice()) {
valid = true;
}
}
- for pre in invalid_prefixes.iter() {
+ for pre in &invalid_prefixes {
if name.starts_with(pre.as_slice()) {
valid = false;
}
match cmd.spawn() {
Ok(mut process) => {
- for input in input.iter() {
+ if let Some(input) = input {
process.stdin.as_mut().unwrap().write_all(input.as_bytes()).unwrap();
}
let ProcessOutput { status, output, error } =
match cmd.spawn() {
Ok(mut process) => {
- for input in input.iter() {
+ if let Some(input) = input {
process.stdin.as_mut().unwrap().write_all(input.as_bytes()).unwrap();
}
exe_file.as_str().unwrap().replace("\\", "\\\\"))[]);
// Add line breakpoints
- for line in breakpoint_lines.iter() {
+ for line in &breakpoint_lines {
script_str.push_str(&format!("break '{}':{}\n",
testfile.filename_display(),
*line)[]);
script_str.push_str("type category enable Rust\n");
// Set breakpoints on every line that contains the string "#break"
- for line in breakpoint_lines.iter() {
+ for line in &breakpoint_lines {
script_str.push_str(format!("breakpoint set --line {}\n",
line).as_slice());
}
// Append the other commands
- for line in commands.iter() {
+ for line in &commands {
script_str.push_str(line.as_slice());
script_str.push_str("\n");
}
let mut rest = line.trim();
let mut first = true;
let mut failed = false;
- for frag in check_fragments[i].iter() {
+ for frag in &check_fragments[i] {
let found = if first {
if rest.starts_with(frag.as_slice()) {
Some(0)
missing_patterns[0]).as_slice(),
proc_res);
} else {
- for pattern in missing_patterns.iter() {
+ for pattern in missing_patterns {
error(format!("error pattern '{}' not found!",
*pattern).as_slice());
}
fn check_forbid_output(props: &TestProps,
output_to_check: &str,
proc_res: &ProcRes) {
- for pat in props.forbid_output.iter() {
+ for pat in &props.forbid_output {
if output_to_check.contains(pat.as_slice()) {
fatal_proc_rec("forbidden pattern found in compiler output", proc_res);
}
// FIXME (#9639): This needs to handle non-utf8 paths
let extra_link_args = vec!("-L".to_string(), aux_dir.as_str().unwrap().to_string());
- for rel_ab in props.aux_builds.iter() {
+ for rel_ab in &props.aux_builds {
let abs_ab = config.aux_base.join(rel_ab.as_slice());
let aux_props = header::load_props(&abs_ab);
let mut crate_type = if aux_props.no_prefer_dynamic {
runargs.push(format!("{}", config.adb_test_dir));
runargs.push(format!("{}", prog_short));
- for tv in args.args.iter() {
+ for tv in &args.args {
runargs.push(tv.to_string());
}
procsrv::run("",
let tdir = aux_output_dir_name(config, testfile);
let dirs = fs::readdir(&tdir).unwrap();
- for file in dirs.iter() {
+ for file in &dirs {
if file.extension_str() == Some("so") {
// FIXME (#9639): This needs to handle non-utf8 paths
let copy_result = procsrv::run("",
];
pub fn get_os(triple: &str) -> &'static str {
- for &(triple_os, os) in OS_TABLE.iter() {
+ for &(triple_os, os) in OS_TABLE {
if triple.contains(triple_os) {
return os
}
fn drop(&mut self) {
unsafe {
destroy_chunk(&*self.head.borrow());
- for chunk in self.chunks.borrow().iter() {
+ for chunk in &*self.chunks.borrow() {
if !chunk.is_copy.get() {
destroy_chunk(chunk);
}
let mut keys = (0..n).map(|_| rng.gen::<uint>() % n)
.collect::<Vec<_>>();
- for k in keys.iter() {
+ for k in &keys {
insert(map, *k);
}
let iterout = [9, 5, 3];
let heap = BinaryHeap::from_vec(data);
let mut i = 0;
- for el in heap.iter() {
+ for el in &heap {
assert_eq!(*el, iterout[i]);
i += 1;
}
let mut q: BinaryHeap<uint> = xs.iter().rev().map(|&x| x).collect();
- for &x in xs.iter() {
+ for &x in &xs {
assert_eq!(q.pop().unwrap(), x);
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Debug for Bitv {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- for bit in self.iter() {
+ for bit in self {
try!(write!(fmt, "{}", if bit { 1u32 } else { 0u32 }));
}
Ok(())
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
try!(write!(fmt, "BitvSet {{"));
let mut first = true;
- for n in self.iter() {
+ for n in self {
if !first {
try!(write!(fmt, ", "));
}
impl<S: hash::Writer + hash::Hasher> hash::Hash<S> for BitvSet {
fn hash(&self, state: &mut S) {
- for pos in self.iter() {
+ for pos in self {
pos.hash(state);
}
}
b.iter(|| {
let mut sum = 0u;
for _ in 0u..10 {
- for pres in bitv.iter() {
+ for pres in &bitv {
sum += pres as uint;
}
}
let bitv = Bitv::from_elem(BENCH_BITS, false);
b.iter(|| {
let mut sum = 0u;
- for pres in bitv.iter() {
+ for pres in &bitv {
sum += pres as uint;
}
sum
fn test_bitv_set_frombitv_init() {
let bools = [true, false];
let lengths = [10, 64, 100];
- for &b in bools.iter() {
- for &l in lengths.iter() {
+ for &b in &bools {
+ for &l in &lengths {
let bitset = BitvSet::from_bitv(Bitv::from_elem(l, b));
assert_eq!(bitset.contains(&1u), b);
assert_eq!(bitset.contains(&(l-1u)), b);
|idx| {idx % 3 == 0}));
b.iter(|| {
let mut sum = 0u;
- for idx in bitv.iter() {
+ for idx in &bitv {
sum += idx as uint;
}
sum
#[stable(feature = "rust1", since = "1.0.0")]
impl<S: Hasher, K: Hash<S>, V: Hash<S>> Hash<S> for BTreeMap<K, V> {
fn hash(&self, state: &mut S) {
- for elt in self.iter() {
+ for elt in self {
elt.hash(state);
}
}
}
b.iter(|| {
- for entry in map.iter() {
+ for entry in &map {
black_box(entry);
}
});
let mut vals = RawItems::from_parts(ret.vals().as_ptr(), 0);
let mut edges = RawItems::from_parts(ret.edges().as_ptr(), 0);
- for key in self.keys().iter() {
+ for key in self.keys() {
keys.push(key.clone())
}
- for val in self.vals().iter() {
+ for val in self.vals() {
vals.push(val.clone())
}
- for edge in self.edges().iter() {
+ for edge in self.edges() {
edges.push(edge.clone())
}
let mut set_a = BTreeSet::new();
let mut set_b = BTreeSet::new();
- for x in a.iter() { assert!(set_a.insert(*x)) }
- for y in b.iter() { assert!(set_b.insert(*y)) }
+ for x in a { assert!(set_a.insert(*x)) }
+ for y in b { assert!(set_b.insert(*y)) }
let mut i = 0;
f(&set_a, &set_b, Counter { i: &mut i, expected: expected });
let set: BTreeSet<int> = xs.iter().map(|&x| x).collect();
- for x in xs.iter() {
+ for x in &xs {
assert!(set.contains(x));
}
}
impl<S: Writer + Hasher, A: Hash<S>> Hash<S> for DList<A> {
fn hash(&self, state: &mut S) {
self.len().hash(state);
- for elt in self.iter() {
+ for elt in self {
elt.hash(state);
}
}
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
try!(write!(fmt, "EnumSet {{"));
let mut first = true;
- for e in self.iter() {
+ for e in self {
if !first {
try!(write!(fmt, ", "));
}
impl<S: Writer + Hasher, A: Hash<S>> Hash<S> for RingBuf<A> {
fn hash(&self, state: &mut S) {
self.len().hash(state);
- for elt in self.iter() {
+ for elt in self {
elt.hash(state);
}
}
b.iter(|| {
let mut sum = 0;
- for &i in ring.iter() {
+ for &i in &ring {
sum += i;
}
test::black_box(sum);
fn concat(&self) -> Vec<T> {
let size = self.iter().fold(0u, |acc, v| acc + v.as_slice().len());
let mut result = Vec::with_capacity(size);
- for v in self.iter() {
+ for v in self {
result.push_all(v.as_slice())
}
result
let size = self.iter().fold(0u, |acc, v| acc + v.as_slice().len());
let mut result = Vec::with_capacity(size + self.len());
let mut first = true;
- for v in self.iter() {
+ for v in self {
if first { first = false } else { result.push(sep.clone()) }
result.push_all(v.as_slice())
}
assert_eq!(v.len(), 3);
let mut cnt = 0u;
- for f in v.iter() {
+ for f in &v {
assert!(*f == Foo);
cnt += 1;
}
assert_eq!(cnt, 3);
- for f in v[1..3].iter() {
+ for f in &v[1..3] {
assert!(*f == Foo);
cnt += 1;
}
let xs: [Foo; 3] = [Foo, Foo, Foo];
cnt = 0;
- for f in xs.iter() {
+ for f in &xs {
assert!(*f == Foo);
cnt += 1;
}
b.iter(|| {
let mut sum = 0;
- for x in v.iter() {
+ for x in &v {
sum += *x;
}
// sum == 11806, to stop dead code elimination.
let len = s.iter().map(|s| s.as_slice().len()).sum();
let mut result = String::with_capacity(len);
- for s in s.iter() {
+ for s in s {
result.push_str(s.as_slice())
}
let mut result = String::with_capacity(len);
let mut first = true;
- for s in s.iter() {
+ for s in s {
if first {
first = false;
} else {
let s = "ศไทย中华Việt Nam";
let v = vec!['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m'];
let mut pos = 0;
- for ch in v.iter() {
+ for ch in &v {
assert!(s.char_at(pos) == *ch);
pos += ch.to_string().len();
}
&["\u{378}\u{308}\u{903}"], &["\u{378}\u{308}", "\u{903}"]),
];
- for &(s, g) in test_same.iter() {
+ for &(s, g) in &test_same[] {
// test forward iterator
assert!(order::equals(s.graphemes(true), g.iter().map(|&x| x)));
assert!(order::equals(s.graphemes(false), g.iter().map(|&x| x)));
assert!(order::equals(s.graphemes(false).rev(), g.iter().rev().map(|&x| x)));
}
- for &(s, gt, gf) in test_diff.iter() {
+ for &(s, gt, gf) in &test_diff {
// test forward iterator
assert!(order::equals(s.graphemes(true), gt.iter().map(|&x| x)));
assert!(order::equals(s.graphemes(false), gf.iter().map(|&x| x)));
(String::from_str("\u{20000}"),
vec![0xD840, 0xDC00])];
- for p in pairs.iter() {
+ for p in &pairs {
let (s, u) = (*p).clone();
let s_as_utf16 = s.utf16_units().collect::<Vec<u16>>();
let u_as_string = String::from_utf16(u.as_slice()).unwrap();
// zeroed (when moving out, because of #[unsafe_no_drop_flag]).
if self.cap != 0 {
unsafe {
- for x in self.iter() {
+ for x in &*self {
ptr::read(x);
}
dealloc(*self.ptr, self.cap)
v.push(());
assert_eq!(v.iter().count(), 2);
- for &() in v.iter() {}
+ for &() in &v {}
assert_eq!(v.iter_mut().count(), 2);
v.push(());
// In order to not traverse the `VecMap` twice, count the elements
// during iteration.
let mut count: uint = 0;
- for elt in self.iter() {
+ for elt in self {
elt.hash(state);
count += 1;
}
let map: VecMap<char> = xs.iter().map(|&x| x).collect();
- for &(k, v) in xs.iter() {
+ for &(k, v) in &xs {
assert_eq!(map.get(&k), Some(&v));
}
}
try!(write!(f, "["));
}
let mut is_first = true;
- for x in self.iter() {
+ for x in self {
if is_first {
is_first = false;
} else {
#[inline]
fn hash(&self, state: &mut S) {
self.len().hash(state);
- for elt in self.iter() {
+ for elt in self {
elt.hash(state);
}
}
(1.0f64, NAN, None)
];
- for &(a, b, result) in data_integer.iter() {
+ for &(a, b, result) in &data_integer {
assert!(partial_min(a, b) == result);
}
- for &(a, b, result) in data_float.iter() {
+ for &(a, b, result) in &data_float {
assert!(partial_min(a, b) == result);
}
}
(1.0f64, NAN, None)
];
- for &(a, b, result) in data_integer.iter() {
+ for &(a, b, result) in &data_integer {
assert!(partial_max(a, b) == result);
}
- for &(a, b, result) in data_float.iter() {
+ for &(a, b, result) in &data_float {
assert!(partial_max(a, b) == result);
}
}
impl Writer for MyHasher {
// Most things we'll just add up the bytes.
fn write(&mut self, buf: &[u8]) {
- for byte in buf.iter() {
+ for byte in buf {
self.hash += *byte as u64;
}
}
fn to_hex_str(r: &[u8; 8]) -> String {
let mut s = String::new();
- for b in r.iter() {
+ for b in r {
s.push_str(format!("{}", fmt::radix(*b, 16)).as_slice());
}
s
fn result_str(h: u64) -> String {
let r = result_bytes(h);
let mut s = String::new();
- for b in r.iter() {
+ for b in &r {
s.push_str(format!("{}", fmt::radix(*b, 16)).as_slice());
}
s
/// Returns true if any of several options were matched.
pub fn opts_present(&self, names: &[String]) -> bool {
- for nm in names.iter() {
+ for nm in names {
match find_opt(self.opts.as_slice(), Name::from_str(&nm[])) {
Some(id) if !self.vals[id].is_empty() => return true,
_ => (),
/// Returns the string argument supplied to one of several matching options or `None`.
pub fn opts_str(&self, names: &[String]) -> Option<String> {
- for nm in names.iter() {
+ for nm in names {
match self.opt_val(&nm[]) {
Some(Val(ref s)) => return Some(s.clone()),
_ => ()
pub fn opt_strs(&self, nm: &str) -> Vec<String> {
let mut acc: Vec<String> = Vec::new();
let r = self.opt_vals(nm);
- for v in r.iter() {
+ for v in &r {
match *v {
Val(ref s) => acc.push((*s).clone()),
_ => ()
}
// Search in aliases.
- for candidate in opts.iter() {
+ for candidate in opts {
if candidate.aliases.iter().position(|opt| opt.name == nm).is_some() {
return opts.iter().position(|opt| opt.name == candidate.name);
}
}
}
let mut name_pos = 0;
- for nm in names.iter() {
+ for nm in &names {
name_pos += 1;
let optid = match find_opt(opts.as_slice(), (*nm).clone()) {
Some(id) => id,
options: &[RenderOption]) -> old_io::IoResult<()>
{
fn writeln<W:Writer>(w: &mut W, arg: &[&str]) -> old_io::IoResult<()> {
- for &s in arg.iter() { try!(w.write_str(s)); }
+ for &s in arg { try!(w.write_str(s)); }
w.write_char('\n')
}
}
try!(writeln(w, &["digraph ", g.graph_id().as_slice(), " {"]));
- for n in g.nodes().iter() {
+ for n in &*g.nodes() {
try!(indent(w));
let id = g.node_id(n);
if options.contains(&RenderOption::NoNodeLabels) {
}
}
- for e in g.edges().iter() {
+ for e in &*g.edges() {
let escaped_label = g.edge_label(e).escape();
try!(indent(w));
let source = g.source(e);
let mut rng = CountingRng { i: 0 };
- for &val in expected.iter() {
+ for &val in &expected {
assert_eq!(wc.ind_sample(&mut rng), val)
}
}}
let v: &[($ty, $ty)] = &[(0, 10),
(10, 127),
(Int::min_value(), Int::max_value())];
- for &(low, high) in v.iter() {
+ for &(low, high) in v {
let mut sampler: Range<$ty> = Range::new(low, high);
for _ in 0u..1000 {
let v = sampler.sample(&mut rng);
(-1e35, -1e25),
(1e-35, 1e-25),
(-1e35, 1e35)];
- for &(low, high) in v.iter() {
+ for &(low, high) in v {
let mut sampler: Range<$ty> = Range::new(low, high);
for _ in 0u..1000 {
let v = sampler.sample(&mut rng);
}
let r = [(0, MIDPOINT), (MIDPOINT, 0)];
- for &(mr_offset, m2_offset) in r.iter() {
+ for &(mr_offset, m2_offset) in &r {
macro_rules! rngstepp {
($j:expr, $shift:expr) => {{
}
}
- for &(mr_offset, m2_offset) in MP_VEC.iter() {
+ for &(mr_offset, m2_offset) in &MP_VEC {
for base in (0..MIDPOINT / 4).map(|i| i * 4) {
macro_rules! rngstepp {
// To test that `fill_bytes` actually did something, check that the
// average of `v` is not 0.
let mut sum = 0.0;
- for &x in v.iter() {
+ for &x in &v {
sum += x as f64;
}
assert!(sum / v.len() as f64 != 0.0);
}
fn check_foreign_fn(cx: &Context, decl: &ast::FnDecl) {
- for input in decl.inputs.iter() {
+ for input in &decl.inputs {
check_ty(cx, &*input.ty);
}
if let ast::Return(ref ret_ty) = decl.output {
match it.node {
ast::ItemForeignMod(ref nmod) if nmod.abi != abi::RustIntrinsic => {
- for ni in nmod.items.iter() {
+ for ni in &nmod.items {
match ni.node {
ast::ForeignItemFn(ref decl, _) => check_foreign_fn(cx, &**decl),
ast::ForeignItemStatic(ref t, _) => check_ty(cx, &**t)
// If it's a struct, we also have to check the fields' types
match it.node {
ast::ItemStruct(ref struct_def, _) => {
- for struct_field in struct_def.fields.iter() {
+ for struct_field in &struct_def.fields {
self.check_heap_type(cx, struct_field.span,
ty::node_id_to_type(cx.tcx, struct_field.node.id));
}
"no_builtins",
];
- for &name in ATTRIBUTE_WHITELIST.iter() {
+ for &name in ATTRIBUTE_WHITELIST {
if attr.check_name(name) {
break;
}
}
fn check_must_use(cx: &Context, attrs: &[ast::Attribute], sp: Span) -> bool {
- for attr in attrs.iter() {
+ for attr in attrs {
if attr.check_name("must_use") {
let mut msg = "unused result which must be used".to_string();
// check for #[must_use="..."]
ast::ItemEnum(ref enum_definition, _) => {
if has_extern_repr { return }
self.check_case(cx, "type", it.ident, it.span);
- for variant in enum_definition.variants.iter() {
+ for variant in &enum_definition.variants {
self.check_case(cx, "variant", variant.node.name, variant.span);
}
}
}
fn check_generics(&mut self, cx: &Context, it: &ast::Generics) {
- for gen in it.ty_params.iter() {
+ for gen in &*it.ty_params {
self.check_case(cx, "type parameter", gen.ident, gen.span);
}
}
fn check_struct_def(&mut self, cx: &Context, s: &ast::StructDef,
_: ast::Ident, _: &ast::Generics, _: ast::NodeId) {
- for sf in s.fields.iter() {
+ for sf in &s.fields {
if let ast::StructField_ { kind: ast::NamedField(ident, _), .. } = sf.node {
self.check_snake_case(cx, "structure field", ident, sf.span);
}
// avoid false warnings in match arms with multiple patterns
let mut mutables = FnvHashMap();
- for p in pats.iter() {
+ for p in pats {
pat_util::pat_bindings(&cx.tcx.def_map, &**p, |mode, id, _, path1| {
let ident = path1.node;
if let ast::BindByValue(ast::MutMutable) = mode {
}
let used_mutables = cx.tcx.used_mut_nodes.borrow();
- for (_, v) in mutables.iter() {
+ for (_, v) in &mutables {
if !v.iter().any(|e| used_mutables.contains(e)) {
cx.span_lint(UNUSED_MUT, cx.tcx.map.span(v[0]),
"variable does not need to be mutable");
fn check_expr(&mut self, cx: &Context, e: &ast::Expr) {
if let ast::ExprMatch(_, ref arms, _) = e.node {
- for a in arms.iter() {
+ for a in arms {
self.check_unused_mut_pat(cx, &a.pats[])
}
}
fn check_fn(&mut self, cx: &Context,
_: visit::FnKind, decl: &ast::FnDecl,
_: &ast::Block, _: Span, _: ast::NodeId) {
- for a in decl.inputs.iter() {
+ for a in &decl.inputs {
self.check_unused_mut_pat(cx, slice::ref_slice(&a.pat));
}
}
if cx.current_level(UNCONDITIONAL_RECURSION) != Level::Allow {
let sess = cx.sess();
// offer some help to the programmer.
- for call in self_call_spans.iter() {
+ for call in &self_call_spans {
sess.span_note(*call, "recursive call site")
}
sess.span_help(sp, "a `loop` may express intention better if this is on purpose")
pub fn register_pass(&mut self, sess: Option<&Session>,
from_plugin: bool, pass: LintPassObject) {
- for &lint in pass.get_lints().iter() {
+ for &lint in pass.get_lints() {
self.lints.push((*lint, from_plugin));
let id = LintId::of(*lint);
}
pub fn process_command_line(&mut self, sess: &Session) {
- for &(ref lint_name, level) in sess.opts.lint_opts.iter() {
+ for &(ref lint_name, level) in &sess.opts.lint_opts {
match self.find_lint(&lint_name[], sess, None) {
Some(lint_id) => self.set_level(lint_id, (level, CommandLine)),
None => {
pub fn gather_attrs(attrs: &[ast::Attribute])
-> Vec<Result<(InternedString, Level, Span), Span>> {
let mut out = vec!();
- for attr in attrs.iter() {
+ for attr in attrs {
let level = match Level::from_str(attr.name().get()) {
None => continue,
Some(lvl) => lvl,
}
};
- for meta in metas.iter() {
+ for meta in metas {
out.push(match meta.node {
ast::MetaWord(ref lint_name) => Ok((lint_name.clone(), level, meta.span)),
_ => Err(meta.span),
// If we missed any lints added to the session, then there's a bug somewhere
// in the iteration code.
- for (id, v) in tcx.sess.lints.borrow().iter() {
- for &(lint, span, ref msg) in v.iter() {
+ for (id, v) in &*tcx.sess.lints.borrow() {
+ for &(lint, span, ref msg) in v {
tcx.sess.span_bug(span,
format!("unprocessed lint {} at {}: {}",
lint.as_str(), tcx.map.node_to_string(*id), *msg).as_slice())
dump_crates(&self.sess.cstore);
}
- for &(ref name, kind) in self.sess.opts.libs.iter() {
+ for &(ref name, kind) in &self.sess.opts.libs {
register_native_lib(self.sess, None, name.clone(), kind);
}
}
None
})
.collect::<Vec<&ast::Attribute>>();
- for m in link_args.iter() {
+ for m in &link_args {
match m.value_str() {
Some(linkarg) => self.sess.cstore.add_used_link_args(linkarg.get()),
None => { /* fallthrough */ }
None
})
.collect::<Vec<&ast::Attribute>>();
- for m in link_args.iter() {
+ for m in &link_args {
match m.meta_item_list() {
Some(items) => {
let kind = items.iter().find(|k| {
pub fn is_staged_api(cstore: &cstore::CStore, def: ast::DefId) -> bool {
let cdata = cstore.get_crate_data(def.krate);
let attrs = decoder::get_crate_attributes(cdata.data());
- for attr in attrs.iter() {
+ for attr in &attrs {
if attr.name().get() == "staged_api" {
match attr.node.value.node { ast::MetaWord(_) => return true, _ => (/*pass*/) }
}
pub fn iter_crate_data<I>(&self, mut i: I) where
I: FnMut(ast::CrateNum, &crate_metadata),
{
- for (&k, v) in self.metas.borrow().iter() {
+ for (&k, v) in &*self.metas.borrow() {
i(k, &**v);
}
}
pub fn iter_crate_data_origins<I>(&self, mut i: I) where
I: FnMut(ast::CrateNum, &crate_metadata, Option<CrateSource>),
{
- for (&k, v) in self.metas.borrow().iter() {
+ for (&k, v) in &*self.metas.borrow() {
let origin = self.get_used_crate_source(k);
origin.as_ref().map(|cs| { assert!(k == cs.cnum); });
i(k, &**v, origin);
ordering: &mut Vec<ast::CrateNum>) {
if ordering.contains(&cnum) { return }
let meta = cstore.get_crate_data(cnum);
- for (_, &dep) in meta.cnum_map.iter() {
+ for (_, &dep) in &meta.cnum_map {
visit(cstore, dep, ordering);
}
ordering.push(cnum);
};
- for (&num, _) in self.metas.borrow().iter() {
+ for (&num, _) in &*self.metas.borrow() {
visit(self, num, &mut ordering);
}
ordering.reverse();
});
let mut impl_methods = Vec::new();
- for impl_method_id in impl_method_ids.iter() {
+ for impl_method_id in &impl_method_ids {
let impl_method_doc = lookup_item(impl_method_id.node, cdata.data());
let family = item_family(impl_method_doc);
match family {
try!(write!(out, "=Crate Attributes ({})=\n", *hash));
let r = get_attributes(md);
- for attr in r.iter() {
+ for attr in &r {
try!(write!(out, "{}\n", pprust::attribute_to_string(attr)));
}
fn list_crate_deps(data: &[u8], out: &mut old_io::Writer) -> old_io::IoResult<()> {
try!(write!(out, "=External Dependencies=\n"));
- for dep in get_crate_deps(data).iter() {
+ for dep in &get_crate_deps(data) {
try!(write!(out, "{} {}-{}\n", dep.cnum, dep.name, dep.hash));
}
try!(write!(out, "\n"));
fn encode_struct_fields(rbml_w: &mut Encoder,
fields: &[ty::field_ty],
origin: DefId) {
- for f in fields.iter() {
+ for f in fields {
if f.name == special_idents::unnamed_field.name {
rbml_w.start_tag(tag_item_unnamed_field);
} else {
let mut i = 0;
let vi = ty::enum_variants(ecx.tcx,
DefId { krate: ast::LOCAL_CRATE, node: id });
- for variant in variants.iter() {
+ for variant in variants {
let def_id = local_def(variant.node.id);
index.push(entry {
val: variant.node.id as i64,
let path = path.collect::<Vec<_>>();
rbml_w.start_tag(tag_path);
rbml_w.wr_tagged_u32(tag_path_len, path.len() as u32);
- for pe in path.iter() {
+ for pe in &path {
let tag = match *pe {
ast_map::PathMod(_) => tag_path_elem_mod,
ast_map::PathName(_) => tag_path_elem_name
let impl_items = ecx.tcx.impl_items.borrow();
match ecx.tcx.inherent_impls.borrow().get(&exp.def_id) {
Some(implementations) => {
- for base_impl_did in implementations.iter() {
- for &method_did in (*impl_items)[*base_impl_did].iter() {
+ for base_impl_did in &**implementations {
+ for &method_did in &*(*impl_items)[*base_impl_did] {
let impl_item = ty::impl_or_trait_item(
ecx.tcx,
method_did.def_id());
-> bool {
match ecx.tcx.trait_items_cache.borrow().get(&exp.def_id) {
Some(trait_items) => {
- for trait_item in trait_items.iter() {
+ for trait_item in &**trait_items {
if let ty::MethodTraitItem(ref m) = *trait_item {
encode_reexported_static_method(rbml_w,
exp,
path: PathElems) {
debug!("(encoding info for module) encoding reexports for {}", id);
match ecx.reexports.get(&id) {
- Some(ref exports) => {
+ Some(exports) => {
debug!("(encoding info for module) found reexports for {}", id);
- for exp in exports.iter() {
+ for exp in exports {
debug!("(encoding info for module) reexport '{}' ({}/{}) for \
{}",
exp.name,
debug!("(encoding info for module) encoding info for module ID {}", id);
// Encode info about all the module children.
- for item in md.items.iter() {
+ for item in &md.items {
rbml_w.start_tag(tag_mod_child);
rbml_w.wr_str(&def_to_string(local_def(item.id))[]);
rbml_w.end_tag();
fn encode_provided_source(rbml_w: &mut Encoder,
source_opt: Option<DefId>) {
- for source in source_opt.iter() {
+ if let Some(source) = source_opt {
rbml_w.start_tag(tag_item_method_provided_source);
- let s = def_to_string(*source);
+ let s = def_to_string(source);
rbml_w.writer.write_all(s.as_bytes());
rbml_w.end_tag();
}
let mut index = Vec::new();
/* We encode both private and public fields -- need to include
private fields to get the offsets right */
- for field in fields.iter() {
+ for field in fields {
let nm = field.name;
let id = field.id.node;
rbml_w.wr_tagged_u64(tag_region_param_def_index,
param.index as u64);
- for &bound_region in param.bounds.iter() {
+ for &bound_region in ¶m.bounds {
encode_region(ecx, rbml_w, bound_region);
}
fn encode_method_argument_names(rbml_w: &mut Encoder,
decl: &ast::FnDecl) {
rbml_w.start_tag(tag_method_argument_names);
- for arg in decl.inputs.iter() {
+ for arg in &decl.inputs {
rbml_w.start_tag(tag_method_argument_name);
if let ast::PatIdent(_, ref path1, _) = arg.pat.node {
let name = token::get_ident(path1.node);
ecx: &EncodeContext,
attrs: &[ast::Attribute]) {
let mut repr_attrs = Vec::new();
- for attr in attrs.iter() {
+ for attr in attrs {
repr_attrs.extend(attr::find_repr_attrs(ecx.tcx.sess.diagnostic(),
attr).into_iter());
}
match ecx.tcx.inherent_impls.borrow().get(&def_id) {
None => {}
Some(implementations) => {
- for &impl_def_id in implementations.iter() {
+ for &impl_def_id in &**implementations {
rbml_w.start_tag(tag_items_data_item_inherent_impl);
encode_def_id(rbml_w, impl_def_id);
rbml_w.end_tag();
match ecx.tcx.trait_impls.borrow().get(&trait_def_id) {
None => {}
Some(implementations) => {
- for &impl_def_id in implementations.borrow().iter() {
+ for &impl_def_id in &*implementations.borrow() {
rbml_w.start_tag(tag_items_data_item_extension_impl);
encode_def_id(rbml_w, impl_def_id);
rbml_w.end_tag();
encode_path(rbml_w, path);
// Encode all the items in this module.
- for foreign_item in fm.items.iter() {
+ for foreign_item in &fm.items {
rbml_w.start_tag(tag_mod_child);
rbml_w.wr_str(&def_to_string(local_def(foreign_item.id))[]);
rbml_w.end_tag();
encode_name(rbml_w, item.ident.name);
encode_attributes(rbml_w, &item.attrs[]);
encode_repr_attrs(rbml_w, ecx, &item.attrs[]);
- for v in (*enum_definition).variants.iter() {
+ for v in &enum_definition.variants {
encode_variant_id(rbml_w, local_def(v.node.id));
}
encode_inlined_item(ecx, rbml_w, IIItemRef(item));
}
_ => {}
}
- for &item_def_id in items.iter() {
+ for &item_def_id in items {
rbml_w.start_tag(tag_item_impl_item);
match item_def_id {
ty::MethodTraitItemId(item_def_id) => {
}
rbml_w.end_tag();
}
- for ast_trait_ref in opt_trait.iter() {
+ if let Some(ref ast_trait_ref) = *opt_trait {
let trait_ref = ty::node_id_to_trait_ref(
tcx, ast_trait_ref.ref_id);
encode_trait_ref(rbml_w, ecx, &*trait_ref, tag_item_trait_ref);
encode_attributes(rbml_w, &item.attrs[]);
encode_visibility(rbml_w, vis);
encode_stability(rbml_w, stab);
- for &method_def_id in ty::trait_item_def_ids(tcx, def_id).iter() {
+ for &method_def_id in &*ty::trait_item_def_ids(tcx, def_id) {
rbml_w.start_tag(tag_item_trait_item);
match method_def_id {
ty::MethodTraitItemId(method_def_id) => {
rbml_w.start_tag(tag_index);
let mut bucket_locs = Vec::new();
rbml_w.start_tag(tag_index_buckets);
- for bucket in buckets.iter() {
+ for bucket in &buckets {
bucket_locs.push(rbml_w.writer.tell().unwrap());
rbml_w.start_tag(tag_index_buckets_bucket);
- for elt in bucket.iter() {
+ for elt in bucket {
rbml_w.start_tag(tag_index_buckets_bucket_elt);
assert!(elt.pos < 0xffff_ffff);
{
}
rbml_w.end_tag();
rbml_w.start_tag(tag_index_table);
- for pos in bucket_locs.iter() {
+ for pos in &bucket_locs {
assert!(*pos < 0xffff_ffff);
let wr: &mut SeekableMemWriter = rbml_w.writer;
wr.write_be_u32(*pos as u32);
rbml_w.start_tag(tag_meta_item_name);
rbml_w.writer.write_all(name.get().as_bytes());
rbml_w.end_tag();
- for inner_item in items.iter() {
+ for inner_item in items {
encode_meta_item(rbml_w, &**inner_item);
}
rbml_w.end_tag();
fn encode_attributes(rbml_w: &mut Encoder, attrs: &[ast::Attribute]) {
rbml_w.start_tag(tag_attributes);
- for attr in attrs.iter() {
+ for attr in attrs {
rbml_w.start_tag(tag_attribute);
rbml_w.wr_tagged_u8(tag_attribute_is_sugared_doc, attr.node.is_sugared_doc as u8);
encode_meta_item(rbml_w, &*attr.node.value);
fn encode_associated_type_names(rbml_w: &mut Encoder, names: &[ast::Name]) {
rbml_w.start_tag(tag_associated_type_names);
- for &name in names.iter() {
+ for &name in names {
rbml_w.wr_tagged_str(tag_associated_type_name, token::get_name(name).get());
}
rbml_w.end_tag();
// Sanity-check the crate numbers
let mut expected_cnum = 1;
- for n in deps.iter() {
+ for n in &deps {
assert_eq!(n.cnum, expected_cnum);
expected_cnum += 1;
}
// but is enough to get transitive crate dependencies working.
rbml_w.start_tag(tag_crate_deps);
let r = get_ordered_deps(cstore);
- for dep in r.iter() {
+ for dep in &r {
encode_crate_dep(rbml_w, (*dep).clone());
}
rbml_w.end_tag();
fn encode_lang_items(ecx: &EncodeContext, rbml_w: &mut Encoder) {
rbml_w.start_tag(tag_lang_items);
- for (i, def_id) in ecx.tcx.lang_items.items() {
- for id in def_id.iter() {
+ for (i, &def_id) in ecx.tcx.lang_items.items() {
+ if let Some(id) = def_id {
if id.krate == ast::LOCAL_CRATE {
rbml_w.start_tag(tag_lang_items_item);
}
}
- for i in ecx.tcx.lang_items.missing.iter() {
+ for i in &ecx.tcx.lang_items.missing {
rbml_w.wr_tagged_u32(tag_lang_items_missing, *i as u32);
}
fn encode_macro_defs(rbml_w: &mut Encoder,
krate: &ast::Crate) {
rbml_w.start_tag(tag_macro_defs);
- for def in krate.exported_macros.iter() {
+ for def in &krate.exported_macros {
rbml_w.start_tag(tag_macro_def);
encode_name(rbml_w, def.ident.name);
rbml_w: &mut Encoder) {
rbml_w.start_tag(tag_misc_info);
rbml_w.start_tag(tag_misc_info_crate_items);
- for item in krate.module.items.iter() {
+ for item in &krate.module.items {
rbml_w.start_tag(tag_mod_child);
rbml_w.wr_str(&def_to_string(local_def(item.id))[]);
rbml_w.end_tag();
fn encode_reachable_extern_fns(ecx: &EncodeContext, rbml_w: &mut Encoder) {
rbml_w.start_tag(tag_reachable_extern_fns);
- for id in ecx.reachable.iter() {
+ for id in ecx.reachable {
if let Some(ast_map::NodeItem(i)) = ecx.tcx.map.find(*id) {
if let ast::ItemFn(_, _, abi, ref generics, _) = i.node {
if abi != abi::Rust && !generics.is_type_parameterized() {
stats.total_bytes = rbml_w.writer.tell().unwrap();
if tcx.sess.meta_stats() {
- for e in rbml_w.writer.get_ref().iter() {
+ for e in rbml_w.writer.get_ref() {
if *e == 0 {
stats.zero_bytes += 1;
}
// Try RUST_PATH
if !found {
let rustpath = rust_path();
- for path in rustpath.iter() {
+ for path in &rustpath {
let tlib_path = make_rustpkg_lib_path(
self.sysroot, path, self.triple);
debug!("is {} in visited_dirs? {}", tlib_path.display(),
}
cwd.pop();
}
- let h = os::homedir();
- for h in h.iter() {
+ if let Some(h) = os::homedir() {
let p = h.join(".rust");
if !env_rust_path.contains(&p) && p.exists() {
env_rust_path.push(p);
&format!("multiple matching crates for `{}`",
self.crate_name)[]);
self.sess.note("candidates:");
- for lib in libraries.iter() {
+ for lib in &libraries {
match lib.dylib {
Some((ref p, _)) => {
self.sess.note(&format!("path: {}",
F: FnMut(&mut PState<'a, 'tcx>) -> T,
{
let mut r = VecPerParamSpace::empty();
- for &space in subst::ParamSpace::all().iter() {
+ for &space in &subst::ParamSpace::all() {
assert_eq!(next(st), '[');
while peek(st) != ']' {
r.push(space, f(st));
}
ty::ty_tup(ref ts) => {
mywrite!(w, "T[");
- for t in ts.iter() { enc_ty(w, cx, *t); }
+ for t in ts { enc_ty(w, cx, *t); }
mywrite!(w, "]");
}
ty::ty_uniq(typ) => { mywrite!(w, "~"); enc_ty(w, cx, typ); }
mut op: F) where
F: FnMut(&mut SeekableMemWriter, &ctxt<'a, 'tcx>, &T),
{
- for &space in subst::ParamSpace::all().iter() {
+ for &space in &subst::ParamSpace::all() {
mywrite!(w, "[");
- for t in v.get_slice(space).iter() {
+ for t in v.get_slice(space) {
op(w, cx, t);
}
mywrite!(w, "]");
fn enc_fn_sig<'a, 'tcx>(w: &mut SeekableMemWriter, cx: &ctxt<'a, 'tcx>,
fsig: &ty::PolyFnSig<'tcx>) {
mywrite!(w, "[");
- for ty in fsig.0.inputs.iter() {
+ for ty in &fsig.0.inputs {
enc_ty(w, cx, *ty);
}
mywrite!(w, "]");
}
pub fn enc_builtin_bounds(w: &mut SeekableMemWriter, _cx: &ctxt, bs: &ty::BuiltinBounds) {
- for bound in bs.iter() {
+ for bound in bs {
match bound {
ty::BoundSend => mywrite!(w, "S"),
ty::BoundSized => mywrite!(w, "Z"),
bs: &ty::ParamBounds<'tcx>) {
enc_builtin_bounds(w, cx, &bs.builtin_bounds);
- for &r in bs.region_bounds.iter() {
+ for &r in &bs.region_bounds {
mywrite!(w, "R");
enc_region(w, cx, r);
}
- for tp in bs.trait_bounds.iter() {
+ for tp in &bs.trait_bounds {
mywrite!(w, "I");
enc_trait_ref(w, cx, &*tp.0);
}
- for tp in bs.projection_bounds.iter() {
+ for tp in &bs.projection_bounds {
mywrite!(w, "P");
enc_projection_predicate(w, cx, &tp.0);
}
mut f: F) where
F: FnMut(&mut Encoder, &T),
{
- for &space in subst::ParamSpace::all().iter() {
+ for &space in &subst::ParamSpace::all() {
rbml_w.emit_from_vec(v.get_slice(space),
|rbml_w, n| Ok(f(rbml_w, n))).unwrap();
}
debug!("Encoding side tables for id {}", id);
- for def in tcx.def_map.borrow().get(&id).iter() {
+ if let Some(def) = tcx.def_map.borrow().get(&id) {
rbml_w.tag(c::tag_table_def, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| (*def).encode(rbml_w).unwrap());
})
}
- for &ty in tcx.node_types.borrow().get(&id).iter() {
+ if let Some(ty) = tcx.node_types.borrow().get(&id) {
rbml_w.tag(c::tag_table_node_type, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
})
}
- for &item_substs in tcx.item_substs.borrow().get(&id).iter() {
+ if let Some(item_substs) = tcx.item_substs.borrow().get(&id) {
rbml_w.tag(c::tag_table_item_subst, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
})
}
- for &fv in tcx.freevars.borrow().get(&id).iter() {
+ if let Some(fv) = tcx.freevars.borrow().get(&id) {
rbml_w.tag(c::tag_table_freevars, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
})
});
- for freevar in fv.iter() {
+ for freevar in fv {
rbml_w.tag(c::tag_table_upvar_capture_map, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
}
let lid = ast::DefId { krate: ast::LOCAL_CRATE, node: id };
- for &type_scheme in tcx.tcache.borrow().get(&lid).iter() {
+ if let Some(type_scheme) = tcx.tcache.borrow().get(&lid) {
rbml_w.tag(c::tag_table_tcache, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
})
}
- for &type_param_def in tcx.ty_param_defs.borrow().get(&id).iter() {
+ if let Some(type_param_def) = tcx.ty_param_defs.borrow().get(&id) {
rbml_w.tag(c::tag_table_param_defs, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
}
let method_call = MethodCall::expr(id);
- for &method in tcx.method_map.borrow().get(&method_call).iter() {
+ if let Some(method) = tcx.method_map.borrow().get(&method_call) {
rbml_w.tag(c::tag_table_method_map, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
})
}
- for &trait_ref in tcx.object_cast_map.borrow().get(&id).iter() {
+ if let Some(trait_ref) = tcx.object_cast_map.borrow().get(&id) {
rbml_w.tag(c::tag_table_object_cast_map, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
})
}
- for &adjustment in tcx.adjustments.borrow().get(&id).iter() {
+ if let Some(adjustment) = tcx.adjustments.borrow().get(&id) {
match *adjustment {
_ if ty::adjust_is_object(adjustment) => {
let method_call = MethodCall::autoobject(id);
- for &method in tcx.method_map.borrow().get(&method_call).iter() {
+ if let Some(method) = tcx.method_map.borrow().get(&method_call) {
rbml_w.tag(c::tag_table_method_map, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
assert!(!ty::adjust_is_object(adjustment));
for autoderef in 0..adj.autoderefs {
let method_call = MethodCall::autoderef(id, autoderef);
- for &method in tcx.method_map.borrow().get(&method_call).iter() {
+ if let Some(method) = tcx.method_map.borrow().get(&method_call) {
rbml_w.tag(c::tag_table_method_map, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
})
}
- for &closure_type in tcx.closure_tys.borrow().get(&ast_util::local_def(id)).iter() {
+ if let Some(closure_type) = tcx.closure_tys.borrow().get(&ast_util::local_def(id)) {
rbml_w.tag(c::tag_table_closure_tys, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
})
}
- for &&closure_kind in tcx.closure_kinds.borrow().get(&ast_util::local_def(id)).iter() {
+ if let Some(closure_kind) = tcx.closure_kinds.borrow().get(&ast_util::local_def(id)) {
rbml_w.tag(c::tag_table_closure_kinds, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
- encode_closure_kind(rbml_w, closure_kind)
+ encode_closure_kind(rbml_w, *closure_kind)
})
})
}
impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
fn block(&mut self, blk: &ast::Block, pred: CFGIndex) -> CFGIndex {
let mut stmts_exit = pred;
- for stmt in blk.stmts.iter() {
+ for stmt in &blk.stmts {
stmts_exit = self.stmt(&**stmt, stmts_exit);
}
self.pat(&*pats[0], pred)
} else {
let collect = self.add_dummy_node(&[]);
- for pat in pats.iter() {
+ for pat in pats {
let pat_exit = self.pat(&**pat, pred);
self.add_contained_edge(pat_exit, collect);
}
let expr_exit = self.add_node(expr.id, &[]);
let mut cond_exit = discr_exit;
- for arm in arms.iter() {
+ for arm in arms {
cond_exit = self.add_dummy_node(&[cond_exit]); // 2
let pats_exit = self.pats_any(&arm.pats[],
cond_exit); // 3
assert!(!self.exit_map.contains_key(&id));
self.exit_map.insert(id, node);
}
- for &pred in preds.iter() {
+ for &pred in preds {
self.add_contained_edge(pred, node);
}
node
Some(_) => {
match self.tcx.def_map.borrow().get(&expr.id) {
Some(&def::DefLabel(loop_id)) => {
- for l in self.loop_scopes.iter() {
+ for l in &self.loop_scopes {
if l.loop_id == loop_id {
return *l;
}
}
ast::ItemEnum(ref enum_definition, _) => {
self.inside_const(|v| {
- for var in enum_definition.variants.iter() {
+ for var in &enum_definition.variants {
if let Some(ref ex) = var.node.disr_expr {
v.visit_expr(&**ex);
}
}
ast::ExprBlock(ref block) => {
// Check all statements in the block
- for stmt in block.stmts.iter() {
+ for stmt in &block.stmts {
let block_span_err = |&: span|
span_err!(v.tcx.sess, span, E0016,
"blocks in constants are limited to items and \
visit::walk_expr(cx, ex);
match ex.node {
ast::ExprMatch(ref scrut, ref arms, source) => {
- for arm in arms.iter() {
+ for arm in arms {
// First, check legality of move bindings.
check_legality_of_move_bindings(cx,
arm.guard.is_some(),
source: ast::MatchSource) {
let mut seen = Matrix(vec![]);
let mut printed_if_let_err = false;
- for &(ref pats, guard) in arms.iter() {
- for pat in pats.iter() {
+ for &(ref pats, guard) in arms {
+ for pat in pats {
let v = vec![&**pat];
match is_useful(cx, &seen, &v[], LeaveOutWitness) {
visit::walk_fn(cx, kind, decl, body, sp);
- for input in decl.inputs.iter() {
+ for input in &decl.inputs {
is_refutable(cx, &*input.pat, |pat| {
span_err!(cx.tcx.sess, input.pat.span, E0006,
"refutable pattern in function argument: `{}` not covered",
let tcx = cx.tcx;
let def_map = &tcx.def_map;
let mut by_ref_span = None;
- for pat in pats.iter() {
+ for pat in pats {
pat_bindings(def_map, &**pat, |bm, _, span, _path| {
match bm {
ast::BindByRef(_) => {
}
};
- for pat in pats.iter() {
+ for pat in pats {
walk_pat(&**pat, |p| {
if pat_is_binding(def_map, &*p) {
match p.node {
-> Option<&'a Expr> {
fn variant_expr<'a>(variants: &'a [P<ast::Variant>], id: ast::NodeId)
-> Option<&'a Expr> {
- for variant in variants.iter() {
+ for variant in variants {
if variant.node.id == id {
return variant.node.disr_expr.as_ref().map(|e| &**e);
}
let mut orig_kills = self.kills[start.. end].to_vec();
let mut changed = false;
- for &node_id in edge.data.exiting_scopes.iter() {
+ for &node_id in &edge.data.exiting_scopes {
let opt_cfg_idx = self.nodeid_to_index.get(&node_id).map(|&i|i);
match opt_cfg_idx {
Some(cfg_idx) => {
// Note: this is a little endian printout of bytes.
- for &word in words.iter() {
+ for &word in words {
let mut v = word;
for _ in 0..uint::BYTES {
result.push(sep);
}
};
let fields = ty::lookup_struct_fields(self.tcx, id);
- for pat in pats.iter() {
+ for pat in pats {
let field_id = fields.iter()
.find(|field| field.name == pat.node.ident.name).unwrap().id;
self.live_symbols.insert(field_id.node);
self.worklist.extend(enum_def.variants.iter().map(|variant| variant.node.id));
}
ast::ItemImpl(_, _, _, Some(ref _trait_ref), _, ref impl_items) => {
- for impl_item in impl_items.iter() {
+ for impl_item in impl_items {
match *impl_item {
ast::MethodImplItem(ref method) => {
self.worklist.push(method.id);
// depending on whether a crate is built as bin or lib, and we want
// the warning to be consistent, we also seed the worklist with
// exported symbols.
- for id in exported_items.iter() {
+ for id in exported_items {
worklist.push(*id);
}
- for id in reachable_symbols.iter() {
+ for id in reachable_symbols {
worklist.push(*id);
}
match self.tcx.inherent_impls.borrow().get(&local_def(id)) {
None => (),
Some(impl_list) => {
- for impl_did in impl_list.iter() {
- for item_did in (*impl_items)[*impl_did].iter() {
+ for impl_did in &**impl_list {
+ for item_did in &(*impl_items)[*impl_did] {
if self.live_symbols.contains(&item_did.def_id()
.node) {
return true;
} else {
match item.node {
ast::ItemEnum(ref enum_def, _) => {
- for variant in enum_def.variants.iter() {
+ for variant in &enum_def.variants {
if self.should_warn_about_variant(&variant.node) {
self.warn_dead_code(variant.node.id, variant.span,
variant.node.name, "variant");
pub fn calculate(tcx: &ty::ctxt) {
let mut fmts = tcx.dependency_formats.borrow_mut();
- for &ty in tcx.sess.crate_types.borrow().iter() {
+ for &ty in &*tcx.sess.crate_types.borrow() {
fmts.insert(ty, calculate_type(&tcx.sess, ty));
}
tcx.sess.abort_if_errors();
debug!("adding dylib: {}", data.name);
add_library(sess, cnum, cstore::RequireDynamic, &mut formats);
let deps = csearch::get_dylib_dependency_formats(&sess.cstore, cnum);
- for &(depnum, style) in deps.iter() {
+ for &(depnum, style) in &deps {
debug!("adding {:?}: {}", style,
sess.cstore.get_crate_data(depnum).name.clone());
add_library(sess, depnum, style, &mut formats);
but you have one or more functions named 'main' that are not \
defined at the crate level. Either move the definition or \
attach the `#[main]` attribute to override this behavior.");
- for &(_, span) in this.non_main_fns.iter() {
+ for &(_, span) in &this.non_main_fns {
this.session.span_note(span, "here is a function named 'main'");
}
this.session.abort_if_errors();
fn walk_arg_patterns(&mut self,
decl: &ast::FnDecl,
body: &ast::Block) {
- for arg in decl.inputs.iter() {
+ for arg in &decl.inputs {
let arg_ty = return_if_err!(self.typer.node_ty(arg.pat.id));
let fn_body_scope = region::CodeExtent::from_node_id(body.id);
}
fn consume_exprs(&mut self, exprs: &Vec<P<ast::Expr>>) {
- for expr in exprs.iter() {
+ for expr in exprs {
self.consume_expr(&**expr);
}
}
ast::ExprIf(ref cond_expr, ref then_blk, ref opt_else_expr) => {
self.consume_expr(&**cond_expr);
self.walk_block(&**then_blk);
- for else_expr in opt_else_expr.iter() {
+ if let Some(ref else_expr) = *opt_else_expr {
self.consume_expr(&**else_expr);
}
}
self.borrow_expr(&**discr, ty::ReEmpty, ty::ImmBorrow, MatchDiscriminant);
// treatment of the discriminant is handled while walking the arms.
- for arm in arms.iter() {
+ for arm in arms {
let mode = self.arm_move_mode(discr_cmt.clone(), arm);
let mode = mode.match_mode();
self.walk_arm(discr_cmt.clone(), arm, mode);
}
ast::ExprInlineAsm(ref ia) => {
- for &(_, ref input) in ia.inputs.iter() {
+ for &(_, ref input) in &ia.inputs {
self.consume_expr(&**input);
}
- for &(_, ref output, is_rw) in ia.outputs.iter() {
+ for &(_, ref output, is_rw) in &ia.outputs {
self.mutate_expr(expr, &**output,
if is_rw { WriteAndRead } else { JustWrite });
}
}
ast::ExprRet(ref opt_expr) => {
- for expr in opt_expr.iter() {
+ if let Some(ref expr) = *opt_expr {
self.consume_expr(&**expr);
}
}
fn walk_block(&mut self, blk: &ast::Block) {
debug!("walk_block(blk.id={})", blk.id);
- for stmt in blk.stmts.iter() {
+ for stmt in &blk.stmts {
self.walk_stmt(&**stmt);
}
- for tail_expr in blk.expr.iter() {
+ if let Some(ref tail_expr) = blk.expr {
self.consume_expr(&**tail_expr);
}
}
fields: &Vec<ast::Field>,
opt_with: &Option<P<ast::Expr>>) {
// Consume the expressions supplying values for each field.
- for field in fields.iter() {
+ for field in fields {
self.consume_expr(&*field.expr);
}
};
// Consume those fields of the with expression that are needed.
- for with_field in with_fields.iter() {
+ for with_field in &with_fields {
if !contains_field_named(with_field, fields) {
let cmt_field = self.mc.cat_field(&*with_expr,
with_cmt.clone(),
match pass_args {
PassArgs::ByValue => {
self.consume_expr(receiver);
- for &arg in rhs.iter() {
+ for &arg in &rhs {
self.consume_expr(arg);
}
let r = ty::ReScope(region::CodeExtent::from_node_id(expr.id));
let bk = ty::ImmBorrow;
- for &arg in rhs.iter() {
+ for &arg in &rhs {
self.borrow_expr(arg, r, bk, OverloadedOperator);
}
return true;
fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &ast::Arm) -> TrackMatchMode<Span> {
let mut mode = Unknown;
- for pat in arm.pats.iter() {
+ for pat in &arm.pats {
self.determine_pat_move_mode(discr_cmt.clone(), &**pat, &mut mode);
}
mode
}
fn walk_arm(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &ast::Arm, mode: MatchMode) {
- for pat in arm.pats.iter() {
+ for pat in &arm.pats {
self.walk_pat(discr_cmt.clone(), &**pat, mode);
}
- for guard in arm.guard.iter() {
+ if let Some(ref guard) = arm.guard {
self.consume_expr(&**guard);
}
debug!("walk_captures({})", closure_expr.repr(self.tcx()));
ty::with_freevars(self.tcx(), closure_expr.id, |freevars| {
- for freevar in freevars.iter() {
+ for freevar in freevars {
let id_var = freevar.def.def_id().node;
let upvar_id = ty::UpvarId { var_id: id_var,
closure_expr_id: closure_expr.id };
{
let mut substs = subst::Substs::empty();
- for &space in subst::ParamSpace::all().iter() {
+ for &space in &subst::ParamSpace::all() {
let a_tps = a_subst.types.get_slice(space);
let b_tps = b_subst.types.get_slice(space);
let tps = try!(self.tps(space, a_tps, b_tps));
}
(&NonerasedRegions(ref a), &NonerasedRegions(ref b)) => {
- for &space in subst::ParamSpace::all().iter() {
+ for &space in &subst::ParamSpace::all() {
let a_regions = a.get_slice(space);
let b_regions = b.get_slice(space);
variances.regions.get_slice(space)
}
None => {
- for _ in a_regions.iter() {
+ for _ in a_regions {
invariance.push(ty::Invariant);
}
&invariance[]
errors: &Vec<RegionResolutionError<'tcx>>) {
let p_errors = self.process_errors(errors);
let errors = if p_errors.is_empty() { errors } else { &p_errors };
- for error in errors.iter() {
+ for error in errors {
match error.clone() {
ConcreteFailure(origin, sub, sup) => {
self.report_concrete_failure(origin, sub, sup);
let mut trace_origins = Vec::new();
let mut same_regions = Vec::new();
let mut processed_errors = Vec::new();
- for error in errors.iter() {
+ for error in errors {
match error.clone() {
ConcreteFailure(origin, sub, sup) => {
debug!("processing ConcreteFailure");
}
if !same_regions.is_empty() {
let common_scope_id = same_regions[0].scope_id;
- for sr in same_regions.iter() {
+ for sr in &same_regions {
// Since ProcessedErrors is used to reconstruct the function
// declaration, we want to make sure that they are, in fact,
// from the same scope
var_origins: &[RegionVariableOrigin],
trace_origins: &[(TypeTrace<'tcx>, ty::type_err<'tcx>)],
same_regions: &[SameRegions]) {
- for vo in var_origins.iter() {
+ for vo in var_origins {
self.report_inference_failure(vo.clone());
}
self.give_suggestion(same_regions);
- for &(ref trace, terr) in trace_origins.iter() {
+ for &(ref trace, terr) in trace_origins {
self.report_type_error(trace.clone(), &terr);
}
}
let mut ty_params = self.generics.ty_params.clone();
let where_clause = self.generics.where_clause.clone();
let mut kept_lifetimes = HashSet::new();
- for sr in self.same_regions.iter() {
+ for sr in self.same_regions {
self.cur_anon.set(0);
self.offset_cur_anon();
let (anon_nums, region_names) =
// vector of string and then sort them. However, it makes the
// choice of lifetime name deterministic and thus easier to test.
let mut names = Vec::new();
- for rn in region_names.iter() {
+ for rn in region_names {
let lt_name = token::get_name(*rn).get().to_string();
names.push(lt_name);
}
-> (HashSet<u32>, HashSet<ast::Name>) {
let mut anon_nums = HashSet::new();
let mut region_names = HashSet::new();
- for br in same_regions.regions.iter() {
+ for br in &same_regions.regions {
match *br {
ty::BrAnon(i) => {
anon_nums.insert(i);
fn extract_all_region_names(&self) -> HashSet<ast::Name> {
let mut all_region_names = HashSet::new();
- for sr in self.same_regions.iter() {
- for br in sr.regions.iter() {
+ for sr in self.same_regions {
+ for br in &sr.regions {
match *br {
ty::BrNamed(_, name) => {
all_region_names.insert(name);
where_clause: ast::WhereClause)
-> ast::Generics {
let mut lifetimes = Vec::new();
- for lt in add.iter() {
+ for lt in add {
lifetimes.push(ast::LifetimeDef { lifetime: *lt,
bounds: Vec::new() });
}
- for lt in generics.lifetimes.iter() {
+ for lt in &generics.lifetimes {
if keep.contains(&lt.lifetime.name) ||
!remove.contains(&lt.lifetime.name) {
lifetimes.push((*lt).clone());
region_names: &HashSet<ast::Name>)
-> Vec<ast::Arg> {
let mut new_inputs = Vec::new();
- for arg in inputs.iter() {
+ for arg in inputs {
let new_ty = self.rebuild_arg_ty_or_output(&*arg.ty, lifetime,
anon_nums, region_names);
let possibly_new_arg = ast::Arg {
impl LifeGiver {
fn with_taken(taken: &[ast::LifetimeDef]) -> LifeGiver {
let mut taken_ = HashSet::new();
- for lt in taken.iter() {
+ for lt in taken {
let lt_name = token::get_name(lt.lifetime.name).get().to_string();
taken_.insert(lt_name);
}
// in both A and B. Replace the variable with the "first"
// bound region from A that we find it to be associated
// with.
- for (a_br, a_r) in a_map.iter() {
+ for (a_br, a_r) in a_map {
if tainted.iter().any(|x| x == a_r) {
debug!("generalize_region(r0={:?}): \
replacing with {:?}, tainted={:?}",
let mut a_r = None;
let mut b_r = None;
let mut only_new_vars = true;
- for r in tainted.iter() {
+ for r in &tainted {
if is_var_in_set(a_vars, *r) {
if a_r.is_some() {
return fresh_bound_variable(infcx, debruijn);
a_map: &FnvHashMap<ty::BoundRegion, ty::Region>,
r: ty::Region) -> ty::Region
{
- for (a_br, a_r) in a_map.iter() {
+ for (a_br, a_r) in a_map {
if *a_r == r {
return ty::ReLateBound(ty::DebruijnIndex::new(1), *a_br);
}
skol_map.repr(infcx.tcx));
let new_vars = infcx.region_vars_confined_to_snapshot(snapshot);
- for (&skol_br, &skol) in skol_map.iter() {
+ for (&skol_br, &skol) in skol_map {
let tainted = infcx.tainted_regions(snapshot, skol);
- for &tainted_region in tainted.iter() {
+ for &tainted_region in &tainted {
// Each skolemized should only be relatable to itself
// or new variables:
match tainted_region {
mk_msg(resolved_expected.map(|t| self.ty_to_string(t)), actual_ty),
error_str)[]);
- for err in err.iter() {
- ty::note_and_explain_type_err(self.tcx, *err)
+ if let Some(err) = err {
+ ty::note_and_explain_type_err(self.tcx, err)
}
}
}
a, b);
}
VerifyGenericBound(_, _, a, ref bs) => {
- for &b in bs.iter() {
+ for &b in bs {
consider_adding_bidirectional_edges(
&mut result_set, r,
a, b);
errors: &mut Vec<RegionResolutionError<'tcx>>)
{
let mut reg_reg_dups = FnvHashSet();
- for verify in self.verifys.borrow().iter() {
+ for verify in &*self.verifys.borrow() {
match *verify {
VerifyRegSubReg(ref origin, sub, sup) => {
if self.is_subregion_of(sub, sup) {
}
let dummy_idx = graph.add_node(());
- for (constraint, _) in constraints.iter() {
+ for (constraint, _) in &*constraints {
match *constraint {
ConstrainVarSubVar(a_id, b_id) => {
graph.add_edge(NodeIndex(a_id.index as uint),
lower_bounds.sort_by(|a, b| { free_regions_first(a, b) });
upper_bounds.sort_by(|a, b| { free_regions_first(a, b) });
- for lower_bound in lower_bounds.iter() {
- for upper_bound in upper_bounds.iter() {
+ for lower_bound in &lower_bounds {
+ for upper_bound in &upper_bounds {
if !self.is_subregion_of(lower_bound.region,
upper_bound.region) {
errors.push(SubSupConflict(
return;
}
- for upper_bound_1 in upper_bounds.iter() {
- for upper_bound_2 in upper_bounds.iter() {
+ for upper_bound_1 in &upper_bounds {
+ for upper_bound_2 in &upper_bounds {
match self.glb_concrete_regions(upper_bound_1.region,
upper_bound_2.region) {
Ok(_) => {}
changed = false;
iteration += 1;
debug!("---- {} Iteration {}{}", "#", tag, iteration);
- for (constraint, _) in self.constraints.borrow().iter() {
+ for (constraint, _) in &*self.constraints.borrow() {
let edge_changed = body(constraint);
if edge_changed {
debug!("Updated due to constraint {}",
already instantiated")
};
- for &(dir, vid) in relations.iter() {
+ for &(dir, vid) in &relations {
stack.push((ty, dir, vid));
}
let mut escaping_types = Vec::new();
let actions_since_snapshot = self.values.actions_since_snapshot(&s.snapshot);
debug!("actions_since_snapshot.len() = {}", actions_since_snapshot.len());
- for action in actions_since_snapshot.iter() {
+ for action in actions_since_snapshot {
match *action {
sv::UndoLog::NewElem(index) => {
// if any new variables were created during the
(self.fn_once_trait(), ty::FnOnceClosureKind),
];
- for &(opt_def_id, kind) in def_id_kinds.iter() {
+ for &(opt_def_id, kind) in &def_id_kinds {
if Some(id) == opt_def_id {
return Some(kind);
}
}
pub fn extract(attrs: &[ast::Attribute]) -> Option<InternedString> {
- for attribute in attrs.iter() {
+ for attribute in attrs {
match attribute.value_str() {
Some(ref value) if attribute.check_name("lang") => {
return Some(value.clone());
debug!("creating fn_maps: {:?}", &fn_maps as *const IrMaps);
- for arg in decl.inputs.iter() {
+ for arg in &decl.inputs {
pat_util::pat_bindings(&ir.tcx.def_map,
&*arg.pat,
|_bm, arg_id, _x, path1| {
}
fn visit_arm(ir: &mut IrMaps, arm: &ast::Arm) {
- for pat in arm.pats.iter() {
+ for pat in &arm.pats {
pat_util::pat_bindings(&ir.tcx.def_map, &**pat, |bm, p_id, sp, path1| {
debug!("adding local variable {} from match with bm {:?}",
p_id, bm);
// construction site.
let mut call_caps = Vec::new();
ty::with_freevars(ir.tcx, expr.id, |freevars| {
- for fv in freevars.iter() {
+ for fv in freevars {
if let DefLocal(rv) = fv.def {
let fv_ln = ir.add_live_node(FreeVarNode(fv.span));
call_caps.push(CaptureInfo {ln: fv_ln,
let ln = self.live_node(expr.id, expr.span);
self.init_empty(ln, succ);
let mut first_merge = true;
- for arm in arms.iter() {
+ for arm in arms {
let body_succ =
self.propagate_through_expr(&*arm.body, succ);
let guard_succ =
}
ast::ExprInlineAsm(ref ia) => {
- for &(_, ref input) in ia.inputs.iter() {
+ for &(_, ref input) in &ia.inputs {
this.visit_expr(&**input);
}
// Output operands must be lvalues
- for &(_, ref out, _) in ia.outputs.iter() {
+ for &(_, ref out, _) in &ia.outputs {
this.check_lvalue(&**out);
this.visit_expr(&**out);
}
}
fn warn_about_unused_args(&self, decl: &ast::FnDecl, entry_ln: LiveNode) {
- for arg in decl.inputs.iter() {
+ for arg in &decl.inputs {
pat_util::pat_bindings(&self.ir.tcx.def_map,
&*arg.pat,
|_bm, p_id, sp, path1| {
-> bool {
if !self.used_on_entry(ln, var) {
let r = self.should_warn(var);
- for name in r.iter() {
+ if let Some(name) = r {
// annoying: for parameters in funcs like `fn(x: int)
// {ret}`, there is only one node, so asking about
if is_assigned {
self.ir.tcx.sess.add_lint(lint::builtin::UNUSED_VARIABLES, id, sp,
format!("variable `{}` is assigned to, but never used",
- *name));
+ name));
} else {
self.ir.tcx.sess.add_lint(lint::builtin::UNUSED_VARIABLES, id, sp,
- format!("unused variable: `{}`", *name));
+ format!("unused variable: `{}`", name));
}
}
true
var: Variable) {
if self.live_on_exit(ln, var).is_none() {
let r = self.should_warn(var);
- for name in r.iter() {
+ if let Some(name) = r {
self.ir.tcx.sess.add_lint(lint::builtin::UNUSED_ASSIGNMENTS, id, sp,
- format!("value assigned to `{}` is never read", *name));
+ format!("value assigned to `{}` is never read", name));
}
}
}
}
}
Some(&def::DefConst(..)) => {
- for subpat in subpats.iter() {
+ for subpat in subpats {
try!(self.cat_pattern_(cmt.clone(), &**subpat, op));
}
}
ast::PatStruct(_, ref field_pats, _) => {
// {f1: p1, ..., fN: pN}
- for fp in field_pats.iter() {
+ for fp in field_pats {
let field_ty = try!(self.pat_ty(&*fp.node.pat)); // see (*2)
let cmt_field = self.cat_field(pat, cmt.clone(), fp.node.ident.name, field_ty);
try!(self.cat_pattern_(cmt_field, &*fp.node.pat, op));
ast::PatVec(ref before, ref slice, ref after) => {
let elt_cmt = try!(self.cat_index(pat, try!(self.deref_vec(pat, cmt))));
- for before_pat in before.iter() {
+ for before_pat in before {
try!(self.cat_pattern_(elt_cmt.clone(), &**before_pat, op));
}
- for slice_pat in slice.iter() {
+ if let Some(ref slice_pat) = *slice {
let slice_ty = try!(self.pat_ty(&**slice_pat));
let slice_cmt = self.cat_rvalue_node(pat.id(), pat.span(), slice_ty);
try!(self.cat_pattern_(slice_cmt, &**slice_pat, op));
}
- for after_pat in after.iter() {
+ for after_pat in after {
try!(self.cat_pattern_(elt_cmt.clone(), &**after_pat, op));
}
}
// this properly would result in the necessity of computing *type*
// reachability, which might result in a compile time loss.
fn mark_destructors_reachable(&mut self) {
- for (_, destructor_def_id) in self.tcx.destructor_for_type.borrow().iter() {
+ for (_, destructor_def_id) in &*self.tcx.destructor_for_type.borrow() {
if destructor_def_id.krate == ast::LOCAL_CRATE {
self.reachable_symbols.insert(destructor_def_id.node);
}
// other crates link to us, they're going to expect to be able to
// use the lang items, so we need to be sure to mark them as
// exported.
- for id in exported_items.iter() {
+ for id in exported_items {
reachable_context.worklist.push(*id);
}
for (_, item) in tcx.lang_items.items() {
use syntax::attr::AttrMetaMethods;
pub fn update_recursion_limit(sess: &Session, krate: &ast::Crate) {
- for attr in krate.attrs.iter() {
+ for attr in &krate.attrs {
if !attr.check_name("recursion_limit") {
continue;
}
record_rvalue_scope(visitor, &**subexpr, blk_id);
}
ast::ExprStruct(_, ref fields, _) => {
- for field in fields.iter() {
+ for field in fields {
record_rvalue_scope_if_borrow_expr(
visitor, &*field.expr, blk_id);
}
}
ast::ExprVec(ref subexprs) |
ast::ExprTup(ref subexprs) => {
- for subexpr in subexprs.iter() {
+ for subexpr in subexprs {
record_rvalue_scope_if_borrow_expr(
visitor, &**subexpr, blk_id);
}
}
fn visit_generics(&mut self, generics: &ast::Generics) {
- for ty_param in generics.ty_params.iter() {
+ for ty_param in &*generics.ty_params {
visit::walk_ty_param_bounds_helper(self, &ty_param.bounds);
match ty_param.default {
Some(ref ty) => self.visit_ty(&**ty),
None => {}
}
}
- for predicate in generics.where_clause.predicates.iter() {
+ for predicate in &generics.where_clause.predicates {
match predicate {
&ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate{ ref bounded_ty,
ref bounds,
.. }) => {
self.visit_lifetime_ref(lifetime);
- for bound in bounds.iter() {
+ for bound in bounds {
self.visit_lifetime_ref(bound);
}
}
self.with(LateScope(&trait_ref.bound_lifetimes, self.scope), |old_scope, this| {
this.check_lifetime_defs(old_scope, &trait_ref.bound_lifetimes);
- for lifetime in trait_ref.bound_lifetimes.iter() {
+ for lifetime in &trait_ref.bound_lifetimes {
this.visit_lifetime_def(lifetime);
}
this.visit_trait_ref(&trait_ref.trait_ref)
let lifetime_i = &lifetimes[i];
let special_idents = [special_idents::static_lifetime];
- for lifetime in lifetimes.iter() {
+ for lifetime in lifetimes {
if special_idents.iter().any(|&i| i.name == lifetime.lifetime.name) {
span_err!(self.sess, lifetime.lifetime.span, E0262,
"illegal lifetime parameter name: `{}`",
// It is a soft error to shadow a lifetime within a parent scope.
self.check_lifetime_def_for_shadowing(old_scope, &lifetime_i.lifetime);
- for bound in lifetime_i.bounds.iter() {
+ for bound in &lifetime_i.bounds {
self.resolve_lifetime_ref(bound);
}
}
let mut collector =
FreeLifetimeCollector { early_bound: &mut early_bound,
late_bound: &mut late_bound };
- for ty_param in generics.ty_params.iter() {
+ for ty_param in &*generics.ty_params {
visit::walk_ty_param_bounds_helper(&mut collector, &ty_param.bounds);
}
- for predicate in generics.where_clause.predicates.iter() {
+ for predicate in &generics.where_clause.predicates {
match predicate {
&ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate{ref bounds,
ref bounded_ty,
..}) => {
collector.visit_lifetime_ref(lifetime);
- for bound in bounds.iter() {
+ for bound in bounds {
collector.visit_lifetime_ref(bound);
}
}
// Any lifetime that either has a bound or is referenced by a
// bound is early.
- for lifetime_def in generics.lifetimes.iter() {
+ for lifetime_def in &generics.lifetimes {
if !lifetime_def.bounds.is_empty() {
shuffle(&mut early_bound, &mut late_bound,
lifetime_def.lifetime.name);
- for bound in lifetime_def.bounds.iter() {
+ for bound in &lifetime_def.bounds {
shuffle(&mut early_bound, &mut late_bound,
bound.name);
}
/// Construct the stability index for a crate being compiled.
pub fn build(sess: &Session, krate: &Crate) -> Index {
let mut staged_api = false;
- for attr in krate.attrs.iter() {
+ for attr in &krate.attrs {
if attr.name().get() == "staged_api" {
match attr.node.value.node {
ast::MetaWord(_) => {
maybe_do_stability_check(tcx, id, item.span, cb);
}
ast::ItemTrait(_, _, ref supertraits, _) => {
- for t in supertraits.iter() {
+ for t in &**supertraits {
if let ast::TraitTyParamBound(ref t, _) = *t {
let id = ty::trait_ref_to_def_id(tcx, &t.trait_ref);
maybe_do_stability_check(tcx, id, t.trait_ref.path.span, cb);
let mut active_lib_features: FnvHashMap<InternedString, Span>
= lib_features.clone().into_iter().collect();
- for used_feature in used_lib_features.iter() {
+ for used_feature in used_lib_features {
active_lib_features.remove(used_feature);
}
- for (_, &span) in active_lib_features.iter() {
+ for (_, &span) in &active_lib_features {
sess.add_lint(lint::builtin::UNUSED_FEATURES,
ast::CRATE_NODE_ID,
span,
impl<T: fmt::Debug> fmt::Debug for VecPerParamSpace<T> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
try!(write!(fmt, "VecPerParamSpace {{"));
- for space in ParamSpace::all().iter() {
+ for space in &ParamSpace::all() {
try!(write!(fmt, "{:?}: {:?}, ", *space, self.get_slice(*space)));
}
try!(write!(fmt, "}}"));
fn clone_slice<T:Clone>(x: &[T]) -> Vec<T> {
let mut v = Vec::new();
- for e in x.iter() {
+ for e in &x {
v.push((*e).clone()); // (*)
}
}
pub fn report_fulfillment_errors<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>,
errors: &Vec<FulfillmentError<'tcx>>) {
- for error in errors.iter() {
+ for error in errors {
report_fulfillment_error(infcx, error);
}
}
span: Span) -> Option<String> {
let def_id = trait_ref.def_id;
let mut report = None;
- for item in ty::get_attrs(infcx.tcx, def_id).iter() {
+ for item in &*ty::get_attrs(infcx.tcx, def_id) {
if item.check_name("rustc_on_unimplemented") {
let err_sp = if item.meta().span == DUMMY_SP {
span
// The `Self` type is erased, so it should not appear in list of
// arguments or return type apart from the receiver.
let ref sig = method.fty.sig;
- for &input_ty in sig.0.inputs[1..].iter() {
+ for &input_ty in &sig.0.inputs[1..] {
if contains_illegal_self_type_reference(tcx, trait_def_id, input_ty) {
return Some(MethodViolationCode::ReferencesSelf);
}
let impl_items = &impl_items_map[impl_vtable.impl_def_id];
let mut impl_ty = None;
- for impl_item in impl_items.iter() {
+ for impl_item in impl_items {
let assoc_type = match impl_or_trait_items_map[impl_item.def_id()] {
ty::TypeTraitItem(ref assoc_type) => assoc_type.clone(),
ty::MethodTraitItem(..) => { continue; }
debug!("assemble_candidates_from_impls(self_ty={})", self_ty.repr(self.tcx()));
let all_impls = self.all_impls(obligation.predicate.def_id());
- for &impl_def_id in all_impls.iter() {
+ for &impl_def_id in &all_impls {
self.infcx.probe(|snapshot| {
let (skol_obligation_trait_pred, skol_map) =
self.infcx().skolemize_late_bound_regions(&obligation.predicate, snapshot);
}
let trait_items = ty::trait_items(tcx, bound_ref.def_id());
- for trait_item in trait_items.iter() {
+ for trait_item in &**trait_items {
match *trait_item {
ty::MethodTraitItem(_) => method_count += 1,
ty::TypeTraitItem(_) => {}
$(let mut $variant = total;)*
- for (_, t) in tcx.interner.borrow().iter() {
+ for (_, t) in &*tcx.interner.borrow() {
let variant = match t.sty {
ty::ty_bool | ty::ty_char | ty::ty_int(..) | ty::ty_uint(..) |
ty::ty_float(..) | ty::ty_str => continue,
&ty_trait(box TyTrait { ref principal, ref bounds }) => {
let mut computation = FlagComputation::new();
computation.add_substs(principal.0.substs);
- for projection_bound in bounds.projection_bounds.iter() {
+ for projection_bound in &bounds.projection_bounds {
let mut proj_computation = FlagComputation::new();
proj_computation.add_projection_predicate(&projection_bound.0);
computation.add_bound_computation(&proj_computation);
}
fn add_tys(&mut self, tys: &[Ty]) {
- for &ty in tys.iter() {
+ for &ty in tys {
self.add_ty(ty);
}
}
// make no assumptions (other than that it cannot have an
// in-scope type parameter within, which makes no sense).
let mut tc = TC::All - TC::InteriorParam;
- for bound in bounds.builtin_bounds.iter() {
+ for bound in &bounds.builtin_bounds {
tc = tc - match bound {
BoundSync | BoundSend | BoundCopy => TC::None,
BoundSized => TC::Nonsized,
pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field])
-> uint {
let mut i = 0;
- for f in fields.iter() { if f.name == name { return i; } i += 1; }
+ for f in fields { if f.name == name { return i; } i += 1; }
tcx.sess.bug(&format!(
"no field named `{}` found in the list of fields `{:?}`",
token::get_name(name),
{
let mut vec = Vec::new();
- for builtin_bound in bounds.builtin_bounds.iter() {
+ for builtin_bound in &bounds.builtin_bounds {
match traits::trait_ref_for_builtin_bound(tcx, builtin_bound, param_ty) {
Ok(trait_ref) => { vec.push(trait_ref.as_predicate()); }
Err(ErrorReported) => { }
}
}
- for &region_bound in bounds.region_bounds.iter() {
+ for &region_bound in &bounds.region_bounds {
// account for the binder being introduced below; no need to shift `param_ty`
// because, at present at least, it can only refer to early-bound regions
let region_bound = ty_fold::shift_region(region_bound, 1);
vec.push(ty::Binder(ty::OutlivesPredicate(param_ty, region_bound)).as_predicate());
}
- for bound_trait_ref in bounds.trait_bounds.iter() {
+ for bound_trait_ref in &bounds.trait_bounds {
vec.push(bound_trait_ref.as_predicate());
}
- for projection in bounds.projection_bounds.iter() {
+ for projection in &bounds.projection_bounds {
vec.push(projection.as_predicate());
}
// Record the trait->implementation mappings, if applicable.
let associated_traits = csearch::get_impl_trait(tcx, impl_def_id);
- for trait_ref in associated_traits.iter() {
+ if let Some(ref trait_ref) = associated_traits {
record_trait_implementation(tcx, trait_ref.def_id, impl_def_id);
}
// For any methods that use a default implementation, add them to
// the map. This is a bit unfortunate.
- for impl_item_def_id in impl_items.iter() {
+ for impl_item_def_id in &impl_items {
let method_def_id = impl_item_def_id.def_id();
match impl_or_trait_item(tcx, method_def_id) {
MethodTraitItem(method) => {
- for &source in method.provided_source.iter() {
+ if let Some(source) = method.provided_source {
tcx.provided_method_sources
.borrow_mut()
.insert(method_def_id, source);
// For any methods that use a default implementation, add them to
// the map. This is a bit unfortunate.
- for impl_item_def_id in impl_items.iter() {
+ for impl_item_def_id in &impl_items {
let method_def_id = impl_item_def_id.def_id();
match impl_or_trait_item(tcx, method_def_id) {
MethodTraitItem(method) => {
- for &source in method.provided_source.iter() {
+ if let Some(source) = method.provided_source {
tcx.provided_method_sources
.borrow_mut()
.insert(method_def_id, source);
};
let fn_sig = |&: state: &mut SipHasher, sig: &Binder<FnSig<'tcx>>| {
let sig = anonymize_late_bound_regions(tcx, sig).0;
- for a in sig.inputs.iter() { helper(tcx, *a, svh, state); }
+ for a in &sig.inputs { helper(tcx, *a, svh, state); }
if let ty::FnConverging(output) = sig.output {
helper(tcx, output, svh, state);
}
free_id: ast::NodeId,
region_params: &[RegionParameterDef])
{
- for r in region_params.iter() {
+ for r in region_params {
regions.push(r.space, ty::free_region_from_def(free_id, r));
}
}
fn push_types_from_defs<'tcx>(tcx: &ty::ctxt<'tcx>,
types: &mut VecPerParamSpace<Ty<'tcx>>,
defs: &[TypeParameterDef<'tcx>]) {
- for def in defs.iter() {
+ for def in defs {
debug!("construct_parameter_environment(): push_types_from_defs: def={:?}",
def.repr(tcx));
let ty = ty::mk_param_from_def(tcx, def);
fn record_region_bounds<'tcx>(tcx: &ty::ctxt<'tcx>, predicates: &[ty::Predicate<'tcx>]) {
debug!("record_region_bounds(predicates={:?})", predicates.repr(tcx));
- for predicate in predicates.iter() {
+ for predicate in predicates {
match *predicate {
Predicate::Projection(..) |
Predicate::Trait(..) |
let did = match self_type.sty {
ty::ty_struct(struct_did, substs) => {
let fields = ty::struct_fields(tcx, struct_did, substs);
- for field in fields.iter() {
+ for field in &fields {
if type_moves_by_default(param_env, span, field.mt.ty) {
return Err(FieldDoesNotImplementCopy(field.name))
}
}
ty::ty_enum(enum_did, substs) => {
let enum_variants = ty::enum_variants(tcx, enum_did);
- for variant in enum_variants.iter() {
- for variant_arg_type in variant.args.iter() {
+ for variant in &*enum_variants {
+ for variant_arg_type in &variant.args {
let substd_arg_type =
variant_arg_type.subst(tcx, substs);
if type_moves_by_default(param_env, span, substd_arg_type) {
let mut missing = HashSet::new();
sess.cstore.iter_crate_data(|cnum, _| {
- for item in csearch::get_missing_lang_items(&sess.cstore, cnum).iter() {
+ for item in &csearch::get_missing_lang_items(&sess.cstore, cnum) {
missing.insert(*item);
}
});
},
_ => {
diagnostic.handler().err("multiple plugin registration functions found");
- for &(_, span) in finder.registrars.iter() {
+ for &(_, span) in &finder.registrars {
diagnostic.span_note(span, "one is here");
}
diagnostic.handler().abort_if_errors();
// We need to error on `#[macro_use] extern crate` when it isn't at the
// crate root, because `$crate` won't work properly. Identify these by
// spans, because the crate map isn't set up yet.
- for item in krate.module.items.iter() {
+ for item in &krate.module.items {
if let ast::ItemExternCrate(_) = item.node {
loader.span_whitelist.insert(item.span);
}
visit::walk_crate(&mut loader, krate);
if let Some(plugins) = addl_plugins {
- for plugin in plugins.iter() {
+ for plugin in &plugins {
loader.load_plugin(CrateOrString::Str(plugin.as_slice()),
None, None, None)
}
let mut plugin_attr = None;
let mut macro_selection = Some(HashSet::new()); // None => load all
let mut reexport = HashSet::new();
- for attr in item.attrs.iter() {
+ for attr in &item.attrs {
let mut used = true;
match attr.name().get() {
"phase" => {
macro_selection = None;
}
if let (Some(sel), Some(names)) = (macro_selection.as_mut(), names) {
- for name in names.iter() {
+ for name in names {
if let ast::MetaWord(ref name) = name.node {
sel.insert(name.clone());
} else {
}
};
- for name in names.iter() {
+ for name in names {
if let ast::MetaWord(ref name) = name.node {
reexport.insert(name.clone());
} else {
let value = iter.next();
let option_to_lookup = key.replace("-", "_");
let mut found = false;
- for &(candidate, setter, opt_type_desc, _) in $stat.iter() {
+ for &(candidate, setter, opt_type_desc, _) in $stat {
if option_to_lookup != candidate { continue }
if !setter(&mut op, value) {
match (value, opt_type_desc) {
let mut lint_opts = vec!();
let mut describe_lints = false;
- for &level in [lint::Allow, lint::Warn, lint::Deny, lint::Forbid].iter() {
+ for &level in &[lint::Allow, lint::Warn, lint::Deny, lint::Forbid] {
for lint_name in matches.opt_strs(level.as_str()).into_iter() {
if lint_name == "help" {
describe_lints = true;
let mut output_types = Vec::new();
if !debugging_opts.parse_only && !no_trans {
let unparsed_output_types = matches.opt_strs("emit");
- for unparsed_output_type in unparsed_output_types.iter() {
+ for unparsed_output_type in &unparsed_output_types {
for part in unparsed_output_type.split(',') {
let output_type = match part.as_slice() {
"asm" => OutputTypeAssembly,
};
let mut search_paths = SearchPaths::new();
- for s in matches.opt_strs("L").iter() {
+ for s in &matches.opt_strs("L") {
search_paths.add_path(&s[]);
}
};
let mut externs = HashMap::new();
- for arg in matches.opt_strs("extern").iter() {
+ for arg in &matches.opt_strs("extern") {
let mut parts = arg.splitn(1, '=');
let name = match parts.next() {
Some(s) => s,
pub fn parse_crate_types_from_list(list_list: Vec<String>) -> Result<Vec<CrateType>, String> {
let mut crate_types: Vec<CrateType> = Vec::new();
- for unparsed_crate_type in list_list.iter() {
+ for unparsed_crate_type in &list_list {
for part in unparsed_crate_type.split(',') {
let new_part = match part {
"lib" => default_lib_output(),
while i < queue.len() {
match edges_map.get(&queue[i]) {
Some(edges) => {
- for target in edges.iter() {
+ for target in edges {
if *target == destination {
return true;
}
impl Writer for FnvHasher {
fn write(&mut self, bytes: &[u8]) {
let FnvHasher(mut hash) = *self;
- for byte in bytes.iter() {
+ for byte in bytes {
hash = hash ^ (*byte as u64);
hash = hash * 0x100000001b3;
}
0
};
- for t in tps[..tps.len() - num_defaults].iter() {
+ for t in &tps[..tps.len() - num_defaults] {
strs.push(ty_to_string(cx, *t))
}
- for projection in projections.iter() {
+ for projection in projections {
strs.push(format!("{}={}",
projection.projection_ty.item_name.user_string(cx),
projection.ty.user_string(cx)));
components.push(tap.user_string(tcx));
// Builtin bounds.
- for bound in bounds.builtin_bounds.iter() {
+ for bound in &bounds.builtin_bounds {
components.push(bound.user_string(tcx));
}
impl<'tcx> Repr<'tcx> for ty::BuiltinBounds {
fn repr(&self, _tcx: &ctxt) -> String {
let mut res = Vec::new();
- for b in self.iter() {
+ for b in self {
res.push(match b {
ty::BoundSend => "Send".to_string(),
ty::BoundSized => "Sized".to_string(),
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
let mut res = Vec::new();
res.push(self.builtin_bounds.repr(tcx));
- for t in self.trait_bounds.iter() {
+ for t in &self.trait_bounds {
res.push(t.repr(tcx));
}
res.connect("+")
if !s.is_empty() {
result.push(s);
}
- for n in self.trait_bounds.iter() {
+ for n in &self.trait_bounds {
result.push(n.user_string(tcx));
}
result.connect(" + ")
res.push(region_str);
}
- for bound in self.builtin_bounds.iter() {
+ for bound in &self.builtin_bounds {
res.push(bound.user_string(tcx));
}
- for projection_bound in self.projection_bounds.iter() {
+ for projection_bound in &self.projection_bounds {
res.push(projection_bound.user_string(tcx));
}
let oslibname = format!("{}{}{}", osprefix, name, ossuffix);
let unixlibname = format!("lib{}.a", name);
- for path in search_paths.iter() {
+ for path in search_paths {
debug!("looking for {} inside {:?}", name, path.display());
let test = path.join(&oslibname[]);
if test.exists() { return test }
// 32,768, and we leave a bit of extra space for the program name.
static ARG_LENGTH_LIMIT: uint = 32000;
- for member_name in self.members.iter() {
+ for member_name in &self.members {
let len = member_name.as_vec().len();
// `len + 1` to account for the space that's inserted before each
// all SYMDEF files as these are just magical placeholders which get
// re-created when we make a new archive anyway.
let files = try!(fs::readdir(loc.path()));
- for file in files.iter() {
+ for file in &files {
let filename = file.filename_str().unwrap();
if skip(filename) { continue }
if filename.contains(".SYMDEF") { continue }
fn rpaths_to_flags(rpaths: &[String]) -> Vec<String> {
let mut ret = Vec::new();
- for rpath in rpaths.iter() {
+ for rpath in rpaths {
ret.push(format!("-Wl,-rpath,{}", &(*rpath)[]));
}
return ret;
{
debug!("output: {:?}", config.out_filename.display());
debug!("libs:");
- for libpath in libs.iter() {
+ for libpath in libs {
debug!(" {:?}", libpath.display());
}
fn log_rpaths(desc: &str, rpaths: &[String]) {
debug!("{} rpaths:", desc);
- for rpath in rpaths.iter() {
+ for rpath in rpaths {
debug!(" {}", *rpath);
}
}
fn minimize_rpaths(rpaths: &[String]) -> Vec<String> {
let mut set = HashSet::new();
let mut minimized = Vec::new();
- for rpath in rpaths.iter() {
+ for rpath in rpaths {
if set.insert(&rpath[]) {
minimized.push(rpath.clone());
}
fn test_hash<D: Digest>(sh: &mut D, tests: &[Test]) {
// Test that it works when accepting the message all at once
- for t in tests.iter() {
+ for t in tests {
sh.reset();
sh.input_str(t.input.as_slice());
let out_str = sh.result_str();
}
// Test that it works when accepting the message in pieces
- for t in tests.iter() {
+ for t in tests {
sh.reset();
let len = t.input.len();
let mut left = len;
// avoid collisions.
let mut state = SipHasher::new();
- for data in metadata.iter() {
+ for data in metadata {
data.hash(&mut state);
}
//
// We hash only the MetaItems instead of the entire Attribute
// to avoid hashing the AttrId
- for attr in krate.attrs.iter() {
+ for attr in &krate.attrs {
attr.node.value.hash(&mut state);
}
let paths = os::split_paths(&target_path[]);
// FIXME 16351: add a sane default search path?
- for dir in paths.iter() {
+ for dir in &paths {
let p = dir.join(path.clone());
if p.is_file() {
return load_file(&p);
let loan_path = owned_ptr_base_path(loan_path);
let cont = self.each_in_scope_loan(scope, |loan| {
let mut ret = true;
- for restr_path in loan.restricted_paths.iter() {
+ for restr_path in &loan.restricted_paths {
if **restr_path == *loan_path {
if !op(loan) {
ret = false;
debug!("new_loan_indices = {:?}", new_loan_indices);
self.each_issued_loan(scope, |issued_loan| {
- for &new_loan_index in new_loan_indices.iter() {
+ for &new_loan_index in &new_loan_indices {
let new_loan = &self.all_loans[new_loan_index];
self.report_error_if_loans_conflict(issued_loan, new_loan);
}
for (i, &x) in new_loan_indices.iter().enumerate() {
let old_loan = &self.all_loans[x];
- for &y in new_loan_indices[(i+1) ..].iter() {
+ for &y in &new_loan_indices[(i+1) ..] {
let new_loan = &self.all_loans[y];
self.report_error_if_loans_conflict(old_loan, new_loan);
}
}
let loan2_base_path = owned_ptr_base_path_rc(&loan2.loan_path);
- for restr_path in loan1.restricted_paths.iter() {
+ for restr_path in &loan1.restricted_paths {
if *restr_path != loan2_base_path { continue; }
// If new_loan is something like `x.a`, and old_loan is something like `x.b`, we would
debug!("fragments 1 assigned: {:?}", path_lps(&assigned[]));
// Second, build parents from the moved and assigned.
- for m in moved.iter() {
+ for m in &moved {
let mut p = this.path_parent(*m);
while p != InvalidMovePathIndex {
parents.push(p);
p = this.path_parent(p);
}
}
- for a in assigned.iter() {
+ for a in &assigned {
let mut p = this.path_parent(*a);
while p != InvalidMovePathIndex {
parents.push(p);
debug!("fragments 3 assigned: {:?}", path_lps(&assigned[]));
// Fourth, build the leftover from the moved, assigned, and parents.
- for m in moved.iter() {
+ for m in &moved {
let lp = this.path_loan_path(*m);
add_fragment_siblings(this, tcx, &mut unmoved, lp, None);
}
- for a in assigned.iter() {
+ for a in &assigned {
let lp = this.path_loan_path(*a);
add_fragment_siblings(this, tcx, &mut unmoved, lp, None);
}
- for p in parents.iter() {
+ for p in &parents {
let lp = this.path_loan_path(*p);
add_fragment_siblings(this, tcx, &mut unmoved, lp, None);
}
let fields = ty::lookup_struct_fields(tcx, def_id);
match *origin_field_name {
mc::NamedField(ast_name) => {
- for f in fields.iter() {
+ for f in &fields {
if f.name == ast_name {
continue;
}
match *origin_field_name {
mc::NamedField(ast_name) => {
let variant_arg_names = variant_info.arg_names.as_ref().unwrap();
- for variant_arg_ident in variant_arg_names.iter() {
+ for variant_arg_ident in variant_arg_names {
if variant_arg_ident.name == ast_name {
continue;
}
fn report_move_errors<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
errors: &Vec<MoveError<'tcx>>) {
let grouped_errors = group_errors_with_same_origin(errors);
- for error in grouped_errors.iter() {
+ for error in &grouped_errors {
report_cannot_move_out_of(bccx, error.move_from.clone());
let mut is_first_note = true;
- for move_to in error.move_to_places.iter() {
+ for move_to in &error.move_to_places {
note_move_destination(bccx, move_to.span,
&move_to.ident, is_first_note);
is_first_note = false;
fn group_errors_with_same_origin<'tcx>(errors: &Vec<MoveError<'tcx>>)
-> Vec<GroupedMoveErrors<'tcx>> {
let mut grouped_errors = Vec::new();
- for error in errors.iter() {
+ for error in errors {
append_to_grouped_errors(&mut grouped_errors, error)
}
return grouped_errors;
self.kill_moves(assignment.path, assignment.id, dfcx_moves);
}
- for assignment in self.path_assignments.borrow().iter() {
+ for assignment in &*self.path_assignments.borrow() {
self.kill_moves(assignment.path, assignment.id, dfcx_moves);
}
// Kill all moves related to a variable `x` when
// it goes out of scope:
- for path in self.paths.borrow().iter() {
+ for path in &*self.paths.borrow() {
match path.loan_path.kind {
LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
let kill_scope = path.loan_path.kill_scope(tcx);
//! Returns the kind of a move of `loan_path` by `id`, if one exists.
let mut ret = None;
- for loan_path_index in self.move_data.path_map.borrow().get(&*loan_path).iter() {
+ if let Some(loan_path_index) = self.move_data.path_map.borrow().get(&*loan_path) {
self.dfcx_moves.each_gen_bit(id, |move_index| {
let the_move = self.move_data.moves.borrow();
let the_move = (*the_move)[move_index];
- if the_move.path == **loan_path_index {
+ if the_move.path == *loan_path_index {
ret = Some(the_move.kind);
false
} else {
ret = false;
}
} else {
- for &loan_path_index in opt_loan_path_index.iter() {
+ if let Some(loan_path_index) = opt_loan_path_index {
let cont = self.move_data.each_base_path(moved_path, |p| {
if p == loan_path_index {
// Scenario 3: some extension of `loan_path`
true
}
});
- if !cont { ret = false; break }
+ if !cont { ret = false; }
}
}
ret
debug!("dataflow_for({:?}, id={}) {:?}", e, id, self.variants);
let mut sets = "".to_string();
let mut seen_one = false;
- for &variant in self.variants.iter() {
+ for &variant in &self.variants {
if seen_one { sets.push_str(" "); } else { seen_one = true; }
sets.push_str(variant.short_name());
sets.push_str(": ");
id: &str) {
let mut out_filenames = Vec::new();
- for output_type in sess.opts.output_types.iter() {
+ for output_type in &sess.opts.output_types {
let file = outputs.path(*output_type);
match *output_type {
config::OutputTypeExe => {
- for output in sess.crate_types.borrow().iter() {
+ for output in &*sess.crate_types.borrow() {
let p = link::filename_for_input(sess, *output,
id, &file);
out_filenames.push(p);
.map(|fmap| escape_dep_filename(&fmap.name[]))
.collect();
let mut file = try!(old_io::File::create(&deps_filename));
- for path in out_filenames.iter() {
+ for path in &out_filenames {
try!(write!(&mut file as &mut Writer,
"{}: {}\n\n", path.display(), files.connect(" ")));
}
fn describe_debug_flags() {
println!("\nAvailable debug options:\n");
- for &(name, _, opt_type_desc, desc) in config::DB_OPTIONS.iter() {
+ for &(name, _, opt_type_desc, desc) in config::DB_OPTIONS {
let (width, extra) = match opt_type_desc {
Some(..) => (21, "=val"),
None => (25, "")
fn describe_codegen_flags() {
println!("\nAvailable codegen options:\n");
- for &(name, _, opt_type_desc, desc) in config::CG_OPTIONS.iter() {
+ for &(name, _, opt_type_desc, desc) in config::CG_OPTIONS {
let (width, extra) = match opt_type_desc {
Some(..) => (21, "=val"),
None => (25, "")
if sess.opts.prints.len() == 0 { return false }
let attrs = input.map(|input| parse_crate_attrs(sess, input));
- for req in sess.opts.prints.iter() {
+ for req in &sess.opts.prints {
match *req {
PrintRequest::Sysroot => println!("{}", sess.sysroot().display()),
PrintRequest::FileNames |
let crate_types = driver::collect_crate_types(sess, attrs);
let metadata = driver::collect_crate_metadata(sess, attrs);
*sess.crate_metadata.borrow_mut() = metadata;
- for &style in crate_types.iter() {
+ for &style in &crate_types {
let fname = link::filename_for_input(sess, style,
id.as_slice(),
&t_outputs.with_extension(""));
BUG_REPORT_URL),
"run with `RUST_BACKTRACE=1` for a backtrace".to_string(),
];
- for note in xs.iter() {
+ for note in &xs {
emitter.emit(None, &note[], None, diagnostic::Note)
}
}
pub fn create_region_hierarchy(&self, rh: &RH) {
- for child_rh in rh.sub.iter() {
+ for child_rh in rh.sub {
self.create_region_hierarchy(child_rh);
self.infcx.tcx.region_maps.record_encl_scope(
CodeExtent::from_node_id(child_rh.id),
names: &[String])
-> Option<ast::NodeId> {
assert!(idx < names.len());
- for item in m.items.iter() {
+ for item in &m.items {
if item.ident.user_string(this.infcx.tcx) == names[idx] {
return search(this, &**item, idx+1, names);
}
}
pub fn apply_llfn(&self, llfn: ValueRef) {
- for &(idx, ref attr) in self.attrs.iter() {
+ for &(idx, ref attr) in &self.attrs {
attr.apply_llfn(idx as c_uint, llfn);
}
}
pub fn apply_callsite(&self, callsite: ValueRef) {
- for &(idx, ref attr) in self.attrs.iter() {
+ for &(idx, ref attr) in &self.attrs {
attr.apply_callsite(idx as c_uint, callsite);
}
}
// Enum variants are parented to the enum definition itself because
// they inherit privacy
ast::ItemEnum(ref def, _) => {
- for variant in def.variants.iter() {
+ for variant in &def.variants {
// The parent is considered the enclosing enum because the
// enum will dictate the privacy visibility of this variant
// instead.
// parent all the methods to the trait to indicate that they're
// private.
ast::ItemTrait(_, _, _, ref methods) if item.vis != ast::Public => {
- for m in methods.iter() {
+ for m in methods {
match *m {
ast::ProvidedMethod(ref m) => {
self.parents.insert(m.id, item.id);
// While we have the id of the struct definition, go ahead and parent
// all the fields.
- for field in s.fields.iter() {
+ for field in &s.fields {
self.parents.insert(field.node.id, self.curparent);
}
visit::walk_struct_def(self, s)
// Enum variants inherit from their parent, so if the enum is
// public all variants are public unless they're explicitly priv
ast::ItemEnum(ref def, _) if public_first => {
- for variant in def.variants.iter() {
+ for variant in &def.variants {
self.exported_items.insert(variant.node.id);
}
}
});
if public_ty || public_trait {
- for impl_item in impl_items.iter() {
+ for impl_item in impl_items {
match *impl_item {
ast::MethodImplItem(ref method) => {
let meth_public =
// Default methods on traits are all public so long as the trait
// is public
ast::ItemTrait(_, _, _, ref methods) if public_first => {
- for method in methods.iter() {
+ for method in methods {
match *method {
ast::ProvidedMethod(ref m) => {
debug!("provided {}", m.id);
// crate module gets processed as well.
if self.prev_exported {
assert!(self.export_map.contains_key(&id), "wut {}", id);
- for export in self.export_map[id].iter() {
+ for export in &self.export_map[id] {
if is_local(export.def_id) {
self.reexports.insert(export.def_id.node);
}
match vpath.node {
ast::ViewPathSimple(..) | ast::ViewPathGlob(..) => {}
ast::ViewPathList(ref prefix, ref list) => {
- for pid in list.iter() {
+ for pid in list {
match pid.node {
ast::PathListIdent { id, name } => {
debug!("privacy - ident item {}", id);
ast::ExprStruct(_, ref fields, _) => {
match ty::expr_ty(self.tcx, expr).sty {
ty::ty_struct(id, _) => {
- for field in (*fields).iter() {
+ for field in &(*fields) {
self.check_field(expr.span, id,
NamedField(field.ident.node));
}
ty::ty_enum(_, _) => {
match self.tcx.def_map.borrow()[expr.id].clone() {
def::DefVariant(_, variant_id, _) => {
- for field in fields.iter() {
+ for field in fields {
self.check_field(expr.span, variant_id,
NamedField(field.ident.node));
}
ast::PatStruct(_, ref fields, _) => {
match ty::pat_ty(self.tcx, pattern).sty {
ty::ty_struct(id, _) => {
- for field in fields.iter() {
+ for field in fields {
self.check_field(pattern.span, id,
NamedField(field.node.ident));
}
ty::ty_enum(_, _) => {
match self.tcx.def_map.borrow().get(&pattern.id) {
Some(&def::DefVariant(_, variant_id, _)) => {
- for field in fields.iter() {
+ for field in fields {
self.check_field(pattern.span, variant_id,
NamedField(field.node.ident));
}
check_inherited(item.span, item.vis,
"visibility qualifiers have no effect on trait \
impls");
- for impl_item in impl_items.iter() {
+ for impl_item in impl_items {
match *impl_item {
ast::MethodImplItem(ref m) => {
check_inherited(m.span, m.pe_vis(), "");
}
ast::ItemEnum(ref def, _) => {
- for v in def.variants.iter() {
+ for v in &def.variants {
match v.node.vis {
ast::Public => {
if item.vis == ast::Public {
}
ast::ItemTrait(_, _, _, ref methods) => {
- for m in methods.iter() {
+ for m in methods {
match *m {
ast::ProvidedMethod(ref m) => {
check_inherited(m.span, m.pe_vis(),
}
}
let check_struct = |&: def: &ast::StructDef| {
- for f in def.fields.iter() {
+ for f in &def.fields {
match f.node.kind {
ast::NamedField(_, p) => check_inherited(tcx, f.span, p),
ast::UnnamedField(..) => {}
check_inherited(tcx, item.span, item.vis);
match item.node {
ast::ItemImpl(_, _, _, _, _, ref impl_items) => {
- for impl_item in impl_items.iter() {
+ for impl_item in impl_items {
match *impl_item {
ast::MethodImplItem(ref m) => {
check_inherited(tcx, m.span, m.pe_vis());
}
}
ast::ItemForeignMod(ref fm) => {
- for i in fm.items.iter() {
+ for i in &fm.items {
check_inherited(tcx, i.span, i.vis);
}
}
ast::ItemEnum(ref def, _) => {
- for v in def.variants.iter() {
+ for v in &def.variants {
check_inherited(tcx, v.span, v.node.vis);
}
}
ast::ItemStruct(ref def, _) => check_struct(&**def),
ast::ItemTrait(_, _, _, ref methods) => {
- for m in methods.iter() {
+ for m in methods {
match *m {
ast::RequiredMethod(..) => {}
ast::ProvidedMethod(ref m) => check_inherited(tcx, m.span,
return
}
- for bound in bounds.iter() {
+ for bound in &**bounds {
self.check_ty_param_bound(bound)
}
}
match *trait_ref {
None => {
- for impl_item in impl_items.iter() {
+ for impl_item in impl_items {
match *impl_item {
ast::MethodImplItem(ref method) => {
visit::walk_method_helper(self, &**method)
// impl Public<Private> { ... }. Any public static
// methods will be visible as `Public::foo`.
let mut found_pub_static = false;
- for impl_item in impl_items.iter() {
+ for impl_item in impl_items {
match *impl_item {
ast::MethodImplItem(ref method) => {
if method.pe_explicit_self().node ==
}
fn visit_generics(&mut self, generics: &ast::Generics) {
- for ty_param in generics.ty_params.iter() {
- for bound in ty_param.bounds.iter() {
+ for ty_param in &*generics.ty_params {
+ for bound in &*ty_param.bounds {
self.check_ty_param_bound(bound)
}
}
- for predicate in generics.where_clause.predicates.iter() {
+ for predicate in &generics.where_clause.predicates {
match predicate {
&ast::WherePredicate::BoundPredicate(ref bound_pred) => {
- for bound in bound_pred.bounds.iter() {
+ for bound in &*bound_pred.bounds {
self.check_ty_param_bound(bound)
}
}
token::get_name(name))[]);
{
let r = child.span_for_namespace(ns);
- for sp in r.iter() {
- self.session.span_note(*sp,
+ if let Some(sp) = r {
+ self.session.span_note(sp,
&format!("first definition of {} `{}` here",
namespace_error_to_string(duplicate_type),
token::get_name(name))[]);
fn block_needs_anonymous_module(&mut self, block: &Block) -> bool {
// Check each statement.
- for statement in block.stmts.iter() {
+ for statement in &block.stmts {
match statement.node {
StmtDecl(ref declaration, _) => {
match declaration.node {
}
}
- for source_item in source_items.iter() {
+ for source_item in source_items {
let (module_path, name) = match source_item.node {
PathListIdent { name, .. } =>
(module_path.clone(), name.name),
let module = name_bindings.get_module();
- for variant in (*enum_definition).variants.iter() {
+ for variant in &(*enum_definition).variants {
self.build_reduced_graph_for_variant(
&**variant,
local_def(item.id),
};
// For each implementation item...
- for impl_item in impl_items.iter() {
+ for impl_item in impl_items {
match *impl_item {
MethodImplItem(ref method) => {
// Add the method to the module.
let def_id = local_def(item.id);
// Add the names of all the items to the trait info.
- for trait_item in items.iter() {
+ for trait_item in items {
let (name, kind) = match *trait_item {
ast::RequiredMethod(_) |
ast::ProvidedMethod(_) => {
let trait_item_def_ids =
csearch::get_trait_item_def_ids(&self.session.cstore, def_id);
- for trait_item_def_id in trait_item_def_ids.iter() {
+ for trait_item_def_id in &trait_item_def_ids {
let (trait_item_name, trait_item_kind) =
csearch::get_trait_item_name_and_kind(
&self.session.cstore,
// Add each static method to the module.
let new_parent = type_module;
- for method_info in methods.iter() {
+ for method_info in methods {
let name = method_info.name;
debug!("(building reduced graph for \
external crate) creating \
}
ViewPathList(_, ref list) => {
- for i in list.iter() {
+ for i in list {
self.finalize_import(i.node.id(), i.span);
}
}
self.current_module = orig_module;
build_reduced_graph::populate_module_if_necessary(self, &module_);
- for (_, child_node) in module_.children.borrow().iter() {
+ for (_, child_node) in &*module_.children.borrow() {
match child_node.get_module_if_available() {
None => {
// Nothing to do.
}
}
- for (_, child_module) in module_.anonymous_children.borrow().iter() {
+ for (_, child_module) in &*module_.anonymous_children.borrow() {
self.resolve_imports_for_module_subtree(child_module.clone());
}
}
fn names_to_string(&self, names: &[Name]) -> String {
let mut first = true;
let mut result = String::new();
- for name in names.iter() {
+ for name in names {
if first {
first = false
} else {
// Add all resolved imports from the containing module.
let import_resolutions = containing_module.import_resolutions.borrow();
- for (ident, target_import_resolution) in import_resolutions.iter() {
+ for (ident, target_import_resolution) in &*import_resolutions {
debug!("(resolving glob import) writing module resolution \
{} into `{}`",
token::get_name(*ident),
// Add all children from the containing module.
build_reduced_graph::populate_module_if_necessary(self, &containing_module);
- for (&name, name_bindings) in containing_module.children.borrow().iter() {
+ for (&name, name_bindings) in &*containing_module.children.borrow() {
self.merge_import_resolution(module_,
containing_module.clone(),
import_directive,
}
// Add external module children from the containing module.
- for (&name, module) in containing_module.external_module_children.borrow().iter() {
+ for (&name, module) in &*containing_module.external_module_children.borrow() {
let name_bindings =
Rc::new(Resolver::create_name_bindings_from_module(module.clone()));
self.merge_import_resolution(module_,
// Descend into children and anonymous children.
build_reduced_graph::populate_module_if_necessary(self, &module_);
- for (_, child_node) in module_.children.borrow().iter() {
+ for (_, child_node) in &*module_.children.borrow() {
match child_node.get_module_if_available() {
None => {
// Continue.
}
}
- for (_, module_) in module_.anonymous_children.borrow().iter() {
+ for (_, module_) in &*module_.anonymous_children.borrow() {
self.report_unresolved_imports(module_.clone());
}
}
DlDef(d @ DefLocal(_)) => {
let node_id = d.def_id().node;
let mut def = d;
- for rib in ribs.iter() {
+ for rib in ribs {
match rib.kind {
NormalRibKind => {
// Nothing to do. Continue.
}
DlDef(def @ DefTyParam(..)) |
DlDef(def @ DefSelfTy(..)) => {
- for rib in ribs.iter() {
+ for rib in ribs {
match rib.kind {
NormalRibKind | ClosureRibKind(..) => {
// Nothing to do. Continue.
// enum item: resolve all the variants' discrs,
// then resolve the ty params
ItemEnum(ref enum_def, ref generics) => {
- for variant in (*enum_def).variants.iter() {
- for dis_expr in variant.node.disr_expr.iter() {
+ for variant in &(*enum_def).variants {
+ if let Some(ref dis_expr) = variant.node.disr_expr {
// resolve the discriminator expr
// as a constant
self.with_constant_rib(|this| {
this.resolve_type_parameter_bounds(item.id, bounds,
TraitDerivation);
- for trait_item in (*trait_items).iter() {
+ for trait_item in &(*trait_items) {
// Create a new rib for the trait_item-specific type
// parameters.
//
this.resolve_where_clause(&ty_m.generics
.where_clause);
- for argument in ty_m.decl.inputs.iter() {
+ for argument in &ty_m.decl.inputs {
this.resolve_type(&*argument.ty);
}
ItemForeignMod(ref foreign_module) => {
self.with_scope(Some(name), |this| {
- for foreign_item in foreign_module.items.iter() {
+ for foreign_item in &foreign_module.items {
match foreign_item.node {
ForeignItemFn(_, ref generics) => {
this.with_type_parameter_rib(
}
Some(declaration) => {
let mut bindings_list = HashMap::new();
- for argument in declaration.inputs.iter() {
+ for argument in &declaration.inputs {
this.resolve_pattern(&*argument.pat,
ArgumentIrrefutableMode,
&mut bindings_list);
fn resolve_type_parameters(&mut self,
type_parameters: &OwnedSlice<TyParam>) {
- for type_parameter in type_parameters.iter() {
+ for type_parameter in &**type_parameters {
self.resolve_type_parameter(type_parameter);
}
}
fn resolve_type_parameter(&mut self,
type_parameter: &TyParam) {
- for bound in type_parameter.bounds.iter() {
+ for bound in &*type_parameter.bounds {
self.resolve_type_parameter_bound(type_parameter.id, bound,
TraitBoundingTypeParameter);
}
id: NodeId,
type_parameter_bounds: &OwnedSlice<TyParamBound>,
reference_type: TraitReferenceType) {
- for type_parameter_bound in type_parameter_bounds.iter() {
+ for type_parameter_bound in &**type_parameter_bounds {
self.resolve_type_parameter_bound(id, type_parameter_bound,
reference_type);
}
}
fn resolve_where_clause(&mut self, where_clause: &ast::WhereClause) {
- for predicate in where_clause.predicates.iter() {
+ for predicate in &where_clause.predicates {
match predicate {
&ast::WherePredicate::BoundPredicate(ref bound_pred) => {
self.resolve_type(&*bound_pred.bounded_ty);
- for bound in bound_pred.bounds.iter() {
+ for bound in &*bound_pred.bounds {
self.resolve_type_parameter_bound(bound_pred.bounded_ty.id, bound,
TraitBoundingTypeParameter);
}
this.resolve_where_clause(&generics.where_clause);
// Resolve fields.
- for field in fields.iter() {
+ for field in fields {
this.resolve_type(&*field.node.ty);
}
});
this.resolve_type(self_type);
this.with_current_self_type(self_type, |this| {
- for impl_item in impl_items.iter() {
+ for impl_item in impl_items {
match *impl_item {
MethodImplItem(ref method) => {
// If this is a trait impl, ensure the method
fn check_trait_item(&self, name: Name, span: Span) {
// If there is a TraitRef in scope for an impl, then the method must be in the trait.
- for &(did, ref trait_ref) in self.current_trait_ref.iter() {
+ if let Some((did, ref trait_ref)) = self.current_trait_ref {
if self.trait_item_map.get(&(name, did)).is_none() {
let path_str = self.path_names_to_string(&trait_ref.path);
self.resolve_error(span,
for (i, p) in arm.pats.iter().enumerate() {
let map_i = self.binding_mode_map(&**p);
- for (&key, &binding_0) in map_0.iter() {
+ for (&key, &binding_0) in &map_0 {
match map_i.get(&key) {
None => {
self.resolve_error(
}
}
- for (&key, &binding) in map_i.iter() {
+ for (&key, &binding) in &map_i {
if !map_0.contains_key(&key) {
self.resolve_error(
binding.span,
self.value_ribs.push(Rib::new(NormalRibKind));
let mut bindings_list = HashMap::new();
- for pattern in arm.pats.iter() {
+ for pattern in &arm.pats {
self.resolve_pattern(&**pattern, RefutableMode, &mut bindings_list);
}
// Check for imports appearing after non-item statements.
let mut found_non_item = false;
- for statement in block.stmts.iter() {
+ for statement in &block.stmts {
if let ast::StmtDecl(ref declaration, _) = statement.node {
if let ast::DeclItem(ref i) = declaration.node {
match i.node {
let mut values: Vec<uint> = Vec::new();
for rib in this.value_ribs.iter().rev() {
- for (&k, _) in rib.bindings.iter() {
+ for (&k, _) in &rib.bindings {
maybes.push(token::get_name(k));
values.push(uint::MAX);
}
build_reduced_graph::populate_module_if_necessary(self, &search_module);
{
- for (_, child_names) in search_module.children.borrow().iter() {
+ for (_, child_names) in &*search_module.children.borrow() {
let def = match child_names.def_for_namespace(TypeNS) {
Some(def) => def,
None => continue
}
// Look for imports.
- for (_, import) in search_module.import_resolutions.borrow().iter() {
+ for (_, import) in &*search_module.import_resolutions.borrow() {
let target = match import.target_for_namespace(TypeNS) {
None => continue,
Some(target) => target,
debug!("Children:");
build_reduced_graph::populate_module_if_necessary(self, &module_);
- for (&name, _) in module_.children.borrow().iter() {
+ for (&name, _) in &*module_.children.borrow() {
debug!("* {}", token::get_name(name));
}
debug!("Import resolutions:");
let import_resolutions = module_.import_resolutions.borrow();
- for (&name, import_resolution) in import_resolutions.iter() {
+ for (&name, import_resolution) in &*import_resolutions {
let value_repr;
match import_resolution.target_for_namespace(ValueNS) {
None => { value_repr = "".to_string(); }
self.record_exports_for_module(&*module_);
build_reduced_graph::populate_module_if_necessary(self.resolver, &module_);
- for (_, child_name_bindings) in module_.children.borrow().iter() {
+ for (_, child_name_bindings) in &*module_.children.borrow() {
match child_name_bindings.get_module_if_available() {
None => {
// Nothing to do.
}
}
- for (_, child_module) in module_.anonymous_children.borrow().iter() {
+ for (_, child_module) in &*module_.anonymous_children.borrow() {
self.record_exports_for_module_subtree(child_module.clone());
}
}
fn add_exports_for_module(&mut self,
exports: &mut Vec<Export>,
module_: &Module) {
- for (name, importresolution) in module_.import_resolutions.borrow().iter() {
+ for (name, importresolution) in &*module_.import_resolutions.borrow() {
if !importresolution.is_public {
continue
}
let xs = [TypeNS, ValueNS];
- for &ns in xs.iter() {
+ for &ns in &xs {
match importresolution.target_for_namespace(ns) {
Some(target) => {
debug!("(computing exports) maybe export '{}'",
symbol_hasher.input_str(&link_meta.crate_name[]);
symbol_hasher.input_str("-");
symbol_hasher.input_str(link_meta.crate_hash.as_str());
- for meta in tcx.sess.crate_metadata.borrow().iter() {
+ for meta in &*tcx.sess.crate_metadata.borrow() {
symbol_hasher.input_str(&meta[]);
}
symbol_hasher.input_str("-");
outputs: &OutputFilenames,
crate_name: &str) -> Vec<Path> {
let mut out_filenames = Vec::new();
- for &crate_type in sess.crate_types.borrow().iter() {
+ for &crate_type in &*sess.crate_types.borrow() {
if invalid_output_for_target(sess, crate_type) {
sess.bug(&format!("invalid output type `{:?}` for target os `{}`",
crate_type, sess.opts.target_triple)[]);
let mut ab = ArchiveBuilder::create(config);
ab.add_file(obj_filename).unwrap();
- for &(ref l, kind) in sess.cstore.get_used_libraries().borrow().iter() {
+ for &(ref l, kind) in &*sess.cstore.get_used_libraries().borrow() {
match kind {
cstore::NativeStatic => {
ab.add_native_library(&l[]).unwrap();
let crates = sess.cstore.get_used_crates(cstore::RequireStatic);
let mut all_native_libs = vec![];
- for &(cnum, ref path) in crates.iter() {
+ for &(cnum, ref path) in &crates {
let ref name = sess.cstore.get_crate_data(cnum).name;
let p = match *path {
Some(ref p) => p.clone(), None => {
and so may need to be preserved");
}
- for &(kind, ref lib) in all_native_libs.iter() {
+ for &(kind, ref lib) in &all_native_libs {
let name = match kind {
cstore::NativeStatic => "static library",
cstore::NativeUnknown => "library",
// crates.
let deps = sess.cstore.get_used_crates(cstore::RequireDynamic);
- for &(cnum, _) in deps.iter() {
+ for &(cnum, _) in &deps {
// We may not pass all crates through to the linker. Some crates may
// appear statically in an existing dylib, meaning we'll pick up all the
// symbols from the dylib.
let crates = sess.cstore.get_used_crates(cstore::RequireStatic);
for (cnum, _) in crates.into_iter() {
let libs = csearch::get_native_libraries(&sess.cstore, cnum);
- for &(kind, ref lib) in libs.iter() {
+ for &(kind, ref lib) in &libs {
match kind {
cstore::NativeUnknown => {
cmd.arg(format!("-l{}", *lib));
}
// Make sure we actually can run LTO
- for crate_type in sess.crate_types.borrow().iter() {
+ for crate_type in &*sess.crate_types.borrow() {
match *crate_type {
config::CrateTypeExecutable | config::CrateTypeStaticlib => {}
_ => {
fn dump(&mut self, handler: &Handler) {
let mut buffer = self.buffer.lock().unwrap();
- for diag in buffer.iter() {
+ for diag in &*buffer {
match diag.code {
Some(ref code) => {
handler.emit_with_code(None,
config.no_builtins);
}
- for pass in config.passes.iter() {
+ for pass in &config.passes {
let pass = CString::from_slice(pass.as_bytes());
if !llvm::LLVMRustAddPass(mpm, pass.as_ptr()) {
cgcx.handler.warn(format!("unknown pass {:?}, ignoring",
modules_config.emit_bc = true;
}
- for output_type in output_types.iter() {
+ for output_type in output_types {
match *output_type {
config::OutputTypeBitcode => { modules_config.emit_bc = true; },
config::OutputTypeLlvmAssembly => { modules_config.emit_ir = true; },
// Otherwise, we produced it only as a temporary output, and will need
// to get rid of it.
let mut user_wants_bitcode = false;
- for output_type in output_types.iter() {
+ for output_type in output_types {
match *output_type {
config::OutputTypeBitcode => {
user_wants_bitcode = true;
// FIXME #21627 disable faulty FastISel on AArch64 (even for -O0)
if sess.target.target.arch.as_slice() == "aarch64" { add("-fast-isel=0"); }
- for arg in sess.opts.cg.llvm_args.iter() {
+ for arg in &sess.opts.cg.llvm_args {
add(&(*arg)[]);
}
}
error!("Mis-calculated spans for path '{}'. \
Found {} spans, expected {}. Found spans:",
path_to_string(path), spans.len(), path.segments.len());
- for s in spans.iter() {
+ for s in &spans {
let loc = self.sess.codemap().lookup_char_pos(s.lo);
error!(" '{}' in {}, line {}",
self.span.snippet(*s), loc.file.name, loc.line);
return;
}
let sub_paths = &sub_paths[..len-2];
- for &(ref span, ref qualname) in sub_paths.iter() {
+ for &(ref span, ref qualname) in sub_paths {
self.fmt.sub_mod_ref_str(path.span,
*span,
&qualname[],
}
fn process_formals(&mut self, formals: &Vec<ast::Arg>, qualname: &str) {
- for arg in formals.iter() {
+ for arg in formals {
assert!(self.collected_paths.len() == 0 && !self.collecting);
self.collecting = true;
self.visit_pat(&*arg.pat);
self.collecting = false;
let span_utils = self.span.clone();
- for &(id, ref p, _, _) in self.collected_paths.iter() {
+ for &(id, ref p, _, _) in &self.collected_paths {
let typ = ppaux::ty_to_string(&self.analysis.ty_cx,
(*self.analysis.ty_cx.node_types.borrow())[id]);
// get the span only for the name of the variable (I hope the path is only ever a
self.process_formals(&method.pe_fn_decl().inputs, qualname);
// walk arg and return types
- for arg in method.pe_fn_decl().inputs.iter() {
+ for arg in &method.pe_fn_decl().inputs {
self.visit_ty(&*arg.ty);
}
self.process_formals(&decl.inputs, &qualname[]);
// walk arg and return types
- for arg in decl.inputs.iter() {
+ for arg in &decl.inputs {
self.visit_ty(&*arg.ty);
}
&val[]);
// fields
- for field in def.fields.iter() {
+ for field in &def.fields {
self.process_struct_field_def(field, &qualname[], item.id);
self.visit_ty(&*field.node.ty);
}
&format!("Could not find subspan for enum {}",
enum_name)[]),
}
- for variant in enum_definition.variants.iter() {
+ for variant in &enum_definition.variants {
let name = get_ident(variant.node.name);
let name = name.get();
let mut qualname = enum_name.clone();
&enum_name[],
&val[],
item.id);
- for arg in args.iter() {
+ for arg in args {
self.visit_ty(&*arg.ty);
}
}
&val[],
item.id);
- for field in struct_def.fields.iter() {
+ for field in &struct_def.fields {
self.process_struct_field_def(field, qualname.as_slice(), variant.node.id);
self.visit_ty(&*field.node.ty);
}
}
self.process_generic_params(type_parameters, item.span, "", item.id);
- for impl_item in impl_items.iter() {
+ for impl_item in impl_items {
match *impl_item {
ast::MethodImplItem(ref method) => {
visit::walk_method_helper(self, &**method)
&val[]);
// super-traits
- for super_bound in trait_refs.iter() {
+ for super_bound in &**trait_refs {
let trait_ref = match *super_bound {
ast::TraitTyParamBound(ref trait_ref, _) => {
trait_ref
// walk generics and methods
self.process_generic_params(generics, item.span, &qualname[], item.id);
- for method in methods.iter() {
+ for method in methods {
self.visit_trait_item(method)
}
}
_ => None
};
- for field in fields.iter() {
+ for field in fields {
match struct_def {
Some(struct_def) => {
let fields = ty::lookup_struct_fields(&self.analysis.ty_cx, struct_def);
- for f in fields.iter() {
+ for f in &fields {
if generated_code(field.ident.span) {
continue;
}
self.span.snippet(p.span))[]);
}
};
- for &Spanned { node: ref field, span } in fields.iter() {
+ for &Spanned { node: ref field, span } in fields {
let sub_span = self.span.span_for_first_ident(span);
let fields = ty::lookup_struct_fields(&self.analysis.ty_cx, struct_def);
- for f in fields.iter() {
+ for f in fields {
if f.name == field.ident.name {
self.fmt.ref_str(recorder::VarRef,
span,
let glob_map = &self.analysis.glob_map;
let glob_map = glob_map.as_ref().unwrap();
if glob_map.contains_key(&item.id) {
- for n in glob_map[item.id].iter() {
+ for n in &glob_map[item.id] {
if name_string.len() > 0 {
name_string.push_str(", ");
}
self.write_sub_paths(path, true);
}
ast::ViewPathList(ref path, ref list) => {
- for plid in list.iter() {
+ for plid in list {
match plid.node {
ast::PathListIdent { id, .. } => {
match self.lookup_type_ref(id) {
}
fn visit_generics(&mut self, generics: &ast::Generics) {
- for param in generics.ty_params.iter() {
- for bound in param.bounds.iter() {
+ for param in &*generics.ty_params {
+ for bound in &*param.bounds {
if let ast::TraitTyParamBound(ref trait_ref, _) = *bound {
self.process_trait_ref(&trait_ref.trait_ref);
}
scope_id);
// walk arg and return types
- for arg in method_type.decl.inputs.iter() {
+ for arg in &method_type.decl.inputs {
self.visit_ty(&*arg.ty);
}
match *ty {
ty::ty_struct(def_id, _) => {
let fields = ty::lookup_struct_fields(&self.analysis.ty_cx, def_id);
- for f in fields.iter() {
+ for f in &fields {
if f.name == ident.node.name {
let sub_span = self.span.span_for_last_ident(ex.span);
self.fmt.ref_str(recorder::VarRef,
self.process_formals(&decl.inputs, &id[]);
// walk arg and return types
- for arg in decl.inputs.iter() {
+ for arg in &decl.inputs {
self.visit_ty(&*arg.ty);
}
fn visit_arm(&mut self, arm: &ast::Arm) {
assert!(self.collected_paths.len() == 0 && !self.collecting);
self.collecting = true;
- for pattern in arm.pats.iter() {
+ for pattern in &arm.pats {
// collect paths from the arm's patterns
self.visit_pat(&**pattern);
}
// This is to get around borrow checking, because we need mut self to call process_path.
let mut paths_to_process = vec![];
// process collected paths
- for &(id, ref p, ref immut, ref_kind) in self.collected_paths.iter() {
+ for &(id, ref p, ref immut, ref_kind) in &self.collected_paths {
let def_map = self.analysis.ty_cx.def_map.borrow();
if !def_map.contains_key(&id) {
self.sess.span_bug(p.span,
*def)
}
}
- for &(id, ref path, ref_kind) in paths_to_process.iter() {
+ for &(id, ref path, ref_kind) in &paths_to_process {
self.process_path(id, path.span, path, ref_kind);
}
self.collecting = false;
let value = self.span.snippet(l.span);
- for &(id, ref p, ref immut, _) in self.collected_paths.iter() {
+ for &(id, ref p, ref immut, _) in &self.collected_paths {
let value = if *immut { value.to_string() } else { "<mutable>".to_string() };
let types = self.analysis.ty_cx.node_types.borrow();
let typ = ppaux::ty_to_string(&self.analysis.ty_cx, (*types)[id]);
}
fn has_nested_bindings(m: &[Match], col: uint) -> bool {
- for br in m.iter() {
+ for br in m {
match br.pats[col].node {
ast::PatIdent(_, _, Some(_)) => return true,
_ => ()
let tcx = bcx.tcx();
let mut found: Vec<Opt> = vec![];
- for br in m.iter() {
+ for br in m {
let cur = br.pats[col];
let opt = match cur.node {
ast::PatLit(ref l) => ConstantValue(ConstantExpr(&**l)),
bindings_map: &BindingsMap<'tcx>,
cs: Option<cleanup::ScopeId>)
-> Block<'blk, 'tcx> {
- for (&ident, &binding_info) in bindings_map.iter() {
+ for (&ident, &binding_info) in bindings_map {
let llval = match binding_info.trmode {
// By value mut binding for a copy type: load from the ptr
// into the matched value and copy to our alloca
let val = unpack_datum!(bcx, expr::trans(bcx, guard_expr));
let val = val.to_llbool(bcx);
- for (_, &binding_info) in data.bindings_map.iter() {
+ for (_, &binding_info) in &data.bindings_map {
if let TrByCopy(llbinding) = binding_info.trmode {
call_lifetime_end(bcx, llbinding);
}
with_cond(bcx, Not(bcx, val, guard_expr.debug_loc()), |bcx| {
// Guard does not match: remove all bindings from the lllocals table
- for (_, &binding_info) in data.bindings_map.iter() {
+ for (_, &binding_info) in &data.bindings_map {
call_lifetime_end(bcx, binding_info.llmatch);
bcx.fcx.lllocals.borrow_mut().remove(&binding_info.id);
}
}
None => {
let data = &m[0].data;
- for &(ref ident, ref value_ptr) in m[0].bound_ptrs.iter() {
+ for &(ref ident, ref value_ptr) in &m[0].bound_ptrs {
let llmatch = data.bindings_map[*ident].llmatch;
call_lifetime_start(bcx, llmatch);
Store(bcx, *value_ptr, llmatch);
Variant(_, ref repr, _) => {
let (the_kind, val_opt) = adt::trans_switch(bcx, &**repr, val);
kind = the_kind;
- for &tval in val_opt.iter() { test_val = tval; }
+ if let Some(tval) = val_opt { test_val = tval; }
}
SliceLengthEqual(_) | SliceLengthGreaterOrEqual(_, _) => {
let (_, len) = tvec::get_base_and_len(bcx, val, left_ty);
}
}
}
- for o in opts.iter() {
+ for o in &opts {
match *o {
ConstantRange(_, _) => { kind = Compare; break },
SliceLengthGreaterOrEqual(_, _) => { kind = CompareSliceLength; break },
compile_submatch(bcx, &matches[], &[discr_datum.val], &chk, has_default);
let mut arm_cxs = Vec::new();
- for arm_data in arm_datas.iter() {
+ for arm_data in &arm_datas {
let mut bcx = arm_data.bodycx;
// insert bindings into the lllocals map and add cleanups
});
}
- for inner_pat in inner.iter() {
+ if let Some(ref inner_pat) = *inner {
bcx = bind_irrefutable_pat(bcx, &**inner_pat, val, cleanup_scope);
}
}
&*repr,
vinfo.disr_val,
val);
- for sub_pat in sub_pats.iter() {
+ if let Some(ref sub_pat) = *sub_pats {
for (i, &argval) in args.vals.iter().enumerate() {
bcx = bind_irrefutable_pat(bcx, &*sub_pat[i],
argval, cleanup_scope);
let pat_ty = node_id_type(bcx, pat.id);
let pat_repr = adt::represent_type(bcx.ccx(), pat_ty);
expr::with_field_tys(tcx, pat_ty, Some(pat.id), |discr, field_tys| {
- for f in fields.iter() {
+ for f in fields {
let ix = ty::field_idx_strict(tcx, f.node.ident.name, field_tys);
let fldptr = adt::trans_field_ptr(bcx, &*pat_repr, val,
discr, ix);
// alignment of the type.
let (_, align) = union_size_and_align(fields.as_slice());
let mut use_align = true;
- for st in fields.iter() {
+ for st in &fields {
// Get the first non-zero-sized field
let field = st.fields.iter().skip(1).filter(|ty| {
let t = type_of::sizing_type_of(cx, **ty);
cx.tcx().sess.bug("range_to_inttype: found ReprPacked on an enum");
}
}
- for &ity in attempts.iter() {
+ for &ity in attempts {
if bounds_usable(cx, ity, bounds) {
return ity;
}
packed: bool,
scapegoat: Ty<'tcx>) {
let mut offset = 0;
- for &llty in fields.iter() {
+ for &llty in fields {
// Invariant: offset < ccx.obj_size_bound() <= 1<<61
if !packed {
let type_align = machine::llalign_of_min(ccx, llty);
let mut offsets = vec!();
let mut offset = 0;
- for &ty in st.fields.iter() {
+ for &ty in &st.fields {
let llty = type_of::sizing_type_of(ccx, ty);
if !st.packed {
let type_align = type_of::align_of(ccx, ty);
// don't do this then linker errors can be generated where the linker
// complains that one object files has a thread local version of the
// symbol and another one doesn't.
- for attr in ty::get_attrs(ccx.tcx(), did).iter() {
+ for attr in &*ty::get_attrs(ccx.tcx(), did) {
if attr.check_name("thread_local") {
llvm::set_thread_local(c, true);
}
InlineNone => { /* fallthrough */ }
}
- for attr in attrs.iter() {
+ for attr in attrs {
let mut used = true;
match attr.name().get() {
"no_stack_check" => unset_split_stack(llfn),
n_variants);
let next_cx = fcx.new_temp_block("enum-iter-next");
- for variant in (*variants).iter() {
+ for variant in &(*variants) {
let variant_cx =
fcx.new_temp_block(
&format!("enum-iter-variant-{}",
if need_invoke(bcx) {
debug!("invoking {} at {:?}", bcx.val_to_string(llfn), bcx.llbb);
- for &llarg in llargs.iter() {
+ for &llarg in llargs {
debug!("arg: {}", bcx.val_to_string(llarg));
}
let normal_bcx = bcx.fcx.new_temp_block("normal-return");
return (llresult, normal_bcx);
} else {
debug!("calling {} at {:?}", bcx.val_to_string(llfn), bcx.llbb);
- for &llarg in llargs.iter() {
+ for &llarg in llargs {
debug!("arg: {}", bcx.val_to_string(llarg));
}
vec![ty::mk_tup(ccx.tcx(), monomorphized_arg_types)]
}
};
- for monomorphized_arg_type in monomorphized_arg_types.iter() {
+ for monomorphized_arg_type in &monomorphized_arg_types {
debug!("trans_closure: monomorphized_arg_type: {}",
ty_to_string(ccx.tcx(), *monomorphized_arg_type));
}
// This somewhat improves single-stepping experience in debugger.
unsafe {
let llreturn = fcx.llreturn.get();
- for &llreturn in llreturn.iter() {
+ if let Some(llreturn) = llreturn {
llvm::LLVMMoveBasicBlockAfter(llreturn, bcx.llbb);
}
}
let avar = adt::represent_type(ccx, ty);
match *avar {
adt::General(_, ref variants, _) => {
- for var in variants.iter() {
+ for var in variants {
let mut size = 0;
for field in var.fields.iter().skip(1) {
// skip the discriminant
// and control visibility.
pub fn trans_mod(ccx: &CrateContext, m: &ast::Mod) {
let _icx = push_ctxt("trans_mod");
- for item in m.items.iter() {
+ for item in &m.items {
trans_item(ccx, &**item);
}
}
stats.fn_stats.borrow_mut().sort_by(|&(_, insns_a), &(_, insns_b)| {
insns_b.cmp(&insns_a)
});
- for tuple in stats.fn_stats.borrow().iter() {
+ for tuple in &*stats.fn_stats.borrow() {
match *tuple {
(ref name, insns) => {
println!("{} insns, {}", insns, *name);
}
}
if shared_ccx.sess().count_llvm_insns() {
- for (k, v) in shared_ccx.stats().llvm_insns.borrow().iter() {
+ for (k, v) in &*shared_ccx.stats().llvm_insns.borrow() {
println!("{:7} {}", *v, *k);
}
}
rty: Type,
ret_def: bool) -> FnType {
let mut arg_tys = Vec::new();
- for &aty in atys.iter() {
+ for &aty in atys {
let ty = classify_arg_ty(ccx, aty);
arg_tys.push(ty);
}
};
let mut arg_tys = Vec::new();
- for &aty in atys.iter() {
+ for &aty in atys {
let ty = classify_arg_ty(ccx, aty, align_fn);
arg_tys.push(ty);
}
let mut arg_tys = Vec::new();
let mut offset = if sret { 4 } else { 0 };
- for aty in atys.iter() {
+ for aty in atys {
let ty = classify_arg_ty(ccx, *aty, &mut offset);
arg_tys.push(ty);
};
let mut arg_tys = Vec::new();
let mut offset = if sret { 4 } else { 0 };
- for aty in atys.iter() {
+ for aty in atys {
let ty = classify_arg_ty(ccx, *aty, &mut offset);
arg_tys.push(ty);
};
ret_ty = ArgType::direct(rty, None, None, attr);
}
- for &t in atys.iter() {
+ for &t in atys {
let ty = match t.kind() {
Struct => {
let size = llsize_of_alloc(ccx, t);
off: uint,
packed: bool) {
let mut field_off = off;
- for ty in tys.iter() {
+ for ty in tys {
if !packed {
field_off = align(field_off, *ty);
}
fn llreg_ty(ccx: &CrateContext, cls: &[RegClass]) -> Type {
fn llvec_len(cls: &[RegClass]) -> uint {
let mut len = 1;
- for c in cls.iter() {
+ for c in cls {
if *c != SSEUp {
break;
}
}
let mut arg_tys = Vec::new();
- for t in atys.iter() {
+ for t in atys {
let ty = x86_64_ty(ccx, *t, |cls| cls.is_pass_byval(), ByValAttribute);
arg_tys.push(ty);
}
ret_ty = ArgType::direct(rty, None, None, attr);
}
- for &t in atys.iter() {
+ for &t in atys {
let ty = match t.kind() {
Struct => {
match llsize_of_alloc(ccx, t) {
AstScopeKind(nid) => write!(f, "AstScopeKind({})", nid),
LoopScopeKind(nid, ref blks) => {
try!(write!(f, "LoopScopeKind({}, [", nid));
- for blk in blks.iter() {
+ for blk in blks {
try!(write!(f, "{:p}, ", blk));
}
write!(f, "])")
-> Block<'a, 'tcx> {
let out = self.new_id_block("join", id);
let mut reachable = false;
- for bcx in in_cxs.iter() {
+ for bcx in in_cxs {
if !bcx.unreachable.get() {
build::Br(*bcx, out.llbb, DebugLoc::None);
reachable = true;
debuginfo::get_cleanup_debug_loc_for_ast_node(bcx.ccx(), b.id, b.span, true);
fcx.push_ast_cleanup_scope(cleanup_debug_loc);
- for s in b.stmts.iter() {
+ for s in &b.stmts {
bcx = trans_stmt(bcx, &**s);
}
},
ty::ty_tup(ref component_types) => {
unique_type_id.push_str("tuple ");
- for &component_type in component_types.iter() {
+ for &component_type in component_types {
let component_type_id =
self.get_unique_type_id_of_type(cx, component_type);
let component_type_id =
let sig = ty::erase_late_bound_regions(cx.tcx(), sig);
- for &parameter_type in sig.inputs.iter() {
+ for &parameter_type in &sig.inputs {
let parameter_type_id =
self.get_unique_type_id_of_type(cx, parameter_type);
let parameter_type_id =
if tps.len() > 0 {
output.push('<');
- for &type_parameter in tps.iter() {
+ for &type_parameter in tps {
let param_type_id =
type_map.get_unique_type_id_of_type(cx, type_parameter);
let param_type_id =
let sig = ty::erase_late_bound_regions(cx.tcx(), sig);
- for &parameter_type in sig.inputs.iter() {
+ for &parameter_type in &sig.inputs {
let parameter_type_id =
self.get_unique_type_id_of_type(cx, parameter_type);
let parameter_type_id =
}
// Arguments types
- for arg in fn_decl.inputs.iter() {
+ for arg in &fn_decl.inputs {
assert_type_for_node_id(cx, arg.pat.id, arg.pat.span);
let arg_type = ty::node_id_to_type(cx.tcx(), arg.pat.id);
let arg_type = monomorphize::apply_param_substs(cx.tcx(),
});
// regular arguments
- for &argument_type in signature.inputs.iter() {
+ for &argument_type in &signature.inputs {
signature_metadata.push(type_metadata(cx, argument_type, span));
}
// Push argument identifiers onto the stack so arguments integrate nicely
// with variable shadowing.
- for arg in args.iter() {
+ for arg in args {
pat_util::pat_bindings(def_map, &*arg.pat, |_, node_id, _, path1| {
scope_stack.push(ScopeStackEntry { scope_metadata: fn_metadata,
ident: Some(path1.node) });
scope_map.insert(block.id, scope_stack.last().unwrap().scope_metadata);
// The interesting things here are statements and the concluding expression.
- for statement in block.stmts.iter() {
+ for statement in &block.stmts {
scope_map.insert(ast_util::stmt_id(&**statement),
scope_stack.last().unwrap().scope_metadata);
}
}
- for exp in block.expr.iter() {
+ if let Some(ref exp) = block.expr {
walk_expr(cx, &**exp, scope_stack, scope_map);
}
}
walk_pattern(cx, &*local.pat, scope_stack, scope_map);
- for exp in local.init.iter() {
+ if let Some(ref exp) = local.init {
walk_expr(cx, &**exp, scope_stack, scope_map);
}
}
scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata);
- for sub_pat in sub_pat_opt.iter() {
+ if let Some(ref sub_pat) = *sub_pat_opt {
walk_pattern(cx, &**sub_pat, scope_stack, scope_map);
}
}
ast::PatEnum(_, ref sub_pats_opt) => {
scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata);
- for sub_pats in sub_pats_opt.iter() {
- for p in sub_pats.iter() {
+ if let Some(ref sub_pats) = *sub_pats_opt {
+ for p in sub_pats {
walk_pattern(cx, &**p, scope_stack, scope_map);
}
}
ast::PatTup(ref sub_pats) => {
scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata);
- for sub_pat in sub_pats.iter() {
+ for sub_pat in sub_pats {
walk_pattern(cx, &**sub_pat, scope_stack, scope_map);
}
}
ast::PatVec(ref front_sub_pats, ref middle_sub_pats, ref back_sub_pats) => {
scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata);
- for sub_pat in front_sub_pats.iter() {
+ for sub_pat in front_sub_pats {
walk_pattern(cx, &**sub_pat, scope_stack, scope_map);
}
- for sub_pat in middle_sub_pats.iter() {
+ if let Some(ref sub_pat) = *middle_sub_pats {
walk_pattern(cx, &**sub_pat, scope_stack, scope_map);
}
- for sub_pat in back_sub_pats.iter() {
+ for sub_pat in back_sub_pats {
walk_pattern(cx, &**sub_pat, scope_stack, scope_map);
}
}
ast::ExprVec(ref init_expressions) |
ast::ExprTup(ref init_expressions) => {
- for ie in init_expressions.iter() {
+ for ie in init_expressions {
walk_expr(cx, &**ie, scope_stack, scope_map);
}
}
scope_stack,
scope_map,
|cx, scope_stack, scope_map| {
- for &ast::Arg { pat: ref pattern, .. } in decl.inputs.iter() {
+ for &ast::Arg { pat: ref pattern, .. } in &decl.inputs {
walk_pattern(cx, &**pattern, scope_stack, scope_map);
}
ast::ExprCall(ref fn_exp, ref args) => {
walk_expr(cx, &**fn_exp, scope_stack, scope_map);
- for arg_exp in args.iter() {
+ for arg_exp in args {
walk_expr(cx, &**arg_exp, scope_stack, scope_map);
}
}
ast::ExprMethodCall(_, _, ref args) => {
- for arg_exp in args.iter() {
+ for arg_exp in args {
walk_expr(cx, &**arg_exp, scope_stack, scope_map);
}
}
// walk only one pattern per arm, as they all must contain the
// same binding names.
- for arm_ref in arms.iter() {
+ for arm_ref in arms {
let arm_span = arm_ref.pats[0].span;
with_new_scope(cx,
scope_stack,
scope_map,
|cx, scope_stack, scope_map| {
- for pat in arm_ref.pats.iter() {
+ for pat in &arm_ref.pats {
walk_pattern(cx, &**pat, scope_stack, scope_map);
}
- for guard_exp in arm_ref.guard.iter() {
+ if let Some(ref guard_exp) = arm_ref.guard {
walk_expr(cx, &**guard_exp, scope_stack, scope_map)
}
}
ast::ExprStruct(_, ref fields, ref base_exp) => {
- for &ast::Field { expr: ref exp, .. } in fields.iter() {
+ for &ast::Field { expr: ref exp, .. } in fields {
walk_expr(cx, &**exp, scope_stack, scope_map);
}
ref outputs,
.. }) => {
// inputs, outputs: Vec<(String, P<Expr>)>
- for &(_, ref exp) in inputs.iter() {
+ for &(_, ref exp) in inputs {
walk_expr(cx, &**exp, scope_stack, scope_map);
}
- for &(_, ref exp, _) in outputs.iter() {
+ for &(_, ref exp, _) in outputs {
walk_expr(cx, &**exp, scope_stack, scope_map);
}
}
},
ty::ty_tup(ref component_types) => {
output.push('(');
- for &component_type in component_types.iter() {
+ for &component_type in component_types {
push_debuginfo_type_name(cx, component_type, true, output);
output.push_str(", ");
}
let sig = ty::erase_late_bound_regions(cx.tcx(), sig);
if sig.inputs.len() > 0 {
- for &parameter_type in sig.inputs.iter() {
+ for &parameter_type in &sig.inputs {
push_debuginfo_type_name(cx, parameter_type, true, output);
output.push_str(", ");
}
let custom_cleanup_scope = fcx.push_custom_cleanup_scope();
// First we trans the base, if we have one, to the dest
- for base in optbase.iter() {
+ if let Some(base) = optbase {
assert_eq!(discr, 0);
match ty::expr_kind(bcx.tcx(), &*base.expr) {
ty::RvalueStmtExpr => bcx.tcx().sess.bug("unexpected expr kind for struct base expr"),
_ => {
let base_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, &*base.expr, "base"));
- for &(i, t) in base.fields.iter() {
+ for &(i, t) in &base.fields {
let datum = base_datum.get_element(
bcx, t, |srcval| adt::trans_field_ptr(bcx, &*repr, srcval, discr, i));
assert!(type_is_sized(bcx.tcx(), datum.ty));
// (i.e. avoid GEPi and `store`s to an alloca) .
let mut vec_val = C_undef(llty);
- for &(i, ref e) in fields.iter() {
+ for &(i, ref e) in fields {
let block_datum = trans(bcx, &**e);
bcx = block_datum.bcx;
let position = C_uint(bcx.ccx(), i);
Store(bcx, vec_val, addr);
} else {
// Now, we just overwrite the fields we've explicitly specified
- for &(i, ref e) in fields.iter() {
+ for &(i, ref e) in fields {
let dest = adt::trans_field_ptr(bcx, &*repr, addr, discr, i);
let e_ty = expr_ty_adjusted(bcx, &**e);
bcx = trans_into(bcx, &**e, SaveIn(dest));
}
arg_idx += 1;
- for arg_ty in fn_type.arg_tys.iter() {
+ for arg_ty in &fn_type.arg_tys {
if arg_ty.is_ignore() {
continue;
}
pub fn trans_foreign_mod(ccx: &CrateContext, foreign_mod: &ast::ForeignMod) {
let _icx = push_ctxt("foreign::trans_foreign_mod");
- for foreign_item in foreign_mod.items.iter() {
+ for foreign_item in &foreign_mod.items {
let lname = link_name(&**foreign_item);
if let ast::ForeignItemFn(ref decl, _) = foreign_item.node {
}
};
- for &arg_ty in tys.fn_ty.arg_tys.iter() {
+ for &arg_ty in &tys.fn_ty.arg_tys {
if arg_ty.is_ignore() {
continue;
}
i += 1;
- for &arg_ty in tys.fn_ty.arg_tys.iter() {
+ for &arg_ty in &tys.fn_ty.arg_tys {
if arg_ty.is_ignore() {
continue;
}
// As of this point, allow no more tydescs to be created.
ccx.finished_tydescs().set(true);
let glue_fn_ty = Type::generic_glue_fn(ccx).ptr_to();
- for (_, ti) in ccx.tydescs().borrow().iter() {
+ for (_, ti) in &*ccx.tydescs().borrow() {
// Each of the glue functions needs to be cast to a generic type
// before being put into the tydesc because we only have a singleton
// tydesc type. Then we'll recast each function to its real type when
/// the only intrinsic that needs such verification is `transmute`.
pub fn check_intrinsics(ccx: &CrateContext) {
let mut last_failing_id = None;
- for transmute_restriction in ccx.tcx().transmute_restrictions.borrow().iter() {
+ for transmute_restriction in &*ccx.tcx().transmute_restrictions.borrow() {
// Sometimes, a single call to transmute will push multiple
// type pairs to test in order to exhaustively test the
// possibility around a type parameter. If one of those fails,
// items that we need to translate.
if !generics.ty_params.is_empty() {
let mut v = TransItemVisitor{ ccx: ccx };
- for impl_item in impl_items.iter() {
+ for impl_item in impl_items {
match *impl_item {
ast::MethodImplItem(ref method) => {
visit::walk_method_helper(&mut v, &**method);
}
return;
}
- for impl_item in impl_items.iter() {
+ for impl_item in impl_items {
match *impl_item {
ast::MethodImplItem(ref method) => {
if method.pe_generics().ty_params.len() == 0 {
ast::ExprVec(ref elements) => {
match dest {
Ignore => {
- for element in elements.iter() {
+ for element in elements {
bcx = expr::trans_into(bcx, &**element, Ignore);
}
}
match inputs[inputs.len() - 1].sty {
ty::ty_tup(ref tupled_arguments) => {
debug!("untuple_arguments_if_necessary(): untupling arguments");
- for &tupled_argument in tupled_arguments.iter() {
+ for &tupled_argument in tupled_arguments {
result.push(tupled_argument);
}
}
}
}
- for param in ty_param_defs[supplied_ty_param_count..].iter() {
+ for param in &ty_param_defs[supplied_ty_param_count..] {
match param.default {
Some(default) => {
// This is a default type parameter.
prohibit_projections(this.tcx(), assoc_bindings.as_slice());
}
Some(ref mut v) => {
- for binding in assoc_bindings.iter() {
+ for binding in &assoc_bindings {
match ast_type_binding_to_projection_predicate(this, trait_ref.clone(),
self_ty, binding) {
Ok(pp) => { v.push(pp); }
token::get_name(assoc_name),
token::get_name(ty_param_name));
- for suitable_bound in suitable_bounds.iter() {
+ for suitable_bound in &suitable_bounds {
span_note!(this.tcx().sess, ast_ty.span,
"associated type `{}` could derive from `{}`",
token::get_name(ty_param_name),
let mut region_bounds = Vec::new();
let mut trait_bounds = Vec::new();
let mut trait_def_ids = DefIdMap();
- for ast_bound in ast_bounds.iter() {
+ for ast_bound in ast_bounds {
match *ast_bound {
ast::TraitTyParamBound(ref b, ast::TraitBoundModifier::None) => {
match ::lookup_def_tcx(tcx, b.trait_ref.path.span, b.trait_ref.ref_id) {
// below for details.
demand::eqtype(fcx, pat.span, expected, pat_ty);
- for elt in before.iter() {
+ for elt in before {
check_pat(pcx, &**elt, inner_ty);
}
if let Some(ref slice) = *slice {
});
check_pat(pcx, &**slice, slice_ty);
}
- for elt in after.iter() {
+ for elt in after {
check_pat(pcx, &**elt, inner_ty);
}
}
// Typecheck the patterns first, so that we get types for all the
// bindings.
- for arm in arms.iter() {
+ for arm in arms {
let mut pcx = pat_ctxt {
fcx: fcx,
map: pat_id_map(&tcx.def_map, &*arm.pats[0]),
};
- for p in arm.pats.iter() {
+ for p in &arm.pats {
check_pat(&mut pcx, &**p, discrim_ty);
}
}
"use of trait `{}` in a struct pattern", name);
fcx.write_error(pat.id);
- for field in fields.iter() {
+ for field in fields {
check_pat(pcx, &*field.node.pat, tcx.types.err);
}
return;
"`{}` does not name a struct or a struct variant", name);
fcx.write_error(pat.id);
- for field in fields.iter() {
+ for field in fields {
check_pat(pcx, &*field.node.pat, tcx.types.err);
}
return;
fcx.write_error(pat.id);
if let Some(subpats) = subpats {
- for pat in subpats.iter() {
+ for pat in subpats {
check_pat(pcx, &**pat, tcx.types.err);
}
}
"this pattern has {} field{}, but the corresponding {} has no fields",
subpats.len(), if subpats.len() == 1 {""} else {"s"}, kind_name);
- for pat in subpats.iter() {
+ for pat in subpats {
check_pat(pcx, &**pat, tcx.types.err);
}
} else {
kind_name,
arg_tys.len(), if arg_tys.len() == 1 {""} else {"s"});
- for pat in subpats.iter() {
+ for pat in subpats {
check_pat(pcx, &**pat, tcx.types.err);
}
}
let mut used_fields = FnvHashMap();
// Typecheck each field.
- for &Spanned { node: ref field, span } in fields.iter() {
+ for &Spanned { node: ref field, span } in fields {
let field_type = match used_fields.entry(field.ident.name) {
Occupied(occupied) => {
span_err!(tcx.sess, span, E0025,
// Create a list of simplified self types, if we can.
let mut simplified_steps = Vec::new();
- for step in steps.iter() {
+ for step in &steps {
match fast_reject::simplify_type(fcx.tcx(), step.self_ty, true) {
None => { break; }
Some(simplified_type) => { simplified_steps.push(simplified_type); }
fn assemble_inherent_candidates(&mut self) {
let steps = self.steps.clone();
- for step in steps.iter() {
+ for step in &*steps {
self.assemble_probe(step.self_ty);
}
}
// metadata if necessary.
ty::populate_implementations_for_type_if_necessary(self.tcx(), def_id);
- for impl_infos in self.tcx().inherent_impls.borrow().get(&def_id).iter() {
- for &impl_def_id in impl_infos.iter() {
+ if let Some(impl_infos) = self.tcx().inherent_impls.borrow().get(&def_id) {
+ for &impl_def_id in &***impl_infos {
self.assemble_inherent_impl_probe(impl_def_id);
}
}
let mut duplicates = HashSet::new();
let opt_applicable_traits = self.fcx.ccx.trait_map.get(&expr_id);
for applicable_traits in opt_applicable_traits.into_iter() {
- for &trait_did in applicable_traits.iter() {
+ for &trait_did in applicable_traits {
if duplicates.insert(trait_did) {
try!(self.assemble_extension_candidates_for_trait(trait_did));
}
Some(impls) => impls,
};
- for &impl_def_id in impl_def_ids.borrow().iter() {
+ for &impl_def_id in &*impl_def_ids.borrow() {
debug!("assemble_extension_candidates_for_trait_impl: trait_def_id={} impl_def_id={}",
trait_def_id.repr(self.tcx()),
impl_def_id.repr(self.tcx()));
// Check if there is an unboxed-closure self-type in the list of receivers.
// If so, add "synthetic impls".
let steps = self.steps.clone();
- for step in steps.iter() {
+ for step in &*steps {
let (closure_def_id, _, _) = match step.self_ty.sty {
ty::ty_closure(a, b, ref c) => (a, b, c),
_ => continue,
method.repr(self.tcx()),
method_index);
- for step in self.steps.iter() {
+ for step in &*self.steps {
debug!("assemble_projection_candidates: step={}",
step.repr(self.tcx()));
None => { }
}
- for impl_item in impl_items.iter() {
+ for impl_item in impl_items {
match *impl_item {
ast::MethodImplItem(ref m) => {
check_method_body(ccx, &impl_pty.generics, &**m);
ast::ItemTrait(_, ref generics, _, ref trait_methods) => {
check_trait_on_unimplemented(ccx, generics, it);
let trait_def = ty::lookup_trait_def(ccx.tcx, local_def(it.id));
- for trait_method in trait_methods.iter() {
+ for trait_method in trait_methods {
match *trait_method {
RequiredMethod(..) => {
// Nothing to do, since required methods don't have
}
ast::ItemForeignMod(ref m) => {
if m.abi == abi::RustIntrinsic {
- for item in m.items.iter() {
+ for item in &m.items {
check_intrinsic_type(ccx, &**item);
}
} else {
- for item in m.items.iter() {
+ for item in &m.items {
let pty = ty::lookup_item_type(ccx.tcx, local_def(item.id));
if !pty.generics.types.is_empty() {
span_err!(ccx.tcx.sess, item.span, E0044,
// Check existing impl methods to see if they are both present in trait
// and compatible with trait signature
- for impl_item in impl_items.iter() {
+ for impl_item in impl_items {
match *impl_item {
ast::MethodImplItem(ref impl_method) => {
let impl_method_def_id = local_def(impl_method.id);
// Check for missing items from trait
let provided_methods = ty::provided_trait_methods(tcx, impl_trait_ref.def_id);
let mut missing_methods = Vec::new();
- for trait_item in trait_items.iter() {
+ for trait_item in &*trait_items {
match *trait_item {
ty::MethodTraitItem(ref trait_method) => {
let is_implemented =
// of arguments when we typecheck the functions. This isn't really the
// right way to do this.
let xs = [false, true];
- for check_blocks in xs.iter() {
+ for check_blocks in &xs {
let check_blocks = *check_blocks;
debug!("check_blocks={}", check_blocks);
let mut best_dist = name.len();
let fields = ty::lookup_struct_fields(tcx, id);
let mut best = None;
- for elem in fields.iter() {
+ for elem in &fields {
let n = elem.name.as_str();
// ignore already set fields
if skip.iter().any(|&x| x == n) {
let mut class_field_map = FnvHashMap();
let mut fields_found = 0;
- for field in field_types.iter() {
+ for field in field_types {
class_field_map.insert(field.name, (field.id, false));
}
let mut error_happened = false;
// Typecheck each field.
- for field in ast_fields.iter() {
+ for field in ast_fields {
let mut expected_field_type = tcx.types.err;
let pair = class_field_map.get(&field.ident.node.name).map(|x| *x);
assert!(fields_found <= field_types.len());
if fields_found < field_types.len() {
let mut missing_fields = Vec::new();
- for class_field in field_types.iter() {
+ for class_field in field_types {
let name = class_field.name;
let (_, seen) = class_field_map[name];
if !seen {
// Make sure to still write the types
// otherwise we might ICE
fcx.write_error(id);
- for field in fields.iter() {
+ for field in fields {
check_expr(fcx, &*field.expr);
}
match *base_expr {
constrain_path_type_parameters(fcx, expr);
}
ast::ExprInlineAsm(ref ia) => {
- for &(_, ref input) in ia.inputs.iter() {
+ for &(_, ref input) in &ia.inputs {
check_expr(fcx, &**input);
}
- for &(_, ref out, _) in ia.outputs.iter() {
+ for &(_, ref out, _) in &ia.outputs {
check_expr(fcx, &**out);
}
fcx.write_nil(id);
let typ = match uty {
Some(uty) => {
- for e in args.iter() {
+ for e in args {
check_expr_coercable_to_type(fcx, &**e, uty);
}
uty
}
None => {
let t: Ty = fcx.infcx().next_ty_var();
- for e in args.iter() {
+ for e in args {
check_expr_has_type(fcx, &**e, t);
}
t
let mut warned = false;
let mut any_diverges = false;
let mut any_err = false;
- for s in blk.stmts.iter() {
+ for s in &blk.stmts {
check_stmt(fcx, &**s);
let s_id = ast_util::stmt_id(&**s);
let s_ty = fcx.node_ty(s_id);
let mut disr_vals: Vec<ty::Disr> = Vec::new();
let mut prev_disr_val: Option<ty::Disr> = None;
- for v in vs.iter() {
+ for v in vs {
// If the discriminant value is specified explicitly in the enum check whether the
// initialization expression is valid, otherwise use the last value plus one.
// variables. If the user provided some types, we may still need
// to add defaults. If the user provided *too many* types, that's
// a problem.
- for &space in ParamSpace::all().iter() {
+ for &space in &ParamSpace::all() {
adjust_type_parameters(fcx, span, space, type_defs, &mut substs);
assert_eq!(substs.types.len(space), type_defs.len(space));
fcx: &FnCtxt,
segment: &ast::PathSegment)
{
- for typ in segment.parameters.types().iter() {
+ for typ in &segment.parameters.types() {
span_err!(fcx.tcx().sess, typ.span, E0085,
"type parameters may not appear here");
break;
}
- for lifetime in segment.parameters.lifetimes().iter() {
+ for lifetime in &segment.parameters.lifetimes() {
span_err!(fcx.tcx().sess, lifetime.span, E0086,
"lifetime parameters may not appear here");
break;
span: Span,
component_tys: &[Ty<'tcx>]) {
let mut rcx = Rcx::new(fcx, Repeating(0), SubjectNode::None);
- for &component_ty in component_tys.iter() {
+ for &component_ty in component_tys {
// Check that each type outlives the empty region. Since the
// empty region is a subregion of all others, this can't fail
// unless the type does not meet the well-formedness
.region_obligations(node_id)
.to_vec();
- for r_o in region_obligations.iter() {
+ for r_o in &region_obligations {
debug!("visit_region_obligations: r_o={}",
r_o.repr(self.tcx()));
let sup_type = self.resolve_type(r_o.sup_type);
debug!("relate_free_regions >>");
let tcx = self.tcx();
- for &ty in fn_sig_tys.iter() {
+ for &ty in fn_sig_tys {
let ty = self.resolve_type(ty);
debug!("relate_free_regions(t={})", ty.repr(tcx));
let body_scope = CodeExtent::from_node_id(body_id);
tcx,
ty,
body_scope);
- for constraint in constraints.iter() {
+ for constraint in &constraints {
debug!("constraint: {}", constraint.repr(tcx));
match *constraint {
regionmanip::RegionSubRegionConstraint(_,
fn visit_arm(rcx: &mut Rcx, arm: &ast::Arm) {
// see above
- for p in arm.pats.iter() {
+ for p in &arm.pats {
constrain_bindings_in_pat(&**p, rcx);
}
let has_method_map = rcx.fcx.inh.method_map.borrow().contains_key(&method_call);
// Check any autoderefs or autorefs that appear.
- for &adjustment in rcx.fcx.inh.adjustments.borrow().get(&expr.id).iter() {
+ if let Some(adjustment) = rcx.fcx.inh.adjustments.borrow().get(&expr.id) {
debug!("adjustment={:?}", adjustment);
match *adjustment {
ty::AdjustDerefRef(ty::AutoDerefRef {autoderefs, autoref: ref opt_autoref}) => {
let expr_ty = rcx.resolve_node_type(expr.id);
constrain_autoderefs(rcx, expr, autoderefs, expr_ty);
- for autoref in opt_autoref.iter() {
+ if let Some(ref autoref) = *opt_autoref {
link_autoref(rcx, expr, autoderefs, autoref);
// Require that the resulting region encompasses
debug!("ensure_free_variable_types_outlive_closure_bound({}, {})",
bounds.region_bound.repr(tcx), expr.repr(tcx));
- for freevar in freevars.iter() {
+ for freevar in freevars {
let var_node_id = {
let def_id = freevar.def.def_id();
assert!(def_id.krate == ast::LOCAL_CRATE);
};
// Check that the type meets the criteria of the existential bounds:
- for builtin_bound in bounds.builtin_bounds.iter() {
+ for builtin_bound in &bounds.builtin_bounds {
let code = traits::ClosureCapture(var_node_id, expr.span, builtin_bound);
let cause = traits::ObligationCause::new(freevar.span, rcx.fcx.body_id, code);
rcx.fcx.register_builtin_bound(var_ty, builtin_bound, cause);
let tcx = rcx.fcx.ccx.tcx;
debug!("constrain_captured_variables({}, {})",
region_bound.repr(tcx), expr.repr(tcx));
- for freevar in freevars.iter() {
+ for freevar in freevars {
debug!("constrain_captured_variables: freevar.def={:?}", freevar.def);
// Identify the variable being closed over and its node-id.
}
// as loop above, but for receiver
- for r in receiver.iter() {
+ if let Some(r) = receiver {
debug!("receiver: {}", r.repr(tcx));
type_of_node_must_outlive(
rcx, infer::CallRcvr(r.span),
r.id, callee_region);
if implicitly_ref_args {
- link_by_ref(rcx, &**r, callee_scope);
+ link_by_ref(rcx, &*r, callee_scope);
}
}
}
let mc = mc::MemCategorizationContext::new(rcx.fcx);
let discr_cmt = ignore_err!(mc.cat_expr(discr));
debug!("discr_cmt={}", discr_cmt.repr(rcx.tcx()));
- for arm in arms.iter() {
- for root_pat in arm.pats.iter() {
+ for arm in arms {
+ for root_pat in &arm.pats {
link_pattern(rcx, mc, discr_cmt.clone(), &**root_pat);
}
}
fn link_fn_args(rcx: &Rcx, body_scope: CodeExtent, args: &[ast::Arg]) {
debug!("regionck::link_fn_args(body_scope={:?})", body_scope);
let mc = mc::MemCategorizationContext::new(rcx.fcx);
- for arg in args.iter() {
+ for arg in args {
let arg_ty = rcx.fcx.node_ty(arg.id);
let re_scope = ty::ReScope(body_scope);
let arg_cmt = mc.cat_rvalue(arg.id, arg.ty.span, re_scope, arg_ty);
rcx.tcx(),
ty,
region);
- for constraint in constraints.iter() {
+ for constraint in &constraints {
debug!("constraint: {}", constraint.repr(rcx.tcx()));
match *constraint {
regionmanip::RegionSubRegionConstraint(None, r_a, r_b) => {
// The problem is that the type of `x` is `&'a A`. To be
// well-formed, then, A must be lower-generic by `'a`, but we
// don't know that this holds from first principles.
- for &(ref r, ref p) in rcx.region_bound_pairs.iter() {
+ for &(ref r, ref p) in &rcx.region_bound_pairs {
debug!("generic={} p={}",
generic.repr(rcx.tcx()),
p.repr(rcx.tcx()));
}
ty::ty_tup(ref tuptys) => {
- for &tupty in tuptys.iter() {
+ for &tupty in tuptys {
self.accumulate_from_ty(tupty);
}
}
// Variance of each type/region parameter.
let variances = ty::item_variances(self.tcx, def_id);
- for &space in ParamSpace::all().iter() {
+ for &space in &ParamSpace::all() {
let region_params = substs.regions().get_slice(space);
let region_variances = variances.regions.get_slice(space);
let region_param_defs = generics.regions.get_slice(space);
}
}
- for &region_bound in region_param_def.bounds.iter() {
+ for &region_bound in &region_param_def.bounds {
// The type declared a constraint like
//
// 'b : 'a
// Inspect bounds on this type parameter for any
// region bounds.
- for &r in type_param_def.bounds.region_bounds.iter() {
+ for &r in &type_param_def.bounds.region_bounds {
self.stack.push((r, Some(ty)));
self.accumulate_from_ty(type_param_ty);
self.stack.pop().unwrap();
// And then, in turn, to be well-formed, the
// `region_bound` that user specified must imply the
// region bounds required from all of the trait types:
- for &r_d in required_region_bounds.iter() {
+ for &r_d in &required_region_bounds {
// Each of these is an instance of the `'c <= 'b`
// constraint above
self.out.push(RegionSubRegionConstraint(Some(ty), r_d, r_c));
}
ty::with_freevars(self.tcx(), expr.id, |freevars| {
- for freevar in freevars.iter() {
+ for freevar in freevars {
let var_node_id = freevar.def.local_node_id();
let upvar_id = ty::UpvarId { var_id: var_node_id,
closure_expr_id: expr.id };
// bounds attached to the object cast. (In other words, if the
// object type is Foo+Send, this would create an obligation
// for the Send check.)
- for builtin_bound in object_trait.bounds.builtin_bounds.iter() {
+ for builtin_bound in &object_trait.bounds.builtin_bounds {
fcx.register_builtin_bound(
referent_ty,
builtin_bound,
// Create obligations for the projection predicates.
let projection_bounds =
object_trait.projection_bounds_with_self_ty(fcx.tcx(), referent_ty);
- for projection_bound in projection_bounds.iter() {
+ for projection_bound in &projection_bounds {
let projection_obligation =
Obligation::new(cause.clone(), projection_bound.as_predicate());
fcx.register_predicate(projection_obligation);
})
.collect();
- for projection_bound in object_trait.bounds.projection_bounds.iter() {
+ for projection_bound in &object_trait.bounds.projection_bounds {
let pair = (projection_bound.0.projection_ty.trait_ref.def_id,
projection_bound.0.projection_ty.item_name);
associated_types.remove(&pair);
item.span,
region::CodeExtent::from_node_id(item.id),
Some(&mut this.cache));
- for variant in variants.iter() {
- for field in variant.fields.iter() {
+ for variant in &variants {
+ for field in &variant.fields {
// Regions are checked below.
bounds_checker.check_traits_in_ty(field.ty);
}
// For DST, all intermediate types must be sized.
if variant.fields.len() > 0 {
- for field in variant.fields.init().iter() {
+ for field in variant.fields.init() {
fcx.register_builtin_bound(
field.ty,
ty::BoundSized,
let impl_params = generics.types.get_slice(subst::TypeSpace).iter()
.map(|tp| tp.name).collect::<HashSet<_>>();
- for method_param in generics.types.get_slice(subst::FnSpace).iter() {
+ for method_param in generics.types.get_slice(subst::FnSpace) {
if impl_params.contains(&method_param.name) {
span_err!(tcx.sess, span, E0194,
"type parameter `{}` shadows another type parameter of the same name",
assert_eq!(fcx.writeback_errors.get(), false);
let mut wbcx = WritebackCx::new(fcx);
wbcx.visit_block(blk);
- for arg in decl.inputs.iter() {
+ for arg in &decl.inputs {
wbcx.visit_node_id(ResolvingPattern(arg.pat.span), arg.id);
wbcx.visit_pat(&*arg.pat);
match e.node {
ast::ExprClosure(_, _, ref decl, _) => {
- for input in decl.inputs.iter() {
+ for input in &decl.inputs {
let _ = self.visit_node_id(ResolvingExpr(e.span),
input.id);
}
return;
}
- for (upvar_id, upvar_capture) in self.fcx.inh.upvar_capture_map.borrow().iter() {
+ for (upvar_id, upvar_capture) in &*self.fcx.inh.upvar_capture_map.borrow() {
let new_upvar_capture = match *upvar_capture {
ty::UpvarCapture::ByValue => ty::UpvarCapture::ByValue,
ty::UpvarCapture::ByRef(ref upvar_borrow) => {
return
}
- for (def_id, closure_ty) in self.fcx.inh.closure_tys.borrow().iter() {
+ for (def_id, closure_ty) in &*self.fcx.inh.closure_tys.borrow() {
let closure_ty = self.resolve(closure_ty, ResolvingClosure(*def_id));
self.fcx.tcx().closure_tys.borrow_mut().insert(*def_id, closure_ty);
}
- for (def_id, &closure_kind) in self.fcx.inh.closure_kinds.borrow().iter() {
+ for (def_id, &closure_kind) in &*self.fcx.inh.closure_kinds.borrow() {
self.fcx.tcx().closure_kinds.borrow_mut().insert(*def_id, closure_kind);
}
}
// the tcx.
let mut tcx_inherent_impls =
self.crate_context.tcx.inherent_impls.borrow_mut();
- for (k, v) in self.inherent_impls.borrow().iter() {
+ for (k, v) in &*self.inherent_impls.borrow() {
tcx_inherent_impls.insert((*k).clone(),
Rc::new((*v.borrow()).clone()));
}
let impl_items = self.create_impl_from_item(item);
- for associated_trait in associated_traits.iter() {
+ for associated_trait in associated_traits {
let trait_ref = ty::node_id_to_trait_ref(self.crate_context.tcx,
associated_trait.ref_id);
debug!("(checking implementation) adding impl for trait '{}', item '{}'",
let impl_type_scheme = ty::lookup_item_type(tcx, impl_id);
let prov = ty::provided_trait_methods(tcx, trait_ref.def_id);
- for trait_method in prov.iter() {
+ for trait_method in &prov {
// Synthesize an ID.
let new_id = tcx.sess.next_node_id();
let new_did = local_def(new_id);
}
}).collect();
- for trait_ref in trait_refs.iter() {
+ if let Some(ref trait_ref) = *trait_refs {
let ty_trait_ref = ty::node_id_to_trait_ref(
self.crate_context.tcx,
trait_ref.ref_id);
assert!(associated_traits.is_some());
// Record all the trait items.
- for trait_ref in associated_traits.iter() {
+ if let Some(trait_ref) = associated_traits {
self.add_trait_impl(trait_ref.def_id, impl_def_id);
}
// For any methods that use a default implementation, add them to
// the map. This is a bit unfortunate.
- for item_def_id in impl_items.iter() {
+ for item_def_id in &impl_items {
let impl_item = ty::impl_or_trait_item(tcx, item_def_id.def_id());
match impl_item {
ty::MethodTraitItem(ref method) => {
- for &source in method.provided_source.iter() {
+ if let Some(source) = method.provided_source {
tcx.provided_method_sources
.borrow_mut()
.insert(item_def_id.def_id(), source);
Some(found_impls) => found_impls
};
- for &impl_did in trait_impls.borrow().iter() {
+ for &impl_did in &*trait_impls.borrow() {
let items = &(*impl_items)[impl_did];
if items.len() < 1 {
// We'll error out later. For now, just don't ICE.
// Clone first to avoid a double borrow error.
let trait_impls = trait_impls.borrow().clone();
- for &impl_did in trait_impls.iter() {
+ for &impl_did in &trait_impls {
debug!("check_implementations_of_copy: impl_did={}",
impl_did.repr(tcx));
// replace the type parameters declared on the trait with those
// from the impl
- for &space in [subst::TypeSpace, subst::SelfSpace].iter() {
+ for &space in &[subst::TypeSpace, subst::SelfSpace] {
method_generics.types.replace(
space,
impl_type_scheme.generics.types.get_slice(space).to_vec());
(k, v.borrow().clone())
}).collect();
- for &(trait_def_id, ref impls) in trait_def_ids.iter() {
+ for &(trait_def_id, ref impls) in &trait_def_ids {
self.check_for_overlapping_impls_of_trait(trait_def_id, impls);
}
}
continue;
}
- for &impl2_def_id in trait_impls[(i+1)..].iter() {
+ for &impl2_def_id in &trait_impls[(i+1)..] {
self.check_if_impls_overlap(trait_def_id,
impl1_def_id,
impl2_def_id);
let tcx = ccx.tcx;
// Create a set of parameter types shared among all the variants.
- for variant in variants.iter() {
+ for variant in variants {
let variant_def_id = local_def(variant.node.id);
// Nullary enum constructors get turned into constants; n-ary enum
if let ast::ItemTrait(_, _, _, ref trait_items) = item.node {
// For each method, construct a suitable ty::Method and
// store it into the `tcx.impl_or_trait_items` table:
- for trait_item in trait_items.iter() {
+ for trait_item in trait_items {
match *trait_item {
ast::RequiredMethod(_) |
ast::ProvidedMethod(_) => {
thing: &'static str) {
let mut warn = false;
- for ty_param in generics.ty_params.iter() {
- for bound in ty_param.bounds.iter() {
+ for ty_param in &*generics.ty_params {
+ for bound in &*ty_param.bounds {
match *bound {
ast::TraitTyParamBound(..) => {
warn = true;
};
let mut methods = Vec::new();
- for impl_item in impl_items.iter() {
+ for impl_item in impl_items {
match *impl_item {
ast::MethodImplItem(ref method) => {
let body_id = method.pe_body().id;
&ty_generics,
parent_visibility);
- for trait_ref in opt_trait_ref.iter() {
+ if let Some(ref trait_ref) = *opt_trait_ref {
astconv::instantiate_trait_ref(ccx,
&ExplicitRscope,
trait_ref,
it.ident.repr(ccx.tcx),
trait_def.repr(ccx.tcx));
- for trait_method in trait_methods.iter() {
+ for trait_method in trait_methods {
let self_type = ty::mk_self_type(tcx);
match *trait_method {
ast::RequiredMethod(ref type_method) => {
{
// Try to find an unbound in bounds.
let mut unbound = None;
- for ab in ast_bounds.iter() {
+ for ab in ast_bounds {
if let &ast::TraitTyParamBound(ref ptr, ast::TraitBoundModifier::Maybe) = ab {
if unbound.is_none() {
assert!(ptr.bound_lifetimes.is_empty());
create_predicates(ccx.tcx, &mut result, space);
// Add the bounds not associated with a type parameter
- for predicate in where_clause.predicates.iter() {
+ for predicate in &where_clause.predicates {
match predicate {
&ast::WherePredicate::BoundPredicate(ref bound_pred) => {
let ty = ast_ty_to_ty(ccx, &ExplicitRscope, &*bound_pred.bounded_ty);
- for bound in bound_pred.bounds.iter() {
+ for bound in &*bound_pred.bounds {
match bound {
&ast::TyParamBound::TraitTyParamBound(ref poly_trait_ref, _) => {
let mut projections = Vec::new();
result.predicates.push(space, trait_ref.as_predicate());
- for projection in projections.iter() {
+ for projection in &projections {
result.predicates.push(space, projection.as_predicate());
}
}
&ast::WherePredicate::RegionPredicate(ref region_pred) => {
let r1 = ast_region_to_region(ccx.tcx, &region_pred.lifetime);
- for bound in region_pred.bounds.iter() {
+ for bound in &region_pred.bounds {
let r2 = ast_region_to_region(ccx.tcx, bound);
let pred = ty::Binder(ty::OutlivesPredicate(r1, r2));
result.predicates.push(space, ty::Predicate::RegionOutlives(pred))
result: &mut ty::Generics<'tcx>,
space: subst::ParamSpace)
{
- for type_param_def in result.types.get_slice(space).iter() {
+ for type_param_def in result.types.get_slice(space) {
let param_ty = ty::mk_param_from_def(tcx, type_param_def);
for predicate in ty::predicates(tcx, param_ty, &type_param_def.bounds).into_iter() {
result.predicates.push(space, predicate);
}
}
- for region_param_def in result.regions.get_slice(space).iter() {
+ for region_param_def in result.regions.get_slice(space) {
let region = region_param_def.to_early_bound_region();
- for &bound_region in region_param_def.bounds.iter() {
+ for &bound_region in &region_param_def.bounds {
// account for new binder introduced in the predicate below; no need
// to shift `region` because it is never a late-bound region
let bound_region = ty_fold::shift_region(bound_region, 1);
ast_generics: &ast::Generics,
abi: abi::Abi)
-> ty::TypeScheme<'tcx> {
- for i in decl.inputs.iter() {
+ for i in &decl.inputs {
match (*i).pat.node {
ast::PatIdent(_, _, _) => (),
ast::PatWild(ast::PatWildSingle) => (),
// `ty::VariantInfo::from_ast_variant()` ourselves
// here, mainly so as to mask the differences between
// struct-like enums and so forth.
- for ast_variant in enum_definition.variants.iter() {
+ for ast_variant in &enum_definition.variants {
let variant =
ty::VariantInfo::from_ast_variant(tcx,
&**ast_variant,
/*discriminant*/ 0);
- for arg_ty in variant.args.iter() {
+ for arg_ty in &variant.args {
self.add_constraints_from_ty(generics, *arg_ty, self.covariant);
}
}
ast::ItemStruct(..) => {
let generics = &ty::lookup_item_type(tcx, did).generics;
let struct_fields = ty::lookup_struct_fields(tcx, did);
- for field_info in struct_fields.iter() {
+ for field_info in &struct_fields {
assert_eq!(field_info.id.krate, ast::LOCAL_CRATE);
let field_ty = ty::node_id_to_type(tcx, field_info.id.node);
self.add_constraints_from_ty(generics, field_ty, self.covariant);
ast::ItemTrait(..) => {
let trait_items = ty::trait_items(tcx, did);
- for trait_item in trait_items.iter() {
+ for trait_item in &*trait_items {
match *trait_item {
ty::MethodTraitItem(ref method) => {
self.add_constraints_from_sig(&method.generics,
}
ty::ty_tup(ref subtys) => {
- for &subty in subtys.iter() {
+ for &subty in subtys {
self.add_constraints_from_ty(generics, subty, variance);
}
}
let projections = data.projection_bounds_with_self_ty(self.tcx(),
self.tcx().types.err);
- for projection in projections.iter() {
+ for projection in &projections {
self.add_constraints_from_ty(generics, projection.0.ty, self.invariant);
}
}
variance: VarianceTermPtr<'a>) {
debug!("add_constraints_from_substs(def_id={:?})", def_id);
- for p in type_param_defs.iter() {
+ for p in type_param_defs {
let variance_decl =
self.declared_variance(p.def_id, def_id, TypeParam,
p.space, p.index as uint);
self.add_constraints_from_ty(generics, substs_ty, variance_i);
}
- for p in region_param_defs.iter() {
+ for p in region_param_defs {
let variance_decl =
self.declared_variance(p.def_id, def_id,
RegionParam, p.space, p.index as uint);
sig: &ty::PolyFnSig<'tcx>,
variance: VarianceTermPtr<'a>) {
let contra = self.contravariant(variance);
- for &input in sig.0.inputs.iter() {
+ for &input in &sig.0.inputs {
self.add_constraints_from_ty(generics, input, contra);
}
if let ty::FnConverging(result_type) = sig.0.output {
while changed {
changed = false;
- for constraint in self.constraints.iter() {
+ for constraint in &self.constraints {
let Constraint { inferred, variance: term } = *constraint;
let InferredIndex(inferred) = inferred;
let variance = self.evaluate(term);
/// Finds the `doc` attribute as a List and returns the list of attributes
/// nested inside.
pub fn doc_list<'a>(&'a self) -> Option<&'a [Attribute]> {
- for attr in self.attrs.iter() {
+ for attr in &self.attrs {
match *attr {
List(ref x, ref list) if "doc" == *x => {
return Some(list.as_slice());
/// Finds the `doc` attribute as a NameValue and returns the corresponding
/// value found.
pub fn doc_value<'a>(&'a self) -> Option<&'a str> {
- for attr in self.attrs.iter() {
+ for attr in &self.attrs {
match *attr {
NameValue(ref x, ref v) if "doc" == *x => {
return Some(v.as_slice());
pub fn is_hidden_from_doc(&self) -> bool {
match self.doc_list() {
- Some(ref l) => {
- for innerattr in l.iter() {
+ Some(l) => {
+ for innerattr in l {
match *innerattr {
Word(ref s) if "hidden" == *s => {
return true
fn clean(&self, cx: &DocContext) -> (Vec<TyParamBound>, Vec<TypeBinding>) {
let mut tp_bounds = vec![];
self.region_bound.clean(cx).map(|b| tp_bounds.push(RegionBound(b)));
- for bb in self.builtin_bounds.iter() {
+ for bb in &self.builtin_bounds {
tp_bounds.push(bb.clean(cx));
}
let mut bindings = vec![];
- for &ty::Binder(ref pb) in self.projection_bounds.iter() {
+ for &ty::Binder(ref pb) in &self.projection_bounds {
bindings.push(TypeBinding {
name: pb.projection_ty.item_name.clean(cx),
ty: pb.ty.clean(cx)
// collect any late bound regions
let mut late_bounds = vec![];
- for &ty_s in self.substs.types.get_slice(ParamSpace::TypeSpace).iter() {
+ for &ty_s in self.substs.types.get_slice(ParamSpace::TypeSpace) {
use rustc::middle::ty::{Region, sty};
if let sty::ty_tup(ref ts) = ty_s.sty {
- for &ty_s in ts.iter() {
+ for &ty_s in ts {
if let sty::ty_rptr(ref reg, _) = ty_s.sty {
if let &Region::ReLateBound(_, _) = *reg {
debug!(" hit an ReLateBound {:?}", reg);
impl<'tcx> Clean<Vec<TyParamBound>> for ty::ParamBounds<'tcx> {
fn clean(&self, cx: &DocContext) -> Vec<TyParamBound> {
let mut v = Vec::new();
- for t in self.trait_bounds.iter() {
+ for t in &self.trait_bounds {
v.push(t.clean(cx));
}
for r in self.region_bounds.iter().filter_map(|r| r.clean(cx)) {
Some(did) => did,
None => return false
};
- for bound in bounds.iter() {
+ for bound in bounds {
if let TyParamBound::TraitBound(PolyTrait {
trait_: Type::ResolvedPath { did, .. }, ..
}, TBM::None) = *bound {
}).collect::<Vec<_>>();
// Finally, run through the type parameters again and insert a ?Sized unbound for
// any we didn't find to be Sized.
- for tp in stripped_typarams.iter() {
+ for tp in &stripped_typarams {
if !sized_params.contains(&tp.name) {
let mut sized_bound = ty::BuiltinBound::BoundSized.clean(cx);
if let TyParamBound::TraitBound(_, ref mut tbm) = sized_bound {
}
fn find(attrs: &[Attribute]) -> Option<PrimitiveType> {
- for attr in attrs.iter() {
+ for attr in attrs {
let list = match *attr {
List(ref k, ref l) if *k == "doc" => l,
_ => continue,
};
- for sub_attr in list.iter() {
+ for sub_attr in list {
let value = match *sub_attr {
NameValue(ref k, ref v)
if *k == "primitive" => v.as_slice(),
let mut ret = vec![];
let remaining = if !denied {
let mut remaining = vec![];
- for path in list.iter() {
+ for path in list {
match inline::try_inline(cx, path.node.id(), None) {
Some(items) => {
ret.extend(items.into_iter());
pub fn load_external_files(names: &[String]) -> Option<String> {
let mut out = String::new();
- for name in names.iter() {
+ for name in names {
out.push_str(load_or_return!(name.as_slice(), None, None).as_slice());
out.push('\n');
}
if lifetimes.len() > 0 || types.len() > 0 || bindings.len() > 0 {
try!(f.write_str("<"));
let mut comma = false;
- for lifetime in lifetimes.iter() {
+ for lifetime in lifetimes {
if comma {
try!(f.write_str(", "));
}
comma = true;
try!(write!(f, "{}", *lifetime));
}
- for ty in types.iter() {
+ for ty in types {
if comma {
try!(f.write_str(", "));
}
comma = true;
try!(write!(f, "{}", *ty));
}
- for binding in bindings.iter() {
+ for binding in bindings {
if comma {
try!(f.write_str(", "));
}
clean::PathParameters::Parenthesized { ref inputs, ref output } => {
try!(f.write_str("("));
let mut comma = false;
- for ty in inputs.iter() {
+ for ty in inputs {
if comma {
try!(f.write_str(", "));
}
match rel_root {
Some(root) => {
let mut root = String::from_str(root.as_slice());
- for seg in path.segments[..amt].iter() {
+ for seg in &path.segments[..amt] {
if "super" == seg.name ||
"self" == seg.name {
try!(write!(w, "{}::", seg.name));
}
}
None => {
- for seg in path.segments[..amt].iter() {
+ for seg in &path.segments[..amt] {
try!(write!(w, "{}::", seg.name));
}
}
Some((ref fqp, shortty)) if abs_root.is_some() => {
let mut url = String::from_str(abs_root.unwrap().as_slice());
let to_link = &fqp[..fqp.len() - 1];
- for component in to_link.iter() {
+ for component in to_link {
url.push_str(component.as_slice());
url.push_str("/");
}
typarams: &Option<Vec<clean::TyParamBound> >) -> fmt::Result {
match *typarams {
Some(ref params) => {
- for param in params.iter() {
+ for param in params {
try!(write!(w, " + "));
try!(write!(w, "{}", *param));
}
(100 * cnt.unmarked) as f64/tot as f64));
try!(write!(f, "</td></tr>"));
- for submodule in m.submodules.iter() {
+ for submodule in &m.submodules {
try!(fmt_inner(f, context, submodule));
}
context.pop();
let default: &[_] = &[];
match krate.module.as_ref().map(|m| m.doc_list().unwrap_or(default)) {
Some(attrs) => {
- for attr in attrs.iter() {
+ for attr in attrs {
match *attr {
clean::NameValue(ref x, ref s)
if "html_favicon_url" == *x => {
krate = cache.fold_crate(krate);
// Cache where all our extern crates are located
- for &(n, ref e) in krate.externs.iter() {
+ for &(n, ref e) in &krate.externs {
cache.extern_locations.insert(n, extern_location(e, &cx.dst));
let did = ast::DefId { krate: n, node: ast::CRATE_NODE_ID };
cache.paths.insert(did, (vec![e.name.to_string()], ItemType::Module));
// Favor linking to as local extern as possible, so iterate all crates in
// reverse topological order.
for &(n, ref e) in krate.externs.iter().rev() {
- for &prim in e.primitives.iter() {
+ for &prim in &e.primitives {
cache.primitive_locations.insert(prim, n);
}
}
- for &prim in krate.primitives.iter() {
+ for &prim in &krate.primitives {
cache.primitive_locations.insert(prim, ast::LOCAL_CRATE);
}
// Attach all orphan methods to the type's definition if the type
// has since been learned.
- for &(pid, ref item) in orphan_methods.iter() {
+ for &(pid, ref item) in orphan_methods {
let did = ast_util::local_def(pid);
match paths.get(&did) {
Some(&(ref fqp, _)) => {
// Reduce `NodeId` in paths into smaller sequential numbers,
// and prune the paths that do not appear in the index.
- for item in search_index.iter() {
+ for item in &*search_index {
match item.parent {
Some(nodeid) => {
if !nodeid_to_pathid.contains_key(&nodeid) {
let mut w = try!(File::create(&dst));
try!(writeln!(&mut w, "var searchIndex = {{}};"));
try!(writeln!(&mut w, "{}", search_index));
- for index in all_indexes.iter() {
+ for index in &all_indexes {
try!(writeln!(&mut w, "{}", *index));
}
try!(writeln!(&mut w, "initSearch(searchIndex);"));
// Update the list of all implementors for traits
let dst = cx.dst.join("implementors");
try!(mkdir(&dst));
- for (&did, imps) in cache.implementors.iter() {
+ for (&did, imps) in &cache.implementors {
// Private modules can leak through to this phase of rustdoc, which
// could contain implementations for otherwise private types. In some
// rare cases we could find an implementation for an item which wasn't
};
let mut mydst = dst.clone();
- for part in remote_path[..remote_path.len() - 1].iter() {
+ for part in &remote_path[..remote_path.len() - 1] {
mydst.push(part.as_slice());
try!(mkdir(&mydst));
}
let mut f = BufferedWriter::new(try!(File::create(&mydst)));
try!(writeln!(&mut f, "(function() {{var implementors = {{}};"));
- for implementor in all_implementors.iter() {
+ for implementor in &all_implementors {
try!(write!(&mut f, "{}", *implementor));
}
try!(write!(&mut f, r"implementors['{}'] = [", krate.name));
- for imp in imps.iter() {
+ for imp in imps {
// If the trait and implementation are in the same crate, then
// there's no need to emit information about it (there's inlining
// going on). If they're in different crates then the crate defining
// Failing that, see if there's an attribute specifying where to find this
// external crate
- for attr in e.attrs.iter() {
+ for attr in &e.attrs {
match *attr {
clean::List(ref x, ref list) if "doc" == *x => {
- for attr in list.iter() {
+ for attr in list {
match *attr {
clean::NameValue(ref x, ref s)
if "html_root_url" == *x => {
impl<'a> Cache {
fn generics(&mut self, generics: &clean::Generics) {
- for typ in generics.type_params.iter() {
+ for typ in &generics.type_params {
self.typarams.insert(typ.did, typ.name.clone());
}
}
.collect::<String>();
match cache().paths.get(&it.def_id) {
Some(&(ref names, _)) => {
- for name in (&names[..names.len() - 1]).iter() {
+ for name in &names[..names.len() - 1] {
url.push_str(name.as_slice());
url.push_str("/");
}
fn build_sidebar(&self, m: &clean::Module) -> HashMap<String, Vec<NameDoc>> {
let mut map = HashMap::new();
- for item in m.items.iter() {
+ for item in &m.items {
if self.ignore_private_item(item) { continue }
// avoid putting foreign items to the sidebar.
debug!("{:?}", indices);
let mut curty = None;
- for &idx in indices.iter() {
+ for &idx in &indices {
let myitem = &items[idx];
let myty = Some(shortty(myitem));
try!(write!(w, "{{ }}"));
} else {
try!(write!(w, "{{\n"));
- for t in types.iter() {
+ for t in &types {
try!(write!(w, " "));
try!(render_method(w, t.item()));
try!(write!(w, ";\n"));
if types.len() > 0 && required.len() > 0 {
try!(w.write_str("\n"));
}
- for m in required.iter() {
+ for m in &required {
try!(write!(w, " "));
try!(render_method(w, m.item()));
try!(write!(w, ";\n"));
if required.len() > 0 && provided.len() > 0 {
try!(w.write_str("\n"));
}
- for m in provided.iter() {
+ for m in &provided {
try!(write!(w, " "));
try!(render_method(w, m.item()));
try!(write!(w, " {{ ... }}\n"));
<h2 id='associated-types'>Associated Types</h2>
<div class='methods'>
"));
- for t in types.iter() {
+ for t in &types {
try!(trait_item(w, *t));
}
try!(write!(w, "</div>"));
<h2 id='required-methods'>Required Methods</h2>
<div class='methods'>
"));
- for m in required.iter() {
+ for m in &required {
try!(trait_item(w, *m));
}
try!(write!(w, "</div>"));
<h2 id='provided-methods'>Provided Methods</h2>
<div class='methods'>
"));
- for m in provided.iter() {
+ for m in &provided {
try!(trait_item(w, *m));
}
try!(write!(w, "</div>"));
"));
match cache.implementors.get(&it.def_id) {
Some(implementors) => {
- for i in implementors.iter() {
+ for i in implementors {
try!(writeln!(w, "<li>{}<code>impl{} {} for {}{}</code></li>",
ConciseStability(&i.stability),
i.generics, i.trait_, i.for_, WhereClause(&i.generics)));
try!(write!(w, " {{}}"));
} else {
try!(write!(w, " {{\n"));
- for v in e.variants.iter() {
+ for v in &e.variants {
try!(write!(w, " "));
let name = v.name.as_ref().unwrap().as_slice();
match v.inner {
try!(document(w, it));
if e.variants.len() > 0 {
try!(write!(w, "<h2 class='variants'>Variants</h2>\n<table>"));
- for variant in e.variants.iter() {
+ for variant in &e.variants {
try!(write!(w, "<tr><td id='variant.{name}'>{stab}<code>{name}</code></td><td>",
stab = ConciseStability(&variant.stability),
name = variant.name.as_ref().unwrap().as_slice()));
doctree::Plain => {
try!(write!(w, " {{\n{}", tab));
let mut fields_stripped = false;
- for field in fields.iter() {
+ for field in fields {
match field.inner {
clean::StructFieldItem(clean::HiddenStructField) => {
fields_stripped = true;
.partition(|i| i.impl_.trait_.is_none());
if non_trait.len() > 0 {
try!(write!(w, "<h2 id='methods'>Methods</h2>"));
- for i in non_trait.iter() {
+ for i in &non_trait {
try!(render_impl(w, i));
}
}
Implementations</h2>"));
let (derived, manual): (Vec<_>, _) = traits.into_iter()
.partition(|i| i.impl_.derived);
- for i in manual.iter() {
+ for i in &manual {
try!(render_impl(w, i));
}
if derived.len() > 0 {
try!(write!(w, "<h3 id='derived_implementations'>Derived Implementations \
</h3>"));
- for i in derived.iter() {
+ for i in &derived {
try!(render_impl(w, i));
}
}
}
try!(write!(w, "<div class='impl-items'>"));
- for trait_item in i.impl_.items.iter() {
+ for trait_item in &i.impl_.items {
try!(doctraititem(w, trait_item, true));
}
fn render_default_methods(w: &mut fmt::Formatter,
t: &clean::Trait,
i: &clean::Impl) -> fmt::Result {
- for trait_item in t.items.iter() {
+ for trait_item in &t.items {
let n = trait_item.item().name.clone();
match i.items.iter().find(|m| { m.name == n }) {
Some(..) => continue,
None => return Ok(())
};
try!(write!(w, "<div class='block {}'><h2>{}</h2>", short, longty));
- for &NameDoc(ref name, ref doc) in items.iter() {
+ for &NameDoc(ref name, ref doc) in items {
let curty = shortty(cur).to_static_str();
let class = if cur.name.as_ref().unwrap() == name &&
short == curty { "current" } else { "" };
impl fmt::Display for Toc {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
try!(write!(fmt, "<ul>"));
- for entry in self.entries.iter() {
+ for entry in &self.entries {
// recursively format this table of contents (the
// `{children}` is the key).
try!(write!(fmt,
if matches.opt_strs("passes") == ["list"] {
println!("Available passes for running rustdoc:");
- for &(name, _, description) in PASSES.iter() {
+ for &(name, _, description) in PASSES {
println!("{:>20} - {}", name, description);
}
println!("{}", "\nDefault passes for rustdoc:"); // FIXME: #9970
- for &name in DEFAULT_PASSES.iter() {
+ for &name in DEFAULT_PASSES {
println!("{:>20}", name);
}
return 0;
let input = matches.free[0].as_slice();
let mut libs = SearchPaths::new();
- for s in matches.opt_strs("L").iter() {
+ for s in &matches.opt_strs("L") {
libs.add_path(s.as_slice());
}
let externs = match parse_externs(&matches) {
/// error message.
fn parse_externs(matches: &getopts::Matches) -> Result<core::Externs, String> {
let mut externs = HashMap::new();
- for arg in matches.opt_strs("extern").iter() {
+ for arg in &matches.opt_strs("extern") {
let mut parts = arg.splitn(1, '=');
let name = match parts.next() {
Some(s) => s,
// First, parse the crate and extract all relevant information.
let mut paths = SearchPaths::new();
- for s in matches.opt_strs("L").iter() {
+ for s in &matches.opt_strs("L") {
paths.add_path(s.as_slice());
}
let cfgs = matches.opt_strs("cfg");
// with the passes which we are supposed to run.
match krate.module.as_ref().unwrap().doc_list() {
Some(nested) => {
- for inner in nested.iter() {
+ for inner in nested {
match *inner {
clean::Word(ref x)
if "no_default_passes" == *x => {
let path = matches.opt_str("plugin-path")
.unwrap_or("/tmp/rustdoc/plugins".to_string());
let mut pm = plugins::PluginManager::new(Path::new(path));
- for pass in passes.iter() {
+ for pass in &passes {
let plugin = match PASSES.iter()
.position(|&(p, _, _)| {
p == *pass
output.set_extension("html");
let mut css = String::new();
- for name in matches.opt_strs("markdown-css").iter() {
+ for name in &matches.opt_strs("markdown-css") {
let s = format!("<link rel=\"stylesheet\" type=\"text/css\" href=\"{}\">\n", name);
css.push_str(s.as_slice())
}
fn fold_item(&mut self, i: Item) -> Option<Item> {
let mut i = i;
let mut avec: Vec<clean::Attribute> = Vec::new();
- for attr in i.attrs.iter() {
+ for attr in &i.attrs {
match attr {
&clean::NameValue(ref x, ref s)
if "doc" == *x => {
fn fold_item(&mut self, i: Item) -> Option<Item> {
let mut docstr = String::new();
let mut i = i;
- for attr in i.attrs.iter() {
+ for attr in &i.attrs {
match *attr {
clean::NameValue(ref x, ref s)
if "doc" == *x => {
pub fn run_plugins(&self, krate: clean::Crate) -> (clean::Crate, Vec<PluginJson> ) {
let mut out_json = Vec::new();
let mut krate = krate;
- for &callback in self.callbacks.iter() {
+ for &callback in &self.callbacks {
let (c, res) = callback(krate);
krate = c;
out_json.push(res);
om.vis = vis;
om.stab = self.stability(id);
om.id = id;
- for i in m.items.iter() {
+ for i in &m.items {
self.visit_item(&**i, None, &mut om);
}
om
if glob {
match it.node {
ast::ItemMod(ref m) => {
- for i in m.items.iter() {
+ for i in &m.items {
self.visit_item(&**i, None, om);
}
}
fn encode<S: Encoder>(&self, e: &mut S) -> Result<(), S::Error> {
e.emit_map(self.len(), |e| {
let mut i = 0;
- for (key, val) in self.iter() {
+ for (key, val) in self {
try!(e.emit_map_elt_key(i, |e| key.encode(e)));
try!(e.emit_map_elt_val(i, |e| val.encode(e)));
i += 1;
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_seq(self.len(), |s| {
let mut i = 0;
- for e in self.iter() {
+ for e in self {
try!(s.emit_seq_elt(i, |s| e.encode(s)));
i += 1;
}
> Encodable for EnumSet<T> {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
let mut bits = 0;
- for item in self.iter() {
+ for item in self {
bits |= item.to_uint();
}
s.emit_uint(bits)
fn encode<E: Encoder>(&self, e: &mut E) -> Result<(), E::Error> {
e.emit_map(self.len(), |e| {
let mut i = 0;
- for (key, val) in self.iter() {
+ for (key, val) in self {
try!(e.emit_map_elt_key(i, |e| key.encode(e)));
try!(e.emit_map_elt_val(i, |e| val.encode(e)));
i += 1;
fn encode<E: Encoder>(&self, s: &mut E) -> Result<(), E::Error> {
s.emit_seq(self.len(), |s| {
let mut i = 0;
- for e in self.iter() {
+ for e in self {
try!(s.emit_seq_elt(i, |s| e.encode(s)));
i += 1;
}
/// ```
fn to_hex(&self) -> String {
let mut v = Vec::with_capacity(self.len() * 2);
- for &byte in self.iter() {
+ for &byte in self {
v.push(CHARS[(byte >> 4) as uint]);
v.push(CHARS[(byte & 0xf) as uint]);
}
/// Otherwise, it will return the Json value associated with the final key.
pub fn find_path<'a>(&'a self, keys: &[&str]) -> Option<&'a Json>{
let mut target = self;
- for key in keys.iter() {
+ for key in keys {
match target.find(*key) {
Some(t) => { target = t; },
None => return None
match map.get(key) {
Some(json_value) => Some(json_value),
None => {
- for (_, v) in map.iter() {
+ for (_, v) in map {
match v.search(key) {
x if x.is_some() => return x,
_ => ()
// Used by Parser to insert StackElement::Key elements at the top of the stack.
fn push_key(&mut self, key: string::String) {
self.stack.push(InternalKey(self.str_buffer.len() as u16, key.len() as u16));
- for c in key.as_bytes().iter() {
+ for c in key.as_bytes() {
self.str_buffer.push(*c);
}
}
impl<A: ToJson> ToJson for BTreeMap<string::String, A> {
fn to_json(&self) -> Json {
let mut d = BTreeMap::new();
- for (key, value) in self.iter() {
+ for (key, value) in self {
d.insert((*key).clone(), value.to_json());
}
Json::Object(d)
impl<A: ToJson> ToJson for HashMap<string::String, A> {
fn to_json(&self) -> Json {
let mut d = BTreeMap::new();
- for (key, value) in self.iter() {
+ for (key, value) in self {
d.insert((*key).clone(), value.to_json());
}
Json::Object(d)
fn mk_object(items: &[(string::String, Json)]) -> Json {
let mut d = BTreeMap::new();
- for item in items.iter() {
+ for item in items {
match *item {
(ref key, ref value) => { d.insert((*key).clone(), (*value).clone()); },
}
("\"\\u12ab\"", "\u{12ab}"),
("\"\\uAB12\"", "\u{AB12}")];
- for &(i, o) in s.iter() {
+ for &(i, o) in &s {
let v: string::String = super::decode(i).unwrap();
assert_eq!(v, o);
}
_ => {
f(b'\\');
f(b'x');
- for &offset in [4u, 0u].iter() {
+ for &offset in &[4u, 0u] {
match ((c as i32) >> offset) & 0xf {
i @ 0 ... 9 => f(b'0' + (i as u8)),
i => f(b'a' + (i as u8 - 10)),
let mut observed: u32 = 0;
- for (k, v) in m.iter() {
+ for (k, v) in &m {
assert_eq!(*v, *k * 2);
observed |= 1 << *k;
}
let map: HashMap<int, int> = xs.iter().map(|&x| x).collect();
- for &(k, v) in xs.iter() {
+ for &(k, v) in &xs {
assert_eq!(map.get(&k), Some(&v));
}
}
assert!(a.insert(i));
}
let mut observed: u32 = 0;
- for k in a.iter() {
+ for k in &a {
observed |= 1 << *k;
}
assert_eq!(observed, 0xFFFF_FFFF);
let set: HashSet<int> = xs.iter().map(|&x| x).collect();
- for x in xs.iter() {
+ for x in &xs {
assert!(set.contains(x));
}
}
assert_eq!(last_i, 49);
}
- for _ in s.iter() { panic!("s should be empty!"); }
+ for _ in &s { panic!("s should be empty!"); }
// reset to try again.
s.extend(1..100);
let uints = [0, 1, 2, 42, 10_123, 100_123_456, ::u64::MAX];
let mut writer = Vec::new();
- for i in uints.iter() {
+ for i in &uints {
writer.write_le_u64(*i).unwrap();
}
let mut reader = MemReader::new(writer);
- for i in uints.iter() {
+ for i in &uints {
assert!(reader.read_le_u64().unwrap() == *i);
}
}
let uints = [0, 1, 2, 42, 10_123, 100_123_456, ::u64::MAX];
let mut writer = Vec::new();
- for i in uints.iter() {
+ for i in &uints {
writer.write_be_u64(*i).unwrap();
}
let mut reader = MemReader::new(writer);
- for i in uints.iter() {
+ for i in &uints {
assert!(reader.read_be_u64().unwrap() == *i);
}
}
let ints = [::i32::MIN, -123456, -42, -5, 0, 1, ::i32::MAX];
let mut writer = Vec::new();
- for i in ints.iter() {
+ for i in &ints {
writer.write_be_i32(*i).unwrap();
}
let mut reader = MemReader::new(writer);
- for i in ints.iter() {
+ for i in &ints {
// this tests that the sign extension is working
// (comparing the values as i32 would not test this)
assert!(reader.read_be_int_n(4).unwrap() == *i as i64);
}
let files = check!(readdir(dir));
let mut mem = [0u8; 4];
- for f in files.iter() {
+ for f in &files {
{
let n = f.filestem_str();
check!(File::open(f).read(&mut mem));
let ipaddrs = get_host_addresses("localhost").unwrap();
let mut found_local = false;
let local_addr = &Ipv4Addr(127, 0, 0, 1);
- for addr in ipaddrs.iter() {
+ for addr in &ipaddrs {
found_local = found_local || addr == local_addr;
}
assert!(found_local);
/// character.
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f, "{:?}", self.program));
- for arg in self.args.iter() {
+ for arg in &self.args {
try!(write!(f, " '{:?}'", arg));
}
Ok(())
let output = String::from_utf8(prog.wait_with_output().unwrap().output).unwrap();
let r = os::env();
- for &(ref k, ref v) in r.iter() {
+ for &(ref k, ref v) in &r {
// don't check windows magical empty-named variables
assert!(k.is_empty() ||
output.contains(format!("{}={}", *k, *v).as_slice()),
let output = String::from_utf8(prog.wait_with_output().unwrap().output).unwrap();
let r = os::env();
- for &(ref k, ref v) in r.iter() {
+ for &(ref k, ref v) in &r {
// don't check android RANDOM variables
if *k != "RANDOM".to_string() {
assert!(output.contains(format!("{}={}",
let mut final_base = base;
- for &(dir, base) in bases.iter() {
+ for &(dir, base) in &bases {
if path_s.contains(dir) {
final_base = base;
break;
unsafe {
fn env_convert(input: Vec<Vec<u8>>) -> Vec<(Vec<u8>, Vec<u8>)> {
let mut pairs = Vec::new();
- for p in input.iter() {
+ for p in &input {
let mut it = p.splitn(1, |b| *b == b'=');
let key = it.next().unwrap().to_vec();
let default: &[u8] = &[];
let mut custom_flags = false;
let len = round_up(min_len, page_size());
- for &o in options.iter() {
+ for &o in options {
match o {
MapReadable => { prot |= libc::PROT_READ; },
MapWritable => { prot |= libc::PROT_WRITE; },
let mut offset: uint = 0;
let len = round_up(min_len, page_size());
- for &o in options.iter() {
+ for &o in options {
match o {
MapReadable => { readable = true; },
MapWritable => { writable = true; },
fn test_env_getenv() {
let e = env();
assert!(e.len() > 0u);
- for p in e.iter() {
+ for p in &e {
let (n, v) = (*p).clone();
debug!("{}", n);
let v2 = getenv(n.as_slice());
setenv("HOME", "");
assert!(os::homedir().is_none());
- for s in oldhome.iter() {
+ if let Some(s) = oldhome {
setenv("HOME", s.as_slice());
}
}
setenv("USERPROFILE", "/home/PaloAlto");
assert!(os::homedir() == Some(Path::new("/home/MountainView")));
- for s in oldhome.iter() {
+ if let Some(s) = oldhome {
setenv("HOME", s.as_slice());
}
- for s in olduserprofile.iter() {
+ if let Some(s) = olduserprofile {
setenv("USERPROFILE", s.as_slice());
}
}
fn push_many<T: BytesContainer>(&mut self, paths: &[T]) {
let t: Option<&T> = None;
if BytesContainer::is_str(t) {
- for p in paths.iter() {
+ for p in paths {
self.push(p.container_as_str().unwrap())
}
} else {
- for p in paths.iter() {
+ for p in paths {
self.push(p.container_as_bytes())
}
}
// check every remainder mod 8, both in small and big vectors.
let lengths = [0, 1, 2, 3, 4, 5, 6, 7,
80, 81, 82, 83, 84, 85, 86, 87];
- for &n in lengths.iter() {
+ for &n in &lengths {
let mut v = repeat(0u8).take(n).collect::<Vec<_>>();
r.fill_bytes(v.as_mut_slice());
}
// start all the tasks
- for tx in txs.iter() {
+ for tx in &txs {
tx.send(()).unwrap();
}
}
let amt = CALLBACK_CNT.load(Ordering::SeqCst);
&CALLBACKS[..cmp::min(amt, MAX_CALLBACKS)]
};
- for cb in callbacks.iter() {
+ for cb in callbacks {
match cb.load(Ordering::SeqCst) {
0 => {}
n => {
status: SocketStatus) -> IoResult<()> {
let mut set: c::fd_set = unsafe { mem::zeroed() };
let mut max = 0;
- for &fd in fds.iter() {
+ for &fd in fds {
c::fd_set(&mut set, fd);
max = cmp::max(max, fd + 1);
}
Some(env) => {
let mut tmps = Vec::with_capacity(env.len());
- for pair in env.iter() {
+ for pair in env {
let mut kv = Vec::new();
kv.push_all(pair.0.container_as_bytes());
kv.push('=' as u8);
let mut in_progress = Vec::new();
let mut in_quote = false;
- for b in unparsed.iter() {
+ for b in unparsed {
match *b {
b';' if !in_quote => {
parsed.push(Path::new(in_progress.as_slice()));
// To have the spawning semantics of unix/windows stay the same, we need to
// read the *child's* PATH if one is provided. See #15149 for more details.
let program = cfg.env().and_then(|env| {
- for (key, v) in env.iter() {
+ for (key, v) in env {
if b"PATH" != key.container_as_bytes() { continue }
// Split the value and test each path to see if the
let mut cmd = String::new();
append_arg(&mut cmd, str::from_utf8(prog.as_bytes()).ok()
.expect("expected program name to be utf-8 encoded"));
- for arg in args.iter() {
+ for arg in args {
cmd.push(' ');
append_arg(&mut cmd, str::from_utf8(arg.as_bytes()).ok()
.expect("expected argument to be utf-8 encoded"));
Some(env) => {
let mut blk = Vec::new();
- for pair in env.iter() {
+ for pair in env {
let kv = format!("{}={}",
pair.0.container_as_str().unwrap(),
pair.1.container_as_str().unwrap());
DTOR_LOCK.unlock();
ret
};
- for &(key, dtor) in dtors.iter() {
+ for &(key, dtor) in &dtors {
let ptr = TlsGetValue(key);
if !ptr.is_null() {
TlsSetValue(key, ptr::null_mut());
unsafe extern fn run_dtors(mut ptr: *mut u8) {
while !ptr.is_null() {
let list: Box<List> = mem::transmute(ptr);
- for &(ptr, dtor) in list.iter() {
+ for &(ptr, dtor) in &*list {
dtor(ptr);
}
ptr = DTORS.get();
}
fn visit_fn_decl(&mut self, decl: &'ast FnDecl) {
- for a in decl.inputs.iter() {
+ for a in &decl.inputs {
self.insert(a.id, NodeArg(&*a.pat));
}
}
self.parent = i.id;
match i.node {
ItemImpl(_, _, _, _, _, ref impl_items) => {
- for impl_item in impl_items.iter() {
+ for impl_item in impl_items {
match *impl_item {
MethodImplItem(ref m) => {
self.insert(m.id, NodeImplItem(impl_item));
}
}
ItemEnum(ref enum_definition, _) => {
- for v in enum_definition.variants.iter() {
+ for v in &enum_definition.variants {
self.insert(v.node.id, NodeVariant(&**v));
}
}
ItemForeignMod(ref nm) => {
- for nitem in nm.items.iter() {
+ for nitem in &nm.items {
self.insert(nitem.id, NodeForeignItem(&**nitem));
}
}
}
}
ItemTrait(_, _, ref bounds, ref trait_items) => {
- for b in bounds.iter() {
+ for b in &**bounds {
if let TraitTyParamBound(ref t, TraitBoundModifier::None) = *b {
self.insert(t.trait_ref.ref_id, NodeItem(i));
}
}
- for tm in trait_items.iter() {
+ for tm in trait_items {
match *tm {
RequiredMethod(ref m) => {
self.insert(m.id, NodeTraitItem(tm));
-> (Vec<TypeMethod>, Vec<P<Method>> ) {
let mut reqd = Vec::new();
let mut provd = Vec::new();
- for trt_method in trait_methods.iter() {
+ for trt_method in trait_methods {
match *trt_method {
RequiredMethod(ref tm) => reqd.push((*tm).clone()),
ProvidedMethod(ref m) => provd.push((*m).clone()),
impl<'a, O: IdVisitingOperation> IdVisitor<'a, O> {
fn visit_generics_helper(&mut self, generics: &Generics) {
- for type_parameter in generics.ty_params.iter() {
+ for type_parameter in &*generics.ty_params {
self.operation.visit_id(type_parameter.id)
}
- for lifetime in generics.lifetimes.iter() {
+ for lifetime in &generics.lifetimes {
self.operation.visit_id(lifetime.lifetime.id)
}
}
ViewPathSimple(_, _) |
ViewPathGlob(_) => {}
ViewPathList(_, ref paths) => {
- for path in paths.iter() {
+ for path in paths {
self.operation.visit_id(path.node.id())
}
}
}
}
ItemEnum(ref enum_definition, _) => {
- for variant in enum_definition.variants.iter() {
+ for variant in &enum_definition.variants {
self.operation.visit_id(variant.node.id)
}
}
visit::FkFnBlock => {}
}
- for argument in function_declaration.inputs.iter() {
+ for argument in &function_declaration.inputs {
self.operation.visit_id(argument.id)
}
let mut feature = None;
let mut since = None;
let mut reason = None;
- for meta in metas.iter() {
+ for meta in metas {
if meta.name().get() == "feature" {
match meta.value_str() {
Some(v) => feature = Some(v),
pub fn require_unique_names(diagnostic: &SpanHandler, metas: &[P<MetaItem>]) {
let mut set = HashSet::new();
- for meta in metas.iter() {
+ for meta in metas {
let name = meta.name();
if !set.insert(name.clone()) {
match attr.node.value.node {
ast::MetaList(ref s, ref items) if *s == "repr" => {
mark_used(attr);
- for item in items.iter() {
+ for item in items {
match item.node {
ast::MetaWord(ref word) => {
let hint = match word.get() {
}
pub fn get_filemap(&self, filename: &str) -> Rc<FileMap> {
- for fm in self.files.borrow().iter() {
+ for fm in &*self.files.borrow() {
if filename == fm.name {
return fm.clone();
}
// The number of extra bytes due to multibyte chars in the FileMap
let mut total_extra_bytes = 0;
- for mbc in map.multibyte_chars.borrow().iter() {
+ for mbc in &*map.multibyte_chars.borrow() {
debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos);
if mbc.pos < bpos {
// every character is at least one byte, so we only
elided = true;
}
// Print the offending lines
- for &line_number in display_lines.iter() {
+ for &line_number in display_lines {
if let Some(line) = fm.get_line(line_number) {
try!(write!(&mut err.dst, "{}:{} {}\n", fm.name,
line_number + 1, line));
last_line_number + 1, last_line));
}
} else {
- for &line_number in lines.iter() {
+ for &line_number in lines {
if let Some(line) = fm.get_line(line_number) {
try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
line_number + 1, line));
bounds.push(cx.typarambound(trait_path.clone()));
// also add in any bounds from the declaration
- for declared_bound in ty_param.bounds.iter() {
+ for declared_bound in &*ty_param.bounds {
bounds.push((*declared_bound).clone());
}
subpats.push(p);
idents
};
- for self_arg_name in self_arg_names.tail().iter() {
+ for self_arg_name in self_arg_names.tail() {
let (p, idents) = mk_self_pat(cx, &self_arg_name[]);
subpats.push(p);
self_pats_idents.push(idents);
_ => cx.span_bug(trait_span, "impossible substructure in `derive(Hash)`")
};
- for &FieldInfo { ref self_, span, .. } in fields.iter() {
+ for &FieldInfo { ref self_, span, .. } in fields {
stmts.push(call_hash(span, self_.clone()));
}
let mut arms = Vec::new();
- for variant in enum_def.variants.iter() {
+ for variant in &enum_def.variants {
match variant.node.kind {
ast::TupleVariantKind(ref args) => {
if !args.is_empty() {
return it.expect_item();
}
- for attr in modifiers.iter() {
+ for attr in &modifiers {
let mname = attr.name();
match fld.cx.syntax_env.find(&intern(mname.get())) {
// does this attribute list contain "macro_use" ?
fn contains_macro_use(fld: &mut MacroExpander, attrs: &[ast::Attribute]) -> bool {
- for attr in attrs.iter() {
+ for attr in attrs {
let mut is_use = attr.check_name("macro_use");
if attr.check_name("macro_escape") {
fld.cx.span_warn(attr.span, "macro_escape is a deprecated synonym for macro_use");
ast::Pat { id: _, node: ast::PatIdent(_, ref path1, ref inner), span: _ } => {
self.ident_accumulator.push(path1.node);
// visit optional subpattern of PatIdent:
- for subpat in inner.iter() {
+ if let Some(ref subpat) = *inner {
self.visit_pat(&**subpat)
}
}
/// find the PatIdent paths in a
fn fn_decl_arg_bindings(fn_decl: &ast::FnDecl) -> Vec<ast::Ident> {
let mut pat_idents = PatIdentFinder{ident_accumulator:Vec::new()};
- for arg in fn_decl.inputs.iter() {
+ for arg in &fn_decl.inputs {
pat_idents.visit_pat(&*arg.pat);
}
pat_idents.ident_accumulator
let mut decorator_items = SmallVector::zero();
let mut new_attrs = Vec::new();
- for attr in a.attrs().iter() {
+ for attr in a.attrs() {
let mname = attr.name();
match fld.cx.syntax_env.find(&intern(mname.get())) {
return it
}
- for attr in modifiers.iter() {
+ for attr in &modifiers {
let mname = attr.name();
match fld.cx.syntax_env.find(&intern(mname.get())) {
self.ecx.expr_ident(e.span, name)));
heads.push(self.ecx.expr_addr_of(e.span, e));
}
- for name in self.name_ordering.iter() {
+ for name in &self.name_ordering {
let e = match self.names.remove(name) {
Some(e) => e,
None => continue
cx.ecx.span_err(cx.args[i].span, "argument never used");
}
}
- for (name, e) in cx.names.iter() {
+ for (name, e) in &cx.names {
if !cx.name_types.contains_key(name) {
cx.ecx.span_err(e.span, "named argument never used");
}
fn mk_tts(cx: &ExtCtxt, tts: &[ast::TokenTree]) -> Vec<P<ast::Stmt>> {
let mut ss = Vec::new();
- for tt in tts.iter() {
+ for tt in tts {
ss.extend(mk_tt(cx, tt).into_iter());
}
ss
ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut usize) {
match m {
&TtSequence(_, ref seq) => {
- for next_m in seq.tts.iter() {
+ for next_m in &seq.tts {
n_rec(p_s, next_m, res, ret_val, idx)
}
}
&TtDelimited(_, ref delim) => {
- for next_m in delim.tts.iter() {
+ for next_m in &delim.tts {
n_rec(p_s, next_m, res, ret_val, idx)
}
}
}
let mut ret_val = HashMap::new();
let mut idx = 0us;
- for m in ms.iter() { n_rec(p_s, m, res, &mut ret_val, &mut idx) }
+ for m in ms { n_rec(p_s, m, res, &mut ret_val, &mut idx) }
ret_val
}
_ => cx.span_bug(def.span, "wrong-structured lhs")
};
- for lhs in lhses.iter() {
+ for lhs in &lhses {
check_lhs_nt_follows(cx, &**lhs, def.span);
}
}
fn visit_item(&mut self, i: &ast::Item) {
- for attr in i.attrs.iter() {
+ for attr in &i.attrs {
if attr.name() == "thread_local" {
self.gate_feature("thread_local", i.span,
"`#[thread_local]` is an experimental feature, and does not \
let mut unknown_features = Vec::new();
- for attr in krate.attrs.iter() {
+ for attr in &krate.attrs {
if !attr.check_name("feature") {
continue
}
expected #![feature(...)]");
}
Some(list) => {
- for mi in list.iter() {
+ for mi in list {
let name = match mi.node {
ast::MetaWord(ref word) => (*word).clone(),
_ => {
let mut i = usize::MAX;
let mut can_trim = true;
let mut first = true;
- for line in lines.iter() {
+ for line in &lines {
for (j, c) in line.chars().enumerate() {
if j > i || !"* \t".contains_char(c) {
can_trim = false;
// one-line comments lose their prefix
static ONLINERS: &'static [&'static str] = &["///!", "///", "//!", "//"];
- for prefix in ONLINERS.iter() {
+ for prefix in ONLINERS {
if comment.starts_with(*prefix) {
return (&comment[prefix.len()..]).to_string();
}
// check that the given reader produces the desired stream
// of tokens (stop checking after exhausting the expected vec)
fn check_tokenization (mut string_reader: StringReader, expected: Vec<token::Token> ) {
- for expected_tok in expected.iter() {
+ for expected_tok in &expected {
assert_eq!(&string_reader.next_token().tok, expected_tok);
}
}
"impl z { fn a (self: Foo, &myarg: i32) {} }",
];
- for &src in srcs.iter() {
+ for &src in &srcs {
let spans = get_spans_of_pat_idents(src);
let Span{ lo, hi, .. } = spans[0];
assert!("self" == &src[lo.to_usize()..hi.to_usize()],
match self.token {
token::Eof => {
let open_braces = self.open_braces.clone();
- for sp in open_braces.iter() {
+ for sp in &open_braces {
self.span_help(*sp, "did you mean to close this delimiter?");
}
// There shouldn't really be a span, but it's easier for the test runner
Some(i) => {
let mut err = String::from_str("circular modules: ");
let len = included_mod_stack.len();
- for p in included_mod_stack[i.. len].iter() {
+ for p in &included_mod_stack[i.. len] {
err.push_str(&p.display().as_cow()[]);
err.push_str(" -> ");
}
{
try!(self.rbox(0us, b));
let mut first = true;
- for elt in elts.iter() {
+ for elt in elts {
if first { first = false; } else { try!(self.word_space(",")); }
try!(op(self, elt));
}
try!(self.rbox(0us, b));
let len = elts.len();
let mut i = 0us;
- for elt in elts.iter() {
+ for elt in elts {
try!(self.maybe_print_comment(get_span(elt).hi));
try!(op(self, elt));
i += 1us;
pub fn print_mod(&mut self, _mod: &ast::Mod,
attrs: &[ast::Attribute]) -> IoResult<()> {
try!(self.print_inner_attributes(attrs));
- for item in _mod.items.iter() {
+ for item in &_mod.items {
try!(self.print_item(&**item));
}
Ok(())
pub fn print_foreign_mod(&mut self, nmod: &ast::ForeignMod,
attrs: &[ast::Attribute]) -> IoResult<()> {
try!(self.print_inner_attributes(attrs));
- for item in nmod.items.iter() {
+ for item in &nmod.items {
try!(self.print_foreign_item(&**item));
}
Ok(())
pub fn print_opt_lifetime(&mut self,
lifetime: &Option<ast::Lifetime>) -> IoResult<()> {
- for l in lifetime.iter() {
- try!(self.print_lifetime(l));
+ if let Some(l) = *lifetime {
+ try!(self.print_lifetime(&l));
try!(self.nbsp());
}
Ok(())
ast::ItemExternCrate(ref optional_path) => {
try!(self.head(&visibility_qualified(item.vis,
"extern crate")[]));
- for &(ref p, style) in optional_path.iter() {
+ if let Some((ref p, style)) = *optional_path {
try!(self.print_string(p.get(), style));
try!(space(&mut self.s));
try!(word(&mut self.s, "as"));
try!(space(&mut self.s));
try!(self.bopen());
try!(self.print_inner_attributes(&item.attrs[]));
- for impl_item in impl_items.iter() {
+ for impl_item in impl_items {
match *impl_item {
ast::MethodImplItem(ref meth) => {
try!(self.print_method(&**meth));
try!(self.print_where_clause(generics));
try!(word(&mut self.s, " "));
try!(self.bopen());
- for meth in methods.iter() {
+ for meth in methods {
try!(self.print_trait_method(meth));
}
try!(self.bclose(item.span));
if !t.bound_lifetimes.is_empty() {
try!(word(&mut self.s, "for<"));
let mut comma = false;
- for lifetime_def in t.bound_lifetimes.iter() {
+ for lifetime_def in &t.bound_lifetimes {
if comma {
try!(self.word_space(","))
}
variants: &[P<ast::Variant>],
span: codemap::Span) -> IoResult<()> {
try!(self.bopen());
- for v in variants.iter() {
+ for v in variants {
try!(self.space_if_not_bol());
try!(self.maybe_print_comment(v.span.lo));
try!(self.print_outer_attributes(&v.node.attrs[]));
try!(self.bopen());
try!(self.hardbreak_if_not_bol());
- for field in struct_def.fields.iter() {
+ for field in &struct_def.fields {
match field.node.kind {
ast::UnnamedField(..) => panic!("unexpected unnamed field"),
ast::NamedField(ident, visibility) => {
},
ast::TtSequence(_, ref seq) => {
try!(word(&mut self.s, "$("));
- for tt_elt in seq.tts.iter() {
+ for tt_elt in &seq.tts {
try!(self.print_tt(tt_elt));
}
try!(word(&mut self.s, ")"));
pub fn print_outer_attributes(&mut self,
attrs: &[ast::Attribute]) -> IoResult<()> {
let mut count = 0us;
- for attr in attrs.iter() {
+ for attr in attrs {
match attr.node.style {
ast::AttrOuter => {
try!(self.print_attribute(attr));
pub fn print_inner_attributes(&mut self,
attrs: &[ast::Attribute]) -> IoResult<()> {
let mut count = 0us;
- for attr in attrs.iter() {
+ for attr in attrs {
match attr.node.style {
ast::AttrInner => {
try!(self.print_attribute(attr));
try!(self.print_inner_attributes(attrs));
- for st in blk.stmts.iter() {
+ for st in &blk.stmts {
try!(self.print_stmt(&**st));
}
match blk.expr {
try!(self.print_if_let(&**pat, &**expr, &** blk, elseopt.as_ref().map(|e| &**e)));
}
ast::ExprWhile(ref test, ref blk, opt_ident) => {
- for ident in opt_ident.iter() {
- try!(self.print_ident(*ident));
+ if let Some(ident) = opt_ident {
+ try!(self.print_ident(ident));
try!(self.word_space(":"));
}
try!(self.head("while"));
try!(self.print_block(&**blk));
}
ast::ExprWhileLet(ref pat, ref expr, ref blk, opt_ident) => {
- for ident in opt_ident.iter() {
- try!(self.print_ident(*ident));
+ if let Some(ident) = opt_ident {
+ try!(self.print_ident(ident));
try!(self.word_space(":"));
}
try!(self.head("while let"));
try!(self.print_block(&**blk));
}
ast::ExprForLoop(ref pat, ref iter, ref blk, opt_ident) => {
- for ident in opt_ident.iter() {
- try!(self.print_ident(*ident));
+ if let Some(ident) = opt_ident {
+ try!(self.print_ident(ident));
try!(self.word_space(":"));
}
try!(self.head("for"));
try!(self.print_block(&**blk));
}
ast::ExprLoop(ref blk, opt_ident) => {
- for ident in opt_ident.iter() {
- try!(self.print_ident(*ident));
+ if let Some(ident) = opt_ident {
+ try!(self.print_ident(ident));
try!(self.word_space(":"));
}
try!(self.head("loop"));
try!(self.print_expr(&**expr));
try!(space(&mut self.s));
try!(self.bopen());
- for arm in arms.iter() {
+ for arm in arms {
try!(self.print_arm(arm));
}
try!(self.bclose_(expr.span, indent_unit));
ast::ExprBreak(opt_ident) => {
try!(word(&mut self.s, "break"));
try!(space(&mut self.s));
- for ident in opt_ident.iter() {
- try!(self.print_ident(*ident));
+ if let Some(ident) = opt_ident {
+ try!(self.print_ident(ident));
try!(space(&mut self.s));
}
}
ast::ExprAgain(opt_ident) => {
try!(word(&mut self.s, "continue"));
try!(space(&mut self.s));
- for ident in opt_ident.iter() {
- try!(self.print_ident(*ident));
+ if let Some(ident) = opt_ident {
+ try!(self.print_ident(ident));
try!(space(&mut self.s))
}
}
}
let mut first = true;
- for segment in path.segments.iter() {
+ for segment in &path.segments {
if first {
first = false
} else {
try!(word(&mut self.s, "<"));
let mut comma = false;
- for lifetime in data.lifetimes.iter() {
+ for lifetime in &data.lifetimes {
if comma {
try!(self.word_space(","))
}
comma = true;
}
- for binding in data.bindings.iter() {
+ for binding in &*data.bindings {
if comma {
try!(self.word_space(","))
}
try!(self.commasep(Inconsistent,
&before[],
|s, p| s.print_pat(&**p)));
- for p in slice.iter() {
+ if let Some(ref p) = *slice {
if !before.is_empty() { try!(self.word_space(",")); }
try!(self.print_pat(&**p));
match **p {
try!(self.ibox(0us));
try!(self.print_outer_attributes(&arm.attrs[]));
let mut first = true;
- for p in arm.pats.iter() {
+ for p in &arm.pats {
if first {
first = false;
} else {
// self type and the args all in the same box.
try!(self.rbox(0us, Inconsistent));
let mut first = true;
- for &explicit_self in opt_explicit_self.iter() {
+ if let Some(explicit_self) = opt_explicit_self {
let m = match explicit_self {
&ast::SelfStatic => ast::MutImmutable,
_ => match decl.inputs[0].pat.node {
&decl.inputs[1..]
};
- for arg in args.iter() {
+ for arg in args {
if first { first = false; } else { try!(self.word_space(",")); }
try!(self.print_arg(arg));
}
if !bounds.is_empty() {
try!(word(&mut self.s, prefix));
let mut first = true;
- for bound in bounds.iter() {
+ for bound in bounds {
try!(self.nbsp());
if first {
first = false;
{
try!(self.print_lifetime(&lifetime.lifetime));
let mut sep = ":";
- for v in lifetime.bounds.iter() {
+ for v in &lifetime.bounds {
try!(word(&mut self.s, sep));
try!(self.print_lifetime(v));
sep = "+";
}
ast::LitBinary(ref v) => {
let mut escaped: String = String::new();
- for &ch in v.iter() {
+ for &ch in &**v {
ascii::escape_default(ch as u8,
|ch| escaped.push(ch as char));
}
}
comments::Isolated => {
try!(self.hardbreak_if_not_bol());
- for line in cmnt.lines.iter() {
+ for line in &cmnt.lines {
// Don't print empty lines because they will end up as trailing
// whitespace
if !line.is_empty() {
hardbreak(&mut self.s)
} else {
try!(self.ibox(0us));
- for line in cmnt.lines.iter() {
+ for line in &cmnt.lines {
if !line.is_empty() {
try!(word(&mut self.s, &line[]));
}
pub fn prefill(init: &[T]) -> Interner<T> {
let rv = Interner::new();
- for v in init.iter() {
+ for v in init {
rv.intern((*v).clone());
}
rv
pub fn prefill(init: &[&str]) -> StrInterner {
let rv = StrInterner::new();
- for &v in init.iter() { rv.intern(v); }
+ for &v in init { rv.intern(v); }
rv
}
pub fn walk_crate<'v, V: Visitor<'v>>(visitor: &mut V, krate: &'v Crate) {
visitor.visit_mod(&krate.module, krate.span, CRATE_NODE_ID);
- for attr in krate.attrs.iter() {
+ for attr in &krate.attrs {
visitor.visit_attribute(attr);
}
}
pub fn walk_mod<'v, V: Visitor<'v>>(visitor: &mut V, module: &'v Mod) {
- for item in module.items.iter() {
+ for item in &module.items {
visitor.visit_item(&**item)
}
}
pub fn walk_lifetime_def<'v, V: Visitor<'v>>(visitor: &mut V,
lifetime_def: &'v LifetimeDef) {
visitor.visit_name(lifetime_def.lifetime.span, lifetime_def.lifetime.name);
- for bound in lifetime_def.bounds.iter() {
+ for bound in &lifetime_def.bounds {
visitor.visit_lifetime_bound(bound);
}
}
visitor.visit_path(path, item.id);
}
ViewPathList(ref prefix, ref list) => {
- for id in list.iter() {
+ for id in list {
match id.node {
PathListIdent { name, .. } => {
visitor.visit_ident(id.span, name);
visitor.visit_mod(module, item.span, item.id)
}
ItemForeignMod(ref foreign_module) => {
- for foreign_item in foreign_module.items.iter() {
+ for foreign_item in &foreign_module.items {
visitor.visit_foreign_item(&**foreign_item)
}
}
None => ()
}
visitor.visit_ty(&**typ);
- for impl_item in impl_items.iter() {
+ for impl_item in impl_items {
match *impl_item {
MethodImplItem(ref method) => {
walk_method_helper(visitor, &**method)
ItemTrait(_, ref generics, ref bounds, ref methods) => {
visitor.visit_generics(generics);
walk_ty_param_bounds_helper(visitor, bounds);
- for method in methods.iter() {
+ for method in methods {
visitor.visit_trait_item(method)
}
}
ItemMac(ref mac) => visitor.visit_mac(mac),
}
- for attr in item.attrs.iter() {
+ for attr in &item.attrs {
visitor.visit_attribute(attr);
}
}
pub fn walk_enum_def<'v, V: Visitor<'v>>(visitor: &mut V,
enum_definition: &'v EnumDef,
generics: &'v Generics) {
- for variant in enum_definition.variants.iter() {
+ for variant in &enum_definition.variants {
visitor.visit_variant(&**variant, generics);
}
}
match variant.node.kind {
TupleVariantKind(ref variant_arguments) => {
- for variant_argument in variant_arguments.iter() {
+ for variant_argument in variant_arguments {
visitor.visit_ty(&*variant_argument.ty)
}
}
Some(ref expr) => visitor.visit_expr(&**expr),
None => ()
}
- for attr in variant.node.attrs.iter() {
+ for attr in &variant.node.attrs {
visitor.visit_attribute(attr);
}
}
visitor.visit_ty(&*mutable_type.ty)
}
TyTup(ref tuple_element_types) => {
- for tuple_element_type in tuple_element_types.iter() {
+ for tuple_element_type in tuple_element_types {
visitor.visit_ty(&**tuple_element_type)
}
}
TyBareFn(ref function_declaration) => {
- for argument in function_declaration.decl.inputs.iter() {
+ for argument in &function_declaration.decl.inputs {
visitor.visit_ty(&*argument.ty)
}
walk_fn_ret_ty(visitor, &function_declaration.decl.output);
pub fn walk_lifetime_decls_helper<'v, V: Visitor<'v>>(visitor: &mut V,
lifetimes: &'v Vec<LifetimeDef>) {
- for l in lifetimes.iter() {
+ for l in lifetimes {
visitor.visit_lifetime_def(l);
}
}
pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path) {
- for segment in path.segments.iter() {
+ for segment in &path.segments {
visitor.visit_path_segment(path.span, segment);
}
}
path_parameters: &'v PathParameters) {
match *path_parameters {
ast::AngleBracketedParameters(ref data) => {
- for typ in data.types.iter() {
+ for typ in &*data.types {
visitor.visit_ty(&**typ);
}
- for lifetime in data.lifetimes.iter() {
+ for lifetime in &data.lifetimes {
visitor.visit_lifetime_ref(lifetime);
}
- for binding in data.bindings.iter() {
+ for binding in &*data.bindings {
visitor.visit_assoc_type_binding(&**binding);
}
}
ast::ParenthesizedParameters(ref data) => {
- for typ in data.inputs.iter() {
+ for typ in &data.inputs {
visitor.visit_ty(&**typ);
}
- for typ in data.output.iter() {
+ if let Some(ref typ) = data.output {
visitor.visit_ty(&**typ);
}
}
match pattern.node {
PatEnum(ref path, ref children) => {
visitor.visit_path(path, pattern.id);
- for children in children.iter() {
- for child in children.iter() {
- visitor.visit_pat(&**child)
+ if let Some(ref children) = *children {
+ for child in children {
+ visitor.visit_pat(&*child)
}
}
}
PatStruct(ref path, ref fields, _) => {
visitor.visit_path(path, pattern.id);
- for field in fields.iter() {
+ for field in fields {
visitor.visit_pat(&*field.node.pat)
}
}
PatTup(ref tuple_elements) => {
- for tuple_element in tuple_elements.iter() {
+ for tuple_element in tuple_elements {
visitor.visit_pat(&**tuple_element)
}
}
}
PatWild(_) => (),
PatVec(ref prepattern, ref slice_pattern, ref postpatterns) => {
- for prepattern in prepattern.iter() {
+ for prepattern in prepattern {
visitor.visit_pat(&**prepattern)
}
- for slice_pattern in slice_pattern.iter() {
+ if let Some(ref slice_pattern) = *slice_pattern {
visitor.visit_pat(&**slice_pattern)
}
- for postpattern in postpatterns.iter() {
+ for postpattern in postpatterns {
visitor.visit_pat(&**postpattern)
}
}
ForeignItemStatic(ref typ, _) => visitor.visit_ty(&**typ),
}
- for attr in foreign_item.attrs.iter() {
+ for attr in &foreign_item.attrs {
visitor.visit_attribute(attr);
}
}
pub fn walk_ty_param_bounds_helper<'v, V: Visitor<'v>>(visitor: &mut V,
bounds: &'v OwnedSlice<TyParamBound>) {
- for bound in bounds.iter() {
+ for bound in &**bounds {
visitor.visit_ty_param_bound(bound)
}
}
}
pub fn walk_generics<'v, V: Visitor<'v>>(visitor: &mut V, generics: &'v Generics) {
- for type_parameter in generics.ty_params.iter() {
+ for type_parameter in &*generics.ty_params {
walk_ty_param(visitor, type_parameter);
}
walk_lifetime_decls_helper(visitor, &generics.lifetimes);
- for predicate in generics.where_clause.predicates.iter() {
+ for predicate in &generics.where_clause.predicates {
match predicate {
&ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate{ref bounded_ty,
ref bounds,
..}) => {
visitor.visit_lifetime_ref(lifetime);
- for bound in bounds.iter() {
+ for bound in bounds {
visitor.visit_lifetime_ref(bound);
}
}
}
pub fn walk_fn_decl<'v, V: Visitor<'v>>(visitor: &mut V, function_declaration: &'v FnDecl) {
- for argument in function_declaration.inputs.iter() {
+ for argument in &function_declaration.inputs {
visitor.visit_pat(&*argument.pat);
visitor.visit_ty(&*argument.ty)
}
&**body,
method.span,
method.id);
- for attr in method.attrs.iter() {
+ for attr in &method.attrs {
visitor.visit_attribute(attr);
}
pub fn walk_ty_method<'v, V: Visitor<'v>>(visitor: &mut V, method_type: &'v TypeMethod) {
visitor.visit_ident(method_type.span, method_type.ident);
visitor.visit_explicit_self(&method_type.explicit_self);
- for argument_type in method_type.decl.inputs.iter() {
+ for argument_type in &method_type.decl.inputs {
visitor.visit_ty(&*argument_type.ty)
}
visitor.visit_generics(&method_type.generics);
walk_fn_ret_ty(visitor, &method_type.decl.output);
- for attr in method_type.attrs.iter() {
+ for attr in &method_type.attrs {
visitor.visit_attribute(attr);
}
}
pub fn walk_struct_def<'v, V: Visitor<'v>>(visitor: &mut V,
struct_definition: &'v StructDef) {
- for field in struct_definition.fields.iter() {
+ for field in &struct_definition.fields {
visitor.visit_struct_field(field)
}
}
visitor.visit_ty(&*struct_field.node.ty);
- for attr in struct_field.node.attrs.iter() {
+ for attr in &struct_field.node.attrs {
visitor.visit_attribute(attr);
}
}
pub fn walk_block<'v, V: Visitor<'v>>(visitor: &mut V, block: &'v Block) {
- for statement in block.stmts.iter() {
+ for statement in &block.stmts {
visitor.visit_stmt(&**statement)
}
walk_expr_opt(visitor, &block.expr)
}
pub fn walk_exprs<'v, V: Visitor<'v>>(visitor: &mut V, expressions: &'v [P<Expr>]) {
- for expression in expressions.iter() {
+ for expression in expressions {
visitor.visit_expr(&**expression)
}
}
}
ExprStruct(ref path, ref fields, ref optional_base) => {
visitor.visit_path(path, expression.id);
- for field in fields.iter() {
+ for field in fields {
visitor.visit_expr(&*field.expr)
}
walk_expr_opt(visitor, optional_base)
}
ExprTup(ref subexpressions) => {
- for subexpression in subexpressions.iter() {
+ for subexpression in subexpressions {
visitor.visit_expr(&**subexpression)
}
}
ExprCall(ref callee_expression, ref arguments) => {
- for argument in arguments.iter() {
+ for argument in arguments {
visitor.visit_expr(&**argument)
}
visitor.visit_expr(&**callee_expression)
}
ExprMethodCall(_, ref types, ref arguments) => {
walk_exprs(visitor, arguments.as_slice());
- for typ in types.iter() {
+ for typ in types {
visitor.visit_ty(&**typ)
}
}
ExprLoop(ref block, _) => visitor.visit_block(&**block),
ExprMatch(ref subexpression, ref arms, _) => {
visitor.visit_expr(&**subexpression);
- for arm in arms.iter() {
+ for arm in arms {
visitor.visit_arm(arm)
}
}
visitor.visit_expr(&**subexpression)
}
ExprInlineAsm(ref ia) => {
- for input in ia.inputs.iter() {
+ for input in &ia.inputs {
let (_, ref input) = *input;
visitor.visit_expr(&**input)
}
- for output in ia.outputs.iter() {
+ for output in &ia.outputs {
let (_, ref output, _) = *output;
visitor.visit_expr(&**output)
}
}
pub fn walk_arm<'v, V: Visitor<'v>>(visitor: &mut V, arm: &'v Arm) {
- for pattern in arm.pats.iter() {
+ for pattern in &arm.pats {
visitor.visit_pat(&**pattern)
}
walk_expr_opt(visitor, &arm.guard);
visitor.visit_expr(&*arm.body);
- for attr in arm.attrs.iter() {
+ for attr in &arm.attrs {
visitor.visit_attribute(attr);
}
}
*dst = (*src).clone();
}
- for &c in cap.iter() {
+ for &c in cap {
let cur = c as char;
let mut old_state = state;
match state {
}
let caps = ["%d", "%c", "%s", "%Pa", "%l", "%!", "%~"];
- for &cap in caps.iter() {
+ for &cap in &caps {
let res = get_res("", cap, &[], vars);
assert!(res.is_err(),
"Op {} succeeded incorrectly with 0 stack entries", cap);
"Op {} failed with 1 stack entry: {}", cap, res.err().unwrap());
}
let caps = ["%+", "%-", "%*", "%/", "%m", "%&", "%|", "%A", "%O"];
- for &cap in caps.iter() {
+ for &cap in &caps {
let res = expand(cap.as_bytes(), &[], vars);
assert!(res.is_err(),
"Binop {} succeeded incorrectly with 0 stack entries", cap);
#[test]
fn test_comparison_ops() {
let v = [('<', [1u8, 0u8, 0u8]), ('=', [0u8, 1u8, 0u8]), ('>', [0u8, 0u8, 1u8])];
- for &(op, bs) in v.iter() {
+ for &(op, bs) in &v {
let s = format!("%{{1}}%{{2}}%{}%d", op);
let res = expand(s.as_bytes(), &[], &mut Variables::new());
assert!(res.is_ok(), res.err().unwrap());
};
// Look for the terminal in all of the search directories
- for p in dirs_to_search.iter() {
+ for p in &dirs_to_search {
if p.exists() {
let f = first_char.to_string();
let newp = p.join_many(&[&f[], term]);
try!(self.write_plain("\nfailures:\n"));
let mut failures = Vec::new();
let mut fail_out = String::new();
- for &(ref f, ref stdout) in self.failures.iter() {
+ for &(ref f, ref stdout) in &self.failures {
failures.push(f.name.to_string());
if stdout.len() > 0 {
fail_out.push_str(format!("---- {} stdout ----\n\t",
try!(self.write_plain("\nfailures:\n"));
failures.sort();
- for name in failures.iter() {
+ for name in &failures {
try!(self.write_plain(format!(" {}\n",
name.as_slice()).as_slice()));
}
TrMetrics(mm) => {
let tname = test.name.as_slice();
let MetricMap(mm) = mm;
- for (k,v) in mm.iter() {
+ for (k,v) in &mm {
st.metrics
.insert_metric(format!("{}.{}",
tname,
{
fn testfn() { }
let mut tests = Vec::new();
- for name in names.iter() {
+ for name in &names {
let test = TestDescAndFn {
desc: TestDesc {
name: DynTestName((*name).clone()),
fn sum(&self) -> T {
let mut partials = vec![];
- for &x in self.iter() {
+ for &x in self {
let mut x = x;
let mut j = 0;
// This inner loop applies `hi`/`lo` summation to each
} else {
let mean = self.mean();
let mut v: T = Float::zero();
- for s in self.iter() {
+ for s in self {
let x = *s - mean;
v = v + x*x;
}
// First check the canonical decompositions
match bsearch_table(c, canonical_table) {
Some(canon) => {
- for x in canon.iter() {
+ for x in canon {
d(*x, i, k);
}
return;
// Then check the compatibility decompositions
match bsearch_table(c, compatibility_table) {
Some(compat) => {
- for x in compat.iter() {
+ for x in compat {
d(*x, i, k);
}
return;
let cur = self.cur_items.get(self.cur_idx).unwrap();
let mut section = "".to_string();
- for &(_, idx) in self.stack.iter() {
+ for &(_, idx) in &self.stack {
section.push_str(&(idx + 1).to_string()[]);
section.push('.');
}
-> B {
let eq_fn = lst.eq_fn;
let data = lst.data.borrow();
- for entry in (*data).iter() {
+ for entry in &(*data) {
if eq_fn(entry.key.clone(), k.clone()) {
return entry.value.clone();
}
fn print_complements() {
let all = [Blue, Red, Yellow];
- for aa in all.iter() {
- for bb in all.iter() {
+ for aa in &all {
+ for bb in &all {
println!("{:?} + {:?} -> {:?}", *aa, *bb, transform(*aa, *bb));
}
}
fn show_color_list(set: Vec<Color>) -> String {
let mut out = String::new();
- for col in set.iter() {
+ for col in &set {
out.push(' ');
out.push_str(format!("{:?}", col).as_slice());
}
fn sum_and_scale(a: &'static [AminoAcid]) -> Vec<AminoAcid> {
let mut result = Vec::new();
let mut p = 0f32;
- for a_i in a.iter() {
+ for a_i in a {
let mut a_i = *a_i;
p += a_i.p;
a_i.p = p * LOOKUP_SCALE;
fn nextc(&mut self) -> u8 {
let r = self.rng(1.0);
- for a in self.lookup.iter() {
+ for a in &self.lookup[] {
if a.p >= r {
return a.c;
}
let mut pairs = Vec::new();
// map -> [(k,%)]
- for (key, &val) in mm.iter() {
+ for (key, &val) in mm {
pairs.push(((*key).clone(), pct(val, total)));
}
let pairs_sorted = sortKV(pairs);
let mut buffer = String::new();
- for &(ref k, v) in pairs_sorted.iter() {
+ for &(ref k, v) in &pairs_sorted {
buffer.push_str(format!("{:?} {:0.3}\n",
k.to_ascii_uppercase(),
v).as_slice());
vector.as_mut_slice().sort();
let mut total_count = 0;
- for &(count, _) in vector.iter() {
+ for &(count, _) in &vector {
total_count += count;
}
(i + 1) * chunk_size
};
- for &init_i in vec_init_i[start..end].iter() {
+ for &init_i in &vec_init_i[start..end] {
write_line(init_i, init_r_slice, &mut res);
}
// dx) is on the board.
fn mask(dy: i32, dx: i32, id: usize, p: &Vec<(i32, i32)>) -> Option<u64> {
let mut m = 1 << (50 + id);
- for &(y, x) in p.iter() {
+ for &(y, x) in p {
let x = x + dx + (y + (dy % 2)) / 2;
if x < 0 || x > 4 {return None;}
let y = y + dy;
if board & 1 << i != 0 { continue; }
for (cur_id, pos_masks) in masks_at.iter().enumerate() {
if board & 1 << (50 + cur_id) != 0 { continue; }
- for &cur_m in pos_masks.iter() {
+ for &cur_m in pos_masks {
if cur_m & board != 0 { continue; }
coverable |= cur_m;
// if every coordinates can be covered and every
let mut px = 0.0;
let mut py = 0.0;
let mut pz = 0.0;
- for bi in bodies.iter() {
+ for bi in &*bodies {
px += bi.vx * bi.mass;
py += bi.vy * bi.mass;
pz += bi.vz * bi.mass;
let f = Foo {
a: [box 3, box 4, box 5],
};
- for &a in f.a.iter() { //~ ERROR cannot move out
+ for &a in &f.a { //~ ERROR cannot move out
}
let x = Some(box 1);
fn main() {
let mut vector = vec![1us, 2];
- for &x in vector.iter() {
+ for &x in &vector {
let cap = vector.capacity();
vector.extend(repeat(0)); //~ ERROR cannot borrow
vector[1us] = 5us; //~ ERROR cannot borrow
impl Foo {
pub fn foo<F>(&mut self, mut fun: F) where F: FnMut(&isize) {
- for f in self.n.iter() {
+ for f in &self.n {
fun(f);
}
}
let a = "".to_string();
let b: Vec<&str> = a.lines().collect();
drop(a); //~ ERROR cannot move out of `a` because it is borrowed
- for s in b.iter() {
+ for s in &b {
println!("{}", *s);
}
}
//~^ ERROR borrowed value does not live long enough
];
- for &&x in v.iter() {
+ for &&x in &v {
println!("{}", x + 3);
}
}
}
fn set(&mut self, bits: &[T]) {
- for &bit in bits.iter() {
+ for &bit in bits {
self.set(bit)
}
}
impl<A> vec_monad<A> for Vec<A> {
fn bind<B, F>(&self, mut f: F) where F: FnMut(A) -> Vec<B> {
let mut r = panic!();
- for elt in self.iter() { r = r + f(*elt); }
+ for elt in self { r = r + f(*elt); }
//~^ ERROR the type of this value must be known
}
}
fn fail_len(v: Vec<isize> ) -> usize {
let mut i = 3;
panic!();
- for x in v.iter() { i += 1us; }
+ for x in &v { i += 1us; }
//~^ ERROR: unreachable statement
return i;
}
('c', 'd'),
('e', 'f')];
- for &(x,y) in v.iter() {} // should be OK
+ for &(x,y) in &v {} // should be OK
// Make sure none of the errors above were fatal
let x: char = true; //~ ERROR mismatched types
};
list_database(push_id);
- for l in lines_to_use.iter() {
+ for l in &lines_to_use {
println!("{}", l.local_path);
}
pub fn list_database<F>(mut f: F) where F: FnMut(&CrateId) {
let stuff = ["foo", "bar"];
- for l in stuff.iter() {
+ for l in &stuff {
f(&CrateId::new(*l));
}
}
z: true
};
- for &Struct { x, y, z } in [s].iter() {
+ for &Struct { x, y, z } in &[s] {
zzz(); // #break
}
let tuple: (i8, u8, i16, u16, i32, u32, i64, u64, f32, f64) =
(0x6f, 0x70, -113, 114, -115, 116, -117, 118, 119.5, 120.5);
- for &(_i8, _u8, _i16, _u16, _i32, _u32, _i64, _u64, _f32, _f64) in [tuple].iter() {
+ for &(_i8, _u8, _i16, _u16, _i32, _u32, _i64, _u64, _f32, _f64) in &[tuple] {
zzz(); // #break
}
let x = 1000000; // wan meeeljen doollaars!
- for &x in range.iter() {
+ for &x in &range {
zzz(); // #break
sentinel();
match (20i32, 'c') {
(a, ref b) => {}
}
- for a in [111i32].iter() {}
+ for a in &[111i32] {}
}
fn after_panic() {
match (20i32, 'c') {
(a, ref b) => {}
}
- for a in [111i32].iter() {}
+ for a in &[111i32] {}
}
fn after_diverging_function() {
match (20i32, 'c') {
(a, ref b) => {}
}
- for a in [111i32].iter() {}
+ for a in &[111i32] {}
}
fn after_break() {
match (20i32, 'c') {
(a, ref b) => {}
}
- for a in [111i32].iter() {}
+ for a in &[111i32] {}
}
}
match (20i32, 'c') {
(a, ref b) => {}
}
- for a in [111i32].iter() {}
+ for a in &[111i32] {}
}
}
'\u2000', '\u2001', '\u2002', '\u2003', '\u2004', '\u2005', '\u2006',
'\u2007', '\u2008', '\u2009', '\u200A', '\u2028', '\u2029', '\u202F',
'\u205F', '\u3000'];
- for c in chars.iter() {
+ for c in &chars {
let ws = c.is_whitespace();
println!("{} {}" , c , ws);
}
'\u2000', '\u2001', '\u2002', '\u2003', '\u2004', '\u2005', '\u2006',
'\u2007', '\u2008', '\u2009', '\u200A', '\u2028', '\u2029', '\u202F',
'\u205F', '\u3000'];
- for c in chars.iter() {
+ for c in &chars {
let ws = c.is_whitespace();
println!("{} {}", c , ws);
}
fn f(v: &[int]) -> int {
let mut n = 0;
- for e in v.iter() {
+ for e in v {
n = *e; // This comment once triggered pretty printer bug
}
pub fn main() {
let mut sum = 0;
let xs = vec!(1, 2, 3, 4, 5);
- for x in xs.iter() {
+ for x in &xs {
sum += *x;
}
assert_eq!(sum, 15);
let v = vec!(-1.0f64, 0.0, 1.0, 2.0, 3.0);
// Statement form does not require parentheses:
- for i in v.iter() {
+ for i in &v {
println!("{}", *i);
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-fn iter_vec<T, F>(v: Vec<T> , mut f: F) where F: FnMut(&T) { for x in v.iter() { f(x); } }
+fn iter_vec<T, F>(v: Vec<T> , mut f: F) where F: FnMut(&T) { for x in &v { f(x); } }
pub fn main() {
let v = vec![1i32, 2, 3, 4, 5, 6, 7];
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-fn iter_vec<T, F>(v: Vec<T>, mut f: F) where F: FnMut(&T) { for x in v.iter() { f(x); } }
+fn iter_vec<T, F>(v: Vec<T>, mut f: F) where F: FnMut(&T) { for x in &v { f(x); } }
pub fn main() {
let v = vec![1i32, 2, 3, 4, 5];
fn want_slice(v: &[int]) -> int {
let mut sum = 0;
- for i in v.iter() { sum += *i; }
+ for i in v { sum += *i; }
sum
}
loop { i += 1; if i == 20 { break; } }
assert_eq!(i, 20);
let xs = [1, 2, 3, 4, 5, 6];
- for x in xs.iter() {
+ for x in &xs {
if *x == 3 { break; } assert!((*x <= 3));
}
i = 0;
if i >= 10 { break; }
}
let ys = vec!(1, 2, 3, 4, 5, 6);
- for x in ys.iter() {
+ for x in &ys {
if *x % 2 == 0 { continue; }
assert!((*x % 2 != 0));
}
// except according to those terms.
fn each<T, F>(x: &[T], mut f: F) where F: FnMut(&T) {
- for val in x.iter() {
+ for val in x {
f(val)
}
}
#![feature(unboxed_closures)]
fn each<'a,T,F:FnMut(&'a T)>(x: &'a [T], mut f: F) {
- for val in x.iter() {
+ for val in x {
f(val)
}
}
pub fn main() {
// The array containing [AddFlags] should not be dropped until
// after the for loop:
- for x in [AddFlags(1)].iter() {
+ for x in &[AddFlags(1)] {
check_flags(0);
}
check_flags(1);
fn sum(x: &[int]) -> int {
let mut sum = 0;
- for y in x.iter() { sum += *y; }
+ for y in x { sum += *y; }
return sum;
}
pub fn main() {
unsafe {
- for &bare_fn in bare_fns.iter() { bare_fn() }
+ for &bare_fn in bare_fns { bare_fn() }
for closure in closures.iter_mut() {
let S(ref mut closure) = *closure;
(*closure)()
impl Box {
pub fn set_many(&mut self, xs: &[uint]) {
- for x in xs.iter() { self.x = *x; }
+ for x in xs { self.x = *x; }
}
}
pub fn main() {
let v : &[(int,int)] = &[ (1, 2), (3, 4), (5, 6) ];
- for &(x, y) in v.iter() {
+ for &(x, y) in v {
println!("{}", y);
println!("{}", x);
}
struct Pair { x: int, y: int }
pub fn main() {
- for elt in (vec!(Pair {x: 10, y: 20}, Pair {x: 30, y: 0})).iter() {
+ for elt in &(vec!(Pair {x: 10, y: 20}, Pair {x: 30, y: 0})) {
assert_eq!(elt.x + elt.y, 30);
}
}
pub fn main() {
let x = [ 3, 3, 3 ];
- for i in x.iter() {
+ for i in &x {
assert_eq!(*i, 3);
}
}
// except according to those terms.
-pub fn main() { let x: Vec<int> = Vec::new(); for _ in x.iter() { panic!("moop"); } }
+pub fn main() { let x: Vec<int> = Vec::new(); for _ in &x { panic!("moop"); } }
pub fn main() {
let x = [1; 100];
let mut y = 0;
- for i in x.iter() {
+ for i in &x[] {
if y > 10 {
break;
}
pub fn main() {
let mut h = HashMap::new();
let kvs = [(1, 10), (2, 20), (3, 30)];
- for &(k,v) in kvs.iter() {
+ for &(k,v) in &kvs {
h.insert(k,v);
}
let mut x = 0;
pub fn main() {
let mut h = HashMap::new();
let kvs = [(1, 10), (2, 20), (3, 30)];
- for &(k,v) in kvs.iter() {
+ for &(k,v) in &kvs {
h.insert(k,v);
}
let mut x = 0;
let mut y = 0;
- for (&k,&v) in h.iter() {
+ for (&k,&v) in &h {
x += k;
y += v;
}
let y = [2; 100];
let mut p = 0;
let mut q = 0;
- for i in x.iter() {
- for j in y.iter() {
+ for i in &x[] {
+ for j in &y[] {
p += *j;
}
q += *i + p;
pub fn main() {
let x = [1; 100];
let mut y = 0;
- for i in x.iter() {
+ for i in &x[] {
y += *i
}
assert!(y == 100);
impl<T> vec_utils<T> for Vec<T> {
fn map_<U, F>(x: &Vec<T> , mut f: F) -> Vec<U> where F: FnMut(&T) -> U {
let mut r = Vec::new();
- for elt in x.iter() {
+ for elt in x {
r.push(f(elt));
}
r
enum ctrl_proto { find_reducer(Vec<u8>, Sender<int>), mapper_done, }
fn start_mappers(ctrl: Sender<ctrl_proto>, inputs: Vec<String>) {
- for i in inputs.iter() {
+ for i in &inputs {
let ctrl = ctrl.clone();
let i = i.clone();
Thread::spawn(move|| map_task(ctrl.clone(), i.clone()) );
}
fn f(arr: &[&Foo]) {
- for &f in arr.iter() {
+ for &f in arr {
println!("{:?}", f);
}
}
// Test that regionck creates the right region links in the pattern
// binding of a for loop
fn foo<'a>(v: &'a [uint]) -> &'a uint {
- for &ref x in v.iter() { return x; }
+ for &ref x in v { return x; }
unreachable!()
}
let mut line = [0; 10];
input.read(&mut line);
let mut row = Vec::new();
- for c in line.iter() {
+ for c in &line {
row.push(square_from_char(*c as char))
}
grid.push(row);
let width = grid[0].len();
- for row in grid.iter() { assert!(row.len() == width) }
+ for row in &grid { assert!(row.len() == width) }
grid
}
}
fn print_str_vector(vector: Vec<String> ) {
- for string in vector.iter() {
+ for string in &vector {
println!("{}", *string);
}
}
trait Canvas {
fn add_point(&self, point: &int);
fn add_points(&self, shapes: &[int]) {
- for pt in shapes.iter() {
+ for pt in shapes {
self.add_point(pt)
}
}
// Unlike interfaces traits support default implementations.
// Got an ICE as soon as I added this method.
fn add_points(&mut self, shapes: &[Point]) {
- for pt in shapes.iter() {self.add_point(*pt)};
+ for pt in shapes {self.add_point(*pt)};
}
}
let mut res = "*".to_string();
res.push_str(cmd.len().to_string());
res.push_str("\r\n");
- for s in cmd.iter() {
+ for s in &cmd {
res.push_str(["$".to_string(), s.len().to_string(), "\r\n".to_string(),
(*s).clone(), "\r\n".to_string()].concat() );
}
use std::os;
pub fn main() {
- for arg in os::args().iter() {
+ for arg in &os::args() {
match (*arg).clone() {
_s => { }
}
let bubbles = box Dog{name: "bubbles".to_string()};
let barker = [snoopy as Box<Barks>, bubbles as Box<Barks>];
- for pup in barker.iter() {
+ for pup in &barker {
println!("{}", pup.bark());
}
}
static V: &'static [X] = &[X { vec: &[1, 2, 3] }];
pub fn main() {
- for &v in V.iter() {
+ for &v in V {
println!("{:?}", v.vec);
}
}
pub fn main() {
let x = vec!(1, 2, 3);
let mut y = 0;
- for i in x.iter() { println!("{}", *i); y += *i; }
+ for i in &x { println!("{}", *i); y += *i; }
println!("{}", y);
assert_eq!(y, 6);
let s = "hello there".to_string();
fn main() {
let mut foo = Vec::new();
- 'foo: for i in [1, 2, 3].iter() {
+ 'foo: for i in &[1, 2, 3] {
foo.push(i);
}
}
pub fn main() {
let x = vec!(10, 20, 30);
let mut sum = 0;
- for x in x.iter() { sum += *x; }
+ for x in &x { sum += *x; }
assert_eq!(sum, 60);
}
impl<A> vec_monad<A> for Vec<A> {
fn bind<B, F>(&self, mut f: F) -> Vec<B> where F: FnMut(&A) -> Vec<B> {
let mut r = Vec::new();
- for elt in self.iter() {
+ for elt in self {
r.extend(f(elt).into_iter());
}
r
fn test2() {
let mut ints = [0; 32];
for i in ints.iter_mut() { *i += 22; }
- for i in ints.iter() { assert!(*i == 22); }
+ for i in &ints { assert!(*i == 22); }
}
pub fn main() {
type Output = V;
fn index<'a>(&'a self, index: &K) -> &'a V {
- for pair in self.pairs.iter() {
+ for pair in &self.pairs {
if pair.key == *index {
return &pair.value
}
assert_eq!(foos[i], Foo { bar: 1, baz: 2});
}
- for &foo in foos.iter() {
+ for &foo in &foos {
assert_eq!(foo, Foo { bar: 1, baz: 2 });
}
}
}
fn add_type(&mut self, s: TypeStructure<'tcx>) -> Type<'tcx> {
- for &ty in self.types.iter() {
+ for &ty in &self.types {
if *ty == s {
return ty;
}
let y = [ 0; 1 ];
print!("[");
- for xi in x.iter() {
+ for xi in &x[] {
print!("{:?}, ", &xi[]);
}
println!("]");
match t::none::<int> {
t::some::<int>(_) => {
- for _i in c.iter() {
+ for _i in &c {
println!("{}", a);
let a = 17;
b.push(a);
impl<T> vec_utils<T> for Vec<T> {
fn length_(&self) -> uint { self.len() }
- fn iter_<F>(&self, mut f: F) where F: FnMut(&T) { for x in self.iter() { f(x); } }
+ fn iter_<F>(&self, mut f: F) where F: FnMut(&T) { for x in self { f(x); } }
fn map_<U, F>(&self, mut f: F) -> Vec<U> where F: FnMut(&T) -> U {
let mut r = Vec::new();
- for elt in self.iter() {
+ for elt in self {
r.push(f(elt));
}
r
// Read from spawned tasks...
let mut sum = 0;
- for _r in results.iter() {
+ for _r in &results {
i = 0;
while i < number_of_messages {
let value = rx.recv().unwrap();
fn check_legs(arc: Arc<Vec<Box<Pet+Sync+Send>>>) {
let mut legs = 0;
- for pet in arc.iter() {
+ for pet in &*arc {
legs += pet.num_legs();
}
assert!(legs == 12);
}
fn check_names(arc: Arc<Vec<Box<Pet+Sync+Send>>>) {
- for pet in arc.iter() {
+ for pet in &*arc {
pet.name(box |name| {
assert!(name.as_bytes()[0] == 'a' as u8 && name.as_bytes()[1] == 'l' as u8);
})
}
}
fn check_pedigree(arc: Arc<Vec<Box<Pet+Sync+Send>>>) {
- for pet in arc.iter() {
+ for pet in &*arc {
assert!(pet.of_good_pedigree());
}
}
impl<T> map<T> for Vec<T> {
fn map<U, F>(&self, mut f: F) -> Vec<U> where F: FnMut(&T) -> U {
let mut r = Vec::new();
- for i in self.iter() {
+ for i in self {
r.push(f(i));
}
r
// ... and then panic on each and every single one.
for panic_countdown in 0..count {
// refresh the counters.
- for c in drop_counts.iter() {
+ for c in &drop_counts {
c.store(0, Ordering::Relaxed);
}