and librustpkg.
Output: &Path,
FileType: lib::llvm::FileType) {
unsafe {
- do Output.with_c_str |Output| {
+ Output.with_c_str(|Output| {
let result = llvm::LLVMRustWriteOutputFile(
Target, PM, M, Output, FileType);
if !result {
llvm_err(sess, ~"Could not write output");
}
- }
+ })
}
}
for cratepath in r.iter() {
debug!("linking: {}", cratepath.display());
- do cratepath.with_c_str |buf_t| {
+ cratepath.with_c_str(|buf_t| {
if !llvm::LLVMRustLoadCrate(manager, buf_t) {
llvm_err(sess, ~"Could not link");
}
debug!("linked: {}", cratepath.display());
- }
+ })
}
// We custom-build a JIT execution engine via some rust wrappers
// Next, we need to get a handle on the _rust_main function by
// looking up it's corresponding ValueRef and then requesting that
// the execution engine compiles the function.
- let fun = do "_rust_main".with_c_str |entry| {
+ let fun = "_rust_main".with_c_str(|entry| {
llvm::LLVMGetNamedFunction(m, entry)
- };
+ });
if fun.is_null() {
llvm::LLVMDisposeExecutionEngine(ee);
llvm::LLVMContextDispose(c);
llvm::LLVMInitializeMipsAsmParser();
if sess.opts.save_temps {
- do output.with_extension("no-opt.bc").with_c_str |buf| {
+ output.with_extension("no-opt.bc").with_c_str(|buf| {
llvm::LLVMWriteBitcodeToFile(llmod, buf);
- }
+ })
}
configure_llvm(sess);
};
let use_softfp = sess.opts.debugging_opts & session::use_softfp != 0;
- let tm = do sess.targ_cfg.target_strs.target_triple.with_c_str |T| {
- do sess.opts.target_cpu.with_c_str |CPU| {
- do sess.opts.target_feature.with_c_str |Features| {
+ let tm = sess.targ_cfg.target_strs.target_triple.with_c_str(|T| {
+ sess.opts.target_cpu.with_c_str(|CPU| {
+ sess.opts.target_feature.with_c_str(|Features| {
llvm::LLVMRustCreateTargetMachine(
T, CPU, Features,
lib::llvm::CodeModelDefault,
true,
use_softfp
)
- }
- }
- };
+ })
+ })
+ });
// Create the two optimizing pass managers. These mirror what clang
// does, and are by populated by LLVM's default PassManagerBuilder.
// If we're verifying or linting, add them to the function pass
// manager.
let addpass = |pass: &str| {
- do pass.with_c_str |s| { llvm::LLVMRustAddPass(fpm, s) }
+ pass.with_c_str(|s| llvm::LLVMRustAddPass(fpm, s))
};
if !sess.no_verify() { assert!(addpass("verify")); }
if sess.lint_llvm() { assert!(addpass("lint")); }
}
for pass in sess.opts.custom_passes.iter() {
- do pass.with_c_str |s| {
+ pass.with_c_str(|s| {
if !llvm::LLVMRustAddPass(mpm, s) {
sess.warn(format!("Unknown pass {}, ignoring", *pass));
}
- }
+ })
}
// Finally, run the actual optimization passes
llvm::LLVMDisposePassManager(mpm);
if sess.opts.save_temps {
- do output.with_extension("bc").with_c_str |buf| {
+ output.with_extension("bc").with_c_str(|buf| {
llvm::LLVMWriteBitcodeToFile(llmod, buf);
- }
+ })
}
if sess.opts.jit {
match output_type {
output_type_none => {}
output_type_bitcode => {
- do output.with_c_str |buf| {
+ output.with_c_str(|buf| {
llvm::LLVMWriteBitcodeToFile(llmod, buf);
- }
+ })
}
output_type_llvm_assembly => {
- do output.with_c_str |output| {
+ output.with_c_str(|output| {
llvm::LLVMRustPrintModule(cpm, llmod, output)
- }
+ })
}
output_type_assembly => {
WriteOutputFile(sess, tm, cpm, llmod, output, lib::llvm::AssemblyFile);
add(*arg);
}
- do llvm_args.as_imm_buf |p, len| {
+ llvm_args.as_imm_buf(|p, len| {
llvm::LLVMRustSetLLVMOptions(len as c_int, p);
- }
+ })
}
unsafe fn populate_llvm_passes(fpm: lib::llvm::PassManagerRef,
_ => {
let mut tstr = ~"";
- do char::escape_unicode(c) |c| { tstr.push_char(c); }
+ char::escape_unicode(c, |c| tstr.push_char(c));
result.push_char('$');
result.push_str(tstr.slice_from(1));
}
// Convert strings provided as --cfg [cfgspec] into a crate_cfg
fn parse_cfgspecs(cfgspecs: ~[~str], demitter: @diagnostic::Emitter)
-> ast::CrateConfig {
- do cfgspecs.move_iter().map |s| {
+ cfgspecs.move_iter().map(|s| {
let sess = parse::new_parse_sess(Some(demitter));
parse::parse_meta_from_source_str(@"cfgspec", s.to_managed(), ~[], sess)
- }.collect::<ast::CrateConfig>()
+ }).collect::<ast::CrateConfig>()
}
pub enum input {
// any items that do not belong in the current configuration
pub fn strip_unconfigured_items(crate: ast::Crate) -> ast::Crate {
let config = crate.config.clone();
- do strip_items(crate) |attrs| {
- in_cfg(config, attrs)
- }
+ strip_items(crate, |attrs| in_cfg(config, attrs))
}
impl<'self> fold::ast_fold for Context<'self> {
}
fn fold_mod(cx: &Context, m: &ast::_mod) -> ast::_mod {
- let filtered_items = do m.items.iter().filter_map |a| {
+ let filtered_items = m.items.iter().filter_map(|a| {
filter_item(cx, *a).and_then(|x| cx.fold_item(x))
- }.collect();
- let filtered_view_items = do m.view_items.iter().filter_map |a| {
- do filter_view_item(cx, a).map |x| {
- cx.fold_view_item(x)
- }
- }.collect();
+ }).collect();
+ let filtered_view_items = m.view_items.iter().filter_map(|a| {
+ filter_view_item(cx, a).map(|x| cx.fold_view_item(x))
+ }).collect();
ast::_mod {
view_items: filtered_view_items,
items: filtered_items
.iter()
.filter_map(|a| filter_foreign_item(cx, *a))
.collect();
- let filtered_view_items = do nm.view_items.iter().filter_map |a| {
- do filter_view_item(cx, a).map |x| {
- cx.fold_view_item(x)
- }
- }.collect();
+ let filtered_view_items = nm.view_items.iter().filter_map(|a| {
+ filter_view_item(cx, a).map(|x| cx.fold_view_item(x))
+ }).collect();
ast::foreign_mod {
abis: nm.abis,
view_items: filtered_view_items,
}
fn fold_block(cx: &Context, b: &ast::Block) -> ast::Block {
- let resulting_stmts = do b.stmts.iter().filter_map |a| {
+ let resulting_stmts = b.stmts.iter().filter_map(|a| {
filter_stmt(cx, *a).and_then(|stmt| cx.fold_stmt(stmt))
- }.collect();
- let filtered_view_items = do b.view_items.iter().filter_map |a| {
+ }).collect();
+ let filtered_view_items = b.view_items.iter().filter_map(|a| {
filter_view_item(cx, a).map(|x| cx.fold_view_item(x))
- }.collect();
+ }).collect();
ast::Block {
view_items: filtered_view_items,
stmts: resulting_stmts,
fn nomain(cx: @mut TestCtxt, item: @ast::item) -> @ast::item {
if !*cx.sess.building_library {
@ast::item {
- attrs: do item.attrs.iter().filter_map |attr| {
+ attrs: item.attrs.iter().filter_map(|attr| {
if "main" != attr.name() {
Some(*attr)
} else {
None
}
- }.collect(),
+ }).collect(),
.. (*item).clone()
}
} else {
fn strip_test_functions(crate: ast::Crate) -> ast::Crate {
// When not compiling with --test we should not compile the
// #[test] functions
- do config::strip_items(crate) |attrs| {
+ config::strip_items(crate, |attrs| {
!attr::contains_name(attrs, "test") &&
!attr::contains_name(attrs, "bench")
- }
+ })
}
fn is_test_fn(cx: @mut TestCtxt, i: @ast::item) -> bool {
}
fn is_ignored(cx: @mut TestCtxt, i: @ast::item) -> bool {
- do i.attrs.iter().any |attr| {
+ i.attrs.iter().any(|attr| {
// check ignore(cfg(foo, bar))
"ignore" == attr.name() && match attr.meta_item_list() {
Some(ref cfgs) => attr::test_cfg(cx.config, cfgs.iter().map(|x| *x)),
None => true
}
- }
+ })
}
fn should_fail(i: @ast::item) -> bool {
let odir = matches.opt_str("out-dir").map(|o| Path::new(o));
let ofile = matches.opt_str("o").map(|o| Path::new(o));
let cfg = build_configuration(sess);
- let pretty = do matches.opt_default("pretty", "normal").map |a| {
+ let pretty = matches.opt_default("pretty", "normal").map(|a| {
parse_pretty(sess, a)
- };
+ });
match pretty {
Some::<PpMode>(ppm) => {
pretty_print_input(sess, cfg, &input, ppm);
task_builder.opts.stack_size = Some(STACK_SIZE);
}
- match do task_builder.try {
+ match task_builder.try(|| {
let ch = ch_capture.clone();
// The 'diagnostics emitter'. Every error, warning, etc. should
// go through this function.
// Due reasons explain in #7732, if there was a jit execution context it
// must be consumed and passed along to our parent task.
back::link::jit::consume_engine()
- } {
+ }) {
result::Ok(_) => { /* fallthrough */ }
result::Err(_) => {
// Task failed without emitting a fatal diagnostic
pub fn main_args(args: &[~str]) -> int {
let owned_args = args.to_owned();
- do monitor |demitter| {
- run_compiler(owned_args, demitter);
- }
-
- return 0;
+ monitor(|demitter| run_compiler(owned_args, demitter));
+ 0
}
}
pub fn mk_target_data(string_rep: &str) -> TargetData {
- let lltd = do string_rep.with_c_str |buf| {
+ let lltd = string_rep.with_c_str(|buf| {
unsafe { llvm::LLVMCreateTargetData(buf) }
- };
+ });
TargetData {
lltd: lltd,
});
}
- let sorted = do extra::sort::merge_sort(result) |a, b| {
+ let sorted = extra::sort::merge_sort(result, |a, b| {
(a.name, a.vers, a.hash) <= (b.name, b.vers, b.hash)
- };
+ });
debug!("sorted:");
for x in sorted.iter() {
let belt = tag_index_buckets_bucket_elt;
let mut ret = None;
- do reader::tagged_docs(tagged_doc.doc, belt) |elt| {
+ reader::tagged_docs(tagged_doc.doc, belt, |elt| {
let pos = u64_from_be_bytes(*elt.data, elt.start, 4) as uint;
if eq_fn(elt.data.slice(elt.start + 4, elt.end)) {
ret = Some(reader::doc_at(d.data, pos).doc);
} else {
true
}
- };
+ });
ret
}
fn item_method_sort(item: ebml::Doc) -> char {
let mut ret = 'r';
- do reader::tagged_docs(item, tag_item_trait_method_sort) |doc| {
+ reader::tagged_docs(item, tag_item_trait_method_sort, |doc| {
ret = doc.as_str_slice()[0] as char;
false
- };
+ });
ret
}
fn item_parent_item(d: ebml::Doc) -> Option<ast::DefId> {
let mut ret = None;
- do reader::tagged_docs(d, tag_items_data_parent_item) |did| {
+ reader::tagged_docs(d, tag_items_data_parent_item, |did| {
ret = Some(reader::with_doc_data(did, parse_def_id));
false
- };
+ });
ret
}
}
fn get_provided_source(d: ebml::Doc, cdata: Cmd) -> Option<ast::DefId> {
- do reader::maybe_get_doc(d, tag_item_method_provided_source).map |doc| {
+ reader::maybe_get_doc(d, tag_item_method_provided_source).map(|doc| {
translate_def_id(cdata, reader::with_doc_data(doc, parse_def_id))
- }
+ })
}
fn each_reexport(d: ebml::Doc, f: |ebml::Doc| -> bool) -> bool {
}
fn variant_disr_val(d: ebml::Doc) -> Option<ty::Disr> {
- do reader::maybe_get_doc(d, tag_disr_val).and_then |val_doc| {
- do reader::with_doc_data(val_doc) |data| { u64::parse_bytes(data, 10u) }
- }
+ reader::maybe_get_doc(d, tag_disr_val).and_then(|val_doc| {
+ reader::with_doc_data(val_doc, |data| u64::parse_bytes(data, 10u))
+ })
}
fn doc_type(doc: ebml::Doc, tcx: ty::ctxt, cdata: Cmd) -> ty::t {
tcx: ty::ctxt,
cdata: Cmd) -> Option<ty::t>
{
- do reader::maybe_get_doc(doc, tag_item_method_transformed_self_ty).map |tp| {
+ reader::maybe_get_doc(doc, tag_item_method_transformed_self_ty).map(|tp| {
parse_ty_data(*tp.data, cdata.cnum, tp.start, tcx,
|_, did| translate_def_id(cdata, did))
- }
+ })
}
pub fn item_type(_item_id: ast::DefId, item: ebml::Doc,
tag: uint)
-> @~[ty::TypeParameterDef] {
let mut bounds = ~[];
- do reader::tagged_docs(item, tag) |p| {
+ reader::tagged_docs(item, tag, |p| {
let bd = parse_type_param_def_data(
*p.data, p.start, cdata.cnum, tcx,
|_, did| translate_def_id(cdata, did));
bounds.push(bd);
true
- };
+ });
@bounds
}
tcx: ty::ctxt,
cdata: Cmd)
-> @[ty::RegionParameterDef] {
- do at_vec::build(None) |push| {
- do reader::tagged_docs(item_doc, tag_region_param_def) |rp_doc| {
+ at_vec::build(None, |push| {
+ reader::tagged_docs(item_doc, tag_region_param_def, |rp_doc| {
let ident_str_doc = reader::get_doc(rp_doc,
tag_region_param_def_ident);
let ident = item_name(tcx.sess.intr(), ident_str_doc);
push(ty::RegionParameterDef { ident: ident,
def_id: def_id });
true
- };
- }
+ });
+ })
}
fn item_ty_param_count(item: ebml::Doc) -> uint {
fn enum_variant_ids(item: ebml::Doc, cdata: Cmd) -> ~[ast::DefId] {
let mut ids: ~[ast::DefId] = ~[];
let v = tag_items_data_item_variant;
- do reader::tagged_docs(item, v) |p| {
+ reader::tagged_docs(item, v, |p| {
let ext = reader::with_doc_data(p, parse_def_id);
ids.push(ast::DefId { crate: cdata.cnum, node: ext.node });
true
- };
+ });
return ids;
}
let len = reader::doc_as_u32(len_doc) as uint;
let mut result = vec::with_capacity(len);
- do reader::docs(path_doc) |tag, elt_doc| {
+ reader::docs(path_doc, |tag, elt_doc| {
if tag == tag_path_elt_mod {
let str = elt_doc.as_str_slice();
result.push(ast_map::path_mod(token::str_to_ident(str)));
// ignore tag_path_len element
}
true
- };
+ });
return result;
}
let mut bounds = ty::EmptyBuiltinBounds();
// Collect the builtin bounds from the encoded supertraits.
// FIXME(#8559): They should be encoded directly.
- do reader::tagged_docs(item_doc, tag_item_super_trait_ref) |trait_doc| {
+ reader::tagged_docs(item_doc, tag_item_super_trait_ref, |trait_doc| {
// NB. Bypasses real supertraits. See get_supertraits() if you wanted them.
let trait_ref = doc_trait_ref(trait_doc, tcx, cdata);
- do tcx.lang_items.to_builtin_kind(trait_ref.def_id).map |bound| {
+ tcx.lang_items.to_builtin_kind(trait_ref.def_id).map(|bound| {
bounds.add(bound);
- };
+ });
true
- };
+ });
ty::TraitDef {
generics: ty::Generics {type_param_defs: tp_defs,
region_param_defs: rp_defs},
tcx: ty::ctxt) -> Option<@ty::TraitRef>
{
let item_doc = lookup_item(id, cdata.data);
- do reader::maybe_get_doc(item_doc, tag_item_trait_ref).map |tp| {
+ reader::maybe_get_doc(item_doc, tag_item_trait_ref).map(|tp| {
@doc_trait_ref(tp, tcx, cdata)
- }
+ })
}
pub fn get_impl_vtables(cdata: Cmd,
name: ast::Ident) -> Option<ast::DefId> {
let items = reader::get_doc(reader::Doc(cdata.data), tag_items);
let mut found = None;
- do reader::tagged_docs(find_item(id, items), tag_item_impl_method) |mid| {
+ reader::tagged_docs(find_item(id, items), tag_item_impl_method, |mid| {
let m_did = reader::with_doc_data(mid, parse_def_id);
if item_name(intr, find_item(m_did.node, items)) == name {
found = Some(translate_def_id(cdata, m_did));
}
true
- };
+ });
found
}
pub fn each_lang_item(cdata: Cmd, f: |ast::NodeId, uint| -> bool) -> bool {
let root = reader::Doc(cdata.data);
let lang_items = reader::get_doc(root, tag_lang_items);
- do reader::tagged_docs(lang_items, tag_lang_items_item) |item_doc| {
+ reader::tagged_docs(lang_items, tag_lang_items_item, |item_doc| {
let id_doc = reader::get_doc(item_doc, tag_lang_items_item_id);
let id = reader::doc_as_u32(id_doc) as uint;
let node_id_doc = reader::get_doc(item_doc,
let node_id = reader::doc_as_u32(node_id_doc) as ast::NodeId;
f(node_id, id)
- }
+ })
}
struct EachItemContext<'self> {
let mut continue_ = true;
// Iterate over all children.
- do reader::tagged_docs(item_doc, tag_mod_child) |child_info_doc| {
+ reader::tagged_docs(item_doc, tag_mod_child, |child_info_doc| {
let child_def_id = reader::with_doc_data(child_info_doc,
parse_def_id);
let child_def_id = translate_def_id(self.cdata, child_def_id);
}
}
continue_
- };
+ });
if !continue_ {
return false
}
// Iterate over reexports.
- do each_reexport(item_doc) |reexport_doc| {
+ each_reexport(item_doc, |reexport_doc| {
let def_id_doc = reader::get_doc(
reexport_doc,
tag_items_data_item_reexport_def_id);
}
continue_
- };
+ });
continue_
}
ast::Ident,
ast::visibility|) {
// Iterate over all children.
- let _ = do reader::tagged_docs(item_doc, tag_mod_child) |child_info_doc| {
+ let _ = reader::tagged_docs(item_doc, tag_mod_child, |child_info_doc| {
let child_def_id = reader::with_doc_data(child_info_doc,
parse_def_id);
let child_def_id = translate_def_id(cdata, child_def_id);
}
true
- };
+ });
// As a special case, iterate over all static methods of
// associated implementations too. This is a bit of a botch.
// --pcwalton
- let _ = do reader::tagged_docs(item_doc,
- tag_items_data_item_inherent_impl)
- |inherent_impl_def_id_doc| {
+ let _ = reader::tagged_docs(item_doc,
+ tag_items_data_item_inherent_impl,
+ |inherent_impl_def_id_doc| {
let inherent_impl_def_id = item_def_id(inherent_impl_def_id_doc,
cdata);
let items = reader::get_doc(reader::Doc(cdata.data), tag_items);
match maybe_find_item(inherent_impl_def_id.node, items) {
None => {}
Some(inherent_impl_doc) => {
- let _ = do reader::tagged_docs(inherent_impl_doc,
- tag_item_impl_method)
- |impl_method_def_id_doc| {
+ let _ = reader::tagged_docs(inherent_impl_doc,
+ tag_item_impl_method,
+ |impl_method_def_id_doc| {
let impl_method_def_id =
reader::with_doc_data(impl_method_def_id_doc,
parse_def_id);
}
true
- };
+ });
}
}
true
- };
+ });
// Iterate over all reexports.
- let _ = do each_reexport(item_doc) |reexport_doc| {
+ let _ = each_reexport(item_doc, |reexport_doc| {
let def_id_doc = reader::get_doc(reexport_doc,
tag_items_data_item_reexport_def_id);
let child_def_id = reader::with_doc_data(def_id_doc,
}
true
- };
+ });
}
/// Iterates over each child of the given item.
fn item_impl_methods(intr: @ident_interner, cdata: Cmd, item: ebml::Doc,
tcx: ty::ctxt) -> ~[@ty::Method] {
let mut rslt = ~[];
- do reader::tagged_docs(item, tag_item_impl_method) |doc| {
+ reader::tagged_docs(item, tag_item_impl_method, |doc| {
let m_did = reader::with_doc_data(doc, parse_def_id);
rslt.push(@get_method(intr, cdata, m_did.node, tcx));
true
- };
+ });
rslt
}
let data = cdata.data;
let item = lookup_item(id, data);
let mut result = ~[];
- do reader::tagged_docs(item, tag_item_trait_method) |mth| {
+ reader::tagged_docs(item, tag_item_trait_method, |mth| {
result.push(item_def_id(mth, cdata));
true
- };
+ });
result
}
let item = lookup_item(id, data);
let mut result = ~[];
- do reader::tagged_docs(item, tag_item_trait_method) |mth_id| {
+ reader::tagged_docs(item, tag_item_trait_method, |mth_id| {
let did = item_def_id(mth_id, cdata);
let mth = lookup_item(did.node, data);
result.push(@get_method(intr, cdata, did.node, tcx));
}
true
- };
+ });
return result;
}
-> ~[@ty::TraitRef] {
let mut results = ~[];
let item_doc = lookup_item(id, cdata.data);
- do reader::tagged_docs(item_doc, tag_item_super_trait_ref) |trait_doc| {
+ reader::tagged_docs(item_doc, tag_item_super_trait_ref, |trait_doc| {
// NB. Only reads the ones that *aren't* builtin-bounds. See also
// get_trait_def() for collecting the builtin bounds.
// FIXME(#8559): The builtin bounds shouldn't be encoded in the first place.
results.push(@trait_ref);
}
true
- };
+ });
return results;
}
}
let mut ret = None;
- do reader::tagged_docs(item, tag_item_impl_type_basename) |doc| {
+ reader::tagged_docs(item, tag_item_impl_type_basename, |doc| {
ret = Some(token::str_to_ident(doc.as_str_slice()));
false
- };
+ });
ret
}
}
// If this impl implements a trait, don't consider it.
- let ret = do reader::tagged_docs(item, tag_item_trait_ref) |_doc| {
+ let ret = reader::tagged_docs(item, tag_item_trait_ref, |_doc| {
false
- };
+ });
if !ret { return None }
let mut impl_method_ids = ~[];
- do reader::tagged_docs(item, tag_item_impl_method) |impl_method_doc| {
+ reader::tagged_docs(item, tag_item_impl_method, |impl_method_doc| {
impl_method_ids.push(reader::with_doc_data(impl_method_doc, parse_def_id));
true
- };
+ });
let mut static_impl_methods = ~[];
for impl_method_id in impl_method_ids.iter() {
node_id: ast::NodeId,
f: |~[@ast::MetaItem]|) {
let item = lookup_item(node_id, cdata.data);
- do reader::tagged_docs(item, tag_attributes) |attributes| {
- do reader::tagged_docs(attributes, tag_attribute) |attribute| {
+ reader::tagged_docs(item, tag_attributes, |attributes| {
+ reader::tagged_docs(attributes, tag_attribute, |attribute| {
f(get_meta_items(attribute));
true
- };
+ });
true
- };
+ });
}
fn struct_field_family_to_visibility(family: Family) -> ast::visibility {
let data = cdata.data;
let item = lookup_item(id, data);
let mut result = ~[];
- do reader::tagged_docs(item, tag_item_field) |an_item| {
+ reader::tagged_docs(item, tag_item_field, |an_item| {
let f = item_family(an_item);
if f == PublicField || f == PrivateField || f == InheritedField {
// FIXME #6993: name should be of type Name, not Ident
});
}
true
- };
- do reader::tagged_docs(item, tag_item_unnamed_field) |an_item| {
+ });
+ reader::tagged_docs(item, tag_item_unnamed_field, |an_item| {
let did = item_def_id(an_item, cdata);
result.push(ty::field_ty {
name: special_idents::unnamed_field.name,
vis: ast::inherited,
});
true
- };
+ });
result
}
}
fn read_path(d: ebml::Doc) -> (~str, uint) {
- do reader::with_doc_data(d) |desc| {
+ reader::with_doc_data(d, |desc| {
let pos = u64_from_be_bytes(desc, 0u, 4u) as uint;
let pathbytes = desc.slice(4u, desc.len());
let path = str::from_utf8(pathbytes);
(path, pos)
- }
+ })
}
fn describe_def(items: ebml::Doc, id: ast::DefId) -> ~str {
fn get_meta_items(md: ebml::Doc) -> ~[@ast::MetaItem] {
let mut items: ~[@ast::MetaItem] = ~[];
- do reader::tagged_docs(md, tag_meta_item_word) |meta_item_doc| {
+ reader::tagged_docs(md, tag_meta_item_word, |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let n = nd.as_str_slice().to_managed();
items.push(attr::mk_word_item(n));
true
- };
- do reader::tagged_docs(md, tag_meta_item_name_value) |meta_item_doc| {
+ });
+ reader::tagged_docs(md, tag_meta_item_name_value, |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let vd = reader::get_doc(meta_item_doc, tag_meta_item_value);
let n = nd.as_str_slice().to_managed();
// but currently the encoder just drops them
items.push(attr::mk_name_value_item_str(n, v));
true
- };
- do reader::tagged_docs(md, tag_meta_item_list) |meta_item_doc| {
+ });
+ reader::tagged_docs(md, tag_meta_item_list, |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let n = nd.as_str_slice().to_managed();
let subitems = get_meta_items(meta_item_doc);
items.push(attr::mk_list_item(n, subitems));
true
- };
+ });
return items;
}
let mut attrs: ~[ast::Attribute] = ~[];
match reader::maybe_get_doc(md, tag_attributes) {
option::Some(attrs_d) => {
- do reader::tagged_docs(attrs_d, tag_attribute) |attr_doc| {
+ reader::tagged_docs(attrs_d, tag_attribute, |attr_doc| {
let meta_items = get_meta_items(attr_doc);
// Currently it's only possible to have a single meta item on
// an attribute
span: codemap::dummy_sp()
});
true
- };
+ });
}
option::None => ()
}
let d = reader::get_doc(doc, tag_);
d.as_str_slice().to_managed()
}
- do reader::tagged_docs(depsdoc, tag_crate_dep) |depdoc| {
+ reader::tagged_docs(depsdoc, tag_crate_dep, |depdoc| {
deps.push(CrateDep {cnum: crate_num,
name: token::str_to_ident(docstr(depdoc, tag_crate_dep_name)),
vers: docstr(depdoc, tag_crate_dep_vers),
hash: docstr(depdoc, tag_crate_dep_hash)});
crate_num += 1;
true
- };
+ });
return deps;
}
pub fn each_impl(cdata: Cmd, callback: |ast::DefId|) {
let impls_doc = reader::get_doc(reader::Doc(cdata.data), tag_impls);
- let _ = do reader::tagged_docs(impls_doc, tag_impls_impl) |impl_doc| {
+ let _ = reader::tagged_docs(impls_doc, tag_impls_impl, |impl_doc| {
callback(item_def_id(impl_doc, cdata));
true
- };
+ });
}
pub fn each_implementation_for_type(cdata: Cmd,
id: ast::NodeId,
callback: |ast::DefId|) {
let item_doc = lookup_item(id, cdata.data);
- do reader::tagged_docs(item_doc, tag_items_data_item_inherent_impl)
- |impl_doc| {
+ reader::tagged_docs(item_doc,
+ tag_items_data_item_inherent_impl,
+ |impl_doc| {
let implementation_def_id = item_def_id(impl_doc, cdata);
callback(implementation_def_id);
true
- };
+ });
}
pub fn each_implementation_for_trait(cdata: Cmd,
callback: |ast::DefId|) {
let item_doc = lookup_item(id, cdata.data);
- let _ = do reader::tagged_docs(item_doc,
- tag_items_data_item_extension_impl)
- |impl_doc| {
+ let _ = reader::tagged_docs(item_doc,
+ tag_items_data_item_extension_impl,
+ |impl_doc| {
let implementation_def_id = item_def_id(impl_doc, cdata);
callback(implementation_def_id);
true
- };
+ });
}
pub fn get_trait_of_method(cdata: Cmd, id: ast::NodeId, tcx: ty::ctxt)
match item_family(parent_item_doc) {
Trait => Some(item_def_id(parent_item_doc, cdata)),
Impl => {
- do reader::maybe_get_doc(parent_item_doc, tag_item_trait_ref).map
- |_| {
- item_trait_ref(parent_item_doc, tcx, cdata).def_id
- }
+ reader::maybe_get_doc(parent_item_doc, tag_item_trait_ref)
+ .map(|_| item_trait_ref(parent_item_doc, tcx, cdata).def_id)
}
_ => None
}
ebml_w.wr_str(def_to_str(local_def(item.id)));
ebml_w.end_tag();
- do each_auxiliary_node_id(*item) |auxiliary_node_id| {
+ each_auxiliary_node_id(*item, |auxiliary_node_id| {
ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(auxiliary_node_id)));
ebml_w.end_tag();
true
- };
+ });
match item.node {
item_impl(*) => {
// Pull the cnums and name,vers,hash out of cstore
let mut deps = ~[];
- do cstore::iter_crate_data(cstore) |key, val| {
+ cstore::iter_crate_data(cstore, |key, val| {
let dep = decoder::CrateDep {cnum: key,
name: ecx.tcx.sess.ident_of(val.name),
vers: decoder::get_crate_vers(val.data),
hash: decoder::get_crate_hash(val.data)};
deps.push(dep);
- };
+ });
// Sort by cnum
extra::sort::quick_sort(deps, |kv1, kv2| kv1.cnum <= kv2.cnum);
ebml_w.wr_str(def_to_str(local_def(item.id)));
ebml_w.end_tag();
- do each_auxiliary_node_id(item) |auxiliary_node_id| {
+ each_auxiliary_node_id(item, |auxiliary_node_id| {
ebml_w.start_tag(tag_mod_child);
ebml_w.wr_str(def_to_str(local_def(auxiliary_node_id)));
ebml_w.end_tag();
true
- };
+ });
}
// Encode reexports for the root module.
}
pub fn search(filesearch: @FileSearch, pick: pick) {
- do filesearch.for_each_lib_search_path() |lib_search_path| {
+ filesearch.for_each_lib_search_path(|lib_search_path| {
debug!("searching {}", lib_search_path.display());
match io::result(|| fs::readdir(lib_search_path)) {
Ok(files) => {
}
Err(*) => FileDoesntMatch,
}
- };
+ });
}
pub fn relative_target_lib_path(target_triple: &str) -> Path {
debug!("matching {} metadata requirements against {} items",
local_metas.len(), extern_metas.len());
- do local_metas.iter().all |needed| {
- attr::contains(extern_metas, *needed)
- }
+ local_metas.iter().all(|needed| attr::contains(extern_metas, *needed))
}
fn get_metadata_section(os: Os,
filename: &Path) -> Option<@~[u8]> {
unsafe {
- let mb = do filename.with_c_str |buf| {
+ let mb = filename.with_c_str(|buf| {
llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf)
- };
+ });
if mb as int == 0 { return option::None::<@~[u8]>; }
let of = match mk_object_file(mb) {
option::Some(of) => of,
vlen);
let minsz = num::min(vlen, csz);
let mut version_ok = false;
- do vec::raw::buf_as_slice(cvbuf, minsz) |buf0| {
+ vec::raw::buf_as_slice(cvbuf, minsz, |buf0| {
version_ok = (buf0 ==
encoder::metadata_encoding_version);
- }
+ });
if !version_ok { return None; }
let cvbuf1 = ptr::offset(cvbuf, vlen as int);
debug!("inflating {} bytes of compressed metadata",
csz - vlen);
- do vec::raw::buf_as_slice(cvbuf1, csz-vlen) |bytes| {
+ vec::raw::buf_as_slice(cvbuf1, csz-vlen, |bytes| {
let inflated = flate::inflate_bytes(bytes);
found = Some(@(inflated));
- }
+ });
if found != None {
return found;
}
fn enc_substs(w: @mut MemWriter, cx: @ctxt, substs: &ty::substs) {
enc_region_substs(w, cx, &substs.regions);
- do enc_opt(w, substs.self_ty) |t| { enc_ty(w, cx, t) }
+ enc_opt(w, substs.self_ty, |t| enc_ty(w, cx, t));
mywrite!(w, "[");
for t in substs.tps.iter() { enc_ty(w, cx, *t); }
mywrite!(w, "]");
fn enc_abi_set(w: @mut MemWriter, abis: AbiSet) {
mywrite!(w, "[");
- do abis.each |abi| {
+ abis.each(|abi| {
mywrite!(w, "{},", abi.name());
true
- };
+ });
mywrite!(w, "]")
}
impl fold::ast_fold for NestedItemsDropper {
fn fold_block(&self, blk: &ast::Block) -> ast::Block {
- let stmts_sans_items = do blk.stmts.iter().filter_map |stmt| {
+ let stmts_sans_items = blk.stmts.iter().filter_map(|stmt| {
match stmt.node {
ast::StmtExpr(_, _) | ast::StmtSemi(_, _) |
ast::StmtDecl(@codemap::Spanned {
}, _) => None,
ast::StmtMac(*) => fail!("unexpanded macro in astencode")
}
- }.collect();
+ }).collect();
let blk_sans_items = ast::Block {
view_items: ~[], // I don't know if we need the view_items here,
// but it doesn't break tests!
fn encode_method_map_entry(ecx: &e::EncodeContext,
ebml_w: &mut writer::Encoder,
mme: method_map_entry) {
- do ebml_w.emit_struct("method_map_entry", 3) |ebml_w| {
- do ebml_w.emit_struct_field("self_ty", 0u) |ebml_w| {
+ ebml_w.emit_struct("method_map_entry", 3, |ebml_w| {
+ ebml_w.emit_struct_field("self_ty", 0u, |ebml_w| {
ebml_w.emit_ty(ecx, mme.self_ty);
- }
- do ebml_w.emit_struct_field("explicit_self", 2u) |ebml_w| {
+ });
+ ebml_w.emit_struct_field("explicit_self", 2u, |ebml_w| {
mme.explicit_self.encode(ebml_w);
- }
- do ebml_w.emit_struct_field("origin", 1u) |ebml_w| {
+ });
+ ebml_w.emit_struct_field("origin", 1u, |ebml_w| {
mme.origin.encode(ebml_w);
- }
- do ebml_w.emit_struct_field("self_mode", 3) |ebml_w| {
+ });
+ ebml_w.emit_struct_field("self_mode", 3, |ebml_w| {
mme.self_mode.encode(ebml_w);
- }
- }
+ });
+ })
}
impl read_method_map_entry_helper for reader::Decoder {
fn read_method_map_entry(&mut self, xcx: @ExtendedDecodeContext)
-> method_map_entry {
- do self.read_struct("method_map_entry", 3) |this| {
+ self.read_struct("method_map_entry", 3, |this| {
method_map_entry {
self_ty: this.read_struct_field("self_ty", 0u, |this| {
this.read_ty(xcx)
self_mode
}),
}
- }
+ })
}
}
// ty::t doesn't work, and there is no way (atm) to have
// hand-written encoding routines combine with auto-generated
// ones. perhaps we should fix this.
- do ebml_w.emit_from_vec(*dr) |ebml_w, param_tables| {
+ ebml_w.emit_from_vec(*dr, |ebml_w, param_tables| {
encode_vtable_param_res(ecx, ebml_w, *param_tables);
- }
+ })
}
pub fn encode_vtable_param_res(ecx: &e::EncodeContext,
ebml_w: &mut writer::Encoder,
param_tables: typeck::vtable_param_res) {
- do ebml_w.emit_from_vec(*param_tables) |ebml_w, vtable_origin| {
+ ebml_w.emit_from_vec(*param_tables, |ebml_w, vtable_origin| {
encode_vtable_origin(ecx, ebml_w, vtable_origin)
- }
+ })
}
pub fn encode_vtable_origin(ecx: &e::EncodeContext,
ebml_w: &mut writer::Encoder,
vtable_origin: &typeck::vtable_origin) {
- do ebml_w.emit_enum("vtable_origin") |ebml_w| {
+ ebml_w.emit_enum("vtable_origin", |ebml_w| {
match *vtable_origin {
typeck::vtable_static(def_id, ref tys, vtable_res) => {
- do ebml_w.emit_enum_variant("vtable_static", 0u, 3u) |ebml_w| {
- do ebml_w.emit_enum_variant_arg(0u) |ebml_w| {
+ ebml_w.emit_enum_variant("vtable_static", 0u, 3u, |ebml_w| {
+ ebml_w.emit_enum_variant_arg(0u, |ebml_w| {
ebml_w.emit_def_id(def_id)
- }
- do ebml_w.emit_enum_variant_arg(1u) |ebml_w| {
+ });
+ ebml_w.emit_enum_variant_arg(1u, |ebml_w| {
ebml_w.emit_tys(ecx, *tys);
- }
- do ebml_w.emit_enum_variant_arg(2u) |ebml_w| {
+ });
+ ebml_w.emit_enum_variant_arg(2u, |ebml_w| {
encode_vtable_res(ecx, ebml_w, vtable_res);
- }
- }
+ })
+ })
}
typeck::vtable_param(pn, bn) => {
- do ebml_w.emit_enum_variant("vtable_param", 1u, 2u) |ebml_w| {
- do ebml_w.emit_enum_variant_arg(0u) |ebml_w| {
+ ebml_w.emit_enum_variant("vtable_param", 1u, 2u, |ebml_w| {
+ ebml_w.emit_enum_variant_arg(0u, |ebml_w| {
pn.encode(ebml_w);
- }
- do ebml_w.emit_enum_variant_arg(1u) |ebml_w| {
+ });
+ ebml_w.emit_enum_variant_arg(1u, |ebml_w| {
ebml_w.emit_uint(bn);
- }
- }
+ })
+ })
}
}
- }
+ })
}
pub trait vtable_decoder_helpers {
fn read_vtable_origin(&mut self,
tcx: ty::ctxt, cdata: @cstore::crate_metadata)
-> typeck::vtable_origin {
- do self.read_enum("vtable_origin") |this| {
- do this.read_enum_variant(["vtable_static",
- "vtable_param",
- "vtable_self"])
- |this, i| {
+ self.read_enum("vtable_origin", |this| {
+ this.read_enum_variant(["vtable_static",
+ "vtable_param",
+ "vtable_self"],
+ |this, i| {
match i {
0 => {
typeck::vtable_static(
- do this.read_enum_variant_arg(0u) |this| {
+ this.read_enum_variant_arg(0u, |this| {
this.read_def_id_noxcx(cdata)
- },
- do this.read_enum_variant_arg(1u) |this| {
+ }),
+ this.read_enum_variant_arg(1u, |this| {
this.read_tys_noxcx(tcx, cdata)
- },
- do this.read_enum_variant_arg(2u) |this| {
+ }),
+ this.read_enum_variant_arg(2u, |this| {
this.read_vtable_res(tcx, cdata)
- }
+ })
)
}
1 => {
typeck::vtable_param(
- do this.read_enum_variant_arg(0u) |this| {
+ this.read_enum_variant_arg(0u, |this| {
Decodable::decode(this)
- },
- do this.read_enum_variant_arg(1u) |this| {
+ }),
+ this.read_enum_variant_arg(1u, |this| {
this.read_uint()
- }
+ })
)
}
// hard to avoid - user input
_ => fail!("bad enum variant")
}
- }
- }
+ })
+ })
}
}
impl ebml_writer_helpers for writer::Encoder {
fn emit_ty(&mut self, ecx: &e::EncodeContext, ty: ty::t) {
- do self.emit_opaque |this| {
- e::write_type(ecx, this, ty)
- }
+ self.emit_opaque(|this| e::write_type(ecx, this, ty))
}
fn emit_vstore(&mut self, ecx: &e::EncodeContext, vstore: ty::vstore) {
- do self.emit_opaque |this| {
- e::write_vstore(ecx, this, vstore)
- }
+ self.emit_opaque(|this| e::write_vstore(ecx, this, vstore))
}
fn emit_tys(&mut self, ecx: &e::EncodeContext, tys: &[ty::t]) {
- do self.emit_from_vec(tys) |this, ty| {
- this.emit_ty(ecx, *ty)
- }
+ self.emit_from_vec(tys, |this, ty| this.emit_ty(ecx, *ty))
}
fn emit_type_param_def(&mut self,
ecx: &e::EncodeContext,
type_param_def: &ty::TypeParameterDef) {
- do self.emit_opaque |this| {
+ self.emit_opaque(|this| {
tyencode::enc_type_param_def(this.writer,
ecx.ty_str_ctxt(),
type_param_def)
- }
+ })
}
fn emit_tpbt(&mut self,
ecx: &e::EncodeContext,
tpbt: ty::ty_param_bounds_and_ty) {
- do self.emit_struct("ty_param_bounds_and_ty", 2) |this| {
- do this.emit_struct_field("generics", 0) |this| {
- do this.emit_struct("Generics", 2) |this| {
- do this.emit_struct_field("type_param_defs", 0) |this| {
- do this.emit_from_vec(*tpbt.generics.type_param_defs)
- |this, type_param_def| {
+ self.emit_struct("ty_param_bounds_and_ty", 2, |this| {
+ this.emit_struct_field("generics", 0, |this| {
+ this.emit_struct("Generics", 2, |this| {
+ this.emit_struct_field("type_param_defs", 0, |this| {
+ this.emit_from_vec(*tpbt.generics.type_param_defs,
+ |this, type_param_def| {
this.emit_type_param_def(ecx, type_param_def);
- }
- }
- do this.emit_struct_field("region_param_defs", 1) |this| {
+ })
+ });
+ this.emit_struct_field("region_param_defs", 1, |this| {
tpbt.generics.region_param_defs.encode(this);
- }
- }
- }
- do this.emit_struct_field("ty", 1) |this| {
+ })
+ })
+ });
+ this.emit_struct_field("ty", 1, |this| {
this.emit_ty(ecx, tpbt.ty);
- }
- }
+ })
+ })
}
}
{
let r = tcx.def_map.find(&id);
for def in r.iter() {
- do ebml_w.tag(c::tag_table_def) |ebml_w| {
+ ebml_w.tag(c::tag_table_def, |ebml_w| {
ebml_w.id(id);
- do ebml_w.tag(c::tag_table_val) |ebml_w| {
- (*def).encode(ebml_w)
- }
- }
+ ebml_w.tag(c::tag_table_val, |ebml_w| (*def).encode(ebml_w));
+ })
}
}
{
let r = tcx.node_types.find(&(id as uint));
for &ty in r.iter() {
- do ebml_w.tag(c::tag_table_node_type) |ebml_w| {
+ ebml_w.tag(c::tag_table_node_type, |ebml_w| {
ebml_w.id(id);
- do ebml_w.tag(c::tag_table_val) |ebml_w| {
+ ebml_w.tag(c::tag_table_val, |ebml_w| {
ebml_w.emit_ty(ecx, *ty);
- }
- }
+ })
+ })
}
}
{
let r = tcx.node_type_substs.find(&id);
for tys in r.iter() {
- do ebml_w.tag(c::tag_table_node_type_subst) |ebml_w| {
+ ebml_w.tag(c::tag_table_node_type_subst, |ebml_w| {
ebml_w.id(id);
- do ebml_w.tag(c::tag_table_val) |ebml_w| {
+ ebml_w.tag(c::tag_table_val, |ebml_w| {
ebml_w.emit_tys(ecx, **tys)
- }
- }
+ })
+ })
}
}
{
let r = tcx.freevars.find(&id);
for &fv in r.iter() {
- do ebml_w.tag(c::tag_table_freevars) |ebml_w| {
+ ebml_w.tag(c::tag_table_freevars, |ebml_w| {
ebml_w.id(id);
- do ebml_w.tag(c::tag_table_val) |ebml_w| {
- do ebml_w.emit_from_vec(**fv) |ebml_w, fv_entry| {
+ ebml_w.tag(c::tag_table_val, |ebml_w| {
+ ebml_w.emit_from_vec(**fv, |ebml_w, fv_entry| {
encode_freevar_entry(ebml_w, *fv_entry)
- }
- }
- }
+ })
+ })
+ })
}
}
{
let r = tcx.tcache.find(&lid);
for &tpbt in r.iter() {
- do ebml_w.tag(c::tag_table_tcache) |ebml_w| {
+ ebml_w.tag(c::tag_table_tcache, |ebml_w| {
ebml_w.id(id);
- do ebml_w.tag(c::tag_table_val) |ebml_w| {
+ ebml_w.tag(c::tag_table_val, |ebml_w| {
ebml_w.emit_tpbt(ecx, *tpbt);
- }
- }
+ })
+ })
}
}
{
let r = tcx.ty_param_defs.find(&id);
for &type_param_def in r.iter() {
- do ebml_w.tag(c::tag_table_param_defs) |ebml_w| {
+ ebml_w.tag(c::tag_table_param_defs, |ebml_w| {
ebml_w.id(id);
- do ebml_w.tag(c::tag_table_val) |ebml_w| {
+ ebml_w.tag(c::tag_table_val, |ebml_w| {
ebml_w.emit_type_param_def(ecx, type_param_def)
- }
- }
+ })
+ })
}
}
{
let r = maps.method_map.find(&id);
for &mme in r.iter() {
- do ebml_w.tag(c::tag_table_method_map) |ebml_w| {
+ ebml_w.tag(c::tag_table_method_map, |ebml_w| {
ebml_w.id(id);
- do ebml_w.tag(c::tag_table_val) |ebml_w| {
+ ebml_w.tag(c::tag_table_val, |ebml_w| {
encode_method_map_entry(ecx, ebml_w, *mme)
- }
- }
+ })
+ })
}
}
{
let r = maps.vtable_map.find(&id);
for &dr in r.iter() {
- do ebml_w.tag(c::tag_table_vtable_map) |ebml_w| {
+ ebml_w.tag(c::tag_table_vtable_map, |ebml_w| {
ebml_w.id(id);
- do ebml_w.tag(c::tag_table_val) |ebml_w| {
+ ebml_w.tag(c::tag_table_val, |ebml_w| {
encode_vtable_res(ecx, ebml_w, *dr);
- }
- }
+ })
+ })
}
}
{
let r = tcx.adjustments.find(&id);
for adj in r.iter() {
- do ebml_w.tag(c::tag_table_adjustments) |ebml_w| {
+ ebml_w.tag(c::tag_table_adjustments, |ebml_w| {
ebml_w.id(id);
- do ebml_w.tag(c::tag_table_val) |ebml_w| {
+ ebml_w.tag(c::tag_table_val, |ebml_w| {
(**adj).encode(ebml_w)
- }
- }
+ })
+ })
}
}
{
let r = maps.capture_map.find(&id);
for &cap_vars in r.iter() {
- do ebml_w.tag(c::tag_table_capture_map) |ebml_w| {
+ ebml_w.tag(c::tag_table_capture_map, |ebml_w| {
ebml_w.id(id);
- do ebml_w.tag(c::tag_table_val) |ebml_w| {
- do ebml_w.emit_from_vec(*cap_vars) |ebml_w, cap_var| {
+ ebml_w.tag(c::tag_table_val, |ebml_w| {
+ ebml_w.emit_from_vec(*cap_vars, |ebml_w, cap_var| {
cap_var.encode(ebml_w);
- }
- }
- }
+ })
+ })
+ })
}
}
}
impl ebml_decoder_decoder_helpers for reader::Decoder {
fn read_ty_noxcx(&mut self,
tcx: ty::ctxt, cdata: @cstore::crate_metadata) -> ty::t {
- do self.read_opaque |_, doc| {
+ self.read_opaque(|_, doc| {
tydecode::parse_ty_data(
*doc.data,
cdata.cnum,
doc.start,
tcx,
|_, id| decoder::translate_def_id(cdata, id))
- }
+ })
}
fn read_tys_noxcx(&mut self,
// context. However, we do not bother, because region types
// are not used during trans.
- return do self.read_opaque |this, doc| {
+ return self.read_opaque(|this, doc| {
debug!("read_ty({})", type_string(doc));
let ty = tydecode::parse_ty_data(
|s, a| this.convert_def_id(xcx, s, a));
ty
- };
+ });
fn type_string(doc: ebml::Doc) -> ~str {
let mut str = ~"";
fn read_type_param_def(&mut self, xcx: @ExtendedDecodeContext)
-> ty::TypeParameterDef {
- do self.read_opaque |this, doc| {
+ self.read_opaque(|this, doc| {
tydecode::parse_type_param_def_data(
*doc.data,
doc.start,
xcx.dcx.cdata.cnum,
xcx.dcx.tcx,
|s, a| this.convert_def_id(xcx, s, a))
- }
+ })
}
fn read_ty_param_bounds_and_ty(&mut self, xcx: @ExtendedDecodeContext)
-> ty::ty_param_bounds_and_ty {
- do self.read_struct("ty_param_bounds_and_ty", 2) |this| {
+ self.read_struct("ty_param_bounds_and_ty", 2, |this| {
ty::ty_param_bounds_and_ty {
- generics: do this.read_struct_field("generics", 0) |this| {
- do this.read_struct("Generics", 2) |this| {
+ generics: this.read_struct_field("generics", 0, |this| {
+ this.read_struct("Generics", 2, |this| {
ty::Generics {
type_param_defs:
this.read_struct_field("type_param_defs",
Decodable::decode(this)
})
}
- }
- },
+ })
+ }),
ty: this.read_struct_field("ty", 1, |this| {
this.read_ty(xcx)
})
}
- }
+ })
}
fn convert_def_id(&mut self,
ast_doc: ebml::Doc) {
let dcx = xcx.dcx;
let tbl_doc = ast_doc.get(c::tag_table as uint);
- do reader::docs(tbl_doc) |tag, entry_doc| {
+ reader::docs(tbl_doc, |tag, entry_doc| {
let id0 = entry_doc.get(c::tag_table_id as uint).as_int();
let id = xcx.tr_id(id0);
debug!(">< Side table doc loaded");
true
- };
+ });
}
// ______________________________________________________________________
//! are issued for future scopes and thus they may have been
//! *issued* but not yet be in effect.
- do self.dfcx_loans.each_bit_on_entry_frozen(scope_id) |loan_index| {
+ self.dfcx_loans.each_bit_on_entry_frozen(scope_id, |loan_index| {
let loan = &self.all_loans[loan_index];
op(loan)
- }
+ })
}
pub fn each_in_scope_loan(&self,
//! currently in scope.
let region_maps = self.tcx().region_maps;
- do self.each_issued_loan(scope_id) |loan| {
+ self.each_issued_loan(scope_id, |loan| {
if region_maps.is_subscope_of(scope_id, loan.kill_scope) {
op(loan)
} else {
true
}
- }
+ })
}
pub fn each_in_scope_restriction(&self,
//! Iterates through all the in-scope restrictions for the
//! given `loan_path`
- do self.each_in_scope_loan(scope_id) |loan| {
+ self.each_in_scope_loan(scope_id, |loan| {
let mut ret = true;
for restr in loan.restrictions.iter() {
if restr.loan_path == loan_path {
}
}
ret
- }
+ })
}
pub fn loans_generated_by(&self, scope_id: ast::NodeId) -> ~[uint] {
//! we encounter `scope_id`.
let mut result = ~[];
- do self.dfcx_loans.each_gen_bit_frozen(scope_id) |loan_index| {
+ self.dfcx_loans.each_gen_bit_frozen(scope_id, |loan_index| {
result.push(loan_index);
true
- };
+ });
return result;
}
let new_loan_indices = self.loans_generated_by(scope_id);
debug!("new_loan_indices = {:?}", new_loan_indices);
- do self.each_issued_loan(scope_id) |issued_loan| {
+ self.each_issued_loan(scope_id, |issued_loan| {
for &new_loan_index in new_loan_indices.iter() {
let new_loan = &self.all_loans[new_loan_index];
self.report_error_if_loans_conflict(issued_loan, new_loan);
}
true
- };
+ });
for (i, &x) in new_loan_indices.iter().enumerate() {
let old_loan = &self.all_loans[x];
debug!("check_if_path_is_moved(id={:?}, use_kind={:?}, lp={})",
id, use_kind, lp.repr(self.bccx.tcx));
- do self.move_data.each_move_of(id, lp) |move, moved_lp| {
+ self.move_data.each_move_of(id, lp, |move, moved_lp| {
self.bccx.report_use_of_moved_value(
span,
use_kind,
move,
moved_lp);
false
- };
+ });
}
pub fn check_assignment(&self, expr: @ast::Expr) {
if self.is_local_variable(cmt) {
assert!(cmt.mutbl.is_immutable()); // no "const" locals
let lp = opt_loan_path(cmt).unwrap();
- do self.move_data.each_assignment_of(expr.id, lp) |assign| {
+ self.move_data.each_assignment_of(expr.id, lp, |assign| {
self.bccx.report_reassigned_immutable_variable(
expr.span,
lp,
assign);
false
- };
+ });
return;
}
// `RESTR_MUTATE` restriction whenever the contents of an
// owned pointer are borrowed, and hence while `v[*]` is not
// restricted from being written, `v` is.
- let cont = do this.each_in_scope_restriction(expr.id, loan_path)
- |loan, restr|
- {
+ let cont = this.each_in_scope_restriction(expr.id,
+ loan_path,
+ |loan, restr| {
if restr.set.intersects(RESTR_MUTATE) {
this.report_illegal_mutation(expr, loan_path, loan);
false
} else {
true
}
- };
+ });
if !cont { return false }
}
// Check for a non-const loan of `loan_path`
- let cont = do this.each_in_scope_loan(expr.id) |loan| {
+ let cont = this.each_in_scope_loan(expr.id, |loan| {
if loan.loan_path == loan_path &&
loan.mutbl != ConstMutability {
this.report_illegal_mutation(expr,
} else {
true
}
- };
+ });
if !cont { return false }
}
}
fn check_move_out_from_id(&self, id: ast::NodeId, span: Span) {
- do self.move_data.each_path_moved_by(id) |_, move_path| {
+ self.move_data.each_path_moved_by(id, |_, move_path| {
match self.analyze_move_out_from(id, move_path) {
MoveOk => {}
MoveWhileBorrowed(loan_path, loan_span) => {
}
}
true
- };
+ });
}
pub fn analyze_move_out_from(&self,
let mut ret = MoveOk;
// check for a conflicting loan:
- do self.each_in_scope_restriction(expr_id, move_path) |loan, _| {
+ self.each_in_scope_restriction(expr_id, move_path, |loan, _| {
// Any restriction prevents moves.
ret = MoveWhileBorrowed(loan.loan_path, loan.span);
false
- };
+ });
ret
}
None => {
// Variable declarations without initializers are considered "moves":
let tcx = this.bccx.tcx;
- do pat_util::pat_bindings(tcx.def_map, local.pat)
- |_, id, span, _| {
+ pat_util::pat_bindings(tcx.def_map, local.pat, |_, id, span, _| {
gather_moves::gather_decl(this.bccx,
this.move_data,
id,
span,
id);
- }
+ })
}
Some(init) => {
// Variable declarations with initializers are considered "assigns":
let tcx = this.bccx.tcx;
- do pat_util::pat_bindings(tcx.def_map, local.pat)
- |_, id, span, _| {
+ pat_util::pat_bindings(tcx.def_map, local.pat, |_, id, span, _| {
gather_moves::gather_assignment(this.bccx,
this.move_data,
id,
span,
@LpVar(id),
id);
- }
+ });
let init_cmt = this.bccx.cat_expr(init);
this.gather_pat(init_cmt, local.pat, None);
}
* moves (non-`ref` bindings with linear type).
*/
- do self.bccx.cat_pattern(discr_cmt, root_pat) |cmt, pat| {
+ self.bccx.cat_pattern(discr_cmt, root_pat, |cmt, pat| {
match pat.node {
ast::PatIdent(bm, _, _) if self.pat_is_binding(pat) => {
match bm {
_ => {}
}
- }
+ })
}
pub fn vec_slice_info(&self, pat: @ast::Pat, slice_ty: ty::t)
}
mc::cat_deref(cmt_base, _, pk) => {
- do opt_loan_path(cmt_base).map |lp| {
+ opt_loan_path(cmt_base).map(|lp| {
@LpExtend(lp, cmt.mutbl, LpDeref(pk))
- }
+ })
}
mc::cat_interior(cmt_base, ik) => {
- do opt_loan_path(cmt_base).map |lp| {
+ opt_loan_path(cmt_base).map(|lp| {
@LpExtend(lp, cmt.mutbl, LpInterior(ik))
- }
+ })
}
mc::cat_downcast(cmt_base) |
match self.path_map.find_copy(&lp) {
Some(index) => {
- do self.each_base_path(index) |p| {
+ self.each_base_path(index, |p| {
result.push(p);
true
- };
+ });
}
None => {
match *lp {
f: |MoveIndex| -> bool)
-> bool {
let mut ret = true;
- do self.each_extending_path(index0) |index| {
+ self.each_extending_path(index0, |index| {
let mut p = self.path(index).first_move;
while p != InvalidMoveIndex {
if !f(p) {
p = self.move(p).next_move;
}
ret
- };
+ });
ret
}
path: MovePathIndex,
kill_id: ast::NodeId,
dfcx_moves: &mut MoveDataFlow) {
- do self.each_applicable_move(path) |move_index| {
+ self.each_applicable_move(path, |move_index| {
dfcx_moves.add_kill(kill_id, *move_index);
true
- };
+ });
}
}
* Iterates through each path moved by `id`
*/
- do self.dfcx_moves.each_gen_bit_frozen(id) |index| {
+ self.dfcx_moves.each_gen_bit_frozen(id, |index| {
let move = &self.move_data.moves[index];
let moved_path = move.path;
f(move, self.move_data.path(moved_path).loan_path)
- }
+ })
}
pub fn each_move_of(&self,
let mut ret = true;
- do self.dfcx_moves.each_bit_on_entry_frozen(id) |index| {
+ self.dfcx_moves.each_bit_on_entry_frozen(id, |index| {
let move = &self.move_data.moves[index];
let moved_path = move.path;
if base_indices.iter().any(|x| x == &moved_path) {
}
} else {
for &loan_path_index in opt_loan_path_index.iter() {
- let cont = do self.move_data.each_base_path(moved_path) |p| {
+ let cont = self.move_data.each_base_path(moved_path, |p| {
if p == loan_path_index {
// Scenario 3: some extension of `loan_path`
// was moved
} else {
true
}
- };
+ });
if !cont { ret = false; break }
}
}
ret
- }
+ })
}
pub fn is_assignee(&self,
}
};
- do self.dfcx_assign.each_bit_on_entry_frozen(id) |index| {
+ self.dfcx_assign.each_bit_on_entry_frozen(id, |index| {
let assignment = &self.move_data.var_assignments[index];
if assignment.path == loan_path_index && !f(assignment) {
false
} else {
true
}
- }
+ })
}
}
}
};
- do walk_pat(*pat) |p| {
+ walk_pat(*pat, |p| {
if pat_matches_nan(p) {
cx.tcx.sess.span_warn(p.span, "unmatchable NaN in pattern, \
use the is_nan method in a guard instead");
}
true
- };
+ });
let v = ~[*pat];
match is_useful(cx, &seen, v) {
is_useful_specialized(cx, m, v, vec(n), n, left_ty)
}
ty::ty_unboxed_vec(*) | ty::ty_evec(*) => {
- let max_len = do m.rev_iter().fold(0) |max_len, r| {
+ let max_len = m.rev_iter().fold(0, |max_len, r| {
match r[0].node {
PatVec(ref before, _, ref after) => {
num::max(before.len() + after.len(), max_len)
}
_ => max_len
}
- };
+ });
for n in iter::range(0u, max_len + 1) {
match is_useful_specialized(cx, m, v, vec(n), n, left_ty) {
not_useful => (),
ty::ty_unboxed_vec(*) | ty::ty_evec(*) => {
// Find the lengths and slices of all vector patterns.
- let vec_pat_lens = do m.iter().filter_map |r| {
+ let vec_pat_lens = m.iter().filter_map(|r| {
match r[0].node {
PatVec(ref before, ref slice, ref after) => {
Some((before.len() + after.len(), slice.is_some()))
}
_ => None
}
- }.collect::<~[(uint, bool)]>();
+ }).collect::<~[(uint, bool)]>();
// Sort them by length such that for patterns of the same length,
// those with a destructured slice come first.
let mut by_ref_span = None;
let mut any_by_move = false;
for pat in pats.iter() {
- do pat_bindings(def_map, *pat) |bm, id, span, _path| {
+ pat_bindings(def_map, *pat, |bm, id, span, _path| {
match bm {
BindByRef(_) => {
by_ref_span = Some(span);
}
}
}
- }
+ })
}
let check_move: |&Pat, Option<@Pat>| = |p, sub| {
if !any_by_move { return; } // pointless micro-optimization
for pat in pats.iter() {
- do walk_pat(*pat) |p| {
+ walk_pat(*pat, |p| {
if pat_is_binding(def_map, p) {
match p.node {
PatIdent(_, _, sub) => {
}
}
true
- };
+ });
}
}
}
ast::ExprStruct(_, ref fs, None) => {
- let cs = do fs.iter().map |f| {
- self.classify(f.expr)
- };
+ let cs = fs.iter().map(|f| self.classify(f.expr));
join_all(cs)
}
fn compute_id_range(&mut self, id: ast::NodeId) -> (uint, uint) {
let mut expanded = false;
let len = self.nodeid_to_bitset.len();
- let n = do self.nodeid_to_bitset.find_or_insert_with(id) |_| {
+ let n = self.nodeid_to_bitset.find_or_insert_with(id, |_| {
expanded = true;
len
- };
+ });
if expanded {
let entry = if self.oper.initial_value() { uint::max_value } else {0};
- do self.words_per_id.times {
+ self.words_per_id.times(|| {
self.gens.push(0);
self.kills.push(0);
self.on_entry.push(entry);
- }
+ })
}
let start = *n * self.words_per_id;
let end = start + self.words_per_id;
debug!("DataFlowContext::walk_pat(pat={}, in_out={})",
pat.repr(self.dfcx.tcx), bits_to_str(reslice(in_out)));
- do ast_util::walk_pat(pat) |p| {
+ ast_util::walk_pat(pat, |p| {
debug!(" p.id={:?} in_out={}", p.id, bits_to_str(reslice(in_out)));
self.merge_with_entry_set(p.id, in_out);
self.dfcx.apply_gen_kill(p.id, in_out);
true
- };
+ });
}
fn walk_pat_alternatives(&mut self,
fn each_node() {
let graph = create_graph();
let expected = ["A", "B", "C", "D", "E", "F"];
- do graph.each_node |idx, node| {
+ graph.each_node(|idx, node| {
assert_eq!(&expected[*idx], graph.node_data(idx));
assert_eq!(expected[*idx], node.data);
true
- };
+ });
}
#[test]
fn each_edge() {
let graph = create_graph();
let expected = ["AB", "BC", "BD", "DE", "EC", "FB"];
- do graph.each_edge |idx, edge| {
+ graph.each_edge(|idx, edge| {
assert_eq!(&expected[*idx], graph.edge_data(idx));
assert_eq!(expected[*idx], edge.data);
true
- };
+ });
}
fn test_adjacent_edges<N:Eq,E:Eq>(graph: &Graph<N,E>,
assert_eq!(graph.node_data(start_index), &start_data);
let mut counter = 0;
- do graph.each_incoming_edge(start_index) |edge_index, edge| {
+ graph.each_incoming_edge(start_index, |edge_index, edge| {
assert_eq!(graph.edge_data(edge_index), &edge.data);
assert!(counter < expected_incoming.len());
debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
}
counter += 1;
true
- };
+ });
assert_eq!(counter, expected_incoming.len());
let mut counter = 0;
- do graph.each_outgoing_edge(start_index) |edge_index, edge| {
+ graph.each_outgoing_edge(start_index, |edge_index, edge| {
assert_eq!(graph.edge_data(edge_index), &edge.data);
assert!(counter < expected_outgoing.len());
debug!("counter={:?} expected={:?} edge_index={:?} edge={:?}",
}
counter += 1;
true
- };
+ });
assert_eq!(counter, expected_outgoing.len());
}
// If this trait has builtin-kind supertraits, meet them.
let self_ty: ty::t = ty::node_id_to_type(cx.tcx, it.id);
debug!("checking impl with self type {:?}", ty::get(self_ty).sty);
- do check_builtin_bounds(cx, self_ty, trait_def.bounds) |missing| {
+ check_builtin_bounds(cx, self_ty, trait_def.bounds, |missing| {
cx.tcx.sess.span_err(self_type.span,
format!("the type `{}', which does not fulfill `{}`, cannot implement this \
trait", ty_to_str(cx.tcx, self_ty), missing.user_string(cx.tcx)));
cx.tcx.sess.span_note(self_type.span,
format!("types implementing this trait must fulfill `{}`",
trait_def.bounds.user_string(cx.tcx)));
- }
+ });
// If this is a destructor, check kinds.
if cx.tcx.lang_items.drop_trait() == Some(trait_def_id) {
fn_id: NodeId) {
// Check kinds on free variables:
- do with_appropriate_checker(cx, fn_id) |chk| {
+ with_appropriate_checker(cx, fn_id, |chk| {
let r = freevars::get_freevars(cx.tcx, fn_id);
for fv in r.iter() {
chk(cx, *fv);
}
- }
+ });
visit::walk_fn(cx, fk, decl, body, sp, fn_id, ());
}
ty: ty::t,
type_param_def: &ty::TypeParameterDef)
{
- do check_builtin_bounds(cx, ty, type_param_def.bounds.builtin_bounds) |missing| {
+ check_builtin_bounds(cx,
+ ty,
+ type_param_def.bounds.builtin_bounds,
+ |missing| {
cx.tcx.sess.span_err(
sp,
format!("instantiating a type parameter with an incompatible type \
`{}`, which does not fulfill `{}`",
ty_to_str(cx.tcx, ty),
missing.user_string(cx.tcx)));
- }
+ });
}
pub fn check_freevar_bounds(cx: &Context, sp: Span, ty: ty::t,
bounds: ty::BuiltinBounds, referenced_ty: Option<ty::t>)
{
- do check_builtin_bounds(cx, ty, bounds) |missing| {
+ check_builtin_bounds(cx, ty, bounds, |missing| {
// Will be Some if the freevar is implicitly borrowed (stack closure).
// Emit a less mysterious error message in this case.
match referenced_ty {
sp,
format!("this closure's environment must satisfy `{}`",
bounds.user_string(cx.tcx)));
- }
+ });
}
pub fn check_trait_cast_bounds(cx: &Context, sp: Span, ty: ty::t,
bounds: ty::BuiltinBounds) {
- do check_builtin_bounds(cx, ty, bounds) |missing| {
+ check_builtin_bounds(cx, ty, bounds, |missing| {
cx.tcx.sess.span_err(sp,
format!("cannot pack type `{}`, which does not fulfill \
`{}`, as a trait bounded by {}",
ty_to_str(cx.tcx, ty), missing.user_string(cx.tcx),
bounds.user_string(cx.tcx)));
- }
+ });
}
fn is_nullary_variant(cx: &Context, ex: @Expr) -> bool {
pub fn collect_external_language_items(&mut self) {
let crate_store = self.session.cstore;
- do iter_crate_data(crate_store) |crate_number, _crate_metadata| {
- do each_lang_item(crate_store, crate_number)
- |node_id, item_index| {
+ iter_crate_data(crate_store, |crate_number, _crate_metadata| {
+ each_lang_item(crate_store, crate_number, |node_id, item_index| {
let def_id = ast::DefId { crate: crate_number, node: node_id };
self.collect_item(item_index, def_id);
true
- };
- }
+ });
+ })
}
pub fn collect(&mut self, crate: &ast::Crate) {
// of what we changed so we can roll everything back after invoking the
// specified closure
let mut pushed = 0u;
- do each_lint(self.tcx.sess, attrs) |meta, level, lintname| {
+ each_lint(self.tcx.sess, attrs, |meta, level, lintname| {
match self.dict.find_equiv(&lintname) {
None => {
self.span_lint(
}
}
true
- };
+ });
let old_is_doc_hidden = self.is_doc_hidden;
self.is_doc_hidden = self.is_doc_hidden ||
// rollback
self.is_doc_hidden = old_is_doc_hidden;
- do pushed.times {
+ pushed.times(|| {
let (lint, lvl, src) = self.lint_stack.pop();
self.set_level(lint, lvl, src);
- }
+ })
}
fn visit_ids(&self, f: |&mut ast_util::IdVisitor<Context>|) {
// this crate
match cx.tcx.items.find(&id.node) {
Some(ast_node) => {
- let s = do ast_node.with_attrs |attrs| {
- do attrs.map |a| {
+ let s = ast_node.with_attrs(|attrs| {
+ attrs.map(|a| {
attr::find_stability(a.iter().map(|a| a.meta()))
- }
- };
+ })
+ });
match s {
Some(s) => s,
let mut s = None;
// run through all the attributes and take the first
// stability one.
- do csearch::get_item_attrs(cx.tcx.cstore, id) |meta_items| {
+ csearch::get_item_attrs(cx.tcx.cstore, id, |meta_items| {
if s.is_none() {
s = attr::find_stability(meta_items.move_iter())
}
- }
+ });
s
};
impl<'self> Visitor<()> for Context<'self> {
fn visit_item(&mut self, it: @ast::item, _: ()) {
- do self.with_lint_attrs(it.attrs) |cx| {
+ self.with_lint_attrs(it.attrs, |cx| {
check_item_ctypes(cx, it);
check_item_non_camel_case_types(cx, it);
check_item_non_uppercase_statics(cx, it);
check_missing_doc_item(cx, it);
check_attrs_usage(cx, it.attrs);
- do cx.visit_ids |v| {
- v.visit_item(it, ());
- }
+ cx.visit_ids(|v| v.visit_item(it, ()));
visit::walk_item(cx, it, ());
- }
+ })
}
fn visit_foreign_item(&mut self, it: @ast::foreign_item, _: ()) {
match *fk {
visit::fk_method(_, _, m) => {
- do self.with_lint_attrs(m.attrs) |cx| {
+ self.with_lint_attrs(m.attrs, |cx| {
check_missing_doc_method(cx, m);
check_attrs_usage(cx, m.attrs);
- do cx.visit_ids |v| {
+ cx.visit_ids(|v| {
v.visit_fn(fk, decl, body, span, id, ());
- }
+ });
recurse(cx);
- }
+ })
}
_ => recurse(self),
}
fn visit_ty_method(&mut self, t: &ast::TypeMethod, _: ()) {
- do self.with_lint_attrs(t.attrs) |cx| {
+ self.with_lint_attrs(t.attrs, |cx| {
check_missing_doc_ty_method(cx, t);
check_attrs_usage(cx, t.attrs);
visit::walk_ty_method(cx, t, ());
- }
+ })
}
fn visit_struct_def(&mut self,
}
fn visit_struct_field(&mut self, s: @ast::struct_field, _: ()) {
- do self.with_lint_attrs(s.node.attrs) |cx| {
+ self.with_lint_attrs(s.node.attrs, |cx| {
check_missing_doc_struct_field(cx, s);
check_attrs_usage(cx, s.node.attrs);
visit::walk_struct_field(cx, s, ());
- }
+ })
}
fn visit_variant(&mut self, v: &ast::variant, g: &ast::Generics, _: ()) {
- do self.with_lint_attrs(v.node.attrs) |cx| {
+ self.with_lint_attrs(v.node.attrs, |cx| {
check_missing_doc_variant(cx, v);
check_attrs_usage(cx, v.node.attrs);
visit::walk_variant(cx, v, g, ());
- }
+ })
}
}
for &(lint, level) in tcx.sess.opts.lint_opts.iter() {
cx.set_level(lint, level, CommandLine);
}
- do cx.with_lint_attrs(crate.attrs) |cx| {
- do cx.visit_ids |v| {
+ cx.with_lint_attrs(crate.attrs, |cx| {
+ cx.visit_ids(|v| {
v.visited_outermost = true;
visit::walk_crate(v, crate, ());
- }
+ });
check_crate_attrs_usage(cx, crate.attrs);
visit::walk_crate(cx, crate, ());
- }
+ });
// If we missed any lints added to the session, then there's a bug somewhere
// in the iteration code.
}
for arg in decl.inputs.iter() {
- do pat_util::pat_bindings(this.tcx.def_map, arg.pat)
- |_bm, arg_id, _x, path| {
+ pat_util::pat_bindings(this.tcx.def_map,
+ arg.pat,
+ |_bm, arg_id, _x, path| {
debug!("adding argument {}", arg_id);
let ident = ast_util::path_to_ident(path);
fn_maps.add_variable(Arg(arg_id, ident));
- }
+ })
};
// Add `this`, whether explicit or implicit.
fn visit_local(v: &mut LivenessVisitor, local: @Local, this: @mut IrMaps) {
let def_map = this.tcx.def_map;
- do pat_util::pat_bindings(def_map, local.pat) |bm, p_id, sp, path| {
+ pat_util::pat_bindings(def_map, local.pat, |bm, p_id, sp, path| {
debug!("adding local variable {}", p_id);
let name = ast_util::path_to_ident(path);
this.add_live_node_for_node(p_id, VarDefNode(sp));
is_mutbl: mutbl,
kind: kind
}));
- }
+ });
visit::walk_local(v, local, this);
}
fn visit_arm(v: &mut LivenessVisitor, arm: &Arm, this: @mut IrMaps) {
let def_map = this.tcx.def_map;
for pat in arm.pats.iter() {
- do pat_util::pat_bindings(def_map, *pat) |bm, p_id, sp, path| {
+ pat_util::pat_bindings(def_map, *pat, |bm, p_id, sp, path| {
debug!("adding local variable {} from match with bm {:?}",
p_id, bm);
let name = ast_util::path_to_ident(path);
is_mutbl: mutbl,
kind: FromMatch(bm)
}));
- }
+ })
}
visit::walk_arm(v, arm, this);
}
match expr.node {
ExprPath(_) => {
let def = self.tcx.def_map.get_copy(&expr.id);
- do moves::moved_variable_node_id_from_def(def).map |rdef| {
+ moves::moved_variable_node_id_from_def(def).map(|rdef| {
self.variable(rdef, expr.span)
- }
+ })
}
_ => None
}
-> Option<Variable> {
match self.tcx.def_map.find(&node_id) {
Some(&def) => {
- do moves::moved_variable_node_id_from_def(def).map |rdef| {
+ moves::moved_variable_node_id_from_def(def).map(|rdef| {
self.variable(rdef, span)
- }
+ })
}
None => {
self.tcx.sess.span_bug(
pat: @Pat,
f: |LiveNode, Variable, Span, NodeId|) {
let def_map = self.tcx.def_map;
- do pat_util::pat_bindings(def_map, pat) |_bm, p_id, sp, _n| {
+ pat_util::pat_bindings(def_map, pat, |_bm, p_id, sp, _n| {
let ln = self.live_node(p_id, sp);
let var = self.variable(p_id, sp);
f(ln, var, sp, p_id);
- }
+ })
}
pub fn arm_pats_bindings(&self,
pub fn define_bindings_in_arm_pats(&self, pats: &[@Pat], succ: LiveNode)
-> LiveNode {
let mut succ = succ;
- do self.arm_pats_bindings(pats) |ln, var, _sp, _id| {
+ self.arm_pats_bindings(pats, |ln, var, _sp, _id| {
self.init_from_succ(ln, succ);
self.define(ln, var);
succ = ln;
- }
+ });
succ
}
}
pub fn ln_str(&self, ln: LiveNode) -> ~str {
- str::from_utf8_owned(do io::mem::with_mem_writer |wr| {
+ str::from_utf8_owned(io::mem::with_mem_writer(|wr| {
let wr = wr as &mut io::Writer;
write!(wr, "[ln({}) of kind {:?} reads", *ln, self.ir.lnks[*ln]);
self.write_vars(wr, ln, |idx| self.users[idx].reader );
write!(wr, " writes");
self.write_vars(wr, ln, |idx| self.users[idx].writer );
write!(wr, " precedes {}]", self.successors[*ln].to_str());
- })
+ }))
}
pub fn init_empty(&self, ln: LiveNode, succ_ln: LiveNode) {
if ln == succ_ln { return false; }
let mut changed = false;
- do self.indices2(ln, succ_ln) |idx, succ_idx| {
+ self.indices2(ln, succ_ln, |idx, succ_idx| {
let users = &mut *self.users;
changed |= copy_if_invalid(users[succ_idx].reader,
&mut users[idx].reader);
users[idx].used = true;
changed = true;
}
- }
+ });
debug!("merge_from_succ(ln={}, succ={}, first_merge={}, changed={})",
ln.to_str(), self.ln_str(succ_ln), first_merge, changed);
pub fn propagate_through_block(&self, blk: &Block, succ: LiveNode)
-> LiveNode {
let succ = self.propagate_through_opt_expr(blk.expr, succ);
- do blk.stmts.rev_iter().fold(succ) |succ, stmt| {
+ blk.stmts.rev_iter().fold(succ, |succ, stmt| {
self.propagate_through_stmt(*stmt, succ)
- }
+ })
}
pub fn propagate_through_stmt(&self, stmt: &Stmt, succ: LiveNode)
pub fn propagate_through_exprs(&self, exprs: &[@Expr], succ: LiveNode)
-> LiveNode {
- do exprs.rev_iter().fold(succ) |succ, expr| {
+ exprs.rev_iter().fold(succ, |succ, expr| {
self.propagate_through_expr(*expr, succ)
- }
+ })
}
pub fn propagate_through_opt_expr(&self,
opt_expr: Option<@Expr>,
succ: LiveNode)
-> LiveNode {
- do opt_expr.iter().fold(succ) |succ, expr| {
+ opt_expr.iter().fold(succ, |succ, expr| {
self.propagate_through_expr(*expr, succ)
- }
+ })
}
pub fn propagate_through_expr(&self, expr: @Expr, succ: LiveNode)
// the construction of a closure itself is not important,
// but we have to consider the closed over variables.
let caps = self.ir.captures(expr);
- do caps.rev_iter().fold(succ) |succ, cap| {
+ caps.rev_iter().fold(succ, |succ, cap| {
self.init_from_succ(cap.ln, succ);
let var = self.variable(cap.var_nid, expr.span);
self.acc(cap.ln, var, ACC_READ | ACC_USE);
cap.ln
- }
+ })
})
}
ExprStruct(_, ref fields, with_expr) => {
let succ = self.propagate_through_opt_expr(with_expr, succ);
- do fields.rev_iter().fold(succ) |succ, field| {
+ fields.rev_iter().fold(succ, |succ, field| {
self.propagate_through_expr(field.expr, succ)
- }
+ })
}
ExprCall(f, ref args, _) => {
}
ExprInlineAsm(ref ia) => {
- let succ = do ia.inputs.rev_iter().fold(succ) |succ, &(_, expr)| {
+ let succ = ia.inputs.rev_iter().fold(succ, |succ, &(_, expr)| {
self.propagate_through_expr(expr, succ)
- };
- do ia.outputs.rev_iter().fold(succ) |succ, &(_, expr)| {
+ });
+ ia.outputs.rev_iter().fold(succ, |succ, &(_, expr)| {
// see comment on lvalues in
// propagate_through_lvalue_components()
let succ = self.write_lvalue(expr, succ, ACC_WRITE);
self.propagate_through_lvalue_components(expr, succ)
- }
+ })
}
ExprLogLevel |
// should not be live at this point.
debug!("check_local() with no initializer");
- do this.pat_bindings(local.pat) |ln, var, sp, id| {
+ this.pat_bindings(local.pat, |ln, var, sp, id| {
if !this.warn_about_unused(sp, id, ln, var) {
match this.live_on_exit(ln, var) {
None => { /* not live: good */ }
}
}
}
- }
+ })
}
}
}
fn check_arm(this: &mut Liveness, arm: &Arm) {
- do this.arm_pats_bindings(arm.pats) |ln, var, sp, id| {
+ this.arm_pats_bindings(arm.pats, |ln, var, sp, id| {
this.warn_about_unused(sp, id, ln, var);
- }
+ });
visit::walk_arm(this, arm, ());
}
pub fn warn_about_unused_args(&self, decl: &fn_decl, entry_ln: LiveNode) {
for arg in decl.inputs.iter() {
- do pat_util::pat_bindings(self.tcx.def_map, arg.pat)
- |_bm, p_id, sp, _n| {
+ pat_util::pat_bindings(self.tcx.def_map,
+ arg.pat,
+ |_bm, p_id, sp, _n| {
let var = self.variable(p_id, sp);
self.warn_about_unused(sp, p_id, entry_ln, var);
- }
+ })
}
}
pub fn warn_about_unused_or_dead_vars_in_pat(&self, pat: @Pat) {
- do self.pat_bindings(pat) |ln, var, sp, id| {
+ self.pat_bindings(pat, |ln, var, sp, id| {
if !self.warn_about_unused(sp, id, ln, var) {
self.warn_about_dead_assign(sp, id, ln, var);
}
- }
+ })
}
pub fn warn_about_unused(&self,
* into itself or not based on its type and annotation.
*/
- do pat_bindings(self.tcx.def_map, pat) |bm, id, _span, path| {
+ pat_bindings(self.tcx.def_map, pat, |bm, id, _span, path| {
let binding_moves = match bm {
BindByRef(_) => false,
BindByValue(_) => {
if binding_moves {
self.move_maps.moves_map.insert(id);
}
- }
+ })
}
pub fn use_receiver(&mut self,
// use the NodeId of their namesake in the first pattern.
pub fn pat_id_map(dm: resolve::DefMap, pat: &Pat) -> PatIdMap {
let mut map = HashMap::new();
- do pat_bindings(dm, pat) |_bm, p_id, _s, n| {
+ pat_bindings(dm, pat, |_bm, p_id, _s, n| {
map.insert(path_to_ident(n), p_id);
- };
+ });
map
}
pub fn pat_bindings(dm: resolve::DefMap,
pat: &Pat,
it: |BindingMode, NodeId, Span, &Path|) {
- do walk_pat(pat) |p| {
+ walk_pat(pat, |p| {
match p.node {
PatIdent(binding_mode, ref pth, _) if pat_is_binding(dm, p) => {
it(binding_mode, p.id, p.span, pth);
_ => {}
}
true
- };
+ });
}
pub fn pat_binding_ids(dm: resolve::DefMap, pat: &Pat) -> ~[NodeId] {
/// an ident, e.g. `foo`, or `Foo(foo)` or `foo @ Bar(*)`.
pub fn pat_contains_bindings(dm: resolve::DefMap, pat: &Pat) -> bool {
let mut contains_bindings = false;
- do walk_pat(pat) |p| {
+ walk_pat(pat, |p| {
if pat_is_binding(dm, p) {
contains_bindings = true;
false // there's at least one binding, can short circuit now.
} else {
true
}
- };
+ });
contains_bindings
}
fn visit_foreign_item(&mut self, foreign_item: @foreign_item,
context:ReducedGraphParent) {
- do self.resolver.build_reduced_graph_for_foreign_item(foreign_item,
- context) |r, c| {
+ self.resolver.build_reduced_graph_for_foreign_item(foreign_item,
+ context,
+ |r, c| {
let mut v = BuildReducedGraphVisitor{ resolver: r };
visit::walk_foreign_item(&mut v, foreign_item, c);
- }
+ })
}
fn visit_view_item(&mut self, view_item:&view_item, context:ReducedGraphParent) {
// If this is a newtype or unit-like struct, define a name
// in the value namespace as well
- do ctor_id.while_some |cid| {
+ ctor_id.while_some(|cid| {
name_bindings.define_value(DefStruct(local_def(cid)), sp,
is_public);
None
- }
+ });
// Record the def ID of this struct.
self.structs.insert(local_def(item.id));
let def = DefFn(local_def(foreign_item.id), unsafe_fn);
name_bindings.define_value(def, foreign_item.span, is_public);
- do self.with_type_parameter_rib(
- HasTypeParameters(
- generics, foreign_item.id, 0, NormalRibKind)) |this|
- {
- f(this, new_parent)
- }
+ self.with_type_parameter_rib(
+ HasTypeParameters(generics,
+ foreign_item.id,
+ 0,
+ NormalRibKind),
+ |this| f(this, new_parent));
}
foreign_item_static(_, m) => {
let def = DefStatic(local_def(foreign_item.id), m);
DefForeignMod(def_id) => {
// Foreign modules have no names. Recur and populate
// eagerly.
- do csearch::each_child_of_item(self.session.cstore,
- def_id)
- |def_like, child_ident, vis| {
+ csearch::each_child_of_item(self.session.cstore,
+ def_id,
+ |def_like,
+ child_ident,
+ vis| {
self.build_reduced_graph_for_external_crate_def(
root,
def_like,
child_ident,
vis)
- }
+ });
}
_ => {
let (child_name_bindings, new_parent) =
Some(def_id) => def_id,
};
- do csearch::each_child_of_item(self.session.cstore, def_id)
- |def_like, child_ident, visibility| {
+ csearch::each_child_of_item(self.session.cstore,
+ def_id,
+ |def_like, child_ident, visibility| {
debug!("(populating external module) ... found ident: {}",
token::ident_to_str(&child_ident));
self.build_reduced_graph_for_external_crate_def(module,
def_like,
child_ident,
visibility)
- }
+ });
module.populated = true
}
/// crate.
fn build_reduced_graph_for_external_crate(&mut self,
root: @mut Module) {
- do csearch::each_top_level_item_of_crate(self.session.cstore,
- root.def_id.unwrap().crate)
- |def_like, ident, visibility| {
+ csearch::each_top_level_item_of_crate(self.session.cstore,
+ root.def_id.unwrap().crate,
+ |def_like, ident, visibility| {
self.build_reduced_graph_for_external_crate_def(root,
def_like,
ident,
visibility)
- }
+ });
}
/// Creates and adds an import directive to the given module.
// n.b. the discr expr gets visted twice.
// but maybe it's okay since the first time will signal an
// error if there is one? -- tjc
- do self.with_type_parameter_rib(
- HasTypeParameters(
- generics, item.id, 0, NormalRibKind)) |this| {
+ self.with_type_parameter_rib(HasTypeParameters(generics,
+ item.id,
+ 0,
+ NormalRibKind),
+ |this| {
visit::walk_item(this, item, ());
- }
+ });
}
item_ty(_, ref generics) => {
- do self.with_type_parameter_rib
- (HasTypeParameters(generics, item.id, 0,
- NormalRibKind))
- |this| {
+ self.with_type_parameter_rib(HasTypeParameters(generics,
+ item.id,
+ 0,
+ NormalRibKind),
+ |this| {
visit::walk_item(this, item, ());
- }
+ });
}
item_impl(ref generics,
DlDef(DefSelfTy(item.id)));
// Create a new rib for the trait-wide type parameters.
- do self.with_type_parameter_rib
- (HasTypeParameters(generics, item.id, 0,
- NormalRibKind)) |this| {
-
+ self.with_type_parameter_rib(HasTypeParameters(generics,
+ item.id,
+ 0,
+ NormalRibKind),
+ |this| {
this.resolve_type_parameters(&generics.ty_params);
// Resolve derived traits.
match *method {
required(ref ty_m) => {
- do this.with_type_parameter_rib
+ this.with_type_parameter_rib
(HasTypeParameters(&ty_m.generics,
item.id,
generics.ty_params.len(),
- MethodRibKind(item.id, Required))) |this| {
+ MethodRibKind(item.id, Required)),
+ |this| {
// Resolve the method-specific type
// parameters.
}
this.resolve_type(&ty_m.decl.output);
- }
+ });
}
provided(m) => {
this.resolve_method(MethodRibKind(item.id,
}
}
}
- }
+ });
self.type_ribs.pop();
}
}
item_mod(ref module_) => {
- do self.with_scope(Some(item.ident)) |this| {
+ self.with_scope(Some(item.ident), |this| {
this.resolve_module(module_, item.span, item.ident,
item.id);
- }
+ });
}
item_foreign_mod(ref foreign_module) => {
- do self.with_scope(Some(item.ident)) |this| {
+ self.with_scope(Some(item.ident), |this| {
for foreign_item in foreign_module.items.iter() {
match foreign_item.node {
foreign_item_fn(_, ref generics) => {
}
}
}
- }
+ });
}
item_fn(ref fn_decl, _, _, ref generics, ref block) => {
self.label_ribs.push(function_label_rib);
// If this function has type parameters, add them now.
- do self.with_type_parameter_rib(type_parameters) |this| {
+ self.with_type_parameter_rib(type_parameters, |this| {
// Resolve the type parameters.
match type_parameters {
NoTypeParameters => {
this.resolve_block(block);
debug!("(resolving function) leaving function");
- }
+ });
self.label_ribs.pop();
self.value_ribs.pop();
}
// If applicable, create a rib for the type parameters.
- do self.with_type_parameter_rib(HasTypeParameters
- (generics, id, 0,
- OpaqueFunctionRibKind)) |this| {
-
+ self.with_type_parameter_rib(HasTypeParameters(generics,
+ id,
+ 0,
+ OpaqueFunctionRibKind),
+ |this| {
// Resolve the type parameters.
this.resolve_type_parameters(&generics.ty_params);
for field in fields.iter() {
this.resolve_type(&field.node.ty);
}
- }
+ });
}
// Does this really need to take a RibKind or is it always going
methods: &[@method]) {
// If applicable, create a rib for the type parameters.
let outer_type_parameter_count = generics.ty_params.len();
- do self.with_type_parameter_rib(HasTypeParameters
- (generics, id, 0,
- NormalRibKind)) |this| {
+ self.with_type_parameter_rib(HasTypeParameters(generics,
+ id,
+ 0,
+ NormalRibKind),
+ |this| {
// Resolve the type parameters.
this.resolve_type_parameters(&generics.ty_params);
Some(r) => { this.current_trait_refs = r; }
None => ()
}
- }
+ });
}
fn resolve_module(&mut self,
// user and one 'x' came from the macro.
fn binding_mode_map(&mut self, pat: @Pat) -> BindingMap {
let mut result = HashMap::new();
- do pat_bindings(self.def_map, pat) |binding_mode, _id, sp, path| {
+ pat_bindings(self.def_map, pat, |binding_mode, _id, sp, path| {
let name = mtwt_resolve(path_to_ident(path));
result.insert(name,
binding_info {span: sp,
binding_mode: binding_mode});
- }
+ });
return result;
}
}
}
- do bounds.as_ref().map |bound_vec| {
+ bounds.as_ref().map(|bound_vec| {
for bound in bound_vec.iter() {
self.resolve_type_parameter_bound(ty.id, bound);
}
- };
+ });
}
ty_closure(c) => {
- do c.bounds.as_ref().map |bounds| {
+ c.bounds.as_ref().map(|bounds| {
for bound in bounds.iter() {
self.resolve_type_parameter_bound(ty.id, bound);
}
- };
+ });
visit::walk_ty(self, ty, ());
}
// pattern that binds them
bindings_list: Option<@mut HashMap<Name,NodeId>>) {
let pat_id = pattern.id;
- do walk_pat(pattern) |pattern| {
+ walk_pat(pattern, |pattern| {
match pattern.node {
PatIdent(binding_mode, ref path, _)
if !path.global && path.segments.len() == 1 => {
}
}
true
- };
+ });
}
fn resolve_bare_identifier_pattern(&mut self, name: Ident)
}
ExprLoop(_, Some(label)) => {
- do self.with_label_rib |this| {
+ self.with_label_rib(|this| {
let def_like = DlDef(DefLabel(expr.id));
let rib = this.label_ribs[this.label_ribs.len() - 1];
// plain insert (no renaming)
rib.bindings.insert(label.name, def_like);
visit::walk_expr(this, expr, ());
- }
+ })
}
ExprForLoop(*) => fail!("non-desugared expr_for_loop"),
debug!("(recording def) recording {:?} for {:?}, last private {:?}",
def, node_id, lp);
self.last_private.insert(node_id, lp);
- do self.def_map.insert_or_update_with(node_id, def) |_, old_value| {
+ self.def_map.insert_or_update_with(node_id, def, |_, old_value| {
// Resolve appears to "resolve" the same ID multiple
// times, so here is a sanity check it at least comes to
// the same conclusion! - nmatsakis
self.session.bug(format!("node_id {:?} resolved first to {:?} \
and then {:?}", node_id, *old_value, def));
}
- };
+ });
}
fn enforce_default_binding_mode(&mut self,
bcx.val_to_str(val));
let _indenter = indenter();
- do m.map |br| {
+ m.map(|br| {
match br.pats[col].node {
ast::PatIdent(_, ref path, Some(inner)) => {
let pats = vec::append(
}
_ => (*br).clone(),
}
- }
+ })
}
fn assert_is_binding_or_wild(bcx: @mut Block, p: @ast::Pat) {
let _indenter = indenter();
// Collect all of the matches that can match against anything.
- let matches = do enter_match(bcx, dm, m, col, val) |p| {
+ let matches = enter_match(bcx, dm, m, col, val, |p| {
match p.node {
ast::PatWild | ast::PatWildMulti | ast::PatTup(_) => Some(~[]),
ast::PatIdent(_, _, None) if pat_is_binding(dm, p) => Some(~[]),
_ => None
}
- };
+ });
// Ok, now, this is pretty subtle. A "default" match is a match
// that needs to be considered if none of the actual checks on the
let tcx = bcx.tcx();
let dummy = @ast::Pat {id: 0, node: ast::PatWild, span: dummy_sp()};
let mut i = 0;
- do enter_match(bcx, tcx.def_map, m, col, val) |p| {
+ enter_match(bcx, tcx.def_map, m, col, val, |p| {
let answer = match p.node {
ast::PatEnum(*) |
ast::PatIdent(_, _, None) if pat_is_const(tcx.def_map, p) => {
};
i += 1;
answer
- }
+ })
}
fn enter_rec_or_struct<'r>(bcx: @mut Block,
let _indenter = indenter();
let dummy = @ast::Pat {id: 0, node: ast::PatWild, span: dummy_sp()};
- do enter_match(bcx, dm, m, col, val) |p| {
+ enter_match(bcx, dm, m, col, val, |p| {
match p.node {
ast::PatStruct(_, ref fpats, _) => {
let mut pats = ~[];
Some(vec::from_elem(fields.len(), dummy))
}
}
- }
+ })
}
fn enter_tup<'r>(bcx: @mut Block,
let _indenter = indenter();
let dummy = @ast::Pat {id: 0, node: ast::PatWild, span: dummy_sp()};
- do enter_match(bcx, dm, m, col, val) |p| {
+ enter_match(bcx, dm, m, col, val, |p| {
match p.node {
ast::PatTup(ref elts) => Some((*elts).clone()),
_ => {
Some(vec::from_elem(n_elts, dummy))
}
}
- }
+ })
}
fn enter_tuple_struct<'r>(bcx: @mut Block,
let _indenter = indenter();
let dummy = @ast::Pat {id: 0, node: ast::PatWild, span: dummy_sp()};
- do enter_match(bcx, dm, m, col, val) |p| {
+ enter_match(bcx, dm, m, col, val, |p| {
match p.node {
ast::PatEnum(_, Some(ref elts)) => Some((*elts).clone()),
_ => {
Some(vec::from_elem(n_elts, dummy))
}
}
- }
+ })
}
fn enter_box<'r>(bcx: @mut Block,
let _indenter = indenter();
let dummy = @ast::Pat {id: 0, node: ast::PatWild, span: dummy_sp()};
- do enter_match(bcx, dm, m, col, val) |p| {
+ enter_match(bcx, dm, m, col, val, |p| {
match p.node {
ast::PatBox(sub) => {
Some(~[sub])
Some(~[dummy])
}
}
- }
+ })
}
fn enter_uniq<'r>(bcx: @mut Block,
let _indenter = indenter();
let dummy = @ast::Pat {id: 0, node: ast::PatWild, span: dummy_sp()};
- do enter_match(bcx, dm, m, col, val) |p| {
+ enter_match(bcx, dm, m, col, val, |p| {
match p.node {
ast::PatUniq(sub) => {
Some(~[sub])
Some(~[dummy])
}
}
- }
+ })
}
fn enter_region<'r>(bcx: @mut Block,
let _indenter = indenter();
let dummy = @ast::Pat { id: 0, node: ast::PatWild, span: dummy_sp() };
- do enter_match(bcx, dm, m, col, val) |p| {
+ enter_match(bcx, dm, m, col, val, |p| {
match p.node {
ast::PatRegion(sub) => {
Some(~[sub])
Some(~[dummy])
}
}
- }
+ })
}
// Returns the options in one column of matches. An option is something that
val: ValueRef)
-> ExtractedBlock {
let _icx = push_ctxt("match::extract_variant_args");
- let args = do vec::from_fn(adt::num_args(repr, disr_val)) |i| {
+ let args = vec::from_fn(adt::num_args(repr, disr_val), |i| {
adt::trans_field_ptr(bcx, repr, val, disr_val, i)
- };
+ });
ExtractedBlock { vals: args, bcx: bcx }
}
let (bcx, base, len) = vec_datum.get_vec_base_and_len(bcx, pat_span, pat_id, 0);
let vt = tvec::vec_types(bcx, node_id_type(bcx, pat_id));
- let mut elems = do vec::from_fn(elem_count) |i| {
+ let mut elems = vec::from_fn(elem_count, |i| {
match slice {
None => GEPi(bcx, base, [i]),
Some(n) if i < n => GEPi(bcx, base, [i]),
}
_ => unsafe { llvm::LLVMGetUndef(vt.llunit_ty.to_ref()) }
}
- };
+ });
if slice.is_some() {
let n = slice.unwrap();
let slice_byte_offset = Mul(bcx, vt.llunit_size, C_uint(bcx.ccx(), n));
m: &[Match],
col: uint)
-> bool {
- do m.iter().any |br| {
+ m.iter().any(|br| {
let pat_id = br.pats[col].id;
let key = root_map_key {id: pat_id, derefs: 0u };
bcx.ccx().maps.root_map.contains_key(&key)
- }
+ })
}
fn root_pats_as_necessary(mut bcx: @mut Block,
// matches may be wildcards like _ or identifiers).
macro_rules! any_pat (
($m:expr, $pattern:pat) => (
- do ($m).iter().any |br| {
+ ($m).iter().any(|br| {
match br.pats[col].node {
$pattern => true,
_ => false
}
- }
+ })
)
)
}
fn any_tuple_struct_pat(bcx: @mut Block, m: &[Match], col: uint) -> bool {
- do m.iter().any |br| {
+ m.iter().any(|br| {
let pat = br.pats[col];
match pat.node {
ast::PatEnum(_, Some(_)) => {
}
_ => false
}
- }
+ })
}
trait CustomFailureHandler {
let datum = Datum {val: llval, ty: binding_info.ty,
mode: ByRef(ZeroMem)};
bcx = datum.store_to(bcx, INIT, lldest);
- do opt_temp_cleanups.mutate |temp_cleanups| {
+ opt_temp_cleanups.mutate(|temp_cleanups| {
add_clean_temp_mem(bcx, lldest, binding_info.ty);
temp_cleanups.push(lldest);
temp_cleanups
- };
+ });
}
TrByRef => {}
}
bcx = insert_lllocals(bcx, data.bindings_map, false);
let val = unpack_result!(bcx, {
- do with_scope_result(bcx, guard_expr.info(),
- "guard") |bcx| {
+ with_scope_result(bcx, guard_expr.info(), "guard", |bcx| {
expr::trans_to_datum(bcx, guard_expr).to_result()
- }
+ })
});
let val = bool_to_i1(bcx, val);
revoke_clean(bcx, *llval);
}
- return do with_cond(bcx, Not(bcx, val)) |bcx| {
+ return with_cond(bcx, Not(bcx, val), |bcx| {
// Guard does not match: free the values we copied,
// and remove all bindings from the lllocals table
let bcx = drop_bindings(bcx, data);
compile_submatch(bcx, m, vals, chk);
bcx
- };
+ });
fn drop_bindings(bcx: @mut Block, data: &ArmData) -> @mut Block {
let mut bcx = bcx;
Some(ref rec_fields) => {
let pat_ty = node_id_type(bcx, pat_id);
let pat_repr = adt::represent_type(bcx.ccx(), pat_ty);
- do expr::with_field_tys(tcx, pat_ty, None) |discr, field_tys| {
+ expr::with_field_tys(tcx, pat_ty, None, |discr, field_tys| {
let rec_vals = rec_fields.map(|field_name| {
let ix = ty::field_idx_strict(tcx, field_name.name, field_tys);
adt::trans_field_ptr(bcx, pat_repr, val, discr, ix)
enter_rec_or_struct(bcx, dm, m, col, *rec_fields, val),
vec::append(rec_vals, vals_left),
chk);
- }
+ });
return;
}
None => {}
ty::ty_tup(ref elts) => elts.len(),
_ => ccx.sess.bug("non-tuple type in tuple pattern")
};
- let tup_vals = do vec::from_fn(n_tup_elts) |i| {
+ let tup_vals = vec::from_fn(n_tup_elts, |i| {
adt::trans_field_ptr(bcx, tup_repr, val, 0, i)
- };
+ });
compile_submatch(bcx, enter_tup(bcx, dm, m, col, val, n_tup_elts),
vec::append(tup_vals, vals_left), chk);
return;
}
let struct_repr = adt::represent_type(bcx.ccx(), struct_ty);
- let llstructvals = do vec::from_fn(struct_element_count) |i| {
+ let llstructvals = vec::from_fn(struct_element_count, |i| {
adt::trans_field_ptr(bcx, struct_repr, val, 0, i)
- };
+ });
compile_submatch(bcx,
enter_tuple_struct(bcx, dm, m, col, val,
struct_element_count),
compare => {
let t = node_id_type(bcx, pat_id);
let Result {bcx: after_cx, val: matches} = {
- do with_scope_result(bcx, None,
- "compaReScope") |bcx| {
+ with_scope_result(bcx, None, "compaReScope", |bcx| {
match trans_opt(bcx, opt) {
single_result(
Result {bcx, val}) => {
rslt(bcx, And(bcx, llge, llle))
}
}
- }
+ })
};
bcx = sub_block(after_cx, "compare_next");
CondBr(after_cx, matches, opt_cx.llbb, bcx.llbb);
}
compare_vec_len => {
let Result {bcx: after_cx, val: matches} = {
- do with_scope_result(bcx, None,
- "compare_vec_len_scope") |bcx| {
+ with_scope_result(bcx,
+ None,
+ "compare_vec_len_scope",
+ |bcx| {
match trans_opt(bcx, opt) {
single_result(
Result {bcx, val}) => {
rslt(bcx, And(bcx, llge, llle))
}
}
- }
+ })
};
bcx = sub_block(after_cx, "compare_vec_len_next");
arms: &[ast::Arm],
dest: Dest) -> @mut Block {
let _icx = push_ctxt("match::trans_match");
- do with_scope(bcx, match_expr.info(), "match") |bcx| {
+ with_scope(bcx, match_expr.info(), "match", |bcx| {
trans_match_inner(bcx, discr_expr, arms, dest)
- }
+ })
}
fn create_bindings_map(bcx: @mut Block, pat: @ast::Pat) -> BindingsMap {
let ccx = bcx.ccx();
let tcx = bcx.tcx();
let mut bindings_map = HashMap::new();
- do pat_bindings(tcx.def_map, pat) |bm, p_id, span, path| {
+ pat_bindings(tcx.def_map, pat, |bm, p_id, span, path| {
let ident = path_to_ident(path);
let variable_ty = node_id_type(bcx, p_id);
let llvariable_ty = type_of::type_of(ccx, variable_ty);
span: span,
ty: variable_ty
});
- }
+ });
return bindings_map;
}
// create dummy memory for the variables if we have no
// value to store into them immediately
let tcx = bcx.tcx();
- do pat_bindings(tcx.def_map, pat) |_, p_id, _, path| {
+ pat_bindings(tcx.def_map, pat, |_, p_id, _, path| {
bcx = mk_binding_alloca(
bcx, p_id, path, BindLocal,
|bcx, var_ty, llval| { zero_mem(bcx, llval, var_ty); bcx });
- }
+ });
bcx
}
}
let tcx = bcx.tcx();
let pat_ty = node_id_type(bcx, pat.id);
let pat_repr = adt::represent_type(bcx.ccx(), pat_ty);
- do expr::with_field_tys(tcx, pat_ty, None) |discr, field_tys| {
+ expr::with_field_tys(tcx, pat_ty, None, |discr, field_tys| {
for f in fields.iter() {
let ix = ty::field_idx_strict(tcx, f.ident.name, field_tys);
let fldptr = adt::trans_field_ptr(bcx, pat_repr, val,
discr, ix);
bcx = bind_irrefutable_pat(bcx, f.pat, fldptr, binding_mode);
}
- }
+ })
}
ast::PatTup(ref elems) => {
let repr = adt::represent_node(bcx, pat.id);
}
ty::ty_struct(def_id, ref substs) => {
let fields = ty::lookup_struct_fields(cx.tcx, def_id);
- let mut ftys = do fields.map |field| {
+ let mut ftys = fields.map(|field| {
ty::lookup_field_type(cx.tcx, def_id, field.id, substs)
- };
+ });
let packed = ty::lookup_packed(cx.tcx, def_id);
let dtor = ty::ty_dtor(cx.tcx, def_id).has_drop_flag();
if dtor { ftys.push(ty::mk_bool()); }
}
fn get_cases(tcx: ty::ctxt, def_id: ast::DefId, substs: &ty::substs) -> ~[Case] {
- do ty::enum_variants(tcx, def_id).map |vi| {
- let arg_tys = do vi.args.map |&raw_ty| {
+ ty::enum_variants(tcx, def_id).map(|vi| {
+ let arg_tys = vi.args.map(|&raw_ty| {
ty::subst(tcx, substs, raw_ty)
- };
+ });
Case { discr: vi.disr_val, tys: arg_tys }
- }
+ })
}
let ccx = bcx.ccx();
let val = if needs_cast {
- let fields = do st.fields.map |&ty| {
- type_of::type_of(ccx, ty)
- };
+ let fields = st.fields.map(|&ty| type_of::type_of(ccx, ty));
let real_ty = Type::struct_(fields, st.packed);
PointerCast(bcx, val, real_ty.ptr_to())
} else {
C_struct(build_const_struct(ccx, nonnull, vals), false)
} else {
assert_eq!(vals.len(), 0);
- let vals = do nonnull.fields.iter().enumerate().map |(i, &ty)| {
+ let vals = nonnull.fields.iter().enumerate().map(|(i, &ty)| {
let llty = type_of::sizing_type_of(ccx, ty);
if i == ptrfield { C_null(llty) } else { C_undef(llty) }
- }.collect::<~[ValueRef]>();
+ }).collect::<~[ValueRef]>();
C_struct(build_const_struct(ccx, nonnull, vals), false)
}
}
let mut output_types = ~[];
// Prepare the output operands
- let outputs = do ia.outputs.map |&(c, out)| {
+ let outputs = ia.outputs.map(|&(c, out)| {
constraints.push(c);
let out_datum = unpack_datum!(bcx, trans_to_datum(bcx, out));
output_types.push(type_of(bcx.ccx(), out_datum.ty));
out_datum.val
- };
+ });
for c in cleanups.iter() {
revoke_clean(bcx, *c);
cleanups.clear();
// Now the input operands
- let inputs = do ia.inputs.map |&(c, input)| {
+ let inputs = ia.inputs.map(|&(c, input)| {
constraints.push(c);
unpack_result!(bcx, {
&mut cleanups,
callee::DontAutorefArg)
})
-
- };
+ });
for c in cleanups.iter() {
revoke_clean(bcx, *c);
ast::asm_intel => lib::llvm::AD_Intel
};
- let r = do ia.asm.with_c_str |a| {
- do constraints.with_c_str |c| {
+ let r = ia.asm.with_c_str(|a| {
+ constraints.with_c_str(|c| {
InlineAsmCall(bcx, a, c, inputs, output_type, ia.volatile, ia.alignstack, dialect)
- }
- };
+ })
+ });
// Again, based on how many outputs we have
if numOutputs == 1 {
local_data_key!(task_local_insn_key: ~[&'static str])
pub fn with_insn_ctxt(blk: |&[&'static str]|) {
- do local_data::get(task_local_insn_key) |c| {
+ local_data::get(task_local_insn_key, |c| {
match c {
Some(ctx) => blk(*ctx),
None => ()
}
- }
+ })
}
pub fn init_insn_ctxt() {
#[unsafe_destructor]
impl Drop for _InsnCtxt {
fn drop(&mut self) {
- do local_data::modify(task_local_insn_key) |c| {
- do c.map |mut ctx| {
+ local_data::modify(task_local_insn_key, |c| {
+ c.map(|mut ctx| {
ctx.pop();
ctx
- }
- }
+ })
+ })
}
}
pub fn push_ctxt(s: &'static str) -> _InsnCtxt {
debug!("new InsnCtxt: {}", s);
- do local_data::modify(task_local_insn_key) |c| {
- do c.map |mut ctx| {
+ local_data::modify(task_local_insn_key, |c| {
+ c.map(|mut ctx| {
ctx.push(s);
ctx
- }
- }
+ })
+ });
_InsnCtxt { _x: () }
}
// only use this for foreign function ABIs and glue, use `decl_rust_fn` for Rust functions
pub fn decl_fn(llmod: ModuleRef, name: &str, cc: lib::llvm::CallConv, ty: Type) -> ValueRef {
- let llfn: ValueRef = do name.with_c_str |buf| {
+ let llfn: ValueRef = name.with_c_str(|buf| {
unsafe {
llvm::LLVMGetOrInsertFunction(llmod, buf, ty.to_ref())
}
- };
+ });
lib::llvm::SetFunctionCallConv(llfn, cc);
// Function addresses in Rust are never significant, allowing functions to be merged.
None => ()
}
let f = decl_rust_fn(ccx, inputs, output, name);
- do csearch::get_item_attrs(ccx.tcx.cstore, did) |meta_items| {
+ csearch::get_item_attrs(ccx.tcx.cstore, did, |meta_items| {
set_llvm_fn_attrs(meta_items.iter().map(|&x| attr::mk_attr(x)).to_owned_vec(), f)
- }
+ });
ccx.externs.insert(name.to_owned(), f);
f
}
None => ()
}
unsafe {
- let c = do name.with_c_str |buf| {
+ let c = name.with_c_str(|buf| {
llvm::LLVMAddGlobal(llmod, ty.to_ref(), buf)
- };
+ });
externs.insert(name.to_owned(), c);
return c;
}
}
pub fn set_no_split_stack(f: ValueRef) {
- do "no-split-stack".with_c_str |buf| {
+ "no-split-stack".with_c_str(|buf| {
unsafe { llvm::LLVMAddFunctionAttrString(f, buf); }
- }
+ })
}
// Double-check that we never ask LLVM to declare the same symbol twice. It
// Structural comparison: a rather involved form of glue.
pub fn maybe_name_value(cx: &CrateContext, v: ValueRef, s: &str) {
if cx.sess.opts.save_temps {
- do s.with_c_str |buf| {
+ s.with_c_str(|buf| {
unsafe {
llvm::LLVMSetValueName(v, buf)
}
- }
+ })
}
}
match ty::get(t).sty {
ty::ty_struct(*) => {
let repr = adt::represent_type(cx.ccx(), t);
- do expr::with_field_tys(cx.tcx(), t, None) |discr, field_tys| {
+ expr::with_field_tys(cx.tcx(), t, None, |discr, field_tys| {
for (i, field_ty) in field_tys.iter().enumerate() {
let llfld_a = adt::trans_field_ptr(cx, repr, av, discr, i);
cx = f(cx, llfld_a, field_ty.mt.ty);
}
- }
+ })
}
ty::ty_estr(ty::vstore_fixed(_)) |
ty::ty_evec(_, ty::vstore_fixed(_)) => {
ty_to_str(cx.ccx().tcx, rhs_t));
}
};
- do with_cond(cx, is_zero) |bcx| {
+ with_cond(cx, is_zero, |bcx| {
controlflow::trans_fail(bcx, Some(span), text)
- }
+ })
}
pub fn null_env_ptr(ccx: &CrateContext) -> ValueRef {
pub fn have_cached_lpad(bcx: @mut Block) -> bool {
let mut res = false;
- do in_lpad_scope_cx(bcx) |inf| {
+ in_lpad_scope_cx(bcx, |inf| {
match inf.landing_pad {
Some(_) => res = true,
None => res = false
}
- }
+ });
return res;
}
let mut cached = None;
let mut pad_bcx = bcx; // Guaranteed to be set below
- do in_lpad_scope_cx(bcx) |inf| {
+ in_lpad_scope_cx(bcx, |inf| {
// If there is a valid landing pad still around, use it
match inf.landing_pad {
Some(target) => cached = Some(target),
inf.landing_pad = Some(pad_bcx.llbb);
}
}
- }
+ });
// Can't return from block above
match cached { Some(b) => return b, None => () }
// The landing pad return type (the type being propagated). Not sure what
opt_node_info: Option<NodeInfo>)
-> @mut Block {
unsafe {
- let llbb = do name.with_c_str |buf| {
+ let llbb = name.with_c_str(|buf| {
llvm::LLVMAppendBasicBlockInContext(cx.ccx.llcx, cx.llfn, buf)
- };
+ });
let bcx = @mut Block::new(llbb,
parent,
is_lpad,
pub fn mk_staticallocas_basic_block(llfn: ValueRef) -> BasicBlockRef {
unsafe {
let cx = task_llcx();
- do "static_allocas".with_c_str | buf| {
+ "static_allocas".with_c_str(|buf| {
llvm::LLVMAppendBasicBlockInContext(cx, llfn, buf)
- }
+ })
}
}
pub fn mk_return_basic_block(llfn: ValueRef) -> BasicBlockRef {
unsafe {
let cx = task_llcx();
- do "return".with_c_str |buf| {
+ "return".with_c_str(|buf| {
llvm::LLVMAppendBasicBlockInContext(cx, llfn, buf)
- }
+ })
}
}
// Return an array containing the ValueRefs that we get from
// llvm::LLVMGetParam for each argument.
- do vec::from_fn(args.len()) |i| {
+ vec::from_fn(args.len(), |i| {
unsafe { llvm::LLVMGetParam(cx.llfn, cx.arg_pos(i) as c_uint) }
- }
+ })
}
pub fn copy_args_to_allocas(fcx: @mut FunctionContext,
llfndecl: ValueRef)
{
// Translate variant arguments to function arguments.
- let fn_args = do args.map |varg| {
+ let fn_args = args.map(|varg| {
ast::arg {
ty: (*varg.ty()).clone(),
pat: ast_util::ident_to_pat(
special_idents::arg),
id: varg.id(),
}
- };
+ });
let no_substs: &[ty::t] = [];
let ty_param_substs = match param_substs {
"main"
};
let llfn = decl_cdecl_fn(ccx.llmod, main_name, llfty);
- let llbb = do "top".with_c_str |buf| {
+ let llbb = "top".with_c_str(|buf| {
unsafe {
llvm::LLVMAppendBasicBlockInContext(ccx.llcx, llfn, buf)
}
- };
+ });
let bld = ccx.builder.B;
unsafe {
llvm::LLVMPositionBuilderAtEnd(bld, llbb);
};
let args = {
- let opaque_rust_main = do "rust_main".with_c_str |buf| {
+ let opaque_rust_main = "rust_main".with_c_str(|buf| {
llvm::LLVMBuildPointerCast(bld, rust_main, Type::i8p().to_ref(), buf)
- };
+ });
~[
C_null(Type::opaque_box(ccx).ptr_to()),
(rust_main, args)
};
- let result = do args.as_imm_buf |buf, len| {
+ let result = args.as_imm_buf(|buf, len| {
llvm::LLVMBuildCall(bld, start_fn, buf, len as c_uint, noname())
- };
+ });
llvm::LLVMBuildRet(bld, result);
}
unsafe {
let llty = llvm::LLVMTypeOf(v);
- let g = do sym.with_c_str |buf| {
+ let g = sym.with_c_str(|buf| {
llvm::LLVMAddGlobal(ccx.llmod, llty, buf)
- };
+ });
if !ccx.reachable.contains(&id) {
lib::llvm::SetLinkage(g, lib::llvm::InternalLinkage);
match (attr::first_attr_value_str_by_name(i.attrs, "link_section")) {
Some(sect) => unsafe {
- do sect.with_c_str |buf| {
+ sect.with_c_str(|buf| {
llvm::LLVMSetSection(v, buf);
- }
+ })
},
None => ()
}
if attr::contains_name(ni.attrs, "crate_map") {
if *ccx.sess.building_library {
let s = "_rust_crate_map_toplevel";
- let g = unsafe { do s.with_c_str |buf| {
- let ty = type_of(ccx, ty);
- llvm::LLVMAddGlobal(ccx.llmod,
- ty.to_ref(), buf)
- } };
+ let g = unsafe {
+ s.with_c_str(|buf| {
+ let ty = type_of(ccx, ty);
+ llvm::LLVMAddGlobal(ccx.llmod,
+ ty.to_ref(),
+ buf)
+ })
+ };
lib::llvm::SetLinkage(g,
lib::llvm::ExternalWeakLinkage);
g
} else {
let ident = foreign::link_name(ccx, ni);
unsafe {
- do ident.with_c_str |buf| {
+ ident.with_c_str(|buf| {
let ty = type_of(ccx, ty);
llvm::LLVMAddGlobal(ccx.llmod,
ty.to_ref(), buf)
- }
+ })
}
}
}
}
let gc_metadata_name = ~"_gc_module_metadata_" + llmod_id;
- let gc_metadata = do gc_metadata_name.with_c_str |buf| {
+ let gc_metadata = gc_metadata_name.with_c_str(|buf| {
unsafe {
llvm::LLVMAddGlobal(ccx.llmod, Type::i32().to_ref(), buf)
}
- };
+ });
unsafe {
llvm::LLVMSetGlobalConstant(gc_metadata, True);
lib::llvm::SetLinkage(gc_metadata, lib::llvm::ExternalLinkage);
let str_slice_type = Type::struct_([Type::i8p(), ccx.int_type], false);
let elttype = Type::struct_([str_slice_type, ccx.int_type], false);
let maptype = Type::array(&elttype, ccx.module_data.len() as u64);
- let map = do "_rust_mod_map".with_c_str |buf| {
+ let map = "_rust_mod_map".with_c_str(|buf| {
unsafe {
llvm::LLVMAddGlobal(ccx.llmod, maptype.to_ref(), buf)
}
- };
+ });
lib::llvm::SetLinkage(map, lib::llvm::InternalLinkage);
let mut elts: ~[ValueRef] = ~[];
slicetype, // sub crate-maps
int_type.ptr_to(), // event loop factory
], false);
- let map = do sym_name.with_c_str |buf| {
+ let map = sym_name.with_c_str(|buf| {
unsafe {
llvm::LLVMAddGlobal(llmod, maptype.to_ref(), buf)
}
- };
+ });
// On windows we'd like to export the toplevel cratemap
// such that we can find it from libstd.
if targ_cfg.os == OsWin32 && "toplevel" == mapname {
cdata.name,
cstore::get_crate_vers(cstore, i),
cstore::get_crate_hash(cstore, i));
- let cr = do nm.with_c_str |buf| {
+ let cr = nm.with_c_str(|buf| {
unsafe {
llvm::LLVMAddGlobal(ccx.llmod, ccx.int_type.to_ref(), buf)
}
- };
+ });
subcrates.push(p2i(ccx, cr));
i += 1;
}
match ccx.tcx.lang_items.event_loop_factory() {
Some(did) => unsafe {
let name = csearch::get_symbol(ccx.sess.cstore, did);
- let global = do name.with_c_str |buf| {
+ let global = name.with_c_str(|buf| {
llvm::LLVMAddGlobal(ccx.llmod, ccx.int_type.to_ref(), buf)
- };
+ });
global
},
None => C_null(ccx.int_type.ptr_to())
};
unsafe {
let maptype = Type::array(&ccx.int_type, subcrates.len() as u64);
- let vec_elements = do "_crate_map_child_vectors".with_c_str |buf| {
+ let vec_elements = "_crate_map_child_vectors".with_c_str(|buf| {
llvm::LLVMAddGlobal(ccx.llmod, maptype.to_ref(), buf)
- };
+ });
lib::llvm::SetLinkage(vec_elements, lib::llvm::InternalLinkage);
llvm::LLVMSetInitializer(vec_elements, C_array(ccx.int_type, subcrates));
let encode_parms = crate_ctxt_to_encode_parms(cx, encode_inlined_item);
let llmeta = C_bytes(encoder::encode_metadata(encode_parms, crate));
let llconst = C_struct([llmeta], false);
- let mut llglobal = do "rust_metadata".with_c_str |buf| {
+ let mut llglobal = "rust_metadata".with_c_str(|buf| {
unsafe {
llvm::LLVMAddGlobal(cx.llmod, val_ty(llconst).to_ref(), buf)
}
- };
+ });
unsafe {
llvm::LLVMSetInitializer(llglobal, llconst);
- do cx.sess.targ_cfg.target_strs.meta_sect_name.with_c_str |buf| {
+ cx.sess.targ_cfg.target_strs.meta_sect_name.with_c_str(|buf| {
llvm::LLVMSetSection(llglobal, buf)
- };
+ });
lib::llvm::SetLinkage(llglobal, lib::llvm::InternalLinkage);
let t_ptr_i8 = Type::i8p();
llglobal = llvm::LLVMConstBitCast(llglobal, t_ptr_i8.to_ref());
- let llvm_used = do "llvm.used".with_c_str |buf| {
+ let llvm_used = "llvm.used".with_c_str(|buf| {
llvm::LLVMAddGlobal(cx.llmod, Type::array(&t_ptr_i8, 1).to_ref(), buf)
- };
+ });
lib::llvm::SetLinkage(llvm_used, lib::llvm::AppendingLinkage);
llvm::LLVMSetInitializer(llvm_used, C_array(t_ptr_i8, [llglobal]));
}
pub fn write_abi_version(ccx: &mut CrateContext) {
unsafe {
let llval = C_uint(ccx, abi::abi_version);
- let llglobal = do "rust_abi_version".with_c_str |buf| {
+ let llglobal = "rust_abi_version".with_c_str(|buf| {
llvm::LLVMAddGlobal(ccx.llmod, val_ty(llval).to_ref(), buf)
- };
+ });
llvm::LLVMSetInitializer(llglobal, llval);
llvm::LLVMSetGlobalConstant(llglobal, True);
}
let maptype = val_ty(ccx.crate_map).to_ref();
- do "__rust_crate_map_toplevel".with_c_str |buf| {
+ "__rust_crate_map_toplevel".with_c_str(|buf| {
unsafe {
llvm::LLVMAddAlias(ccx.llmod, maptype,
ccx.crate_map, buf);
}
- }
+ })
}
glue::emit_tydescs(ccx);
println!("n_inlines: {}", ccx.stats.n_inlines);
println!("n_closures: {}", ccx.stats.n_closures);
println("fn stats:");
- do sort::quick_sort(ccx.stats.fn_stats) |&(_, _, insns_a), &(_, _, insns_b)| {
+ sort::quick_sort(ccx.stats.fn_stats,
+ |&(_, _, insns_a), &(_, _, insns_b)| {
insns_a > insns_b
- }
+ });
for tuple in ccx.stats.fn_stats.iter() {
match *tuple {
(ref name, ms, insns) => {
self.ccx.stats.n_llvm_insns += 1;
}
if self.ccx.sess.count_llvm_insns() {
- do base::with_insn_ctxt |v| {
+ base::with_insn_ctxt(|v| {
let h = &mut self.ccx.stats.llvm_insns;
// Build version of path with cycles removed.
_ => 0u
};
h.insert(s, n+1u);
- }
+ })
}
}
if name.is_empty() {
llvm::LLVMBuildAlloca(self.llbuilder, ty.to_ref(), noname())
} else {
- do name.with_c_str |c| {
+ name.with_c_str(|c| {
llvm::LLVMBuildAlloca(self.llbuilder, ty.to_ref(), c)
- }
+ })
}
}
}
let min = llvm::LLVMConstInt(t, lo, signed);
let max = llvm::LLVMConstInt(t, hi, signed);
- do [min, max].as_imm_buf |ptr, len| {
+ [min, max].as_imm_buf(|ptr, len| {
llvm::LLVMSetMetadata(value, lib::llvm::MD_range as c_uint,
llvm::LLVMMDNodeInContext(self.ccx.llcx,
ptr, len as c_uint));
- }
+ })
}
value
}
self.inbounds_gep(base, small_vec.slice(0, ixs.len()))
} else {
- let v = do ixs.iter().map |i| { C_i32(*i as i32) }.collect::<~[ValueRef]>();
+ let v = ixs.iter().map(|i| C_i32(*i as i32)).collect::<~[ValueRef]>();
self.count_insn("gepi");
self.inbounds_gep(base, v)
}
let sanitized = text.replace("$", "");
let comment_text = format!("\\# {}", sanitized.replace("\n", "\n\t# "));
self.count_insn("inlineasm");
- let asm = do comment_text.with_c_str |c| {
+ let asm = comment_text.with_c_str(|c| {
unsafe {
llvm::LLVMConstInlineAsm(Type::func([], &Type::void()).to_ref(),
c, noname(), False, False)
}
- };
+ });
self.call(asm, [], []);
}
}
let alignstack = if alignstack { lib::llvm::True }
else { lib::llvm::False };
- let argtys = do inputs.map |v| {
+ let argtys = inputs.map(|v| {
debug!("Asm Input Type: {:?}", self.ccx.tn.val_to_str(*v));
val_ty(*v)
- };
+ });
debug!("Asm Output Type: {:?}", self.ccx.tn.type_to_str(output));
let fty = Type::func(argtys, &output);
let BB: BasicBlockRef = llvm::LLVMGetInsertBlock(self.llbuilder);
let FN: ValueRef = llvm::LLVMGetBasicBlockParent(BB);
let M: ModuleRef = llvm::LLVMGetGlobalParent(FN);
- let T: ValueRef = do "llvm.trap".with_c_str |buf| {
+ let T: ValueRef = "llvm.trap".with_c_str(|buf| {
llvm::LLVMGetNamedFunction(M, buf)
- };
+ });
assert!((T as int != 0));
let args: &[ValueRef] = [];
self.count_insn("trap");
*/
- do base::with_scope_result(in_cx, call_info, "call") |cx| {
+ base::with_scope_result(in_cx, call_info, "call", |cx| {
let callee = get_callee(cx);
let mut bcx = callee.bcx;
let ccx = cx.ccx();
}
rslt(bcx, llresult)
- }
+ })
}
pub enum CallArgs<'self> {
ast::OwnedSigil | ast::ManagedSigil => {
let box_cell_v = GEPi(cx, v, [0u, abi::fn_field_box]);
let box_ptr_v = Load(cx, box_cell_v);
- do with_cond(cx, IsNotNull(cx, box_ptr_v)) |bcx| {
+ with_cond(cx, IsNotNull(cx, box_ptr_v), |bcx| {
let closure_ty = ty::mk_opaque_closure_ptr(tcx, sigil);
glue_fn(bcx, box_cell_v, closure_ty)
- }
+ })
}
}
}
}
let ccx = bcx.ccx();
- do with_cond(bcx, IsNotNull(bcx, cbox)) |bcx| {
+ with_cond(bcx, IsNotNull(bcx, cbox), |bcx| {
// Load the type descr found in the cbox
let lltydescty = ccx.tydesc_type.ptr_to();
let cbox = Load(bcx, cbox);
glue::trans_exchange_free(bcx, cbox);
bcx
- }
+ })
}
debug!("add_clean({}, {}, {})", bcx.to_str(), bcx.val_to_str(val), t.repr(bcx.tcx()));
let cleanup_type = cleanup_type(bcx.tcx(), t);
- do in_scope_cx(bcx, None) |scope_info| {
+ in_scope_cx(bcx, None, |scope_info| {
scope_info.cleanups.push(clean(@TypeDroppingCleanupFunction {
val: val,
t: t,
} as @CleanupFunction,
cleanup_type));
grow_scope_clean(scope_info);
- }
+ })
}
pub fn add_clean_temp_immediate(cx: @mut Block, val: ValueRef, ty: ty::t) {
cx.to_str(), cx.val_to_str(val),
ty.repr(cx.tcx()));
let cleanup_type = cleanup_type(cx.tcx(), ty);
- do in_scope_cx(cx, None) |scope_info| {
+ in_scope_cx(cx, None, |scope_info| {
scope_info.cleanups.push(clean_temp(val,
@ImmediateTypeDroppingCleanupFunction {
val: val,
} as @CleanupFunction,
cleanup_type));
grow_scope_clean(scope_info);
- }
+ })
}
pub fn add_clean_temp_mem(bcx: @mut Block, val: ValueRef, t: ty::t) {
bcx.to_str(), bcx.val_to_str(val),
t.repr(bcx.tcx()));
let cleanup_type = cleanup_type(bcx.tcx(), t);
- do in_scope_cx(bcx, scope_id) |scope_info| {
+ in_scope_cx(bcx, scope_id, |scope_info| {
scope_info.cleanups.push(clean_temp(val,
@TypeDroppingCleanupFunction {
val: val,
} as @CleanupFunction,
cleanup_type));
grow_scope_clean(scope_info);
- }
+ })
}
pub fn add_clean_return_to_mut(bcx: @mut Block,
scope_id: ast::NodeId,
bcx.to_str(),
bcx.val_to_str(frozen_val_ref),
bcx.val_to_str(bits_val_ref));
- do in_scope_cx(bcx, Some(scope_id)) |scope_info| {
+ in_scope_cx(bcx, Some(scope_id), |scope_info| {
scope_info.cleanups.push(clean_temp(
frozen_val_ref,
@WriteGuardReleasingCleanupFunction {
} as @CleanupFunction,
normal_exit_only));
grow_scope_clean(scope_info);
- }
+ })
}
pub fn add_clean_free(cx: @mut Block, ptr: ValueRef, heap: heap) {
let free_fn = match heap {
} as @CleanupFunction
}
};
- do in_scope_cx(cx, None) |scope_info| {
+ in_scope_cx(cx, None, |scope_info| {
scope_info.cleanups.push(clean_temp(ptr,
free_fn,
normal_exit_and_unwind));
grow_scope_clean(scope_info);
- }
+ })
}
// Note that this only works for temporaries. We should, at some point, move
// this will be more involved. For now, we simply zero out the local, and the
// drop glue checks whether it is zero.
pub fn revoke_clean(cx: @mut Block, val: ValueRef) {
- do in_scope_cx(cx, None) |scope_info| {
+ in_scope_cx(cx, None, |scope_info| {
let cleanup_pos = scope_info.cleanups.iter().position(
|cu| match *cu {
clean_temp(v, _, _) if v == val => true,
scope_info.cleanups.len()));
shrink_scope_clean(scope_info, *i);
}
- }
+ })
}
pub fn block_cleanups(bcx: &mut Block) -> ~[cleanup] {
pub fn C_floating(s: &str, t: Type) -> ValueRef {
unsafe {
- do s.with_c_str |buf| {
- llvm::LLVMConstRealOfString(t.to_ref(), buf)
- }
+ s.with_c_str(|buf| llvm::LLVMConstRealOfString(t.to_ref(), buf))
}
}
None => ()
}
- let sc = do s.as_imm_buf |buf, buflen| {
+ let sc = s.as_imm_buf(|buf, buflen| {
llvm::LLVMConstStringInContext(cx.llcx, buf as *c_char, buflen as c_uint, False)
- };
+ });
let gsym = token::gensym("str");
- let g = do format!("str{}", gsym).with_c_str |buf| {
+ let g = format!("str{}", gsym).with_c_str(|buf| {
llvm::LLVMAddGlobal(cx.llmod, val_ty(sc).to_ref(), buf)
- };
+ });
llvm::LLVMSetInitializer(g, sc);
llvm::LLVMSetGlobalConstant(g, True);
lib::llvm::SetLinkage(g, lib::llvm::InternalLinkage);
let lldata = C_bytes(data);
let gsym = token::gensym("binary");
- let g = do format!("binary{}", gsym).with_c_str |buf| {
+ let g = format!("binary{}", gsym).with_c_str(|buf| {
llvm::LLVMAddGlobal(cx.llmod, val_ty(lldata).to_ref(), buf)
- };
+ });
llvm::LLVMSetInitializer(g, lldata);
llvm::LLVMSetGlobalConstant(g, True);
lib::llvm::SetLinkage(g, lib::llvm::InternalLinkage);
pub fn C_struct(elts: &[ValueRef], packed: bool) -> ValueRef {
unsafe {
- do elts.as_imm_buf |ptr, len| {
+ elts.as_imm_buf(|ptr, len| {
llvm::LLVMConstStructInContext(base::task_llcx(), ptr, len as c_uint, packed as Bool)
- }
+ })
}
}
pub fn C_named_struct(T: Type, elts: &[ValueRef]) -> ValueRef {
unsafe {
- do elts.as_imm_buf |ptr, len| {
+ elts.as_imm_buf(|ptr, len| {
llvm::LLVMConstNamedStruct(T.to_ref(), ptr, len as c_uint)
- }
+ })
}
}
pub fn const_get_elt(cx: &CrateContext, v: ValueRef, us: &[c_uint])
-> ValueRef {
unsafe {
- let r = do us.as_imm_buf |p, len| {
+ let r = us.as_imm_buf(|p, len| {
llvm::LLVMConstExtractValue(v, p, len as c_uint)
- };
+ });
debug!("const_get_elt(v={}, us={:?}, r={})",
cx.tn.val_to_str(v), us, cx.tn.val_to_str(r));
match bcx.fcx.param_substs {
Some(substs) => {
- do params.iter().map |t| {
+ params.iter().map(|t| {
ty::subst_tps(tcx, substs.tys, substs.self_ty, *t)
- }.collect()
+ }).collect()
}
_ => params
}
typeck::vtable_static(trait_id, ref tys, sub) => {
let tys = match param_substs {
Some(substs) => {
- do tys.iter().map |t| {
+ tys.iter().map(|t| {
ty::subst_tps(tcx, substs.tys, substs.self_ty, *t)
- }.collect()
+ }).collect()
}
_ => tys.to_owned()
};
fn const_addr_of(cx: &mut CrateContext, cv: ValueRef) -> ValueRef {
unsafe {
- let gv = do "const".with_c_str |name| {
+ let gv = "const".with_c_str(|name| {
llvm::LLVMAddGlobal(cx.llmod, val_ty(cv).to_ref(), name)
- };
+ });
llvm::LLVMSetInitializer(gv, cv);
llvm::LLVMSetGlobalConstant(gv, True);
SetLinkage(gv, PrivateLinkage);
Some(@ty::AutoDerefRef(ref adj)) => {
let mut ty = ety;
let mut maybe_ptr = None;
- do adj.autoderefs.times {
+ adj.autoderefs.times(|| {
let (dv, dt) = const_deref(cx, llconst, ty, false);
maybe_ptr = Some(llconst);
llconst = dv;
ty = dt;
- }
+ });
match adj.autoref {
None => { }
let bt = ty::expr_ty_adjusted(cx.tcx, base);
let brepr = adt::represent_type(cx, bt);
let (bv, inlineable) = const_expr(cx, base);
- do expr::with_field_tys(cx.tcx, bt, None) |discr, field_tys| {
+ expr::with_field_tys(cx.tcx, bt, None, |discr, field_tys| {
let ix = ty::field_idx_strict(cx.tcx, field.name, field_tys);
(adt::const_get_field(cx, brepr, bv, discr, ix), inlineable)
- }
+ })
}
ast::ExprIndex(_, base, index) => {
None => None
};
- do expr::with_field_tys(tcx, ety, Some(e.id))
- |discr, field_tys| {
+ expr::with_field_tys(tcx, ety, Some(e.id), |discr, field_tys| {
let cs = field_tys.iter().enumerate()
.map(|(ix, &field_ty)| {
match fs.iter().find(|f| field_ty.ident.name == f.ident.node.name) {
let (cs, inlineable) = vec::unzip(cs.move_iter());
(adt::trans_const(cx, repr, discr, cs),
inlineable.iter().fold(true, |a, &b| a && b))
- }
+ })
}
ast::ExprVec(ref es, ast::MutImmutable) => {
let (v, _, inlineable) = const_vec(cx, e, *es);
ast::ExprVec(ref es, ast::MutImmutable) => {
let (cv, llunitty, _) = const_vec(cx, e, *es);
let llty = val_ty(cv);
- let gv = do "const".with_c_str |name| {
+ let gv = "const".with_c_str(|name| {
llvm::LLVMAddGlobal(cx.llmod, llty.to_ref(), name)
- };
+ });
llvm::LLVMSetInitializer(gv, cv);
llvm::LLVMSetGlobalConstant(gv, True);
SetLinkage(gv, PrivateLinkage);
unsafe {
let llcx = llvm::LLVMContextCreate();
set_task_llcx(llcx);
- let llmod = do name.with_c_str |buf| {
+ let llmod = name.with_c_str(|buf| {
llvm::LLVMModuleCreateWithNameInContext(buf, llcx)
- };
+ });
let data_layout: &str = sess.targ_cfg.target_strs.data_layout;
let targ_triple: &str = sess.targ_cfg.target_strs.target_triple;
- do data_layout.with_c_str |buf| {
- llvm::LLVMSetDataLayout(llmod, buf)
- };
- do targ_triple.with_c_str |buf| {
+ data_layout.with_c_str(|buf| llvm::LLVMSetDataLayout(llmod, buf));
+ targ_triple.with_c_str(|buf| {
llvm::LLVMRustSetNormalizedTarget(llmod, buf)
- };
+ });
let targ_cfg = sess.targ_cfg;
let td = mk_target_data(sess.targ_cfg.target_strs.data_layout);
None => {}
}
// if true { .. } [else { .. }]
- return do with_scope(bcx, thn.info(), "if_true_then") |bcx| {
+ return with_scope(bcx, thn.info(), "if_true_then", |bcx| {
let bcx_out = trans_block(bcx, thn, dest);
debuginfo::clear_source_location(bcx.fcx);
trans_block_cleanups(bcx_out, block_cleanups(bcx))
- }
+ })
} else {
let mut trans = TransItemVisitor { ccx: bcx.fcx.ccx } ;
trans.visit_block(thn, ());
match els {
// if false { .. } else { .. }
Some(elexpr) => {
- return do with_scope(bcx, elexpr.info(), "if_false_then") |bcx| {
+ return with_scope(bcx,
+ elexpr.info(),
+ "if_false_then",
+ |bcx| {
let bcx_out = trans_if_else(bcx, elexpr, dest);
debuginfo::clear_source_location(bcx.fcx);
trans_block_cleanups(bcx_out, block_cleanups(bcx))
- }
+ })
}
// if false { .. }
None => return bcx,
ByRef(_) => {
let cast = PointerCast(bcx, dst, val_ty(self.val));
let cmp = ICmp(bcx, lib::llvm::IntNE, cast, self.val);
- do with_cond(bcx, cmp) |bcx| {
+ with_cond(bcx, cmp, |bcx| {
self.copy_to_no_check(bcx, action, dst)
- }
+ })
}
ByValue => {
self.copy_to_no_check(bcx, action, dst)
let cx = bcx.ccx();
let def_map = cx.tcx.def_map;
- do pat_util::pat_bindings(def_map, local.pat) |_, node_id, span, path_ref| {
-
+ pat_util::pat_bindings(def_map, local.pat, |_, node_id, span, path_ref| {
let var_ident = ast_util::path_to_ident(path_ref);
let var_type = node_id_type(bcx, node_id);
DirectVariable { alloca: llptr },
LocalVariable,
span);
- }
+ })
}
/// Creates debug information for a variable captured in a closure.
let def_map = cx.tcx.def_map;
let scope_metadata = bcx.fcx.debug_context.get_ref(cx, arg.pat.span).fn_metadata;
- do pat_util::pat_bindings(def_map, arg.pat) |_, node_id, span, path_ref| {
-
+ pat_util::pat_bindings(def_map, arg.pat, |_, node_id, span, path_ref| {
let llptr = match bcx.fcx.llargs.find_copy(&node_id) {
Some(v) => v,
None => {
DirectVariable { alloca: llptr },
ArgumentVariable(argument_index),
span);
- }
+ })
}
/// Sets the current debug location at the beginning of the span.
let scope_line = get_scope_line(cx, top_level_block, loc.line);
- let fn_metadata = do function_name.with_c_str |function_name| {
- do linkage_name.with_c_str |linkage_name| {
- unsafe {
- llvm::LLVMDIBuilderCreateFunction(
- DIB(cx),
- containing_scope,
- function_name,
- linkage_name,
- file_metadata,
- loc.line as c_uint,
- function_type_metadata,
- false,
- true,
- scope_line as c_uint,
- FlagPrototyped as c_uint,
- cx.sess.opts.optimize != session::No,
- llfn,
- template_parameters,
- ptr::null())
- }
- }};
+ let fn_metadata = function_name.with_c_str(|function_name| {
+ linkage_name.with_c_str(|linkage_name| {
+ unsafe {
+ llvm::LLVMDIBuilderCreateFunction(
+ DIB(cx),
+ containing_scope,
+ function_name,
+ linkage_name,
+ file_metadata,
+ loc.line as c_uint,
+ function_type_metadata,
+ false,
+ true,
+ scope_line as c_uint,
+ FlagPrototyped as c_uint,
+ cx.sess.opts.optimize != session::No,
+ llfn,
+ template_parameters,
+ ptr::null())
+ }
+ })
+ });
// Initialize fn debug context (including scope map and namespace map)
let mut fn_debug_context = ~FunctionDebugContextData {
source_locations_enabled: false,
};
- let arg_pats = do fn_decl.inputs.map |arg_ref| { arg_ref.pat };
+ let arg_pats = fn_decl.inputs.map(|arg_ref| arg_ref.pat);
populate_scope_map(cx, arg_pats, top_level_block, fn_metadata, &mut fn_debug_context.scope_map);
return FunctionDebugContext(fn_debug_context);
let ident = special_idents::type_self;
- let param_metadata = do token::ident_to_str(&ident).with_c_str |name| {
+ let param_metadata = token::ident_to_str(&ident).with_c_str(|name| {
unsafe {
llvm::LLVMDIBuilderCreateTemplateTypeParameter(
DIB(cx),
0,
0)
}
- };
+ });
template_params.push(param_metadata);
}
// Again, only create type information if extra_debuginfo is enabled
if cx.sess.opts.extra_debuginfo {
let actual_type_metadata = type_metadata(cx, actual_type, codemap::dummy_sp());
- let param_metadata = do token::ident_to_str(&ident).with_c_str |name| {
+ let param_metadata = token::ident_to_str(&ident).with_c_str(|name| {
unsafe {
llvm::LLVMDIBuilderCreateTemplateTypeParameter(
DIB(cx),
0,
0)
}
- };
+ });
template_params.push(param_metadata);
}
}
let work_dir = cx.sess.working_dir.as_str().unwrap();
let producer = format!("rustc version {}", env!("CFG_VERSION"));
- do crate_name.with_c_str |crate_name| {
- do work_dir.with_c_str |work_dir| {
- do producer.with_c_str |producer| {
- do "".with_c_str |flags| {
- do "".with_c_str |split_name| {
- unsafe {
- llvm::LLVMDIBuilderCreateCompileUnit(
- dcx.builder,
- DW_LANG_RUST,
- crate_name,
- work_dir,
- producer,
- cx.sess.opts.optimize != session::No,
- flags,
- 0,
- split_name);
- }
- }}}}};
+ crate_name.with_c_str(|crate_name| {
+ work_dir.with_c_str(|work_dir| {
+ producer.with_c_str(|producer| {
+ "".with_c_str(|flags| {
+ "".with_c_str(|split_name| {
+ unsafe {
+ llvm::LLVMDIBuilderCreateCompileUnit(
+ dcx.builder,
+ DW_LANG_RUST,
+ crate_name,
+ work_dir,
+ producer,
+ cx.sess.opts.optimize != session::No,
+ flags,
+ 0,
+ split_name);
+ }
+ })
+ })
+ })
+ })
+ });
}
fn declare_local(bcx: @mut Block,
CapturedVariable => 0
} as c_uint;
- let (var_alloca, var_metadata) = do name.with_c_str |name| {
+ let (var_alloca, var_metadata) = name.with_c_str(|name| {
match variable_access {
DirectVariable { alloca } => (
alloca,
}
)
}
- };
+ });
set_debug_location(cx, DebugLocation::new(scope_metadata, loc.line, *loc.col));
unsafe {
};
let file_metadata =
- do file_name.with_c_str |file_name| {
- do work_dir.with_c_str |work_dir| {
- unsafe {
- llvm::LLVMDIBuilderCreateFile(DIB(cx), file_name, work_dir)
- }
- }};
+ file_name.with_c_str(|file_name| {
+ work_dir.with_c_str(|work_dir| {
+ unsafe {
+ llvm::LLVMDIBuilderCreateFile(DIB(cx), file_name, work_dir)
+ }
+ })
+ });
debug_context(cx).created_files.insert(full_path.to_owned(), file_metadata);
return file_metadata;
let llvm_type = type_of::type_of(cx, t);
let (size, align) = size_and_align_of(cx, llvm_type);
- let ty_metadata = do name.with_c_str |name| {
+ let ty_metadata = name.with_c_str(|name| {
unsafe {
llvm::LLVMDIBuilderCreateBasicType(
DIB(cx),
bytes_to_bits(align),
encoding)
}
- };
+ });
return ty_metadata;
}
let pointer_llvm_type = type_of::type_of(cx, pointer_type);
let (pointer_size, pointer_align) = size_and_align_of(cx, pointer_llvm_type);
let name = ppaux::ty_to_str(cx.tcx, pointer_type);
- let ptr_metadata = do name.with_c_str |name| {
+ let ptr_metadata = name.with_c_str(|name| {
unsafe {
llvm::LLVMDIBuilderCreatePointerType(
DIB(cx),
bytes_to_bits(pointer_align),
name)
}
- };
+ });
return ptr_metadata;
}
impl MemberDescriptionFactory for StructMemberDescriptionFactory {
fn create_member_descriptions(&self, cx: &mut CrateContext)
-> ~[MemberDescription] {
- do self.fields.map |field| {
+ self.fields.map(|field| {
let name = if field.ident.name == special_idents::unnamed_field.name {
@""
} else {
type_metadata: type_metadata(cx, field.mt.ty, self.span),
offset: ComputedMemberOffset,
}
- }
+ })
}
}
impl MemberDescriptionFactory for TupleMemberDescriptionFactory {
fn create_member_descriptions(&self, cx: &mut CrateContext)
-> ~[MemberDescription] {
- do self.component_types.map |&component_type| {
+ self.component_types.map(|&component_type| {
MemberDescription {
name: @"",
llvm_type: type_of::type_of(cx, component_type),
type_metadata: type_metadata(cx, component_type, self.span),
offset: ComputedMemberOffset,
}
- }
+ })
}
}
_ => cx.sess.bug("unreachable")
};
- do struct_defs
+ struct_defs
.iter()
.enumerate()
- .map |(i, struct_def)| {
+ .map(|(i, struct_def)| {
let (variant_type_metadata, variant_llvm_type, member_desc_factory) =
describe_variant(cx,
struct_def,
type_metadata: variant_type_metadata,
offset: FixedMemberOffset { bytes: 0 },
}
- }.collect()
+ }).collect()
}
}
impl MemberDescriptionFactory for EnumVariantMemberDescriptionFactory {
fn create_member_descriptions(&self, cx: &mut CrateContext)
-> ~[MemberDescription] {
- do self.args.iter().enumerate().map |(i, &(name, ty))| {
+ self.args.iter().enumerate().map(|(i, &(name, ty))| {
MemberDescription {
name: name,
llvm_type: type_of::type_of(cx, ty),
},
offset: ComputedMemberOffset,
}
- }.collect()
+ }).collect()
}
}
// Get the argument names from the enum variant info
let mut arg_names = match variant_info.arg_names {
- Some(ref names) => do names.map |ident| { token::ident_to_str(ident) },
- None => do variant_info.args.map |_| { @"" }
+ Some(ref names) => names.map(|ident| token::ident_to_str(ident)),
+ None => variant_info.args.map(|_| @"")
};
// If this is not a univariant enum, there is also the (unnamed) discriminant field
let name: &str = token::ident_to_str(&v.name);
let discriminant_value = v.disr_val as c_ulonglong;
- do name.with_c_str |name| {
+ name.with_c_str(|name| {
unsafe {
llvm::LLVMDIBuilderCreateEnumerator(
DIB(cx),
name,
discriminant_value)
}
- }
+ })
})
.collect();
let (discriminant_size, discriminant_align) = size_and_align_of(cx, discriminant_llvm_type);
let discriminant_base_type_metadata = type_metadata(cx, adt::ty_of_inttype(inttype),
codemap::dummy_sp());
- do enum_name.with_c_str |enum_name| {
+ enum_name.with_c_str(|enum_name| {
unsafe {
llvm::LLVMDIBuilderCreateEnumerationType(
DIB(cx),
create_DIArray(DIB(cx), enumerators_metadata),
discriminant_base_type_metadata)
}
- }
+ })
};
let type_rep = adt::represent_type(cx, enum_type);
let enum_llvm_type = type_of::type_of(cx, enum_type);
let (enum_type_size, enum_type_align) = size_and_align_of(cx, enum_llvm_type);
- let enum_metadata = do enum_name.with_c_str |enum_name| {
+ let enum_metadata = enum_name.with_c_str(|enum_name| {
unsafe {
llvm::LLVMDIBuilderCreateUnionType(
DIB(cx),
0, // Flags
ptr::null(),
0) // RuntimeLang
- }};
+ }
+ });
UnfinishedMetadata {
cache_id: cache_id_for_type(enum_type),
ComputedMemberOffset => machine::llelement_offset(cx, composite_llvm_type, i)
};
- do member_description.name.with_c_str |member_name| {
+ member_description.name.with_c_str(|member_name| {
unsafe {
llvm::LLVMDIBuilderCreateMemberType(
DIB(cx),
0,
member_description.type_metadata)
}
- }
+ })
})
.collect();
};
return unsafe {
- do struct_type_name.with_c_str |name| {
- do unique_id.with_c_str |unique_id| {
- // LLVMDIBuilderCreateStructType() wants an empty array. A null pointer will lead to
- // hard to trace and debug LLVM assertions later on in llvm/lib/IR/Value.cpp
- let empty_array = create_DIArray(DIB(cx), []);
+ struct_type_name.with_c_str(|name| {
+ unique_id.with_c_str(|unique_id| {
+ // LLVMDIBuilderCreateStructType() wants an empty array. A null pointer will lead to
+ // hard to trace and debug LLVM assertions later on in llvm/lib/IR/Value.cpp
+ let empty_array = create_DIArray(DIB(cx), []);
- llvm::LLVMDIBuilderCreateStructType(
- DIB(cx),
- containing_scope,
- name,
- file_metadata,
- loc.line as c_uint,
- bytes_to_bits(struct_size),
- bytes_to_bits(struct_align),
- 0,
- ptr::null(),
- empty_array,
- 0,
- ptr::null(),
- unique_id)
- }}};
+ llvm::LLVMDIBuilderCreateStructType(
+ DIB(cx),
+ containing_scope,
+ name,
+ file_metadata,
+ loc.line as c_uint,
+ bytes_to_bits(struct_size),
+ bytes_to_bits(struct_align),
+ 0,
+ ptr::null(),
+ empty_array,
+ 0,
+ ptr::null(),
+ unique_id)
+ })
+ })
+ };
}
fn boxed_type_metadata(cx: &mut CrateContext,
debug!("unimplemented_type_metadata: {:?}", ty::get(t));
let name = ppaux::ty_to_str(cx.tcx, t);
- let metadata = do format!("NYI<{}>", name).with_c_str |name| {
+ let metadata = format!("NYI<{}>", name).with_c_str(|name| {
unsafe {
llvm::LLVMDIBuilderCreateBasicType(
DIB(cx),
8_u64,
DW_ATE_unsigned as c_uint)
}
- };
+ });
return metadata;
}
// Push argument identifiers onto the stack so arguments integrate nicely with variable
// shadowing.
for &arg_pat in arg_pats.iter() {
- do pat_util::pat_bindings(def_map, arg_pat) |_, _, _, path_ref| {
+ pat_util::pat_bindings(def_map, arg_pat, |_, _, _, path_ref| {
let ident = ast_util::path_to_ident(path_ref);
scope_stack.push(ScopeStackEntry { scope_metadata: fn_metadata, ident: Some(ident) });
- }
+ })
}
walk_block(cx, fn_entry_block, &mut scope_stack, scope_map);
ast::ExprIf(@ref cond_exp, ref then_block, ref opt_else_exp) => {
walk_expr(cx, cond_exp, scope_stack, scope_map);
- do with_new_scope(cx, then_block.span, scope_stack, scope_map) |cx,
- scope_stack,
- scope_map| {
+ with_new_scope(cx,
+ then_block.span,
+ scope_stack,
+ scope_map,
+ |cx, scope_stack, scope_map| {
walk_block(cx, then_block, scope_stack, scope_map);
- }
+ });
match *opt_else_exp {
Some(@ref else_exp) => walk_expr(cx, else_exp, scope_stack, scope_map),
ast::ExprWhile(@ref cond_exp, ref loop_body) => {
walk_expr(cx, cond_exp, scope_stack, scope_map);
- do with_new_scope(cx, loop_body.span, scope_stack, scope_map) |cx,
- scope_stack,
- scope_map| {
+ with_new_scope(cx,
+ loop_body.span,
+ scope_stack,
+ scope_map,
+ |cx, scope_stack, scope_map| {
walk_block(cx, loop_body, scope_stack, scope_map);
- }
+ })
}
ast::ExprForLoop(_, _, _, _) => {
ast::ExprLoop(ref block, _) |
ast::ExprBlock(ref block) => {
- do with_new_scope(cx, block.span, scope_stack, scope_map) |cx,
- scope_stack,
- scope_map| {
+ with_new_scope(cx,
+ block.span,
+ scope_stack,
+ scope_map,
+ |cx, scope_stack, scope_map| {
walk_block(cx, block, scope_stack, scope_map);
- }
+ })
}
ast::ExprFnBlock(ast::fn_decl { inputs: ref inputs, _ }, ref block) |
ast::ExprProc(ast::fn_decl { inputs: ref inputs, _ }, ref block) => {
- do with_new_scope(cx, block.span, scope_stack, scope_map) |cx,
- scope_stack,
- scope_map| {
+ with_new_scope(cx,
+ block.span,
+ scope_stack,
+ scope_map,
+ |cx, scope_stack, scope_map| {
for &ast::arg { pat: pattern, _ } in inputs.iter() {
walk_pattern(cx, pattern, scope_stack, scope_map);
}
walk_block(cx, block, scope_stack, scope_map);
- }
+ })
}
// ast::expr_loop_body(@ref inner_exp) |
for arm_ref in arms.iter() {
let arm_span = arm_ref.pats[0].span;
- do with_new_scope(cx, arm_span, scope_stack, scope_map) |cx,
- scope_stack,
- scope_map| {
+ with_new_scope(cx,
+ arm_span,
+ scope_stack,
+ scope_map,
+ |cx, scope_stack, scope_map| {
for &pat in arm_ref.pats.iter() {
walk_pattern(cx, pat, scope_stack, scope_map);
}
}
walk_block(cx, &arm_ref.body, scope_stack, scope_map);
- }
+ })
}
}
let namespace_name = token::ident_to_str(&ident);
let namespace_metadata = unsafe {
- do namespace_name.with_c_str |namespace_name| {
+ namespace_name.with_c_str(|namespace_name| {
llvm::LLVMDIBuilderCreateNameSpace(
DIB(cx),
parent_scope,
namespace_name,
ptr::null(), // cannot reconstruct file ...
0) // ... or line information, but that's not so important.
- }
+ })
};
let node = @NamespaceTreeNode {
return _match::trans_match(bcx, expr, discr, *arms, dest);
}
ast::ExprBlock(ref blk) => {
- return do base::with_scope(bcx, blk.info(),
- "block-expr body") |bcx| {
+ return base::with_scope(bcx,
+ blk.info(),
+ "block-expr body",
+ |bcx| {
controlflow::trans_block(bcx, blk, dest)
- };
+ });
}
ast::ExprStruct(_, ref fields, base) => {
return trans_rec_or_struct(bcx, (*fields), base, expr.span, expr.id, dest);
let base_datum = unpack_datum!(bcx, trans_to_datum(bcx, base));
let repr = adt::represent_type(bcx.ccx(), base_datum.ty);
- do with_field_tys(bcx.tcx(), base_datum.ty, None) |discr, field_tys| {
+ with_field_tys(bcx.tcx(), base_datum.ty, None, |discr, field_tys| {
let ix = ty::field_idx_strict(bcx.tcx(), field.name, field_tys);
DatumBlock {
- datum: do base_datum.get_element(bcx,
- field_tys[ix].mt.ty,
- ZeroMem) |srcval| {
+ datum: base_datum.get_element(bcx,
+ field_tys[ix].mt.ty,
+ ZeroMem,
+ |srcval| {
adt::trans_field_ptr(bcx, repr, srcval, discr, ix)
- },
+ }),
bcx: bcx
}
- }
+ })
}
fn trans_index(bcx: @mut Block,
let bounds_check = ICmp(bcx, lib::llvm::IntUGE, ix_val, len);
let expect = ccx.intrinsics.get_copy(&("llvm.expect.i1"));
let expected = Call(bcx, expect, [bounds_check, C_i1(false)], []);
- let bcx = do with_cond(bcx, expected) |bcx| {
+ let bcx = with_cond(bcx, expected, |bcx| {
controlflow::trans_fail_bounds_check(bcx, index_expr.span, ix_val, len)
- };
+ });
let elt = InBoundsGEP(bcx, base, [ix_val]);
let elt = PointerCast(bcx, elt, vt.llunit_ty.ptr_to());
return DatumBlock {
let symbol = csearch::get_symbol(
bcx.ccx().sess.cstore,
did);
- let llval = do symbol.with_c_str |buf| {
+ let llval = symbol.with_c_str(|buf| {
llvm::LLVMAddGlobal(bcx.ccx().llmod,
llty.to_ref(),
buf)
- };
+ });
let extern_const_values = &mut bcx.ccx().extern_const_values;
extern_const_values.insert(did, llval);
llval
let ty = node_id_type(bcx, id);
let tcx = bcx.tcx();
- do with_field_tys(tcx, ty, Some(id)) |discr, field_tys| {
+ with_field_tys(tcx, ty, Some(id), |discr, field_tys| {
let mut need_base = vec::from_elem(field_tys.len(), true);
- let numbered_fields = do fields.map |field| {
+ let numbered_fields = fields.map(|field| {
let opt_pos =
field_tys.iter().position(|field_ty|
field_ty.ident.name == field.ident.node.name);
"Couldn't find field in struct type")
}
}
- };
+ });
let optbase = match base {
Some(base_expr) => {
let mut leftovers = ~[];
let repr = adt::represent_type(bcx.ccx(), ty);
trans_adt(bcx, repr, discr, numbered_fields, optbase, dest)
- }
+ })
}
/**
// And, would it ever be reasonable to be here with discr != 0?
let base_datum = unpack_datum!(bcx, trans_to_datum(bcx, base.expr));
for &(i, t) in base.fields.iter() {
- let datum = do base_datum.get_element(bcx, t, ZeroMem) |srcval| {
+ let datum = base_datum.get_element(bcx, t, ZeroMem, |srcval| {
adt::trans_field_ptr(bcx, repr, srcval, discr, i)
- };
+ });
let dest = adt::trans_field_ptr(bcx, repr, addr, discr, i);
bcx = datum.store_to(bcx, INIT, dest);
}
let bcx = bcx;
let Result {bcx: past_lhs, val: lhs} = {
- do base::with_scope_result(bcx, a.info(), "lhs") |bcx| {
+ base::with_scope_result(bcx, a.info(), "lhs", |bcx| {
trans_to_datum(bcx, a).to_result()
- }
+ })
};
if past_lhs.unreachable {
}
let Result {bcx: past_rhs, val: rhs} = {
- do base::with_scope_result(before_rhs, b.info(), "rhs") |bcx| {
+ base::with_scope_result(before_rhs, b.info(), "rhs", |bcx| {
trans_to_datum(bcx, b).to_result()
- }
+ })
};
if past_rhs.unreachable {
ccx, modpath, "loglevel");
let global;
unsafe {
- global = do s.with_c_str |buf| {
+ global = s.with_c_str(|buf| {
llvm::LLVMAddGlobal(ccx.llmod, Type::i32().to_ref(), buf)
- };
+ });
llvm::LLVMSetGlobalConstant(global, False);
llvm::LLVMSetInitializer(global, C_null(Type::i32()));
lib::llvm::SetLinkage(global, lib::llvm::InternalLinkage);
}
// Perform the call itself
- let llrust_ret_val = do llrust_args.as_imm_buf |ptr, len| {
+ let llrust_ret_val = llrust_args.as_imm_buf(|ptr, len| {
debug!("calling llrustfn = {}", ccx.tn.val_to_str(llrustfn));
llvm::LLVMBuildCall(builder, llrustfn, ptr,
len as c_uint, noname())
- };
+ });
// Get the return value where the foreign fn expects it.
let llforeign_ret_ty = match tys.fn_ty.ret_ty.cast {
pub fn make_visit_glue(bcx: @mut Block, v: ValueRef, t: ty::t) -> @mut Block {
let _icx = push_ctxt("make_visit_glue");
- do with_scope(bcx, None, "visitor cleanup") |bcx| {
+ with_scope(bcx, None, "visitor cleanup", |bcx| {
let mut bcx = bcx;
let (visitor_trait, object_ty) = match ty::visitor_object_ty(bcx.tcx(),
ty::ReStatic) {
// The visitor is a boxed object and needs to be dropped
add_clean(bcx, v, object_ty);
bcx
- }
+ })
}
pub fn make_free_glue(bcx: @mut Block, v: ValueRef, t: ty::t) -> @mut Block {
class_did: ast::DefId, substs: &ty::substs) -> @mut Block {
let repr = adt::represent_type(bcx.ccx(), t);
let drop_flag = adt::trans_drop_flag_ptr(bcx, repr, v0);
- do with_cond(bcx, IsNotNull(bcx, Load(bcx, drop_flag))) |cx| {
+ with_cond(bcx, IsNotNull(bcx, Load(bcx, drop_flag)), |cx| {
trans_struct_drop(cx, t, v0, dtor_did, class_did, substs)
- }
+ })
}
pub fn trans_struct_drop(bcx: @mut Block, t: ty::t, v0: ValueRef, dtor_did: ast::DefId,
// Be sure to put all of the fields into a scope so we can use an invoke
// instruction to call the user destructor but still call the field
// destructors if the user destructor fails.
- do with_scope(bcx, None, "field drops") |bcx| {
+ with_scope(bcx, None, "field drops", |bcx| {
let self_arg = PointerCast(bcx, v0, params[0]);
let args = ~[self_arg];
let (_, bcx) = invoke(bcx, dtor_addr, args, []);
bcx
- }
+ })
}
pub fn make_drop_glue(bcx: @mut Block, v0: ValueRef, t: ty::t) -> @mut Block {
ty::ty_trait(_, _, ty::UniqTraitStore, _, _) => {
let lluniquevalue = GEPi(bcx, v0, [0, abi::trt_field_box]);
// Only drop the value when it is non-null
- do with_cond(bcx, IsNotNull(bcx, Load(bcx, lluniquevalue))) |bcx| {
+ with_cond(bcx, IsNotNull(bcx, Load(bcx, lluniquevalue)), |bcx| {
let llvtable = Load(bcx, GEPi(bcx, v0, [0, abi::trt_field_vtable]));
// Cast the vtable to a pointer to a pointer to a tydesc.
abi::tydesc_field_free_glue,
None);
bcx
- }
+ })
}
ty::ty_opaque_closure_ptr(ck) => {
closure::make_opaque_cbox_drop_glue(bcx, ck, v0)
let name = mangle_internal_name_by_type_and_seq(ccx, t, "tydesc").to_managed();
note_unique_llvm_symbol(ccx, name);
debug!("+++ declare_tydesc {} {}", ppaux::ty_to_str(ccx.tcx, t), name);
- let gvar = do name.with_c_str |buf| {
+ let gvar = name.with_c_str(|buf| {
unsafe {
llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type.to_ref(), buf)
}
- };
+ });
let ty_name = C_estr_slice(ccx, ppaux::ty_to_str(ccx.tcx, t).to_managed());
}
// Not in the cache. Actually build it.
- let methods = do origins.flat_map |origin| {
+ let methods = origins.flat_map(|origin| {
match *origin {
typeck::vtable_static(id, ref substs, sub_vtables) => {
emit_vtable_methods(bcx, id, *substs, sub_vtables)
}
_ => ccx.sess.bug("get_vtable: expected a static origin"),
}
- };
+ });
// Generate a type descriptor for the vtable.
let tydesc = get_tydesc(ccx, self_ty);
let tbl = C_struct(components, false);
let sym = token::gensym("vtable");
- let vt_gvar = do format!("vtable{}", sym).with_c_str |buf| {
+ let vt_gvar = format!("vtable{}", sym).with_c_str(|buf| {
llvm::LLVMAddGlobal(ccx.llmod, val_ty(tbl).to_ref(), buf)
- };
+ });
llvm::LLVMSetInitializer(vt_gvar, tbl);
llvm::LLVMSetGlobalConstant(vt_gvar, lib::llvm::True);
lib::llvm::SetLinkage(vt_gvar, lib::llvm::InternalLinkage);
ty::populate_implementations_for_trait_if_necessary(bcx.tcx(), trt_id);
let trait_method_def_ids = ty::trait_method_def_ids(tcx, trt_id);
- do trait_method_def_ids.map |method_def_id| {
+ trait_method_def_ids.map(|method_def_id| {
let ident = ty::method(tcx, *method_def_id).ident;
// The substitutions we have are on the impl, so we grab
// the method type from the impl to substitute into.
trans_fn_ref_with_vtables(bcx, m_id, 0,
substs, Some(vtables)).llfn
}
- }
+ })
}
pub fn trans_trait_cast(bcx: @mut Block,
ty::ty_tup(ref tys) => {
let extra = ~[self.c_uint(tys.len())]
+ self.c_size_and_align(t);
- do self.bracketed("tup", extra) |this| {
+ self.bracketed("tup", extra, |this| {
for (i, t) in tys.iter().enumerate() {
let extra = ~[this.c_uint(i), this.c_tydesc(*t)];
this.visit("tup_field", extra);
}
- }
+ })
}
// FIXME (#2594): fetch constants out of intrinsic
let extra = ~[self.c_slice(ty_to_str(tcx, t).to_managed()),
self.c_bool(named_fields),
self.c_uint(fields.len())] + self.c_size_and_align(t);
- do self.bracketed("class", extra) |this| {
+ self.bracketed("class", extra, |this| {
for (i, field) in fields.iter().enumerate() {
let extra = ~[this.c_uint(i),
this.c_slice(bcx.ccx().sess.str_of(field.ident)),
+ this.c_mt(&field.mt);
this.visit("class_field", extra);
}
- }
+ })
}
// FIXME (#2595): visiting all the variants in turn is probably
let enum_args = ~[self.c_uint(variants.len()), make_get_disr()]
+ self.c_size_and_align(t);
- do self.bracketed("enum", enum_args) |this| {
+ self.bracketed("enum", enum_args, |this| {
for (i, v) in variants.iter().enumerate() {
let name = ccx.sess.str_of(v.name);
let variant_args = ~[this.c_uint(i),
C_u64(v.disr_val),
this.c_uint(v.args.len()),
this.c_slice(name)];
- do this.bracketed("enum_variant", variant_args) |this| {
+ this.bracketed("enum_variant", variant_args, |this| {
for (j, a) in v.args.iter().enumerate() {
let bcx = this.bcx;
let null = C_null(llptrty);
this.c_tydesc(*a)];
this.visit("enum_variant_field", field_args);
}
- }
+ })
}
- }
+ })
}
ty::ty_trait(_, _, _, _, _) => {
let box_datum = immediate_rvalue(Load(bcx, vptrptr), box_ty);
let not_null = IsNotNull(bcx, box_datum.val);
- do with_cond(bcx, not_null) |bcx| {
+ with_cond(bcx, not_null, |bcx| {
let body_datum = box_datum.box_body(bcx);
let bcx = glue::drop_ty(bcx, body_datum.to_ref_llval(bcx),
body_datum.ty);
} else {
glue::trans_exchange_free(bcx, box_datum.val)
}
- }
+ })
}
pub fn get_dominating_store(self, bcx: &mut Block) -> Option<Value> {
match self.get_single_user().and_then(|user| user.as_store_inst()) {
Some(store) => {
- do store.get_parent().and_then |store_bb| {
+ store.get_parent().and_then(|store_bb| {
let mut bb = BasicBlock(bcx.llbb);
let mut ret = Some(store);
while *bb != *store_bb {
}
}
ret
- }
+ })
}
_ => None
}
fn next(&mut self) -> Option<Value> {
let current = self.next;
- self.next = do current.and_then |u| { u.get_next_use() };
+ self.next = current.and_then(|u| u.get_next_use());
- do current.map |u| { u.get_user() }
+ current.map(|u| u.get_user())
}
}
let mut encountered_box = encountered_box;
let mut needs_unwind_cleanup = false;
- do maybe_walk_ty(ty) |ty| {
+ maybe_walk_ty(ty, |ty| {
let old_encountered_box = encountered_box;
let result = match get(ty).sty {
ty_box(_) | ty_opaque_box => {
encountered_box = old_encountered_box;
result
- }
+ });
return needs_unwind_cleanup;
}
-> TypeContents {
let _i = indenter();
let mut tc = TC::All;
- do each_inherited_builtin_bound(cx, bounds, traits) |bound| {
+ each_inherited_builtin_bound(cx, bounds, traits, |bound| {
tc = tc - match bound {
BoundStatic => TC::Nonstatic,
BoundSend => TC::Nonsendable,
BoundFreeze => TC::Nonfreezable,
BoundSized => TC::Nonsized,
};
- }
+ });
return tc;
// Iterates over all builtin bounds on the type parameter def, including
f(bound);
}
- do each_bound_trait_and_supertraits(cx, traits) |trait_ref| {
+ each_bound_trait_and_supertraits(cx, traits, |trait_ref| {
let trait_def = lookup_trait_def(cx, trait_ref.def_id);
for bound in trait_def.bounds.iter() {
f(bound);
}
true
- };
+ });
}
}
}
ty_enum(did, ref substs) => {
seen.push(did);
let vs = enum_variants(cx, did);
- let r = !vs.is_empty() && do vs.iter().all |variant| {
- do variant.args.iter().any |aty| {
+ let r = !vs.is_empty() && vs.iter().all(|variant| {
+ variant.args.iter().any(|aty| {
let sty = subst(cx, substs, *aty);
type_requires(cx, seen, r_ty, sty)
- }
- };
+ })
+ });
seen.pop();
r
}
ty_opaque_closure_ptr(_) => result = true,
ty_struct(did, ref substs) => {
let fields = lookup_struct_fields(cx, did);
- result = do fields.iter().all |f| {
+ result = fields.iter().all(|f| {
let fty = ty::lookup_item_type(cx, f.id);
let sty = subst(cx, substs, fty.ty);
type_is_pod(cx, sty)
- };
+ });
}
ty_estr(vstore_slice(*)) | ty_evec(_, vstore_slice(*)) => {
method_map: typeck::method_map,
id: ast::NodeId)
-> Option<@~[TypeParameterDef]> {
- do method_map.find(&id).map |method| {
+ method_map.find(&id).map(|method| {
match method.origin {
typeck::method_static(did) => {
// n.b.: When we encode impl methods, the bounds
n_mth).generics.type_param_defs)
}
}
- }
+ })
}
pub fn resolve_expr(tcx: ctxt, expr: &ast::Expr) -> ast::Def {
/// to a bitset or some other representation.
pub fn param_tys_in_type(ty: t) -> ~[param_ty] {
let mut rslt = ~[];
- do walk_ty(ty) |ty| {
+ walk_ty(ty, |ty| {
match get(ty).sty {
ty_param(p) => {
rslt.push(p);
}
_ => ()
}
- }
+ });
rslt
}
// contain duplicates. (Integral type vars aren't counted.)
fn vars_in_type(ty: t) -> ~[TyVid] {
let mut rslt = ~[];
- do walk_ty(ty) |ty| {
+ walk_ty(ty, |ty| {
match get(ty).sty {
ty_infer(TyVar(v)) => rslt.push(v),
_ => ()
}
- }
+ });
rslt
}
Some(&ast_map::node_item(item, _)) => {
match item.node {
ast::item_struct(struct_def, _) => {
- do struct_def.ctor_id.map |ctor_id| {
+ struct_def.ctor_id.map(|ctor_id| {
ast_util::local_def(ctor_id)
- }
+ })
}
_ => cx.sess.bug("called struct_ctor_id on non-struct")
}
assert!(fields.len() > 0);
let arg_tys = ty_fn_args(ctor_ty).map(|a| *a);
- let arg_names = do fields.map |field| {
+ let arg_names = fields.map(|field| {
match field.node.kind {
named_field(ident, _) => ident,
unnamed_field => cx.sess.bug(
"enum_variants: all fields in struct must have a name")
}
- };
+ });
return VariantInfo {
args: arg_tys,
id: ast::DefId,
substs: &substs)
-> ~[@VariantInfo] {
- do enum_variants(cx, id).iter().map |variant_info| {
+ enum_variants(cx, id).iter().map(|variant_info| {
let substd_args = variant_info.args.iter()
.map(|aty| subst(cx, substs, *aty)).collect();
ctor_ty: substd_ctor_ty,
..(**variant_info).clone()
}
- }.collect()
+ }).collect()
}
pub fn item_path_str(cx: ctxt, id: ast::DefId) -> ~str {
}
} else {
let mut cont = true;
- do csearch::get_item_attrs(tcx.cstore, did) |meta_items| {
+ csearch::get_item_attrs(tcx.cstore, did, |meta_items| {
if cont {
cont = meta_items.iter().advance(|ptrptr| f(*ptrptr));
}
- }
+ });
return cont;
}
}
}
fn struct_field_tys(fields: &[@struct_field]) -> ~[field_ty] {
- do fields.map |field| {
+ fields.map(|field| {
match field.node.kind {
named_field(ident, visibility) => {
field_ty {
}
}
}
- }
+ })
}
// Returns a list of fields corresponding to the struct's items. trans uses
// this. Takes a list of substs with which to instantiate field types.
pub fn struct_fields(cx: ctxt, did: ast::DefId, substs: &substs)
-> ~[field] {
- do lookup_struct_fields(cx, did).map |f| {
+ lookup_struct_fields(cx, did).map(|f| {
field {
// FIXME #6993: change type of field to Name and get rid of new()
ident: ast::Ident::new(f.name),
mutbl: MutImmutable
}
}
- }
+ })
}
pub fn is_binopable(cx: ctxt, ty: t, op: ast::BinOp) -> bool {
type_param_defs: &[TypeParameterDef]) -> uint {
let mut total = 0;
for type_param_def in type_param_defs.iter() {
- do each_bound_trait_and_supertraits(
- tcx, type_param_def.bounds.trait_bounds) |_| {
+ each_bound_trait_and_supertraits(
+ tcx, type_param_def.bounds.trait_bounds, |_| {
total += 1;
true
- };
+ });
}
return total;
}
pub fn get_tydesc_ty(tcx: ctxt) -> Result<t, ~str> {
- do tcx.lang_items.require(TyDescStructLangItem).map |tydesc_lang_item| {
+ tcx.lang_items.require(TyDescStructLangItem).map(|tydesc_lang_item| {
tcx.intrinsic_defs.find_copy(&tydesc_lang_item)
.expect("Failed to resolve TyDesc")
- }
+ })
}
pub fn get_opaque_ty(tcx: ctxt) -> Result<t, ~str> {
- do tcx.lang_items.require(OpaqueStructLangItem).map |opaque_lang_item| {
+ tcx.lang_items.require(OpaqueStructLangItem).map(|opaque_lang_item| {
tcx.intrinsic_defs.find_copy(&opaque_lang_item)
.expect("Failed to resolve Opaque")
- }
+ })
}
pub fn visitor_object_ty(tcx: ctxt,
return
}
- do csearch::each_implementation_for_type(tcx.sess.cstore, type_id)
+ csearch::each_implementation_for_type(tcx.sess.cstore, type_id,
|implementation_def_id| {
let implementation = @csearch::get_impl(tcx, implementation_def_id);
// Store the implementation info.
tcx.impls.insert(implementation_def_id, implementation);
- }
+ });
tcx.populated_external_types.insert(type_id);
}
return
}
- do csearch::each_implementation_for_trait(tcx.sess.cstore, trait_id)
+ csearch::each_implementation_for_trait(tcx.sess.cstore, trait_id,
|implementation_def_id| {
let implementation = @csearch::get_impl(tcx, implementation_def_id);
// Store the implementation info.
tcx.impls.insert(implementation_def_id, implementation);
- }
+ });
tcx.populated_external_traits.insert(trait_id);
}
iter(hash, &mt.mutbl);
};
fn iter<T: IterBytes>(hash: &mut SipState, t: &T) {
- do t.iter_bytes(true) |bytes| { hash.input(bytes); true };
+ t.iter_bytes(true, |bytes| { hash.input(bytes); true });
}
- do ty::walk_ty(t) |t| {
+ ty::walk_ty(t, |t| {
match ty::get(t).sty {
ty_nil => hash.input([0]),
ty_bot => hash.input([1]),
mt(&mut hash, m);
}
}
- }
+ });
hash.result_u64()
}
// that function type
let rb = rscope::BindingRscope::new(id);
- let opt_transformed_self_ty = do opt_self_info.map |self_info| {
+ let opt_transformed_self_ty = opt_self_info.map(|self_info| {
transform_self_ty(this, &rb, self_info)
- };
+ });
let input_tys = decl.inputs.map(|a| ty_of_arg(this, &rb, a, None));
// that function type
let rb = rscope::BindingRscope::new(id);
- let input_tys = do decl.inputs.iter().enumerate().map |(i, a)| {
- let expected_arg_ty = do expected_sig.as_ref().and_then |e| {
+ let input_tys = decl.inputs.iter().enumerate().map(|(i, a)| {
+ let expected_arg_ty = expected_sig.as_ref().and_then(|e| {
// no guarantee that the correct number of expected args
// were supplied
if i < e.inputs.len() {Some(e.inputs[i])} else {None}
- };
+ });
ty_of_arg(this, &rb, a, expected_arg_ty)
- }.collect();
+ }).collect();
let expected_ret_ty = expected_sig.map(|e| e.output);
let output_ty = match decl.output.node {
// we find the trait the method came from, counting up the
// methods from them.
let mut method_count = 0;
- do ty::each_bound_trait_and_supertraits(tcx, &[subtrait])
- |bound_ref| {
+ ty::each_bound_trait_and_supertraits(tcx, &[subtrait], |bound_ref| {
if bound_ref.def_id == trait_ref.def_id { false }
else {
method_count += ty::trait_methods(tcx, bound_ref.def_id).len();
true
}
- };
+ });
return method_count + n_method;
}
};
let trait_ref = @TraitRef { def_id: did, substs: rcvr_substs.clone() };
- do self.push_inherent_candidates_from_bounds_inner(&[trait_ref])
+ self.push_inherent_candidates_from_bounds_inner(&[trait_ref],
|new_trait_ref, m, method_num, _bound_num| {
let vtable_index =
self.get_method_index(new_trait_ref, trait_ref, method_num);
real_index: vtable_index
})
}
- };
+ });
}
fn push_inherent_candidates_from_param(&self,
self_ty: ty::t,
bounds: &[@TraitRef],
param: param_index) {
- do self.push_inherent_candidates_from_bounds_inner(bounds)
+ self.push_inherent_candidates_from_bounds_inner(bounds,
|trait_ref, m, method_num, bound_num| {
Candidate {
rcvr_match_condition: RcvrMatchesIfSubtype(self_ty),
bound_num: bound_num,
})
}
- }
+ })
}
// Do a search through a list of bounds, using a callback to actually
let tcx = self.tcx();
let mut next_bound_idx = 0; // count only trait bounds
- do ty::each_bound_trait_and_supertraits(tcx, bounds) |bound_trait_ref| {
+ ty::each_bound_trait_and_supertraits(tcx, bounds, |bound_trait_ref| {
let this_bound_idx = next_bound_idx;
next_bound_idx += 1;
}
}
true
- };
+ });
}
}
fn visit_block(&mut self, b:&ast::Block, _:()) {
- // non-obvious: the `blk` variable maps to region lb, so
- // we have to keep this up-to-date. This
- // is... unfortunate. It'd be nice to not need this.
- do self.fcx.with_region_lb(b.id) {
- visit::walk_block(self, b, ());
- }
+ // non-obvious: the `blk` variable maps to region lb, so
+ // we have to keep this up-to-date. This
+ // is... unfortunate. It'd be nice to not need this.
+ self.fcx.with_region_lb(b.id, || visit::walk_block(self, b, ()));
}
// Don't descend into fns and items
// Add formal parameters.
for (arg_ty, input) in arg_tys.iter().zip(decl.inputs.iter()) {
// Create type variables for each argument.
- do pat_util::pat_bindings(tcx.def_map, input.pat)
- |_bm, pat_id, _sp, _path| {
+ pat_util::pat_bindings(tcx.def_map,
+ input.pat,
+ |_bm, pat_id, _sp, _path| {
visit.assign(pat_id, None);
- }
+ });
// Check the pattern.
let pcx = pat_ctxt {
pub fn check_expr_has_type(
fcx: @mut FnCtxt, expr: @ast::Expr,
expected: ty::t) {
- do check_expr_with_unifier(fcx, expr, Some(expected)) {
+ check_expr_with_unifier(fcx, expr, Some(expected), || {
demand::suptype(fcx, expr.span, expected, fcx.expr_ty(expr));
- }
+ });
}
pub fn check_expr_coercable_to_type(
fcx: @mut FnCtxt, expr: @ast::Expr,
expected: ty::t) {
- do check_expr_with_unifier(fcx, expr, Some(expected)) {
+ check_expr_with_unifier(fcx, expr, Some(expected), || {
demand::coerce(fcx, expr.span, expected, expr)
- }
+ });
}
pub fn check_expr_with_hint(
substs: &ty::substs) -> Option<ty::t> {
let o_field = items.iter().find(|f| f.name == fieldname);
- do o_field.map() |f| {
- ty::lookup_field_type(tcx, class_id, f.id, substs)
- }
+ o_field.map(|f| ty::lookup_field_type(tcx, class_id, f.id, substs))
}
// Controls whether the arguments are automatically referenced. This is useful
}
}
ast::ExprUnary(callee_id, unop, oprnd) => {
- let exp_inner = do unpack_expected(fcx, expected) |sty| {
+ let exp_inner = unpack_expected(fcx, expected, |sty| {
match unop {
ast::UnBox(_) | ast::UnUniq => match *sty {
ty::ty_box(ref mt) | ty::ty_uniq(ref mt) => Some(mt.ty),
ast::UnNot | ast::UnNeg => expected,
ast::UnDeref => None
}
- };
+ });
check_expr_with_opt_hint(fcx, oprnd, exp_inner);
let mut oprnd_t = fcx.expr_ty(oprnd);
if !ty::type_is_error(oprnd_t) &&
expected,
|x| Some((*x).clone()));
let inner_ty = match expected_sty {
- Some(ty::ty_closure(_)) => expected.unwrap(),
+ Some(ty::ty_closure(ref closure_ty))
+ if closure_ty.sigil == ast::OwnedSigil => {
+ expected.unwrap()
+ }
_ => match expected {
Some(expected_t) => {
fcx.type_error_message(expr.span, |actual| {
format!("last argument in `do` call \
- has non-closure type: {}",
+ has non-procedure type: {}",
actual)
}, expected_t, None);
let err_ty = ty::mk_err();
let mut bot_field = false;
let mut err_field = false;
- let elt_ts = do elts.iter().enumerate().map |(i, e)| {
+ let elt_ts = elts.iter().enumerate().map(|(i, e)| {
let opt_hint = match flds {
Some(ref fs) if i < fs.len() => Some(fs[i]),
_ => None
err_field = err_field || ty::type_is_error(t);
bot_field = bot_field || ty::type_is_bot(t);
t
- }.collect();
+ }).collect();
if bot_field {
fcx.write_bot(id);
} else if err_field {
let purity_state = fcx.ps.recurse(blk);
let prev = replace(&mut fcx.ps, purity_state);
- do fcx.with_region_lb(blk.id) {
+ fcx.with_region_lb(blk.id, || {
let mut warned = false;
let mut last_was_bot = false;
let mut any_bot = false;
}
}
};
- }
+ });
fcx.ps = prev;
}
fn constrain_bindings_in_pat(pat: @ast::Pat, rcx: &mut Rcx) {
let tcx = rcx.fcx.tcx();
debug!("regionck::visit_pat(pat={})", pat.repr(tcx));
- do pat_util::pat_bindings(tcx.def_map, pat) |_, id, span, _| {
+ pat_util::pat_bindings(tcx.def_map, pat, |_, id, span, _| {
// If we have a variable that contains region'd data, that
// data will be accessible from anywhere that the variable is
// accessed. We must be wary of loops like this:
constrain_regions_in_type_of_node(
rcx, id, encl_region,
infer::BindingTypeIsNotValidAtDecl(span));
- }
+ })
}
fn visit_expr(rcx: &mut Rcx, expr: @ast::Expr) {
region_to_str(tcx, "", false, minimum_lifetime),
ty_to_str(tcx, ty));
- do relate_nested_regions(tcx, Some(minimum_lifetime), ty) |r_sub, r_sup| {
+ relate_nested_regions(tcx, Some(minimum_lifetime), ty, |r_sub, r_sup| {
debug!("relate_nested_regions(r_sub={}, r_sup={})",
r_sub.repr(tcx),
r_sup.repr(tcx));
true, infer::ReferenceOutlivesReferent(ty, origin.span()),
r_sub, r_sup);
}
- }
+ });
return (e == rcx.errors_reported);
}
// ty is the value supplied for the type parameter A...
let mut param_result = ~[];
- do ty::each_bound_trait_and_supertraits(tcx, type_param_bounds.trait_bounds) |trait_ref| {
+ ty::each_bound_trait_and_supertraits(tcx, type_param_bounds.trait_bounds, |trait_ref| {
// ...and here trait_ref is each bound that was declared on A,
// expressed in terms of the type parameters.
}
}
true
- };
+ });
debug!("lookup_vtables_for_param result(\
location_info={:?}, \
let mut n_bound = 0;
let mut ret = None;
- do ty::each_bound_trait_and_supertraits(tcx, bounds) |bound_trait_ref| {
+ ty::each_bound_trait_and_supertraits(tcx, bounds, |bound_trait_ref| {
debug!("checking bounds trait {}",
bound_trait_ref.repr(vcx.tcx()));
n_bound += 1;
true
}
- };
+ });
ret
}
ty::RegionTraitStore(ty::ReStatic),
ast::MutImmutable,
ty::EmptyBuiltinBounds());
- do fixup_ty(vcx, location_info, t, is_early).map |t_f| {
+ fixup_ty(vcx, location_info, t, is_early).map(|t_f| {
match ty::get(t_f).sty {
ty::ty_trait(_, ref substs_f, _, _, _) => (*substs_f).clone(),
_ => fail!("t_f should be a trait")
}
- }
+ })
}
fn fixup_ty(vcx: &VtableContext,
let cx = fcx.ccx;
match ex.node {
ast::ExprPath(*) => {
- do fcx.opt_node_ty_substs(ex.id) |substs| {
+ fcx.opt_node_ty_substs(ex.id, |substs| {
debug!("vtable resolution on parameter bounds for expr {}",
ex.repr(fcx.tcx()));
let def = cx.tcx.def_map.get_copy(&ex.id);
}
}
true
- };
+ });
}
ast::ExprParen(e) => {
id, ppaux::ty_to_str(tcx, n_ty), ppaux::ty_to_str(tcx, t));
write_ty_to_tcx(tcx, id, t);
let mut ret = Some(t);
- do fcx.opt_node_ty_substs(id) |substs| {
+ fcx.opt_node_ty_substs(id, |substs| {
let mut new_tps = ~[];
for subst in substs.tps.iter() {
match resolve_type_vars_in_type(fcx, sp, *subst) {
}
write_substs_to_tcx(tcx, id, new_tps);
ret.is_some()
- };
+ });
ret
}
}
*/
let mut found_nominal = false;
- do ty::walk_ty(original_type) |t| {
+ ty::walk_ty(original_type, |t| {
match get(t).sty {
ty_enum(def_id, _) |
ty_trait(def_id, _, _, _, _) |
_ => { }
}
- }
+ });
return found_nominal;
}
}
pub fn check_implementation_coherence(&self) {
- do self.crate_context.tcx.trait_impls.each_key |&trait_id| {
+ self.crate_context.tcx.trait_impls.each_key(|&trait_id| {
self.check_implementation_coherence_of(trait_id);
true
- };
+ });
}
pub fn check_implementation_coherence_of(&self, trait_def_id: DefId) {
// Unify pairs of polytypes.
- do self.iter_impls_of_trait(trait_def_id) |a| {
+ self.iter_impls_of_trait(trait_def_id, |a| {
let implementation_a = a;
let polytype_a =
self.get_self_type_for_implementation(implementation_a);
// "We have an impl of trait <trait_def_id> for type <polytype_a>,
// and that impl is <implementation_a>"
- do self.iter_impls_of_trait(trait_def_id) |b| {
+ self.iter_impls_of_trait(trait_def_id, |b| {
let implementation_b = b;
// An impl is coherent with itself
"note conflicting implementation here");
}
}
- }
- }
+ })
+ })
}
pub fn iter_impls_of_trait(&self, trait_def_id: DefId, f: |@Impl|) {
let mut impls_seen = HashSet::new();
let crate_store = self.crate_context.tcx.sess.cstore;
- do iter_crate_data(crate_store) |crate_number, _crate_metadata| {
- do each_impl(crate_store, crate_number) |def_id| {
+ iter_crate_data(crate_store, |crate_number, _crate_metadata| {
+ each_impl(crate_store, crate_number, |def_id| {
assert_eq!(crate_number, def_id.crate);
self.add_external_impl(&mut impls_seen, def_id)
- }
- }
+ })
+ })
}
//
// Represents [A',B',C']
let num_trait_bounds = trait_ty_generics.type_param_defs.len();
- let non_shifted_trait_tps = do vec::from_fn(num_trait_bounds) |i| {
+ let non_shifted_trait_tps = vec::from_fn(num_trait_bounds, |i| {
ty::mk_param(tcx, i, trait_ty_generics.type_param_defs[i].def_id)
- };
+ });
// Represents [D']
let self_param = ty::mk_param(tcx, num_trait_bounds,
// Represents [E',F',G']
let num_method_bounds = m.generics.type_param_defs.len();
- let shifted_method_tps = do vec::from_fn(num_method_bounds) |i| {
+ let shifted_method_tps = vec::from_fn(num_method_bounds, |i| {
ty::mk_param(tcx, i + num_trait_bounds + 1,
m.generics.type_param_defs[i].def_id)
- };
+ });
// Convert the regions 'a, 'b, 'c defined on the trait into
// bound regions on the fn.
// See above for details.
match ty::get(b).sty {
ty::ty_rptr(_, mt_b) => {
- return do self.unpack_actual_value(a) |sty_a| {
+ return self.unpack_actual_value(a, |sty_a| {
self.coerce_borrowed_pointer(a, sty_a, b, mt_b)
- };
+ });
}
ty::ty_estr(vstore_slice(_)) => {
- return do self.unpack_actual_value(a) |sty_a| {
+ return self.unpack_actual_value(a, |sty_a| {
self.coerce_borrowed_string(a, sty_a, b)
- };
+ });
}
ty::ty_evec(mt_b, vstore_slice(_)) => {
- return do self.unpack_actual_value(a) |sty_a| {
+ return self.unpack_actual_value(a, |sty_a| {
self.coerce_borrowed_vector(a, sty_a, b, mt_b)
- };
+ });
}
ty::ty_closure(ty::ClosureTy {sigil: ast::BorrowedSigil, _}) => {
- return do self.unpack_actual_value(a) |sty_a| {
+ return self.unpack_actual_value(a, |sty_a| {
self.coerce_borrowed_fn(a, sty_a, b)
- };
+ });
}
ty::ty_trait(_, _, ty::RegionTraitStore(*), m, _) => {
- return do self.unpack_actual_value(a) |sty_a| {
+ return self.unpack_actual_value(a, |sty_a| {
self.coerce_borrowed_object(a, sty_a, b, m)
- };
+ });
}
ty::ty_ptr(mt_b) => {
- return do self.unpack_actual_value(a) |sty_a| {
+ return self.unpack_actual_value(a, |sty_a| {
self.coerce_unsafe_ptr(a, sty_a, b, mt_b)
- };
+ });
}
_ => {}
}
- do self.unpack_actual_value(a) |sty_a| {
+ self.unpack_actual_value(a, |sty_a| {
match *sty_a {
ty::ty_bare_fn(ref a_f) => {
// Bare functions are coercable to any closure type.
self.subtype(a, b)
}
}
- }
+ })
}
pub fn subtype(&self, a: ty::t, b: ty::t) -> CoerceResult {
fn_ty_a: &ty::BareFnTy,
b: ty::t)
-> CoerceResult {
- do self.unpack_actual_value(b) |sty_b| {
+ self.unpack_actual_value(b, |sty_b| {
self.coerce_from_bare_fn_post_unpack(a, fn_ty_a, b, sty_b)
- }
+ })
}
pub fn coerce_from_bare_fn_post_unpack(&self,
}
fn args(&self, a: ty::t, b: ty::t) -> cres<ty::t> {
- do self.contratys(a, b).and_then |t| {
- Ok(t)
- }
+ self.contratys(a, b).and_then(|t| Ok(t))
}
fn sigils(&self, p1: ast::Sigil, p2: ast::Sigil) -> cres<ast::Sigil> {
match (a, b) {
(ty::vstore_slice(a_r), ty::vstore_slice(b_r)) => {
- do self.contraregions(a_r, b_r).and_then |r| {
+ self.contraregions(a_r, b_r).and_then(|r| {
Ok(ty::vstore_slice(r))
- }
+ })
}
_ if a == b => {
match (a, b) {
(ty::RegionTraitStore(a_r), ty::RegionTraitStore(b_r)) => {
- do self.contraregions(a_r, b_r).and_then |r| {
+ self.contraregions(a_r, b_r).and_then(|r| {
Ok(ty::RegionTraitStore(r))
- }
+ })
}
_ if a == b => {
pub fn eq_tys<C:Combine>(this: &C, a: ty::t, b: ty::t) -> ures {
let suber = this.sub();
- do this.infcx().try {
- do suber.tys(a, b).and_then |_ok| {
- suber.contratys(a, b)
- }.to_ures()
- }
+ this.infcx().try(|| {
+ suber.tys(a, b).and_then(|_ok| suber.contratys(a, b)).to_ures()
+ })
}
pub fn eq_regions<C:Combine>(this: &C, a: ty::Region, b: ty::Region)
a.repr(this.infcx().tcx),
b.repr(this.infcx().tcx));
let sub = this.sub();
- do indent {
+ indent(|| {
this.infcx().try(|| {
- do sub.regions(a, b).and_then |_r| {
- sub.contraregions(a, b)
- }
+ sub.regions(a, b).and_then(|_r| sub.contraregions(a, b))
}).or_else(|e| {
// substitute a better error, but use the regions
// found in the original error
_ => Err(e)
}
}).to_ures()
- }
+ })
}
pub fn eq_opt_regions<C:Combine>(
b: Option<ty::Region>) -> cres<Option<ty::Region>> {
match (a, b) {
- (None, None) => {
- Ok(None)
- }
- (Some(a), Some(b)) => {
- do eq_regions(this, a, b).then {
- Ok(Some(a))
+ (None, None) => Ok(None),
+ (Some(a), Some(b)) => eq_regions(this, a, b).then(|| Ok(Some(a))),
+ (_, _) => {
+ // If these two substitutions are for the same type (and
+ // they should be), then the type should either
+ // consistently have a region parameter or not have a
+ // region parameter.
+ this.infcx().tcx.sess.bug(
+ format!("substitution a had opt_region {} and \
+ b had opt_region {}",
+ a.inf_str(this.infcx()),
+ b.inf_str(this.infcx())));
}
- }
- (_, _) => {
- // If these two substitutions are for the same type (and
- // they should be), then the type should either
- // consistently have a region parameter or not have a
- // region parameter.
- this.infcx().tcx.sess.bug(
- format!("substitution a had opt_region {} and \
- b had opt_region {}",
- a.inf_str(this.infcx()),
- b.inf_str(this.infcx())));
- }
}
}
}
(&ty::ty_box(ref a_mt), &ty::ty_box(ref b_mt)) => {
- do this.mts(a_mt, b_mt).and_then |mt| {
- Ok(ty::mk_box(tcx, mt))
- }
+ this.mts(a_mt, b_mt).and_then(|mt| Ok(ty::mk_box(tcx, mt)))
}
(&ty::ty_uniq(ref a_mt), &ty::ty_uniq(ref b_mt)) => {
- do this.mts(a_mt, b_mt).and_then |mt| {
- Ok(ty::mk_uniq(tcx, mt))
- }
+ this.mts(a_mt, b_mt).and_then(|mt| Ok(ty::mk_uniq(tcx, mt)))
}
(&ty::ty_ptr(ref a_mt), &ty::ty_ptr(ref b_mt)) => {
- do this.mts(a_mt, b_mt).and_then |mt| {
- Ok(ty::mk_ptr(tcx, mt))
- }
+ this.mts(a_mt, b_mt).and_then(|mt| Ok(ty::mk_ptr(tcx, mt)))
}
(&ty::ty_rptr(a_r, ref a_mt), &ty::ty_rptr(b_r, ref b_mt)) => {
}
(&ty::ty_evec(ref a_mt, vs_a), &ty::ty_evec(ref b_mt, vs_b)) => {
- do this.mts(a_mt, b_mt).and_then |mt| {
- do this.vstores(ty::terr_vec, vs_a, vs_b).and_then |vs| {
+ this.mts(a_mt, b_mt).and_then(|mt| {
+ this.vstores(ty::terr_vec, vs_a, vs_b).and_then(|vs| {
Ok(ty::mk_evec(tcx, mt, vs))
- }
- }
+ })
+ })
}
(&ty::ty_estr(vs_a), &ty::ty_estr(vs_b)) => {
}
(&ty::ty_bare_fn(ref a_fty), &ty::ty_bare_fn(ref b_fty)) => {
- do this.bare_fn_tys(a_fty, b_fty).and_then |fty| {
+ this.bare_fn_tys(a_fty, b_fty).and_then(|fty| {
Ok(ty::mk_bare_fn(tcx, fty))
- }
+ })
}
(&ty::ty_closure(ref a_fty), &ty::ty_closure(ref b_fty)) => {
- do this.closure_tys(a_fty, b_fty).and_then |fty| {
+ this.closure_tys(a_fty, b_fty).and_then(|fty| {
Ok(ty::mk_closure(tcx, fty))
- }
+ })
}
_ => Err(ty::terr_sorts(expected_found(this, a, b)))
(&Some(_), &None) => Ok((*a).clone()),
(&None, &Some(_)) => Ok((*b).clone()),
(&Some(ref v_a), &Some(ref v_b)) => {
- do lattice_op(self, v_a, v_b).and_then |v| {
- Ok(Some(v))
- }
+ lattice_op(self, v_a, v_b).and_then(|v| Ok(Some(v)))
}
}
}
// Otherwise, we need to merge A and B into one variable. We can
// then use either variable as an upper bound:
let cf = this.combine_fields();
- do cf.var_sub_var(a_vid.clone(), b_vid.clone()).then {
+ cf.var_sub_var(a_vid.clone(), b_vid.clone()).then(|| {
Ok(VarResult(a_vid.clone()))
- }
+ })
}
pub fn lattice_var_and_t<L:LatticeDir + Combine,
// and then return b.
debug!("bnd=None");
let a_bounds = this.with_bnd(a_bounds, (*b).clone());
- do this.combine_fields().bnds(&a_bounds.lb, &a_bounds.ub).then {
+ this.combine_fields().bnds(&a_bounds.lb, &a_bounds.ub).then(|| {
this.infcx().set(a_id.clone(),
Root(a_bounds.clone(), nde_a.rank));
Ok((*b).clone())
- }
+ })
}
}
}
values: Types(expected_found(a_is_expected, a, b))
};
- let result = do cx.commit {
- cx.lub(a_is_expected, trace).tys(a, b)
- };
-
+ let result = cx.commit(|| cx.lub(a_is_expected, trace).tys(a, b));
match result {
Ok(t) => t,
Err(ref err) => {
b: ty::t)
-> ures {
debug!("mk_subty({} <: {})", a.inf_str(cx), b.inf_str(cx));
- do indent {
- do cx.commit {
+ indent(|| {
+ cx.commit(|| {
let trace = TypeTrace {
origin: origin,
values: Types(expected_found(a_is_expected, a, b))
};
cx.sub(a_is_expected, trace).tys(a, b)
- }
- }.to_ures()
+ })
+ }).to_ures()
}
pub fn can_mk_subty(cx: @mut InferCtxt, a: ty::t, b: ty::t) -> ures {
debug!("can_mk_subty({} <: {})", a.inf_str(cx), b.inf_str(cx));
- do indent {
- do cx.probe {
+ indent(|| {
+ cx.probe(|| {
let trace = TypeTrace {
origin: Misc(codemap::dummy_sp()),
values: Types(expected_found(true, a, b))
};
cx.sub(true, trace).tys(a, b)
- }
- }.to_ures()
+ })
+ }).to_ures()
}
pub fn mk_subr(cx: @mut InferCtxt,
b: ty::t)
-> ures {
debug!("mk_eqty({} <: {})", a.inf_str(cx), b.inf_str(cx));
- do indent {
- do cx.commit {
+ indent(|| {
+ cx.commit(|| {
let trace = TypeTrace {
origin: origin,
values: Types(expected_found(a_is_expected, a, b))
};
let suber = cx.sub(a_is_expected, trace);
eq_tys(&suber, a, b)
- }
- }.to_ures()
+ })
+ }).to_ures()
}
pub fn mk_sub_trait_refs(cx: @mut InferCtxt,
{
debug!("mk_sub_trait_refs({} <: {})",
a.inf_str(cx), b.inf_str(cx));
- do indent {
- do cx.commit {
+ indent(|| {
+ cx.commit(|| {
let trace = TypeTrace {
origin: origin,
values: TraitRefs(expected_found(a_is_expected, a, b))
};
let suber = cx.sub(a_is_expected, trace);
suber.trait_refs(a, b)
- }
- }.to_ures()
+ })
+ }).to_ures()
}
fn expected_found<T>(a_is_expected: bool,
b: ty::t)
-> CoerceResult {
debug!("mk_coercety({} -> {})", a.inf_str(cx), b.inf_str(cx));
- do indent {
- do cx.commit {
+ indent(|| {
+ cx.commit(|| {
let trace = TypeTrace {
origin: origin,
values: Types(expected_found(a_is_expected, a, b))
};
Coerce(cx.combine_fields(a_is_expected, trace)).tys(a, b)
- }
- }
+ })
+ })
}
pub fn can_mk_coercety(cx: @mut InferCtxt, a: ty::t, b: ty::t) -> ures {
debug!("can_mk_coercety({} -> {})", a.inf_str(cx), b.inf_str(cx));
- do indent {
- do cx.probe {
+ indent(|| {
+ cx.probe(|| {
let trace = TypeTrace {
origin: Misc(codemap::dummy_sp()),
values: Types(expected_found(true, a, b))
};
Coerce(cx.combine_fields(true, trace)).tys(a, b)
- }
- }.to_ures()
+ })
+ }).to_ures()
}
// See comment on the type `resolve_state` below
impl<T:Clone + Eq> CresCompare<T> for cres<T> {
fn compare(&self, t: T, f: || -> ty::type_err) -> cres<T> {
- do (*self).clone().and_then |s| {
+ (*self).clone().and_then(|s| {
if s == t {
(*self).clone()
} else {
Err(f())
}
- }
+ })
}
}
assert!(!self.in_snapshot());
debug!("commit()");
- do indent {
+ indent(|| {
let r = self.try(|| f());
self.ty_var_bindings.bindings.truncate(0);
self.int_var_bindings.bindings.truncate(0);
self.region_vars.commit();
r
- }
+ })
}
/// Execute `f`, unroll bindings on failure
/// Execute `f` then unroll any bindings it creates
pub fn probe<T,E>(@mut self, f: || -> Result<T,E>) -> Result<T,E> {
debug!("probe()");
- do indent {
+ indent(|| {
let snapshot = self.start_snapshot();
let r = f();
self.rollback_to(&snapshot);
r
- }
+ })
}
}
err: Option<&ty::type_err>) {
debug!("hi! expected_ty = {:?}, actual_ty = {}", expected_ty, actual_ty);
- let error_str = do err.map_default(~"") |t_err| {
+ let error_str = err.map_default(~"", |t_err| {
format!(" ({})", ty::type_err_to_str(self.tcx, t_err))
- };
- let resolved_expected = do expected_ty.map |e_ty| {
+ });
+ let resolved_expected = expected_ty.map(|e_ty| {
self.resolve_type_vars_if_possible(e_ty)
- };
+ });
if !resolved_expected.map_default(false, |e| { ty::type_is_error(e) }) {
match resolved_expected {
None => self.tcx.sess.span_err(sp,
}
fn expansion(&self, var_data: &mut [VarData]) {
- do self.iterate_until_fixed_point("Expansion") |constraint| {
+ self.iterate_until_fixed_point("Expansion", |constraint| {
match *constraint {
ConstrainRegSubVar(a_region, b_vid) => {
let b_data = &mut var_data[b_vid.to_uint()];
false
}
}
- }
+ })
}
fn expand_node(&self,
fn contraction(&self,
var_data: &mut [VarData]) {
- do self.iterate_until_fixed_point("Contraction") |constraint| {
+ self.iterate_until_fixed_point("Contraction", |constraint| {
match *constraint {
ConstrainRegSubVar(*) => {
// This is an expansion constraint. Ignore.
false
}
}
- }
+ })
}
fn contract_node(&self,
debug!("process_edges(source_vid={:?}, dir={:?})", source_vid, dir);
let source_node_index = NodeIndex(source_vid.to_uint());
- do graph.each_adjacent_edge(source_node_index, dir) |_, edge| {
+ graph.each_adjacent_edge(source_node_index, dir, |_, edge| {
match edge.data {
ConstrainVarSubVar(from_vid, to_vid) => {
let opp_vid =
ConstrainRegSubReg(*) => {}
}
true
- };
+ });
}
}
// Second, we instantiate each bound region in the supertype with a
// fresh concrete region.
let (skol_map, _, b_sig) = {
- do replace_bound_regions_in_fn_sig(self.infcx.tcx, None, b) |br| {
+ replace_bound_regions_in_fn_sig(self.infcx.tcx, None, b, |br| {
let skol = self.infcx.region_vars.new_skolemized(br);
debug!("Bound region {} skolemized to {:?}",
bound_region_to_str(self.infcx.tcx, "", false, br),
skol);
skol
- }
+ })
};
debug!("a_sig={}", a_sig.inf_str(self.infcx));
fn find(&self, br: ty::BoundRegion) -> Option<ty::Region> {
let mut ret = None;
- do list::each(*self) |isr| {
+ list::each(*self, |isr| {
let (isr_br, isr_r) = *isr;
if isr_br == br { ret = Some(isr_r); false } else { true }
- };
+ });
ret
}
}
let cx = local_data::get(super::ctxtkey, |x| *x.unwrap());
let mut externs = HashMap::new();
- do cstore::iter_crate_data(cx.sess.cstore) |n, meta| {
+ cstore::iter_crate_data(cx.sess.cstore, |n, meta| {
externs.insert(n, meta.clean());
- }
+ });
Crate {
name: match maybe_meta {
}
// Did someone say rightward-drift?
- do local_data::get(current_location_key) |loc| {
+ local_data::get(current_location_key, |loc| {
let loc = loc.unwrap();
- do local_data::get(cache_key) |cache| {
- do cache.unwrap().read |cache| {
+ local_data::get(cache_key, |cache| {
+ cache.unwrap().read(|cache| {
let abs_root = root(cache, loc.as_slice());
let rel_root = match path.segments[0].name.as_slice() {
"self" => Some(~"./"),
}
}
write!(w, "{}", generics);
- }
- }
- }
+ })
+ })
+ })
}
/// Helper to render type parameters
fn fmt(g: &clean::Type, f: &mut fmt::Formatter) {
match *g {
clean::TyParamBinder(id) | clean::Generic(id) => {
- do local_data::get(cache_key) |cache| {
- do cache.unwrap().read |m| {
+ local_data::get(cache_key, |cache| {
+ cache.unwrap().read(|m| {
f.buf.write(m.typarams.get(&id).as_bytes());
- }
- }
+ })
+ })
}
clean::ResolvedPath{id, typarams: ref tp, path: ref path} => {
resolved_path(f.buf, id, path, false);
let markdown = sd_markdown_new(extensions, 16, &callbacks,
&options as *html_renderopt as *libc::c_void);
- do s.as_imm_buf |data, len| {
+ s.as_imm_buf(|data, len| {
sd_markdown_render(ob, data, len as libc::size_t, markdown);
- }
+ });
sd_markdown_free(markdown);
- do vec::raw::buf_as_slice((*ob).data, (*ob).size as uint) |buf| {
+ vec::raw::buf_as_slice((*ob).data, (*ob).size as uint, |buf| {
w.write(buf);
- }
+ });
bufrelease(ob);
}
/// Makes a directory on the filesystem, failing the task if an error occurs and
/// skipping if the directory already exists.
fn mkdir(path: &Path) {
- do io::io_error::cond.trap(|err| {
+ io::io_error::cond.trap(|err| {
error!("Couldn't create directory `{}`: {}",
path.display(), err.desc);
fail!()
- }).inside {
+ }).inside(|| {
if !path.is_dir() {
fs::mkdir(path, io::UserRWX);
}
- }
+ })
}
/// Takes a path to a source file and cleans the path to it. This canonicalizes
// Create the intermediate directories
let mut cur = self.dst.clone();
let mut root_path = ~"../../";
- do clean_srcpath(p.dirname()) |component| {
+ clean_srcpath(p.dirname(), |component| {
cur.push(component);
mkdir(&cur);
root_path.push_str("../");
- }
+ });
cur.push(p.filename().expect("source has no filename") + bytes!(".html"));
let mut w = BufferedWriter::new(File::create(&cur).unwrap());
clean::ImplItem(ref i) => {
match i.trait_ {
Some(clean::ResolvedPath{ id, _ }) => {
- let v = do self.implementors.find_or_insert_with(id) |_|{
+ let v = self.implementors.find_or_insert_with(id, |_|{
~[]
- };
+ });
match i.for_ {
clean::ResolvedPath{_} => {
v.unshift(PathType(i.for_.clone()));
clean::Item{ attrs, inner: clean::ImplItem(i), _ } => {
match i.for_ {
clean::ResolvedPath { id, _ } => {
- let v = do self.impls.find_or_insert_with(id) |_| {
+ let v = self.impls.find_or_insert_with(id, |_| {
~[]
- };
+ });
// extract relevant documentation for this impl
match attrs.move_iter().find(|a| {
match *a {
Process(cx, item) => {
let mut cx = cx;
let item = Cell::new(item);
- do (|| {
- do cx.item(item.take()) |cx, item| {
+ (|| {
+ cx.item(item.take(), |cx, item| {
prog_chan.send(JobNew);
chan.send(Process(cx.clone(), item));
- }
- }).finally {
+ })
+ }).finally(|| {
// If we fail, everything else should still get
// completed
prog_chan.send(JobDone);
- }
+ })
}
Die => break,
}
clean::ModuleItem(*) => {
let name = item.name.get_ref().to_owned();
let item = Cell::new(item);
- do self.recurse(name) |this| {
+ self.recurse(name, |this| {
let item = item.take();
let dst = this.dst.join("index.html");
render(File::create(&dst).unwrap(), this, &item, false);
for item in m.items.move_iter() {
f(this, item);
}
- }
+ })
}
// Things which don't have names (like impls) don't get special
if it.cx.include_sources {
let mut path = ~[];
- do clean_srcpath(it.item.source.filename.as_bytes()) |component| {
+ clean_srcpath(it.item.source.filename.as_bytes(), |component| {
path.push(component.to_owned());
- }
+ });
let href = if it.item.source.loline == it.item.source.hiline {
format!("{}", it.item.source.loline)
} else {
}
debug!("{:?}", indices);
- do sort::quick_sort(indices) |&i1, &i2| {
- lt(&items[i1], &items[i2], i1, i2)
- }
+ sort::quick_sort(indices, |&i1, &i2| lt(&items[i1], &items[i2], i1, i2));
debug!("{:?}", indices);
let mut curty = "";
write!(w, "</div>");
}
- do local_data::get(cache_key) |cache| {
- do cache.unwrap().read |cache| {
+ local_data::get(cache_key, |cache| {
+ cache.unwrap().read(|cache| {
match cache.implementors.find(&it.id) {
Some(implementors) => {
write!(w, "
}
None => {}
}
- }
- }
+ })
+ })
}
fn render_method(w: &mut Writer, meth: &clean::Item, withlink: bool) {
}
fn render_methods(w: &mut Writer, it: &clean::Item) {
- do local_data::get(cache_key) |cache| {
+ local_data::get(cache_key, |cache| {
let cache = cache.unwrap();
- do cache.read |c| {
+ cache.read(|c| {
match c.impls.find(&it.id) {
Some(v) => {
let mut non_trait = v.iter().filter(|p| {
}
None => {}
}
- }
- }
+ })
+ })
}
fn render_impl(w: &mut Writer, i: &clean::Impl, dox: &Option<~str>) {
None => continue,
Some(id) => id,
};
- do local_data::get(cache_key) |cache| {
- do cache.unwrap().read |cache| {
+ local_data::get(cache_key, |cache| {
+ cache.unwrap().read(|cache| {
match cache.traits.find(&trait_id) {
Some(t) => {
let name = meth.name.clone();
}
None => {}
}
- }
- }
+ })
+ })
}
// If we've implemented a trait, then also emit documentation for all
match trait_id {
None => {}
Some(id) => {
- do local_data::get(cache_key) |cache| {
- do cache.unwrap().read |cache| {
+ local_data::get(cache_key, |cache| {
+ cache.unwrap().read(|cache| {
match cache.traits.find(&id) {
Some(t) => {
for method in t.methods.iter() {
}
None => {}
}
- }
- }
+ })
+ })
}
}
write!(w, "</div>");
// This stripper collects all *retained* nodes.
let mut retained = HashSet::new();
let crate = Cell::new(crate);
- let exported_items = do local_data::get(super::analysiskey) |analysis| {
+ let exported_items = local_data::get(super::analysiskey, |analysis| {
analysis.unwrap().exported_items.clone()
- };
+ });
let mut crate = crate.take();
// strip all private items
let lines = s.lines_any().collect::<~[&str]>();
let mut saw_first_line = false;
let mut saw_second_line = false;
- let min_indent = do lines.iter().fold(uint::max_value) |min_indent, line| {
+ let min_indent = lines.iter().fold(uint::max_value, |min_indent, line| {
// After we see the first non-whitespace line, look at
// the line we have. If it is not whitespace, and therefore
} else {
saw_first_line = true;
let mut spaces = 0;
- do line.chars().all |char| {
+ line.chars().all(|char| {
// Only comparing against space because I wouldn't
// know what to do with mixed whitespace chars
if char == ' ' {
} else {
false
}
- };
+ });
num::min(min_indent, spaces)
}
- };
+ });
match lines {
[head, .. tail] => {
let mut unindented = ~[ head.trim() ];
- unindented.push_all(do tail.map |&line| {
+ unindented.push_all(tail.map(|&line| {
if line.is_whitespace() {
line
} else {
assert!(line.len() >= min_indent);
line.slice_from(min_indent)
}
- });
+ }));
unindented.connect("\n")
}
[] => s.to_owned()
pub fn list_installed_packages(f: |&PkgId| -> bool) -> bool {
let workspaces = rust_path();
for p in workspaces.iter() {
- let binfiles = do io::ignore_io_error { fs::readdir(&p.join("bin")) };
+ let binfiles = io::ignore_io_error(|| fs::readdir(&p.join("bin")));
for exec in binfiles.iter() {
// FIXME (#9639): This needs to handle non-utf8 paths
match exec.filestem_str() {
}
}
}
- let libfiles = do io::ignore_io_error { fs::readdir(&p.join("lib")) };
+ let libfiles = io::ignore_io_error(|| fs::readdir(&p.join("lib")));
for lib in libfiles.iter() {
debug!("Full name: {}", lib.display());
match has_library(lib) {
let rel_p = lib.path_relative_from(&parent).unwrap();
debug!("Rel: {}", rel_p.display());
let rel_path = rel_p.join(basename);
- do rel_path.display().with_str |s| {
+ rel_path.display().with_str(|s| {
debug!("Rel name: {}", s);
f(&PkgId::new(s));
- }
+ });
}
None => ()
}
}
pub fn has_library(p: &Path) -> Option<~str> {
- let files = do io::ignore_io_error { fs::readdir(p) };
+ let files = io::ignore_io_error(|| fs::readdir(p));
for path in files.iter() {
if path.extension_str() == Some(os::consts::DLL_EXTENSION) {
let stuff : &str = path.filestem_str().expect("has_library: weird path");
pub fn package_is_installed(p: &PkgId) -> bool {
let mut is_installed = false;
- do list_installed_packages() |installed| {
+ list_installed_packages(|installed| {
if installed == p {
is_installed = true;
false
} else {
true
}
- };
+ });
is_installed
}
// argument
let pkgid = PkgId::new(args[0].clone());
let mut dest_ws = default_workspace();
- do each_pkg_parent_workspace(&self.context, &pkgid) |workspace| {
+ each_pkg_parent_workspace(&self.context, &pkgid, |workspace| {
debug!("found pkg {} in workspace {}, trying to build",
pkgid.to_str(), workspace.display());
dest_ws = determine_destination(os::getcwd(),
false, pkgid.clone());
self.build(&mut pkg_src, what);
true
- };
+ });
// n.b. If this builds multiple packages, it only returns the workspace for
// the last one. The whole building-multiple-packages-with-the-same-ID is weird
// anyway and there are no tests for it, so maybe take it out
}
"list" => {
println("Installed packages:");
- do installed_packages::list_installed_packages |pkg_id| {
- do pkg_id.path.display().with_str |s| {
- println(s);
- }
+ installed_packages::list_installed_packages(|pkg_id| {
+ pkg_id.path.display().with_str(|s| println(s));
true
- };
+ });
}
"prefer" => {
if args.len() < 1 {
else {
let rp = rust_path();
assert!(!rp.is_empty());
- do each_pkg_parent_workspace(&self.context, &pkgid) |workspace| {
+ each_pkg_parent_workspace(&self.context, &pkgid, |workspace| {
path_util::uninstall_package_from(workspace, &pkgid);
note(format!("Uninstalled package {} (was installed in {})",
pkgid.to_str(), workspace.display()));
true
- };
+ });
}
}
"unprefer" => {
// Build the package script if needed
let script_build = format!("build_package_script({})",
package_script_path.display());
- let pkg_exe = do self.workcache_context.with_prep(script_build) |prep| {
+ let pkg_exe = self.workcache_context.with_prep(script_build, |prep| {
let subsysroot = sysroot.clone();
let psp = package_script_path.clone();
let ws = workspace.clone();
let pid = pkgid.clone();
- do prep.exec |exec| {
+ prep.exec(|exec| {
let mut pscript = PkgScript::parse(subsysroot.clone(),
psp.clone(),
&ws,
&pid);
pscript.build_custom(exec)
- }
- };
+ })
+ });
// We always *run* the package script
let (cfgs, hook_result) = PkgScript::run_custom(&Path::new(pkg_exe), &sysroot);
debug!("Command return code = {:?}", hook_result);
target_exec.display(), target_lib,
maybe_executable, maybe_library);
- do self.workcache_context.with_prep(id.install_tag()) |prep| {
+ self.workcache_context.with_prep(id.install_tag(), |prep| {
for ee in maybe_executable.iter() {
// FIXME (#9639): This needs to handle non-utf8 paths
prep.declare_input("binary",
let sub_target_ex = target_exec.clone();
let sub_target_lib = target_lib.clone();
let sub_build_inputs = build_inputs.to_owned();
- do prep.exec |exe_thing| {
+ prep.exec(|exe_thing| {
let mut outputs = ~[];
// Declare all the *inputs* to the declared input too, as inputs
for executable in subex.iter() {
outputs.push(target_lib.as_str().unwrap().to_owned());
}
outputs
- }
- }
+ })
+ })
}
fn prefer(&self, _id: &str, _vers: Option<~str>) {
pub fn hash(&self) -> ~str {
// FIXME (#9639): hash should take a &[u8] so we can hash the real path
- do self.path.display().with_str |s| {
+ self.path.display().with_str(|s| {
let vers = self.version.to_str();
format!("{}-{}-{}", s, hash(s + vers), vers)
- }
+ })
}
pub fn short_name_with_version(&self) -> ~str {
let mut failed = false;
- do cond.trap(|_| {
+ cond.trap(|_| {
failed = true;
- }).inside {
- git_clone_url(url, &clone_target, &pkgid.version);
- };
+ }).inside(|| git_clone_url(url, &clone_target, &pkgid.version));
if failed {
return None;
debug!("build_crates: compiling {}", path.display());
let cfgs = crate.cfgs + cfgs;
- do ctx.workcache_context.with_prep(crate_tag(&path)) |prep| {
+ ctx.workcache_context.with_prep(crate_tag(&path), |prep| {
debug!("Building crate {}, declaring it as an input", path.display());
// FIXME (#9639): This needs to handle non-utf8 paths
prep.declare_input("file", path.as_str().unwrap(),
let sub_deps = deps.clone();
let inputs = inputs_to_discover.map(|&(ref k, ref p)|
(k.clone(), p.as_str().unwrap().to_owned()));
- do prep.exec |exec| {
+ prep.exec(|exec| {
for &(ref kind, ref p) in inputs.iter() {
let pth = Path::new(p.clone());
exec.discover_input(*kind, *p, if *kind == ~"file" {
let result = result.as_ref().map(|p|p.as_str().unwrap());
debug!("Result of compiling {} was {}", subpath.display(), result.to_str());
result.to_str()
- }
- };
+ })
+ });
}
}
if p.is_dir() {
if p == src_dir.join(&pkgid.path) || {
let pf = p.filename_str();
- do pf.iter().any |&g| {
+ pf.iter().any(|&g| {
match split_version_general(g, '-') {
None => false,
Some((ref might_match, ref vers)) => {
&& (pkgid.version == *vers || pkgid.version == NoVersion)
}
}
- }
+ })
} {
found = Some(p.clone());
}
fn library_in(short_name: &str, version: &Version, dir_to_search: &Path) -> Option<Path> {
debug!("Listing directory {}", dir_to_search.display());
- let dir_contents = do io::ignore_io_error { fs::readdir(dir_to_search) };
+ let dir_contents = io::ignore_io_error(|| fs::readdir(dir_to_search));
debug!("dir has {:?} entries", dir_contents.len());
let lib_prefix = format!("{}{}", os::consts::DLL_PREFIX, short_name);
// Find a filename that matches the pattern:
// (lib_prefix)-hash-(version)(lib_suffix)
- let mut libraries = do dir_contents.iter().filter |p| {
+ let mut libraries = dir_contents.iter().filter(|p| {
let extension = p.extension_str();
debug!("p = {}, p's extension is {:?}", p.display(), extension);
match extension {
None => false,
Some(ref s) => lib_filetype == *s
}
- };
+ });
let mut result_filename = None;
for p_path in libraries {
// Return the filename that matches, which we now know exists
// (if result_filename != None)
- let abs_path = do result_filename.map |result_filename| {
+ let abs_path = result_filename.map(|result_filename| {
let absolute_path = dir_to_search.join(&result_filename);
debug!("result_filename = {}", absolute_path.display());
absolute_path
- };
+ });
abs_path
}
#[cfg(target_os = "win32")]
pub fn chmod_read_only(p: &Path) -> bool {
unsafe {
- do p.with_c_str |src_buf| {
- libc::chmod(src_buf, S_IRUSR as libc::c_int) == 0 as libc::c_int
- }
+ p.with_c_str(|src_buf| libc::chmod(src_buf, S_IRUSR as libc::c_int) == 0 as libc::c_int)
}
}
#[cfg(not(target_os = "win32"))]
pub fn chmod_read_only(p: &Path) -> bool {
unsafe {
- do p.with_c_str |src_buf| {
- libc::chmod(src_buf, S_IRUSR as libc::mode_t) == 0
- as libc::c_int
- }
+ p.with_c_str(|src_buf| libc::chmod(src_buf, S_IRUSR as libc::mode_t) == 0 as libc::c_int)
}
}
unsafe {
let mut x: *mut i32 = transmute(dst.unsafe_mut_ref(0));
let mut y: *i32 = transmute(input.unsafe_ref(0));
- do dst.len().times() {
+ dst.len().times(|| {
*x = to_be32(*y);
x = x.offset(1);
y = y.offset(1);
- }
+ })
}
}
pub fn sha1_10(bh: & mut BenchHarness) {
let mut sh = Sha1::new();
let bytes = [1u8, ..10];
- do bh.iter {
- sh.input(bytes);
- }
+ bh.iter(|| sh.input(bytes));
bh.bytes = bytes.len() as u64;
}
pub fn sha1_1k(bh: & mut BenchHarness) {
let mut sh = Sha1::new();
let bytes = [1u8, ..1024];
- do bh.iter {
- sh.input(bytes);
- }
+ bh.iter(|| sh.input(bytes));
bh.bytes = bytes.len() as u64;
}
pub fn sha1_64k(bh: & mut BenchHarness) {
let mut sh = Sha1::new();
let bytes = [1u8, ..65536];
- do bh.iter {
- sh.input(bytes);
- }
+ bh.iter(|| sh.input(bytes));
bh.bytes = bytes.len() as u64;
}
}
fn test_executable_exists(repo: &Path, short_name: &str) -> bool {
debug!("test_executable_exists: repo = {}, short_name = {}", repo.display(), short_name);
let exec = built_test_in_workspace(&PkgId::new(short_name), repo);
- do exec.map_default(false) |exec| {
- exec.exists() && is_rwx(&exec)
- }
+ exec.map_default(false, |exec| exec.exists() && is_rwx(&exec))
}
fn remove_executable_file(p: &PkgId, workspace: &Path) {
debug!("Frobbed? {:?}", maybe_p);
match maybe_p {
Some(ref p) => {
- do io::io_error::cond.trap(|e| {
+ io::io_error::cond.trap(|e| {
cond.raise((p.clone(), format!("Bad path: {}", e.desc)));
- }).inside {
+ }).inside(|| {
let mut w = File::open_mode(p, io::Append, io::Write);
w.write(bytes!("/* hi */\n"));
- }
+ })
}
None => fail!("frob_source_file failed to find a source file in {}",
pkg_src_dir.display())
assert!("github.com/catamorphism/test-pkg-0.1" ==
PkgId::new("github.com/catamorphism/test-pkg").to_str());
- do cond.trap(|(p, e)| {
+ cond.trap(|(p, e)| {
assert!(p.filename().is_none())
assert!("0-length pkgid" == e);
whatever.clone()
- }).inside {
+ }).inside(|| {
let x = PkgId::new("");
assert_eq!(~"foo-0.1", x.to_str());
- }
+ });
- do cond.trap(|(p, e)| {
+ cond.trap(|(p, e)| {
let abs = os::make_absolute(&Path::new("foo/bar/quux"));
assert_eq!(p, abs);
assert!("absolute pkgid" == e);
whatever.clone()
- }).inside {
+ }).inside(|| {
let zp = os::make_absolute(&Path::new("foo/bar/quux"));
// FIXME (#9639): This needs to handle non-utf8 paths
let z = PkgId::new(zp.as_str().unwrap());
assert_eq!(~"foo-0.1", z.to_str());
- }
+ })
}
let my_workspace = api::my_workspace(&context.context, "cdep");
let foo_c_name = my_workspace.join_many(["src", "cdep-0.1", "foo.c"]);
- let out_lib_path = do context.workcache_context.with_prep("foo.c") |prep| {
+ let out_lib_path = context.workcache_context.with_prep("foo.c", |prep| {
let sub_cx = context.context.clone();
debug!("foo_c_name = {}", foo_c_name.display());
prep.declare_input("file",
foo_c_name.as_str().unwrap().to_owned(),
digest_file_with_date(&foo_c_name));
- let out_path = do prep.exec |exec| {
+ let out_path = prep.exec(|exec| {
let out_path = api::build_library_in_workspace(exec,
&mut sub_cx.clone(),
"cdep",
"foo");
let out_p = Path::new(out_path);
out_p.as_str().unwrap().to_owned()
- };
+ });
out_path
- };
+ });
let out_lib_path = Path::new(out_lib_path);
debug!("out_lib_path = {}", out_lib_path.display());
context.add_library_path(out_lib_path.dir_path());
-> ast::_mod {
fn strip_main(item: @ast::item) -> @ast::item {
@ast::item {
- attrs: do item.attrs.iter().filter_map |attr| {
+ attrs: item.attrs.iter().filter_map(|attr| {
if "main" != attr.name() {
Some(*attr)
} else {
None
}
- }.collect(),
+ }).collect(),
.. (*item).clone()
}
}
fold::noop_fold_mod(&ast::_mod {
- items: do m.items.map |item| {
- strip_main(*item)
- },
+ items: m.items.map(|item| strip_main(*item)),
.. (*m).clone()
}, fold)
}
// and the `PkgSrc` constructor will detect that;
// or else it's already in a workspace and we'll build into that
// workspace
- let pkg_src = do cond.trap(|_| {
+ let pkg_src = cond.trap(|_| {
// Nonexistent package? Then print a better error
error(format!("Package {} depends on {}, but I don't know \
how to find it",
self.parent.path.display(),
pkg_id.path.display()));
fail!()
- }).inside {
+ }).inside(|| {
PkgSrc::new(source_workspace.clone(),
dest_workspace.clone(),
// Use the rust_path_hack to search for dependencies iff
// we were already using it
self.context.context.use_rust_path_hack,
pkg_id.clone())
- };
+ });
let (outputs_disc, inputs_disc) =
self.context.install(
pkg_src,