use lib::llvm::{ArchiveRef, llvm};
use std::cast;
+use std::vec_ng::Vec;
use std::io::fs;
use std::io;
use std::libc;
paths: &[&Path]) -> ProcessOutput {
let ar = get_ar_prog(sess);
- let mut args = ~[args.to_owned()];
+ let mut args = vec!(args.to_owned());
let mut paths = paths.iter().map(|p| p.as_str().unwrap().to_owned());
args.extend(&mut paths);
debug!("{} {}", ar, args.connect(" "));
}
/// Read a file in the archive
- pub fn read(&self, file: &str) -> ~[u8] {
+ pub fn read(&self, file: &str) -> Vec<u8> {
// Apparently if "ar p" is used on windows, it generates a corrupt file
// which has bad headers and LLVM will immediately choke on it
if cfg!(windows) && cfg!(windows) { // FIXME(#10734) double-and
lto: bool) -> io::IoResult<()> {
let object = format!("{}.o", name);
let bytecode = format!("{}.bc", name);
- let mut ignore = ~[METADATA_FILENAME, bytecode.as_slice()];
+ let mut ignore = vec!(METADATA_FILENAME, bytecode.as_slice());
if lto {
ignore.push(object.as_slice());
}
}
/// Lists all files in an archive
- pub fn files(&self) -> ~[~str] {
+ pub fn files(&self) -> Vec<~str> {
let output = run_ar(self.sess, "t", None, [&self.dst]);
let output = str::from_utf8(output.output).unwrap();
// use lines_any because windows delimits output with `\r\n` instead of
// all SYMDEF files as these are just magical placeholders which get
// re-created when we make a new archive anyway.
let files = try!(fs::readdir(loc.path()));
- let mut inputs = ~[];
+ let mut inputs = Vec::new();
for file in files.iter() {
let filename = file.filename_str().unwrap();
if skip.iter().any(|s| *s == filename) { continue }
if inputs.len() == 0 { return Ok(()) }
// Finally, add all the renamed files to this archive
- let mut args = ~[&self.dst];
+ let mut args = vec!(&self.dst);
args.extend(&mut inputs.iter());
run_ar(self.sess, "r", None, args.as_slice());
Ok(())
pub fn get_target_strs(target_triple: ~str, target_os: abi::Os) -> target_strs::t {
let cc_args = if target_triple.contains("thumb") {
- ~[~"-mthumb"]
+ vec!(~"-mthumb")
} else {
- ~[~"-marm"]
+ vec!(~"-marm")
};
return target_strs::t {
module_asm: ~"",
let vectorize_slp = !sess.opts.cg.no_vectorize_slp &&
sess.opts.optimize == session::Aggressive;
- let mut llvm_c_strs = ~[];
- let mut llvm_args = ~[];
+ let mut llvm_c_strs = Vec::new();
+ let mut llvm_args = Vec::new();
{
let add = |arg: &str| {
let s = arg.to_c_str();
pub fn link_binary(sess: Session,
trans: &CrateTranslation,
outputs: &OutputFilenames,
- id: &CrateId) -> ~[Path] {
- let mut out_filenames = ~[];
+ id: &CrateId) -> Vec<Path> {
+ let mut out_filenames = Vec::new();
let crate_types = sess.crate_types.borrow();
for &crate_type in crate_types.get().iter() {
let out_file = link_binary_output(sess, trans, crate_type, outputs, id);
dylib: bool,
tmpdir: &Path,
obj_filename: &Path,
- out_filename: &Path) -> ~[~str] {
+ out_filename: &Path) -> Vec<~str> {
// The default library location, we need this to find the runtime.
// The location of crates will be determined as needed.
let lib_path = sess.filesearch.get_target_lib_path();
let stage: ~str = ~"-L" + lib_path.as_str().unwrap();
- let mut args = ~[stage];
+ let mut args = vec!(stage);
// FIXME (#9639): This needs to handle non-utf8 paths
args.push_all([
// Also note that the native libraries linked here are only the ones located
// in the current crate. Upstream crates with native library dependencies
// may have their native library pulled in above.
-fn add_local_native_libraries(args: &mut ~[~str], sess: Session) {
+fn add_local_native_libraries(args: &mut Vec<~str> , sess: Session) {
let addl_lib_search_paths = sess.opts.addl_lib_search_paths.borrow();
for path in addl_lib_search_paths.get().iter() {
// FIXME (#9639): This needs to handle non-utf8 paths
// Rust crates are not considered at all when creating an rlib output. All
// dependencies will be linked when producing the final output (instead of
// the intermediate rlib version)
-fn add_upstream_rust_crates(args: &mut ~[~str], sess: Session,
+fn add_upstream_rust_crates(args: &mut Vec<~str> , sess: Session,
dylib: bool, tmpdir: &Path) {
// As a limitation of the current implementation, we require that everything
// returning `None` if not all libraries could be found with that
// preference.
fn get_deps(cstore: &cstore::CStore, preference: cstore::LinkagePreference)
- -> Option<~[(ast::CrateNum, Path)]>
+ -> Option<Vec<(ast::CrateNum, Path)> >
{
let crates = cstore.get_used_crates(preference);
if crates.iter().all(|&(_, ref p)| p.is_some()) {
}
// Adds the static "rlib" versions of all crates to the command line.
- fn add_static_crates(args: &mut ~[~str], sess: Session, tmpdir: &Path,
- crates: ~[(ast::CrateNum, Path)]) {
+ fn add_static_crates(args: &mut Vec<~str> , sess: Session, tmpdir: &Path,
+ crates: Vec<(ast::CrateNum, Path)> ) {
for (cnum, cratepath) in crates.move_iter() {
// When performing LTO on an executable output, all of the
// bytecode from the upstream libraries has already been
}
// Same thing as above, but for dynamic crates instead of static crates.
- fn add_dynamic_crates(args: &mut ~[~str], sess: Session,
- crates: ~[(ast::CrateNum, Path)]) {
+ fn add_dynamic_crates(args: &mut Vec<~str> , sess: Session,
+ crates: Vec<(ast::CrateNum, Path)> ) {
// If we're performing LTO, then it should have been previously required
// that all upstream rust dependencies were available in an rlib format.
assert!(!sess.lto());
// generic function calls a native function, then the generic function must
// be instantiated in the target crate, meaning that the native symbol must
// also be resolved in the target crate.
-fn add_upstream_native_libraries(args: &mut ~[~str], sess: Session) {
+fn add_upstream_native_libraries(args: &mut Vec<~str> , sess: Session) {
let cstore = sess.cstore;
cstore.iter_crate_data(|cnum, _| {
let libs = csearch::get_native_libraries(cstore, cnum);
target_triple: target_triple,
- cc_args: ~[],
+ cc_args: Vec::new(),
};
}
os != abi::OsWin32
}
-pub fn get_rpath_flags(sess: session::Session, out_filename: &Path) -> ~[~str] {
+pub fn get_rpath_flags(sess: session::Session, out_filename: &Path) -> Vec<~str> {
let os = sess.targ_cfg.os;
// No rpath on windows
if os == abi::OsWin32 {
- return ~[];
+ return Vec::new();
}
- let mut flags = ~[];
+ let mut flags = Vec::new();
if sess.targ_cfg.os == abi::OsFreebsd {
flags.push_all([~"-Wl,-rpath,/usr/local/lib/gcc46",
p
}
-pub fn rpaths_to_flags(rpaths: &[~str]) -> ~[~str] {
- let mut ret = ~[];
+pub fn rpaths_to_flags(rpaths: &[~str]) -> Vec<~str> {
+ let mut ret = Vec::new();
for rpath in rpaths.iter() {
ret.push("-Wl,-rpath," + *rpath);
}
sysroot: &Path,
output: &Path,
libs: &[Path],
- target_triple: &str) -> ~[~str] {
+ target_triple: &str) -> Vec<~str> {
debug!("sysroot: {}", sysroot.display());
debug!("output: {}", output.display());
debug!("libs:");
let abs_rpaths = get_absolute_rpaths(libs);
// And a final backup rpath to the global library location.
- let fallback_rpaths = ~[get_install_prefix_rpath(target_triple)];
+ let fallback_rpaths = vec!(get_install_prefix_rpath(target_triple));
fn log_rpaths(desc: &str, rpaths: &[~str]) {
debug!("{} rpaths:", desc);
fn get_rpaths_relative_to_output(os: abi::Os,
output: &Path,
- libs: &[Path]) -> ~[~str] {
+ libs: &[Path]) -> Vec<~str> {
libs.iter().map(|a| get_rpath_relative_to_output(os, output, a)).collect()
}
prefix+"/"+relative.as_str().expect("non-utf8 component in path")
}
-fn get_absolute_rpaths(libs: &[Path]) -> ~[~str] {
+fn get_absolute_rpaths(libs: &[Path]) -> Vec<~str> {
libs.iter().map(|a| get_absolute_rpath(a)).collect()
}
path.as_str().expect("non-utf8 component in rpath").to_owned()
}
-pub fn minimize_rpaths(rpaths: &[~str]) -> ~[~str] {
+pub fn minimize_rpaths(rpaths: &[~str]) -> Vec<~str> {
let mut set = HashSet::new();
- let mut minimized = ~[];
+ let mut minimized = Vec::new();
for rpath in rpaths.iter() {
if set.insert(rpath.as_slice()) {
minimized.push(rpath.clone());
#[test]
fn test_rpaths_to_flags() {
let flags = rpaths_to_flags([~"path1", ~"path2"]);
- assert_eq!(flags, ~[~"-Wl,-rpath,path1", ~"-Wl,-rpath,path2"]);
+ assert_eq!(flags, vec!(~"-Wl,-rpath,path1", ~"-Wl,-rpath,path2"));
}
#[test]
meta_sect_name: ~str,
data_layout: ~str,
target_triple: ~str,
- cc_args: ~[~str],
+ cc_args: Vec<~str> ,
}
target_triple: target_triple,
- cc_args: ~[~"-m32"],
+ cc_args: vec!(~"-m32"),
};
}
target_triple: target_triple,
- cc_args: ~[~"-m64"],
+ cc_args: vec!(~"-m64"),
};
}
}
// Convert strings provided as --cfg [cfgspec] into a crate_cfg
-fn parse_cfgspecs(cfgspecs: ~[~str])
+fn parse_cfgspecs(cfgspecs: Vec<~str> )
-> ast::CrateConfig {
cfgspecs.move_iter().map(|s| {
let sess = parse::new_parse_sess();
module: ModuleRef,
metadata_module: ModuleRef,
link: LinkMeta,
- metadata: ~[u8],
- reachable: ~[~str],
+ metadata: Vec<u8> ,
+ reachable: Vec<~str> ,
}
/// Run the translation phase to LLVM, after which the AST and analysis can
krate: &ast::Crate) -> io::IoResult<()> {
let id = link::find_crate_id(krate.attrs.as_slice(), outputs);
- let mut out_filenames = ~[];
+ let mut out_filenames = Vec::new();
for output_type in sess.opts.output_types.iter() {
let file = outputs.path(*output_type);
match *output_type {
// Build a list of files used to compile the output and
// write Makefile-compatible dependency rules
- let files: ~[~str] = {
+ let files: Vec<~str> = {
let files = sess.codemap.files.borrow();
files.get()
.iter()
let lint_levels = [lint::allow, lint::warn,
lint::deny, lint::forbid];
- let mut lint_opts = ~[];
+ let mut lint_opts = Vec::new();
let lint_dict = lint::get_lint_dict();
for level in lint_levels.iter() {
let level_name = lint::level_to_str(*level);
let level_short = level_name.slice_chars(0, 1);
let level_short = level_short.to_ascii().to_upper().into_str();
- let flags = vec::append(matches.opt_strs(level_short),
+ let flags = vec_ng::append(matches.opt_strs(level_short),
matches.opt_strs(level_name));
for lint_name in flags.iter() {
let lint_name = lint_name.replace("-", "_");
}
let mut output_types = if parse_only || no_trans {
- ~[]
+ Vec::new()
} else {
matches.opt_strs("emit").flat_map(|s| {
s.split(',').map(|part| {
working_dir: os::getcwd(),
lints: RefCell::new(HashMap::new()),
node_id: Cell::new(1),
- crate_types: @RefCell::new(~[]),
+ crate_types: @RefCell::new(Vec::new()),
features: front::feature_gate::Features::new()
}
}
}
// rustc command line options
-pub fn optgroups() -> ~[getopts::OptGroup] {
- ~[
+pub fn optgroups() -> Vec<getopts::OptGroup> {
+ vec!(
optflag("h", "help", "Display this message"),
optmulti("", "cfg", "Configure the compilation environment", "SPEC"),
optmulti("L", "", "Add a directory to the library search path", "PATH"),
optmulti("F", "forbid", "Set lint forbidden", "OPT"),
optmulti("C", "codegen", "Set a codegen option", "OPT[=VALUE]"),
optmulti("Z", "", "Set internal debugging options", "FLAG"),
- optflag( "v", "version", "Print version info and exit"),
- ]
+ optflag( "v", "version", "Print version info and exit"))
}
pub struct OutputFilenames {
0
)
-pub fn debugging_opts_map() -> ~[(&'static str, &'static str, u64)] {
- ~[("verbose", "in general, enable more debug printouts", VERBOSE),
+pub fn debugging_opts_map() -> Vec<(&'static str, &'static str, u64)> {
+ vec!(("verbose", "in general, enable more debug printouts", VERBOSE),
("time-passes", "measure time of each rustc pass", TIME_PASSES),
("count-llvm-insns", "count where LLVM \
instrs originate", COUNT_LLVM_INSNS),
PRINT_LLVM_PASSES),
("lto", "Perform LLVM link-time optimizations", LTO),
("ast-json", "Print the AST as JSON and halt", AST_JSON),
- ("ast-json-noexpand", "Print the pre-expansion AST as JSON and halt", AST_JSON_NOEXPAND),
- ]
+ ("ast-json-noexpand", "Print the pre-expansion AST as JSON and halt", AST_JSON_NOEXPAND))
}
#[deriving(Clone, Eq)]
pub struct Options {
// The crate config requested for the session, which may be combined
// with additional crate configurations during the compile process
- crate_types: ~[CrateType],
+ crate_types: Vec<CrateType> ,
gc: bool,
optimize: OptLevel,
debuginfo: DebugInfoLevel,
- lint_opts: ~[(lint::Lint, lint::level)],
- output_types: ~[back::link::OutputType],
+ lint_opts: Vec<(lint::Lint, lint::level)> ,
+ output_types: Vec<back::link::OutputType> ,
// This was mutable for rustpkg, which updates search paths based on the
// parsed code. It remains mutable in case its replacements wants to use
// this.
local_crate_source_file: Option<Path>,
working_dir: Path,
lints: RefCell<HashMap<ast::NodeId,
- ~[(lint::Lint, codemap::Span, ~str)]>>,
+ Vec<(lint::Lint, codemap::Span, ~str)> >>,
node_id: Cell<ast::NodeId>,
- crate_types: @RefCell<~[CrateType]>,
+ crate_types: @RefCell<Vec<CrateType> >,
features: front::feature_gate::Features
}
Some(arr) => { arr.push((lint, sp, msg)); return; }
None => {}
}
- lints.get().insert(id, ~[(lint, sp, msg)]);
+ lints.get().insert(id, vec!((lint, sp, msg)));
}
pub fn next_node_id(&self) -> ast::NodeId {
self.reserve_node_ids(1)
/// Some reasonable defaults
pub fn basic_options() -> @Options {
@Options {
- crate_types: ~[],
+ crate_types: Vec::new(),
gc: false,
optimize: No,
debuginfo: NoDebugInfo,
- lint_opts: ~[],
- output_types: ~[],
+ lint_opts: Vec::new(),
+ output_types: Vec::new(),
addl_lib_search_paths: @RefCell::new(HashSet::new()),
maybe_sysroot: None,
target_triple: host_triple(),
}
}
- fn parse_list(slot: &mut ~[~str], v: Option<&str>) -> bool {
+ fn parse_list(slot: &mut Vec<~str> , v: Option<&str>) -> bool {
match v {
Some(s) => {
for s in s.words() {
"tool to assemble archives with"),
linker: Option<~str> = (None, parse_opt_string,
"system linker to link outputs with"),
- link_args: ~[~str] = (~[], parse_list,
+ link_args: Vec<~str> = (Vec::new(), parse_list,
"extra arguments to pass to the linker (space separated)"),
target_cpu: ~str = (~"generic", parse_string,
"select target processor (llc -mcpu=help for details)"),
target_feature: ~str = (~"", parse_string,
"target specific attributes (llc -mattr=help for details)"),
- passes: ~[~str] = (~[], parse_list,
+ passes: Vec<~str> = (Vec::new(), parse_list,
"a list of extra LLVM passes to run (space separated)"),
- llvm_args: ~[~str] = (~[], parse_list,
+ llvm_args: Vec<~str> = (Vec::new(), parse_list,
"a list of arguments to pass to llvm (space separated)"),
save_temps: bool = (false, parse_bool,
"save all temporary output files during compilation"),
}
pub fn collect_crate_types(session: &Session,
- attrs: &[ast::Attribute]) -> ~[CrateType] {
+ attrs: &[ast::Attribute]) -> Vec<CrateType> {
// If we're generating a test executable, then ignore all other output
// styles at all other locations
if session.opts.test {
- return ~[CrateTypeExecutable];
+ return vec!(CrateTypeExecutable);
}
let mut base = session.opts.crate_types.clone();
let mut iter = attrs.iter().filter_map(|a| {
}
fn fold_mod(cx: &mut Context, m: &ast::Mod) -> ast::Mod {
- let filtered_items: ~[&@ast::Item] = m.items.iter()
+ let filtered_items: Vec<&@ast::Item> = m.items.iter()
.filter(|&a| item_in_cfg(cx, *a))
.collect();
let flattened_items = filtered_items.move_iter()
}
fn fold_block(cx: &mut Context, b: ast::P<ast::Block>) -> ast::P<ast::Block> {
- let resulting_stmts: ~[&@ast::Stmt] =
+ let resulting_stmts: Vec<&@ast::Stmt> =
b.stmts.iter().filter(|&a| retain_stmt(cx, *a)).collect();
let resulting_stmts = resulting_stmts.move_iter()
.flat_map(|&stmt| cx.fold_stmt(stmt).move_iter())
}
struct Context {
- features: ~[&'static str],
+ features: Vec<&'static str> ,
sess: Session,
}
pub fn check_crate(sess: Session, krate: &ast::Crate) {
let mut cx = Context {
- features: ~[],
+ features: Vec::new(),
sess: sess,
};
struct Test {
span: Span,
- path: ~[ast::Ident],
+ path: Vec<ast::Ident> ,
bench: bool,
ignore: bool,
should_fail: bool
struct TestCtxt<'a> {
sess: session::Session,
- path: RefCell<~[ast::Ident]>,
+ path: RefCell<Vec<ast::Ident> >,
ext_cx: ExtCtxt<'a>,
- testfns: RefCell<~[Test]>,
+ testfns: RefCell<Vec<Test> >,
is_test_crate: bool,
config: ast::CrateConfig,
}
loader: loader,
deriving_hash_type_parameter: false,
}),
- path: RefCell::new(~[]),
- testfns: RefCell::new(~[]),
+ path: RefCell::new(Vec::new()),
+ testfns: RefCell::new(Vec::new()),
is_test_crate: is_test_crate(&krate),
config: krate.config.clone(),
};
let vi = if cx.is_test_crate {
ast::ViewItemUse(
vec!(@nospan(ast::ViewPathSimple(id_test,
- path_node(~[id_test]),
+ path_node(vec!(id_test)),
ast::DUMMY_NODE_ID))))
} else {
ast::ViewItemExternCrate(id_test,
codemap::Spanned { node: t, span: DUMMY_SP }
}
-fn path_node(ids: ~[ast::Ident]) -> ast::Path {
+fn path_node(ids: Vec<ast::Ident> ) -> ast::Path {
ast::Path {
span: DUMMY_SP,
global: false,
}
}
-fn path_node_global(ids: ~[ast::Ident]) -> ast::Path {
+fn path_node_global(ids: Vec<ast::Ident> ) -> ast::Path {
ast::Path {
span: DUMMY_SP,
global: true,
let lint_dict = lint::get_lint_dict();
let mut lint_dict = lint_dict.move_iter()
.map(|(k, v)| (v, k))
- .collect::<~[(lint::LintSpec, &'static str)]>();
+ .collect::<Vec<(lint::LintSpec, &'static str)> >();
lint_dict.sort();
let mut max_key = 0;
return;
}
- let lint_flags = vec::append(matches.opt_strs("W"),
+ let lint_flags = vec_ng::append(matches.opt_strs("W"),
matches.opt_strs("warn"));
if lint_flags.iter().any(|x| x == &~"help") {
describe_warnings();
}
fn parse_crate_attrs(sess: session::Session, input: &d::Input) ->
- ~[ast::Attribute] {
+ Vec<ast::Attribute> {
let result = match *input {
d::FileInput(ref ifile) => {
parse::parse_crate_attrs_from_file(ifile,
let mut e = Env {
sess: sess,
os: os,
- crate_cache: @RefCell::new(~[]),
+ crate_cache: @RefCell::new(Vec::new()),
next_crate_num: 1,
intr: intr
};
struct Env {
sess: Session,
os: loader::Os,
- crate_cache: @RefCell<~[cache_entry]>,
+ crate_cache: @RefCell<Vec<cache_entry> >,
next_crate_num: ast::CrateNum,
intr: @IdentInterner
}
env: Env {
sess: sess,
os: os,
- crate_cache: @RefCell::new(~[]),
+ crate_cache: @RefCell::new(Vec::new()),
next_crate_num: 1,
intr: token::get_ident_interner(),
}
callback)
}
-pub fn get_item_path(tcx: ty::ctxt, def: ast::DefId) -> ~[ast_map::PathElem] {
+pub fn get_item_path(tcx: ty::ctxt, def: ast::DefId) -> Vec<ast_map::PathElem> {
let cstore = tcx.cstore;
let cdata = cstore.get_crate_data(def.krate);
let path = decoder::get_item_path(cdata, def.node);
// FIXME #1920: This path is not always correct if the crate is not linked
// into the root namespace.
- vec::append(~[ast_map::PathMod(token::intern(cdata.name))], path)
+ vec_ng::append(vec!(ast_map::PathMod(token::intern(cdata.name))), path)
}
pub enum found_ast {
}
pub fn get_enum_variants(tcx: ty::ctxt, def: ast::DefId)
- -> ~[@ty::VariantInfo] {
+ -> Vec<@ty::VariantInfo> {
let cstore = tcx.cstore;
let cdata = cstore.get_crate_data(def.krate);
return decoder::get_enum_variants(cstore.intr, cdata, def.node, tcx)
}
pub fn get_trait_method_def_ids(cstore: @cstore::CStore,
- def: ast::DefId) -> ~[ast::DefId] {
+ def: ast::DefId) -> Vec<ast::DefId> {
let cdata = cstore.get_crate_data(def.krate);
decoder::get_trait_method_def_ids(cdata, def.node)
}
pub fn get_provided_trait_methods(tcx: ty::ctxt,
def: ast::DefId)
- -> ~[@ty::Method] {
+ -> Vec<@ty::Method> {
let cstore = tcx.cstore;
let cdata = cstore.get_crate_data(def.krate);
decoder::get_provided_trait_methods(cstore.intr, cdata, def.node, tcx)
}
-pub fn get_supertraits(tcx: ty::ctxt, def: ast::DefId) -> ~[@ty::TraitRef] {
+pub fn get_supertraits(tcx: ty::ctxt, def: ast::DefId) -> Vec<@ty::TraitRef> {
let cstore = tcx.cstore;
let cdata = cstore.get_crate_data(def.krate);
decoder::get_supertraits(cdata, def.node, tcx)
pub fn get_static_methods_if_impl(cstore: @cstore::CStore,
def: ast::DefId)
- -> Option<~[StaticMethodInfo]> {
+ -> Option<Vec<StaticMethodInfo> > {
let cdata = cstore.get_crate_data(def.krate);
decoder::get_static_methods_if_impl(cstore.intr, cdata, def.node)
}
pub fn get_item_attrs(cstore: @cstore::CStore,
def_id: ast::DefId,
- f: |~[@ast::MetaItem]|) {
+ f: |Vec<@ast::MetaItem> |) {
let cdata = cstore.get_crate_data(def_id.krate);
decoder::get_item_attrs(cdata, def_id.node, f)
}
pub fn get_struct_fields(cstore: @cstore::CStore,
def: ast::DefId)
- -> ~[ty::field_ty] {
+ -> Vec<ty::field_ty> {
let cdata = cstore.get_crate_data(def.krate);
decoder::get_struct_fields(cstore.intr, cdata, def.node)
}
class_id, def) );
let ty = decoder::item_type(def, the_field, tcx, cdata);
ty::ty_param_bounds_and_ty {
- generics: ty::Generics {type_param_defs: Rc::new(~[]),
- region_param_defs: Rc::new(~[])},
+ generics: ty::Generics {type_param_defs: Rc::new(Vec::new()),
+ region_param_defs: Rc::new(Vec::new())},
ty: ty
}
}
pub fn get_native_libraries(cstore: @cstore::CStore,
crate_num: ast::CrateNum)
- -> ~[(cstore::NativeLibaryKind, ~str)] {
+ -> Vec<(cstore::NativeLibaryKind, ~str)> {
let cdata = cstore.get_crate_data(crate_num);
decoder::get_native_libraries(cdata)
}
pub fn get_exported_macros(cstore: @cstore::CStore,
crate_num: ast::CrateNum)
- -> ~[~str] {
+ -> Vec<~str> {
let cdata = cstore.get_crate_data(crate_num);
decoder::get_exported_macros(cdata)
}
pub struct CStore {
priv metas: RefCell<HashMap<ast::CrateNum, @crate_metadata>>,
priv extern_mod_crate_map: RefCell<extern_mod_crate_map>,
- priv used_crate_sources: RefCell<~[CrateSource]>,
- priv used_libraries: RefCell<~[(~str, NativeLibaryKind)]>,
- priv used_link_args: RefCell<~[~str]>,
+ priv used_crate_sources: RefCell<Vec<CrateSource> >,
+ priv used_libraries: RefCell<Vec<(~str, NativeLibaryKind)> >,
+ priv used_link_args: RefCell<Vec<~str> >,
intr: @IdentInterner
}
CStore {
metas: RefCell::new(HashMap::new()),
extern_mod_crate_map: RefCell::new(HashMap::new()),
- used_crate_sources: RefCell::new(~[]),
- used_libraries: RefCell::new(~[]),
- used_link_args: RefCell::new(~[]),
+ used_crate_sources: RefCell::new(Vec::new()),
+ used_libraries: RefCell::new(Vec::new()),
+ used_link_args: RefCell::new(Vec::new()),
intr: intr
}
}
}
pub fn get_used_crates(&self, prefer: LinkagePreference)
- -> ~[(ast::CrateNum, Option<Path>)] {
+ -> Vec<(ast::CrateNum, Option<Path>)> {
let used_crate_sources = self.used_crate_sources.borrow();
used_crate_sources.get()
.iter()
}
pub fn get_used_libraries<'a>(&'a self)
- -> &'a RefCell<~[(~str, NativeLibaryKind)]> {
+ -> &'a RefCell<Vec<(~str, NativeLibaryKind)> > {
&self.used_libraries
}
}
}
- pub fn get_used_link_args<'a>(&'a self) -> &'a RefCell<~[~str]> {
+ pub fn get_used_link_args<'a>(&'a self) -> &'a RefCell<Vec<~str> > {
&self.used_link_args
}
tcx: ty::ctxt,
cdata: Cmd,
tag: uint)
- -> Rc<~[ty::TypeParameterDef]> {
- let mut bounds = ~[];
+ -> Rc<Vec<ty::TypeParameterDef> > {
+ let mut bounds = Vec::new();
reader::tagged_docs(item, tag, |p| {
let bd = parse_type_param_def_data(
p.data, p.start, cdata.cnum, tcx,
}
fn item_region_param_defs(item_doc: ebml::Doc, cdata: Cmd)
- -> Rc<~[ty::RegionParameterDef]> {
- let mut v = ~[];
+ -> Rc<Vec<ty::RegionParameterDef> > {
+ let mut v = Vec::new();
reader::tagged_docs(item_doc, tag_region_param_def, |rp_doc| {
let ident_str_doc = reader::get_doc(rp_doc,
tag_region_param_def_ident);
n
}
-fn enum_variant_ids(item: ebml::Doc, cdata: Cmd) -> ~[ast::DefId] {
- let mut ids: ~[ast::DefId] = ~[];
+fn enum_variant_ids(item: ebml::Doc, cdata: Cmd) -> Vec<ast::DefId> {
+ let mut ids: Vec<ast::DefId> = Vec::new();
let v = tag_items_data_item_variant;
reader::tagged_docs(item, v, |p| {
let ext = reader::with_doc_data(p, parse_def_id);
return ids;
}
-fn item_path(item_doc: ebml::Doc) -> ~[ast_map::PathElem] {
+fn item_path(item_doc: ebml::Doc) -> Vec<ast_map::PathElem> {
let path_doc = reader::get_doc(item_doc, tag_path);
let len_doc = reader::get_doc(path_doc, tag_path_len);
callback)
}
-pub fn get_item_path(cdata: Cmd, id: ast::NodeId) -> ~[ast_map::PathElem] {
+pub fn get_item_path(cdata: Cmd, id: ast::NodeId) -> Vec<ast_map::PathElem> {
item_path(lookup_item(id, cdata.data()))
}
pub type DecodeInlinedItem<'a> = 'a |cdata: @cstore::crate_metadata,
tcx: ty::ctxt,
- path: ~[ast_map::PathElem],
+ path: Vec<ast_map::PathElem> ,
par_doc: ebml::Doc|
- -> Result<ast::InlinedItem, ~[ast_map::PathElem]>;
+ -> Result<ast::InlinedItem, Vec<ast_map::PathElem> >;
pub fn maybe_get_item_ast(cdata: Cmd, tcx: ty::ctxt, id: ast::NodeId,
decode_inlined_item: DecodeInlinedItem)
}
pub fn get_enum_variants(intr: @IdentInterner, cdata: Cmd, id: ast::NodeId,
- tcx: ty::ctxt) -> ~[@ty::VariantInfo] {
+ tcx: ty::ctxt) -> Vec<@ty::VariantInfo> {
let data = cdata.data();
let items = reader::get_doc(reader::Doc(data), tag_items);
let item = find_item(id, items);
- let mut infos: ~[@ty::VariantInfo] = ~[];
+ let mut infos: Vec<@ty::VariantInfo> = Vec::new();
let variant_ids = enum_variant_ids(item, cdata);
let mut disr_val = 0;
for did in variant_ids.iter() {
let name = item_name(intr, item);
let arg_tys = match ty::get(ctor_ty).sty {
ty::ty_bare_fn(ref f) => f.sig.inputs.clone(),
- _ => ~[], // Nullary enum variant.
+ _ => Vec::new(), // Nullary enum variant.
};
match variant_disr_val(item) {
Some(val) => { disr_val = val; }
}
fn item_impl_methods(intr: @IdentInterner, cdata: Cmd, item: ebml::Doc,
- tcx: ty::ctxt) -> ~[@ty::Method] {
- let mut rslt = ~[];
+ tcx: ty::ctxt) -> Vec<@ty::Method> {
+ let mut rslt = Vec::new();
reader::tagged_docs(item, tag_item_impl_method, |doc| {
let m_did = reader::with_doc_data(doc, parse_def_id);
rslt.push(@get_method(intr, cdata, m_did.node, tcx));
}
pub fn get_trait_method_def_ids(cdata: Cmd,
- id: ast::NodeId) -> ~[ast::DefId] {
+ id: ast::NodeId) -> Vec<ast::DefId> {
let data = cdata.data();
let item = lookup_item(id, data);
- let mut result = ~[];
+ let mut result = Vec::new();
reader::tagged_docs(item, tag_item_trait_method, |mth| {
result.push(item_def_id(mth, cdata));
true
pub fn get_provided_trait_methods(intr: @IdentInterner, cdata: Cmd,
id: ast::NodeId, tcx: ty::ctxt) ->
- ~[@ty::Method] {
+ Vec<@ty::Method> {
let data = cdata.data();
let item = lookup_item(id, data);
- let mut result = ~[];
+ let mut result = Vec::new();
reader::tagged_docs(item, tag_item_trait_method, |mth_id| {
let did = item_def_id(mth_id, cdata);
/// Returns the supertraits of the given trait.
pub fn get_supertraits(cdata: Cmd, id: ast::NodeId, tcx: ty::ctxt)
- -> ~[@ty::TraitRef] {
- let mut results = ~[];
+ -> Vec<@ty::TraitRef> {
+ let mut results = Vec::new();
let item_doc = lookup_item(id, cdata.data());
reader::tagged_docs(item_doc, tag_item_super_trait_ref, |trait_doc| {
// NB. Only reads the ones that *aren't* builtin-bounds. See also
pub fn get_static_methods_if_impl(intr: @IdentInterner,
cdata: Cmd,
node_id: ast::NodeId)
- -> Option<~[StaticMethodInfo]> {
+ -> Option<Vec<StaticMethodInfo> > {
let item = lookup_item(node_id, cdata.data());
if item_family(item) != Impl {
return None;
if !ret { return None }
- let mut impl_method_ids = ~[];
+ let mut impl_method_ids = Vec::new();
reader::tagged_docs(item, tag_item_impl_method, |impl_method_doc| {
impl_method_ids.push(reader::with_doc_data(impl_method_doc, parse_def_id));
true
});
- let mut static_impl_methods = ~[];
+ let mut static_impl_methods = Vec::new();
for impl_method_id in impl_method_ids.iter() {
let impl_method_doc = lookup_item(impl_method_id.node, cdata.data());
let family = item_family(impl_method_doc);
pub fn get_item_attrs(cdata: Cmd,
node_id: ast::NodeId,
- f: |~[@ast::MetaItem]|) {
+ f: |Vec<@ast::MetaItem> |) {
// The attributes for a tuple struct are attached to the definition, not the ctor;
// we assume that someone passing in a tuple struct ctor is actually wanting to
// look at the definition
}
pub fn get_struct_fields(intr: @IdentInterner, cdata: Cmd, id: ast::NodeId)
- -> ~[ty::field_ty] {
+ -> Vec<ty::field_ty> {
let data = cdata.data();
let item = lookup_item(id, data);
- let mut result = ~[];
+ let mut result = Vec::new();
reader::tagged_docs(item, tag_item_field, |an_item| {
let f = item_family(an_item);
if f == PublicField || f == PrivateField || f == InheritedField {
item_visibility(lookup_item(id, cdata.data()))
}
-fn get_meta_items(md: ebml::Doc) -> ~[@ast::MetaItem] {
- let mut items: ~[@ast::MetaItem] = ~[];
+fn get_meta_items(md: ebml::Doc) -> Vec<@ast::MetaItem> {
+ let mut items: Vec<@ast::MetaItem> = Vec::new();
reader::tagged_docs(md, tag_meta_item_word, |meta_item_doc| {
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
let n = token::intern_and_get_ident(nd.as_str_slice());
return items;
}
-fn get_attributes(md: ebml::Doc) -> ~[ast::Attribute] {
- let mut attrs: ~[ast::Attribute] = ~[];
+fn get_attributes(md: ebml::Doc) -> Vec<ast::Attribute> {
+ let mut attrs: Vec<ast::Attribute> = Vec::new();
match reader::maybe_get_doc(md, tag_attributes) {
option::Some(attrs_d) => {
reader::tagged_docs(attrs_d, tag_attribute, |attr_doc| {
write!(out, "\n\n")
}
-pub fn get_crate_attributes(data: &[u8]) -> ~[ast::Attribute] {
+pub fn get_crate_attributes(data: &[u8]) -> Vec<ast::Attribute> {
return get_attributes(reader::Doc(data));
}
hash: Svh,
}
-pub fn get_crate_deps(data: &[u8]) -> ~[CrateDep] {
- let mut deps: ~[CrateDep] = ~[];
+pub fn get_crate_deps(data: &[u8]) -> Vec<CrateDep> {
+ let mut deps: Vec<CrateDep> = Vec::new();
let cratedoc = reader::Doc(data);
let depsdoc = reader::get_doc(cratedoc, tag_crate_deps);
let mut crate_num = 1;
}
-pub fn get_native_libraries(cdata: Cmd) -> ~[(cstore::NativeLibaryKind, ~str)] {
+pub fn get_native_libraries(cdata: Cmd) -> Vec<(cstore::NativeLibaryKind, ~str)> {
let libraries = reader::get_doc(reader::Doc(cdata.data()),
tag_native_libraries);
- let mut result = ~[];
+ let mut result = Vec::new();
reader::tagged_docs(libraries, tag_native_libraries_lib, |lib_doc| {
let kind_doc = reader::get_doc(lib_doc, tag_native_libraries_kind);
let name_doc = reader::get_doc(lib_doc, tag_native_libraries_name);
.map(|doc| item_def_id(doc, cdata))
}
-pub fn get_exported_macros(cdata: Cmd) -> ~[~str] {
+pub fn get_exported_macros(cdata: Cmd) -> Vec<~str> {
let macros = reader::get_doc(reader::Doc(cdata.data()),
tag_exported_macros);
- let mut result = ~[];
+ let mut result = Vec::new();
reader::tagged_docs(macros, tag_macro_def, |macro_doc| {
result.push(macro_doc.as_str());
true
ebml_w: &mut writer::Encoder,
id: NodeId,
variants: &[P<Variant>],
- index: @RefCell<~[entry<i64>]>,
+ index: @RefCell<Vec<entry<i64>> >,
generics: &ast::Generics) {
debug!("encode_enum_variant_info(id={:?})", id);
fn encode_info_for_struct(ecx: &EncodeContext,
ebml_w: &mut writer::Encoder,
fields: &[StructField],
- global_index: @RefCell<~[entry<i64>]>)
- -> ~[entry<i64>] {
+ global_index: @RefCell<Vec<entry<i64>> >)
+ -> Vec<entry<i64>> {
/* Each class has its own index, since different classes
may have fields with the same name */
- let mut index = ~[];
+ let mut index = Vec::new();
let tcx = ecx.tcx;
/* We encode both private and public fields -- need to include
private fields to get the offsets right */
ebml_w: &mut writer::Encoder,
name: ast::Ident,
ctor_id: NodeId,
- index: @RefCell<~[entry<i64>]>,
+ index: @RefCell<Vec<entry<i64>> >,
struct_id: NodeId) {
{
let mut index = index.borrow_mut();
fn encode_info_for_item(ecx: &EncodeContext,
ebml_w: &mut writer::Encoder,
item: &Item,
- index: @RefCell<~[entry<i64>]>,
+ index: @RefCell<Vec<entry<i64>> >,
path: PathElems,
vis: ast::Visibility) {
let tcx = ecx.tcx;
fn add_to_index(item: &Item, ebml_w: &writer::Encoder,
- index: @RefCell<~[entry<i64>]>) {
+ index: @RefCell<Vec<entry<i64>> >) {
let mut index = index.borrow_mut();
index.get().push(entry {
val: item.id as i64,
fn encode_info_for_foreign_item(ecx: &EncodeContext,
ebml_w: &mut writer::Encoder,
nitem: &ForeignItem,
- index: @RefCell<~[entry<i64>]>,
+ index: @RefCell<Vec<entry<i64>> >,
path: PathElems,
abi: AbiSet) {
{
fn my_visit_item(i: &Item,
ebml_w: &mut writer::Encoder,
ecx_ptr: *int,
- index: @RefCell<~[entry<i64>]>) {
+ index: @RefCell<Vec<entry<i64>> >) {
let mut ebml_w = unsafe { ebml_w.unsafe_clone() };
// See above
let ecx: &EncodeContext = unsafe { cast::transmute(ecx_ptr) };
fn my_visit_foreign_item(ni: &ForeignItem,
ebml_w: &mut writer::Encoder,
ecx_ptr:*int,
- index: @RefCell<~[entry<i64>]>) {
+ index: @RefCell<Vec<entry<i64>> >) {
// See above
let ecx: &EncodeContext = unsafe { cast::transmute(ecx_ptr) };
debug!("writing foreign item {}::{}",
struct EncodeVisitor<'a,'b> {
ebml_w_for_visit_item: &'a mut writer::Encoder<'b>,
ecx_ptr:*int,
- index: @RefCell<~[entry<i64>]>,
+ index: @RefCell<Vec<entry<i64>> >,
}
impl<'a,'b> visit::Visitor<()> for EncodeVisitor<'a,'b> {
fn encode_info_for_items(ecx: &EncodeContext,
ebml_w: &mut writer::Encoder,
krate: &Crate)
- -> ~[entry<i64>] {
- let index = @RefCell::new(~[]);
+ -> Vec<entry<i64>> {
+ let index = @RefCell::new(Vec::new());
ebml_w.start_tag(tag_items_data);
{
let mut index = index.borrow_mut();
// Path and definition ID indexing
fn create_index<T:Clone + Hash + 'static>(
- index: ~[entry<T>])
- -> ~[@~[entry<T>]] {
- let mut buckets: ~[@RefCell<~[entry<T>]>] = ~[];
+ index: Vec<entry<T>> )
+ -> Vec<@Vec<entry<T>> > {
+ let mut buckets: Vec<@RefCell<Vec<entry<T>> >> = Vec::new();
for _ in range(0u, 256u) {
- buckets.push(@RefCell::new(~[]));
+ buckets.push(@RefCell::new(Vec::new()));
}
for elt in index.iter() {
let h = hash::hash(&elt.val) as uint;
bucket.get().push((*elt).clone());
}
- let mut buckets_frozen = ~[];
+ let mut buckets_frozen = Vec::new();
for bucket in buckets.iter() {
buckets_frozen.push(@/*bad*/(**bucket).get());
}
fn encode_index<T:'static>(
ebml_w: &mut writer::Encoder,
- buckets: ~[@~[entry<T>]],
+ buckets: Vec<@Vec<entry<T>> > ,
write_fn: |&mut MemWriter, &T|) {
ebml_w.start_tag(tag_index);
- let mut bucket_locs = ~[];
+ let mut bucket_locs = Vec::new();
ebml_w.start_tag(tag_index_buckets);
for bucket in buckets.iter() {
bucket_locs.push(ebml_w.writer.tell().unwrap());
// metadata that Rust cares about for linking crates. If the user didn't
// provide it we will throw it in anyway with a default value.
fn synthesize_crate_attrs(ecx: &EncodeContext,
- krate: &Crate) -> ~[Attribute] {
+ krate: &Crate) -> Vec<Attribute> {
fn synthesize_crateid_attr(ecx: &EncodeContext) -> Attribute {
assert!(!ecx.link_meta.crateid.name.is_empty());
token::intern_and_get_ident(ecx.link_meta.crateid.to_str())))
}
- let mut attrs = ~[];
+ let mut attrs = Vec::new();
for attr in krate.attrs.iter() {
if !attr.name().equiv(&("crate_id")) {
attrs.push(*attr);
}
fn encode_crate_deps(ebml_w: &mut writer::Encoder, cstore: &cstore::CStore) {
- fn get_ordered_deps(cstore: &cstore::CStore) -> ~[decoder::CrateDep] {
+ fn get_ordered_deps(cstore: &cstore::CStore) -> Vec<decoder::CrateDep> {
// Pull the cnums and name,vers,hash out of cstore
- let mut deps = ~[];
+ let mut deps = Vec::new();
cstore.iter_crate_data(|key, val| {
let dep = decoder::CrateDep {
cnum: key,
0x74, //'t' as u8,
0, 0, 0, 1 ];
-pub fn encode_metadata(parms: EncodeParams, krate: &Crate) -> ~[u8] {
+pub fn encode_metadata(parms: EncodeParams, krate: &Crate) -> Vec<u8> {
let mut wr = MemWriter::new();
encode_metadata_inner(&mut wr, parms, krate);
wr.unwrap()
/// $HOME/.rust
/// DIR/.rust for any DIR that's the current working directory
/// or an ancestor of it
-pub fn rust_path() -> ~[Path] {
- let mut env_rust_path: ~[Path] = match get_rust_path() {
+pub fn rust_path() -> Vec<Path> {
+ let mut env_rust_path: Vec<Path> = match get_rust_path() {
Some(env_path) => {
- let env_path_components: ~[&str] =
+ let env_path_components: Vec<&str> =
env_path.split_str(PATH_ENTRY_SEPARATOR).collect();
env_path_components.map(|&s| Path::new(s))
}
- None => ~[]
+ None => Vec::new()
};
let mut cwd = os::getcwd();
// now add in default entries
// A Library candidate is created if the metadata for the set of
// libraries corresponds to the crate id and hash criteria that this
// serach is being performed for.
- let mut libraries = ~[];
+ let mut libraries = Vec::new();
for (_hash, (rlibs, dylibs)) in candidates.move_iter() {
let mut metadata = None;
let rlib = self.extract_one(rlibs, "rlib", &mut metadata);
let self_ty = parse_opt(st, |st| parse_ty(st, |x,y| conv(x,y)) );
assert_eq!(next(st), '[');
- let mut params: ~[ty::t] = ~[];
+ let mut params: Vec<ty::t> = Vec::new();
while peek(st) != ']' { params.push(parse_ty(st, |x,y| conv(x,y))); }
st.pos = st.pos + 1u;
}
'T' => {
assert_eq!(next(st), '[');
- let mut params = ~[];
+ let mut params = Vec::new();
while peek(st) != ']' { params.push(parse_ty(st, |x,y| conv(x,y))); }
st.pos = st.pos + 1u;
return ty::mk_tup(st.tcx, params);
assert_eq!(next(st), '[');
let id = parse_uint(st) as ast::NodeId;
assert_eq!(next(st), '|');
- let mut inputs = ~[];
+ let mut inputs = Vec::new();
while peek(st) != ']' {
inputs.push(parse_ty(st, |x,y| conv(x,y)));
}
fn parse_bounds(st: &mut PState, conv: conv_did) -> ty::ParamBounds {
let mut param_bounds = ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(),
- trait_bounds: ~[]
+ trait_bounds: Vec::new()
};
loop {
match next(st) {
enc_trait_store(w, cx, store);
enc_mutability(w, mt);
let bounds = ty::ParamBounds {builtin_bounds: bounds,
- trait_bounds: ~[]};
+ trait_bounds: Vec::new()};
enc_bounds(w, cx, &bounds);
mywrite!(w, "]");
}
enc_onceness(w, ft.onceness);
enc_region(w, cx, ft.region);
let bounds = ty::ParamBounds {builtin_bounds: ft.bounds,
- trait_bounds: ~[]};
+ trait_bounds: Vec::new()};
enc_bounds(w, cx, &bounds);
enc_fn_sig(w, cx, &ft.sig);
}
pub fn decode_inlined_item(cdata: @cstore::crate_metadata,
tcx: ty::ctxt,
maps: Maps,
- path: ~[ast_map::PathElem],
+ path: Vec<ast_map::PathElem> ,
par_doc: ebml::Doc)
- -> Result<ast::InlinedItem, ~[ast_map::PathElem]> {
+ -> Result<ast::InlinedItem, Vec<ast_map::PathElem> > {
let dcx = @DecodeContext {
cdata: cdata,
tcx: tcx,
fn renumber_and_map_ast(xcx: @ExtendedDecodeContext,
map: &ast_map::Map,
- path: ~[ast_map::PathElem],
+ path: Vec<ast_map::PathElem> ,
ii: ast::InlinedItem) -> ast::InlinedItem {
ast_map::map_decoded_item(map,
path.move_iter().collect(),
trait ebml_decoder_decoder_helpers {
fn read_ty(&mut self, xcx: @ExtendedDecodeContext) -> ty::t;
- fn read_tys(&mut self, xcx: @ExtendedDecodeContext) -> ~[ty::t];
+ fn read_tys(&mut self, xcx: @ExtendedDecodeContext) -> Vec<ty::t> ;
fn read_type_param_def(&mut self, xcx: @ExtendedDecodeContext)
-> ty::TypeParameterDef;
fn read_ty_param_bounds_and_ty(&mut self, xcx: @ExtendedDecodeContext)
tcx: ty::ctxt, cdata: @cstore::crate_metadata) -> ty::t;
fn read_tys_noxcx(&mut self,
tcx: ty::ctxt,
- cdata: @cstore::crate_metadata) -> ~[ty::t];
+ cdata: @cstore::crate_metadata) -> Vec<ty::t> ;
}
impl<'a> ebml_decoder_decoder_helpers for reader::Decoder<'a> {
fn read_tys_noxcx(&mut self,
tcx: ty::ctxt,
- cdata: @cstore::crate_metadata) -> ~[ty::t] {
+ cdata: @cstore::crate_metadata) -> Vec<ty::t> {
self.read_to_vec(|this| this.read_ty_noxcx(tcx, cdata) )
}
}
}
- fn read_tys(&mut self, xcx: @ExtendedDecodeContext) -> ~[ty::t] {
+ fn read_tys(&mut self, xcx: @ExtendedDecodeContext) -> Vec<ty::t> {
self.read_to_vec(|this| this.read_ty(xcx) )
}
let item = quote_item!(cx,
fn new_int_alist<B>() -> alist<int, B> {
fn eq_int(a: int, b: int) -> bool { a == b }
- return alist {eq_fn: eq_int, data: ~[]};
+ return alist {eq_fn: eq_int, data: Vec::new()};
}
).unwrap();
let item_in = e::IIItemRef(item);
let item_out = simplify_ast(item_in);
let item_exp = ast::IIItem(quote_item!(cx,
fn new_int_alist<B>() -> alist<int, B> {
- return alist {eq_fn: eq_int, data: ~[]};
+ return alist {eq_fn: eq_int, data: Vec::new()};
}
).unwrap());
match (item_out, item_exp) {
})
}
- pub fn loans_generated_by(&self, scope_id: ast::NodeId) -> ~[uint] {
+ pub fn loans_generated_by(&self, scope_id: ast::NodeId) -> Vec<uint> {
//! Returns a vector of the loans that are generated as
//! we encounter `scope_id`.
- let mut result = ~[];
+ let mut result = Vec::new();
self.dfcx_loans.each_gen_bit_frozen(scope_id, |loan_index| {
result.push(loan_index);
true
bccx: &'a BorrowckCtxt,
id_range: IdRange,
move_data: move_data::MoveData,
- all_loans: @RefCell<~[Loan]>,
+ all_loans: @RefCell<Vec<Loan> >,
item_ub: ast::NodeId,
- repeating_ids: ~[ast::NodeId]
-}
+ repeating_ids: Vec<ast::NodeId> }
impl<'a> visit::Visitor<()> for GatherLoanCtxt<'a> {
fn visit_expr(&mut self, ex: &Expr, _: ()) {
}
pub fn gather_loans(bccx: &BorrowckCtxt, decl: &ast::FnDecl, body: &ast::Block)
- -> (IdRange, @RefCell<~[Loan]>, move_data::MoveData) {
+ -> (IdRange, @RefCell<Vec<Loan> >, move_data::MoveData) {
let mut glcx = GatherLoanCtxt {
bccx: bccx,
id_range: IdRange::max(),
- all_loans: @RefCell::new(~[]),
+ all_loans: @RefCell::new(Vec::new()),
item_ub: body.id,
- repeating_ids: ~[body.id],
+ repeating_ids: vec!(body.id),
move_data: MoveData::new()
};
glcx.gather_fn_arg_patterns(decl, body);
pub enum RestrictionResult {
Safe,
- SafeIf(@LoanPath, ~[Restriction])
+ SafeIf(@LoanPath, Vec<Restriction> )
}
pub fn compute_restrictions(bccx: &BorrowckCtxt,
mc::cat_upvar(ty::UpvarId {var_id: local_id, ..}, _) => {
// R-Variable
let lp = @LpVar(local_id);
- SafeIf(lp, ~[Restriction {loan_path: lp,
- set: restrictions}])
+ SafeIf(lp, vec!(Restriction {loan_path: lp,
+ set: restrictions}))
}
mc::cat_downcast(cmt_base) => {
loan_path: @LoanPath,
cmt: mc::cmt,
kind: ty::BorrowKind,
- restrictions: ~[Restriction],
+ restrictions: Vec<Restriction> ,
gen_scope: ast::NodeId,
kill_scope: ast::NodeId,
span: Span,
pub struct MoveData {
/// Move paths. See section "Move paths" in `doc.rs`.
- paths: RefCell<~[MovePath]>,
+ paths: RefCell<Vec<MovePath> >,
/// Cache of loan path to move path index, for easy lookup.
path_map: RefCell<HashMap<@LoanPath, MovePathIndex>>,
/// Each move or uninitialized variable gets an entry here.
- moves: RefCell<~[Move]>,
+ moves: RefCell<Vec<Move> >,
/// Assignments to a variable, like `x = foo`. These are assigned
/// bits for dataflow, since we must track them to ensure that
/// immutable variables are assigned at most once along each path.
- var_assignments: RefCell<~[Assignment]>,
+ var_assignments: RefCell<Vec<Assignment> >,
/// Assignments to a path, like `x.f = foo`. These are not
/// assigned dataflow bits, but we track them because they still
/// kill move bits.
- path_assignments: RefCell<~[Assignment]>,
+ path_assignments: RefCell<Vec<Assignment> >,
assignee_ids: RefCell<HashSet<ast::NodeId>>,
}
impl MoveData {
pub fn new() -> MoveData {
MoveData {
- paths: RefCell::new(~[]),
+ paths: RefCell::new(Vec::new()),
path_map: RefCell::new(HashMap::new()),
- moves: RefCell::new(~[]),
- path_assignments: RefCell::new(~[]),
- var_assignments: RefCell::new(~[]),
+ moves: RefCell::new(Vec::new()),
+ path_assignments: RefCell::new(Vec::new()),
+ var_assignments: RefCell::new(Vec::new()),
assignee_ids: RefCell::new(HashSet::new()),
}
}
method_map: typeck::MethodMap,
exit_map: NodeMap<CFGIndex>,
graph: CFGGraph,
- loop_scopes: ~[LoopScope],
+ loop_scopes: Vec<LoopScope> ,
}
struct LoopScope {
graph: graph::Graph::new(),
tcx: tcx,
method_map: method_map,
- loop_scopes: ~[]
+ loop_scopes: Vec::new()
};
let entry = cfg_builder.add_node(0, []);
let exit = cfg_builder.block(blk, entry);
ast::ExprStruct(_, ref fields, base) => {
let base_exit = self.opt_expr(base, pred);
- let field_exprs: ~[@ast::Expr] =
+ let field_exprs: Vec<@ast::Expr> =
fields.iter().map(|f| f.expr).collect();
self.straightline(expr, base_exit, field_exprs)
}
sess: Session,
ast_map: &'a ast_map::Map,
def_map: resolve::DefMap,
- idstack: ~[NodeId]
-}
+ idstack: Vec<NodeId> }
// Make sure a const item doesn't recursively refer to itself
// FIXME: Should use the dependency graph when it's available (#1356)
sess: sess,
ast_map: ast_map,
def_map: def_map,
- idstack: ~[]
+ idstack: Vec::new()
};
visitor.visit_item(it, ());
}
_ => { /* We assume only enum types can be uninhabited */ }
}
- let pats: ~[@Pat] = arms.iter()
+ let pats: Vec<@Pat> = arms.iter()
.filter_map(unguarded_pat)
.flat_map(|pats| pats.move_iter())
.collect();
// Check for unreachable patterns
fn check_arms(cx: &MatchCheckCtxt, arms: &[Arm]) {
- let mut seen = ~[];
+ let mut seen = Vec::new();
for arm in arms.iter() {
for pat in arm.pats.iter() {
true
});
- let v = ~[*pat];
+ let v = vec!(*pat);
match is_useful(cx, &seen, v) {
not_useful => {
cx.tcx.sess.span_err(pat.span, "unreachable pattern");
}
}
-fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: ~[@Pat]) {
+fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: Vec<@Pat> ) {
assert!((!pats.is_empty()));
- let ext = match is_useful(cx, &pats.map(|p| ~[*p]), [wild()]) {
+ let ext = match is_useful(cx, &pats.map(|p| vec!(*p)), [wild()]) {
not_useful => {
// This is good, wildcard pattern isn't reachable
return;
cx.tcx.sess.span_err(sp, msg);
}
-type matrix = ~[~[@Pat]];
+type matrix = Vec<Vec<@Pat> > ;
#[deriving(Clone)]
enum useful {
return Some(single);
}
ty::ty_enum(eid, _) => {
- let mut found = ~[];
+ let mut found = Vec::new();
for r in m.iter() {
let r = pat_ctor_id(cx, r[0]);
for id in r.iter() {
}
_ => None
}
- }).collect::<~[(uint, bool)]>();
+ }).collect::<Vec<(uint, bool)> >();
// Sort them by length such that for patterns of the same length,
// those with a destructured slice come first.
ctor_id: &ctor,
arity: uint,
left_ty: ty::t)
- -> Option<~[@Pat]> {
+ -> Option<Vec<@Pat> > {
// Sad, but I can't get rid of this easily
let r0 = (*raw_pat(r[0])).clone();
match r0 {
Pat{id: pat_id, node: n, span: pat_span} =>
match n {
PatWild => {
- Some(vec::append(vec::from_elem(arity, wild()), r.tail()))
+ Some(vec_ng::append(Vec::from_elem(arity, wild()), r.tail()))
}
PatWildMulti => {
- Some(vec::append(vec::from_elem(arity, wild_multi()), r.tail()))
+ Some(vec_ng::append(vec::from_elem(arity, wild_multi()), r.tail()))
}
PatIdent(_, _, _) => {
let opt_def = {
}
_ => {
Some(
- vec::append(
+ vec_ng::append(
- vec::from_elem(arity, wild()),
+ Vec::from_elem(arity, wild()),
r.tail()
)
Some(args) => args.iter().map(|x| *x).collect(),
- None => vec::from_elem(arity, wild())
+ None => Vec::from_elem(arity, wild())
};
- Some(vec::append(args, r.tail()))
+ Some(vec_ng::append(args, r.tail()))
}
DefVariant(_, _, _) => None,
}
- None => new_args = vec::from_elem(arity, wild())
+ None => new_args = Vec::from_elem(arity, wild())
}
- Some(vec::append(new_args, r.tail()))
+ Some(vec_ng::append(new_args, r.tail()))
}
_ => None
}
_ => wild()
}
});
- Some(vec::append(args, r.tail()))
+ Some(vec_ng::append(args, r.tail()))
} else {
None
}
_ => wild()
}
}).collect();
- Some(vec::append(args, r.tail()))
+ Some(vec_ng::append(args, r.tail()))
}
}
}
PatTup(args) => {
- Some(vec::append(args.iter().map(|x| *x).collect(), r.tail()))
+ Some(vec_ng::append(args.iter().map(|x| *x).collect(), r.tail()))
}
PatUniq(a) | PatRegion(a) => {
- Some(vec::append(~[a], r.tail()))
+ Some(vec_ng::append(vec!(a), r.tail()))
}
PatLit(expr) => {
let e_v = eval_const_expr(cx.tcx, expr);
vec(_) => {
let num_elements = before.len() + after.len();
if num_elements < arity && slice.is_some() {
- let mut result = ~[];
+ let mut result = Vec::new();
for pat in before.iter() {
result.push((*pat).clone());
}
}
Some(result)
} else if num_elements == arity {
- let mut result = ~[];
+ let mut result = Vec::new();
for pat in before.iter() {
result.push((*pat).clone());
}
}
}
-fn default(cx: &MatchCheckCtxt, r: &[@Pat]) -> Option<~[@Pat]> {
+fn default(cx: &MatchCheckCtxt, r: &[@Pat]) -> Option<Vec<@Pat> > {
- if is_wild(cx, r[0]) { Some(r.tail().to_owned()) }
+ if is_wild(cx, r[0]) { Some(Vec::from_slice(r.tail())) }
else { None }
}
const_int(i64),
const_uint(u64),
const_str(InternedString),
- const_binary(Rc<~[u8]>),
+ const_binary(Rc<Vec<u8> >),
const_bool(bool)
}
// the full vector (see the method `compute_id_range()`).
/// bits generated as we exit the scope `id`. Updated by `add_gen()`.
- priv gens: ~[uint],
+ priv gens: Vec<uint> ,
/// bits killed as we exit the scope `id`. Updated by `add_kill()`.
- priv kills: ~[uint],
+ priv kills: Vec<uint> ,
/// bits that are valid on entry to the scope `id`. Updated by
/// `propagate()`.
- priv on_entry: ~[uint]
-}
+ priv on_entry: Vec<uint> }
/// Parameterization for the precise form of data flow that is used.
pub trait DataFlowOperator {
struct LoopScope<'a> {
loop_id: ast::NodeId,
- break_bits: ~[uint]
-}
+ break_bits: Vec<uint> }
impl<O:DataFlowOperator> pprust::PpAnn for DataFlowContext<O> {
fn pre(&self, node: pprust::AnnNode) -> io::IoResult<()> {
debug!("DataFlowContext::new(id_range={:?}, bits_per_id={:?}, words_per_id={:?})",
id_range, bits_per_id, words_per_id);
- let gens = ~[];
- let kills = ~[];
- let on_entry = ~[];
+ let gens = Vec::new();
+ let kills = Vec::new();
+ let on_entry = Vec::new();
DataFlowContext {
tcx: tcx,
};
let mut temp = vec::from_elem(self.words_per_id, 0u);
- let mut loop_scopes = ~[];
+ let mut loop_scopes = Vec::new();
while propcx.changed {
propcx.changed = false;
fn walk_block(&mut self,
blk: &ast::Block,
in_out: &mut [uint],
- loop_scopes: &mut ~[LoopScope]) {
+ loop_scopes: &mut Vec<LoopScope> ) {
debug!("DataFlowContext::walk_block(blk.id={}, in_out={})",
blk.id, bits_to_str(in_out));
fn walk_stmt(&mut self,
stmt: @ast::Stmt,
in_out: &mut [uint],
- loop_scopes: &mut ~[LoopScope]) {
+ loop_scopes: &mut Vec<LoopScope> ) {
match stmt.node {
ast::StmtDecl(decl, _) => {
self.walk_decl(decl, in_out, loop_scopes);
fn walk_decl(&mut self,
decl: @ast::Decl,
in_out: &mut [uint],
- loop_scopes: &mut ~[LoopScope]) {
+ loop_scopes: &mut Vec<LoopScope> ) {
match decl.node {
ast::DeclLocal(local) => {
self.walk_opt_expr(local.init, in_out, loop_scopes);
fn walk_expr(&mut self,
expr: &ast::Expr,
in_out: &mut [uint],
- loop_scopes: &mut ~[LoopScope]) {
+ loop_scopes: &mut Vec<LoopScope> ) {
debug!("DataFlowContext::walk_expr(expr={}, in_out={})",
expr.repr(self.dfcx.tcx), bits_to_str(in_out));
fn walk_exprs(&mut self,
exprs: &[@ast::Expr],
in_out: &mut [uint],
- loop_scopes: &mut ~[LoopScope]) {
+ loop_scopes: &mut Vec<LoopScope> ) {
for &expr in exprs.iter() {
self.walk_expr(expr, in_out, loop_scopes);
}
fn walk_opt_expr(&mut self,
opt_expr: Option<@ast::Expr>,
in_out: &mut [uint],
- loop_scopes: &mut ~[LoopScope]) {
+ loop_scopes: &mut Vec<LoopScope> ) {
for &expr in opt_expr.iter() {
self.walk_expr(expr, in_out, loop_scopes);
}
call_id: ast::NodeId,
args: &[@ast::Expr],
in_out: &mut [uint],
- loop_scopes: &mut ~[LoopScope]) {
+ loop_scopes: &mut Vec<LoopScope> ) {
self.walk_exprs(args, in_out, loop_scopes);
// FIXME(#6268) nested method calls
fn walk_pat(&mut self,
pat: @ast::Pat,
in_out: &mut [uint],
- _loop_scopes: &mut ~[LoopScope]) {
+ _loop_scopes: &mut Vec<LoopScope> ) {
debug!("DataFlowContext::walk_pat(pat={}, in_out={})",
pat.repr(self.dfcx.tcx), bits_to_str(in_out));
fn walk_pat_alternatives(&mut self,
pats: &[@ast::Pat],
in_out: &mut [uint],
- loop_scopes: &mut ~[LoopScope]) {
+ loop_scopes: &mut Vec<LoopScope> ) {
if pats.len() == 1 {
// Common special case:
return self.walk_pat(pats[0], in_out, loop_scopes);
fn find_scope<'a>(&self,
expr: &ast::Expr,
label: Option<ast::Ident>,
- loop_scopes: &'a mut ~[LoopScope]) -> &'a mut LoopScope {
+ loop_scopes: &'a mut Vec<LoopScope> ) -> &'a mut LoopScope {
let index = match label {
None => {
let len = loop_scopes.len();
}
struct MarkSymbolVisitor {
- worklist: ~[ast::NodeId],
+ worklist: Vec<ast::NodeId> ,
method_map: typeck::MethodMap,
tcx: ty::ctxt,
live_symbols: ~HashSet<ast::NodeId>,
impl MarkSymbolVisitor {
fn new(tcx: ty::ctxt,
method_map: typeck::MethodMap,
- worklist: ~[ast::NodeId]) -> MarkSymbolVisitor {
+ worklist: Vec<ast::NodeId> ) -> MarkSymbolVisitor {
MarkSymbolVisitor {
worklist: worklist,
method_map: method_map,
// 2) We are not sure to be live or not
// * Implementation of a trait method
struct LifeSeeder {
- worklist: ~[ast::NodeId],
+ worklist: Vec<ast::NodeId> ,
}
impl Visitor<()> for LifeSeeder {
fn create_and_seed_worklist(tcx: ty::ctxt,
exported_items: &privacy::ExportedItems,
reachable_symbols: &NodeSet,
- krate: &ast::Crate) -> ~[ast::NodeId] {
- let mut worklist = ~[];
+ krate: &ast::Crate) -> Vec<ast::NodeId> {
+ let mut worklist = Vec::new();
// Preferably, we would only need to seed the worklist with reachable
// symbols. However, since the set of reachable symbols differs
// The functions that one might think are 'main' but aren't, e.g.
// main functions not defined at the top level. For diagnostics.
- non_main_fns: ~[(NodeId, Span)],
+ non_main_fns: Vec<(NodeId, Span)> ,
}
impl<'a> Visitor<()> for EntryContext<'a> {
main_fn: None,
attr_main_fn: None,
start_fn: None,
- non_main_fns: ~[],
+ non_main_fns: Vec::new(),
};
visit::walk_crate(&mut ctxt, krate, ());
def: ast::Def, //< The variable being accessed free.
span: Span //< First span where it is accessed (there can be multiple)
}
-pub type freevar_info = @~[@freevar_entry];
+pub type freevar_info = @Vec<@freevar_entry> ;
pub type freevar_map = NodeMap<freevar_info>;
struct CollectFreevarsVisitor {
seen: NodeSet,
- refs: ~[@freevar_entry],
+ refs: Vec<@freevar_entry> ,
def_map: resolve::DefMap,
}
// in order to start the search.
fn collect_freevars(def_map: resolve::DefMap, blk: &ast::Block) -> freevar_info {
let seen = NodeSet::new();
- let refs = ~[];
+ let refs = Vec::new();
let mut v = CollectFreevarsVisitor {
seen: seen,
use std::vec;
pub struct Graph<N,E> {
- priv nodes: ~[Node<N>],
- priv edges: ~[Edge<E>],
+ priv nodes: Vec<Node<N>> ,
+ priv edges: Vec<Edge<E>> ,
}
pub struct Node<N> {
impl<N,E> Graph<N,E> {
pub fn new() -> Graph<N,E> {
- Graph {nodes: ~[], edges: ~[]}
+ Graph {nodes: Vec::new(), edges: Vec::new()}
}
pub fn with_capacity(num_nodes: uint,
let struct_ty = ty::mk_struct(cx.tcx, struct_did, ty::substs {
regions: ty::NonerasedRegions(opt_vec::Empty),
self_ty: None,
- tps: ~[]
+ tps: Vec::new()
});
if !ty::type_is_sendable(cx.tcx, struct_ty) {
cx.tcx.sess.span_err(span,
// Collect up the regions that appear in the target type. We want to
// ensure that these lifetimes are shorter than all lifetimes that are in
// the source type. See test `src/test/compile-fail/regions-trait-2.rs`
- let mut target_regions = ~[];
+ let mut target_regions = Vec::new();
ty::walk_regions_and_ty(
cx.tcx,
target_ty,
}
pub struct LanguageItems {
- items: ~[Option<ast::DefId>],
+ items: Vec<Option<ast::DefId>> ,
}
impl LanguageItems {
fn foo(_: LangItem) -> Option<ast::DefId> { None }
LanguageItems {
- items: ~[$(foo($variant)),*]
+ items: vec!($(foo($variant)),*)
}
}
// When recursing into an attributed node of the ast which modifies lint
// levels, this stack keeps track of the previous lint levels of whatever
// was modified.
- lint_stack: ~[(Lint, level, LintSource)],
+ lint_stack: Vec<(Lint, level, LintSource)> ,
// id of the last visited negated expression
negated_expr_id: ast::NodeId
exported_items: exported_items,
cur_struct_def_id: -1,
is_doc_hidden: false,
- lint_stack: ~[],
+ lint_stack: Vec::new(),
negated_expr_id: -1
};
live_node_map: RefCell<NodeMap<LiveNode>>,
variable_map: RefCell<NodeMap<Variable>>,
capture_info_map: RefCell<NodeMap<@~[CaptureInfo]>>,
- var_kinds: RefCell<~[VarKind]>,
- lnks: RefCell<~[LiveNodeKind]>,
+ var_kinds: RefCell<Vec<VarKind> >,
+ lnks: RefCell<Vec<LiveNodeKind> >,
}
fn IrMaps(tcx: ty::ctxt,
live_node_map: RefCell::new(NodeMap::new()),
variable_map: RefCell::new(NodeMap::new()),
capture_info_map: RefCell::new(NodeMap::new()),
- var_kinds: RefCell::new(~[]),
- lnks: RefCell::new(~[]),
+ var_kinds: RefCell::new(Vec::new()),
+ lnks: RefCell::new(Vec::new()),
}
}
}
}
- pub fn set_captures(&self, node_id: NodeId, cs: ~[CaptureInfo]) {
+ pub fn set_captures(&self, node_id: NodeId, cs: Vec<CaptureInfo> ) {
let mut capture_info_map = self.capture_info_map.borrow_mut();
capture_info_map.get().insert(node_id, @cs);
}
- pub fn captures(&self, expr: &Expr) -> @~[CaptureInfo] {
+ pub fn captures(&self, expr: &Expr) -> @Vec<CaptureInfo> {
let capture_info_map = self.capture_info_map.borrow();
match capture_info_map.get().find(&expr.id) {
Some(&caps) => caps,
// construction site.
let capture_map = this.capture_map.borrow();
let cvs = capture_map.get().get(&expr.id);
- let mut call_caps = ~[];
+ let mut call_caps = Vec::new();
for cv in cvs.borrow().iter() {
match moves::moved_variable_node_id_from_def(cv.def) {
Some(rv) => {
tcx: ty::ctxt,
ir: @IrMaps,
s: Specials,
- successors: @RefCell<~[LiveNode]>,
- users: @RefCell<~[Users]>,
+ successors: @RefCell<Vec<LiveNode> >,
+ users: @RefCell<Vec<Users> >,
// The list of node IDs for the nested loop scopes
// we're in.
- loop_scope: @RefCell<~[NodeId]>,
+ loop_scope: @RefCell<Vec<NodeId> >,
// mappings from loop node ID to LiveNode
// ("break" label should map to loop node ID,
// it probably doesn't now)
- users: @RefCell::new(vec::from_elem(ir.num_live_nodes.get() *
+ users: @RefCell::new(Vec::from_elem(ir.num_live_nodes.get() *
ir.num_vars.get(),
invalid_users())),
- loop_scope: @RefCell::new(~[]),
+ loop_scope: @RefCell::new(Vec::new()),
break_ln: @RefCell::new(NodeMap::new()),
cont_ln: @RefCell::new(NodeMap::new()),
}
// know what type lies at the other end, so we just call it
// `()` (the empty tuple).
- let opaque_ty = ty::mk_tup(self.tcx(), ~[]);
+ let opaque_ty = ty::mk_tup(self.tcx(), Vec::new());
return self.cat_deref_common(node, base_cmt, deref_cnt, opaque_ty);
}
mode: CaptureMode // How variable is being accessed
}
-pub type CaptureMap = @RefCell<NodeMap<Rc<~[CaptureVar]>>>;
+pub type CaptureMap = @RefCell<NodeMap<Rc<Vec<CaptureVar> >>>;
pub type MovesMap = @RefCell<NodeSet>;
self.consume_expr(arg_expr)
}
- pub fn compute_captures(&mut self, fn_expr_id: NodeId) -> Rc<~[CaptureVar]> {
+ pub fn compute_captures(&mut self, fn_expr_id: NodeId) -> Rc<Vec<CaptureVar> > {
debug!("compute_capture_vars(fn_expr_id={:?})", fn_expr_id);
let _indenter = indenter();
});
}
-pub fn pat_binding_ids(dm: resolve::DefMap, pat: &Pat) -> ~[NodeId] {
- let mut found = ~[];
+pub fn pat_binding_ids(dm: resolve::DefMap, pat: &Pat) -> Vec<NodeId> {
+ let mut found = Vec::new();
pat_bindings(dm, pat, |_bm, b_id, _sp, _pt| found.push(b_id) );
return found;
}
reachable_symbols: @RefCell<NodeSet>,
// A worklist of item IDs. Each item ID in this worklist will be inlined
// and will be scanned for further references.
- worklist: @RefCell<~[ast::NodeId]>,
+ worklist: @RefCell<Vec<ast::NodeId> >,
}
struct MarkSymbolVisitor {
- worklist: @RefCell<~[ast::NodeId]>,
+ worklist: @RefCell<Vec<ast::NodeId> >,
method_map: typeck::MethodMap,
tcx: ty::ctxt,
reachable_symbols: @RefCell<NodeSet>,
tcx: tcx,
method_map: method_map,
reachable_symbols: @RefCell::new(NodeSet::new()),
- worklist: @RefCell::new(~[]),
+ worklist: @RefCell::new(Vec::new()),
}
}
pub struct RegionMaps {
priv scope_map: RefCell<NodeMap<ast::NodeId>>,
priv var_map: RefCell<NodeMap<ast::NodeId>>,
- priv free_region_map: RefCell<HashMap<FreeRegion, ~[FreeRegion]>>,
+ priv free_region_map: RefCell<HashMap<FreeRegion, Vec<FreeRegion> >>,
priv rvalue_scopes: RefCell<NodeMap<ast::NodeId>>,
priv terminating_scopes: RefCell<HashSet<ast::NodeId>>,
}
debug!("relate_free_regions(sub={:?}, sup={:?})", sub, sup);
- free_region_map.get().insert(sub, ~[sup]);
+ free_region_map.get().insert(sub, vec!(sup));
}
pub fn record_encl_scope(&self, sub: ast::NodeId, sup: ast::NodeId) {
// doubles as a way to detect if we've seen a particular FR
// before. Note that we expect this graph to be an *extremely
// shallow* tree.
- let mut queue = ~[sub];
+ let mut queue = vec!(sub);
let mut i = 0;
while i < queue.len() {
let free_region_map = self.free_region_map.borrow();
}
fn ancestors_of(this: &RegionMaps, scope: ast::NodeId)
- -> ~[ast::NodeId]
- {
+ -> Vec<ast::NodeId> {
// debug!("ancestors_of(scope={})", scope);
- let mut result = ~[scope];
+ let mut result = vec!(scope);
let mut scope = scope;
loop {
let scope_map = this.scope_map.borrow();
type BindingMap = HashMap<Name,binding_info>;
// Trait method resolution
-pub type TraitMap = NodeMap<~[DefId]>;
+pub type TraitMap = NodeMap<Vec<DefId> >;
// This is the replacement export map. It maps a module to all of the exports
// within.
-pub type ExportMap2 = @RefCell<NodeMap<~[Export2]>>;
+pub type ExportMap2 = @RefCell<NodeMap<Vec<Export2> >>;
pub struct Export2 {
name: ~str, // The name of the target.
/// One import directive.
struct ImportDirective {
- module_path: ~[Ident],
+ module_path: Vec<Ident> ,
subclass: @ImportDirectiveSubclass,
span: Span,
id: NodeId,
}
impl ImportDirective {
- fn new(module_path: ~[Ident],
+ fn new(module_path: Vec<Ident> ,
subclass: @ImportDirectiveSubclass,
span: Span,
id: NodeId,
is_public: bool,
children: RefCell<HashMap<Name, @NameBindings>>,
- imports: RefCell<~[@ImportDirective]>,
+ imports: RefCell<Vec<@ImportDirective> >,
// The external module children of this node that were declared with
// `extern crate`.
kind: Cell::new(kind),
is_public: is_public,
children: RefCell::new(HashMap::new()),
- imports: RefCell::new(~[]),
+ imports: RefCell::new(Vec::new()),
external_module_children: RefCell::new(HashMap::new()),
anonymous_children: RefCell::new(NodeMap::new()),
import_resolutions: RefCell::new(HashMap::new()),
unresolved_imports: 0,
current_module: current_module,
- value_ribs: @RefCell::new(~[]),
- type_ribs: @RefCell::new(~[]),
- label_ribs: @RefCell::new(~[]),
+ value_ribs: @RefCell::new(Vec::new()),
+ type_ribs: @RefCell::new(Vec::new()),
+ label_ribs: @RefCell::new(Vec::new()),
current_trait_refs: None,
primitive_type_table: @PrimitiveTypeTable(),
- namespaces: ~[ TypeNS, ValueNS ],
+ namespaces: vec!(TypeNS, ValueNS),
def_map: @RefCell::new(NodeMap::new()),
export_map2: @RefCell::new(NodeMap::new()),
// The current set of local scopes, for values.
// FIXME #4948: Reuse ribs to avoid allocation.
- value_ribs: @RefCell<~[@Rib]>,
+ value_ribs: @RefCell<Vec<@Rib> >,
// The current set of local scopes, for types.
- type_ribs: @RefCell<~[@Rib]>,
+ type_ribs: @RefCell<Vec<@Rib> >,
// The current set of local scopes, for labels.
- label_ribs: @RefCell<~[@Rib]>,
+ label_ribs: @RefCell<Vec<@Rib> >,
// The trait that the current context can refer to.
- current_trait_refs: Option<~[DefId]>,
+ current_trait_refs: Option<Vec<DefId> >,
// The ident for the keyword "self".
self_ident: Ident,
primitive_type_table: @PrimitiveTypeTable,
// The four namespaces.
- namespaces: ~[Namespace],
+ namespaces: Vec<Namespace> ,
def_map: DefMap,
export_map2: ExportMap2,
// globs and lists, the path is found directly in the AST;
// for simple paths we have to munge the path a little.
- let mut module_path = ~[];
+ let mut module_path = Vec::new();
match view_path.node {
ViewPathSimple(_, ref full_path, _) => {
let path_len = full_path.segments.len();
/// Creates and adds an import directive to the given module.
fn build_import_directive(&mut self,
module_: @Module,
- module_path: ~[Ident],
+ module_path: Vec<Ident> ,
subclass: @ImportDirectiveSubclass,
span: Span,
id: NodeId,
}
fn path_idents_to_str(&mut self, path: &Path) -> ~str {
- let identifiers: ~[ast::Ident] = path.segments
+ let identifiers: Vec<ast::Ident> = path.segments
.iter()
.map(|seg| seg.identifier)
.collect();
}
fn record_exports_for_module(&mut self, module_: @Module) {
- let mut exports2 = ~[];
+ let mut exports2 = Vec::new();
self.add_exports_for_module(&mut exports2, module_);
match module_.def_id.get() {
}
fn add_exports_of_namebindings(&mut self,
- exports2: &mut ~[Export2],
+ exports2: &mut Vec<Export2> ,
name: Name,
namebindings: @NameBindings,
ns: Namespace) {
}
fn add_exports_for_module(&mut self,
- exports2: &mut ~[Export2],
+ exports2: &mut Vec<Export2> ,
module_: @Module) {
let import_resolutions = module_.import_resolutions.borrow();
for (name, importresolution) in import_resolutions.get().iter() {
/// Wraps the given definition in the appropriate number of `def_upvar`
/// wrappers.
fn upvarify(&mut self,
- ribs: &mut ~[@Rib],
+ ribs: &mut Vec<@Rib> ,
rib_index: uint,
def_like: DefLike,
span: Span)
}
fn search_ribs(&mut self,
- ribs: &mut ~[@Rib],
+ ribs: &mut Vec<@Rib> ,
name: Name,
span: Span)
-> Option<DefLike> {
TraitImplementation);
// Record the current set of trait references.
- let mut new_trait_refs = ~[];
+ let mut new_trait_refs = Vec::new();
{
let def_map = this.def_map.borrow();
let r = def_map.get().find(&trait_reference.ref_id);
-> Option<~str> {
let this = &mut *self;
- let mut maybes: ~[token::InternedString] = ~[];
- let mut values: ~[uint] = ~[];
+ let mut maybes: Vec<token::InternedString> = Vec::new();
+ let mut values: Vec<uint> = Vec::new();
let mut j = {
let value_ribs = this.value_ribs.borrow();
}
}
- fn search_for_traits_containing_method(&mut self, name: Ident) -> ~[DefId] {
+ fn search_for_traits_containing_method(&mut self, name: Ident) -> Vec<DefId> {
debug!("(searching for traits containing method) looking for '{}'",
token::get_ident(name));
- let mut found_traits = ~[];
+ let mut found_traits = Vec::new();
let mut search_module = self.current_module;
let method_map = self.method_map.borrow();
match method_map.get().find(&name.name) {
}
fn add_trait_info(&self,
- found_traits: &mut ~[DefId],
+ found_traits: &mut Vec<DefId> ,
trait_def_id: DefId,
name: Ident) {
debug!("(adding trait info) found trait {}:{} for method '{}'",
/// A somewhat inefficient routine to obtain the name of a module.
fn module_to_str(&mut self, module_: @Module) -> ~str {
- let mut idents = ~[];
+ let mut idents = Vec::new();
let mut current_module = module_;
loop {
match current_module.parent_link {
if idents.len() == 0 {
return ~"???";
}
- return self.idents_to_str(idents.move_rev_iter().collect::<~[ast::Ident]>());
+ return self.idents_to_str(idents.move_rev_iter().collect::<Vec<ast::Ident> >());
}
#[allow(dead_code)] // useful for debugging
///////////////////////////////////////////////////////////////////////////
// Other types
-impl<T:Subst> Subst for ~[T] {
+impl<T:Subst> Subst for Vec<T> {
fn subst_spanned(&self, tcx: ty::ctxt,
substs: &ty::substs,
- span: Option<Span>) -> ~[T] {
+ span: Option<Span>) -> Vec<T> {
self.map(|t| t.subst_spanned(tcx, substs, span))
}
}
*/
#[deriving(Clone)]
struct Match<'a,'b> {
- pats: ~[@ast::Pat],
+ pats: Vec<@ast::Pat> ,
data: ArmData<'a,'b>,
- bound_ptrs: ~[(Ident, ValueRef)]
-}
+ bound_ptrs: Vec<(Ident, ValueRef)> }
impl<'a,'b> Repr for Match<'a,'b> {
fn repr(&self, tcx: ty::ctxt) -> ~str {
m: &[Match<'r,'b>],
col: uint,
val: ValueRef)
- -> ~[Match<'r,'b>] {
+ -> Vec<Match<'r,'b>> {
debug!("expand_nested_bindings(bcx={}, m={}, col={}, val={})",
bcx.to_str(),
m.repr(bcx.tcx()),
m.map(|br| {
match br.pats[col].node {
ast::PatIdent(_, ref path, Some(inner)) => {
- let pats = vec::append(
+ let pats = vec_ng::append(
br.pats.slice(0u, col).to_owned(),
- vec::append(~[inner],
+ vec_ng::append(vec!(inner),
br.pats.slice(col + 1u,
br.pats.len())));
}
}
-type enter_pat<'a> = 'a |@ast::Pat| -> Option<~[@ast::Pat]>;
+type enter_pat<'a> = 'a |@ast::Pat| -> Option<Vec<@ast::Pat> >;
fn enter_match<'r,'b>(
bcx: &'b Block<'b>,
col: uint,
val: ValueRef,
e: enter_pat)
- -> ~[Match<'r,'b>] {
+ -> Vec<Match<'r,'b>> {
debug!("enter_match(bcx={}, m={}, col={}, val={})",
bcx.to_str(),
m.repr(bcx.tcx()),
bcx.val_to_str(val));
let _indenter = indenter();
- let mut result = ~[];
+ let mut result = Vec::new();
for br in m.iter() {
match e(br.pats[col]) {
Some(sub) => {
let pats =
- vec::append(
- vec::append(sub, br.pats.slice(0u, col)),
+ vec_ng::append(
+ vec_ng::append(sub, br.pats.slice(0u, col)),
br.pats.slice(col + 1u, br.pats.len()));
let this = br.pats[col];
col: uint,
val: ValueRef,
chk: &FailureHandler)
- -> ~[Match<'r,'b>] {
+ -> Vec<Match<'r,'b>> {
debug!("enter_default(bcx={}, m={}, col={}, val={})",
bcx.to_str(),
m.repr(bcx.tcx()),
// Collect all of the matches that can match against anything.
let matches = enter_match(bcx, dm, m, col, val, |p| {
match p.node {
- ast::PatWild | ast::PatWildMulti | ast::PatTup(_) => Some(~[]),
- ast::PatIdent(_, _, None) if pat_is_binding(dm, p) => Some(~[]),
+ ast::PatWild | ast::PatWildMulti | ast::PatTup(_) => Some(Vec::new()),
+ ast::PatIdent(_, _, None) if pat_is_binding(dm, p) => Some(Vec::new()),
_ => None
}
});
_ => false
};
- if is_exhaustive { ~[] } else { matches }
+ if is_exhaustive { Vec::new() } else { matches }
}
// <pcwalton> nmatsakis: what does enter_opt do?
col: uint,
variant_size: uint,
val: ValueRef)
- -> ~[Match<'r,'b>] {
+ -> Vec<Match<'r,'b>> {
debug!("enter_opt(bcx={}, m={}, opt={:?}, col={}, val={})",
bcx.to_str(),
m.repr(bcx.tcx()),
};
let const_def_id = ast_util::def_id_of_def(const_def);
if opt_eq(tcx, &lit(ConstLit(const_def_id)), opt) {
- Some(~[])
+ Some(Vec::new())
} else {
None
}
ast::PatIdent(_, _, None)
if pat_is_variant_or_struct(tcx.def_map, p) => {
if opt_eq(tcx, &variant_opt(bcx, p.id), opt) {
- Some(~[])
+ Some(Vec::new())
} else {
None
}
}
ast::PatLit(l) => {
- if opt_eq(tcx, &lit(ExprLit(l)), opt) {Some(~[])} else {None}
+ if opt_eq(tcx, &lit(ExprLit(l)), opt) {Some(Vec::new())} else {None}
}
ast::PatRange(l1, l2) => {
- if opt_eq(tcx, &range(l1, l2), opt) {Some(~[])} else {None}
+ if opt_eq(tcx, &range(l1, l2), opt) {Some(Vec::new())} else {None}
}
ast::PatStruct(_, ref field_pats, _) => {
if opt_eq(tcx, &variant_opt(bcx, p.id), opt) {
// Reorder the patterns into the same order they were
// specified in the struct definition. Also fill in
// unspecified fields with dummy.
- let mut reordered_patterns = ~[];
+ let mut reordered_patterns = Vec::new();
let r = ty::lookup_struct_fields(tcx, struct_id);
for field in r.iter() {
match field_pats.iter().find(|p| p.ident.name
let this_opt = vec_len(n, vec_len_ge(before.len()),
(lo, hi));
if opt_eq(tcx, &this_opt, opt) {
- let mut new_before = ~[];
+ let mut new_before = Vec::new();
for pat in before.iter() {
new_before.push(*pat);
}
None if i >= lo && i <= hi => {
let n = before.len();
if opt_eq(tcx, &vec_len(n, vec_len_eq, (lo,hi)), opt) {
- let mut new_before = ~[];
+ let mut new_before = Vec::new();
for pat in before.iter() {
new_before.push(*pat);
}
col: uint,
fields: &[ast::Ident],
val: ValueRef)
- -> ~[Match<'r,'b>] {
+ -> Vec<Match<'r,'b>> {
debug!("enter_rec_or_struct(bcx={}, m={}, col={}, val={})",
bcx.to_str(),
m.repr(bcx.tcx()),
enter_match(bcx, dm, m, col, val, |p| {
match p.node {
ast::PatStruct(_, ref fpats, _) => {
- let mut pats = ~[];
+ let mut pats = Vec::new();
for fname in fields.iter() {
match fpats.iter().find(|p| p.ident.name == fname.name) {
None => pats.push(dummy),
col: uint,
val: ValueRef,
n_elts: uint)
- -> ~[Match<'r,'b>] {
+ -> Vec<Match<'r,'b>> {
debug!("enter_tup(bcx={}, m={}, col={}, val={})",
bcx.to_str(),
m.repr(bcx.tcx()),
enter_match(bcx, dm, m, col, val, |p| {
match p.node {
ast::PatTup(ref elts) => {
- let mut new_elts = ~[];
+ let mut new_elts = Vec::new();
for elt in elts.iter() {
new_elts.push((*elt).clone())
}
col: uint,
val: ValueRef,
n_elts: uint)
- -> ~[Match<'r,'b>] {
+ -> Vec<Match<'r,'b>> {
debug!("enter_tuple_struct(bcx={}, m={}, col={}, val={})",
bcx.to_str(),
m.repr(bcx.tcx()),
m: &[Match<'r,'b>],
col: uint,
val: ValueRef)
- -> ~[Match<'r,'b>] {
+ -> Vec<Match<'r,'b>> {
debug!("enter_uniq(bcx={}, m={}, col={}, val={})",
bcx.to_str(),
m.repr(bcx.tcx()),
enter_match(bcx, dm, m, col, val, |p| {
match p.node {
ast::PatUniq(sub) => {
- Some(~[sub])
+ Some(vec!(sub))
}
_ => {
assert_is_binding_or_wild(bcx, p);
- Some(~[dummy])
+ Some(vec!(dummy))
}
}
})
m: &[Match<'r,'b>],
col: uint,
val: ValueRef)
- -> ~[Match<'r,'b>] {
+ -> Vec<Match<'r,'b>> {
debug!("enter_region(bcx={}, m={}, col={}, val={})",
bcx.to_str(),
m.repr(bcx.tcx()),
enter_match(bcx, dm, m, col, val, |p| {
match p.node {
ast::PatRegion(sub) => {
- Some(~[sub])
+ Some(vec!(sub))
}
_ => {
assert_is_binding_or_wild(bcx, p);
- Some(~[dummy])
+ Some(vec!(dummy))
}
}
})
// Returns the options in one column of matches. An option is something that
// needs to be conditionally matched at runtime; for example, the discriminant
// on a set of enum variants or a literal.
-fn get_options(bcx: &Block, m: &[Match], col: uint) -> ~[Opt] {
+fn get_options(bcx: &Block, m: &[Match], col: uint) -> Vec<Opt> {
let ccx = bcx.ccx();
- fn add_to_set(tcx: ty::ctxt, set: &mut ~[Opt], val: Opt) {
+ fn add_to_set(tcx: ty::ctxt, set: &mut Vec<Opt> , val: Opt) {
if set.iter().any(|l| opt_eq(tcx, l, &val)) {return;}
set.push(val);
}
// conditions over-match, we need to be careful about them. This
// means that in order to properly handle things in order, we need
// to not always merge conditions.
- fn add_veclen_to_set(set: &mut ~[Opt], i: uint,
+ fn add_veclen_to_set(set: &mut Vec<Opt> , i: uint,
len: uint, vlo: VecLenOpt) {
match set.last() {
// If the last condition in the list matches the one we want
}
}
- let mut found = ~[];
+ let mut found = Vec::new();
for (i, br) in m.iter().enumerate() {
let cur = br.pats[col];
match cur.node {
}
struct ExtractedBlock<'a> {
- vals: ~[ValueRef],
+ vals: Vec<ValueRef> ,
bcx: &'a Block<'a>,
}
bcx: &'a Block<'a>,
m: &[Match],
col: uint)
- -> Option<~[ast::Ident]> {
- let mut fields: ~[ast::Ident] = ~[];
+ -> Option<Vec<ast::Ident> > {
+ let mut fields: Vec<ast::Ident> = Vec::new();
let mut found = false;
for br in m.iter() {
match br.pats[col].node {
return None;
}
- fn extend(idents: &mut ~[ast::Ident], field_pats: &[ast::FieldPat]) {
+ fn extend(idents: &mut Vec<ast::Ident> , field_pats: &[ast::FieldPat]) {
for field_pat in field_pats.iter() {
let field_ident = field_pat.ident;
if !idents.iter().any(|x| x.name == field_ident.name) {
let tcx = bcx.tcx();
let dm = tcx.def_map;
- let vals_left = vec::append(vals.slice(0u, col).to_owned(),
+ let vals_left = vec_ng::append(vals.slice(0u, col).to_owned(),
vals.slice(col + 1u, vals.len()));
let ccx = bcx.fcx.ccx;
let mut pat_id = 0;
compile_submatch(
bcx,
enter_rec_or_struct(bcx, dm, m, col, *rec_fields, val),
- vec::append(rec_vals, vals_left),
+ vec_ng::append(rec_vals, vals_left),
chk);
});
return;
adt::trans_field_ptr(bcx, tup_repr, val, 0, i)
});
compile_submatch(bcx, enter_tup(bcx, dm, m, col, val, n_tup_elts),
- vec::append(tup_vals, vals_left), chk);
+ vec_ng::append(tup_vals, vals_left), chk);
return;
}
compile_submatch(bcx,
enter_tuple_struct(bcx, dm, m, col, val,
struct_element_count),
- vec::append(llstructvals, vals_left),
+ vec_ng::append(llstructvals, vals_left),
chk);
return;
}
if any_uniq_pat(m, col) {
let llbox = Load(bcx, val);
compile_submatch(bcx, enter_uniq(bcx, dm, m, col, val),
- vec::append(~[llbox], vals_left), chk);
+ vec_ng::append(vec!(llbox), vals_left), chk);
return;
}
if any_region_pat(m, col) {
let loaded_val = Load(bcx, val);
compile_submatch(bcx, enter_region(bcx, dm, m, col, val),
- vec::append(~[loaded_val], vals_left), chk);
+ vec_ng::append(vec!(loaded_val), vals_left), chk);
return;
}
}
let mut size = 0u;
- let mut unpacked = ~[];
+ let mut unpacked = Vec::new();
match *opt {
var(disr_val, repr) => {
let ExtractedBlock {vals: argvals, bcx: new_bcx} =
lit(_) | range(_, _) => ()
}
let opt_ms = enter_opt(opt_cx, m, opt, col, size, val);
- let opt_vals = vec::append(unpacked, vals_left);
+ let opt_vals = vec_ng::append(unpacked, vals_left);
match branch_chk {
None => compile_submatch(opt_cx, opt_ms, opt_vals, chk),
return bcx;
}
- let mut arm_datas = ~[];
- let mut matches = ~[];
+ let mut arm_datas = Vec::new();
+ let mut matches = Vec::new();
for arm in arms.iter() {
let body = fcx.new_id_block("case_body", arm.body.id);
let bindings_map = create_bindings_map(bcx, *arm.pats.get(0));
arm_datas.push(arm_data.clone());
for p in arm.pats.iter() {
matches.push(Match {
- pats: ~[*p],
+ pats: vec!(*p),
data: arm_data.clone(),
- bound_ptrs: ~[],
+ bound_ptrs: Vec::new(),
});
}
}
let lldiscr = discr_datum.val;
compile_submatch(bcx, matches, [lldiscr], &chk);
- let mut arm_cxs = ~[];
+ let mut arm_cxs = Vec::new();
for arm_data in arm_datas.iter() {
let mut bcx = arm_data.bodycx;
* General-case enums: for each case there is a struct, and they
* all start with a field for the discriminant.
*/
- General(IntType, ~[Struct]),
+ General(IntType, Vec<Struct> ),
/**
* Two cases distinguished by a nullable pointer: the case with discriminant
* `nndiscr` is represented by the struct `nonnull`, where the `ptrfield`th
* identity function.
*/
NullablePointer{ nonnull: Struct, nndiscr: Disr, ptrfield: uint,
- nullfields: ~[ty::t] }
+ nullfields: Vec<ty::t> }
}
/// For structs, and struct-like parts of anything fancier.
size: u64,
align: u64,
packed: bool,
- fields: ~[ty::t]
-}
+ fields: Vec<ty::t> }
/**
* Convenience for `represent_type`. There should probably be more or
let bounds = IntBounds { ulo: 0, uhi: (cases.len() - 1) as u64,
slo: 0, shi: (cases.len() - 1) as i64 };
let ity = range_to_inttype(cx, hint, &bounds);
- let discr = ~[ty_of_inttype(ity)];
+ let discr = vec!(ty_of_inttype(ity));
return General(ity, cases.map(|c| mk_struct(cx, discr + c.tys, false)))
}
_ => cx.sess.bug("adt::represent_type called on non-ADT type")
}
// this should probably all be in ty
-struct Case { discr: Disr, tys: ~[ty::t] }
+struct Case { discr: Disr, tys: Vec<ty::t> }
impl Case {
fn is_zerolen(&self, cx: &CrateContext) -> bool {
mk_struct(cx, self.tys, false).size == 0
}
}
-fn get_cases(tcx: ty::ctxt, def_id: ast::DefId, substs: &ty::substs) -> ~[Case] {
+fn get_cases(tcx: ty::ctxt, def_id: ast::DefId, substs: &ty::substs) -> Vec<Case> {
ty::enum_variants(tcx, def_id).map(|vi| {
let arg_tys = vi.args.map(|&raw_ty| {
ty::subst(tcx, substs, raw_ty)
};
assert_eq!(machine::llalign_of_min(cx, pad_ty) as u64, align);
assert_eq!(align % discr_size, 0);
- let fields = ~[discr_ty,
+ let fields = vec!(discr_ty,
Type::array(&discr_ty, align / discr_size - 1),
- pad_ty];
+ pad_ty);
match name {
None => Type::struct_(fields, false),
Some(name) => {
}
}
-fn struct_llfields(cx: &CrateContext, st: &Struct, sizing: bool) -> ~[Type] {
+fn struct_llfields(cx: &CrateContext, st: &Struct, sizing: bool) -> Vec<Type> {
if sizing {
st.fields.map(|&ty| type_of::sizing_type_of(cx, ty))
} else {
let case = &cases[discr];
let max_sz = cases.iter().map(|x| x.size).max().unwrap();
let lldiscr = C_integral(ll_inttype(ccx, ity), discr as u64, true);
- let contents = build_const_struct(ccx, case, ~[lldiscr] + vals);
+ let contents = build_const_struct(ccx, case, vec!(lldiscr) + vals);
C_struct(contents + &[padding(max_sz - case.size)], false)
}
Univariant(ref st, _dro) => {
// Always use null even if it's not the `ptrfield`th
// field; see #8506.
C_null(type_of::sizing_type_of(ccx, ty))
- });
+ }).collect::<Vec<ValueRef> >();
C_struct(build_const_struct(ccx, nonnull, vals), false)
}
}
* will read the wrong memory.
*/
fn build_const_struct(ccx: &CrateContext, st: &Struct, vals: &[ValueRef])
- -> ~[ValueRef] {
+ -> Vec<ValueRef> {
assert_eq!(vals.len(), st.fields.len());
let mut offset = 0;
- let mut cfields = ~[];
+ let mut cfields = Vec::new();
for (i, &ty) in st.fields.iter().enumerate() {
let llty = type_of::sizing_type_of(ccx, ty);
let type_align = machine::llalign_of_min(ccx, llty)
-> &'a Block<'a> {
let fcx = bcx.fcx;
let mut bcx = bcx;
- let mut constraints = ~[];
- let mut output_types = ~[];
+ let mut constraints = Vec::new();
+ let mut output_types = Vec::new();
let temp_scope = fcx.push_custom_cleanup_scope();
pub use middle::trans::context::task_llcx;
-local_data_key!(task_local_insn_key: ~[&'static str])
+local_data_key!(task_local_insn_key: Vec<&'static str> )
pub fn with_insn_ctxt(blk: |&[&'static str]|) {
local_data::get(task_local_insn_key, |c| {
}
pub fn init_insn_ctxt() {
- local_data::set(task_local_insn_key, ~[]);
+ local_data::set(task_local_insn_key, Vec::new());
}
pub struct _InsnCtxt { _x: () }
pub fn invoke<'a>(
bcx: &'a Block<'a>,
llfn: ValueRef,
- llargs: ~[ValueRef],
+ llargs: Vec<ValueRef> ,
attributes: &[(uint, lib::llvm::Attribute)],
call_info: Option<NodeInfo>)
-> (ValueRef, &'a Block<'a>) {
block_arena: block_arena,
ccx: ccx,
debug_context: debug_context,
- scopes: RefCell::new(~[])
+ scopes: RefCell::new(Vec::new())
};
if has_env {
// appropriate lvalue datums.
pub fn create_datums_for_fn_args(fcx: &FunctionContext,
arg_tys: &[ty::t])
- -> ~[RvalueDatum] {
+ -> Vec<RvalueDatum> {
let _icx = push_ctxt("create_datums_for_fn_args");
// Return an array wrapping the ValueRefs that we get from
arg_scope: cleanup::CustomScopeIndex,
bcx: &'a Block<'a>,
args: &[ast::Arg],
- arg_datums: ~[RvalueDatum])
+ arg_datums: Vec<RvalueDatum> )
-> &'a Block<'a> {
debug!("copy_args_to_allocas");
}
pub fn trans_enum_def(ccx: @CrateContext, enum_definition: &ast::EnumDef,
- id: ast::NodeId, vi: @~[@ty::VariantInfo],
+ id: ast::NodeId, vi: @Vec<@ty::VariantInfo> ,
i: &mut uint) {
for &variant in enum_definition.variants.iter() {
let disr_val = vi[*i].disr_val;
llvm::LLVMBuildPointerCast(bld, rust_main, Type::i8p().to_ref(), buf)
});
- ~[
+ vec!(
opaque_rust_main,
llvm::LLVMGetParam(llfn, 0),
llvm::LLVMGetParam(llfn, 1)
- ]
+ )
};
(start_fn, args)
} else {
debug!("using user-defined start fn");
- let args = ~[
+ let args = vec!(
llvm::LLVMGetParam(llfn, 0 as c_uint),
llvm::LLVMGetParam(llfn, 1 as c_uint)
- ];
+ );
(rust_main, args)
};
}
});
lib::llvm::SetLinkage(map, lib::llvm::InternalLinkage);
- let mut elts: ~[ValueRef] = ~[];
+ let mut elts: Vec<ValueRef> = Vec::new();
// This is not ideal, but the borrow checker doesn't
// like the multiple borrows. At least, it doesn't
// like them on the current snapshot. (2013-06-14)
let keys = {
- let mut keys = ~[];
+ let mut keys = Vec::new();
let module_data = ccx.module_data.borrow();
for (k, _) in module_data.get().iter() {
keys.push(k.clone());
}
pub fn fill_crate_map(ccx: @CrateContext, map: ValueRef) {
- let mut subcrates: ~[ValueRef] = ~[];
+ let mut subcrates: Vec<ValueRef> = Vec::new();
let mut i = 1;
let cstore = ccx.sess.cstore;
while cstore.have_crate_data(i) {
}
}
-pub fn write_metadata(cx: &CrateContext, krate: &ast::Crate) -> ~[u8] {
+pub fn write_metadata(cx: &CrateContext, krate: &ast::Crate) -> Vec<u8> {
use flate;
if !cx.sess.building_library.get() {
- return ~[]
+ return Vec::new()
}
let encode_inlined_item: encoder::EncodeInlinedItem =
}
self.inbounds_gep(base, small_vec.slice(0, ixs.len()))
} else {
- let v = ixs.iter().map(|i| C_i32(*i as i32)).collect::<~[ValueRef]>();
+ let v = ixs.iter().map(|i| C_i32(*i as i32)).collect::<Vec<ValueRef> >();
self.count_insn("gepi");
self.inbounds_gep(base, v)
}
/// comments are reverse-engineered and may be inaccurate. -NDM
pub struct FnType {
/// The LLVM types of each argument.
- arg_tys: ~[ArgType],
+ arg_tys: Vec<ArgType> ,
/// LLVM return type.
ret_ty: ArgType,
atys: &[Type],
rty: Type,
ret_def: bool) -> FnType {
- let mut arg_tys = ~[];
+ let mut arg_tys = Vec::new();
for &aty in atys.iter() {
let ty = classify_arg_ty(aty);
arg_tys.push(ty);
return None;
}
-fn coerce_to_int(size: uint) -> ~[Type] {
+fn coerce_to_int(size: uint) -> Vec<Type> {
let int_ty = Type::i32();
- let mut args = ~[];
+ let mut args = Vec::new();
let mut n = size / 32;
while n > 0 {
};
let sret = ret_ty.is_indirect();
- let mut arg_tys = ~[];
+ let mut arg_tys = Vec::new();
let mut offset = if sret { 4 } else { 0 };
for aty in atys.iter() {
atys: &[Type],
rty: Type,
ret_def: bool) -> FnType {
- let mut arg_tys = ~[];
+ let mut arg_tys = Vec::new();
let ret_ty;
if !ret_def {
}
}
-fn classify_ty(ty: Type) -> ~[RegClass] {
+fn classify_ty(ty: Type) -> Vec<RegClass> {
fn align(off: uint, ty: Type) -> uint {
let a = ty_align(ty);
return (off + a - 1u) / a * a;
return len;
}
- let mut tys = ~[];
+ let mut tys = Vec::new();
let mut i = 0u;
let e = cls.len();
while i < e {
}
}
- let mut arg_tys = ~[];
+ let mut arg_tys = Vec::new();
for t in atys.iter() {
let ty = x86_64_ty(*t, |cls| cls.is_pass_byval(), ByValAttribute);
arg_tys.push(ty);
vtables.len() - num_method_vtables;
vtables.tailn(num_impl_type_parameters).to_owned()
},
- None => vec::from_elem(num_method_vtables, @~[])
+ None => vec::from_elem(num_method_vtables, @Vec::new())
};
let param_vtables = @(*trait_vtables_fixed + method_vtables);
// written in opt_llretslot (if it is Some) or `llresult` will be
// set appropriately (otherwise).
if is_rust_fn {
- let mut llargs = ~[];
+ let mut llargs = Vec::new();
// Push the out-pointer if we use an out-pointer for this
// return type, otherwise push "undef".
// available, so we have to apply any attributes with ABI
// implications directly to the call instruction. Right now,
// the only attribute we need to worry about is `sret`.
- let mut attrs = ~[];
+ let mut attrs = Vec::new();
if type_of::return_uses_outptr(ccx, ret_ty) {
attrs.push((1, StructRetAttribute));
}
// they are always Rust fns.
assert!(dest.is_some());
- let mut llargs = ~[];
+ let mut llargs = Vec::new();
bcx = trans_args(bcx, args, callee_ty, &mut llargs,
cleanup::CustomScope(arg_cleanup_scope), false);
fcx.pop_custom_cleanup_scope(arg_cleanup_scope);
fn trans_args<'a>(cx: &'a Block<'a>,
args: CallArgs,
fn_ty: ty::t,
- llargs: &mut ~[ValueRef],
+ llargs: &mut Vec<ValueRef> ,
arg_cleanup_scope: cleanup::ScopeId,
ignore_self: bool)
-> &'a Block<'a> {
fn tuplify_box_ty(tcx: ty::ctxt, t: ty::t) -> ty::t {
let ptr = ty::mk_imm_ptr(tcx, ty::mk_i8());
- ty::mk_tup(tcx, ~[ty::mk_uint(), ty::mk_nil_ptr(tcx), ptr, ptr, t])
+ ty::mk_tup(tcx, vec!(ty::mk_uint(), ty::mk_nil_ptr(tcx), ptr, ptr, t))
}
fn allocate_cbox<'a>(bcx: &'a Block<'a>,
// Otherwise, it is stack allocated and copies pointers to the upvars.
pub fn store_environment<'a>(
bcx: &'a Block<'a>,
- bound_values: ~[EnvValue],
+ bound_values: Vec<EnvValue> ,
sigil: ast::Sigil)
-> ClosureResult<'a> {
let _icx = push_ctxt("closure::store_environment");
let bcx = bcx0;
// Package up the captured upvars
- let mut env_vals = ~[];
+ let mut env_vals = Vec::new();
for cap_var in cap_vars.iter() {
debug!("Building closure: captured variable {:?}", *cap_var);
let datum = expr::trans_local_var(bcx, cap_var.def);
let bcx = fcx.entry_bcx.get().unwrap();
let args = create_datums_for_fn_args(&fcx, ty::ty_fn_args(closure_ty));
- let mut llargs = ~[];
+ let mut llargs = Vec::new();
match fcx.llretptr.get() {
Some(llretptr) => {
llargs.push(llretptr);
n_llvm_insns: Cell<uint>,
llvm_insns: RefCell<HashMap<~str, uint>>,
// (ident, time-in-ms, llvm-instructions)
- fn_stats: RefCell<~[(~str, uint, uint)]>,
+ fn_stats: RefCell<Vec<(~str, uint, uint)> >,
}
pub struct BuilderRef_res {
// Here `self_ty` is the real type of the self parameter to this method. It
// will only be set in the case of default methods.
pub struct param_substs {
- tys: ~[ty::t],
+ tys: Vec<ty::t> ,
self_ty: Option<ty::t>,
vtables: Option<typeck::vtable_res>,
self_vtables: Option<typeck::vtable_param_res>
debug_context: debuginfo::FunctionDebugContext,
// Cleanup scopes.
- scopes: RefCell<~[cleanup::CleanupScope<'a>]>,
+ scopes: RefCell<Vec<cleanup::CleanupScope<'a>> >,
}
impl<'a> FunctionContext<'a> {
pub fn C_zero_byte_arr(size: uint) -> ValueRef {
unsafe {
let mut i = 0u;
- let mut elts: ~[ValueRef] = ~[];
+ let mut elts: Vec<ValueRef> = Vec::new();
while i < size { elts.push(C_u8(0u)); i += 1u; }
return llvm::LLVMConstArray(Type::i8().to_ref(),
elts.as_ptr(), elts.len() as c_uint);
// Used to identify cached monomorphized functions and vtables
#[deriving(Eq, Hash)]
pub enum mono_param_id {
- mono_precise(ty::t, Option<@~[mono_id]>),
+ mono_precise(ty::t, Option<@Vec<mono_id> >),
mono_any,
mono_repr(uint /* size */,
uint /* align */,
#[deriving(Eq, Hash)]
pub struct mono_id_ {
def: ast::DefId,
- params: ~[mono_param_id]
-}
+ params: Vec<mono_param_id> }
pub type mono_id = @mono_id_;
monomorphize_type(bcx, t)
}
-pub fn node_id_type_params(bcx: &Block, id: ast::NodeId, is_method: bool) -> ~[ty::t] {
+pub fn node_id_type_params(bcx: &Block, id: ast::NodeId, is_method: bool) -> Vec<ty::t> {
let tcx = bcx.tcx();
let params = if is_method {
bcx.ccx().maps.method_map.borrow().get().get(&id).substs.tps.clone()
param_bounds[n_bound].clone()
}
-pub fn dummy_substs(tps: ~[ty::t]) -> ty::substs {
+pub fn dummy_substs(tps: Vec<ty::t> ) -> ty::substs {
substs {
regions: ty::ErasedRegions,
self_ty: None,
is_local: bool) -> (ValueRef, bool) {
let map_list = |exprs: &[@ast::Expr]| {
exprs.iter().map(|&e| const_expr(cx, e, is_local))
- .fold((~[], true), |(l, all_inlineable), (val, inlineable)| {
- (vec::append_one(l, val), all_inlineable && inlineable)
+ .fold((Vec::new(), true),
+ |(mut l, all_inlineable), (val, inlineable)| {
+ l.push(val);
+ (l, all_inlineable && inlineable)
})
};
unsafe {
n_closures: Cell::new(0u),
n_llvm_insns: Cell::new(0u),
llvm_insns: RefCell::new(HashMap::new()),
- fn_stats: RefCell::new(~[]),
+ fn_stats: RefCell::new(Vec::new()),
},
tydesc_type: tydesc_type,
int_type: int_type,
indices: &[uint]) -> ValueRef {
debug!("const_inbounds_gepi: pointer={} indices={:?}",
self.tn.val_to_str(pointer), indices);
- let v: ~[ValueRef] =
+ let v: Vec<ValueRef> =
indices.iter().map(|i| C_i32(*i as i32)).collect();
unsafe {
llvm::LLVMConstInBoundsGEP(pointer,
let v_line = loc.line as int;
let v_str = PointerCast(bcx, v_fail_str, Type::i8p());
let v_filename = PointerCast(bcx, v_filename, Type::i8p());
- let args = ~[v_str, v_filename, C_int(ccx, v_line)];
+ let args = vec!(v_str, v_filename, C_int(ccx, v_line));
let did = langcall(bcx, Some(sp), "", FailFnLangItem);
let bcx = callee::trans_lang_call(bcx, did, args, Some(expr::Ignore)).bcx;
Unreachable(bcx);
-> &'a Block<'a> {
let _icx = push_ctxt("trans_fail_bounds_check");
let (filename, line) = filename_and_line_num_from_span(bcx, sp);
- let args = ~[filename, line, index, len];
+ let args = vec!(filename, line, index, len);
let did = langcall(bcx, Some(sp), "", FailBoundsCheckFnLangItem);
let bcx = callee::trans_lang_call(bcx, did, args, Some(expr::Ignore)).bcx;
Unreachable(bcx);
priv current_debug_location: Cell<DebugLocation>,
priv created_files: RefCell<HashMap<~str, DIFile>>,
priv created_types: RefCell<HashMap<uint, DIType>>,
- priv namespace_map: RefCell<HashMap<~[ast::Name], @NamespaceTreeNode>>,
+ priv namespace_map: RefCell<HashMap<Vec<ast::Name> , @NamespaceTreeNode>>,
// This collection is used to assert that composite types (structs, enums, ...) have their
// members only set once:
priv composite_types_completed: RefCell<HashSet<DIType>>,
name_to_append_suffix_to.push_char('<');
// The list to be filled with template parameters:
- let mut template_params: ~[DIDescriptor] = vec::with_capacity(generics.ty_params.len() + 1);
+ let mut template_params: Vec<DIDescriptor> = Vec::with_capacity(generics.ty_params.len() + 1);
// Handle self type
if has_self_type {
impl MemberDescriptionFactory {
fn create_member_descriptions(&self, cx: &CrateContext)
- -> ~[MemberDescription] {
+ -> Vec<MemberDescription> {
match *self {
StructMD(ref this) => {
this.create_member_descriptions(cx)
}
struct StructMemberDescriptionFactory {
- fields: ~[ty::field],
+ fields: Vec<ty::field> ,
span: Span,
}
impl StructMemberDescriptionFactory {
fn create_member_descriptions(&self, cx: &CrateContext)
- -> ~[MemberDescription] {
+ -> Vec<MemberDescription> {
self.fields.map(|field| {
let name = if field.ident.name == special_idents::unnamed_field.name {
~""
}
struct TupleMemberDescriptionFactory {
- component_types: ~[ty::t],
+ component_types: Vec<ty::t> ,
span: Span,
}
impl TupleMemberDescriptionFactory {
fn create_member_descriptions(&self, cx: &CrateContext)
- -> ~[MemberDescription] {
+ -> Vec<MemberDescription> {
self.component_types.map(|&component_type| {
MemberDescription {
name: ~"",
struct GeneralMemberDescriptionFactory {
type_rep: @adt::Repr,
- variants: @~[@ty::VariantInfo],
+ variants: @Vec<@ty::VariantInfo> ,
discriminant_type_metadata: ValueRef,
containing_scope: DIScope,
file_metadata: DIFile,
impl GeneralMemberDescriptionFactory {
fn create_member_descriptions(&self, cx: &CrateContext)
- -> ~[MemberDescription] {
+ -> Vec<MemberDescription> {
// Capture type_rep, so we don't have to copy the struct_defs array
let struct_defs = match *self.type_rep {
adt::General(_, ref struct_defs) => struct_defs,
}
struct EnumVariantMemberDescriptionFactory {
- args: ~[(~str, ty::t)],
+ args: Vec<(~str, ty::t)> ,
discriminant_type_metadata: Option<DIType>,
span: Span,
}
impl EnumVariantMemberDescriptionFactory {
fn create_member_descriptions(&self, cx: &CrateContext)
- -> ~[MemberDescription] {
+ -> Vec<MemberDescription> {
self.args.iter().enumerate().map(|(i, &(ref name, ty))| {
MemberDescription {
name: name.to_str(),
}
// Build an array of (field name, field type) pairs to be captured in the factory closure.
- let args: ~[(~str, ty::t)] = arg_names.iter()
+ let args: Vec<(~str, ty::t)> = arg_names.iter()
.zip(struct_def.fields.iter())
.map(|(s, &t)| (s.to_str(), t))
.collect();
let variants = ty::enum_variants(cx.tcx, enum_def_id);
- let enumerators_metadata: ~[DIDescriptor] = variants
+ let enumerators_metadata: Vec<DIDescriptor> = variants
.iter()
.map(|v| {
token::get_ident(v.name).get().with_c_str(|name| {
let loc = span_start(cx, definition_span);
- let member_metadata: ~[DIDescriptor] = member_descriptions
+ let member_metadata: Vec<DIDescriptor> = member_descriptions
.iter()
.enumerate()
.map(|(i, member_description)| {
let loc = span_start(cx, span);
let file_metadata = file_metadata(cx, loc.file.name);
- let mut signature_metadata: ~[DIType] = vec::with_capacity(signature.inputs.len() + 1);
+ let mut signature_metadata: Vec<DIType> = Vec::with_capacity(signature.inputs.len() + 1);
// return type
signature_metadata.push(match ty::get(signature.output).sty {
ident: Option<ast::Ident>
}
- let mut scope_stack = ~[ScopeStackEntry { scope_metadata: fn_metadata, ident: None }];
+ let mut scope_stack = vec!(ScopeStackEntry { scope_metadata: fn_metadata, ident: None });
// Push argument identifiers onto the stack so arguments integrate nicely with variable
// shadowing.
// local helper functions for walking the AST.
fn with_new_scope(cx: &CrateContext,
scope_span: Span,
- scope_stack: &mut ~[ScopeStackEntry],
+ scope_stack: &mut Vec<ScopeStackEntry> ,
scope_map: &mut HashMap<ast::NodeId, DIScope>,
inner_walk: |&CrateContext,
- &mut ~[ScopeStackEntry],
+ &mut Vec<ScopeStackEntry> ,
&mut HashMap<ast::NodeId, DIScope>|) {
// Create a new lexical scope and push it onto the stack
let loc = cx.sess.codemap.lookup_char_pos(scope_span.lo);
fn walk_block(cx: &CrateContext,
block: &ast::Block,
- scope_stack: &mut ~[ScopeStackEntry],
+ scope_stack: &mut Vec<ScopeStackEntry> ,
scope_map: &mut HashMap<ast::NodeId, DIScope>) {
scope_map.insert(block.id, scope_stack.last().unwrap().scope_metadata);
fn walk_decl(cx: &CrateContext,
decl: &ast::Decl,
- scope_stack: &mut ~[ScopeStackEntry],
+ scope_stack: &mut Vec<ScopeStackEntry> ,
scope_map: &mut HashMap<ast::NodeId, DIScope>) {
match *decl {
codemap::Spanned { node: ast::DeclLocal(local), .. } => {
fn walk_pattern(cx: &CrateContext,
pat: @ast::Pat,
- scope_stack: &mut ~[ScopeStackEntry],
+ scope_stack: &mut Vec<ScopeStackEntry> ,
scope_map: &mut HashMap<ast::NodeId, DIScope>) {
let def_map = cx.tcx.def_map;
fn walk_expr(cx: &CrateContext,
exp: &ast::Expr,
- scope_stack: &mut ~[ScopeStackEntry],
+ scope_stack: &mut Vec<ScopeStackEntry> ,
scope_map: &mut HashMap<ast::NodeId, DIScope>) {
scope_map.insert(exp.id, scope_stack.last().unwrap().scope_metadata);
};
let mut path = krate.move_iter().chain(path).peekable();
- let mut current_key = ~[];
+ let mut current_key = Vec::new();
let mut parent_node: Option<@NamespaceTreeNode> = None;
// Create/Lookup namespace for each element of the path.
}
ast::ExprTup(ref args) => {
let repr = adt::represent_type(bcx.ccx(), expr_ty(bcx, expr));
- let numbered_fields: ~[(uint, @ast::Expr)] =
+ let numbered_fields: Vec<(uint, @ast::Expr)> =
args.iter().enumerate().map(|(i, arg)| (i, *arg)).collect();
trans_adt(bcx, repr, 0, numbered_fields, None, dest)
}
});
let optbase = match base {
Some(base_expr) => {
- let mut leftovers = ~[];
+ let mut leftovers = Vec::new();
for (i, b) in need_base.iter().enumerate() {
if *b {
leftovers.push((i, field_tys[i].mt.ty))
/// The base expression; will be evaluated after all explicit fields.
expr: @ast::Expr,
/// The indices of fields to copy paired with their types.
- fields: ~[(uint, ty::t)]
-}
+ fields: Vec<(uint, ty::t)> }
/**
* Constructs an ADT instance:
_ => false
}
});
- let modpath: ~[ast_map::PathElem] = path.collect();
+ let modpath: Vec<ast_map::PathElem> = path.collect();
let modname = ast_map::path_to_str(ast_map::Values(modpath.iter()));
(modpath, modname)
})
struct LlvmSignature {
// LLVM versions of the types of this function's arguments.
- llarg_tys: ~[Type],
+ llarg_tys: Vec<Type> ,
// LLVM version of the type that this function returns. Note that
// this *may not be* the declared return type of the foreign
llfn: ValueRef,
llretptr: ValueRef,
llargs_rust: &[ValueRef],
- passed_arg_tys: ~[ty::t])
+ passed_arg_tys: Vec<ty::t> )
-> &'a Block<'a> {
/*!
* Prepares a call to a native function. This requires adapting
let arg_tys: &[cabi::ArgType] = fn_type.arg_tys;
- let mut llargs_foreign = ~[];
+ let mut llargs_foreign = Vec::new();
// If the foreign ABI expects return value by pointer, supply the
// pointer that Rust gave us. Sometimes we have to bitcast
llvm::LLVMPositionBuilderAtEnd(builder, the_block);
// Array for the arguments we will pass to the rust function.
- let mut llrust_args = ~[];
+ let mut llrust_args = Vec::new();
let mut next_foreign_arg_counter: c_uint = 0;
let next_foreign_arg: |pad: bool| -> c_uint = |pad: bool| {
next_foreign_arg_counter += if pad {
}
fn lltype_for_fn_from_foreign_types(tys: &ForeignTypes) -> Type {
- let mut llargument_tys = ~[];
+ let mut llargument_tys = Vec::new();
let ret_ty = tys.fn_ty.ret_ty;
let llreturn_ty = if ret_ty.is_indirect() {
let field_scope = bcx.fcx.push_custom_cleanup_scope();
let self_arg = PointerCast(bcx, v0, params[0]);
- let args = ~[self_arg];
+ let args = vec!(self_arg);
// Add all the fields as a value which needs to be cleaned at the end of
// this scope.
// This requires that atomic intrinsics follow a specific naming pattern:
// "atomic_<operation>[_<ordering>], and no ordering means SeqCst
if name.get().starts_with("atomic_") {
- let split: ~[&str] = name.get().split('_').collect();
+ let split: Vec<&str> = name.get().split('_').collect();
assert!(split.len() >= 2, "Atomic intrinsic not correct format");
let order = if split.len() == 2 {
lib::llvm::SequentiallyConsistent
is_method: bool,
rcvr_substs: &[ty::t],
rcvr_origins: typeck::vtable_res)
- -> (~[ty::t], typeck::vtable_res) {
+ -> (Vec<ty::t> , typeck::vtable_res) {
/*!
*
* Creates a concatenated set of substitutions which includes
let node_substs = node_id_type_params(bcx, expr_id, is_method);
debug!("rcvr_substs={:?}", rcvr_substs.repr(ccx.tcx));
let ty_substs
- = vec::append(rcvr_substs.to_owned(),
+ = vec_ng::append(rcvr_substs.to_owned(),
node_substs.tailn(node_substs.len() - n_m_tps));
debug!("n_m_tps={:?}", n_m_tps);
debug!("node_substs={:?}", node_substs.repr(ccx.tcx));
// exist, in which case we need to make them.
let r_m_origins = match node_vtables(bcx, expr_id) {
Some(vt) => vt,
- None => @vec::from_elem(node_substs.len(), @~[])
+ None => @vec::from_elem(node_substs.len(), @Vec::new())
};
let vtables
- = @vec::append(rcvr_origins.to_owned(),
+ = @vec_ng::append(rcvr_origins.to_owned(),
r_m_origins.tailn(r_m_origins.len() - n_m_tps));
(ty_substs, vtables)
unsafe {
let _icx = push_ctxt("meth::make_vtable");
- let mut components = ~[drop_glue];
+ let mut components = vec!(drop_glue);
for &ptr in ptrs.iter() {
components.push(ptr)
}
impl_id: ast::DefId,
substs: &[ty::t],
vtables: typeck::vtable_res)
- -> ~[ValueRef] {
+ -> Vec<ValueRef> {
let ccx = bcx.ccx();
let tcx = ccx.tcx;
// FIXME (possibly #5801): Need a lot of type hints to get
// .collect() to work.
let substs_iter = substs.self_ty.iter().chain(substs.tys.iter());
- let precise_param_ids: ~[(ty::t, Option<@~[mono_id]>)] = match substs.vtables {
+ let precise_param_ids: Vec<(ty::t, Option<@Vec<mono_id> >)> = match substs.vtables {
Some(vts) => {
debug!("make_mono_id vtables={} substs={}",
vts.repr(ccx.tcx), substs.tys.repr(ccx.tcx));
(*subst, if !v.is_empty() { Some(@v) } else { None })
}).collect()
}
- None => substs_iter.map(|subst| (*subst, None::<@~[mono_id]>)).collect()
+ None => substs_iter.map(|subst| (*subst, None::<@Vec<mono_id> >)).collect()
};
pub struct Reflector<'a> {
visitor_val: ValueRef,
- visitor_methods: @~[@ty::Method],
+ visitor_methods: @Vec<@ty::Method> ,
final_bcx: &'a Block<'a>,
tydesc_ty: Type,
bcx: &'a Block<'a>
scratch.val
}
- pub fn c_size_and_align(&mut self, t: ty::t) -> ~[ValueRef] {
+ pub fn c_size_and_align(&mut self, t: ty::t) -> Vec<ValueRef> {
let tr = type_of(self.bcx.ccx(), t);
let s = machine::llsize_of_real(self.bcx.ccx(), tr);
let a = machine::llalign_of_min(self.bcx.ccx(), tr);
- return ~[self.c_uint(s as uint),
- self.c_uint(a as uint)];
+ return vec!(self.c_uint(s as uint),
+ self.c_uint(a as uint));
}
pub fn c_tydesc(&mut self, t: ty::t) -> ValueRef {
PointerCast(bcx, static_ti.tydesc, self.tydesc_ty.ptr_to())
}
- pub fn c_mt(&mut self, mt: &ty::mt) -> ~[ValueRef] {
- ~[self.c_uint(mt.mutbl as uint),
- self.c_tydesc(mt.ty)]
+ pub fn c_mt(&mut self, mt: &ty::mt) -> Vec<ValueRef> {
+ vec!(self.c_uint(mt.mutbl as uint),
+ self.c_tydesc(mt.ty))
}
pub fn visit(&mut self, ty_name: &str, args: &[ValueRef]) {
pub fn vstore_name_and_extra(&mut self,
t: ty::t,
vstore: ty::vstore)
- -> (~str, ~[ValueRef]) {
+ -> (~str, Vec<ValueRef> ) {
match vstore {
ty::vstore_fixed(n) => {
- let extra = vec::append(~[self.c_uint(n)],
+ let extra = vec_ng::append(vec!(self.c_uint(n)),
self.c_size_and_align(t));
(~"fixed", extra)
}
- ty::vstore_slice(_) => (~"slice", ~[]),
- ty::vstore_uniq => (~"uniq", ~[]),
+ ty::vstore_slice(_) => (~"slice", Vec::new()),
+ ty::vstore_uniq => (~"uniq", Vec::new()),
}
}
}
ty::ty_tup(ref tys) => {
- let extra = ~[self.c_uint(tys.len())]
+ let extra = vec!(self.c_uint(tys.len()))
+ self.c_size_and_align(t);
self.bracketed("tup", extra, |this| {
for (i, t) in tys.iter().enumerate() {
- let extra = ~[this.c_uint(i), this.c_tydesc(*t)];
+ let extra = vec!(this.c_uint(i), this.c_tydesc(*t));
this.visit("tup_field", extra);
}
})
let pureval = ast_purity_constant(fty.purity);
let sigilval = ast_sigil_constant(fty.sigil);
let retval = if ty::type_is_bot(fty.sig.output) {0u} else {1u};
- let extra = ~[self.c_uint(pureval),
+ let extra = vec!(self.c_uint(pureval),
self.c_uint(sigilval),
self.c_uint(fty.sig.inputs.len()),
- self.c_uint(retval)];
+ self.c_uint(retval));
self.visit("enter_fn", extra);
self.visit_sig(retval, &fty.sig);
self.visit("leave_fn", extra);
let pureval = ast_purity_constant(fty.purity);
let sigilval = 0u;
let retval = if ty::type_is_bot(fty.sig.output) {0u} else {1u};
- let extra = ~[self.c_uint(pureval),
+ let extra = vec!(self.c_uint(pureval),
self.c_uint(sigilval),
self.c_uint(fty.sig.inputs.len()),
- self.c_uint(retval)];
+ self.c_uint(retval));
self.visit("enter_fn", extra);
self.visit_sig(retval, &fty.sig);
self.visit("leave_fn", extra);
fields[0].ident.name != special_idents::unnamed_field.name;
}
- let extra = ~[
+ let extra = vec!(
self.c_slice(token::intern_and_get_ident(ty_to_str(tcx,
t))),
self.c_bool(named_fields),
self.c_uint(fields.len())
- ] + self.c_size_and_align(t);
+ ) + self.c_size_and_align(t);
self.bracketed("class", extra, |this| {
for (i, field) in fields.iter().enumerate() {
- let extra = ~[
+ let extra = vec!(
this.c_uint(i),
this.c_slice(token::get_ident(field.ident)),
this.c_bool(named_fields)
- ] + this.c_mt(&field.mt);
+ ) + this.c_mt(&field.mt);
this.visit("class_field", extra);
}
})
llfdecl
};
- let enum_args = ~[self.c_uint(variants.len()), make_get_disr()]
+ let enum_args = vec!(self.c_uint(variants.len()), make_get_disr())
+ self.c_size_and_align(t);
self.bracketed("enum", enum_args, |this| {
for (i, v) in variants.iter().enumerate() {
let name = token::get_ident(v.name);
- let variant_args = ~[this.c_uint(i),
+ let variant_args = vec!(this.c_uint(i),
C_u64(v.disr_val),
this.c_uint(v.args.len()),
- this.c_slice(name)];
+ this.c_slice(name));
this.bracketed("enum_variant", variant_args, |this| {
for (j, a) in v.args.iter().enumerate() {
let bcx = this.bcx;
let null = C_null(llptrty);
let ptr = adt::trans_field_ptr(bcx, repr, null, v.disr_val, j);
let offset = p2i(ccx, ptr);
- let field_args = ~[this.c_uint(j),
+ let field_args = vec!(this.c_uint(j),
offset,
- this.c_tydesc(*a)];
+ this.c_tydesc(*a));
this.visit("enum_variant_field", field_args);
}
})
ty::ty_infer(_) => self.leaf("infer"),
ty::ty_err => self.leaf("err"),
ty::ty_param(ref p) => {
- let extra = ~[self.c_uint(p.idx)];
+ let extra = vec!(self.c_uint(p.idx));
self.visit("param", extra)
}
ty::ty_self(..) => self.leaf("self")
pub fn visit_sig(&mut self, retval: uint, sig: &ty::FnSig) {
for (i, arg) in sig.inputs.iter().enumerate() {
let modeval = 5u; // "by copy"
- let extra = ~[self.c_uint(i),
+ let extra = vec!(self.c_uint(i),
self.c_uint(modeval),
- self.c_tydesc(*arg)];
+ self.c_tydesc(*arg));
self.visit("fn_input", extra);
}
- let extra = ~[self.c_uint(retval),
+ let extra = vec!(self.c_uint(retval),
self.c_bool(sig.variadic),
- self.c_tydesc(sig.output)];
+ self.c_tydesc(sig.output));
self.visit("fn_output", extra);
}
}
}
}
- pub fn field_types(&self) -> ~[Type] {
+ pub fn field_types(&self) -> Vec<Type> {
unsafe {
let n_elts = llvm::LLVMCountStructElementTypes(self.to_ref()) as uint;
if n_elts == 0 {
- return ~[];
+ return Vec::new();
}
let mut elts = vec::from_elem(n_elts, 0 as TypeRef);
llvm::LLVMGetStructElementTypes(self.to_ref(), &mut elts[0]);
ty!(llvm::LLVMGetReturnType(self.to_ref()))
}
- pub fn func_params(&self) -> ~[Type] {
+ pub fn func_params(&self) -> Vec<Type> {
unsafe {
let n_args = llvm::LLVMCountParamTypes(self.to_ref()) as uint;
let args = vec::from_elem(n_args, 0 as TypeRef);
pub fn type_of_rust_fn(cx: &CrateContext, has_env: bool,
inputs: &[ty::t], output: ty::t) -> Type {
- let mut atys: ~[Type] = ~[];
+ let mut atys: Vec<Type> = Vec::new();
// Arg 0: Output pointer.
// (if the output type is non-immediate)
pub struct Impl {
did: DefId,
ident: Ident,
- methods: ~[@Method]
-}
+ methods: Vec<@Method> }
#[deriving(Clone, Eq, Hash)]
pub struct mt {
// of this node. This only applies to nodes that refer to entities
// parameterized by type parameters, such as generic fns, types, or
// other items.
- node_type_substs: RefCell<NodeMap<~[t]>>,
+ node_type_substs: RefCell<NodeMap<Vec<t> >>,
// Maps from a method to the method "descriptor"
methods: RefCell<DefIdMap<@Method>>,
// Maps from a trait def-id to a list of the def-ids of its methods
- trait_method_def_ids: RefCell<DefIdMap<@~[DefId]>>,
+ trait_method_def_ids: RefCell<DefIdMap<@Vec<DefId> >>,
// A cache for the trait_methods() routine
- trait_methods_cache: RefCell<DefIdMap<@~[@Method]>>,
+ trait_methods_cache: RefCell<DefIdMap<@Vec<@Method> >>,
impl_trait_cache: RefCell<DefIdMap<Option<@ty::TraitRef>>>,
needs_unwind_cleanup_cache: RefCell<HashMap<t, bool>>,
tc_cache: RefCell<HashMap<uint, TypeContents>>,
ast_ty_to_ty_cache: RefCell<NodeMap<ast_ty_to_ty_cache_entry>>,
- enum_var_cache: RefCell<DefIdMap<@~[@VariantInfo]>>,
+ enum_var_cache: RefCell<DefIdMap<@Vec<@VariantInfo> >>,
ty_param_defs: RefCell<NodeMap<TypeParameterDef>>,
adjustments: RefCell<NodeMap<@AutoAdjustment>>,
normalized_cache: RefCell<HashMap<t, t>>,
lang_items: @middle::lang_items::LanguageItems,
// A mapping of fake provided method def_ids to the default implementation
provided_method_sources: RefCell<DefIdMap<ast::DefId>>,
- supertraits: RefCell<DefIdMap<@~[@TraitRef]>>,
+ supertraits: RefCell<DefIdMap<@Vec<@TraitRef> >>,
// Maps from def-id of a type or region parameter to its
// (inferred) variance.
destructors: RefCell<DefIdSet>,
// Maps a trait onto a list of impls of that trait.
- trait_impls: RefCell<DefIdMap<@RefCell<~[@Impl]>>>,
+ trait_impls: RefCell<DefIdMap<@RefCell<Vec<@Impl> >>>,
// Maps a def_id of a type to a list of its inherent impls.
// Contains implementations of methods that are inherent to a type.
// Methods in these implementations don't need to be exported.
- inherent_impls: RefCell<DefIdMap<@RefCell<~[@Impl]>>>,
+ inherent_impls: RefCell<DefIdMap<@RefCell<Vec<@Impl> >>>,
// Maps a def_id of an impl to an Impl structure.
// Note that this contains all of the impls that we know about,
#[deriving(Clone, Eq, Hash)]
pub struct FnSig {
binder_id: ast::NodeId,
- inputs: ~[t],
+ inputs: Vec<t>,
output: t,
variadic: bool
}
#[deriving(Clone, Eq, Hash)]
pub struct substs {
self_ty: Option<ty::t>,
- tps: ~[t],
+ tps: Vec<t>,
regions: RegionSubsts,
}
ty_closure(ClosureTy),
ty_trait(DefId, substs, TraitStore, ast::Mutability, BuiltinBounds),
ty_struct(DefId, substs),
- ty_tup(~[t]),
+ ty_tup(Vec<t>),
ty_param(param_ty), // type parameter
ty_self(DefId), /* special, implicit `self` type parameter;
#[deriving(Eq, Hash)]
pub struct ParamBounds {
builtin_bounds: BuiltinBounds,
- trait_bounds: ~[@TraitRef]
-}
+ trait_bounds: Vec<@TraitRef> }
pub type BuiltinBounds = EnumSet<BuiltinBound>;
#[deriving(Clone)]
pub struct Generics {
/// List of type parameters declared on the item.
- type_param_defs: Rc<~[TypeParameterDef]>,
+ type_param_defs: Rc<Vec<TypeParameterDef> >,
/// List of region parameters declared on the item.
- region_param_defs: Rc<~[RegionParameterDef]>,
+ region_param_defs: Rc<Vec<RegionParameterDef> >,
}
impl Generics {
self_param_bound: Option<@TraitRef>,
/// Bounds on each numbered type parameter
- type_param_bounds: ~[ParamBounds],
+ type_param_bounds: Vec<ParamBounds> ,
}
/// A polytype.
mk_t(cx, ty_unboxed_vec(mt {ty: ty, mutbl: ast::MutImmutable}))
}
-pub fn mk_tup(cx: ctxt, ts: ~[t]) -> t { mk_t(cx, ty_tup(ts)) }
+pub fn mk_tup(cx: ctxt, ts: Vec<t>) -> t { mk_t(cx, ty_tup(ts)) }
pub fn mk_closure(cx: ctxt, fty: ClosureTy) -> t {
mk_t(cx, ty_closure(fty))
// True if instantiating an instance of `r_ty` requires an instance of `r_ty`.
pub fn is_instantiable(cx: ctxt, r_ty: t) -> bool {
- fn type_requires(cx: ctxt, seen: &mut ~[DefId],
+ fn type_requires(cx: ctxt, seen: &mut Vec<DefId> ,
r_ty: t, ty: t) -> bool {
debug!("type_requires({}, {})?",
::util::ppaux::ty_to_str(cx, r_ty),
return r;
}
- fn subtypes_require(cx: ctxt, seen: &mut ~[DefId],
+ fn subtypes_require(cx: ctxt, seen: &mut Vec<DefId> ,
r_ty: t, ty: t) -> bool {
debug!("subtypes_require({}, {})?",
::util::ppaux::ty_to_str(cx, r_ty),
return r;
}
- let mut seen = ~[];
+ let mut seen = Vec::new();
!subtypes_require(cx, &mut seen, r_ty, r_ty)
}
pub fn is_type_representable(cx: ctxt, ty: t) -> Representability {
// Iterate until something non-representable is found
- fn find_nonrepresentable<It: Iterator<t>>(cx: ctxt, seen: &mut ~[DefId],
+ fn find_nonrepresentable<It: Iterator<t>>(cx: ctxt, seen: &mut Vec<DefId> ,
mut iter: It) -> Representability {
for ty in iter {
let r = type_structurally_recursive(cx, seen, ty);
// Does the type `ty` directly (without indirection through a pointer)
// contain any types on stack `seen`?
- fn type_structurally_recursive(cx: ctxt, seen: &mut ~[DefId],
+ fn type_structurally_recursive(cx: ctxt, seen: &mut Vec<DefId> ,
ty: t) -> Representability {
debug!("type_structurally_recursive: {}",
::util::ppaux::ty_to_str(cx, ty));
// To avoid a stack overflow when checking an enum variant or struct that
// contains a different, structurally recursive type, maintain a stack
// of seen types and check recursion for each of them (issues #3008, #3779).
- let mut seen: ~[DefId] = ~[];
+ let mut seen: Vec<DefId> = Vec::new();
type_structurally_recursive(cx, &mut seen, ty)
}
}
// FIXME(pcwalton): Makes a copy, bleh. Probably better to not do that.
-pub fn node_id_to_type_params(cx: ctxt, id: ast::NodeId) -> ~[t] {
+pub fn node_id_to_type_params(cx: ctxt, id: ast::NodeId) -> Vec<t> {
let node_type_substs = cx.node_type_substs.borrow();
match node_type_substs.get().find(&id) {
- None => return ~[],
+ None => return Vec::new(),
Some(ts) => return (*ts).clone(),
}
}
}
// Type accessors for substructures of types
-pub fn ty_fn_args(fty: t) -> ~[t] {
+pub fn ty_fn_args(fty: t) -> Vec<t> {
match get(fty).sty {
ty_bare_fn(ref f) => f.sig.inputs.clone(),
ty_closure(ref f) => f.sig.inputs.clone(),
}
// Returns a vec of all the input and output types of fty.
-pub fn tys_in_fn_sig(sig: &FnSig) -> ~[t] {
+pub fn tys_in_fn_sig(sig: &FnSig) -> Vec<t> {
- vec::append_one(sig.inputs.map(|a| *a), sig.output)
+ vec_ng::append_one(sig.inputs.map(|a| *a), sig.output)
}
}
pub struct ParamsTy {
- params: ~[t],
+ params: Vec<t>,
ty: t
}
}
pub fn method_call_type_param_defs(tcx: ctxt, origin: typeck::MethodOrigin)
- -> Rc<~[TypeParameterDef]> {
+ -> Rc<Vec<TypeParameterDef> > {
match origin {
typeck::MethodStatic(did) => {
// n.b.: When we encode impl methods, the bounds
// trait itself. This ought to be harmonized.
let trait_type_param_defs =
lookup_trait_def(tcx, trt_id).generics.type_param_defs();
- Rc::new(vec::append(
+ Rc::new(vec_ng::append(
trait_type_param_defs.to_owned(),
ty::trait_method(tcx,
trt_id,
/// Returns a vector containing the indices of all type parameters that appear
/// in `ty`. The vector may contain duplicates. Probably should be converted
/// to a bitset or some other representation.
-pub fn param_tys_in_type(ty: t) -> ~[param_ty] {
- let mut rslt = ~[];
+pub fn param_tys_in_type(ty: t) -> Vec<param_ty> {
+ let mut rslt = Vec::new();
walk_ty(ty, |ty| {
match get(ty).sty {
ty_param(p) => {
pub fn occurs_check(tcx: ctxt, sp: Span, vid: TyVid, rt: t) {
// Returns a vec of all the type variables occurring in `ty`. It may
// contain duplicates. (Integral type vars aren't counted.)
- fn vars_in_type(ty: t) -> ~[TyVid] {
- let mut rslt = ~[];
+ fn vars_in_type(ty: t) -> Vec<TyVid> {
+ let mut rslt = Vec::new();
walk_ty(ty, |ty| {
match get(ty).sty {
ty_infer(TyVar(v)) => rslt.push(v),
provided_method_sources.get().find(&id).map(|x| *x)
}
-pub fn provided_trait_methods(cx: ctxt, id: ast::DefId) -> ~[@Method] {
+pub fn provided_trait_methods(cx: ctxt, id: ast::DefId) -> Vec<@Method> {
if is_local(id) {
{
match cx.map.find(id.node) {
}
}
-pub fn trait_supertraits(cx: ctxt, id: ast::DefId) -> @~[@TraitRef] {
+pub fn trait_supertraits(cx: ctxt, id: ast::DefId) -> @Vec<@TraitRef> {
// Check the cache.
{
let supertraits = cx.supertraits.borrow();
return result;
}
-pub fn trait_ref_supertraits(cx: ctxt, trait_ref: &ty::TraitRef) -> ~[@TraitRef] {
+pub fn trait_ref_supertraits(cx: ctxt, trait_ref: &ty::TraitRef) -> Vec<@TraitRef> {
let supertrait_refs = trait_supertraits(cx, trait_ref.def_id);
supertrait_refs.map(
|supertrait_ref| supertrait_ref.subst(cx, &trait_ref.substs))
}
-pub fn trait_methods(cx: ctxt, trait_did: ast::DefId) -> @~[@Method] {
+pub fn trait_methods(cx: ctxt, trait_did: ast::DefId) -> @Vec<@Method> {
let mut trait_methods_cache = cx.trait_methods_cache.borrow_mut();
match trait_methods_cache.get().find(&trait_did) {
Some(&methods) => methods,
})
}
-pub fn trait_method_def_ids(cx: ctxt, id: ast::DefId) -> @~[DefId] {
+pub fn trait_method_def_ids(cx: ctxt, id: ast::DefId) -> @Vec<DefId> {
let mut trait_method_def_ids = cx.trait_method_def_ids.borrow_mut();
lookup_locally_or_in_crate_store("trait_method_def_ids",
id,
// Enum information
#[deriving(Clone)]
pub struct VariantInfo {
- args: ~[t],
- arg_names: Option<~[ast::Ident]>,
+ args: Vec<t>,
+ arg_names: Option<Vec<ast::Ident> >,
ctor_ty: t,
name: ast::Ident,
id: ast::DefId,
match ast_variant.node.kind {
ast::TupleVariantKind(ref args) => {
- let arg_tys = if args.len() > 0 { ty_fn_args(ctor_ty).map(|a| *a) } else { ~[] };
+ let arg_tys = if args.len() > 0 { ty_fn_args(ctor_ty).map(|a| *a) } else { Vec::new() };
return VariantInfo {
args: arg_tys,
pub fn substd_enum_variants(cx: ctxt,
id: ast::DefId,
substs: &substs)
- -> ~[@VariantInfo] {
+ -> Vec<@VariantInfo> {
enum_variants(cx, id).iter().map(|variant_info| {
let substd_args = variant_info.args.iter()
.map(|aty| subst(cx, substs, *aty)).collect();
}
}
-pub fn enum_variants(cx: ctxt, id: ast::DefId) -> @~[@VariantInfo] {
+pub fn enum_variants(cx: ctxt, id: ast::DefId) -> @Vec<@VariantInfo> {
{
let enum_var_cache = cx.enum_var_cache.borrow();
match enum_var_cache.get().find(&id) {
// Look up the list of field names and IDs for a given struct
// Fails if the id is not bound to a struct.
-pub fn lookup_struct_fields(cx: ctxt, did: ast::DefId) -> ~[field_ty] {
+pub fn lookup_struct_fields(cx: ctxt, did: ast::DefId) -> Vec<field_ty> {
if did.krate == ast::LOCAL_CRATE {
{
match cx.map.find(did.node) {
}
}
-fn struct_field_tys(fields: &[StructField]) -> ~[field_ty] {
+fn struct_field_tys(fields: &[StructField]) -> Vec<field_ty> {
fields.map(|field| {
match field.node.kind {
NamedField(ident, visibility) => {
// Returns a list of fields corresponding to the struct's items. trans uses
// this. Takes a list of substs with which to instantiate field types.
pub fn struct_fields(cx: ctxt, did: ast::DefId, substs: &substs)
- -> ~[field] {
+ -> Vec<field> {
lookup_struct_fields(cx, did).map(|f| {
field {
// FIXME #6993: change type of field to Name and get rid of new()
return tbl[tycat(cx, ty)][opcat(op)];
}
-pub fn ty_params_to_tys(tcx: ty::ctxt, generics: &ast::Generics) -> ~[t] {
+pub fn ty_params_to_tys(tcx: ty::ctxt, generics: &ast::Generics) -> Vec<t> {
vec::from_fn(generics.ty_params.len(), |i| {
let id = generics.ty_params.get(i).id;
ty::mk_param(tcx, i, ast_util::local_def(id))
-> bool {
for &bound_trait_ref in bounds.iter() {
let mut supertrait_set = HashMap::new();
- let mut trait_refs = ~[];
+ let mut trait_refs = Vec::new();
let mut i = 0;
// Seed the worklist with the trait from the bound
let substs = substs {
regions: ty::NonerasedRegions(opt_vec::Empty),
self_ty: None,
- tps: ~[]
+ tps: Vec::new()
};
let trait_ref = @TraitRef { def_id: trait_lang_item, substs: substs };
Ok((trait_ref,
let mut trait_impls = tcx.trait_impls.borrow_mut();
match trait_impls.get().find(&trait_def_id) {
None => {
- implementation_list = @RefCell::new(~[]);
+ implementation_list = @RefCell::new(Vec::new());
trait_impls.get().insert(trait_def_id, implementation_list);
}
Some(&existing_implementation_list) => {
let mut inherent_impls = tcx.inherent_impls.borrow_mut();
match inherent_impls.get().find(&type_id) {
None => {
- implementation_list = @RefCell::new(~[]);
+ implementation_list = @RefCell::new(Vec::new());
inherent_impls.get().insert(type_id, implementation_list);
}
Some(&existing_implementation_list) => {
pub fn empty() -> substs {
substs {
self_ty: None,
- tps: ~[],
+ tps: Vec::new(),
regions: NonerasedRegions(opt_vec::Empty)
}
}
pub fn fold_ty_vec<T:TypeFolder>(this: &mut T,
tys: &[ty::t])
- -> ~[ty::t] {
+ -> Vec<ty::t> {
- tys.map(|t| this.fold_ty(*t))
+ tys.iter().map(|t| this.fold_ty(*t)).collect()
}
let fcx = pcx.fcx;
let tcx = pcx.fcx.ccx.tcx;
- let arg_types: ~[ty::t];
+ let arg_types: Vec<ty::t> ;
let kind_name;
// structure_of requires type variables to be resolved.
span: Span,
path: &ast::Path,
fields: &[ast::FieldPat],
- class_fields: ~[ty::field_ty],
+ class_fields: Vec<ty::field_ty> ,
class_id: ast::DefId,
substitutions: &ty::substs,
etc: bool) {
supplied_def_id,
&ty::substs {
self_ty: None,
- tps: ~[],
+ tps: Vec::new(),
regions: ty::ErasedRegions,
});
}
m_name: m_name,
supplied_tps: supplied_tps,
impl_dups: @RefCell::new(HashSet::new()),
- inherent_candidates: @RefCell::new(~[]),
- extension_candidates: @RefCell::new(~[]),
+ inherent_candidates: @RefCell::new(Vec::new()),
+ extension_candidates: @RefCell::new(Vec::new()),
deref_args: deref_args,
check_traits: check_traits,
autoderef_receiver: autoderef_receiver,
m_name: m_name,
supplied_tps: supplied_tps,
impl_dups: @RefCell::new(HashSet::new()),
- inherent_candidates: @RefCell::new(~[]),
- extension_candidates: @RefCell::new(~[]),
+ inherent_candidates: @RefCell::new(Vec::new()),
+ extension_candidates: @RefCell::new(Vec::new()),
deref_args: check::DoDerefArgs,
check_traits: CheckTraitsOnly,
autoderef_receiver: autoderef_receiver,
m_name: ast::Name,
supplied_tps: &'a [ty::t],
impl_dups: @RefCell<HashSet<DefId>>,
- inherent_candidates: @RefCell<~[Candidate]>,
- extension_candidates: @RefCell<~[Candidate]>,
+ inherent_candidates: @RefCell<Vec<Candidate> >,
+ extension_candidates: @RefCell<Vec<Candidate> >,
deref_args: check::DerefArgs,
check_traits: CheckTraitsFlag,
autoderef_receiver: AutoderefReceiverFlag,
// Candidate collection (see comment at start of file)
fn reset_candidates(&self) {
- self.inherent_candidates.set(~[]);
- self.extension_candidates.set(~[]);
+ self.inherent_candidates.set(Vec::new());
+ self.extension_candidates.set(Vec::new());
}
fn push_inherent_candidates(&self, self_ty: ty::t) {
}
fn push_candidates_from_impl(&self,
- candidates: &mut ~[Candidate],
+ candidates: &mut Vec<Candidate> ,
impl_info: &ty::Impl) {
{
let mut impl_dups = self.impl_dups.borrow_mut();
fn consider_candidates(&self,
rcvr_ty: ty::t,
- candidates: &mut ~[Candidate])
+ candidates: &mut Vec<Candidate> )
-> Option<MethodCallee> {
// FIXME(pcwalton): Do we need to clone here?
- let relevant_candidates: ~[Candidate] =
+ let relevant_candidates: Vec<Candidate> =
candidates.iter().map(|c| (*c).clone()).
filter(|c| self.is_relevant(rcvr_ty, c)).collect();
Some(self.confirm_candidate(rcvr_ty, &relevant_candidates[0]))
}
- fn merge_candidates(&self, candidates: &[Candidate]) -> ~[Candidate] {
- let mut merged = ~[];
+ fn merge_candidates(&self, candidates: &[Candidate]) -> Vec<Candidate> {
+ let mut merged = Vec::new();
let mut i = 0;
while i < candidates.len() {
let candidate_a = &candidates[i];
// Construct the full set of type parameters for the method,
// which is equal to the class tps + the method tps.
let all_substs = substs {
- tps: vec::append(candidate.rcvr_substs.tps.clone(), m_substs),
+ tps: vec_ng::append(candidate.rcvr_substs.tps.clone(), m_substs),
regions: candidate.rcvr_substs.regions.clone(),
self_ty: candidate.rcvr_substs.self_ty,
};
// and statement context, but we might as well do write the code only once
let param_env = ty::ParameterEnvironment { free_substs: substs::empty(),
self_param_bound: None,
- type_param_bounds: ~[] };
+ type_param_bounds: Vec::new() };
@FnCtxt {
err_count_on_creation: ccx.tcx.sess.err_count(),
ret_ty: rty,
}
pub fn check_no_duplicate_fields(tcx: ty::ctxt,
- fields: ~[(ast::Ident, Span)]) {
+ fields: Vec<(ast::Ident, Span)> ) {
let mut field_names = HashMap::new();
for p in fields.iter() {
// Check for missing methods from trait
let provided_methods = ty::provided_trait_methods(tcx,
impl_trait_ref.def_id);
- let mut missing_methods = ~[];
+ let mut missing_methods = Vec::new();
for trait_method in trait_methods.iter() {
let is_implemented =
impl_methods.iter().any(
// in the self type with free regions. So, for example, if the
// impl type is "&'a str", then this would replace the self
// type with a free region `self`.
- let dummy_impl_tps: ~[ty::t] =
+ let dummy_impl_tps: Vec<ty::t> =
impl_generics.type_param_defs().iter().enumerate().
map(|(i,t)| ty::mk_param(tcx, i, t.def_id)).
collect();
- let dummy_method_tps: ~[ty::t] =
+ let dummy_method_tps: Vec<ty::t> =
impl_m.generics.type_param_defs().iter().enumerate().
map(|(i,t)| ty::mk_param(tcx, i + impl_tps, t.def_id)).
collect();
bound_region: ty::BrNamed(l.def_id, l.ident)})).
collect();
let dummy_substs = ty::substs {
- tps: vec::append(dummy_impl_tps, dummy_method_tps),
+ tps: vec_ng::append(dummy_impl_tps, dummy_method_tps),
regions: ty::NonerasedRegions(dummy_impl_regions),
self_ty: None };
self_ty: self_ty } = trait_substs.subst(tcx, &dummy_substs);
let substs = substs {
regions: trait_regions,
- tps: vec::append(trait_tps, dummy_method_tps),
+ tps: vec_ng::append(trait_tps, dummy_method_tps),
self_ty: self_ty,
};
debug!("trait_fty (pre-subst): {} substs={}",
impl RegionScope for infer::InferCtxt {
fn anon_regions(&self, span: Span, count: uint)
- -> Result<~[ty::Region], ()> {
+ -> Result<Vec<ty::Region> , ()> {
Ok(vec::from_fn(count, |_| {
self.next_region_var(infer::MiscVariable(span))
}))
* so that trans/borrowck/etc know about this autoderef. */
let mut t1 = t;
- let mut enum_dids = ~[];
+ let mut enum_dids = Vec::new();
let mut autoderefs = 0;
loop {
let sty = structure_of(fcx, sp, t1);
}
}
- fn err_args(len: uint) -> ~[ty::t] {
+ fn err_args(len: uint) -> Vec<ty::t> {
- vec::from_fn(len, |_| ty::mk_err())
+ Vec::from_fn(len, |_| ty::mk_err())
}
_ => ()
}
- let tps: ~[ty::t] = tys.iter().map(|&ty| fcx.to_ty(ty)).collect();
+ let tps: Vec<ty::t> = tys.iter().map(|&ty| fcx.to_ty(ty)).collect();
match method::lookup(fcx,
expr,
base,
// Make sure the programmer specified all the fields.
assert!(fields_found <= field_types.len());
if fields_found < field_types.len() {
- let mut missing_fields = ~[];
+ let mut missing_fields = Vec::new();
for class_field in field_types.iter() {
let name = class_field.name;
let (_, seen) = *class_field_map.get(&name);
gc_struct_id,
substs {
self_ty: None,
- tps: ~[
+ tps: vec!(
fcx.expr_ty(
subexpr)
- ],
+ ),
regions: regions,
});
fcx.write_ty(id, sty);
vs: &[ast::P<ast::Variant>],
id: ast::NodeId,
hint: attr::ReprAttr)
- -> ~[@ty::VariantInfo] {
+ -> Vec<@ty::VariantInfo> {
let rty = ty::node_id_to_type(ccx.tcx, id);
- let mut variants: ~[@ty::VariantInfo] = ~[];
- let mut disr_vals: ~[ty::Disr] = ~[];
+ let mut variants: Vec<@ty::VariantInfo> = Vec::new();
+ let mut disr_vals: Vec<ty::Disr> = Vec::new();
let mut prev_disr_val: Option<ty::Disr> = None;
for &v in vs.iter() {
// Build up the list of type parameters, inserting the self parameter
// at the appropriate position.
- let mut tps = ~[];
+ let mut tps = Vec::new();
let mut pushed = false;
for (i, ty) in pth.segments.iter()
.flat_map(|segment| segment.types.iter())
let tcx = ccx.tcx;
let name = token::get_ident(it.ident);
let (n_tps, inputs, output) = if name.get().starts_with("atomic_") {
- let split : ~[&str] = name.get().split('_').collect();
+ let split : Vec<&str> = name.get().split('_').collect();
assert!(split.len() >= 2, "Atomic intrinsic not correct format");
//We only care about the operation here
match split[1] {
- "cxchg" => (1, ~[ty::mk_mut_rptr(tcx,
+ "cxchg" => (1, vec!(ty::mk_mut_rptr(tcx,
ty::ReLateBound(it.id, ty::BrAnon(0)),
param(ccx, 0)),
param(ccx, 0),
- param(ccx, 0),
- ], param(ccx, 0)),
+ param(ccx, 0)), param(ccx, 0)),
"load" => (1,
- ~[
+ vec!(
ty::mk_imm_rptr(tcx, ty::ReLateBound(it.id, ty::BrAnon(0)),
param(ccx, 0))
- ],
+ ),
param(ccx, 0)),
"store" => (1,
- ~[
+ vec!(
ty::mk_mut_rptr(tcx, ty::ReLateBound(it.id, ty::BrAnon(0)),
param(ccx, 0)),
param(ccx, 0)
- ],
+ ),
ty::mk_nil()),
"xchg" | "xadd" | "xsub" | "and" | "nand" | "or" | "xor" | "max" |
"min" | "umax" | "umin" => {
- (1, ~[ty::mk_mut_rptr(tcx,
+ (1, vec!(ty::mk_mut_rptr(tcx,
ty::ReLateBound(it.id, ty::BrAnon(0)),
- param(ccx, 0)), param(ccx, 0) ],
+ param(ccx, 0)), param(ccx, 0) ),
param(ccx, 0))
}
"fence" => {
- (0, ~[], ty::mk_nil())
+ (0, Vec::new(), ty::mk_nil())
}
op => {
tcx.sess.span_err(it.span,
} else {
match name.get() {
- "abort" => (0, ~[], ty::mk_bot()),
- "breakpoint" => (0, ~[], ty::mk_nil()),
+ "abort" => (0, Vec::new(), ty::mk_bot()),
+ "breakpoint" => (0, Vec::new(), ty::mk_nil()),
"size_of" |
- "pref_align_of" | "min_align_of" => (1u, ~[], ty::mk_uint()),
- "init" => (1u, ~[], param(ccx, 0u)),
- "uninit" => (1u, ~[], param(ccx, 0u)),
- "forget" => (1u, ~[ param(ccx, 0) ], ty::mk_nil()),
- "transmute" => (2, ~[ param(ccx, 0) ], param(ccx, 1)),
+ "pref_align_of" | "min_align_of" => (1u, Vec::new(), ty::mk_uint()),
+ "init" => (1u, Vec::new(), param(ccx, 0u)),
+ "uninit" => (1u, Vec::new(), param(ccx, 0u)),
+ "forget" => (1u, vec!( param(ccx, 0) ), ty::mk_nil()),
+ "transmute" => (2, vec!( param(ccx, 0) ), param(ccx, 1)),
"move_val_init" => {
(1u,
- ~[
+ vec!(
ty::mk_mut_rptr(tcx, ty::ReLateBound(it.id, ty::BrAnon(0)), param(ccx, 0)),
param(ccx, 0u)
- ],
+ ),
ty::mk_nil())
}
- "needs_drop" => (1u, ~[], ty::mk_bool()),
- "owns_managed" => (1u, ~[], ty::mk_bool()),
+ "needs_drop" => (1u, Vec::new(), ty::mk_bool()),
+ "owns_managed" => (1u, Vec::new(), ty::mk_bool()),
"get_tydesc" => {
let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) {
ty: tydesc_ty,
mutbl: ast::MutImmutable
});
- (1u, ~[], td_ptr)
+ (1u, Vec::new(), td_ptr)
}
"type_id" => {
let langid = ccx.tcx.lang_items.require(TypeIdLangItem);
match langid {
- Ok(did) => (1u, ~[], ty::mk_struct(ccx.tcx, did, substs {
+ Ok(did) => (1u, Vec::new(), ty::mk_struct(ccx.tcx, did, substs {
self_ty: None,
- tps: ~[],
+ tps: Vec::new(),
regions: ty::NonerasedRegions(opt_vec::Empty)
}) ),
Err(msg) => { tcx.sess.span_fatal(it.span, msg); }
ty: tydesc_ty,
mutbl: ast::MutImmutable
});
- (0, ~[ td_ptr, visitor_object_ty ], ty::mk_nil())
+ (0, vec!( td_ptr, visitor_object_ty ), ty::mk_nil())
}
"offset" => {
(1,
- ~[
+ vec!(
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutImmutable
}),
ty::mk_int()
- ],
+ ),
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutImmutable
}
"copy_nonoverlapping_memory" => {
(1,
- ~[
+ vec!(
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutMutable
mutbl: ast::MutImmutable
}),
ty::mk_uint()
- ],
+ ),
ty::mk_nil())
}
"copy_memory" => {
(1,
- ~[
+ vec!(
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutMutable
mutbl: ast::MutImmutable
}),
ty::mk_uint()
- ],
+ ),
ty::mk_nil())
}
"set_memory" => {
(1,
- ~[
+ vec!(
ty::mk_ptr(tcx, ty::mt {
ty: param(ccx, 0),
mutbl: ast::MutMutable
}),
ty::mk_u8(),
ty::mk_uint()
- ],
+ ),
ty::mk_nil())
}
- "sqrtf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
- "sqrtf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
+ "sqrtf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
+ "sqrtf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
"powif32" => {
(0,
- ~[ ty::mk_f32(), ty::mk_i32() ],
+ vec!( ty::mk_f32(), ty::mk_i32() ),
ty::mk_f32())
}
"powif64" => {
(0,
- ~[ ty::mk_f64(), ty::mk_i32() ],
+ vec!( ty::mk_f64(), ty::mk_i32() ),
ty::mk_f64())
}
- "sinf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
- "sinf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
- "cosf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
- "cosf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
+ "sinf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
+ "sinf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
+ "cosf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
+ "cosf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
"powf32" => {
(0,
- ~[ ty::mk_f32(), ty::mk_f32() ],
+ vec!( ty::mk_f32(), ty::mk_f32() ),
ty::mk_f32())
}
"powf64" => {
(0,
- ~[ ty::mk_f64(), ty::mk_f64() ],
+ vec!( ty::mk_f64(), ty::mk_f64() ),
ty::mk_f64())
}
- "expf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
- "expf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
- "exp2f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
- "exp2f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
- "logf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
- "logf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
- "log10f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
- "log10f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
- "log2f32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
- "log2f64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
+ "expf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
+ "expf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
+ "exp2f32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
+ "exp2f64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
+ "logf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
+ "logf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
+ "log10f32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
+ "log10f64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
+ "log2f32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
+ "log2f64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
"fmaf32" => {
(0,
- ~[ ty::mk_f32(), ty::mk_f32(), ty::mk_f32() ],
+ vec!( ty::mk_f32(), ty::mk_f32(), ty::mk_f32() ),
ty::mk_f32())
}
"fmaf64" => {
(0,
- ~[ ty::mk_f64(), ty::mk_f64(), ty::mk_f64() ],
+ vec!( ty::mk_f64(), ty::mk_f64(), ty::mk_f64() ),
ty::mk_f64())
}
- "fabsf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
- "fabsf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
- "copysignf32" => (0, ~[ ty::mk_f32(), ty::mk_f32() ], ty::mk_f32()),
- "copysignf64" => (0, ~[ ty::mk_f64(), ty::mk_f64() ], ty::mk_f64()),
- "floorf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
- "floorf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
- "ceilf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
- "ceilf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
- "truncf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
- "truncf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
- "rintf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
- "rintf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
- "nearbyintf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
- "nearbyintf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
- "roundf32" => (0, ~[ ty::mk_f32() ], ty::mk_f32()),
- "roundf64" => (0, ~[ ty::mk_f64() ], ty::mk_f64()),
- "ctpop8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()),
- "ctpop16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
- "ctpop32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
- "ctpop64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
- "ctlz8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()),
- "ctlz16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
- "ctlz32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
- "ctlz64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
- "cttz8" => (0, ~[ ty::mk_i8() ], ty::mk_i8()),
- "cttz16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
- "cttz32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
- "cttz64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
- "bswap16" => (0, ~[ ty::mk_i16() ], ty::mk_i16()),
- "bswap32" => (0, ~[ ty::mk_i32() ], ty::mk_i32()),
- "bswap64" => (0, ~[ ty::mk_i64() ], ty::mk_i64()),
+ "fabsf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
+ "fabsf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
+ "copysignf32" => (0, vec!( ty::mk_f32(), ty::mk_f32() ), ty::mk_f32()),
+ "copysignf64" => (0, vec!( ty::mk_f64(), ty::mk_f64() ), ty::mk_f64()),
+ "floorf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
+ "floorf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
+ "ceilf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
+ "ceilf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
+ "truncf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
+ "truncf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
+ "rintf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
+ "rintf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
+ "nearbyintf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
+ "nearbyintf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
+ "roundf32" => (0, vec!( ty::mk_f32() ), ty::mk_f32()),
+ "roundf64" => (0, vec!( ty::mk_f64() ), ty::mk_f64()),
+ "ctpop8" => (0, vec!( ty::mk_i8() ), ty::mk_i8()),
+ "ctpop16" => (0, vec!( ty::mk_i16() ), ty::mk_i16()),
+ "ctpop32" => (0, vec!( ty::mk_i32() ), ty::mk_i32()),
+ "ctpop64" => (0, vec!( ty::mk_i64() ), ty::mk_i64()),
+ "ctlz8" => (0, vec!( ty::mk_i8() ), ty::mk_i8()),
+ "ctlz16" => (0, vec!( ty::mk_i16() ), ty::mk_i16()),
+ "ctlz32" => (0, vec!( ty::mk_i32() ), ty::mk_i32()),
+ "ctlz64" => (0, vec!( ty::mk_i64() ), ty::mk_i64()),
+ "cttz8" => (0, vec!( ty::mk_i8() ), ty::mk_i8()),
+ "cttz16" => (0, vec!( ty::mk_i16() ), ty::mk_i16()),
+ "cttz32" => (0, vec!( ty::mk_i32() ), ty::mk_i32()),
+ "cttz64" => (0, vec!( ty::mk_i64() ), ty::mk_i64()),
+ "bswap16" => (0, vec!( ty::mk_i16() ), ty::mk_i16()),
+ "bswap32" => (0, vec!( ty::mk_i32() ), ty::mk_i32()),
+ "bswap64" => (0, vec!( ty::mk_i64() ), ty::mk_i64()),
"volatile_load" =>
- (1, ~[ ty::mk_imm_ptr(tcx, param(ccx, 0)) ], param(ccx, 0)),
+ (1, vec!( ty::mk_imm_ptr(tcx, param(ccx, 0)) ), param(ccx, 0)),
"volatile_store" =>
- (1, ~[ ty::mk_mut_ptr(tcx, param(ccx, 0)), param(ccx, 0) ], ty::mk_nil()),
+ (1, vec!( ty::mk_mut_ptr(tcx, param(ccx, 0)), param(ccx, 0) ), ty::mk_nil()),
"i8_add_with_overflow" | "i8_sub_with_overflow" | "i8_mul_with_overflow" =>
- (0, ~[ty::mk_i8(), ty::mk_i8()],
- ty::mk_tup(tcx, ~[ty::mk_i8(), ty::mk_bool()])),
+ (0, vec!(ty::mk_i8(), ty::mk_i8()),
+ ty::mk_tup(tcx, vec!(ty::mk_i8(), ty::mk_bool()))),
"i16_add_with_overflow" | "i16_sub_with_overflow" | "i16_mul_with_overflow" =>
- (0, ~[ty::mk_i16(), ty::mk_i16()],
- ty::mk_tup(tcx, ~[ty::mk_i16(), ty::mk_bool()])),
+ (0, vec!(ty::mk_i16(), ty::mk_i16()),
+ ty::mk_tup(tcx, vec!(ty::mk_i16(), ty::mk_bool()))),
"i32_add_with_overflow" | "i32_sub_with_overflow" | "i32_mul_with_overflow" =>
- (0, ~[ty::mk_i32(), ty::mk_i32()],
- ty::mk_tup(tcx, ~[ty::mk_i32(), ty::mk_bool()])),
+ (0, vec!(ty::mk_i32(), ty::mk_i32()),
+ ty::mk_tup(tcx, vec!(ty::mk_i32(), ty::mk_bool()))),
"i64_add_with_overflow" | "i64_sub_with_overflow" | "i64_mul_with_overflow" =>
- (0, ~[ty::mk_i64(), ty::mk_i64()],
- ty::mk_tup(tcx, ~[ty::mk_i64(), ty::mk_bool()])),
+ (0, vec!(ty::mk_i64(), ty::mk_i64()),
+ ty::mk_tup(tcx, vec!(ty::mk_i64(), ty::mk_bool()))),
"u8_add_with_overflow" | "u8_sub_with_overflow" | "u8_mul_with_overflow" =>
- (0, ~[ty::mk_u8(), ty::mk_u8()],
- ty::mk_tup(tcx, ~[ty::mk_u8(), ty::mk_bool()])),
+ (0, vec!(ty::mk_u8(), ty::mk_u8()),
+ ty::mk_tup(tcx, vec!(ty::mk_u8(), ty::mk_bool()))),
"u16_add_with_overflow" | "u16_sub_with_overflow" | "u16_mul_with_overflow" =>
- (0, ~[ty::mk_u16(), ty::mk_u16()],
- ty::mk_tup(tcx, ~[ty::mk_u16(), ty::mk_bool()])),
+ (0, vec!(ty::mk_u16(), ty::mk_u16()),
+ ty::mk_tup(tcx, vec!(ty::mk_u16(), ty::mk_bool()))),
"u32_add_with_overflow" | "u32_sub_with_overflow" | "u32_mul_with_overflow"=>
- (0, ~[ty::mk_u32(), ty::mk_u32()],
- ty::mk_tup(tcx, ~[ty::mk_u32(), ty::mk_bool()])),
+ (0, vec!(ty::mk_u32(), ty::mk_u32()),
+ ty::mk_tup(tcx, vec!(ty::mk_u32(), ty::mk_bool()))),
"u64_add_with_overflow" | "u64_sub_with_overflow" | "u64_mul_with_overflow" =>
- (0, ~[ty::mk_u64(), ty::mk_u64()],
- ty::mk_tup(tcx, ~[ty::mk_u64(), ty::mk_bool()])),
+ (0, vec!(ty::mk_u64(), ty::mk_u64()),
+ ty::mk_tup(tcx, vec!(ty::mk_u64(), ty::mk_bool()))),
ref other => {
tcx.sess.span_err(it.span,
*/
let mut rr = RegionRelator { tcx: tcx,
- stack: ~[],
+ stack: Vec::new(),
relate_op: relate_op };
match opt_region {
Some(o_r) => { rr.stack.push(o_r); }
struct RegionRelator<'a> {
tcx: ty::ctxt,
- stack: ~[ty::Region],
+ stack: Vec<ty::Region> ,
relate_op: 'a |ty::Region, ty::Region|,
}
debug!("relate_free_regions >>");
- let mut all_tys = ~[];
+ let mut all_tys = Vec::new();
for arg in fn_sig.inputs.iter() {
all_tys.push(*arg);
}
let tcx = vcx.tcx();
// ty is the value supplied for the type parameter A...
- let mut param_result = ~[];
+ let mut param_result = Vec::new();
ty::each_bound_trait_and_supertraits(tcx, type_param_bounds.trait_bounds, |trait_ref| {
// ...and here trait_ref is each bound that was declared on A,
-> Option<vtable_origin> {
let tcx = vcx.tcx();
- let mut found = ~[];
+ let mut found = Vec::new();
let mut impls_seen = HashSet::new();
// Load the implementations from external metadata if necessary.
let trait_impls = tcx.trait_impls.borrow();
trait_impls.get()
.find(&trait_ref.def_id)
- .map_or(@RefCell::new(~[]), |x| *x)
+ .map_or(@RefCell::new(Vec::new()), |x| *x)
};
// impls is the list of all impls in scope for trait_ref.
let impls = impls.borrow();
let param_bounds = ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(),
- trait_bounds: ~[target_trait_ref]
+ trait_bounds: vec!(target_trait_ref)
};
let vtables =
lookup_vtables_for_param(&vcx,
is_early);
if !is_early {
- insert_vtables(fcx, ex.id, @~[vtables]);
+ insert_vtables(fcx, ex.id, @vec!(vtables));
}
// Now, if this is &trait, we need to link the
// but that falls out of doing this.
let param_bounds = ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(),
- trait_bounds: ~[impl_trait_ref]
+ trait_bounds: vec!(impl_trait_ref)
};
let t = ty::node_id_to_type(tcx, impl_item.id);
let t = t.subst(tcx, ¶m_env.free_substs);
}
fn resolve_type_vars_in_types(fcx: @FnCtxt, sp: Span, tys: &[ty::t])
- -> ~[ty::t] {
+ -> Vec<ty::t> {
tys.map(|t| {
match resolve_type_vars_in_type(fcx, sp, *t) {
Some(t1) => t1,
return;
}
};
- let mut new_tps = ~[];
+ let mut new_tps = Vec::new();
for &subst in method.substs.tps.iter() {
match resolve_type_vars_in_type(fcx, sp, subst) {
Some(t) => new_tps.push(t),
write_ty_to_tcx(tcx, id, t);
let mut ret = Some(t);
fcx.opt_node_ty_substs(id, |substs| {
- let mut new_tps = ~[];
+ let mut new_tps = Vec::new();
for subst in substs.tps.iter() {
match resolve_type_vars_in_type(fcx, sp, *subst) {
Some(t) => new_tps.push(t),
struct UniversalQuantificationResult {
monotype: t,
- type_variables: ~[ty::t],
- type_param_defs: Rc<~[ty::TypeParameterDef]>
+ type_variables: Vec<ty::t> ,
+ type_param_defs: Rc<Vec<ty::TypeParameterDef> >
}
fn get_base_type(inference_context: &InferCtxt,
// `ProvidedMethodInfo` instance into the `provided_method_sources` map.
fn instantiate_default_methods(&self, impl_id: ast::DefId,
trait_ref: &ty::TraitRef,
- all_methods: &mut ~[@Method]) {
+ all_methods: &mut Vec<@Method> ) {
let tcx = self.crate_context.tcx;
debug!("instantiate_default_methods(impl_id={:?}, trait_ref={})",
impl_id, trait_ref.repr(tcx));
// construct the polytype for the method based on the method_ty
let new_generics = ty::Generics {
type_param_defs:
- Rc::new(vec::append(
+ Rc::new(vec_ng::append(
impl_poly_type.generics.type_param_defs().to_owned(),
new_method_ty.generics.type_param_defs())),
region_param_defs:
let mut inherent_impls = tcx.inherent_impls.borrow_mut();
match inherent_impls.get().find(&base_def_id) {
None => {
- implementation_list = @RefCell::new(~[]);
+ implementation_list = @RefCell::new(Vec::new());
inherent_impls.get().insert(base_def_id, implementation_list);
}
Some(&existing_implementation_list) => {
let mut trait_impls = tcx.trait_impls.borrow_mut();
match trait_impls.get().find(&base_def_id) {
None => {
- implementation_list = @RefCell::new(~[]);
+ implementation_list = @RefCell::new(Vec::new());
trait_impls.get().insert(base_def_id, implementation_list);
}
Some(&existing_implementation_list) => {
let tcx = self.crate_context.tcx;
match item.node {
ItemImpl(_, ref trait_refs, _, ref ast_methods) => {
- let mut methods = ~[];
+ let mut methods = Vec::new();
for ast_method in ast_methods.iter() {
methods.push(ty::method(tcx, local_def(ast_method.id)));
}
// the substitution to any traits that appear in their bounds.
// add in the type parameters from the trait
- let mut new_type_param_defs = ~[];
+ let mut new_type_param_defs = Vec::new();
let substd_type_param_defs =
trait_ty_generics.type_param_defs.subst(tcx, &substs);
new_type_param_defs.push_all(*substd_type_param_defs.borrow());
def_id: dummy_defid,
bounds: @ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(),
- trait_bounds: ~[self_trait_ref]
+ trait_bounds: vec!(self_trait_ref)
},
default: None
});
}
let self_ty = ty::mk_self(ccx.tcx, local_def(id));
- let mut ty_trait_refs: ~[@ty::TraitRef] = ~[];
+ let mut ty_trait_refs: Vec<@ty::TraitRef> = Vec::new();
let mut bounds = ty::EmptyBuiltinBounds();
for ast_trait_ref in ast_trait_refs.iter() {
let trait_def_id = ty::trait_ref_to_def_id(ccx.tcx, ast_trait_ref);
// itself
ty_param_bounds_and_ty {
generics: ty::Generics {
- type_param_defs: Rc::new(vec::append(
+ type_param_defs: Rc::new(vec_ng::append(
rcvr_ty_generics.type_param_defs().to_owned(),
m_ty_generics.type_param_defs())),
region_param_defs: rcvr_ty_generics.region_param_defs.clone(),
let tpt = ty_param_bounds_and_ty {
generics: ty::Generics {
type_param_defs: ty_generics.type_param_defs.clone(),
- region_param_defs: Rc::new(~[]),
+ region_param_defs: Rc::new(Vec::new()),
},
ty: ty::mk_bare_fn(ccx.tcx, tofd)
};
ast::ForeignItemStatic(t, _) => {
ty::ty_param_bounds_and_ty {
generics: ty::Generics {
- type_param_defs: Rc::new(~[]),
- region_param_defs: Rc::new(~[]),
+ type_param_defs: Rc::new(Vec::new()),
+ region_param_defs: Rc::new(Vec::new()),
},
ty: ast_ty_to_ty(ccx, &ExplicitRscope, t)
}
let mut param_bounds = ty::ParamBounds {
builtin_bounds: ty::EmptyBuiltinBounds(),
- trait_bounds: ~[]
+ trait_bounds: Vec::new()
};
for ast_bound in ast_bounds.iter() {
match *ast_bound {
ty_generics: &ty::Generics,
self_ty: Option<ty::t>) -> ty::substs
{
- let params: ~[ty::t] =
+ let params: Vec<ty::t> =
ty_generics.type_param_defs().iter().enumerate().map(
|(i, t)| ty::mk_param(ccx.tcx, i, t.def_id)).collect();
fn contratys(&self, a: ty::t, b: ty::t) -> cres<ty::t>;
fn tys(&self, a: ty::t, b: ty::t) -> cres<ty::t>;
- fn tps(&self, as_: &[ty::t], bs: &[ty::t]) -> cres<~[ty::t]> {
+ fn tps(&self, as_: &[ty::t], bs: &[ty::t]) -> cres<Vec<ty::t> > {
// Note: type parameters are always treated as *invariant*
// (otherwise the type system would be unsound). In the
pub fn super_fn_sigs<C:Combine>(this: &C, a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
- fn argvecs<C:Combine>(this: &C, a_args: &[ty::t], b_args: &[ty::t]) -> cres<~[ty::t]> {
+ fn argvecs<C:Combine>(this: &C, a_args: &[ty::t], b_args: &[ty::t]) -> cres<Vec<ty::t> > {
if a_args.len() == b_args.len() {
result::collect(a_args.iter().zip(b_args.iter())
.map(|(a, b)| this.args(*a, *b)))
pub fn var_ids<T:Combine>(this: &T,
map: &HashMap<ty::BoundRegion, ty::Region>)
- -> ~[RegionVid] {
+ -> Vec<RegionVid> {
map.iter().map(|(_, r)| match *r {
ty::ReInfer(ty::ReVar(r)) => { r }
r => {
fn new_ValsAndBindings<V:Clone,T:Clone>() -> ValsAndBindings<V, T> {
ValsAndBindings {
vals: SmallIntMap::new(),
- bindings: ~[]
+ bindings: Vec::new()
}
}
ty::mk_var(self.tcx, self.next_ty_var_id())
}
- pub fn next_ty_vars(&self, n: uint) -> ~[ty::t] {
+ pub fn next_ty_vars(&self, n: uint) -> Vec<ty::t> {
- vec::from_fn(n, |_i| self.next_ty_var())
+ Vec::from_fn(n, |_i| self.next_ty_var())
}
pub fn next_region_vars(&self,
origin: RegionVariableOrigin,
count: uint)
- -> ~[ty::Region] {
+ -> Vec<ty::Region> {
- vec::from_fn(count, |_| self.next_region_var(origin))
+ Vec::from_fn(count, |_| self.next_region_var(origin))
}
pub struct RegionVarBindings {
tcx: ty::ctxt,
- var_origins: RefCell<~[RegionVariableOrigin]>,
+ var_origins: RefCell<Vec<RegionVariableOrigin> >,
constraints: RefCell<HashMap<Constraint, SubregionOrigin>>,
lubs: RefCell<CombineMap>,
glbs: RefCell<CombineMap>,
// actively snapshotting. The reason for this is that otherwise
// we end up adding entries for things like the lower bound on
// a variable and so forth, which can never be rolled back.
- undo_log: RefCell<~[UndoLogEntry]>,
+ undo_log: RefCell<Vec<UndoLogEntry> >,
// This contains the results of inference. It begins as an empty
// option and only acquires a value after inference is complete.
- values: RefCell<Option<~[VarValue]>>,
+ values: RefCell<Option<Vec<VarValue> >>,
}
pub fn RegionVarBindings(tcx: ty::ctxt) -> RegionVarBindings {
RegionVarBindings {
tcx: tcx,
- var_origins: RefCell::new(~[]),
+ var_origins: RefCell::new(Vec::new()),
values: RefCell::new(None),
constraints: RefCell::new(HashMap::new()),
lubs: RefCell::new(HashMap::new()),
glbs: RefCell::new(HashMap::new()),
skolemization_count: Cell::new(0),
bound_count: Cell::new(0),
- undo_log: RefCell::new(~[])
+ undo_log: RefCell::new(Vec::new())
}
}
}
pub fn vars_created_since_snapshot(&self, snapshot: uint)
- -> ~[RegionVid] {
+ -> Vec<RegionVid> {
let undo_log = self.undo_log.borrow();
undo_log.get().slice_from(snapshot).iter()
.filter_map(|&elt| match elt {
.collect()
}
- pub fn tainted(&self, snapshot: uint, r0: Region) -> ~[Region] {
+ pub fn tainted(&self, snapshot: uint, r0: Region) -> Vec<Region> {
/*!
* Computes all regions that have been related to `r0` in any
* way since the snapshot `snapshot` was taken---`r0` itself
// `result_set` acts as a worklist: we explore all outgoing
// edges and add any new regions we find to result_set. This
// is not a terribly efficient implementation.
- let mut result_set = ~[r0];
+ let mut result_set = vec!(r0);
let mut result_index = 0;
while result_index < result_set.len() {
// nb: can't use uint::range() here because result_set grows
return result_set;
- fn consider_adding_edge(result_set: ~[Region],
+ fn consider_adding_edge(result_set: Vec<Region> ,
r: Region,
r1: Region,
- r2: Region) -> ~[Region]
- {
+ r2: Region) -> Vec<Region> {
let mut result_set = result_set;
if r == r1 { // Clearly, this is potentially inefficient.
if !result_set.iter().any(|x| *x == r2) {
impl RegionVarBindings {
fn infer_variable_values(&self,
errors: &mut OptVec<RegionResolutionError>)
- -> ~[VarValue] {
+ -> Vec<VarValue> {
let mut var_data = self.construct_var_data();
self.expansion(var_data);
self.contraction(var_data);
self.extract_values_and_collect_conflicts(var_data, errors)
}
- fn construct_var_data(&self) -> ~[VarData] {
+ fn construct_var_data(&self) -> Vec<VarData> {
- vec::from_fn(self.num_vars(), |_| {
+ Vec::from_fn(self.num_vars(), |_| {
VarData {
// All nodes are initially classified as contracting; during
&self,
var_data: &[VarData],
errors: &mut OptVec<RegionResolutionError>)
- -> ~[VarValue]
- {
+ -> Vec<VarValue> {
debug!("extract_values_and_collect_conflicts()");
// This is the best way that I have found to suppress
orig_node_idx: RegionVid,
dir: Direction,
dup_vec: &mut [uint])
- -> (~[RegionAndOrigin], bool) {
+ -> (Vec<RegionAndOrigin> , bool) {
struct WalkState {
set: HashSet<RegionVid>,
- stack: ~[RegionVid],
- result: ~[RegionAndOrigin],
+ stack: Vec<RegionVid> ,
+ result: Vec<RegionAndOrigin> ,
dup_found: bool
}
let mut state = WalkState {
set: HashSet::new(),
- stack: ~[orig_node_idx],
- result: ~[],
+ stack: vec!(orig_node_idx),
+ result: Vec::new(),
dup_found: false
};
state.set.insert(orig_node_idx);
infcx: &'a InferCtxt,
modes: uint,
err: Option<fixup_err>,
- v_seen: ~[TyVid],
+ v_seen: Vec<TyVid> ,
type_depth: uint
}
infcx: infcx,
modes: modes,
err: None,
- v_seen: ~[],
+ v_seen: Vec::new(),
type_depth: 0
}
}
fn setup_env(test_name: &str, source_string: &str) -> Env {
let messages = @DVec();
- let matches = getopts(~[~"-Z", ~"verbose"], optgroups()).get();
+ let matches = getopts(vec!(~"-Z", ~"verbose"), optgroups()).get();
let diag = diagnostic::collect(messages);
let sessopts = build_session_options(~"rustc", &matches, diag);
let sess = build_session(sessopts, None, diag);
proto: ast::ProtoBare,
onceness: ast::Many,
region: ty::ReStatic,
- bounds: @~[]},
+ bounds: @Vec::new()},
sig: FnSig {
inputs: inputs,
output: output_ty,
pub struct ValsAndBindings<V, T> {
vals: SmallIntMap<VarValue<V, T>>,
- bindings: ~[(V, VarValue<V, T>)],
+ bindings: Vec<(V, VarValue<V, T>)> ,
}
pub struct Node<V, T> {
// of the method to be invoked
pub type MethodMap = @RefCell<NodeMap<MethodCallee>>;
-pub type vtable_param_res = @~[vtable_origin];
+pub type vtable_param_res = @Vec<vtable_origin> ;
// Resolutions for bounds of all parameters, left to right, for a given path.
-pub type vtable_res = @~[vtable_param_res];
+pub type vtable_res = @Vec<vtable_param_res> ;
#[deriving(Clone)]
pub enum vtable_origin {
from whence comes the vtable, and tys are the type substs.
vtable_res is the vtable itself
*/
- vtable_static(ast::DefId, ~[ty::t], vtable_res),
+ vtable_static(ast::DefId, Vec<ty::t> , vtable_res),
/*
Dynamic vtable, comes from a parameter that has a bound on it:
}
pub fn write_substs_to_tcx(tcx: ty::ctxt,
node_id: ast::NodeId,
- substs: ~[ty::t]) {
+ substs: Vec<ty::t> ) {
if substs.len() > 0u {
debug!("write_substs_to_tcx({}, {:?})", node_id,
substs.map(|t| ppaux::ty_to_str(tcx, *t)));
pub fn no_params(t: ty::t) -> ty::ty_param_bounds_and_ty {
ty::ty_param_bounds_and_ty {
- generics: ty::Generics {type_param_defs: Rc::new(~[]),
- region_param_defs: Rc::new(~[])},
+ generics: ty::Generics {type_param_defs: Rc::new(Vec::new()),
+ region_param_defs: Rc::new(Vec::new())},
ty: t
}
}
abis: abi::AbiSet::Rust(),
sig: ty::FnSig {
binder_id: main_id,
- inputs: ~[],
+ inputs: Vec::new(),
output: ty::mk_nil(),
variadic: false
}
abis: abi::AbiSet::Rust(),
sig: ty::FnSig {
binder_id: start_id,
- inputs: ~[
+ inputs: vec!(
ty::mk_int(),
ty::mk_imm_ptr(tcx, ty::mk_imm_ptr(tcx, ty::mk_u8()))
- ],
+ ),
output: ty::mk_int(),
variadic: false
}
fn anon_regions(&self,
span: Span,
count: uint)
- -> Result<~[ty::Region], ()>;
+ -> Result<Vec<ty::Region> , ()>;
}
// A scope in which all regions must be explicitly named
fn anon_regions(&self,
_span: Span,
_count: uint)
- -> Result<~[ty::Region], ()> {
+ -> Result<Vec<ty::Region> , ()> {
Err(())
}
}
fn anon_regions(&self,
_: Span,
count: uint)
- -> Result<~[ty::Region], ()> {
+ -> Result<Vec<ty::Region> , ()> {
let idx = self.anon_bindings.get();
self.anon_bindings.set(idx + count);
- Ok(vec::from_fn(count, |i| ty::ReLateBound(self.binder_id,
+ Ok(Vec::from_fn(count, |i| ty::ReLateBound(self.binder_id,
inferred_map: HashMap<ast::NodeId, InferredIndex>,
// Maps from an InferredIndex to the info for that variable.
- inferred_infos: ~[InferredInfo<'a>],
+ inferred_infos: Vec<InferredInfo<'a>> ,
}
enum ParamKind { TypeParam, RegionParam, SelfParam }
tcx: tcx,
arena: arena,
inferred_map: HashMap::new(),
- inferred_infos: ~[],
+ inferred_infos: Vec::new(),
// cache and share the variance struct used for items with
// no type/region parameters
invariant: VarianceTermPtr<'a>,
bivariant: VarianceTermPtr<'a>,
- constraints: ~[Constraint<'a>],
+ constraints: Vec<Constraint<'a>> ,
}
/// Declares that the variable `decl_id` appears in a location with
contravariant: contravariant,
invariant: invariant,
bivariant: bivariant,
- constraints: ~[],
+ constraints: Vec::new(),
};
visit::walk_crate(&mut constraint_cx, krate, ());
constraint_cx
struct SolveContext<'a> {
terms_cx: TermsContext<'a>,
- constraints: ~[Constraint<'a>],
+ constraints: Vec<Constraint<'a>> ,
// Maps from an InferredIndex to the inferred value for that variable.
- solutions: ~[ty::Variance]
-}
+ solutions: Vec<ty::Variance>
+}
fn solve_constraints(constraints_cx: ConstraintContext) {
let ConstraintContext { terms_cx, constraints, .. } = constraints_cx;
pub fn field_expr(f: ast::Field) -> @ast::Expr { return f.expr; }
-pub fn field_exprs(fields: ~[ast::Field]) -> ~[@ast::Expr] {
+pub fn field_exprs(fields: Vec<ast::Field> ) -> Vec<@ast::Expr> {
fields.map(|f| f.expr)
}
did: ast::DefId,
is_trait: bool) -> ~str {
- let mut strs = ~[];
+ let mut strs = Vec::new();
match *regions {
ty::ErasedRegions => { }
ty::NonerasedRegions(ref regions) => {
// This is necessary to handle types like Option<~[T]>, for which
// autoderef cannot convert the &[T] handler
-impl<T:Repr> Repr for ~[T] {
+impl<T:Repr> Repr for Vec<T> {
fn repr(&self, tcx: ctxt) -> ~str {
repr_vec(tcx, *self)
}
impl Repr for ty::ParamBounds {
fn repr(&self, tcx: ctxt) -> ~str {
- let mut res = ~[];
+ let mut res = Vec::new();
for b in self.builtin_bounds.iter() {
res.push(match b {
ty::BoundStatic => ~"'static",
impl UserString for ty::BuiltinBounds {
fn user_string(&self, tcx: ctxt) -> ~str {
if self.is_empty() { ~"<no-bounds>" } else {
- let mut result = ~[];
+ let mut result = Vec::new();
for bb in self.iter() {
result.push(bb.user_string(tcx));
}
/// Convenience function that retrieves the result of a digest as a
/// newly allocated vec of bytes.
- fn result_bytes(&mut self) -> ~[u8] {
+ fn result_bytes(&mut self) -> Vec<u8> {
- let mut buf = vec::from_elem((self.output_bits()+7)/8, 0u8);
- self.result(buf);
+ let mut buf = Vec::from_elem((self.output_bits()+7)/8, 0u8);
+ self.result(buf.as_mut_slice());
buf
#[test]
fn test_sha256() {
// Examples from wikipedia
- let wikipedia_tests = ~[
+ let wikipedia_tests = vec!(
Test {
input: ~"",
output_str: ~"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
Test {
input: ~"The quick brown fox jumps over the lazy dog.",
output_str: ~"ef537f25c895bfa782526529a9b63d97aa631564d5d789c2b765448c8635fb6c"
- },
- ];
+ });
let tests = wikipedia_tests;