STD_PKG_NAME := rust-std-$(CFG_PACKAGE_VERS)
DOC_PKG_NAME := rust-docs-$(CFG_PACKAGE_VERS)
MINGW_PKG_NAME := rust-mingw-$(CFG_PACKAGE_VERS)
+SRC_PKG_NAME := rust-src-$(CFG_PACKAGE_VERS)
# License suitable for displaying in a popup
LICENSE.txt: $(S)COPYRIGHT $(S)LICENSE-APACHE $(S)LICENSE-MIT
UNROOTED_PKG_FILES := $(patsubst $(S)%,./%,$(PKG_FILES))
-$(PKG_TAR): $(PKG_FILES)
- @$(call E, making dist dir)
- $(Q)rm -Rf tmp/dist/$(PKG_NAME)
- $(Q)mkdir -p tmp/dist/$(PKG_NAME)
+tmp/dist/$$(SRC_PKG_NAME)-image: $(PKG_FILES)
+ @$(call E, making src image)
+ $(Q)rm -Rf tmp/dist/$(SRC_PKG_NAME)-image
+ $(Q)mkdir -p tmp/dist/$(SRC_PKG_NAME)-image/lib/rustlib/src/rust
$(Q)tar \
-C $(S) \
-f - \
--exclude=*/llvm/test/*/*/*.ll \
--exclude=*/llvm/test/*/*/*.td \
--exclude=*/llvm/test/*/*/*.s \
- -c $(UNROOTED_PKG_FILES) | tar -x -f - -C tmp/dist/$(PKG_NAME)
+ -c $(UNROOTED_PKG_FILES) | tar -x -f - -C tmp/dist/$(SRC_PKG_NAME)-image/lib/rustlib/src/rust
+
+$(PKG_TAR): tmp/dist/$$(SRC_PKG_NAME)-image
@$(call E, making $@)
- $(Q)tar -czf $(PKG_TAR) -C tmp/dist $(PKG_NAME)
- $(Q)rm -Rf tmp/dist/$(PKG_NAME)
+ $(Q)tar -czf $(PKG_TAR) -C tmp/dist/$(SRC_PKG_NAME)-image/lib/rustlib/src rust --transform 's,^rust,$(PKG_NAME),S'
dist-tar-src: $(PKG_TAR)
$(foreach host,$(CFG_HOST),\
$(eval $(call DEF_INSTALLER,$(host))))
+dist/$(SRC_PKG_NAME).tar.gz: tmp/dist/$(SRC_PKG_NAME)-image
+ @$(call E, build: $@)
+ $(Q)$(S)src/rust-installer/gen-installer.sh \
+ --product-name=Rust \
+ --rel-manifest-dir=rustlib \
+ --success-message=Awesome-Source. \
+ --image-dir=tmp/dist/$(SRC_PKG_NAME)-image \
+ --work-dir=tmp/dist \
+ --output-dir=dist \
+ --package-name=$(SRC_PKG_NAME) \
+ --component-name=rust-src \
+ --legacy-manifest-dirs=rustlib,cargo
+
# When generating packages for the standard library, we've actually got a lot of
# artifacts to choose from. Each of the CFG_HOST compilers will have a copy of
# the standard library for each CFG_TARGET, but we only want to generate one
# Primary targets (dist, distcheck)
######################################################################
-MAYBE_DIST_TAR_SRC=dist-tar-src
-MAYBE_DISTCHECK_TAR_SRC=distcheck-tar-src
+MAYBE_DIST_TAR_SRC=dist-tar-src dist/$(SRC_PKG_NAME).tar.gz
+MAYBE_DISTCHECK_TAR_SRC=distcheck-tar-src dist/$(SRC_PKG_NAME).tar.gz
# FIXME #13224: On OS X don't produce tarballs simply because --exclude-vcs don't work.
# This is a huge hack because I just don't have time to figure out another solution.
"libc 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
"md5 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.1.73 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "aho-corasick"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "build_helper"
version = "0.1.0"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "memchr"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "num_cpus"
version = "0.2.11"
"libc 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "regex"
+version = "0.1.73"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "aho-corasick 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex-syntax 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "thread_local 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "rustc-serialize"
version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "thread-id"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "thread_local"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "toml"
version = "0.1.28"
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "utf8-ranges"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "winapi"
version = "0.2.6"
gcc = { git = "https://github.com/alexcrichton/gcc-rs" }
libc = "0.2"
md5 = "0.1"
+regex = "0.1.73"
use std::process::Command;
use {Build, Compiler};
-use util::{cp_r, libdir, is_dylib};
+use util::{cp_r, libdir, is_dylib, cp_filtered, copy};
+use regex::{RegexSet, quote};
fn package_vers(build: &Build) -> &str {
match &build.config.channel[..] {
t!(fs::remove_dir_all(&image));
}
+/// Creates the `rust-src` installer component and the plain source tarball
+pub fn rust_src(build: &Build) {
+ println!("Dist src");
+ let plain_name = format!("rustc-{}-src", package_vers(build));
+ let name = format!("rust-src-{}", package_vers(build));
+ let image = tmpdir(build).join(format!("{}-image", name));
+ let _ = fs::remove_dir_all(&image);
+
+ let dst = image.join("lib/rustlib/src");
+ let dst_src = dst.join("rust");
+ let plain_dst_src = dst.join(&plain_name);
+ t!(fs::create_dir_all(&dst_src));
+
+ // This is the set of root paths which will become part of the source package
+ let src_files = [
+ "COPYRIGHT",
+ "LICENSE-APACHE",
+ "LICENSE-MIT",
+ "CONTRIBUTING.md",
+ "README.md",
+ "RELEASES.md",
+ "configure",
+ "Makefile.in"
+ ];
+ let src_dirs = [
+ "man",
+ "src",
+ "mk"
+ ];
+
+ // Exclude paths matching these wildcard expressions
+ let excludes = [
+ // exclude-vcs
+ "CVS", "RCS", "SCCS", ".git", ".gitignore", ".gitmodules", ".gitattributes", ".cvsignore",
+ ".svn", ".arch-ids", "{arch}", "=RELEASE-ID", "=meta-update", "=update", ".bzr",
+ ".bzrignore", ".bzrtags", ".hg", ".hgignore", ".hgrags", "_darcs",
+ // extensions
+ "*~", "*.pyc",
+ // misc
+ "llvm/test/*/*.ll",
+ "llvm/test/*/*.td",
+ "llvm/test/*/*.s",
+ "llvm/test/*/*/*.ll",
+ "llvm/test/*/*/*.td",
+ "llvm/test/*/*/*.s"
+ ];
+
+ // Construct a set of regexes for efficiently testing whether paths match one of the above
+ // expressions.
+ let regex_set = t!(RegexSet::new(
+ // This converts a wildcard expression to a regex
+ excludes.iter().map(|&s| {
+ // Prefix ensures that matching starts on a path separator boundary
+ r"^(.*[\\/])?".to_owned() + (
+ // Escape the expression to produce a regex matching exactly that string
+ "e(s)
+ // Replace slashes with a pattern matching either forward or backslash
+ .replace(r"/", r"[\\/]")
+ // Replace wildcards with a pattern matching a single path segment, ie. containing
+ // no slashes.
+ .replace(r"\*", r"[^\\/]*")
+ // Suffix anchors to the end of the path
+ ) + "$"
+ })
+ ));
+
+ // Create a filter which skips files which match the regex set or contain invalid unicode
+ let filter_fn = move |path: &Path| {
+ if let Some(path) = path.to_str() {
+ !regex_set.is_match(path)
+ } else {
+ false
+ }
+ };
+
+ // Copy the directories using our filter
+ for item in &src_dirs {
+ let dst = &dst_src.join(item);
+ t!(fs::create_dir(dst));
+ cp_filtered(&build.src.join(item), dst, &filter_fn);
+ }
+ // Copy the files normally
+ for item in &src_files {
+ copy(&build.src.join(item), &dst_src.join(item));
+ }
+
+ // Create source tarball in rust-installer format
+ let mut cmd = Command::new("sh");
+ cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+ .arg("--product-name=Rust")
+ .arg("--rel-manifest-dir=rustlib")
+ .arg("--success-message=Awesome-Source.")
+ .arg(format!("--image-dir={}", sanitize_sh(&image)))
+ .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
+ .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+ .arg(format!("--package-name={}", name))
+ .arg("--component-name=rust-src")
+ .arg("--legacy-manifest-dirs=rustlib,cargo");
+ build.run(&mut cmd);
+
+ // Rename directory, so that root folder of tarball has the correct name
+ t!(fs::rename(&dst_src, &plain_dst_src));
+
+ // Create plain source tarball
+ let mut cmd = Command::new("tar");
+ cmd.arg("-czf").arg(sanitize_sh(&distdir(build).join(&format!("{}.tar.gz", plain_name))))
+ .arg(&plain_name)
+ .current_dir(&dst);
+ build.run(&mut cmd);
+
+ t!(fs::remove_dir_all(&image));
+}
+
fn install(src: &Path, dstdir: &Path, perms: u32) {
let dst = dstdir.join(src.file_name().unwrap());
t!(fs::create_dir_all(dstdir));
extern crate num_cpus;
extern crate rustc_serialize;
extern crate toml;
+extern crate regex;
use std::cell::RefCell;
use std::collections::HashMap;
DistMingw { _dummy } => dist::mingw(self, target.target),
DistRustc { stage } => dist::rustc(self, stage, target.target),
DistStd { compiler } => dist::std(self, &compiler, target.target),
+ DistSrc { _dummy } => dist::rust_src(self),
DebuggerScripts { stage } => {
let compiler = Compiler::new(stage, target.target);
(dist_mingw, DistMingw { _dummy: () }),
(dist_rustc, DistRustc { stage: u32 }),
(dist_std, DistStd { compiler: Compiler<'a> }),
+ (dist_src, DistSrc { _dummy: () }),
// Misc targets
(android_copy_libs, AndroidCopyLibs { compiler: Compiler<'a> }),
vec![self.libtest(compiler)]
}
}
+ Source::DistSrc { _dummy: _ } => Vec::new(),
Source::Dist { stage } => {
let mut base = Vec::new();
for host in build.config.host.iter() {
let host = self.target(host);
+ base.push(host.dist_src(()));
base.push(host.dist_rustc(stage));
if host.target.contains("windows-gnu") {
base.push(host.dist_mingw(()));
}
}
+/// Copies the `src` directory recursively to `dst`. Both are assumed to exist
+/// when this function is called. Unwanted files or directories can be skipped
+/// by returning `false` from the filter function.
+///
+/// The filter receives paths *relative* to the root `src` directory, not
+/// absolute paths, so exclusion patterns can be written against the tree root.
+pub fn cp_filtered<F: Fn(&Path) -> bool>(src: &Path, dst: &Path, filter: &F) {
+    // Inner function does the actual work
+    fn recurse<F: Fn(&Path) -> bool>(src: &Path, dst: &Path, relative: &Path, filter: &F) {
+        for f in t!(fs::read_dir(src)) {
+            let f = t!(f);
+            let path = f.path();
+            let name = path.file_name().unwrap();
+            let dst = dst.join(name);
+            // Extend the relative path handed to the filter by this entry's name.
+            let relative = relative.join(name);
+            // Only copy file or directory if the filter function returns true
+            if filter(&relative) {
+                if t!(f.file_type()).is_dir() {
+                    // Remove any stale destination directory before recursing,
+                    // so the copy is a clean replacement rather than a merge.
+                    let _ = fs::remove_dir_all(&dst);
+                    t!(fs::create_dir(&dst));
+                    recurse(&path, &dst, &relative, filter);
+                } else {
+                    // Likewise replace any stale destination file; removal
+                    // failure is ignored since `copy` will surface real errors.
+                    let _ = fs::remove_file(&dst);
+                    copy(&path, &dst);
+                }
+            }
+        }
+    }
+    // Immediately recurse with an empty relative path
+    recurse(src, dst, Path::new(""), filter)
+}
+
/// Given an executable called `name`, return the filename for the
/// executable for a particular target.
pub fn exe(name: &str, target: &str) -> String {
#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
pub enum StatementKind<'tcx> {
+ /// Write the RHS Rvalue to the LHS Lvalue.
Assign(Lvalue<'tcx>, Rvalue<'tcx>),
- SetDiscriminant{ lvalue: Lvalue<'tcx>, variant_index: usize },
+
+ /// Write the discriminant for a variant to the enum Lvalue.
+ SetDiscriminant { lvalue: Lvalue<'tcx>, variant_index: usize },
+
+ /// Start a live range for the storage of the local.
+ StorageLive(Lvalue<'tcx>),
+
+ /// End the current live range for the storage of the local.
+ StorageDead(Lvalue<'tcx>),
}
impl<'tcx> Debug for Statement<'tcx> {
use self::StatementKind::*;
match self.kind {
Assign(ref lv, ref rv) => write!(fmt, "{:?} = {:?}", lv, rv),
+ StorageLive(ref lv) => write!(fmt, "StorageLive({:?})", lv),
+ StorageDead(ref lv) => write!(fmt, "StorageDead({:?})", lv),
SetDiscriminant{lvalue: ref lv, variant_index: index} => {
write!(fmt, "discriminant({:?}) = {:?}", lv, index)
}
StatementKind::SetDiscriminant{ ref $($mutability)* lvalue, .. } => {
self.visit_lvalue(lvalue, LvalueContext::Store);
}
+ StatementKind::StorageLive(ref $($mutability)* lvalue) => {
+ self.visit_lvalue(lvalue, LvalueContext::StorageLive);
+ }
+ StatementKind::StorageDead(ref $($mutability)* lvalue) => {
+ self.visit_lvalue(lvalue, LvalueContext::StorageDead);
+ }
}
}
// Consumed as part of an operand
Consume,
+
+ // Starting and ending a storage live range
+ StorageLive,
+ StorageDead,
}
sets.kill_set.add(&moi);
});
}
+ repr::StatementKind::StorageLive(_) |
+ repr::StatementKind::StorageDead(_) => {}
}
}
repr::StatementKind::Assign(ref lvalue, ref rvalue) => {
(lvalue, rvalue)
}
+ repr::StatementKind::StorageLive(_) |
+ repr::StatementKind::StorageDead(_) => continue,
repr::StatementKind::SetDiscriminant{ .. } =>
span_bug!(stmt.source_info.span,
"sanity_check should run before Deaggregator inserts SetDiscriminant"),
Rvalue::InlineAsm { .. } => {}
}
}
+ StatementKind::StorageLive(_) |
+ StatementKind::StorageDead(_) => {}
StatementKind::SetDiscriminant{ .. } => {
span_bug!(stmt.source_info.span,
"SetDiscriminant should not exist during borrowck");
move_data.rev_lookup.find(lvalue),
|moi| callback(moi, DropFlagState::Present))
}
+ repr::StatementKind::StorageLive(_) |
+ repr::StatementKind::StorageDead(_) => {}
},
None => {
debug!("drop_flag_effects: replace {:?}", block.terminator());
}
pub fn meta_section_name(target: &Target) -> &'static str {
+ // Historical note:
+ //
+ // When using link.exe it was seen that the section name `.note.rustc`
+ // was getting shortened to `.note.ru`, and according to the PE and COFF
+ // specification:
+ //
+ // > Executable images do not use a string table and do not support
+    // > section names longer than 8 characters
+ //
+ // https://msdn.microsoft.com/en-us/library/windows/hardware/gg463119.aspx
+ //
+ // As a result, we choose a slightly shorter name! As to why
+ // `.note.rustc` works on MinGW, that's another good question...
+
if target.options.is_like_osx {
- "__DATA,__note.rustc"
- } else if target.options.is_like_msvc {
- // When using link.exe it was seen that the section name `.note.rustc`
- // was getting shortened to `.note.ru`, and according to the PE and COFF
- // specification:
- //
- // > Executable images do not use a string table and do not support
- // > section names longer than 8Â characters
- //
- // https://msdn.microsoft.com/en-us/library/windows/hardware/gg463119.aspx
- //
- // As a result, we choose a slightly shorter name! As to why
- // `.note.rustc` works on MinGW, that's another good question...
- ".rustc"
+ "__DATA,.rustc"
} else {
- ".note.rustc"
+ ".rustc"
}
}
-pub fn read_meta_section_name(target: &Target) -> &'static str {
- if target.options.is_like_osx {
- "__note.rustc"
- } else if target.options.is_like_msvc {
- ".rustc"
- } else {
- ".note.rustc"
- }
+pub fn read_meta_section_name(_target: &Target) -> &'static str {
+ ".rustc"
}
// A diagnostic function for dumping crate metadata to an output stream
// FIXME #30046 ^~~~
this.expr_into_pattern(block, pattern, init)
}));
+ } else {
+ this.storage_live_for_bindings(block, &pattern);
}
// Enter the visibility scope, after evaluating the initializer.
let temp = this.temp(expr_ty.clone());
let temp_lifetime = expr.temp_lifetime;
let expr_span = expr.span;
+ let source_info = this.source_info(expr_span);
+
+ if temp_lifetime.is_some() {
+ this.cfg.push(block, Statement {
+ source_info: source_info,
+ kind: StatementKind::StorageLive(temp.clone())
+ });
+ }
// Careful here not to cause an infinite cycle. If we always
// called `into`, then for lvalues like `x.f`, it would
Category::Lvalue => {
let lvalue = unpack!(block = this.as_lvalue(block, expr));
let rvalue = Rvalue::Use(Operand::Consume(lvalue));
- let source_info = this.source_info(expr_span);
this.cfg.push_assign(block, source_info, &temp, rvalue);
}
_ => {
PatternKind::Binding { mode: BindingMode::ByValue,
var,
subpattern: None, .. } => {
+ self.storage_live_for_bindings(block, &irrefutable_pat);
let lvalue = Lvalue::Var(self.var_indices[&var]);
return self.into(&lvalue, block, initializer);
}
}
var_scope
}
+
+    /// Emit `StorageLive` for every binding in the pattern.
+    ///
+    /// Walks the pattern recursively and pushes a `StorageLive` statement
+    /// onto `block` for each `Binding`, so that each bound variable's
+    /// storage live range starts before it is assigned.
+    pub fn storage_live_for_bindings(&mut self,
+                                     block: BasicBlock,
+                                     pattern: &Pattern<'tcx>) {
+        match *pattern.kind {
+            PatternKind::Binding { var, ref subpattern, .. } => {
+                let lvalue = Lvalue::Var(self.var_indices[&var]);
+                let source_info = self.source_info(pattern.span);
+                self.cfg.push(block, Statement {
+                    source_info: source_info,
+                    kind: StatementKind::StorageLive(lvalue)
+                });
+
+                // A binding may wrap a nested pattern (e.g. `x @ Some(y)`);
+                // recurse so `y`'s storage also becomes live.
+                if let Some(subpattern) = subpattern.as_ref() {
+                    self.storage_live_for_bindings(block, subpattern);
+                }
+            }
+            PatternKind::Array { ref prefix, ref slice, ref suffix } |
+            PatternKind::Slice { ref prefix, ref slice, ref suffix } => {
+                for subpattern in prefix.iter().chain(slice).chain(suffix) {
+                    self.storage_live_for_bindings(block, subpattern);
+                }
+            }
+            // These pattern kinds introduce no bindings and have no subpatterns.
+            PatternKind::Constant { .. } | PatternKind::Range { .. } | PatternKind::Wild => {
+            }
+            PatternKind::Deref { ref subpattern } => {
+                self.storage_live_for_bindings(block, subpattern);
+            }
+            PatternKind::Leaf { ref subpatterns } |
+            PatternKind::Variant { ref subpatterns, .. } => {
+                for subpattern in subpatterns {
+                    self.storage_live_for_bindings(block, &subpattern.pattern);
+                }
+            }
+        }
+    }
}
/// List of blocks for each arm (and potentially other metadata in the
};
let source_info = self.source_info(binding.span);
+ self.cfg.push(block, Statement {
+ source_info: source_info,
+ kind: StatementKind::StorageLive(Lvalue::Var(var_index))
+ });
self.cfg.push_assign(block, source_info,
&Lvalue::Var(var_index), rvalue);
}
let span = tcx.map.span(item_id);
let mut builder = Builder::new(hir, span);
- let extent = ROOT_CODE_EXTENT;
+ let extent = tcx.region_maps.temporary_scope(ast_expr.id)
+ .unwrap_or(ROOT_CODE_EXTENT);
let mut block = START_BLOCK;
let _ = builder.in_scope(extent, block, |builder| {
let expr = builder.hir.mirror(ast_expr);
/// `ScopeAuxiliary`, but kept here for convenience
extent: CodeExtent,
+ /// Whether there's anything to do for the cleanup path, that is,
+ /// when unwinding through this scope. This includes destructors,
+ /// but not StorageDead statements, which don't get emitted at all
+ /// for unwinding, for several reasons:
+ /// * clang doesn't emit llvm.lifetime.end for C++ unwinding
+ /// * LLVM's memory dependency analysis can't handle it atm
+ /// * pollutting the cleanup MIR with StorageDead creates
+ /// landing pads even though there's no actual destructors
+ /// * freeing up stack space has no effect during unwinding
+ needs_cleanup: bool,
+
/// set of lvalues to drop when exiting this scope. This starts
/// out empty but grows as variables are declared during the
/// building process. This is a stack, so we always drop from the
/// lvalue to drop
location: Lvalue<'tcx>,
- /// The cached block for the cleanups-on-diverge path. This block
- /// contains code to run the current drop and all the preceding
- /// drops (i.e. those having lower index in Drop’s Scope drop
- /// array)
- cached_block: Option<BasicBlock>
+ /// Whether this is a full value Drop, or just a StorageDead.
+ kind: DropKind
+}
+
+enum DropKind {
+ Value {
+ /// The cached block for the cleanups-on-diverge path. This block
+ /// contains code to run the current drop and all the preceding
+ /// drops (i.e. those having lower index in Drop’s Scope drop
+ /// array)
+ cached_block: Option<BasicBlock>
+ },
+ Storage
}
struct FreeData<'tcx> {
fn invalidate_cache(&mut self) {
self.cached_exits = FnvHashMap();
for dropdata in &mut self.drops {
- dropdata.cached_block = None;
+ if let DropKind::Value { ref mut cached_block } = dropdata.kind {
+ *cached_block = None;
+ }
}
if let Some(ref mut freedata) = self.free {
freedata.cached_block = None;
/// Precondition: the caches must be fully filled (i.e. diverge_cleanup is called) in order for
/// this method to work correctly.
fn cached_block(&self) -> Option<BasicBlock> {
- if let Some(data) = self.drops.last() {
- Some(data.cached_block.expect("drop cache is not filled"))
+ let mut drops = self.drops.iter().rev().filter_map(|data| {
+ match data.kind {
+ DropKind::Value { cached_block } => Some(cached_block),
+ DropKind::Storage => None
+ }
+ });
+ if let Some(cached_block) = drops.next() {
+ Some(cached_block.expect("drop cache is not filled"))
} else if let Some(ref data) = self.free {
Some(data.cached_block.expect("free cache is not filled"))
} else {
id: id,
visibility_scope: vis_scope,
extent: extent,
+ needs_cleanup: false,
drops: vec![],
free: None,
cached_exits: FnvHashMap()
extent: CodeExtent,
lvalue: &Lvalue<'tcx>,
lvalue_ty: Ty<'tcx>) {
- if !self.hir.needs_drop(lvalue_ty) {
- return
- }
+ let needs_drop = self.hir.needs_drop(lvalue_ty);
+ let drop_kind = if needs_drop {
+ DropKind::Value { cached_block: None }
+ } else {
+ // Only temps and vars need their storage dead.
+ match *lvalue {
+ Lvalue::Temp(_) | Lvalue::Var(_) => DropKind::Storage,
+ _ => return
+ }
+ };
+
for scope in self.scopes.iter_mut().rev() {
if scope.extent == extent {
+ if let DropKind::Value { .. } = drop_kind {
+ scope.needs_cleanup = true;
+ }
+
// No need to invalidate any caches here. The just-scheduled drop will branch into
// the drop that comes before it in the vector.
scope.drops.push(DropData {
span: span,
location: lvalue.clone(),
- cached_block: None
+ kind: drop_kind
});
return;
} else {
// We must invalidate all the cached_blocks leading up to the scope we’re
// looking for, because all of the blocks in the chain will become incorrect.
- scope.invalidate_cache()
+ if let DropKind::Value { .. } = drop_kind {
+ scope.invalidate_cache()
+ }
}
}
span_bug!(span, "extent {:?} not in scope to drop {:?}", extent, lvalue);
// We also must invalidate the caches in the scope for which the free is scheduled
// because the drops must branch into the free we schedule here.
scope.invalidate_cache();
+ scope.needs_cleanup = true;
scope.free = Some(FreeData {
span: span,
value: value.clone(),
/// See module comment for more details. None indicates there’s no
/// cleanup to do at this point.
pub fn diverge_cleanup(&mut self) -> Option<BasicBlock> {
- if self.scopes.iter().all(|scope| scope.drops.is_empty() && scope.free.is_none()) {
+ if !self.scopes.iter().any(|scope| scope.needs_cleanup) {
return None;
}
- assert!(!self.scopes.is_empty()); // or `all` above would be true
+ assert!(!self.scopes.is_empty()); // or `any` above would be false
let unit_temp = self.get_unit_temp();
let Builder { ref mut hir, ref mut cfg, ref mut scopes,
resumeblk
};
- for scope in scopes.iter_mut().filter(|s| !s.drops.is_empty() || s.free.is_some()) {
+ for scope in scopes.iter_mut().filter(|s| s.needs_cleanup) {
target = build_diverge_scope(hir.tcx(), cfg, &unit_temp, scope, target);
}
Some(target)
-> BlockAnd<()> {
let mut iter = scope.drops.iter().rev().peekable();
while let Some(drop_data) = iter.next() {
- // Try to find the next block with its cached block for us to diverge into in case the
- // drop panics.
- let on_diverge = iter.peek().iter().flat_map(|dd| dd.cached_block.into_iter()).next();
- // If there’s no `cached_block`s within current scope, we must look for one in the
- // enclosing scope.
- let on_diverge = on_diverge.or_else(||{
- earlier_scopes.iter().rev().flat_map(|s| s.cached_block()).next()
- });
- let next = cfg.start_new_block();
- cfg.terminate(block, scope.source_info(drop_data.span), TerminatorKind::Drop {
- location: drop_data.location.clone(),
- target: next,
- unwind: on_diverge
- });
- block = next;
+ let source_info = scope.source_info(drop_data.span);
+ if let DropKind::Value { .. } = drop_data.kind {
+ // Try to find the next block with its cached block
+ // for us to diverge into in case the drop panics.
+ let on_diverge = iter.peek().iter().filter_map(|dd| {
+ match dd.kind {
+ DropKind::Value { cached_block } => cached_block,
+ DropKind::Storage => None
+ }
+ }).next();
+ // If there’s no `cached_block`s within current scope,
+ // we must look for one in the enclosing scope.
+ let on_diverge = on_diverge.or_else(||{
+ earlier_scopes.iter().rev().flat_map(|s| s.cached_block()).next()
+ });
+ let next = cfg.start_new_block();
+ cfg.terminate(block, source_info, TerminatorKind::Drop {
+ location: drop_data.location.clone(),
+ target: next,
+ unwind: on_diverge
+ });
+ block = next;
+ }
+ match drop_data.kind {
+ DropKind::Value { .. } |
+ DropKind::Storage => {
+ // Only temps and vars need their storage dead.
+ match drop_data.location {
+ Lvalue::Temp(_) | Lvalue::Var(_) => {}
+ _ => continue
+ }
+
+ cfg.push(block, Statement {
+ source_info: source_info,
+ kind: StatementKind::StorageDead(drop_data.location.clone())
+ });
+ }
+ }
}
block.unit()
}
// *forward* order, so that we generate drops[0] first (right to
// left in diagram above).
for drop_data in &mut scope.drops {
- target = if let Some(cached_block) = drop_data.cached_block {
+ // Only full value drops are emitted in the diverging path,
+ // not StorageDead.
+ let cached_block = match drop_data.kind {
+ DropKind::Value { ref mut cached_block } => cached_block,
+ DropKind::Storage => continue
+ };
+ target = if let Some(cached_block) = *cached_block {
cached_block
} else {
let block = cfg.start_new_cleanup_block();
target: target,
unwind: None
});
- drop_data.cached_block = Some(block);
+ *cached_block = Some(block);
block
};
}
let orig_stmt = bb.statements.pop().unwrap();
let (lhs, rhs) = match orig_stmt.kind {
StatementKind::Assign(ref lhs, ref rhs) => (lhs, rhs),
- StatementKind::SetDiscriminant{ .. } =>
- span_bug!(src_info.span, "expected aggregate, not {:?}", orig_stmt.kind),
+ _ => span_bug!(src_info.span, "expected assign, not {:?}", orig_stmt),
};
let (agg_kind, operands) = match rhs {
&Rvalue::Aggregate(ref agg_kind, ref operands) => (agg_kind, operands),
let ref statement = statements[i];
let rhs = match statement.kind {
StatementKind::Assign(_, ref rhs) => rhs,
- StatementKind::SetDiscriminant{ .. } => continue,
+ _ => continue,
};
let (kind, operands) = match rhs {
&Rvalue::Aggregate(ref kind, ref operands) => (kind, operands),
if let Lvalue::Temp(index) = *lvalue {
// Ignore drops, if the temp gets promoted,
// then it's constant and thus drop is noop.
- if let LvalueContext::Drop = context {
- return;
+ // Storage live ranges are also irrelevant.
+ match context {
+ LvalueContext::Drop |
+ LvalueContext::StorageLive |
+ LvalueContext::StorageDead => return,
+ _ => {}
}
let temp = &mut self.temps[index];
let (mut rvalue, mut call) = (None, None);
let source_info = if stmt_idx < no_stmts {
let statement = &mut self.source[bb].statements[stmt_idx];
- let mut rhs = match statement.kind {
+ let rhs = match statement.kind {
StatementKind::Assign(_, ref mut rhs) => rhs,
- StatementKind::SetDiscriminant{ .. } =>
- span_bug!(statement.source_info.span,
- "cannot promote SetDiscriminant {:?}",
- statement),
+ _ => {
+ span_bug!(statement.source_info.span, "{:?} is not an assignment",
+ statement);
+ }
};
if self.keep_original {
rvalue = Some(rhs.clone());
StatementKind::Assign(_, ref mut rvalue) => {
mem::replace(rvalue, Rvalue::Use(new_operand))
}
- StatementKind::SetDiscriminant{ .. } => {
- span_bug!(statement.source_info.span,
- "cannot promote SetDiscriminant {:?}",
- statement);
- }
+ _ => bug!()
}
}
Candidate::ShuffleIndices(bb) => {
let statement = &mir[bb].statements[stmt_idx];
let dest = match statement.kind {
StatementKind::Assign(ref dest, _) => dest,
- StatementKind::SetDiscriminant{ .. } =>
- panic!("cannot promote SetDiscriminant"),
+ _ => {
+ span_bug!(statement.source_info.span,
+ "expected assignment to promote");
+ }
};
if let Lvalue::Temp(index) = *dest {
if temps[index] == TempState::PromotedOut {
for block in mir.basic_blocks_mut() {
block.statements.retain(|statement| {
match statement.kind {
- StatementKind::Assign(Lvalue::Temp(index), _) => {
+ StatementKind::Assign(Lvalue::Temp(index), _) |
+ StatementKind::StorageLive(Lvalue::Temp(index)) |
+ StatementKind::StorageDead(Lvalue::Temp(index)) => {
!promoted(index)
}
_ => true
fn visit_statement(&mut self, bb: BasicBlock, statement: &Statement<'tcx>) {
assert_eq!(self.location.block, bb);
- self.nest(|this| this.super_statement(bb, statement));
+ self.nest(|this| {
+ this.visit_source_info(&statement.source_info);
+ match statement.kind {
+ StatementKind::Assign(ref lvalue, ref rvalue) => {
+ this.visit_assign(bb, lvalue, rvalue);
+ }
+ StatementKind::SetDiscriminant { .. } |
+ StatementKind::StorageLive(_) |
+ StatementKind::StorageDead(_) => {}
+ }
+ });
self.location.statement_index += 1;
}
variant_index);
};
}
+ StatementKind::StorageLive(ref lv) |
+ StatementKind::StorageDead(ref lv) => {
+ match *lv {
+ Lvalue::Temp(_) | Lvalue::Var(_) => {}
+ _ => {
+ span_mirbug!(self, stmt, "bad lvalue: expected temp or var");
+ }
+ }
+ }
}
}
};
unsafe {
llvm::LLVMSetInitializer(llglobal, llconst);
- let name =
+ let section_name =
cx.tcx().sess.cstore.metadata_section_name(&cx.sess().target.target);
- let name = CString::new(name).unwrap();
- llvm::LLVMSetSection(llglobal, name.as_ptr())
+ let name = CString::new(section_name).unwrap();
+ llvm::LLVMSetSection(llglobal, name.as_ptr());
+
+ // Also generate a .section directive to force no
+ // flags, at least for ELF outputs, so that the
+ // metadata doesn't get loaded into memory.
+ let directive = format!(".section {}", section_name);
+ let directive = CString::new(directive).unwrap();
+ llvm::LLVMSetModuleInlineAsm(cx.metadata_llmod(), directive.as_ptr())
}
return metadata;
}
LvalueContext::Call => {
self.mark_assigned(index);
}
- LvalueContext::Consume => {
- }
+
+ LvalueContext::StorageLive |
+ LvalueContext::StorageDead |
+ LvalueContext::Consume => {}
+
LvalueContext::Store |
LvalueContext::Inspect |
LvalueContext::Borrow { .. } |
LvalueContext::Projection => {
self.mark_as_lvalue(index);
}
+
LvalueContext::Drop => {
let ty = lvalue.ty(self.mir, self.bcx.tcx());
let ty = self.bcx.monomorphize(&ty.to_ty(self.bcx.tcx()));
Err(err) => if failure.is_ok() { failure = Err(err); }
}
}
+ mir::StatementKind::StorageLive(_) |
+ mir::StatementKind::StorageDead(_) => {}
mir::StatementKind::SetDiscriminant{ .. } => {
span_bug!(span, "SetDiscriminant should not appear in constants?");
}
use rustc::mir::repr as mir;
+use base;
use common::{self, BlockAndBuilder};
use super::MirContext;
);
bcx
}
+ mir::StatementKind::StorageLive(ref lvalue) => {
+ self.trans_storage_liveness(bcx, lvalue, base::Lifetime::Start)
+ }
+ mir::StatementKind::StorageDead(ref lvalue) => {
+ self.trans_storage_liveness(bcx, lvalue, base::Lifetime::End)
+ }
+ }
+ }
+
+ // Emit an LLVM lifetime marker (llvm.lifetime.start or .end, selected by
+ // `intrinsic`) for `lvalue`. Only locals that were lowered to a stack slot
+ // (LocalRef::Lvalue) get a marker — other locals have no alloca to annotate,
+ // and non-local lvalues are ignored. Returns the (unchanged) block builder.
+ fn trans_storage_liveness(&self,
+ bcx: BlockAndBuilder<'bcx, 'tcx>,
+ lvalue: &mir::Lvalue<'tcx>,
+ intrinsic: base::Lifetime)
+ -> BlockAndBuilder<'bcx, 'tcx> {
+ if let Some(index) = self.mir.local_index(lvalue) {
+ if let LocalRef::Lvalue(tr_lval) = self.locals[index] {
+ intrinsic.call(&bcx, tr_lval.llval);
+ }
 }
+ bcx
 }
}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -O -C no-prepopulate-passes
+
+#![crate_type = "lib"]
+#![feature(rustc_attrs)]
+
+// Verifies that MIR trans emits llvm.lifetime.start / llvm.lifetime.end
+// markers delimiting each local's storage range (driven by the MIR
+// StorageLive/StorageDead statements). Each `&x;` below forces the
+// variable into an alloca so the markers are observable; the nested
+// scope around `b` checks that inner lifetimes end before outer ones.
+
+// CHECK-LABEL: @test
+#[no_mangle]
+#[rustc_mir] // FIXME #27840 MIR has different codegen.
+pub fn test() {
+ let a = 0;
+ &a; // keep variable in an alloca
+
+// CHECK: [[S_a:%[0-9]+]] = bitcast i32* %a to i8*
+// CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_a]])
+
+ {
+ let b = &Some(a);
+ &b; // keep variable in an alloca
+
+// CHECK: [[S_b:%[0-9]+]] = bitcast %"2.std::option::Option<i32>"** %b to i8*
+// CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_b]])
+
+// CHECK: [[S_tmp2:%[0-9]+]] = bitcast %"2.std::option::Option<i32>"* %tmp2 to i8*
+// CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_tmp2]])
+
+// CHECK: [[E_tmp2:%[0-9]+]] = bitcast %"2.std::option::Option<i32>"* %tmp2 to i8*
+// CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_tmp2]])
+
+// CHECK: [[E_b:%[0-9]+]] = bitcast %"2.std::option::Option<i32>"** %b to i8*
+// CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_b]])
+ }
+
+ let c = 1;
+ &c; // keep variable in an alloca
+
+// CHECK: [[S_c:%[0-9]+]] = bitcast i32* %c to i8*
+// CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_c]])
+
+// CHECK: [[E_c:%[0-9]+]] = bitcast i32* %c to i8*
+// CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_c]])
+
+// CHECK: [[E_a:%[0-9]+]] = bitcast i32* %a to i8*
+// CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_a]])
+}
}
fn loop_expr(mut x: u64, y: u64, z: u64) -> u64 {
- loop { // #break
- x += z;
+ loop {
+ x += z; // #break
if x + y > 1000 {
return x;
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ let a = 0;
+ {
+ let b = &Some(a);
+ }
+ let c = 1;
+}
+
+// END RUST SOURCE
+//
+// NOTE(review): the expected MIR below checks the StorageLive/StorageDead
+// statements emitted for each local, and its spans encode source
+// line:column positions (e.g. storage_ranges.rs:12:9). Any edit to the
+// Rust source above must keep line numbers stable, or this expected
+// output must be regenerated.
+//
+// START rustc.node4.PreTrans.after.mir
+// bb0: {
+// StorageLive(var0); // scope 0 at storage_ranges.rs:12:9: 12:10
+// var0 = const 0i32; // scope 0 at storage_ranges.rs:12:13: 12:14
+// StorageLive(var1); // scope 1 at storage_ranges.rs:14:13: 14:14
+// StorageLive(tmp1); // scope 1 at storage_ranges.rs:14:18: 14:25
+// StorageLive(tmp2); // scope 1 at storage_ranges.rs:14:23: 14:24
+// tmp2 = var0; // scope 1 at storage_ranges.rs:14:23: 14:24
+// tmp1 = std::prelude::v1::Some<i32>(tmp2,); // scope 1 at storage_ranges.rs:14:18: 14:25
+// var1 = &tmp1; // scope 1 at storage_ranges.rs:14:17: 14:25
+// StorageDead(tmp2); // scope 1 at storage_ranges.rs:14:23: 14:24
+// tmp0 = (); // scope 2 at storage_ranges.rs:13:5: 15:6
+// StorageDead(tmp1); // scope 1 at storage_ranges.rs:14:18: 14:25
+// StorageDead(var1); // scope 1 at storage_ranges.rs:14:13: 14:14
+// StorageLive(var2); // scope 1 at storage_ranges.rs:16:9: 16:10
+// var2 = const 1i32; // scope 1 at storage_ranges.rs:16:13: 16:14
+// return = (); // scope 3 at storage_ranges.rs:11:11: 17:2
+// StorageDead(var2); // scope 1 at storage_ranges.rs:16:9: 16:10
+// StorageDead(var0); // scope 0 at storage_ranges.rs:12:9: 12:10
+// goto -> bb1; // scope 0 at storage_ranges.rs:11:1: 17:2
+// }
+//
+// bb1: {
+// return; // scope 0 at storage_ranges.rs:11:1: 17:2
+// }
+// END rustc.node4.PreTrans.after.mir
// except according to those terms.
use std::path::Path;
+use std::ffi::OsStr;
const CARGO_LOCK: &'static str = "Cargo.lock";
super::walk(path,
&mut |path| super::filter_dirs(path) || path.ends_with("src/test"),
&mut |file| {
- let name = file.file_name().unwrap().to_string_lossy();
- if name == CARGO_LOCK {
+ if let Some(CARGO_LOCK) = file.file_name().and_then(OsStr::to_str) {
let rel_path = file.strip_prefix(path).unwrap();
+ let git_friendly_path = rel_path.to_str().unwrap().replace("\\", "/");
let ret_code = Command::new("git")
- .arg("diff-index")
- .arg("--quiet")
+ .arg("diff")
+ .arg("--exit-code")
+ .arg("--patch")
.arg("HEAD")
- .arg(rel_path)
+ .arg(&git_friendly_path)
.current_dir(path)
.status()
.unwrap_or_else(|e| {