Derive `Hash` on `AssociatedKind`.
This is a trivial change useful in downstream code poking in rustc's
innards, in particular the semver verification tool I'm currently working on.
r? @eddyb
/// A mutable memory location.
///
+/// # Examples
+///
+/// Here you can see how using `Cell<T>` allows you to use a mutable field inside
+/// an immutable struct (this is also called 'interior mutability').
+///
+/// ```
+/// use std::cell::Cell;
+///
+/// struct SomeStruct {
+/// regular_field: u8,
+/// special_field: Cell<u8>,
+/// }
+///
+/// let my_struct = SomeStruct {
+/// regular_field: 0,
+/// special_field: Cell::new(1),
+/// };
+///
+/// let new_value = 100;
+///
+/// // ERROR, because my_struct is immutable
+/// // my_struct.regular_field = new_value;
+///
+/// // WORKS, although `my_struct` is immutable, field `special_field` is mutable because it is Cell
+/// my_struct.special_field.set(new_value);
+/// assert_eq!(my_struct.special_field.get(), new_value);
+/// ```
+///
/// See the [module-level documentation](index.html) for more.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Cell<T> {
use ty::{ClosureSubsts, Region, Ty};
use mir::*;
use rustc_const_math::ConstUsize;
-use rustc_data_structures::indexed_vec::Idx;
use syntax_pos::Span;
// # The MIR Visitor
fn super_mir(&mut self,
mir: & $($mutability)* Mir<'tcx>) {
- for index in 0..mir.basic_blocks().len() {
- let block = BasicBlock::new(index);
- self.visit_basic_block_data(block, &$($mutability)* mir[block]);
+ // for best performance, we want to use an iterator rather
+ // than a for-loop, to avoid calling Mir::invalidate for
+ // each basic block.
+ macro_rules! basic_blocks {
+ (mut) => (mir.basic_blocks_mut().iter_enumerated_mut());
+ () => (mir.basic_blocks().iter_enumerated());
+ };
+ for (bb, data) in basic_blocks!($($mutability)*) {
+ self.visit_basic_block_data(bb, data);
}
for scope in &$($mutability)* mir.visibility_scopes {
pub struct Union;
impl BitwiseOperator for Union {
+ #[inline]
fn join(&self, a: usize, b: usize) -> usize { a | b }
}
pub struct Subtract;
impl BitwiseOperator for Subtract {
+ #[inline]
fn join(&self, a: usize, b: usize) -> usize { a & !b }
}
///
/// If this archive is used with a mutable method, then an error will be
/// raised.
- pub fn open(dst: &Path) -> Option<ArchiveRO> {
+ pub fn open(dst: &Path) -> Result<ArchiveRO, String> {
return unsafe {
let s = path2cstr(dst);
let ar = ::LLVMRustOpenArchive(s.as_ptr());
if ar.is_null() {
- None
+ Err(::last_error().unwrap_or("failed to open archive".to_string()))
} else {
- Some(ArchiveRO { ptr: ar })
+ Ok(ArchiveRO { ptr: ar })
}
};
let tcx = this.hir.tcx();
// Enter the remainder scope, i.e. the bindings' destruction scope.
- this.push_scope(remainder_scope);
+ this.push_scope((remainder_scope, source_info));
let_extent_stack.push(remainder_scope);
// Declare the bindings, which may create a visibility scope.
.collect();
let success = this.cfg.start_new_block();
- let cleanup = this.diverge_cleanup(expr_span);
+ let cleanup = this.diverge_cleanup();
this.cfg.terminate(block, source_info, TerminatorKind::Call {
func: fun,
args: args,
let bool_ty = self.hir.bool_ty();
let eq_result = self.temp(bool_ty, test.span);
let eq_block = self.cfg.start_new_block();
- let cleanup = self.diverge_cleanup(test.span);
+ let cleanup = self.diverge_cleanup();
self.cfg.terminate(block, source_info, TerminatorKind::Call {
func: Operand::Constant(box Constant {
span: test.span,
/// the extent of this scope within source code.
extent: CodeExtent,
+ /// the span of that extent
+ extent_span: Span,
+
/// Whether there's anything to do for the cleanup path, that is,
/// when unwinding through this scope. This includes destructors,
/// but not StorageDead statements, which don't get emitted at all
/// * polluting the cleanup MIR with StorageDead creates
/// landing pads even though there's no actual destructors
/// * freeing up stack space has no effect during unwinding
- pub(super) needs_cleanup: bool,
+ needs_cleanup: bool,
/// set of lvalues to drop when exiting this scope. This starts
/// out empty but grows as variables are declared during the
pub break_destination: Lvalue<'tcx>,
}
+impl DropKind {
+ fn may_panic(&self) -> bool {
+ match *self {
+ DropKind::Value { .. } => true,
+ DropKind::Storage => false
+ }
+ }
+}
+
impl<'tcx> Scope<'tcx> {
/// Invalidate all the cached blocks in the scope.
///
where F: FnOnce(&mut Builder<'a, 'gcx, 'tcx>) -> BlockAnd<R>
{
debug!("in_opt_scope(opt_extent={:?}, block={:?})", opt_extent, block);
- if let Some(extent) = opt_extent { self.push_scope(extent.0); }
+ if let Some(extent) = opt_extent { self.push_scope(extent); }
let rv = unpack!(block = f(self));
if let Some(extent) = opt_extent {
unpack!(block = self.pop_scope(extent, block));
where F: FnOnce(&mut Builder<'a, 'gcx, 'tcx>) -> BlockAnd<R>
{
debug!("in_scope(extent={:?}, block={:?})", extent, block);
- self.push_scope(extent.0);
+ self.push_scope(extent);
let rv = unpack!(block = f(self));
unpack!(block = self.pop_scope(extent, block));
debug!("in_scope: exiting extent={:?} block={:?}", extent, block);
/// scope and call `pop_scope` afterwards. Note that these two
/// calls must be paired; using `in_scope` as a convenience
/// wrapper may be preferable.
- pub fn push_scope(&mut self, extent: CodeExtent) {
+ pub fn push_scope(&mut self, extent: (CodeExtent, SourceInfo)) {
debug!("push_scope({:?})", extent);
let vis_scope = self.visibility_scope;
self.scopes.push(Scope {
visibility_scope: vis_scope,
- extent: extent,
+ extent: extent.0,
+ extent_span: extent.1.span,
needs_cleanup: false,
drops: vec![],
free: None,
mut block: BasicBlock)
-> BlockAnd<()> {
debug!("pop_scope({:?}, {:?})", extent, block);
- // We need to have `cached_block`s available for all the drops, so we call diverge_cleanup
- // to make sure all the `cached_block`s are filled in.
- self.diverge_cleanup(extent.1.span);
+ // If we are emitting a `drop` statement, we need to have the cached
+ // diverge cleanup pads ready in case that drop panics.
+ let may_panic =
+ self.scopes.last().unwrap().drops.iter().any(|s| s.kind.may_panic());
+ if may_panic {
+ self.diverge_cleanup();
+ }
let scope = self.scopes.pop().unwrap();
assert_eq!(scope.extent, extent.0);
unpack!(block = build_scope_drops(&mut self.cfg,
let len = self.scopes.len();
assert!(scope_count < len, "should not use `exit_scope` to pop ALL scopes");
let tmp = self.get_unit_temp();
+
+ // If we are emitting a `drop` statement, we need to have the cached
+ // diverge cleanup pads ready in case that drop panics.
+ let may_panic = self.scopes[(len - scope_count)..].iter()
+ .any(|s| s.drops.iter().any(|s| s.kind.may_panic()));
+ if may_panic {
+ self.diverge_cleanup();
+ }
+
{
let mut rest = &mut self.scopes[(len - scope_count)..];
while let Some((scope, rest_)) = {rest}.split_last_mut() {
/// This path terminates in Resume. Returns the start of the path.
/// See module comment for more details. None indicates there’s no
/// cleanup to do at this point.
- pub fn diverge_cleanup(&mut self, span: Span) -> Option<BasicBlock> {
+ pub fn diverge_cleanup(&mut self) -> Option<BasicBlock> {
if !self.scopes.iter().any(|scope| scope.needs_cleanup) {
return None;
}
};
for scope in scopes.iter_mut() {
- target = build_diverge_scope(hir.tcx(), cfg, &unit_temp, span, scope, target);
+ target = build_diverge_scope(
+ hir.tcx(), cfg, &unit_temp, scope.extent_span, scope, target);
}
Some(target)
}
}
let source_info = self.source_info(span);
let next_target = self.cfg.start_new_block();
- let diverge_target = self.diverge_cleanup(span);
+ let diverge_target = self.diverge_cleanup();
self.cfg.terminate(block, source_info,
TerminatorKind::Drop {
location: location,
value: Operand<'tcx>) -> BlockAnd<()> {
let source_info = self.source_info(span);
let next_target = self.cfg.start_new_block();
- let diverge_target = self.diverge_cleanup(span);
+ let diverge_target = self.diverge_cleanup();
self.cfg.terminate(block, source_info,
TerminatorKind::DropAndReplace {
location: location,
let source_info = self.source_info(span);
let success_block = self.cfg.start_new_block();
- let cleanup = self.diverge_cleanup(span);
+ let cleanup = self.diverge_cleanup();
self.cfg.terminate(block, source_info,
TerminatorKind::Assert {
mut block: BasicBlock,
arg_count: usize)
-> BlockAnd<()> {
+ debug!("build_scope_drops({:?} -> {:?})", block, scope);
let mut iter = scope.drops.iter().rev().peekable();
while let Some(drop_data) = iter.next() {
let source_info = scope.source_info(drop_data.span);
- if let DropKind::Value { .. } = drop_data.kind {
- // Try to find the next block with its cached block
- // for us to diverge into in case the drop panics.
- let on_diverge = iter.peek().iter().filter_map(|dd| {
- match dd.kind {
- DropKind::Value { cached_block } => cached_block,
- DropKind::Storage => None
- }
- }).next();
- // If there’s no `cached_block`s within current scope,
- // we must look for one in the enclosing scope.
- let on_diverge = on_diverge.or_else(||{
- earlier_scopes.iter().rev().flat_map(|s| s.cached_block()).next()
- });
- let next = cfg.start_new_block();
- cfg.terminate(block, source_info, TerminatorKind::Drop {
- location: drop_data.location.clone(),
- target: next,
- unwind: on_diverge
- });
- block = next;
- }
match drop_data.kind {
- DropKind::Value { .. } |
- DropKind::Storage => {
- // Only temps and vars need their storage dead.
- match drop_data.location {
- Lvalue::Local(index) if index.index() > arg_count => {}
- _ => continue
- }
+ DropKind::Value { .. } => {
+ // Try to find the next block with its cached block
+ // for us to diverge into in case the drop panics.
+ let on_diverge = iter.peek().iter().filter_map(|dd| {
+ match dd.kind {
+ DropKind::Value { cached_block: None } =>
+ span_bug!(drop_data.span, "cached block not present?"),
+ DropKind::Value { cached_block } => cached_block,
+ DropKind::Storage => None
+ }
+ }).next();
+ // If there’s no `cached_block`s within current scope,
+ // we must look for one in the enclosing scope.
+ let on_diverge = on_diverge.or_else(|| {
+ earlier_scopes.iter().rev().flat_map(|s| s.cached_block()).next()
+ });
+ let next = cfg.start_new_block();
+ cfg.terminate(block, source_info, TerminatorKind::Drop {
+ location: drop_data.location.clone(),
+ target: next,
+ unwind: on_diverge
+ });
+ block = next;
+ }
+ DropKind::Storage => {}
+ }
+ // Drop the storage for both value and storage drops.
+ // Only temps and vars need their storage dead.
+ match drop_data.location {
+ Lvalue::Local(index) if index.index() > arg_count => {
cfg.push(block, Statement {
source_info: source_info,
kind: StatementKind::StorageDead(drop_data.location.clone())
});
}
+ _ => continue
}
}
block.unit()
}
pub fn simplify(mut self) {
+ self.strip_nops();
+
loop {
let mut changed = false;
if !changed { break }
}
-
- self.strip_nops()
}
// Collapse a goto chain starting from `start`
Some(ref src) => src,
None => return None,
};
- self.src_archive = Some(ArchiveRO::open(src));
+ self.src_archive = Some(ArchiveRO::open(src).ok());
self.src_archive.as_ref().unwrap().as_ref()
}
where F: FnMut(&str) -> bool + 'static
{
let archive = match ArchiveRO::open(archive) {
- Some(ar) => ar,
- None => return Err(io::Error::new(io::ErrorKind::Other,
- "failed to open archive")),
+ Ok(ar) => ar,
+ Err(e) => return Err(io::Error::new(io::ErrorKind::Other, e)),
};
self.additions.push(Addition::Archive {
archive: archive,
// just keeping the archive along while the metadata is in use.
let archive = ArchiveRO::open(filename)
.map(|ar| OwningRef::new(box ar))
- .ok_or_else(|| {
- debug!("llvm didn't like `{}`", filename.display());
- format!("failed to read rlib metadata: '{}'", filename.display())
- })?;
+ .map_err(|e| {
+ debug!("llvm didn't like `{}`: {}", filename.display(), e);
+ format!("failed to read rlib metadata in '{}': {}", filename.display(), e)
+ })?;
let buf: OwningRef<_, [u8]> = archive
.try_map(|ar| {
ar.iter()
.find(|sect| sect.name() == Some(METADATA_FILENAME))
.map(|s| s.data())
.ok_or_else(|| {
- debug!("didn't find '{}' in the archive", METADATA_FILENAME);
- format!("failed to read rlib metadata: '{}'",
- filename.display())
- })
+ debug!("didn't find '{}' in the archive", METADATA_FILENAME);
+ format!("failed to read rlib metadata: '{}'",
+ filename.display())
+ })
})?;
Ok(buf.erase_owner())
}
Tuple,
RawPointer,
Reference,
+ Fn,
}
#[derive(Clone, RustcEncodable, RustcDecodable, Copy, Debug)]
Tuple(..) => Some(PrimitiveType::Tuple),
RawPointer(..) => Some(PrimitiveType::RawPointer),
BorrowedRef { type_: box Generic(..), .. } => Some(PrimitiveType::Reference),
+ BareFunction(..) => Some(PrimitiveType::Fn),
_ => None,
}
}
"tuple" => Some(PrimitiveType::Tuple),
"pointer" => Some(PrimitiveType::RawPointer),
"reference" => Some(PrimitiveType::Reference),
+ "fn" => Some(PrimitiveType::Fn),
_ => None,
}
}
Tuple => "tuple",
RawPointer => "pointer",
Reference => "reference",
+ Fn => "fn",
}
}
Tuple => None,
RawPointer => tcx.lang_items.const_ptr_impl(),
Reference => None,
+ Fn => None,
};
if let Some(did) = did {
if !did.is_local() {
decl.generics,
decl.decl)
} else {
- write!(f, "{}{}fn{}{}",
- UnsafetySpace(decl.unsafety),
- AbiSpace(decl.abi),
- decl.generics,
- decl.decl)
+ write!(f, "{}{}", UnsafetySpace(decl.unsafety), AbiSpace(decl.abi))?;
+ primitive_link(f, PrimitiveType::Fn, "fn")?;
+ write!(f, "{}{}", decl.generics, decl.decl)
}
}
clean::Tuple(ref typs) => {
}
impl<T: Hash + Eq> HashSet<T, RandomState> {
- /// Creates an empty HashSet.
+ /// Creates an empty `HashSet`.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
- /// let mut set: HashSet<i32> = HashSet::new();
+ /// let set: HashSet<i32> = HashSet::new();
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
///
/// ```
/// use std::collections::HashSet;
- /// let mut set: HashSet<i32> = HashSet::with_capacity(10);
+ /// let set: HashSet<i32> = HashSet::with_capacity(10);
+ /// assert!(set.capacity() >= 10);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
/// Returns a reference to the set's [`BuildHasher`].
///
/// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::HashSet;
+ /// use std::collections::hash_map::RandomState;
+ ///
+ /// let hasher = RandomState::new();
+ /// let set: HashSet<i32> = HashSet::with_hasher(hasher);
+ /// let hasher: &RandomState = set.hasher();
+ /// ```
#[stable(feature = "hashmap_public_hasher", since = "1.9.0")]
pub fn hasher(&self) -> &S {
self.map.hasher()
/// use std::collections::HashSet;
/// let mut set: HashSet<i32> = HashSet::new();
/// set.reserve(10);
+ /// assert!(set.capacity() >= 10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve(&mut self, additional: usize) {
/// println!("{}", x); // Print 1
/// }
///
- /// let diff: HashSet<_> = a.difference(&b).cloned().collect();
- /// assert_eq!(diff, [1].iter().cloned().collect());
+ /// let diff: HashSet<_> = a.difference(&b).collect();
+ /// assert_eq!(diff, [1].iter().collect());
///
/// // Note that difference is not symmetric,
/// // and `b - a` means something else:
- /// let diff: HashSet<_> = b.difference(&a).cloned().collect();
- /// assert_eq!(diff, [4].iter().cloned().collect());
+ /// let diff: HashSet<_> = b.difference(&a).collect();
+ /// assert_eq!(diff, [4].iter().collect());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn difference<'a>(&'a self, other: &'a HashSet<T, S>) -> Difference<'a, T, S> {
/// println!("{}", x);
/// }
///
- /// let diff1: HashSet<_> = a.symmetric_difference(&b).cloned().collect();
- /// let diff2: HashSet<_> = b.symmetric_difference(&a).cloned().collect();
+ /// let diff1: HashSet<_> = a.symmetric_difference(&b).collect();
+ /// let diff2: HashSet<_> = b.symmetric_difference(&a).collect();
///
/// assert_eq!(diff1, diff2);
- /// assert_eq!(diff1, [1, 4].iter().cloned().collect());
+ /// assert_eq!(diff1, [1, 4].iter().collect());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn symmetric_difference<'a>(&'a self,
/// println!("{}", x);
/// }
///
- /// let intersection: HashSet<_> = a.intersection(&b).cloned().collect();
- /// assert_eq!(intersection, [2, 3].iter().cloned().collect());
+ /// let intersection: HashSet<_> = a.intersection(&b).collect();
+ /// assert_eq!(intersection, [2, 3].iter().collect());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn intersection<'a>(&'a self, other: &'a HashSet<T, S>) -> Intersection<'a, T, S> {
/// println!("{}", x);
/// }
///
- /// let union: HashSet<_> = a.union(&b).cloned().collect();
- /// assert_eq!(union, [1, 2, 3, 4].iter().cloned().collect());
+ /// let union: HashSet<_> = a.union(&b).collect();
+ /// assert_eq!(union, [1, 2, 3, 4].iter().collect());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn union<'a>(&'a self, other: &'a HashSet<T, S>) -> Union<'a, T, S> {
}
/// Clears the set, returning all elements in an iterator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::HashSet;
+ ///
+ /// let mut set: HashSet<_> = [1, 2, 3].iter().cloned().collect();
+ /// assert!(!set.is_empty());
+ ///
+ /// // print 1, 2, 3 in an arbitrary order
+ /// for i in set.drain() {
+ /// println!("{}", i);
+ /// }
+ ///
+ /// assert!(set.is_empty());
+ /// ```
#[inline]
#[stable(feature = "drain", since = "1.6.0")]
pub fn drain(&mut self) -> Drain<T> {
#[stable(feature = "raw_ext", since = "1.1.0")] pub type time_t = i64;
#[stable(feature = "pthread_t", since = "1.8.0")]
-pub type pthread_t = usize;
+pub type pthread_t = u32;
#[repr(C)]
#[derive(Clone)]
/// locally known.
#[stable(feature = "rust1", since = "1.0.0")]
mod prim_ref { }
+
+#[doc(primitive = "fn")]
+//
+/// Function pointers, like `fn(usize) -> bool`.
+///
+/// *See also the traits [`Fn`], [`FnMut`], and [`FnOnce`].*
+///
+/// [`Fn`]: ops/trait.Fn.html
+/// [`FnMut`]: ops/trait.FnMut.html
+/// [`FnOnce`]: ops/trait.FnOnce.html
+///
+/// Plain function pointers are obtained by casting either plain functions, or closures that don't
+/// capture an environment:
+///
+/// ```
+/// fn add_one(x: usize) -> usize {
+/// x + 1
+/// }
+///
+/// let ptr: fn(usize) -> usize = add_one;
+/// assert_eq!(ptr(5), 6);
+///
+/// let clos: fn(usize) -> usize = |x| x + 5;
+/// assert_eq!(clos(5), 10);
+/// ```
+///
+/// In addition to varying based on their signature, function pointers come in two flavors: safe
+/// and unsafe. Plain `fn()` function pointers can only point to safe functions,
+/// while `unsafe fn()` function pointers can point to safe or unsafe functions.
+///
+/// ```
+/// fn add_one(x: usize) -> usize {
+/// x + 1
+/// }
+///
+/// unsafe fn add_one_unsafely(x: usize) -> usize {
+/// x + 1
+/// }
+///
+/// let safe_ptr: fn(usize) -> usize = add_one;
+///
+/// //ERROR: mismatched types: expected normal fn, found unsafe fn
+/// //let bad_ptr: fn(usize) -> usize = add_one_unsafely;
+///
+/// let unsafe_ptr: unsafe fn(usize) -> usize = add_one_unsafely;
+/// let really_safe_ptr: unsafe fn(usize) -> usize = add_one;
+/// ```
+///
+/// On top of that, function pointers can vary based on what ABI they use. This is achieved by
+/// adding the `extern` keyword to the type name, followed by the ABI in question. For example,
+/// `fn()` is different from `extern "C" fn()`, which itself is different from `extern "stdcall"
+/// fn()`, and so on for the various ABIs that Rust supports. Non-`extern` functions have an ABI
+/// of `"Rust"`, and `extern` functions without an explicit ABI have an ABI of `"C"`. For more
+/// information, see [the nomicon's section on foreign calling conventions][nomicon-abi].
+///
+/// [nomicon-abi]: ../nomicon/ffi.html#foreign-calling-conventions
+///
+/// Extern function declarations with the "C" or "cdecl" ABIs can also be *variadic*, allowing them
+/// to be called with a variable number of arguments. Normal Rust functions, even those with an
+/// `extern "ABI"`, cannot be variadic. For more information, see [the nomicon's section on
+/// variadic functions][nomicon-variadic].
+///
+/// [nomicon-variadic]: ../nomicon/ffi.html#variadic-functions
+///
+/// These markers can be combined, so `unsafe extern "stdcall" fn()` is a valid type.
+///
+/// Like references in rust, function pointers are assumed to not be null, so if you want to pass a
+/// function pointer over FFI and be able to accommodate null pointers, make your type
+/// `Option<fn()>` with your required signature.
+///
+/// Function pointers implement the following traits:
+///
+/// * [`Clone`]
+/// * [`PartialEq`]
+/// * [`Eq`]
+/// * [`PartialOrd`]
+/// * [`Ord`]
+/// * [`Hash`]
+/// * [`Pointer`]
+/// * [`Debug`]
+///
+/// [`Clone`]: clone/trait.Clone.html
+/// [`PartialEq`]: cmp/trait.PartialEq.html
+/// [`Eq`]: cmp/trait.Eq.html
+/// [`PartialOrd`]: cmp/trait.PartialOrd.html
+/// [`Ord`]: cmp/trait.Ord.html
+/// [`Hash`]: hash/trait.Hash.html
+/// [`Pointer`]: fmt/trait.Pointer.html
+/// [`Debug`]: fmt/trait.Debug.html
+///
+/// Due to a temporary restriction in Rust's type system, these traits are only implemented on
+/// functions that take 12 arguments or less, with the `"Rust"` and `"C"` ABIs. In the future, this
+/// may change.
+///
+/// In addition, function pointers of *any* signature, ABI, or safety are [`Copy`], and all *safe*
+/// function pointers implement [`Fn`], [`FnMut`], and [`FnOnce`]. This works because these traits
+/// are specially known to the compiler.
+///
+/// [`Copy`]: marker/trait.Copy.html
+#[stable(feature = "rust1", since = "1.0.0")]
+mod prim_fn { }
// StorageDead(_3);
// StorageLive(_4);
// _4 = std::option::Option<std::boxed::Box<u32>>::None;
+// StorageLive(_5);
// StorageLive(_6);
-// StorageLive(_7);
-// _7 = _4;
-// replace(_6 <- _7) -> [return: bb6, unwind: bb7];
+// _6 = _4;
+// replace(_5 <- _6) -> [return: bb1, unwind: bb5];
// }
// bb1: {
-// resume;
+// drop(_6) -> [return: bb6, unwind: bb4];
// }
// bb2: {
-// drop(_4) -> bb1;
+// resume;
// }
// bb3: {
-// goto -> bb2;
+// drop(_4) -> bb2;
// }
// bb4: {
-// drop(_6) -> bb3;
+// drop(_5) -> bb3;
// }
// bb5: {
-// goto -> bb4;
+// drop(_6) -> bb4;
// }
// bb6: {
-// drop(_7) -> [return: bb8, unwind: bb4];
+// StorageDead(_6);
+// _0 = ();
+// drop(_5) -> [return: bb7, unwind: bb3];
// }
// bb7: {
-// drop(_7) -> bb5;
+// StorageDead(_5);
+// drop(_4) -> bb8;
// }
// bb8: {
-// StorageDead(_7);
-// _0 = ();
-// drop(_6) -> [return: bb9, unwind: bb2];
-// }
-// bb9: {
-// StorageDead(_6);
-// drop(_4) -> bb10;
-// }
-// bb10: {
// StorageDead(_4);
// StorageDead(_2);
// StorageDead(_1);
// START rustc.node4.SimplifyCfg-qualify-consts.after.mir
// let mut _0: ();
// let _1: D;
-// let _3: i32;
-// let _4: &'6_2rce i32;
+// let _2: i32;
+// let _3: &'6_2rce i32;
// let _7: &'6_4rce i32;
-// let mut _5: ();
-// let mut _6: i32;
-//
+// let mut _4: ();
+// let mut _5: i32;
+// let mut _6: ();
// bb0: {
// StorageLive(_1);
// _1 = D::{{constructor}}(const 0i32,);
+// StorageLive(_2);
+// _2 = const 0i32;
// StorageLive(_3);
-// _3 = const 0i32;
-// StorageLive(_4);
-// _4 = &'6_2rce _3;
-// StorageLive(_6);
-// _6 = (*_4);
-// _5 = const foo(_6) -> [return: bb2, unwind: bb3];
+// _3 = &'6_2rce _2;
+// StorageLive(_5);
+// _5 = (*_3);
+// _4 = const foo(_5) -> [return: bb1, unwind: bb3];
// }
// bb1: {
-// resume;
-// }
-// bb2: {
-// StorageDead(_6);
+// StorageDead(_5);
// StorageLive(_7);
-// _7 = &'6_4rce _3;
+// _7 = &'6_4rce _2;
// _0 = ();
// StorageDead(_7);
// EndRegion('6_4rce);
-// StorageDead(_4);
-// EndRegion('6_2rce);
// StorageDead(_3);
+// EndRegion('6_2rce);
+// StorageDead(_2);
// drop(_1) -> bb4;
// }
+// bb2: {
+// resume;
+// }
// bb3: {
// EndRegion('6_2rce);
-// drop(_1) -> bb1;
+// drop(_1) -> bb2;
// }
// bb4: {
// StorageDead(_1);
// let mut _0: ();
// let _1: D;
// let mut _2: ();
-// let mut _3: ();
-// let mut _4: [closure@NodeId(18) d: &'19mce D];
-// let mut _5: &'19mce D;
-//
+// let mut _3: [closure@NodeId(18) d:&'19mce D];
+// let mut _4: &'19mce D;
+// let mut _5: ();
// bb0: {
// StorageLive(_1);
// _1 = D::{{constructor}}(const 0i32,);
+// StorageLive(_3);
// StorageLive(_4);
-// StorageLive(_5);
-// _5 = &'19mce _1;
-// _4 = [closure@NodeId(18)] { d: _5 };
-// StorageDead(_5);
-// _3 = const foo(_4) -> [return: bb2, unwind: bb3];
+// _4 = &'19mce _1;
+// _3 = [closure@NodeId(18)] { d: _4 };
+// StorageDead(_4);
+// _2 = const foo(_3) -> [return: bb1, unwind: bb3];
// }
// bb1: {
-// resume;
-// }
-// bb2: {
-// StorageDead(_4);
+// StorageDead(_3);
// EndRegion('19mce);
// _0 = ();
// drop(_1) -> bb4;
// }
+// bb2: {
+// resume;
+// }
// bb3: {
// EndRegion('19mce);
-// drop(_1) -> bb1;
+// drop(_1) -> bb2;
// }
// bb4: {
// StorageDead(_1);
// END RUST SOURCE
// START rustc.node4.SimplifyCfg-qualify-consts.after.mir
+// fn main() -> () {
// let mut _0: ();
// let _1: D;
// let mut _2: ();
-// let mut _3: ();
-// let mut _4: [closure@NodeId(22) d:&'23mce D];
-// let mut _5: &'23mce D;
-//
+// let mut _3: [closure@NodeId(22) d:&'23mce D];
+// let mut _4: &'23mce D;
+// let mut _5: ();
// bb0: {
// StorageLive(_1);
// _1 = D::{{constructor}}(const 0i32,);
+// StorageLive(_3);
// StorageLive(_4);
-// StorageLive(_5);
-// _5 = &'23mce _1;
-// _4 = [closure@NodeId(22)] { d: _5 };
-// StorageDead(_5);
-// _3 = const foo(_4) -> [return: bb2, unwind: bb3];
+// _4 = &'23mce _1;
+// _3 = [closure@NodeId(22)] { d: _4 };
+// StorageDead(_4);
+// _2 = const foo(_3) -> [return: bb1, unwind: bb3];
// }
// bb1: {
-// resume;
-// }
-// bb2: {
-// StorageDead(_4);
+// StorageDead(_3);
// EndRegion('23mce);
// _0 = ();
// drop(_1) -> bb4;
// }
+// bb2: {
+// resume;
+// }
// bb3: {
// EndRegion('23mce);
-// drop(_1) -> bb1;
+// drop(_1) -> bb2;
// }
// bb4: {
// StorageDead(_1);
// let mut _0: ();
// let _1: D;
// let mut _2: ();
-// let mut _3: ();
-// let mut _4: [closure@NodeId(22) d:D];
-// let mut _5: D;
+// let mut _3: [closure@NodeId(22) d:D];
+// let mut _4: D;
+// let mut _5: ();
//
// bb0: {
// StorageLive(_1);
// _1 = D::{{constructor}}(const 0i32,);
+// StorageLive(_3);
// StorageLive(_4);
-// StorageLive(_5);
-// _5 = _1;
-// _4 = [closure@NodeId(22)] { d: _5 };
-// drop(_5) -> [return: bb4, unwind: bb3];
+// _4 = _1;
+// _3 = [closure@NodeId(22)] { d: _4 };
+// drop(_4) -> [return: bb4, unwind: bb3];
// }
// bb1: {
// resume;
// drop(_1) -> bb1;
// }
// bb3: {
-// drop(_4) -> bb2;
+// drop(_3) -> bb2;
// }
// bb4: {
-// StorageDead(_5);
-// _3 = const foo(_4) -> [return: bb5, unwind: bb3];
+// StorageDead(_4);
+// _2 = const foo(_3) -> [return: bb5, unwind: bb3];
// }
// bb5: {
-// drop(_4) -> [return: bb6, unwind: bb2];
+// drop(_3) -> [return: bb6, unwind: bb2];
// }
// bb6: {
-// StorageDead(_4);
+// StorageDead(_3);
// _0 = ();
// drop(_1) -> bb7;
// }
// fn main::{{closure}}(_1: [closure@NodeId(22) d:D]) -> i32 {
// let mut _0: i32;
// let _2: &'14_0rce D;
-// let mut _3: ();
-// let mut _4: i32;
+// let mut _3: i32;
+// let mut _4: ();
//
// bb0: {
// StorageLive(_2);
// _2 = &'14_0rce (_1.0: D);
-// StorageLive(_4);
-// _4 = ((*_2).0: i32);
-// _0 = _4;
-// StorageDead(_4);
+// StorageLive(_3);
+// _3 = ((*_2).0: i32);
+// _0 = _3;
+// StorageDead(_3);
// StorageDead(_2);
// EndRegion('14_0rce);
// drop(_1) -> bb1;
// END RUST SOURCE
// START rustc.node4.SimplifyCfg-qualify-consts.after.mir
// fn main() -> () {
-// let mut _0: ();
-// let _1: D;
-// let _3: &'6_1rce D;
-// let mut _2: ();
-// let mut _4: ();
-// let mut _5: [closure@NodeId(22) r:&'6_1rce D];
-// let mut _6: &'6_1rce D;
-//
-// bb0: {
-// StorageLive(_1);
-// _1 = D::{{constructor}}(const 0i32,);
-// StorageLive(_3);
-// _3 = &'6_1rce _1;
-// StorageLive(_5);
-// StorageLive(_6);
-// _6 = _3;
-// _5 = [closure@NodeId(22)] { r: _6 };
-// StorageDead(_6);
-// _4 = const foo(_5) -> [return: bb2, unwind: bb3];
-// }
-// bb1: {
-// resume;
-// }
-// bb2: {
-// StorageDead(_5);
-// _0 = ();
-// StorageDead(_3);
-// EndRegion('6_1rce);
-// drop(_1) -> bb4;
-// }
-// bb3: {
-// EndRegion('6_1rce);
-// drop(_1) -> bb1;
-// }
-// bb4: {
-// StorageDead(_1);
-// return;
-// }
+// let mut _0: ();
+// let _1: D;
+// let _2: &'6_1rce D;
+// let mut _3: ();
+// let mut _4: [closure@NodeId(22) r:&'6_1rce D];
+// let mut _5: &'6_1rce D;
+// let mut _6: ();
+// bb0: {
+// StorageLive(_1);
+// _1 = D::{{constructor}}(const 0i32,);
+// StorageLive(_2);
+// _2 = &'6_1rce _1;
+// StorageLive(_4);
+// StorageLive(_5);
+// _5 = _2;
+// _4 = [closure@NodeId(22)] { r: _5 };
+// StorageDead(_5);
+// _3 = const foo(_4) -> [return: bb1, unwind: bb3];
+// }
+// bb1: {
+// StorageDead(_4);
+// _0 = ();
+// StorageDead(_2);
+// EndRegion('6_1rce);
+// drop(_1) -> bb4;
+// }
+// bb2: {
+// resume;
+// }
+// bb3: {
+// EndRegion('6_1rce);
+// drop(_1) -> bb2;
+// }
+// bb4: {
+// StorageDead(_1);
+// return;
+// }
// }
// END rustc.node4.SimplifyCfg-qualify-consts.after.mir
// END RUST SOURCE
// START rustc.node4.ElaborateDrops.after.mir
+// let mut _0: ();
+// let _1: ();
// let mut _2: S;
-// let mut _3: ();
+// let mut _3: S;
// let mut _4: S;
-// let mut _5: S;
+// let mut _5: ();
// let mut _6: bool;
//
// bb0: {
// END rustc.node4.ElaborateDrops.after.mir
// START rustc.node13.ElaborateDrops.after.mir
-// let mut _2: ();
-// let mut _4: ();
-// let mut _5: S;
+// let mut _0: ();
+// let _1: S;
+// let mut _2: S;
+// let mut _3: ();
+// let mut _4: S;
+// let mut _5: ();
// let mut _6: S;
// let mut _7: bool;
//