git.lizzy.rs Git - rust.git/commitdiff
MIR: s/lv(al(ue)?)?/place in function/variable/module names.
author    Eduard-Mihai Burtescu <edy.burt@gmail.com>
          Fri, 1 Dec 2017 12:39:51 +0000 (14:39 +0200)
committer Eduard-Mihai Burtescu <edy.burt@gmail.com>
          Fri, 1 Dec 2017 16:48:57 +0000 (18:48 +0200)
62 files changed:
src/librustc/ich/impls_mir.rs
src/librustc/mir/mod.rs
src/librustc/mir/tcx.rs
src/librustc/mir/visit.rs
src/librustc_mir/borrow_check.rs
src/librustc_mir/build/cfg.rs
src/librustc_mir/build/expr/as_lvalue.rs [deleted file]
src/librustc_mir/build/expr/as_operand.rs
src/librustc_mir/build/expr/as_place.rs [new file with mode: 0644]
src/librustc_mir/build/expr/as_rvalue.rs
src/librustc_mir/build/expr/as_temp.rs
src/librustc_mir/build/expr/mod.rs
src/librustc_mir/build/expr/stmt.rs
src/librustc_mir/build/matches/mod.rs
src/librustc_mir/build/matches/simplify.rs
src/librustc_mir/build/matches/test.rs
src/librustc_mir/build/matches/util.rs
src/librustc_mir/build/misc.rs
src/librustc_mir/build/mod.rs
src/librustc_mir/build/scope.rs
src/librustc_mir/dataflow/drop_flag_effects.rs
src/librustc_mir/dataflow/impls/borrows.rs
src/librustc_mir/dataflow/impls/mod.rs
src/librustc_mir/dataflow/impls/storage_liveness.rs
src/librustc_mir/dataflow/mod.rs
src/librustc_mir/dataflow/move_paths/builder.rs
src/librustc_mir/dataflow/move_paths/mod.rs
src/librustc_mir/hair/cx/expr.rs
src/librustc_mir/shim.rs
src/librustc_mir/transform/add_validation.rs
src/librustc_mir/transform/check_unsafety.rs
src/librustc_mir/transform/copy_prop.rs
src/librustc_mir/transform/deaggregator.rs
src/librustc_mir/transform/elaborate_drops.rs
src/librustc_mir/transform/generator.rs
src/librustc_mir/transform/inline.rs
src/librustc_mir/transform/instcombine.rs
src/librustc_mir/transform/lower_128bit.rs
src/librustc_mir/transform/nll/constraint_generation.rs
src/librustc_mir/transform/promote_consts.rs
src/librustc_mir/transform/qualify_consts.rs
src/librustc_mir/transform/rustc_peek.rs
src/librustc_mir/transform/type_check.rs
src/librustc_mir/util/alignment.rs
src/librustc_mir/util/def_use.rs
src/librustc_mir/util/elaborate_drops.rs
src/librustc_mir/util/patch.rs
src/librustc_mir/util/pretty.rs
src/librustc_passes/mir_stats.rs
src/librustc_trans/abi.rs
src/librustc_trans/asm.rs
src/librustc_trans/base.rs
src/librustc_trans/intrinsic.rs
src/librustc_trans/mir/analyze.rs
src/librustc_trans/mir/block.rs
src/librustc_trans/mir/constant.rs
src/librustc_trans/mir/lvalue.rs [deleted file]
src/librustc_trans/mir/mod.rs
src/librustc_trans/mir/operand.rs
src/librustc_trans/mir/place.rs [new file with mode: 0644]
src/librustc_trans/mir/rvalue.rs
src/librustc_trans/mir/statement.rs
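The commit message reads as a sed pattern: every identifier spelled `lv`, `lval`, or `lvalue` becomes `place`, matching the `Place` type the MIR already uses (hence the renamed files above: `as_lvalue.rs` → `as_place.rs`, `mir/lvalue.rs` → `mir/place.rs`). The sketch below is a self-contained toy illustration of the naming convention after the change, not the actual rustc definitions.

```rust
// Toy stand-in for the MIR `Place` type; the real definition lives in
// src/librustc/mir/mod.rs and carries a 'tcx lifetime plus projections.
#[derive(Debug)]
enum Place {
    Local(usize),
    Static(&'static str),
    Projection(Box<Place>),
}

// Before this commit a helper like this would have taken `lvalue: &Place`
// (or `lval`, or `lv`); afterwards the parameter is uniformly `place`.
fn describe_place(place: &Place) -> String {
    match place {
        Place::Local(index) => format!("_{}", index),
        Place::Static(name) => format!("static {}", name),
        Place::Projection(base) => format!("(projection of {})", describe_place(base)),
    }
}

fn main() {
    let local = Place::Projection(Box::new(Place::Local(1)));
    let global = Place::Static("FOO");
    println!("{}", describe_place(&local));  // (projection of _1)
    println!("{}", describe_place(&global)); // static FOO
}
```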

index 8ea60cf442c66e6cdfe96ac98be767022fc82501..331b44ac119c6b7f0fddb4704cd5585364b0e983 100644 (file)
@@ -272,24 +272,24 @@ fn hash_stable<W: StableHasherResult>(&self,
         mem::discriminant(self).hash_stable(hcx, hasher);
 
         match *self {
-            mir::StatementKind::Assign(ref lvalue, ref rvalue) => {
-                lvalue.hash_stable(hcx, hasher);
+            mir::StatementKind::Assign(ref place, ref rvalue) => {
+                place.hash_stable(hcx, hasher);
                 rvalue.hash_stable(hcx, hasher);
             }
-            mir::StatementKind::SetDiscriminant { ref lvalue, variant_index } => {
-                lvalue.hash_stable(hcx, hasher);
+            mir::StatementKind::SetDiscriminant { ref place, variant_index } => {
+                place.hash_stable(hcx, hasher);
                 variant_index.hash_stable(hcx, hasher);
             }
-            mir::StatementKind::StorageLive(ref lvalue) |
-            mir::StatementKind::StorageDead(ref lvalue) => {
-                lvalue.hash_stable(hcx, hasher);
+            mir::StatementKind::StorageLive(ref place) |
+            mir::StatementKind::StorageDead(ref place) => {
+                place.hash_stable(hcx, hasher);
             }
             mir::StatementKind::EndRegion(ref region_scope) => {
                 region_scope.hash_stable(hcx, hasher);
             }
-            mir::StatementKind::Validate(ref op, ref lvalues) => {
+            mir::StatementKind::Validate(ref op, ref places) => {
                 op.hash_stable(hcx, hasher);
-                lvalues.hash_stable(hcx, hasher);
+                places.hash_stable(hcx, hasher);
             }
             mir::StatementKind::Nop => {}
             mir::StatementKind::InlineAsm { ref asm, ref outputs, ref inputs } => {
@@ -309,7 +309,7 @@ fn hash_stable<W: StableHasherResult>(&self,
                                           hcx: &mut StableHashingContext<'gcx>,
                                           hasher: &mut StableHasher<W>)
     {
-        self.lval.hash_stable(hcx, hasher);
+        self.place.hash_stable(hcx, hasher);
         self.ty.hash_stable(hcx, hasher);
         self.re.hash_stable(hcx, hasher);
         self.mutbl.hash_stable(hcx, hasher);
@@ -330,8 +330,8 @@ fn hash_stable<W: StableHasherResult>(&self,
             mir::Place::Static(ref statik) => {
                 statik.hash_stable(hcx, hasher);
             }
-            mir::Place::Projection(ref lvalue_projection) => {
-                lvalue_projection.hash_stable(hcx, hasher);
+            mir::Place::Projection(ref place_projection) => {
+                place_projection.hash_stable(hcx, hasher);
             }
         }
     }
@@ -420,11 +420,11 @@ fn hash_stable<W: StableHasherResult>(&self,
         mem::discriminant(self).hash_stable(hcx, hasher);
 
         match *self {
-            mir::Operand::Copy(ref lvalue) => {
-                lvalue.hash_stable(hcx, hasher);
+            mir::Operand::Copy(ref place) => {
+                place.hash_stable(hcx, hasher);
             }
-            mir::Operand::Move(ref lvalue) => {
-                lvalue.hash_stable(hcx, hasher);
+            mir::Operand::Move(ref place) => {
+                place.hash_stable(hcx, hasher);
             }
             mir::Operand::Constant(ref constant) => {
                 constant.hash_stable(hcx, hasher);
@@ -447,13 +447,13 @@ fn hash_stable<W: StableHasherResult>(&self,
                 operand.hash_stable(hcx, hasher);
                 val.hash_stable(hcx, hasher);
             }
-            mir::Rvalue::Ref(region, borrow_kind, ref lvalue) => {
+            mir::Rvalue::Ref(region, borrow_kind, ref place) => {
                 region.hash_stable(hcx, hasher);
                 borrow_kind.hash_stable(hcx, hasher);
-                lvalue.hash_stable(hcx, hasher);
+                place.hash_stable(hcx, hasher);
             }
-            mir::Rvalue::Len(ref lvalue) => {
-                lvalue.hash_stable(hcx, hasher);
+            mir::Rvalue::Len(ref place) => {
+                place.hash_stable(hcx, hasher);
             }
             mir::Rvalue::Cast(cast_kind, ref operand, ty) => {
                 cast_kind.hash_stable(hcx, hasher);
@@ -470,8 +470,8 @@ fn hash_stable<W: StableHasherResult>(&self,
                 op.hash_stable(hcx, hasher);
                 operand.hash_stable(hcx, hasher);
             }
-            mir::Rvalue::Discriminant(ref lvalue) => {
-                lvalue.hash_stable(hcx, hasher);
+            mir::Rvalue::Discriminant(ref place) => {
+                place.hash_stable(hcx, hasher);
             }
             mir::Rvalue::NullaryOp(op, ty) => {
                 op.hash_stable(hcx, hasher);
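The hunks above all follow the same stable-hashing pattern: hash the enum discriminant first, then the fields of whichever variant is active, so only the bound variable names change. A rough self-contained analogue using std's `Hasher` instead of rustc's `StableHasher`, with a toy `Statement` type rather than the MIR one:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::mem;

// Toy statement type standing in for mir::StatementKind.
enum Statement {
    Assign(String, i64),
    StorageLive(u32),
    Nop,
}

// Hash the discriminant, then each field of the active variant,
// mirroring the shape of the hash_stable impls in this file.
fn hash_statement<H: Hasher>(stmt: &Statement, hasher: &mut H) {
    mem::discriminant(stmt).hash(hasher);
    match stmt {
        Statement::Assign(place, rvalue) => {
            place.hash(hasher);
            rvalue.hash(hasher);
        }
        Statement::StorageLive(local) => local.hash(hasher),
        Statement::Nop => {}
    }
}

fn main() {
    let statements = [
        Statement::Assign("_1".into(), 42),
        Statement::StorageLive(2),
        Statement::Nop,
    ];
    let mut hasher = DefaultHasher::new();
    for statement in &statements {
        hash_statement(statement, &mut hasher);
    }
    println!("combined hash = {:x}", hasher.finish());
}
```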
index 0123ff7c5298985c10dd75118cebcaaf4999adc5..0cbd945095a959fbabd7da8d403ce532f71d7939 100644 (file)
@@ -139,7 +139,7 @@ pub fn new(basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>,
                upvar_decls: Vec<UpvarDecl>,
                span: Span) -> Self
     {
-        // We need `arg_count` locals, and one for the return pointer
+        // We need `arg_count` locals, and one for the return place
         assert!(local_decls.len() >= arg_count + 1,
             "expected at least {} locals, got {}", arg_count + 1, local_decls.len());
 
@@ -200,7 +200,7 @@ pub fn local_kind(&self, local: Local) -> LocalKind {
         let index = local.0 as usize;
         if index == 0 {
             debug_assert!(self.local_decls[local].mutability == Mutability::Mut,
-                          "return pointer should be mutable");
+                          "return place should be mutable");
 
             LocalKind::ReturnPointer
         } else if index < self.arg_count + 1 {
@@ -249,7 +249,7 @@ pub fn args_iter(&self) -> impl Iterator<Item=Local> {
     }
 
     /// Returns an iterator over all user-defined variables and compiler-generated temporaries (all
-    /// locals that are neither arguments nor the return pointer).
+    /// locals that are neither arguments nor the return place).
     #[inline]
     pub fn vars_and_temps_iter(&self) -> impl Iterator<Item=Local> {
         let arg_count = self.arg_count;
@@ -280,7 +280,7 @@ pub fn source_info(&self, location: Location) -> &SourceInfo {
 
     /// Return the return type, it always return first element from `local_decls` array
     pub fn return_ty(&self) -> Ty<'tcx> {
-        self.local_decls[RETURN_POINTER].ty
+        self.local_decls[RETURN_PLACE].ty
     }
 }
 
@@ -417,7 +417,7 @@ pub enum BorrowKind {
 newtype_index!(Local
     {
         DEBUG_FORMAT = "_{}",
-        const RETURN_POINTER = 0,
+        const RETURN_PLACE = 0,
     });
 
 /// Classifies locals into categories. See `Mir::local_kind`.
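`RETURN_POINTER` becoming `RETURN_PLACE` is the same rename applied to the constant for local `_0`, the local that holds the return value; `Mir::return_ty` above simply reads that local's declared type. A minimal sketch of that indexing convention using plain slices (hypothetical helper, not the rustc `IndexVec` API):

```rust
// Local _0 is the return place; arguments and temporaries follow it.
// Renamed from RETURN_POINTER to RETURN_PLACE by this commit.
const RETURN_PLACE: usize = 0;

// Mirrors the shape of Mir::return_ty(): the first declared local is the
// return place, so its type is the function's return type.
fn return_ty(local_decls: &[&'static str]) -> &'static str {
    local_decls[RETURN_PLACE]
}

fn main() {
    // local_decls for something like `fn f(flag: bool) -> i32 { ... }`:
    // [_0: i32 (return place), _1: bool (argument), _2: u8 (temporary)]
    let local_decls = ["i32", "bool", "u8"];
    assert_eq!(return_ty(&local_decls), "i32");
}
```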
@@ -436,12 +436,12 @@ pub enum LocalKind {
 /// A MIR local.
 ///
 /// This can be a binding declared by the user, a temporary inserted by the compiler, a function
-/// argument, or the return pointer.
+/// argument, or the return place.
 #[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
 pub struct LocalDecl<'tcx> {
     /// `let mut x` vs `let x`.
     ///
-    /// Temporaries and the return pointer are always mutable.
+    /// Temporaries and the return place are always mutable.
     pub mutability: Mutability,
 
     /// True if this corresponds to a user-declared local variable.
@@ -520,11 +520,11 @@ pub fn new_internal(ty: Ty<'tcx>, span: Span) -> Self {
         }
     }
 
-    /// Builds a `LocalDecl` for the return pointer.
+    /// Builds a `LocalDecl` for the return place.
     ///
     /// This must be inserted into the `local_decls` list as the first local.
     #[inline]
-    pub fn new_return_pointer(return_ty: Ty, span: Span) -> LocalDecl {
+    pub fn new_return_place(return_ty: Ty, span: Span) -> LocalDecl {
         LocalDecl {
             mutability: Mutability::Mut,
             ty: return_ty,
@@ -634,8 +634,8 @@ pub enum TerminatorKind<'tcx> {
     /// continue. Emitted by build::scope::diverge_cleanup.
     Resume,
 
-    /// Indicates a normal return. The return pointer lvalue should
-    /// have been filled in by now. This should occur at most once.
+    /// Indicates a normal return. The return place should have
+    /// been filled in by now. This should occur at most once.
     Return,
 
     /// Indicates a terminator that can never be reached.
@@ -650,7 +650,7 @@ pub enum TerminatorKind<'tcx> {
 
     /// Drop the Place and assign the new value over it. This ensures
     /// that the assignment to LV occurs *even if* the destructor for
-    /// lvalue unwinds. Its semantics are best explained by by the
+    /// place unwinds. Its semantics are best explained by by the
     /// elaboration:
     ///
     /// ```
@@ -878,7 +878,7 @@ pub fn fmt_head<W: Write>(&self, fmt: &mut W) -> fmt::Result {
         use self::TerminatorKind::*;
         match *self {
             Goto { .. } => write!(fmt, "goto"),
-            SwitchInt { discr: ref lv, .. } => write!(fmt, "switchInt({:?})", lv),
+            SwitchInt { discr: ref place, .. } => write!(fmt, "switchInt({:?})", place),
             Return => write!(fmt, "return"),
             GeneratorDrop => write!(fmt, "generator_drop"),
             Resume => write!(fmt, "resume"),
@@ -1006,7 +1006,7 @@ pub enum StatementKind<'tcx> {
     Assign(Place<'tcx>, Rvalue<'tcx>),
 
     /// Write the discriminant for a variant to the enum Place.
-    SetDiscriminant { lvalue: Place<'tcx>, variant_index: usize },
+    SetDiscriminant { place: Place<'tcx>, variant_index: usize },
 
     /// Start a live range for the storage of the local.
     StorageLive(Local),
@@ -1021,7 +1021,7 @@ pub enum StatementKind<'tcx> {
         inputs: Vec<Operand<'tcx>>
     },
 
-    /// Assert the given lvalues to be valid inhabitants of their type.  These statements are
+    /// Assert the given places to be valid inhabitants of their type.  These statements are
     /// currently only interpreted by miri and only generated when "-Z mir-emit-validate" is passed.
     /// See <https://internals.rust-lang.org/t/types-as-contracts/5562/73> for more details.
     Validate(ValidationOp, Vec<ValidationOperand<'tcx, Place<'tcx>>>),
@@ -1038,9 +1038,9 @@ pub enum StatementKind<'tcx> {
 /// `Validate` statement.
 #[derive(Copy, Clone, RustcEncodable, RustcDecodable, PartialEq, Eq)]
 pub enum ValidationOp {
-    /// Recursively traverse the lvalue following the type and validate that all type
+    /// Recursively traverse the place following the type and validate that all type
     /// invariants are maintained.  Furthermore, acquire exclusive/read-only access to the
-    /// memory reachable from the lvalue.
+    /// memory reachable from the place.
     Acquire,
     /// Recursive traverse the *mutable* part of the type and relinquish all exclusive
     /// access.
@@ -1065,7 +1065,7 @@ fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
 // This is generic so that it can be reused by miri
 #[derive(Clone, RustcEncodable, RustcDecodable)]
 pub struct ValidationOperand<'tcx, T> {
-    pub lval: T,
+    pub place: T,
     pub ty: Ty<'tcx>,
     pub re: Option<region::Scope>,
     pub mutbl: hir::Mutability,
@@ -1073,7 +1073,7 @@ pub struct ValidationOperand<'tcx, T> {
 
 impl<'tcx, T: Debug> Debug for ValidationOperand<'tcx, T> {
     fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
-        write!(fmt, "{:?}: {:?}", self.lval, self.ty)?;
+        write!(fmt, "{:?}: {:?}", self.place, self.ty)?;
         if let Some(ce) = self.re {
             // (reuse lifetime rendering policy from ppaux.)
             write!(fmt, "/{}", ty::ReScope(ce))?;
@@ -1089,14 +1089,14 @@ impl<'tcx> Debug for Statement<'tcx> {
     fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
         use self::StatementKind::*;
         match self.kind {
-            Assign(ref lv, ref rv) => write!(fmt, "{:?} = {:?}", lv, rv),
+            Assign(ref place, ref rv) => write!(fmt, "{:?} = {:?}", place, rv),
             // (reuse lifetime rendering policy from ppaux.)
             EndRegion(ref ce) => write!(fmt, "EndRegion({})", ty::ReScope(*ce)),
-            Validate(ref op, ref lvalues) => write!(fmt, "Validate({:?}, {:?})", op, lvalues),
-            StorageLive(ref lv) => write!(fmt, "StorageLive({:?})", lv),
-            StorageDead(ref lv) => write!(fmt, "StorageDead({:?})", lv),
-            SetDiscriminant{lvalue: ref lv, variant_index: index} => {
-                write!(fmt, "discriminant({:?}) = {:?}", lv, index)
+            Validate(ref op, ref places) => write!(fmt, "Validate({:?}, {:?})", op, places),
+            StorageLive(ref place) => write!(fmt, "StorageLive({:?})", place),
+            StorageDead(ref place) => write!(fmt, "StorageDead({:?})", place),
+            SetDiscriminant { ref place, variant_index } => {
+                write!(fmt, "discriminant({:?}) = {:?}", place, variant_index)
             },
             InlineAsm { ref asm, ref outputs, ref inputs } => {
                 write!(fmt, "asm!({:?} : {:?} : {:?})", asm, outputs, inputs)
@@ -1119,7 +1119,7 @@ pub enum Place<'tcx> {
     /// static or static mut variable
     Static(Box<Static<'tcx>>),
 
-    /// projection out of an lvalue (access a field, deref a pointer, etc)
+    /// projection out of a place (access a field, deref a pointer, etc)
     Projection(Box<PlaceProjection<'tcx>>),
 }
 
@@ -1184,11 +1184,11 @@ pub enum ProjectionElem<'tcx, V, T> {
     Downcast(&'tcx AdtDef, usize),
 }
 
-/// Alias for projections as they appear in lvalues, where the base is an lvalue
+/// Alias for projections as they appear in places, where the base is a place
 /// and the index is a local.
 pub type PlaceProjection<'tcx> = Projection<'tcx, Place<'tcx>, Local, Ty<'tcx>>;
 
-/// Alias for projections as they appear in lvalues, where the base is an lvalue
+/// Alias for projections as they appear in places, where the base is a place
 /// and the index is a local.
 pub type PlaceElem<'tcx> = ProjectionElem<'tcx, Local, Ty<'tcx>>;
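`PlaceProjection` and `PlaceElem` describe how compound places are built: a base place plus a projection element (deref, field access, indexing, downcast). As a rough illustration under toy types (not the rustc `Projection`/`ProjectionElem` definitions), an expression like `(*_1).2[_3]` nests one projection inside another:

```rust
// Toy versions of Place and ProjectionElem, just to show the nesting.
#[derive(Debug)]
enum Place {
    Local(usize),
    Projection(Box<Place>, ProjectionElem),
}

#[derive(Debug)]
enum ProjectionElem {
    Deref,
    Field(usize),
    Index(usize),
}

fn main() {
    // Roughly `(*_1).2[_3]`: deref the local, take field 2, index by a local.
    let place = Place::Projection(
        Box::new(Place::Projection(
            Box::new(Place::Projection(
                Box::new(Place::Local(1)),
                ProjectionElem::Deref,
            )),
            ProjectionElem::Field(2),
        )),
        ProjectionElem::Index(3),
    );
    println!("{:?}", place);
}
```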
 
@@ -1273,13 +1273,13 @@ pub struct VisibilityScopeData {
 // Operands
 
 /// These are values that can appear inside an rvalue (or an index
-/// lvalue). They are intentionally limited to prevent rvalues from
+/// place). They are intentionally limited to prevent rvalues from
 /// being nested in one another.
 #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable)]
 pub enum Operand<'tcx> {
     /// Copy: The value must be available for use afterwards.
     ///
-    /// This implies that the type of the lvalue must be `Copy`; this is true
+    /// This implies that the type of the place must be `Copy`; this is true
     /// by construction during build, but also checked by the MIR type checker.
     Copy(Place<'tcx>),
     /// Move: The value (including old borrows of it) will not be used again.
@@ -1296,8 +1296,8 @@ fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
         use self::Operand::*;
         match *self {
             Constant(ref a) => write!(fmt, "{:?}", a),
-            Copy(ref lv) => write!(fmt, "{:?}", lv),
-            Move(ref lv) => write!(fmt, "move {:?}", lv),
+            Copy(ref place) => write!(fmt, "{:?}", place),
+            Move(ref place) => write!(fmt, "move {:?}", place),
         }
     }
 }
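The `Operand` Debug impl keeps the same output format (`_1` for a copy, `move _1` for a move); only the variable name inside the match changes. A self-contained imitation of that formatting, using a toy `Operand` over string places rather than the MIR types:

```rust
use std::fmt;

// Toy Operand: copies print the place as-is, moves get a `move ` prefix,
// matching the Debug output produced by the impl in this hunk.
enum Operand {
    Copy(String),
    Move(String),
}

impl fmt::Debug for Operand {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Operand::Copy(place) => write!(fmt, "{}", place),
            Operand::Move(place) => write!(fmt, "move {}", place),
        }
    }
}

fn main() {
    println!("{:?}", Operand::Copy("_1".to_owned())); // _1
    println!("{:?}", Operand::Move("_2".to_owned())); // move _2
}
```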
@@ -1470,18 +1470,20 @@ fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
         use self::Rvalue::*;
 
         match *self {
-            Use(ref lvalue) => write!(fmt, "{:?}", lvalue),
+            Use(ref place) => write!(fmt, "{:?}", place),
             Repeat(ref a, ref b) => write!(fmt, "[{:?}; {:?}]", a, b),
             Len(ref a) => write!(fmt, "Len({:?})", a),
-            Cast(ref kind, ref lv, ref ty) => write!(fmt, "{:?} as {:?} ({:?})", lv, ty, kind),
+            Cast(ref kind, ref place, ref ty) => {
+                write!(fmt, "{:?} as {:?} ({:?})", place, ty, kind)
+            }
             BinaryOp(ref op, ref a, ref b) => write!(fmt, "{:?}({:?}, {:?})", op, a, b),
             CheckedBinaryOp(ref op, ref a, ref b) => {
                 write!(fmt, "Checked{:?}({:?}, {:?})", op, a, b)
             }
             UnaryOp(ref op, ref a) => write!(fmt, "{:?}({:?})", op, a),
-            Discriminant(ref lval) => write!(fmt, "discriminant({:?})", lval),
+            Discriminant(ref place) => write!(fmt, "discriminant({:?})", place),
             NullaryOp(ref op, ref t) => write!(fmt, "{:?}({:?})", op, t),
-            Ref(region, borrow_kind, ref lv) => {
+            Ref(region, borrow_kind, ref place) => {
                 let kind_str = match borrow_kind {
                     BorrowKind::Shared => "",
                     BorrowKind::Mut | BorrowKind::Unique => "mut ",
@@ -1496,26 +1498,26 @@ fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
                     // Do not even print 'static
                     "".to_owned()
                 };
-                write!(fmt, "&{}{}{:?}", region, kind_str, lv)
+                write!(fmt, "&{}{}{:?}", region, kind_str, place)
             }
 
-            Aggregate(ref kind, ref lvs) => {
-                fn fmt_tuple(fmt: &mut Formatter, lvs: &[Operand]) -> fmt::Result {
+            Aggregate(ref kind, ref places) => {
+                fn fmt_tuple(fmt: &mut Formatter, places: &[Operand]) -> fmt::Result {
                     let mut tuple_fmt = fmt.debug_tuple("");
-                    for lv in lvs {
-                        tuple_fmt.field(lv);
+                    for place in places {
+                        tuple_fmt.field(place);
                     }
                     tuple_fmt.finish()
                 }
 
                 match **kind {
-                    AggregateKind::Array(_) => write!(fmt, "{:?}", lvs),
+                    AggregateKind::Array(_) => write!(fmt, "{:?}", places),
 
                     AggregateKind::Tuple => {
-                        match lvs.len() {
+                        match places.len() {
                             0 => write!(fmt, "()"),
-                            1 => write!(fmt, "({:?},)", lvs[0]),
-                            _ => fmt_tuple(fmt, lvs),
+                            1 => write!(fmt, "({:?},)", places[0]),
+                            _ => fmt_tuple(fmt, places),
                         }
                     }
 
@@ -1526,11 +1528,11 @@ fn fmt_tuple(fmt: &mut Formatter, lvs: &[Operand]) -> fmt::Result {
 
                         match variant_def.ctor_kind {
                             CtorKind::Const => Ok(()),
-                            CtorKind::Fn => fmt_tuple(fmt, lvs),
+                            CtorKind::Fn => fmt_tuple(fmt, places),
                             CtorKind::Fictive => {
                                 let mut struct_fmt = fmt.debug_struct("");
-                                for (field, lv) in variant_def.fields.iter().zip(lvs) {
-                                    struct_fmt.field(&field.name.as_str(), lv);
+                                for (field, place) in variant_def.fields.iter().zip(places) {
+                                    struct_fmt.field(&field.name.as_str(), place);
                                 }
                                 struct_fmt.finish()
                             }
@@ -1547,9 +1549,9 @@ fn fmt_tuple(fmt: &mut Formatter, lvs: &[Operand]) -> fmt::Result {
                             let mut struct_fmt = fmt.debug_struct(&name);
 
                             tcx.with_freevars(node_id, |freevars| {
-                                for (freevar, lv) in freevars.iter().zip(lvs) {
+                                for (freevar, place) in freevars.iter().zip(places) {
                                     let var_name = tcx.hir.name(freevar.var_id());
-                                    struct_fmt.field(&var_name.as_str(), lv);
+                                    struct_fmt.field(&var_name.as_str(), place);
                                 }
                             });
 
@@ -1565,14 +1567,14 @@ fn fmt_tuple(fmt: &mut Formatter, lvs: &[Operand]) -> fmt::Result {
                             let mut struct_fmt = fmt.debug_struct(&name);
 
                             tcx.with_freevars(node_id, |freevars| {
-                                for (freevar, lv) in freevars.iter().zip(lvs) {
+                                for (freevar, place) in freevars.iter().zip(places) {
                                     let var_name = tcx.hir.name(freevar.var_id());
-                                    struct_fmt.field(&var_name.as_str(), lv);
+                                    struct_fmt.field(&var_name.as_str(), place);
                                 }
-                                struct_fmt.field("$state", &lvs[freevars.len()]);
-                                for i in (freevars.len() + 1)..lvs.len() {
+                                struct_fmt.field("$state", &places[freevars.len()]);
+                                for i in (freevars.len() + 1)..places.len() {
                                     struct_fmt.field(&format!("${}", i - freevars.len() - 1),
-                                                     &lvs[i]);
+                                                     &places[i]);
                                 }
                             });
 
@@ -1831,7 +1833,7 @@ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
 impl<'tcx> TypeFoldable<'tcx> for ValidationOperand<'tcx, Place<'tcx>> {
     fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
         ValidationOperand {
-            lval: self.lval.fold_with(folder),
+            place: self.place.fold_with(folder),
             ty: self.ty.fold_with(folder),
             re: self.re,
             mutbl: self.mutbl,
@@ -1839,7 +1841,7 @@ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F)
     }
 
     fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
-        self.lval.visit_with(visitor) || self.ty.visit_with(visitor)
+        self.place.visit_with(visitor) || self.ty.visit_with(visitor)
     }
 }
 
@@ -1848,9 +1850,9 @@ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F)
         use mir::StatementKind::*;
 
         let kind = match self.kind {
-            Assign(ref lval, ref rval) => Assign(lval.fold_with(folder), rval.fold_with(folder)),
-            SetDiscriminant { ref lvalue, variant_index } => SetDiscriminant {
-                lvalue: lvalue.fold_with(folder),
+            Assign(ref place, ref rval) => Assign(place.fold_with(folder), rval.fold_with(folder)),
+            SetDiscriminant { ref place, variant_index } => SetDiscriminant {
+                place: place.fold_with(folder),
                 variant_index,
             },
             StorageLive(ref local) => StorageLive(local.fold_with(folder)),
@@ -1867,9 +1869,9 @@ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F)
             // trait with a `fn fold_scope`.
             EndRegion(ref region_scope) => EndRegion(region_scope.clone()),
 
-            Validate(ref op, ref lvals) =>
+            Validate(ref op, ref places) =>
                 Validate(op.clone(),
-                         lvals.iter().map(|operand| operand.fold_with(folder)).collect()),
+                         places.iter().map(|operand| operand.fold_with(folder)).collect()),
 
             Nop => Nop,
         };
@@ -1883,8 +1885,8 @@ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
         use mir::StatementKind::*;
 
         match self.kind {
-            Assign(ref lval, ref rval) => { lval.visit_with(visitor) || rval.visit_with(visitor) }
-            SetDiscriminant { ref lvalue, .. } => lvalue.visit_with(visitor),
+            Assign(ref place, ref rval) => { place.visit_with(visitor) || rval.visit_with(visitor) }
+            SetDiscriminant { ref place, .. } => place.visit_with(visitor),
             StorageLive(ref local) |
             StorageDead(ref local) => local.visit_with(visitor),
             InlineAsm { ref outputs, ref inputs, .. } =>
@@ -1896,8 +1898,8 @@ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
             // trait with a `fn visit_scope`.
             EndRegion(ref _scope) => false,
 
-            Validate(ref _op, ref lvalues) =>
-                lvalues.iter().any(|ty_and_lvalue| ty_and_lvalue.visit_with(visitor)),
+            Validate(ref _op, ref places) =>
+                places.iter().any(|ty_and_place| ty_and_place.visit_with(visitor)),
 
             Nop => false,
         }
@@ -2035,15 +2037,16 @@ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F)
         match *self {
             Use(ref op) => Use(op.fold_with(folder)),
             Repeat(ref op, len) => Repeat(op.fold_with(folder), len),
-            Ref(region, bk, ref lval) => Ref(region.fold_with(folder), bk, lval.fold_with(folder)),
-            Len(ref lval) => Len(lval.fold_with(folder)),
+            Ref(region, bk, ref place) =>
+                Ref(region.fold_with(folder), bk, place.fold_with(folder)),
+            Len(ref place) => Len(place.fold_with(folder)),
             Cast(kind, ref op, ty) => Cast(kind, op.fold_with(folder), ty.fold_with(folder)),
             BinaryOp(op, ref rhs, ref lhs) =>
                 BinaryOp(op, rhs.fold_with(folder), lhs.fold_with(folder)),
             CheckedBinaryOp(op, ref rhs, ref lhs) =>
                 CheckedBinaryOp(op, rhs.fold_with(folder), lhs.fold_with(folder)),
             UnaryOp(op, ref val) => UnaryOp(op, val.fold_with(folder)),
-            Discriminant(ref lval) => Discriminant(lval.fold_with(folder)),
+            Discriminant(ref place) => Discriminant(place.fold_with(folder)),
             NullaryOp(op, ty) => NullaryOp(op, ty.fold_with(folder)),
             Aggregate(ref kind, ref fields) => {
                 let kind = box match **kind {
@@ -2068,14 +2071,14 @@ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
         match *self {
             Use(ref op) => op.visit_with(visitor),
             Repeat(ref op, _) => op.visit_with(visitor),
-            Ref(region, _, ref lval) => region.visit_with(visitor) || lval.visit_with(visitor),
-            Len(ref lval) => lval.visit_with(visitor),
+            Ref(region, _, ref place) => region.visit_with(visitor) || place.visit_with(visitor),
+            Len(ref place) => place.visit_with(visitor),
             Cast(_, ref op, ty) => op.visit_with(visitor) || ty.visit_with(visitor),
             BinaryOp(_, ref rhs, ref lhs) |
             CheckedBinaryOp(_, ref rhs, ref lhs) =>
                 rhs.visit_with(visitor) || lhs.visit_with(visitor),
             UnaryOp(_, ref val) => val.visit_with(visitor),
-            Discriminant(ref lval) => lval.visit_with(visitor),
+            Discriminant(ref place) => place.visit_with(visitor),
             NullaryOp(_, ty) => ty.visit_with(visitor),
             Aggregate(ref kind, ref fields) => {
                 (match **kind {
@@ -2094,16 +2097,16 @@ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
 impl<'tcx> TypeFoldable<'tcx> for Operand<'tcx> {
     fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
         match *self {
-            Operand::Copy(ref lval) => Operand::Copy(lval.fold_with(folder)),
-            Operand::Move(ref lval) => Operand::Move(lval.fold_with(folder)),
+            Operand::Copy(ref place) => Operand::Copy(place.fold_with(folder)),
+            Operand::Move(ref place) => Operand::Move(place.fold_with(folder)),
             Operand::Constant(ref c) => Operand::Constant(c.fold_with(folder)),
         }
     }
 
     fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
         match *self {
-            Operand::Copy(ref lval) |
-            Operand::Move(ref lval) => lval.visit_with(visitor),
+            Operand::Copy(ref place) |
+            Operand::Move(ref place) => place.visit_with(visitor),
             Operand::Constant(ref c) => c.visit_with(visitor)
         }
     }
index 6f4c156d54158df1d7b2b5cc2b95ee2a57780c7b..23f360d5c3922a6e6a998675df01594e70ac18d7 100644 (file)
@@ -151,11 +151,11 @@ pub fn ty<'a, 'gcx, D>(&self, local_decls: &D, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> T
             Rvalue::Repeat(ref operand, count) => {
                 tcx.mk_array_const_usize(operand.ty(local_decls, tcx), count)
             }
-            Rvalue::Ref(reg, bk, ref lv) => {
-                let lv_ty = lv.ty(local_decls, tcx).to_ty(tcx);
+            Rvalue::Ref(reg, bk, ref place) => {
+                let place_ty = place.ty(local_decls, tcx).to_ty(tcx);
                 tcx.mk_ref(reg,
                     ty::TypeAndMut {
-                        ty: lv_ty,
+                        ty: place_ty,
                         mutbl: bk.to_mutbl_lossy()
                     }
                 )
@@ -177,8 +177,8 @@ pub fn ty<'a, 'gcx, D>(&self, local_decls: &D, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> T
             Rvalue::UnaryOp(UnOp::Neg, ref operand) => {
                 operand.ty(local_decls, tcx)
             }
-            Rvalue::Discriminant(ref lval) => {
-                let ty = lval.ty(local_decls, tcx).to_ty(tcx);
+            Rvalue::Discriminant(ref place) => {
+                let ty = place.ty(local_decls, tcx).to_ty(tcx);
                 if let ty::TyAdt(adt_def, _) = ty.sty {
                     adt_def.repr.discr_type().to_ty(tcx)
                 } else {
index 1a542cd1d615880429ba75162d45e4f2da0bbf35..d90bf1b61a7d31bde107340606270a3dbc48b878 100644 (file)
@@ -107,10 +107,10 @@ fn visit_statement(&mut self,
 
             fn visit_assign(&mut self,
                             block: BasicBlock,
-                            lvalue: & $($mutability)* Place<'tcx>,
+                            place: & $($mutability)* Place<'tcx>,
                             rvalue: & $($mutability)* Rvalue<'tcx>,
                             location: Location) {
-                self.super_assign(block, lvalue, rvalue, location);
+                self.super_assign(block, place, rvalue, location);
             }
 
             fn visit_terminator(&mut self,
@@ -145,11 +145,11 @@ fn visit_operand(&mut self,
                 self.super_operand(operand, location);
             }
 
-            fn visit_lvalue(&mut self,
-                            lvalue: & $($mutability)* Place<'tcx>,
+            fn visit_place(&mut self,
+                            place: & $($mutability)* Place<'tcx>,
                             context: PlaceContext<'tcx>,
                             location: Location) {
-                self.super_lvalue(lvalue, context, location);
+                self.super_place(place, context, location);
             }
 
             fn visit_static(&mut self,
@@ -160,17 +160,17 @@ fn visit_static(&mut self,
             }
 
             fn visit_projection(&mut self,
-                                lvalue: & $($mutability)* PlaceProjection<'tcx>,
+                                place: & $($mutability)* PlaceProjection<'tcx>,
                                 context: PlaceContext<'tcx>,
                                 location: Location) {
-                self.super_projection(lvalue, context, location);
+                self.super_projection(place, context, location);
             }
 
             fn visit_projection_elem(&mut self,
-                                     lvalue: & $($mutability)* PlaceElem<'tcx>,
+                                     place: & $($mutability)* PlaceElem<'tcx>,
                                      context: PlaceContext<'tcx>,
                                      location: Location) {
-                self.super_projection_elem(lvalue, context, location);
+                self.super_projection_elem(place, context, location);
             }
 
             fn visit_branch(&mut self,
@@ -350,21 +350,21 @@ fn super_statement(&mut self,
 
                 self.visit_source_info(source_info);
                 match *kind {
-                    StatementKind::Assign(ref $($mutability)* lvalue,
+                    StatementKind::Assign(ref $($mutability)* place,
                                           ref $($mutability)* rvalue) => {
-                        self.visit_assign(block, lvalue, rvalue, location);
+                        self.visit_assign(block, place, rvalue, location);
                     }
                     StatementKind::EndRegion(_) => {}
-                    StatementKind::Validate(_, ref $($mutability)* lvalues) => {
-                        for operand in lvalues {
-                            self.visit_lvalue(& $($mutability)* operand.lval,
+                    StatementKind::Validate(_, ref $($mutability)* places) => {
+                        for operand in places {
+                            self.visit_place(& $($mutability)* operand.place,
                                               PlaceContext::Validate, location);
                             self.visit_ty(& $($mutability)* operand.ty,
                                           TyContext::Location(location));
                         }
                     }
-                    StatementKind::SetDiscriminant{ ref $($mutability)* lvalue, .. } => {
-                        self.visit_lvalue(lvalue, PlaceContext::Store, location);
+                    StatementKind::SetDiscriminant{ ref $($mutability)* place, .. } => {
+                        self.visit_place(place, PlaceContext::Store, location);
                     }
                     StatementKind::StorageLive(ref $($mutability)* local) => {
                         self.visit_local(local, PlaceContext::StorageLive, location);
@@ -376,7 +376,7 @@ fn super_statement(&mut self,
                                                ref $($mutability)* inputs,
                                                asm: _ } => {
                         for output in & $($mutability)* outputs[..] {
-                            self.visit_lvalue(output, PlaceContext::Store, location);
+                            self.visit_place(output, PlaceContext::Store, location);
                         }
                         for input in & $($mutability)* inputs[..] {
                             self.visit_operand(input, location);
@@ -388,10 +388,10 @@ fn super_statement(&mut self,
 
             fn super_assign(&mut self,
                             _block: BasicBlock,
-                            lvalue: &$($mutability)* Place<'tcx>,
+                            place: &$($mutability)* Place<'tcx>,
                             rvalue: &$($mutability)* Rvalue<'tcx>,
                             location: Location) {
-                self.visit_lvalue(lvalue, PlaceContext::Store, location);
+                self.visit_place(place, PlaceContext::Store, location);
                 self.visit_rvalue(rvalue, location);
             }
 
@@ -440,7 +440,7 @@ fn super_terminator_kind(&mut self,
                     TerminatorKind::Drop { ref $($mutability)* location,
                                            target,
                                            unwind } => {
-                        self.visit_lvalue(location, PlaceContext::Drop, source_location);
+                        self.visit_place(location, PlaceContext::Drop, source_location);
                         self.visit_branch(block, target);
                         unwind.map(|t| self.visit_branch(block, t));
                     }
@@ -449,7 +449,7 @@ fn super_terminator_kind(&mut self,
                                                      ref $($mutability)* value,
                                                      target,
                                                      unwind } => {
-                        self.visit_lvalue(location, PlaceContext::Drop, source_location);
+                        self.visit_place(location, PlaceContext::Drop, source_location);
                         self.visit_operand(value, source_location);
                         self.visit_branch(block, target);
                         unwind.map(|t| self.visit_branch(block, t));
@@ -464,7 +464,7 @@ fn super_terminator_kind(&mut self,
                             self.visit_operand(arg, source_location);
                         }
                         if let Some((ref $($mutability)* destination, target)) = *destination {
-                            self.visit_lvalue(destination, PlaceContext::Call, source_location);
+                            self.visit_place(destination, PlaceContext::Call, source_location);
                             self.visit_branch(block, target);
                         }
                         cleanup.map(|t| self.visit_branch(block, t));
@@ -532,14 +532,14 @@ fn super_rvalue(&mut self,
 
                     Rvalue::Ref(ref $($mutability)* r, bk, ref $($mutability)* path) => {
                         self.visit_region(r, location);
-                        self.visit_lvalue(path, PlaceContext::Borrow {
+                        self.visit_place(path, PlaceContext::Borrow {
                             region: *r,
                             kind: bk
                         }, location);
                     }
 
                     Rvalue::Len(ref $($mutability)* path) => {
-                        self.visit_lvalue(path, PlaceContext::Inspect, location);
+                        self.visit_place(path, PlaceContext::Inspect, location);
                     }
 
                     Rvalue::Cast(_cast_kind,
@@ -563,8 +563,8 @@ fn super_rvalue(&mut self,
                         self.visit_operand(op, location);
                     }
 
-                    Rvalue::Discriminant(ref $($mutability)* lvalue) => {
-                        self.visit_lvalue(lvalue, PlaceContext::Inspect, location);
+                    Rvalue::Discriminant(ref $($mutability)* place) => {
+                        self.visit_place(place, PlaceContext::Inspect, location);
                     }
 
                     Rvalue::NullaryOp(_op, ref $($mutability)* ty) => {
@@ -611,11 +611,11 @@ fn super_operand(&mut self,
                              operand: & $($mutability)* Operand<'tcx>,
                              location: Location) {
                 match *operand {
-                    Operand::Copy(ref $($mutability)* lvalue) => {
-                        self.visit_lvalue(lvalue, PlaceContext::Copy, location);
+                    Operand::Copy(ref $($mutability)* place) => {
+                        self.visit_place(place, PlaceContext::Copy, location);
                     }
-                    Operand::Move(ref $($mutability)* lvalue) => {
-                        self.visit_lvalue(lvalue, PlaceContext::Move, location);
+                    Operand::Move(ref $($mutability)* place) => {
+                        self.visit_place(place, PlaceContext::Move, location);
                     }
                     Operand::Constant(ref $($mutability)* constant) => {
                         self.visit_constant(constant, location);
@@ -623,11 +623,11 @@ fn super_operand(&mut self,
                 }
             }
 
-            fn super_lvalue(&mut self,
-                            lvalue: & $($mutability)* Place<'tcx>,
+            fn super_place(&mut self,
+                            place: & $($mutability)* Place<'tcx>,
                             context: PlaceContext<'tcx>,
                             location: Location) {
-                match *lvalue {
+                match *place {
                     Place::Local(ref $($mutability)* local) => {
                         self.visit_local(local, context, location);
                     }
@@ -665,7 +665,7 @@ fn super_projection(&mut self,
                 } else {
                     PlaceContext::Projection(Mutability::Not)
                 };
-                self.visit_lvalue(base, context, location);
+                self.visit_place(base, context, location);
                 self.visit_projection_elem(elem, context, location);
             }
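The visitor macro here generates paired hooks: `visit_place` (formerly `visit_lvalue`) is the overridable entry point, and `super_place` (formerly `super_lvalue`) performs the default recursion into projection bases, so an override can do its own work and then delegate. A toy version of that pattern under stand-in types (not the macro-generated `Visitor`/`MutVisitor` traits):

```rust
#[derive(Debug)]
enum Place {
    Local(usize),
    Projection(Box<Place>),
}

// visit_place is the hook users override; super_place is the default
// recursion, mirroring the visit_*/super_* split in mir/visit.rs.
trait Visitor {
    fn visit_place(&mut self, place: &Place) {
        self.super_place(place);
    }
    fn super_place(&mut self, place: &Place) {
        if let Place::Projection(base) = place {
            self.visit_place(base);
        }
    }
}

struct LocalCounter {
    locals_seen: usize,
}

impl Visitor for LocalCounter {
    fn visit_place(&mut self, place: &Place) {
        if let Place::Local(_) = place {
            self.locals_seen += 1;
        }
        self.super_place(place); // keep the default traversal
    }
}

fn main() {
    let mut counter = LocalCounter { locals_seen: 0 };
    counter.visit_place(&Place::Projection(Box::new(Place::Local(3))));
    assert_eq!(counter.locals_seen, 1);
}
```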
 
@@ -847,10 +847,10 @@ pub enum PlaceContext<'tcx> {
     // Being borrowed
     Borrow { region: Region<'tcx>, kind: BorrowKind },
 
-    // Used as base for another lvalue, e.g. `x` in `x.y`.
+    // Used as base for another place, e.g. `x` in `x.y`.
     //
     // The `Mutability` argument specifies whether the projection is being performed in order to
-    // (potentially) mutate the lvalue. For example, the projection `x.y` is marked as a mutation
+    // (potentially) mutate the place. For example, the projection `x.y` is marked as a mutation
     // in these cases:
     //
     //     x.y = ...;
@@ -875,7 +875,7 @@ pub enum PlaceContext<'tcx> {
 }
 
 impl<'tcx> PlaceContext<'tcx> {
-    /// Returns true if this lvalue context represents a drop.
+    /// Returns true if this place context represents a drop.
     pub fn is_drop(&self) -> bool {
         match *self {
             PlaceContext::Drop => true,
@@ -883,7 +883,7 @@ pub fn is_drop(&self) -> bool {
         }
     }
 
-    /// Returns true if this lvalue context represents a storage live or storage dead marker.
+    /// Returns true if this place context represents a storage live or storage dead marker.
     pub fn is_storage_marker(&self) -> bool {
         match *self {
             PlaceContext::StorageLive | PlaceContext::StorageDead => true,
@@ -891,7 +891,7 @@ pub fn is_storage_marker(&self) -> bool {
         }
     }
 
-    /// Returns true if this lvalue context represents a storage live marker.
+    /// Returns true if this place context represents a storage live marker.
     pub fn is_storage_live_marker(&self) -> bool {
         match *self {
             PlaceContext::StorageLive => true,
@@ -899,7 +899,7 @@ pub fn is_storage_live_marker(&self) -> bool {
         }
     }
 
-    /// Returns true if this lvalue context represents a storage dead marker.
+    /// Returns true if this place context represents a storage dead marker.
     pub fn is_storage_dead_marker(&self) -> bool {
         match *self {
             PlaceContext::StorageDead => true,
@@ -907,7 +907,7 @@ pub fn is_storage_dead_marker(&self) -> bool {
         }
     }
 
-    /// Returns true if this lvalue context represents a use that potentially changes the value.
+    /// Returns true if this place context represents a use that potentially changes the value.
     pub fn is_mutating_use(&self) -> bool {
         match *self {
             PlaceContext::Store | PlaceContext::Call |
@@ -924,7 +924,7 @@ pub fn is_mutating_use(&self) -> bool {
         }
     }
 
-    /// Returns true if this lvalue context represents a use that does not change the value.
+    /// Returns true if this place context represents a use that does not change the value.
     pub fn is_nonmutating_use(&self) -> bool {
         match *self {
             PlaceContext::Inspect | PlaceContext::Borrow { kind: BorrowKind::Shared, .. } |
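The `PlaceContext` helpers above classify how a place is being used: `is_mutating_use` groups the write-like contexts (stores, call destinations, drops, mutable borrows) and `is_nonmutating_use` the read-like ones. A much-reduced stand-in enum showing the same classification idea (not the full rustc `PlaceContext`, which also carries regions and borrow kinds):

```rust
// Cut-down PlaceContext with only a few of the real variants.
#[derive(Clone, Copy, Debug)]
enum PlaceContext {
    Store,
    Drop,
    MutableBorrow,
    Inspect,
    Copy,
    SharedBorrow,
}

impl PlaceContext {
    // Mirrors the spirit of is_mutating_use: does this use potentially
    // change the value stored in the place?
    fn is_mutating_use(self) -> bool {
        match self {
            PlaceContext::Store | PlaceContext::Drop | PlaceContext::MutableBorrow => true,
            PlaceContext::Inspect | PlaceContext::Copy | PlaceContext::SharedBorrow => false,
        }
    }
}

fn main() {
    assert!(PlaceContext::Store.is_mutating_use());
    assert!(PlaceContext::Drop.is_mutating_use());
    assert!(PlaceContext::MutableBorrow.is_mutating_use());
    assert!(!PlaceContext::Inspect.is_mutating_use());
    assert!(!PlaceContext::Copy.is_mutating_use());
    assert!(!PlaceContext::SharedBorrow.is_mutating_use());
}
```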
index 0edc486a1625d1106b7ec2cba32ac619ae152132..63b4175ce449f8278b1be4849665063e610939d7 100644 (file)
@@ -247,17 +247,17 @@ fn visit_statement_entry(&mut self,
                 // NOTE: NLL RFC calls for *shallow* write; using Deep
                 // for short-term compat w/ AST-borrowck. Also, switch
                 // to shallow requires to dataflow: "if this is an
-                // assignment `lv = <rvalue>`, then any loan for some
-                // path P of which `lv` is a prefix is killed."
-                self.mutate_lvalue(ContextKind::AssignLhs.new(location),
+                // assignment `place = <rvalue>`, then any loan for some
+                // path P of which `place` is a prefix is killed."
+                self.mutate_place(ContextKind::AssignLhs.new(location),
                                    (lhs, span), Deep, JustWrite, flow_state);
 
                 self.consume_rvalue(ContextKind::AssignRhs.new(location),
                                     (rhs, span), location, flow_state);
             }
-            StatementKind::SetDiscriminant { ref lvalue, variant_index: _ } => {
-                self.mutate_lvalue(ContextKind::SetDiscrim.new(location),
-                                   (lvalue, span),
+            StatementKind::SetDiscriminant { ref place, variant_index: _ } => {
+                self.mutate_place(ContextKind::SetDiscrim.new(location),
+                                   (place, span),
                                    Shallow(Some(ArtificialField::Discriminant)),
                                    JustWrite,
                                    flow_state);
@@ -267,8 +267,8 @@ fn visit_statement_entry(&mut self,
                 for (o, output) in asm.outputs.iter().zip(outputs) {
                     if o.is_indirect {
                         // FIXME(eddyb) indirect inline asm outputs should
-                        // be encoeded through MIR lvalue derefs instead.
-                        self.access_lvalue(context,
+                        // be encoeded through MIR place derefs instead.
+                        self.access_place(context,
                                            (output, span),
                                            (Deep, Read(ReadKind::Copy)),
                                            LocalMutationIsAllowed::No,
@@ -276,7 +276,7 @@ fn visit_statement_entry(&mut self,
                         self.check_if_path_is_moved(context, InitializationRequiringAction::Use,
                                                     (output, span), flow_state);
                     } else {
-                        self.mutate_lvalue(context,
+                        self.mutate_place(context,
                                            (output, span),
                                            Deep,
                                            if o.is_rw { WriteAndRead } else { JustWrite },
@@ -299,7 +299,7 @@ fn visit_statement_entry(&mut self,
             }
 
             StatementKind::StorageDead(local) => {
-                self.access_lvalue(ContextKind::StorageDead.new(location),
+                self.access_place(ContextKind::StorageDead.new(location),
                     (&Place::Local(local), span),
                     (Shallow(None), Write(WriteKind::StorageDeadOrDrop)),
                     LocalMutationIsAllowed::Yes,
@@ -321,19 +321,19 @@ fn visit_terminator_entry(&mut self,
                 self.consume_operand(ContextKind::SwitchInt.new(loc),
                                      (discr, span), flow_state);
             }
-            TerminatorKind::Drop { location: ref drop_lvalue, target: _, unwind: _ } => {
-                self.access_lvalue(ContextKind::Drop.new(loc),
-                                   (drop_lvalue, span),
+            TerminatorKind::Drop { location: ref drop_place, target: _, unwind: _ } => {
+                self.access_place(ContextKind::Drop.new(loc),
+                                   (drop_place, span),
                                    (Deep, Write(WriteKind::StorageDeadOrDrop)),
                                    LocalMutationIsAllowed::Yes,
                                    flow_state);
             }
-            TerminatorKind::DropAndReplace { location: ref drop_lvalue,
+            TerminatorKind::DropAndReplace { location: ref drop_place,
                                              value: ref new_value,
                                              target: _,
                                              unwind: _ } => {
-                self.mutate_lvalue(ContextKind::DropAndReplace.new(loc),
-                                   (drop_lvalue, span),
+                self.mutate_place(ContextKind::DropAndReplace.new(loc),
+                                   (drop_place, span),
                                    Deep,
                                    JustWrite,
                                    flow_state);
@@ -348,7 +348,7 @@ fn visit_terminator_entry(&mut self,
                                          (arg, span), flow_state);
                 }
                 if let Some((ref dest, _/*bb*/)) = *destination {
-                    self.mutate_lvalue(ContextKind::CallDest.new(loc),
+                    self.mutate_place(ContextKind::CallDest.new(loc),
                                        (dest, span),
                                        Deep,
                                        JustWrite,
@@ -385,24 +385,24 @@ fn visit_terminator_entry(&mut self,
                 // so this "extra check" serves as a kind of backup.
                 let domain = flow_state.borrows.base_results.operator();
                 for borrow in domain.borrows() {
-                    let root_lvalue = self.prefixes(
-                        &borrow.lvalue,
+                    let root_place = self.prefixes(
+                        &borrow.place,
                         PrefixSet::All
                     ).last().unwrap();
-                    match root_lvalue {
+                    match root_place {
                         Place::Static(_) => {
-                            self.access_lvalue(
+                            self.access_place(
                                 ContextKind::StorageDead.new(loc),
-                                (&root_lvalue, self.mir.source_info(borrow.location).span),
+                                (&root_place, self.mir.source_info(borrow.location).span),
                                 (Deep, Write(WriteKind::StorageDeadOrDrop)),
                                 LocalMutationIsAllowed::Yes,
                                 flow_state
                             );
                         }
                         Place::Local(_) => {
-                            self.access_lvalue(
+                            self.access_place(
                                 ContextKind::StorageDead.new(loc),
-                                (&root_lvalue, self.mir.source_info(borrow.location).span),
+                                (&root_place, self.mir.source_info(borrow.location).span),
                                 (Shallow(None), Write(WriteKind::StorageDeadOrDrop)),
                                 LocalMutationIsAllowed::Yes,
                                 flow_state
@@ -446,7 +446,7 @@ enum ShallowOrDeep {
     Shallow(Option<ArtificialField>),
 
     /// From the RFC: "A *deep* access means that all data reachable
-    /// through the given lvalue may be invalidated or accesses by
+    /// through the given place may be invalidated or accesses by
     /// this action."
     Deep,
 }
@@ -483,8 +483,8 @@ enum WriteKind {
     Move,
 }
 
-/// When checking permissions for an lvalue access, this flag is used to indicate that an immutable
-/// local lvalue can be mutated.
+/// When checking permissions for a place access, this flag is used to indicate that an immutable
+/// local place can be mutated.
 ///
 /// FIXME: @nikomatsakis suggested that this flag could be removed with the following modifications:
 /// - Merge `check_access_permissions()` and `check_if_reassignment_to_immutable_state()`
@@ -526,21 +526,21 @@ fn as_verb_in_past_tense(self) -> &'static str {
 }
 
 impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
-    /// Checks an access to the given lvalue to see if it is allowed. Examines the set of borrows
+    /// Checks an access to the given place to see if it is allowed. Examines the set of borrows
     /// that are in scope, as well as which paths have been initialized, to ensure that (a) the
-    /// lvalue is initialized and (b) it is not borrowed in some way that would prevent this
+    /// place is initialized and (b) it is not borrowed in some way that would prevent this
     /// access.
     ///
     /// Returns true if an error is reported, false otherwise.
-    fn access_lvalue(&mut self,
+    fn access_place(&mut self,
                      context: Context,
-                     lvalue_span: (&Place<'tcx>, Span),
+                     place_span: (&Place<'tcx>, Span),
                      kind: (ShallowOrDeep, ReadOrWrite),
                      is_local_mutation_allowed: LocalMutationIsAllowed,
                      flow_state: &InProgress<'cx, 'gcx, 'tcx>) {
         let (sd, rw) = kind;
 
-        let storage_dead_or_drop_local = match (lvalue_span.0, rw) {
+        let storage_dead_or_drop_local = match (place_span.0, rw) {
             (&Place::Local(local), Write(WriteKind::StorageDeadOrDrop)) => Some(local),
             _ => None
         };
@@ -553,12 +553,12 @@ fn access_lvalue(&mut self,
         }
 
         // Check permissions
-        let mut error_reported = self.check_access_permissions(lvalue_span,
+        let mut error_reported = self.check_access_permissions(place_span,
                                                                rw,
                                                                is_local_mutation_allowed);
 
         self.each_borrow_involving_path(
-            context, (sd, lvalue_span.0), flow_state, |this, _index, borrow, common_prefix| {
+            context, (sd, place_span.0), flow_state, |this, _index, borrow, common_prefix| {
                 match (rw, borrow.kind) {
                     (Read(_), BorrowKind::Shared) => {
                         Control::Continue
@@ -569,7 +569,7 @@ fn access_lvalue(&mut self,
                             ReadKind::Copy => {
                                 error_reported = true;
                                 this.report_use_while_mutably_borrowed(
-                                    context, lvalue_span, borrow)
+                                    context, place_span, borrow)
                             },
                             ReadKind::Borrow(bk) => {
                                 let end_issued_loan_span =
@@ -577,7 +577,7 @@ fn access_lvalue(&mut self,
                                         &borrow.region);
                                 error_reported = true;
                                 this.report_conflicting_borrow(
-                                    context, common_prefix, lvalue_span, bk,
+                                    context, common_prefix, place_span, bk,
                                     &borrow, end_issued_loan_span)
                             }
                         }
@@ -591,7 +591,7 @@ fn access_lvalue(&mut self,
                                         &borrow.region);
                                 error_reported = true;
                                 this.report_conflicting_borrow(
-                                    context, common_prefix, lvalue_span, bk,
+                                    context, common_prefix, place_span, bk,
                                     &borrow, end_issued_loan_span)
                             }
                              WriteKind::StorageDeadOrDrop => {
@@ -600,17 +600,17 @@ fn access_lvalue(&mut self,
                                         &borrow.region);
                                 error_reported = true;
                                 this.report_borrowed_value_does_not_live_long_enough(
-                                    context, lvalue_span, end_span)
+                                    context, place_span, end_span)
                             },
                             WriteKind::Mutate => {
                                 error_reported = true;
                                 this.report_illegal_mutation_of_borrowed(
-                                    context, lvalue_span, borrow)
+                                    context, place_span, borrow)
                             },
                             WriteKind::Move => {
                                 error_reported = true;
                                 this.report_move_out_while_borrowed(
-                                    context, lvalue_span, &borrow)
+                                    context, place_span, &borrow)
                             },
                         }
                         Control::Break
@@ -625,9 +625,9 @@ fn access_lvalue(&mut self,
         }
     }
 
-    fn mutate_lvalue(&mut self,
+    fn mutate_place(&mut self,
                      context: Context,
-                     lvalue_span: (&Place<'tcx>, Span),
+                     place_span: (&Place<'tcx>, Span),
                      kind: ShallowOrDeep,
                      mode: MutateMode,
                      flow_state: &InProgress<'cx, 'gcx, 'tcx>) {
@@ -635,21 +635,21 @@ fn mutate_lvalue(&mut self,
         match mode {
             MutateMode::WriteAndRead => {
                 self.check_if_path_is_moved(context, InitializationRequiringAction::Update,
-                                            lvalue_span, flow_state);
+                                            place_span, flow_state);
             }
             MutateMode::JustWrite => {
-                self.check_if_assigned_path_is_moved(context, lvalue_span, flow_state);
+                self.check_if_assigned_path_is_moved(context, place_span, flow_state);
             }
         }
 
-        self.access_lvalue(context,
-                           lvalue_span,
+        self.access_place(context,
+                           place_span,
                            (kind, Write(WriteKind::Mutate)),
                            LocalMutationIsAllowed::Yes,
                            flow_state);
 
         // check for reassignments to immutable local variables
-        self.check_if_reassignment_to_immutable_state(context, lvalue_span, flow_state);
+        self.check_if_reassignment_to_immutable_state(context, place_span, flow_state);
     }
 
     fn consume_rvalue(&mut self,
@@ -658,19 +658,19 @@ fn consume_rvalue(&mut self,
                       _location: Location,
                       flow_state: &InProgress<'cx, 'gcx, 'tcx>) {
         match *rvalue {
-            Rvalue::Ref(_/*rgn*/, bk, ref lvalue) => {
+            Rvalue::Ref(_/*rgn*/, bk, ref place) => {
                 let access_kind = match bk {
                     BorrowKind::Shared => (Deep, Read(ReadKind::Borrow(bk))),
                     BorrowKind::Unique |
                     BorrowKind::Mut => (Deep, Write(WriteKind::MutableBorrow(bk))),
                 };
-                self.access_lvalue(context,
-                                   (lvalue, span),
+                self.access_place(context,
+                                   (place, span),
                                    access_kind,
                                    LocalMutationIsAllowed::No,
                                    flow_state);
                 self.check_if_path_is_moved(context, InitializationRequiringAction::Borrow,
-                                            (lvalue, span), flow_state);
+                                            (place, span), flow_state);
             }
 
             Rvalue::Use(ref operand) |
@@ -680,20 +680,20 @@ fn consume_rvalue(&mut self,
                 self.consume_operand(context, (operand, span), flow_state)
             }
 
-            Rvalue::Len(ref lvalue) |
-            Rvalue::Discriminant(ref lvalue) => {
+            Rvalue::Len(ref place) |
+            Rvalue::Discriminant(ref place) => {
                 let af = match *rvalue {
                     Rvalue::Len(..) => ArtificialField::ArrayLength,
                     Rvalue::Discriminant(..) => ArtificialField::Discriminant,
                     _ => unreachable!(),
                 };
-                self.access_lvalue(context,
-                                   (lvalue, span),
+                self.access_place(context,
+                                   (place, span),
                                    (Shallow(Some(af)), Read(ReadKind::Copy)),
                                    LocalMutationIsAllowed::No,
                                    flow_state);
                 self.check_if_path_is_moved(context, InitializationRequiringAction::Use,
-                                            (lvalue, span), flow_state);
+                                            (place, span), flow_state);
             }
 
             Rvalue::BinaryOp(_bin_op, ref operand1, ref operand2) |
@@ -723,30 +723,30 @@ fn consume_operand(&mut self,
                        (operand, span): (&Operand<'tcx>, Span),
                        flow_state: &InProgress<'cx, 'gcx, 'tcx>) {
         match *operand {
-            Operand::Copy(ref lvalue) => {
-                // copy of lvalue: check if this is "copy of frozen path"
+            Operand::Copy(ref place) => {
+                // copy of place: check if this is "copy of frozen path"
                 // (FIXME: see check_loans.rs)
-                self.access_lvalue(context,
-                                   (lvalue, span),
+                self.access_place(context,
+                                   (place, span),
                                    (Deep, Read(ReadKind::Copy)),
                                    LocalMutationIsAllowed::No,
                                    flow_state);
 
                 // Finally, check if path was already moved.
                 self.check_if_path_is_moved(context, InitializationRequiringAction::Use,
-                                            (lvalue, span), flow_state);
+                                            (place, span), flow_state);
             }
-            Operand::Move(ref lvalue) => {
-                // move of lvalue: check if this is move of already borrowed path
-                self.access_lvalue(context,
-                                   (lvalue, span),
+            Operand::Move(ref place) => {
+                // move of place: check if this is move of already borrowed path
+                self.access_place(context,
+                                   (place, span),
                                    (Deep, Write(WriteKind::Move)),
                                    LocalMutationIsAllowed::Yes,
                                    flow_state);
 
                 // Finally, check if path was already moved.
                 self.check_if_path_is_moved(context, InitializationRequiringAction::Use,
-                                            (lvalue, span), flow_state);
+                                            (place, span), flow_state);
             }
             Operand::Constant(_) => {}
         }
@@ -756,32 +756,32 @@ fn consume_operand(&mut self,
 impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
     fn check_if_reassignment_to_immutable_state(&mut self,
                                                 context: Context,
-                                                (lvalue, span): (&Place<'tcx>, Span),
+                                                (place, span): (&Place<'tcx>, Span),
                                                 flow_state: &InProgress<'cx, 'gcx, 'tcx>) {
         let move_data = self.move_data;
 
         // determine if this path has a non-mut owner (and thus needs checking).
-        if let Ok(()) = self.is_mutable(lvalue, LocalMutationIsAllowed::No) {
+        if let Ok(()) = self.is_mutable(place, LocalMutationIsAllowed::No) {
             return;
         }
 
-        if let Err(_) = self.is_mutable(lvalue, LocalMutationIsAllowed::Yes) {
+        if let Err(_) = self.is_mutable(place, LocalMutationIsAllowed::Yes) {
             return;
         }
 
-        match self.move_path_closest_to(lvalue) {
+        match self.move_path_closest_to(place) {
             Ok(mpi) => {
                 for ii in &move_data.init_path_map[mpi] {
                     if flow_state.ever_inits.curr_state.contains(ii) {
                         let first_assign_span = self.move_data.inits[*ii].span;
                         self.report_illegal_reassignment(
-                            context, (lvalue, span), first_assign_span);
+                            context, (place, span), first_assign_span);
                         break;
                     }
                 }
             },
             Err(NoMovePathFound::ReachedStatic) => {
-                let item_msg = match self.describe_lvalue(lvalue) {
+                let item_msg = match self.describe_place(place) {
                     Some(name) => format!("immutable static item `{}`", name),
                     None => "immutable static item".to_owned()
                 };
@@ -795,11 +795,11 @@ fn check_if_reassignment_to_immutable_state(&mut self,
     fn check_if_path_is_moved(&mut self,
                               context: Context,
                               desired_action: InitializationRequiringAction,
-                              lvalue_span: (&Place<'tcx>, Span),
+                              place_span: (&Place<'tcx>, Span),
                               flow_state: &InProgress<'cx, 'gcx, 'tcx>) {
-        // FIXME: analogous code in check_loans first maps `lvalue` to
+        // FIXME: analogous code in check_loans first maps `place` to
         // its base_path ... but is that what we want here?
-        let lvalue = self.base_path(lvalue_span.0);
+        let place = self.base_path(place_span.0);
 
         let maybe_uninits = &flow_state.uninits;
         let curr_move_outs = &flow_state.move_outs.curr_state;
@@ -836,16 +836,16 @@ fn check_if_path_is_moved(&mut self,
 
         // Therefore, if we seek out the *closest* prefix for which we
         // have a MovePath, that should capture the initialization
-        // state for the lvalue scenario.
+        // state for the place scenario.
         //
         // This code covers scenarios 1, 2, and 4.
 
-        debug!("check_if_path_is_moved part1 lvalue: {:?}", lvalue);
-        match self.move_path_closest_to(lvalue) {
+        debug!("check_if_path_is_moved part1 place: {:?}", place);
+        match self.move_path_closest_to(place) {
             Ok(mpi) => {
                 if maybe_uninits.curr_state.contains(&mpi) {
                     self.report_use_of_moved_or_uninitialized(context, desired_action,
-                                                              lvalue_span, mpi,
+                                                              place_span, mpi,
                                                               curr_move_outs);
                     return; // don't bother finding other problems.
                 }
@@ -862,40 +862,40 @@ fn check_if_path_is_moved(&mut self,
             // to do such a query based on partial-init feature-gate.)
         }
 
-        // A move of any shallow suffix of `lvalue` also interferes
-        // with an attempt to use `lvalue`. This is scenario 3 above.
+        // A move of any shallow suffix of `place` also interferes
+        // with an attempt to use `place`. This is scenario 3 above.
         //
         // (Distinct from handling of scenarios 1+2+4 above because
-        // `lvalue` does not interfere with suffixes of its prefixes,
+        // `place` does not interfere with suffixes of its prefixes,
         // e.g. `a.b.c` does not interfere with `a.b.d`)
 
-        debug!("check_if_path_is_moved part2 lvalue: {:?}", lvalue);
-        if let Some(mpi) = self.move_path_for_lvalue(lvalue) {
+        debug!("check_if_path_is_moved part2 place: {:?}", place);
+        if let Some(mpi) = self.move_path_for_place(place) {
             if let Some(child_mpi) = maybe_uninits.has_any_child_of(mpi) {
                 self.report_use_of_moved_or_uninitialized(context, desired_action,
-                                                          lvalue_span, child_mpi,
+                                                          place_span, child_mpi,
                                                           curr_move_outs);
                 return; // don't bother finding other problems.
             }
         }
     }
 
-    /// Currently MoveData does not store entries for all lvalues in
+    /// Currently MoveData does not store entries for all places in
     /// the input MIR. For example it will currently filter out
-    /// lvalues that are Copy; thus we do not track lvalues of shared
-    /// reference type. This routine will walk up an lvalue along its
-    /// prefixes, searching for a foundational lvalue that *is*
+    /// places that are Copy; thus we do not track places of shared
+    /// reference type. This routine will walk up a place along its
+    /// prefixes, searching for a foundational place that *is*
     /// tracked in the MoveData.
     ///
     /// An Err result includes a tag indicating why the search failed.
-    /// Currently this can only occur if the lvalue is built off of a
+    /// Currently this can only occur if the place is built off of a
     /// static variable, as we do not track those in the MoveData.
-    fn move_path_closest_to(&mut self, lvalue: &Place<'tcx>)
+    fn move_path_closest_to(&mut self, place: &Place<'tcx>)
                             -> Result<MovePathIndex, NoMovePathFound>
     {
-        let mut last_prefix = lvalue;
-        for prefix in self.prefixes(lvalue, PrefixSet::All) {
-            if let Some(mpi) = self.move_path_for_lvalue(prefix) {
+        let mut last_prefix = place;
+        for prefix in self.prefixes(place, PrefixSet::All) {
+            if let Some(mpi) = self.move_path_for_place(prefix) {
                 return Ok(mpi);
             }
             last_prefix = prefix;
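
To illustrate the per-path tracking that `check_if_path_is_moved` and `move_path_closest_to` rely on (a sketch under assumed names, not taken from the patch): moving one field leaves its sibling paths initialized, and only uses of the moved path are rejected.

    struct Pair { a: String, b: String }

    fn main() {
        let p = Pair { a: String::from("x"), b: String::from("y") };
        let _a = p.a;          // moves the path `p.a`; `p.b` stays initialized
        // let again = p.a;    // error[E0382]: use of moved value: `p.a`
        let _b = p.b;          // fine: a distinct move path
    }
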
@@ -907,15 +907,15 @@ fn move_path_closest_to(&mut self, lvalue: &Place<'tcx>)
         }
     }
 
-    fn move_path_for_lvalue(&mut self,
-                            lvalue: &Place<'tcx>)
+    fn move_path_for_place(&mut self,
+                            place: &Place<'tcx>)
                             -> Option<MovePathIndex>
     {
         // If returns None, then there is no move path corresponding
-        // to a direct owner of `lvalue` (which means there is nothing
+        // to a direct owner of `place` (which means there is nothing
         // that borrowck tracks for its analysis).
 
-        match self.move_data.rev_lookup.find(lvalue) {
+        match self.move_data.rev_lookup.find(place) {
             LookupResult::Parent(_) => None,
             LookupResult::Exact(mpi) => Some(mpi),
         }
@@ -923,12 +923,12 @@ fn move_path_for_lvalue(&mut self,
 
     fn check_if_assigned_path_is_moved(&mut self,
                                        context: Context,
-                                       (lvalue, span): (&Place<'tcx>, Span),
+                                       (place, span): (&Place<'tcx>, Span),
                                        flow_state: &InProgress<'cx, 'gcx, 'tcx>) {
-        // recur down lvalue; dispatch to check_if_path_is_moved when necessary
-        let mut lvalue = lvalue;
+        // recur down place; dispatch to check_if_path_is_moved when necessary
+        let mut place = place;
         loop {
-            match *lvalue {
+            match *place {
                 Place::Local(_) | Place::Static(_) => {
                     // assigning to `x` does not require `x` be initialized.
                     break;
@@ -977,35 +977,35 @@ fn check_if_assigned_path_is_moved(&mut self,
                         }
                     }
 
-                    lvalue = base;
+                    place = base;
                     continue;
                 }
             }
         }
     }
 
-    /// Check the permissions for the given lvalue and read or write kind
+    /// Check the permissions for the given place and read or write kind
     ///
     /// Returns true if an error is reported, false otherwise.
     fn check_access_permissions(&self,
-                                (lvalue, span): (&Place<'tcx>, Span),
+                                (place, span): (&Place<'tcx>, Span),
                                 kind: ReadOrWrite,
                                 is_local_mutation_allowed: LocalMutationIsAllowed)
                                 -> bool {
         debug!("check_access_permissions({:?}, {:?}, {:?})",
-               lvalue, kind, is_local_mutation_allowed);
+               place, kind, is_local_mutation_allowed);
         let mut error_reported = false;
         match kind {
             Write(WriteKind::MutableBorrow(BorrowKind::Unique)) => {
-                if let Err(_lvalue_err) = self.is_unique(lvalue) {
-                    span_bug!(span, "&unique borrow for {:?} should not fail", lvalue);
+                if let Err(_place_err) = self.is_unique(place) {
+                    span_bug!(span, "&unique borrow for {:?} should not fail", place);
                 }
             },
             Write(WriteKind::MutableBorrow(BorrowKind::Mut)) => {
-                if let Err(lvalue_err) = self.is_mutable(lvalue, is_local_mutation_allowed) {
+                if let Err(place_err) = self.is_mutable(place, is_local_mutation_allowed) {
                     error_reported = true;
 
-                    let item_msg = match self.describe_lvalue(lvalue) {
+                    let item_msg = match self.describe_place(place) {
                         Some(name) => format!("immutable item `{}`", name),
                         None => "immutable item".to_owned()
                     };
@@ -1015,8 +1015,8 @@ fn check_access_permissions(&self,
                         Origin::Mir);
                     err.span_label(span, "cannot borrow as mutable");
 
-                    if lvalue != lvalue_err {
-                        if let Some(name) = self.describe_lvalue(lvalue_err) {
+                    if place != place_err {
+                        if let Some(name) = self.describe_place(place_err) {
                             err.note(&format!("Value not mutable causing this error: `{}`", name));
                         }
                     }
@@ -1025,10 +1025,10 @@ fn check_access_permissions(&self,
                 }
             },
             Write(WriteKind::Mutate) => {
-                if let Err(lvalue_err) = self.is_mutable(lvalue, is_local_mutation_allowed) {
+                if let Err(place_err) = self.is_mutable(place, is_local_mutation_allowed) {
                     error_reported = true;
 
-                    let item_msg = match self.describe_lvalue(lvalue) {
+                    let item_msg = match self.describe_place(place) {
                         Some(name) => format!("immutable item `{}`", name),
                         None => "immutable item".to_owned()
                     };
@@ -1038,8 +1038,8 @@ fn check_access_permissions(&self,
                         Origin::Mir);
                     err.span_label(span, "cannot mutate");
 
-                    if lvalue != lvalue_err {
-                        if let Some(name) = self.describe_lvalue(lvalue_err) {
+                    if place != place_err {
+                        if let Some(name) = self.describe_place(place_err) {
                             err.note(&format!("Value not mutable causing this error: `{}`", name));
                         }
                     }
@@ -1050,10 +1050,10 @@ fn check_access_permissions(&self,
             Write(WriteKind::Move) |
             Write(WriteKind::StorageDeadOrDrop) |
             Write(WriteKind::MutableBorrow(BorrowKind::Shared)) => {
-                if let Err(_lvalue_err) = self.is_mutable(lvalue, is_local_mutation_allowed) {
+                if let Err(_place_err) = self.is_mutable(place, is_local_mutation_allowed) {
                     self.tcx.sess.delay_span_bug(span,
                         &format!("Accessing `{:?}` with the kind `{:?}` shouldn't be possible",
-                            lvalue,
+                            place,
                             kind));
                 }
             },
@@ -1068,24 +1068,24 @@ fn check_access_permissions(&self,
 
     /// Can this value be written or borrowed mutably
     fn is_mutable<'d>(&self,
-                      lvalue: &'d Place<'tcx>,
+                      place: &'d Place<'tcx>,
                       is_local_mutation_allowed: LocalMutationIsAllowed)
                       -> Result<(), &'d Place<'tcx>> {
-        match *lvalue {
+        match *place {
             Place::Local(local) => {
                 let local = &self.mir.local_decls[local];
                 match local.mutability {
                     Mutability::Not =>
                         match is_local_mutation_allowed {
                             LocalMutationIsAllowed::Yes => Ok(()),
-                            LocalMutationIsAllowed::No => Err(lvalue),
+                            LocalMutationIsAllowed::No => Err(place),
                         },
                     Mutability::Mut => Ok(())
                 }
             },
             Place::Static(ref static_) => {
                 if !self.tcx.is_static_mut(static_.def_id) {
-                    Err(lvalue)
+                    Err(place)
                 } else {
                     Ok(())
                 }
@@ -1100,7 +1100,7 @@ fn is_mutable<'d>(&self,
                             ty::TyRef(_, tnm) => {
                                 match tnm.mutbl {
                                     // Shared borrowed data is never mutable
-                                    hir::MutImmutable => Err(lvalue),
+                                    hir::MutImmutable => Err(place),
                                     // Mutably borrowed data is mutable, but only if we have a
                                     // unique path to the `&mut`
                                     hir::MutMutable => {
@@ -1115,7 +1115,7 @@ fn is_mutable<'d>(&self,
                             ty::TyRawPtr(tnm) => {
                                 match tnm.mutbl {
                                     // `*const` raw pointers are not mutable
-                                    hir::MutImmutable => Err(lvalue),
+                                    hir::MutImmutable => Err(place),
                                     // `*mut` raw pointers are always mutable, regardless of context
                                     // The users have to check by themselves.
                                     hir::MutMutable => Ok(()),
@@ -1135,14 +1135,14 @@ fn is_mutable<'d>(&self,
                     ProjectionElem::ConstantIndex{..} |
                     ProjectionElem::Subslice{..} |
                     ProjectionElem::Downcast(..) => {
-                        let field_projection = self.is_upvar_field_projection(lvalue);
+                        let field_projection = self.is_upvar_field_projection(place);
 
                         if let Some(field) = field_projection {
                             let decl = &self.mir.upvar_decls[field.index()];
 
                             return match decl.mutability {
                                 Mutability::Mut => self.is_unique(&proj.base),
-                                Mutability::Not => Err(lvalue),
+                                Mutability::Not => Err(place),
                             };
                         }
 
@@ -1153,16 +1153,16 @@ fn is_mutable<'d>(&self,
         }
     }
 
-    /// Does this lvalue have a unique path
-    fn is_unique<'d>(&self, lvalue: &'d Place<'tcx>) -> Result<(), &'d Place<'tcx>> {
-        match *lvalue {
+    /// Does this place have a unique path
+    fn is_unique<'d>(&self, place: &'d Place<'tcx>) -> Result<(), &'d Place<'tcx>> {
+        match *place {
             Place::Local(..) => {
                 // Local variables are unique
                 Ok(())
             },
             Place::Static(..) => {
                 // Static variables are not
-                Err(lvalue)
+                Err(place)
             },
             Place::Projection(ref proj) => {
                 match proj.elem {
@@ -1178,8 +1178,8 @@ fn is_unique<'d>(&self, lvalue: &'d Place<'tcx>) -> Result<(), &'d Place<'tcx>>
                         match base_ty.sty {
                             ty::TyRef(_, tnm) => {
                                 match tnm.mutbl {
-                                    // lvalue represents an aliased location
-                                    hir::MutImmutable => Err(lvalue),
+                                    // place represents an aliased location
+                                    hir::MutImmutable => Err(place),
                                     // `&mut T` is as unique as the context in which it is found
                                     hir::MutMutable => self.is_unique(&proj.base),
                                 }
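
The rules spelled out in the comments above, that shared borrows never grant mutation and that an `&mut` is writable only through a unique path, show up directly in ordinary code; the following sketch is illustrative only:

    fn main() {
        let mut x = 1;
        let m = &mut x;    // unique path to `x`, so `*m` is mutable
        *m += 1;
        let rm = &m;       // shared reference to the `&mut`
        // **rm += 1;      // error: the path to the `&mut` goes through a `&`,
        //                 // so it is not unique and cannot be written through
        let _ = rm;
    }
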
@@ -1217,14 +1217,14 @@ enum NoMovePathFound {
 impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
     fn each_borrow_involving_path<F>(&mut self,
                                      _context: Context,
-                                     access_lvalue: (ShallowOrDeep, &Place<'tcx>),
+                                     access_place: (ShallowOrDeep, &Place<'tcx>),
                                      flow_state: &InProgress<'cx, 'gcx, 'tcx>,
                                      mut op: F)
         where F: FnMut(&mut Self, BorrowIndex, &BorrowData<'tcx>, &Place<'tcx>) -> Control
     {
-        let (access, lvalue) = access_lvalue;
+        let (access, place) = access_place;
 
-        // FIXME: analogous code in check_loans first maps `lvalue` to
+        // FIXME: analogous code in check_loans first maps `place` to
         // its base_path.
 
         let domain = flow_state.borrows.base_results.operator();
@@ -1235,21 +1235,21 @@ fn each_borrow_involving_path<F>(&mut self,
         'next_borrow: for i in flow_state.borrows.elems_incoming() {
             let borrowed = &data[i];
 
-            // Is `lvalue` (or a prefix of it) already borrowed? If
+            // Is `place` (or a prefix of it) already borrowed? If
             // so, that's relevant.
             //
             // FIXME: Differs from AST-borrowck; includes drive-by fix
             // to #38899. Will probably need back-compat mode flag.
-            for accessed_prefix in self.prefixes(lvalue, PrefixSet::All) {
-                if *accessed_prefix == borrowed.lvalue {
+            for accessed_prefix in self.prefixes(place, PrefixSet::All) {
+                if *accessed_prefix == borrowed.place {
                     // FIXME: pass in enum describing case we are in?
                     let ctrl = op(self, i, borrowed, accessed_prefix);
                     if ctrl == Control::Break { return; }
                 }
             }
 
-            // Is `lvalue` a prefix (modulo access type) of the
-            // `borrowed.lvalue`? If so, that's relevant.
+            // Is `place` a prefix (modulo access type) of the
+            // `borrowed.place`? If so, that's relevant.
 
             let prefix_kind = match access {
                 Shallow(Some(ArtificialField::Discriminant)) |
@@ -1258,7 +1258,7 @@ fn each_borrow_involving_path<F>(&mut self,
                     // additional fields on the type; they do not
                     // overlap any existing data there. Furthermore,
                     // they cannot actually be a prefix of any
-                    // borrowed lvalue (at least in MIR as it is
+                    // borrowed place (at least in MIR as it is
                     // currently.)
                     continue 'next_borrow;
                 }
@@ -1266,8 +1266,8 @@ fn each_borrow_involving_path<F>(&mut self,
                 Deep => PrefixSet::Supporting,
             };
 
-            for borrowed_prefix in self.prefixes(&borrowed.lvalue, prefix_kind) {
-                if borrowed_prefix == lvalue {
+            for borrowed_prefix in self.prefixes(&borrowed.place, prefix_kind) {
+                if borrowed_prefix == place {
                     // FIXME: pass in enum describing case we are in?
                     let ctrl = op(self, i, borrowed, borrowed_prefix);
                     if ctrl == Control::Break { return; }
@@ -1280,7 +1280,7 @@ fn each_borrow_involving_path<F>(&mut self,
 use self::prefixes::PrefixSet;
 
 /// From the NLL RFC: "The deep [aka 'supporting'] prefixes for an
-/// lvalue are formed by stripping away fields and derefs, except that
+/// place are formed by stripping away fields and derefs, except that
 /// we stop when we reach the deref of a shared reference. [...] "
 ///
 /// "Shallow prefixes are found by stripping away fields, but stop at
@@ -1337,15 +1337,15 @@ pub(super) enum PrefixSet {
     }
 
     impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
-        /// Returns an iterator over the prefixes of `lvalue`
+        /// Returns an iterator over the prefixes of `place`
         /// (inclusive) from longest to smallest, potentially
         /// terminating the iteration early based on `kind`.
         pub(super) fn prefixes(&self,
-                               lvalue: &'cx Place<'tcx>,
+                               place: &'cx Place<'tcx>,
                                kind: PrefixSet)
                                -> Prefixes<'cx, 'gcx, 'tcx>
         {
-            Prefixes { next: Some(lvalue), kind, mir: self.mir, tcx: self.tcx }
+            Prefixes { next: Some(place), kind, mir: self.mir, tcx: self.tcx }
         }
     }
 
@@ -1354,11 +1354,11 @@ impl<'cx, 'gcx, 'tcx> Iterator for Prefixes<'cx, 'gcx, 'tcx> {
         fn next(&mut self) -> Option<Self::Item> {
             let mut cursor = match self.next {
                 None => return None,
-                Some(lvalue) => lvalue,
+                Some(place) => place,
             };
 
-            // Post-processing `lvalue`: Enqueue any remaining
-            // work. Also, `lvalue` may not be a prefix itself, but
+            // Post-processing `place`: Enqueue any remaining
+            // work. Also, `place` may not be a prefix itself, but
             // may hold one further down (e.g. we never return
             // downcasts here, but may return a base of a downcast).
 
@@ -1447,7 +1447,7 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
     fn report_use_of_moved_or_uninitialized(&mut self,
                            _context: Context,
                            desired_action: InitializationRequiringAction,
-                           (lvalue, span): (&Place<'tcx>, Span),
+                           (place, span): (&Place<'tcx>, Span),
                            mpi: MovePathIndex,
                            curr_move_out: &IdxSetBuf<MoveOutIndex>) {
 
@@ -1455,13 +1455,13 @@ fn report_use_of_moved_or_uninitialized(&mut self,
             |moi| curr_move_out.contains(moi)).collect::<Vec<_>>();
 
         if mois.is_empty() {
-            let item_msg = match self.describe_lvalue(lvalue) {
+            let item_msg = match self.describe_place(place) {
                 Some(name) => format!("`{}`", name),
                 None => "value".to_owned()
             };
             self.tcx.cannot_act_on_uninitialized_variable(span,
                                                           desired_action.as_noun(),
-                                                          &self.describe_lvalue(lvalue)
+                                                          &self.describe_place(place)
                                                             .unwrap_or("_".to_owned()),
                                                           Origin::Mir)
                     .span_label(span, format!("use of possibly uninitialized {}", item_msg))
@@ -1472,7 +1472,7 @@ fn report_use_of_moved_or_uninitialized(&mut self,
             let mut err = self.tcx.cannot_act_on_moved_value(span,
                                                              desired_action.as_noun(),
                                                              msg,
-                                                             &self.describe_lvalue(lvalue)
+                                                             &self.describe_place(place)
                                                                 .unwrap_or("_".to_owned()),
                                                              Origin::Mir);
 
@@ -1496,18 +1496,18 @@ fn report_use_of_moved_or_uninitialized(&mut self,
 
     fn report_move_out_while_borrowed(&mut self,
                                       _context: Context,
-                                      (lvalue, span): (&Place<'tcx>, Span),
+                                      (place, span): (&Place<'tcx>, Span),
                                       borrow: &BorrowData<'tcx>) {
-        let value_msg = match self.describe_lvalue(lvalue) {
+        let value_msg = match self.describe_place(place) {
             Some(name) => format!("`{}`", name),
             None => "value".to_owned()
         };
-        let borrow_msg = match self.describe_lvalue(&borrow.lvalue) {
+        let borrow_msg = match self.describe_place(&borrow.place) {
             Some(name) => format!("`{}`", name),
             None => "value".to_owned()
         };
         self.tcx.cannot_move_when_borrowed(span,
-                                           &self.describe_lvalue(lvalue).unwrap_or("_".to_owned()),
+                                           &self.describe_place(place).unwrap_or("_".to_owned()),
                                            Origin::Mir)
                 .span_label(self.retrieve_borrow_span(borrow),
                             format!("borrow of {} occurs here", borrow_msg))
@@ -1517,14 +1517,14 @@ fn report_move_out_while_borrowed(&mut self,
 
     fn report_use_while_mutably_borrowed(&mut self,
                                          _context: Context,
-                                         (lvalue, span): (&Place<'tcx>, Span),
+                                         (place, span): (&Place<'tcx>, Span),
                                          borrow : &BorrowData<'tcx>) {
 
         let mut err = self.tcx.cannot_use_when_mutably_borrowed(
             span,
-            &self.describe_lvalue(lvalue).unwrap_or("_".to_owned()),
+            &self.describe_place(place).unwrap_or("_".to_owned()),
             self.retrieve_borrow_span(borrow),
-            &self.describe_lvalue(&borrow.lvalue).unwrap_or("_".to_owned()),
+            &self.describe_place(&borrow.place).unwrap_or("_".to_owned()),
             Origin::Mir);
 
         err.emit();
@@ -1555,9 +1555,9 @@ fn find_closure_span(
                 break;
             }
 
-            if let StatementKind::Assign(_, Rvalue::Aggregate(ref kind, ref lvs)) = stmt.kind {
+            if let StatementKind::Assign(_, Rvalue::Aggregate(ref kind, ref places)) = stmt.kind {
                 if let AggregateKind::Closure(def_id, _) = **kind {
-                    debug!("find_closure_span: found closure {:?}", lvs);
+                    debug!("find_closure_span: found closure {:?}", places);
 
                     return if let Some(node_id) = self.tcx.hir.as_local_node_id(def_id) {
                         let args_span = if let ExprClosure(_, _, _, span, _) =
@@ -1569,8 +1569,8 @@ fn find_closure_span(
                         };
 
                         self.tcx.with_freevars(node_id, |freevars| {
-                            for (v, lv) in freevars.iter().zip(lvs) {
-                                match *lv {
+                            for (v, place) in freevars.iter().zip(places) {
+                                match *place {
                                     Operand::Copy(Place::Local(l)) |
                                     Operand::Move(Place::Local(l)) if local == l => {
                                         debug!(
@@ -1597,14 +1597,14 @@ fn find_closure_span(
     fn report_conflicting_borrow(&mut self,
                                  context: Context,
                                  common_prefix: &Place<'tcx>,
-                                 (lvalue, span): (&Place<'tcx>, Span),
+                                 (place, span): (&Place<'tcx>, Span),
                                  gen_borrow_kind: BorrowKind,
                                  issued_borrow: &BorrowData,
                                  end_issued_loan_span: Option<Span>) {
         use self::prefixes::IsPrefixOf;
 
-        assert!(common_prefix.is_prefix_of(lvalue));
-        assert!(common_prefix.is_prefix_of(&issued_borrow.lvalue));
+        assert!(common_prefix.is_prefix_of(place));
+        assert!(common_prefix.is_prefix_of(&issued_borrow.place));
 
         let issued_span = self.retrieve_borrow_span(issued_borrow);
 
@@ -1613,7 +1613,7 @@ fn report_conflicting_borrow(&mut self,
         let old_closure_span = self.find_closure_span(issued_span, issued_borrow.location);
         let issued_span = old_closure_span.map(|(args, _)| args).unwrap_or(issued_span);
 
-        let desc_lvalue = self.describe_lvalue(lvalue).unwrap_or("_".to_owned());
+        let desc_place = self.describe_place(place).unwrap_or("_".to_owned());
 
         // FIXME: supply non-"" `opt_via` when appropriate
         let mut err = match (gen_borrow_kind, "immutable", "mutable",
@@ -1621,27 +1621,27 @@ fn report_conflicting_borrow(&mut self,
             (BorrowKind::Shared, lft, _, BorrowKind::Mut, _, rgt) |
             (BorrowKind::Mut, _, lft, BorrowKind::Shared, rgt, _) =>
                 self.tcx.cannot_reborrow_already_borrowed(
-                    span, &desc_lvalue, "", lft, issued_span,
+                    span, &desc_place, "", lft, issued_span,
                     "it", rgt, "", end_issued_loan_span, Origin::Mir),
 
             (BorrowKind::Mut, _, _, BorrowKind::Mut, _, _) =>
                 self.tcx.cannot_mutably_borrow_multiply(
-                    span, &desc_lvalue, "", issued_span,
+                    span, &desc_place, "", issued_span,
                     "", end_issued_loan_span, Origin::Mir),
 
             (BorrowKind::Unique, _, _, BorrowKind::Unique, _, _) =>
                 self.tcx.cannot_uniquely_borrow_by_two_closures(
-                    span, &desc_lvalue, issued_span,
+                    span, &desc_place, issued_span,
                     end_issued_loan_span, Origin::Mir),
 
             (BorrowKind::Unique, _, _, _, _, _) =>
                 self.tcx.cannot_uniquely_borrow_by_one_closure(
-                    span, &desc_lvalue, "",
+                    span, &desc_place, "",
                     issued_span, "it", "", end_issued_loan_span, Origin::Mir),
 
             (_, _, _, BorrowKind::Unique, _, _) =>
                 self.tcx.cannot_reborrow_already_uniquely_borrowed(
-                    span, &desc_lvalue, "it", "",
+                    span, &desc_place, "it", "",
                     issued_span, "", end_issued_loan_span, Origin::Mir),
 
             (BorrowKind::Shared, _, _, BorrowKind::Shared, _, _) =>
@@ -1651,14 +1651,14 @@ fn report_conflicting_borrow(&mut self,
         if let Some((_, var_span)) = old_closure_span {
             err.span_label(
                 var_span,
-                format!("previous borrow occurs due to use of `{}` in closure", desc_lvalue),
+                format!("previous borrow occurs due to use of `{}` in closure", desc_place),
             );
         }
 
         if let Some((_, var_span)) = new_closure_span {
             err.span_label(
                 var_span,
-                format!("borrow occurs due to use of `{}` in closure", desc_lvalue),
+                format!("borrow occurs due to use of `{}` in closure", desc_place),
             );
         }
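
The closure spans recovered above feed labels such as "borrow occurs due to use of `{}` in closure" into these diagnostics; a minimal situation that triggers such a report (a sketch, not taken from the commit) is:

    fn main() {
        let mut v = vec![1];
        let mut push = || v.push(2);    // the closure captures `v` by mutable borrow
        // v.push(3);                   // error: `v` cannot be borrowed again while the closure's borrow is live
        push();
    }
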
 
@@ -1667,9 +1667,9 @@ fn report_conflicting_borrow(&mut self,
 
     fn report_borrowed_value_does_not_live_long_enough(&mut self,
                                                        _: Context,
-                                                       (lvalue, span): (&Place, Span),
+                                                       (place, span): (&Place, Span),
                                                        end_span: Option<Span>) {
-        let proper_span = match *lvalue {
+        let proper_span = match *place {
             Place::Local(local) => self.mir.local_decls[local].source_info.span,
             _ => span
         };
@@ -1688,12 +1688,12 @@ fn report_borrowed_value_does_not_live_long_enough(&mut self,
 
     fn report_illegal_mutation_of_borrowed(&mut self,
                                            _: Context,
-                                           (lvalue, span): (&Place<'tcx>, Span),
+                                           (place, span): (&Place<'tcx>, Span),
                                            loan: &BorrowData) {
         let mut err = self.tcx.cannot_assign_to_borrowed(
             span,
             self.retrieve_borrow_span(loan),
-            &self.describe_lvalue(lvalue).unwrap_or("_".to_owned()),
+            &self.describe_place(place).unwrap_or("_".to_owned()),
             Origin::Mir);
 
         err.emit();
@@ -1701,14 +1701,14 @@ fn report_illegal_mutation_of_borrowed(&mut self,
 
     fn report_illegal_reassignment(&mut self,
                                    _context: Context,
-                                   (lvalue, span): (&Place<'tcx>, Span),
+                                   (place, span): (&Place<'tcx>, Span),
                                    assigned_span: Span) {
         let mut err = self.tcx.cannot_reassign_immutable(span,
-                                           &self.describe_lvalue(lvalue).unwrap_or("_".to_owned()),
+                                           &self.describe_place(place).unwrap_or("_".to_owned()),
                                            Origin::Mir);
         err.span_label(span, "cannot assign twice to immutable variable");
         if span != assigned_span {
-            let value_msg = match self.describe_lvalue(lvalue) {
+            let value_msg = match self.describe_place(place) {
                 Some(name) => format!("`{}`", name),
                 None => "value".to_owned()
             };
@@ -1719,11 +1719,11 @@ fn report_illegal_reassignment(&mut self,
 }
 
 impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
-    // End-user visible description of `lvalue` if one can be found. If the
-    // lvalue is a temporary for instance, None will be returned.
-    fn describe_lvalue(&self, lvalue: &Place<'tcx>) -> Option<String> {
+    // End-user visible description of `place` if one can be found. If the
+    // place is a temporary for instance, None will be returned.
+    fn describe_place(&self, place: &Place<'tcx>) -> Option<String> {
         let mut buf = String::new();
-        match self.append_lvalue_to_string(lvalue, &mut buf, false) {
+        match self.append_place_to_string(place, &mut buf, false) {
             Ok(()) => Some(buf),
             Err(()) => None
         }
@@ -1733,8 +1733,8 @@ fn describe_lvalue(&self, lvalue: &Place<'tcx>) -> Option<String> {
     /// then returns the index of the field being projected. Note that this closure will always
     /// be `self` in the current MIR, because that is the only time we directly access the fields
     /// of a closure type.
-    fn is_upvar_field_projection(&self, lvalue: &Place<'tcx>) -> Option<Field> {
-        match *lvalue {
+    fn is_upvar_field_projection(&self, place: &Place<'tcx>) -> Option<Field> {
+        match *place {
             Place::Projection(ref proj) => {
                 match proj.elem {
                     ProjectionElem::Field(field, _ty) => {
@@ -1754,12 +1754,12 @@ fn is_upvar_field_projection(&self, lvalue: &Place<'tcx>) -> Option<Field> {
         }
     }
 
-    // Appends end-user visible description of `lvalue` to `buf`.
-    fn append_lvalue_to_string(&self,
-                               lvalue: &Place<'tcx>,
+    // Appends end-user visible description of `place` to `buf`.
+    fn append_place_to_string(&self,
+                               place: &Place<'tcx>,
                                buf: &mut String,
                                mut autoderef: bool) -> Result<(), ()> {
-        match *lvalue {
+        match *place {
             Place::Local(local) => {
                 self.append_local_to_string(local, buf,)?;
             }
@@ -1779,33 +1779,33 @@ fn append_lvalue_to_string(&self,
                             }
                         } else {
                             if autoderef {
-                                self.append_lvalue_to_string(&proj.base, buf, autoderef)?;
+                                self.append_place_to_string(&proj.base, buf, autoderef)?;
                             } else {
                                 buf.push_str(&"*");
-                                self.append_lvalue_to_string(&proj.base, buf, autoderef)?;
+                                self.append_place_to_string(&proj.base, buf, autoderef)?;
                             }
                         }
                     },
                     ProjectionElem::Downcast(..) => {
-                        self.append_lvalue_to_string(&proj.base, buf, autoderef)?;
+                        self.append_place_to_string(&proj.base, buf, autoderef)?;
                     },
                     ProjectionElem::Field(field, _ty) => {
                         autoderef = true;
 
-                        if let Some(field) = self.is_upvar_field_projection(lvalue) {
+                        if let Some(field) = self.is_upvar_field_projection(place) {
                             let var_index = field.index();
                             let name = self.mir.upvar_decls[var_index].debug_name.to_string();
                             buf.push_str(&name);
                         } else {
                             let field_name = self.describe_field(&proj.base, field);
-                            self.append_lvalue_to_string(&proj.base, buf, autoderef)?;
+                            self.append_place_to_string(&proj.base, buf, autoderef)?;
                             buf.push_str(&format!(".{}", field_name));
                         }
                     },
                     ProjectionElem::Index(index) => {
                         autoderef = true;
 
-                        self.append_lvalue_to_string(&proj.base, buf, autoderef)?;
+                        self.append_place_to_string(&proj.base, buf, autoderef)?;
                         buf.push_str("[");
                         if let Err(_) = self.append_local_to_string(index, buf) {
                             buf.push_str("..");
@@ -1817,7 +1817,7 @@ fn append_lvalue_to_string(&self,
                         // Since it isn't possible to borrow an element on a particular index and
                         // then use another while the borrow is held, don't output indices details
                         // to avoid confusing the end-user
-                        self.append_lvalue_to_string(&proj.base, buf, autoderef)?;
+                        self.append_place_to_string(&proj.base, buf, autoderef)?;
                         buf.push_str(&"[..]");
                     },
                 };
@@ -1827,7 +1827,7 @@ fn append_lvalue_to_string(&self,
         Ok(())
     }
 
-    // Appends end-user visible description of the `local` lvalue to `buf`. If `local` doesn't have
+    // Appends end-user visible description of the `local` place to `buf`. If `local` doesn't have
     // a name, then `Err` is returned
     fn append_local_to_string(&self, local_index: Local, buf: &mut String) -> Result<(), ()> {
         let local = &self.mir.local_decls[local_index];
@@ -1923,20 +1923,20 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
     // moves out of a Box. They should be removed when/if we stop
     // treating Box specially (e.g. when/if DerefMove is added...)
 
-    fn base_path<'d>(&self, lvalue: &'d Place<'tcx>) -> &'d Place<'tcx> {
+    fn base_path<'d>(&self, place: &'d Place<'tcx>) -> &'d Place<'tcx> {
         //! Returns the base of the leftmost (deepest) dereference of a
-        //! Box in `lvalue`. If there is no dereference of a Box
-        //! in `lvalue`, then it just returns `lvalue` itself.
+        //! Box in `place`. If there is no dereference of a Box
+        //! in `place`, then it just returns `place` itself.
 
-        let mut cursor = lvalue;
-        let mut deepest = lvalue;
+        let mut cursor = place;
+        let mut deepest = place;
         loop {
             let proj = match *cursor {
                 Place::Local(..) | Place::Static(..) => return deepest,
                 Place::Projection(ref proj) => proj,
             };
             if proj.elem == ProjectionElem::Deref &&
-                lvalue.ty(self.mir, self.tcx).to_ty(self.tcx).is_box()
+                place.ty(self.mir, self.tcx).to_ty(self.tcx).is_box()
             {
                 deepest = &proj.base;
             }
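
The `Box` special-casing that `base_path` supports corresponds to the one move-out-of-a-dereference the language accepts; a small sketch (not part of this diff):

    fn main() {
        let b = Box::new(String::from("hi"));
        let s = *b;        // moving out of `*b` is allowed; the tracked base path is the local `b`
        // drop(b);        // error[E0382]: use of moved value: `b`
        let _ = s;
    }
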
index 242eea8cb934464cccaeb88442d11481ee3d9be4..d1bb1f39e221c88b93e82325f3bd5b4855a4871f 100644 (file)
@@ -61,11 +61,11 @@ pub fn push_end_region<'a, 'gcx:'a+'tcx>(&mut self,
     pub fn push_assign(&mut self,
                        block: BasicBlock,
                        source_info: SourceInfo,
-                       lvalue: &Place<'tcx>,
+                       place: &Place<'tcx>,
                        rvalue: Rvalue<'tcx>) {
         self.push(block, Statement {
             source_info,
-            kind: StatementKind::Assign(lvalue.clone(), rvalue)
+            kind: StatementKind::Assign(place.clone(), rvalue)
         });
     }
 
@@ -81,8 +81,8 @@ pub fn push_assign_constant(&mut self,
     pub fn push_assign_unit(&mut self,
                             block: BasicBlock,
                             source_info: SourceInfo,
-                            lvalue: &Place<'tcx>) {
-        self.push_assign(block, source_info, lvalue, Rvalue::Aggregate(
+                            place: &Place<'tcx>) {
+        self.push_assign(block, source_info, place, Rvalue::Aggregate(
             box AggregateKind::Tuple, vec![]
         ));
     }
diff --git a/src/librustc_mir/build/expr/as_lvalue.rs b/src/librustc_mir/build/expr/as_lvalue.rs
deleted file mode 100644 (file)
index c59005b..0000000
+++ /dev/null
@@ -1,135 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! See docs in build/expr/mod.rs
-
-use build::{BlockAnd, BlockAndExtension, Builder};
-use build::expr::category::Category;
-use hair::*;
-use rustc::mir::*;
-
-use rustc_data_structures::indexed_vec::Idx;
-
-impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
-    /// Compile `expr`, yielding an lvalue that we can move from etc.
-    pub fn as_lvalue<M>(&mut self,
-                        block: BasicBlock,
-                        expr: M)
-                        -> BlockAnd<Place<'tcx>>
-        where M: Mirror<'tcx, Output=Expr<'tcx>>
-    {
-        let expr = self.hir.mirror(expr);
-        self.expr_as_lvalue(block, expr)
-    }
-
-    fn expr_as_lvalue(&mut self,
-                      mut block: BasicBlock,
-                      expr: Expr<'tcx>)
-                      -> BlockAnd<Place<'tcx>> {
-        debug!("expr_as_lvalue(block={:?}, expr={:?})", block, expr);
-
-        let this = self;
-        let expr_span = expr.span;
-        let source_info = this.source_info(expr_span);
-        match expr.kind {
-            ExprKind::Scope { region_scope, lint_level, value } => {
-                this.in_scope((region_scope, source_info), lint_level, block, |this| {
-                    this.as_lvalue(block, value)
-                })
-            }
-            ExprKind::Field { lhs, name } => {
-                let lvalue = unpack!(block = this.as_lvalue(block, lhs));
-                let lvalue = lvalue.field(name, expr.ty);
-                block.and(lvalue)
-            }
-            ExprKind::Deref { arg } => {
-                let lvalue = unpack!(block = this.as_lvalue(block, arg));
-                let lvalue = lvalue.deref();
-                block.and(lvalue)
-            }
-            ExprKind::Index { lhs, index } => {
-                let (usize_ty, bool_ty) = (this.hir.usize_ty(), this.hir.bool_ty());
-
-                let slice = unpack!(block = this.as_lvalue(block, lhs));
-                // region_scope=None so lvalue indexes live forever. They are scalars so they
-                // do not need storage annotations, and they are often copied between
-                // places.
-                let idx = unpack!(block = this.as_temp(block, None, index));
-
-                // bounds check:
-                let (len, lt) = (this.temp(usize_ty.clone(), expr_span),
-                                 this.temp(bool_ty, expr_span));
-                this.cfg.push_assign(block, source_info, // len = len(slice)
-                                     &len, Rvalue::Len(slice.clone()));
-                this.cfg.push_assign(block, source_info, // lt = idx < len
-                                     &lt, Rvalue::BinaryOp(BinOp::Lt,
-                                                           Operand::Copy(Place::Local(idx)),
-                                                           Operand::Copy(len.clone())));
-
-                let msg = AssertMessage::BoundsCheck {
-                    len: Operand::Move(len),
-                    index: Operand::Copy(Place::Local(idx))
-                };
-                let success = this.assert(block, Operand::Move(lt), true,
-                                          msg, expr_span);
-                success.and(slice.index(idx))
-            }
-            ExprKind::SelfRef => {
-                block.and(Place::Local(Local::new(1)))
-            }
-            ExprKind::VarRef { id } => {
-                let index = this.var_indices[&id];
-                block.and(Place::Local(index))
-            }
-            ExprKind::StaticRef { id } => {
-                block.and(Place::Static(Box::new(Static { def_id: id, ty: expr.ty })))
-            }
-
-            ExprKind::Array { .. } |
-            ExprKind::Tuple { .. } |
-            ExprKind::Adt { .. } |
-            ExprKind::Closure { .. } |
-            ExprKind::Unary { .. } |
-            ExprKind::Binary { .. } |
-            ExprKind::LogicalOp { .. } |
-            ExprKind::Box { .. } |
-            ExprKind::Cast { .. } |
-            ExprKind::Use { .. } |
-            ExprKind::NeverToAny { .. } |
-            ExprKind::ReifyFnPointer { .. } |
-            ExprKind::ClosureFnPointer { .. } |
-            ExprKind::UnsafeFnPointer { .. } |
-            ExprKind::Unsize { .. } |
-            ExprKind::Repeat { .. } |
-            ExprKind::Borrow { .. } |
-            ExprKind::If { .. } |
-            ExprKind::Match { .. } |
-            ExprKind::Loop { .. } |
-            ExprKind::Block { .. } |
-            ExprKind::Assign { .. } |
-            ExprKind::AssignOp { .. } |
-            ExprKind::Break { .. } |
-            ExprKind::Continue { .. } |
-            ExprKind::Return { .. } |
-            ExprKind::Literal { .. } |
-            ExprKind::InlineAsm { .. } |
-            ExprKind::Yield { .. } |
-            ExprKind::Call { .. } => {
-                // these are not lvalues, so we need to make a temporary.
-                debug_assert!(match Category::of(&expr.kind) {
-                    Some(Category::Place) => false,
-                    _ => true,
-                });
-                let temp = unpack!(block = this.as_temp(block, expr.temp_lifetime, expr));
-                block.and(Place::Local(temp))
-            }
-        }
-    }
-}
index 15fada248ee0be9f87275a3b66fdccaf3aedce19..7eae414a391377b682419f36d6b9a45385560a39 100644 (file)
@@ -32,7 +32,7 @@ pub fn as_local_operand<M>(&mut self, block: BasicBlock, expr: M)
     }
 
     /// Compile `expr` into a value that can be used as an operand.
-    /// If `expr` is an lvalue like `x`, this will introduce a
+    /// If `expr` is a place like `x`, this will introduce a
     /// temporary `tmp = x`, so that we capture the value of `x` at
     /// this time.
     ///
diff --git a/src/librustc_mir/build/expr/as_place.rs b/src/librustc_mir/build/expr/as_place.rs
new file mode 100644 (file)
index 0000000..9e21790
--- /dev/null
@@ -0,0 +1,135 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! See docs in build/expr/mod.rs
+
+use build::{BlockAnd, BlockAndExtension, Builder};
+use build::expr::category::Category;
+use hair::*;
+use rustc::mir::*;
+
+use rustc_data_structures::indexed_vec::Idx;
+
+impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
+    /// Compile `expr`, yielding a place that we can move from etc.
+    pub fn as_place<M>(&mut self,
+                        block: BasicBlock,
+                        expr: M)
+                        -> BlockAnd<Place<'tcx>>
+        where M: Mirror<'tcx, Output=Expr<'tcx>>
+    {
+        let expr = self.hir.mirror(expr);
+        self.expr_as_place(block, expr)
+    }
+
+    fn expr_as_place(&mut self,
+                      mut block: BasicBlock,
+                      expr: Expr<'tcx>)
+                      -> BlockAnd<Place<'tcx>> {
+        debug!("expr_as_place(block={:?}, expr={:?})", block, expr);
+
+        let this = self;
+        let expr_span = expr.span;
+        let source_info = this.source_info(expr_span);
+        match expr.kind {
+            ExprKind::Scope { region_scope, lint_level, value } => {
+                this.in_scope((region_scope, source_info), lint_level, block, |this| {
+                    this.as_place(block, value)
+                })
+            }
+            ExprKind::Field { lhs, name } => {
+                let place = unpack!(block = this.as_place(block, lhs));
+                let place = place.field(name, expr.ty);
+                block.and(place)
+            }
+            ExprKind::Deref { arg } => {
+                let place = unpack!(block = this.as_place(block, arg));
+                let place = place.deref();
+                block.and(place)
+            }
+            ExprKind::Index { lhs, index } => {
+                let (usize_ty, bool_ty) = (this.hir.usize_ty(), this.hir.bool_ty());
+
+                let slice = unpack!(block = this.as_place(block, lhs));
+                // region_scope=None so place indexes live forever. They are scalars so they
+                // do not need storage annotations, and they are often copied between
+                // places.
+                let idx = unpack!(block = this.as_temp(block, None, index));
+
+                // bounds check:
+                let (len, lt) = (this.temp(usize_ty.clone(), expr_span),
+                                 this.temp(bool_ty, expr_span));
+                this.cfg.push_assign(block, source_info, // len = len(slice)
+                                     &len, Rvalue::Len(slice.clone()));
+                this.cfg.push_assign(block, source_info, // lt = idx < len
+                                     &lt, Rvalue::BinaryOp(BinOp::Lt,
+                                                           Operand::Copy(Place::Local(idx)),
+                                                           Operand::Copy(len.clone())));
+
+                let msg = AssertMessage::BoundsCheck {
+                    len: Operand::Move(len),
+                    index: Operand::Copy(Place::Local(idx))
+                };
+                let success = this.assert(block, Operand::Move(lt), true,
+                                          msg, expr_span);
+                success.and(slice.index(idx))
+            }
+            ExprKind::SelfRef => {
+                block.and(Place::Local(Local::new(1)))
+            }
+            ExprKind::VarRef { id } => {
+                let index = this.var_indices[&id];
+                block.and(Place::Local(index))
+            }
+            ExprKind::StaticRef { id } => {
+                block.and(Place::Static(Box::new(Static { def_id: id, ty: expr.ty })))
+            }
+
+            ExprKind::Array { .. } |
+            ExprKind::Tuple { .. } |
+            ExprKind::Adt { .. } |
+            ExprKind::Closure { .. } |
+            ExprKind::Unary { .. } |
+            ExprKind::Binary { .. } |
+            ExprKind::LogicalOp { .. } |
+            ExprKind::Box { .. } |
+            ExprKind::Cast { .. } |
+            ExprKind::Use { .. } |
+            ExprKind::NeverToAny { .. } |
+            ExprKind::ReifyFnPointer { .. } |
+            ExprKind::ClosureFnPointer { .. } |
+            ExprKind::UnsafeFnPointer { .. } |
+            ExprKind::Unsize { .. } |
+            ExprKind::Repeat { .. } |
+            ExprKind::Borrow { .. } |
+            ExprKind::If { .. } |
+            ExprKind::Match { .. } |
+            ExprKind::Loop { .. } |
+            ExprKind::Block { .. } |
+            ExprKind::Assign { .. } |
+            ExprKind::AssignOp { .. } |
+            ExprKind::Break { .. } |
+            ExprKind::Continue { .. } |
+            ExprKind::Return { .. } |
+            ExprKind::Literal { .. } |
+            ExprKind::InlineAsm { .. } |
+            ExprKind::Yield { .. } |
+            ExprKind::Call { .. } => {
+                // these are not places, so we need to make a temporary.
+                debug_assert!(match Category::of(&expr.kind) {
+                    Some(Category::Place) => false,
+                    _ => true,
+                });
+                let temp = unpack!(block = this.as_temp(block, expr.temp_lifetime, expr));
+                block.and(Place::Local(temp))
+            }
+        }
+    }
+}
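
For orientation: the `ExprKind::Index` arm in the new `as_place.rs` above emits an explicit bounds check before yielding the indexed place. A rough surface-Rust equivalent of that check (a sketch for illustration only, not code from this commit; `index_with_check` is an invented name):

    fn index_with_check(slice: &[u32], idx: usize) -> u32 {
        let len = slice.len(); // len = Rvalue::Len(slice)
        let lt = idx < len;    // lt = Rvalue::BinaryOp(BinOp::Lt, idx, len)
        // Assert { cond: lt, expected: true, msg: BoundsCheck { len, index } }
        assert!(lt, "index out of bounds: the len is {} but the index is {}", len, idx);
        slice[idx]             // success.and(slice.index(idx))
    }

    fn main() {
        println!("{}", index_with_check(&[1, 2, 3], 2));
    }

In the generated MIR the final access simply projects the already-checked place; the `slice[idx]` here stands in for that projection.
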
index e23cd7822b76c1a90c2f1b36939322e32327f196..88f1fb4f57518a3a3330b686b663bde1a6448408 100644 (file)
@@ -68,8 +68,8 @@ fn expr_as_rvalue(&mut self,
                 block.and(Rvalue::Repeat(value_operand, count))
             }
             ExprKind::Borrow { region, borrow_kind, arg } => {
-                let arg_lvalue = unpack!(block = this.as_lvalue(block, arg));
-                block.and(Rvalue::Ref(region, borrow_kind, arg_lvalue))
+                let arg_place = unpack!(block = this.as_place(block, arg));
+                block.and(Rvalue::Ref(region, borrow_kind, arg_place))
             }
             ExprKind::Binary { op, lhs, rhs } => {
                 let lhs = unpack!(block = this.as_operand(block, scope, lhs));
@@ -229,7 +229,7 @@ fn expr_as_rvalue(&mut self,
                 let field_names = this.hir.all_fields(adt_def, variant_index);
 
                 let fields = if let Some(FruInfo { base, field_types }) = base {
-                    let base = unpack!(block = this.as_lvalue(block, base));
+                    let base = unpack!(block = this.as_place(block, base));
 
                     // MIR does not natively support FRU, so for each
                     // base-supplied field, generate an operand that
index 9576a6d70f16350539c63ad80491b7e8b13966d7..1fc608c52c658a80bde7801c12990e7325350357 100644 (file)
@@ -58,16 +58,16 @@ fn expr_as_temp(&mut self,
         }
 
         // Careful here not to cause an infinite cycle. If we always
-        // called `into`, then for lvalues like `x.f`, it would
+        // called `into`, then for places like `x.f`, it would
         // eventually fall back to us, and we'd loop. There's a reason
         // for this: `as_temp` is the point where we bridge the "by
-        // reference" semantics of `as_lvalue` with the "by value"
+        // reference" semantics of `as_place` with the "by value"
         // semantics of `into`, `as_operand`, `as_rvalue`, and (of
         // course) `as_temp`.
         match Category::of(&expr.kind).unwrap() {
             Category::Place => {
-                let lvalue = unpack!(block = this.as_lvalue(block, expr));
-                let rvalue = Rvalue::Use(this.consume_by_copy_or_move(lvalue));
+                let place = unpack!(block = this.as_place(block, expr));
+                let rvalue = Rvalue::Use(this.consume_by_copy_or_move(place));
                 this.cfg.push_assign(block, source_info, &Place::Local(temp), rvalue);
             }
             _ => {
index 17b34f4586e8b6e0cdc65db49e54a359cbb4987f..025e77343e718b39d65d5897af20019020761f1e 100644 (file)
 //! - `as_operand` -- evaluates the value and yields an `Operand`,
 //!   suitable for use as an argument to an `Rvalue`
 //! - `as_temp` -- evaluates into a temporary; this is similar to `as_operand`
-//!   except it always returns a fresh lvalue, even for constants
+//!   except it always returns a fresh place, even for constants
 //! - `as_rvalue` -- yields an `Rvalue`, suitable for use in an assignment;
 //!   as of this writing, never needed outside of the `expr` module itself
 //!
 //! Sometimes, though, we want the expression's *location*. An example
 //! would be during a match statement, or the operand of the `&`
-//! operator. In that case, you want `as_lvalue`. This will create a
+//! operator. In that case, you want `as_place`. This will create a
 //! temporary if necessary.
 //!
 //! Finally, if it's a constant you seek, then call
@@ -46,7 +46,7 @@
 //! struct expression (or other expression that creates a new value)
 //! is typically easiest to write in terms of `as_rvalue` or `into`,
 //! whereas a reference to a field is easiest to write in terms of
-//! `as_lvalue`. (The exception to this is scope and paren
+//! `as_place`. (The exception to this is scope and paren
 //! expressions, which have no category.)
 //!
 //! Therefore, the various functions above make use of one another in
 //! the most suitable spot to implement it, and then just let the
 //! other fns cycle around. The handoff works like this:
 //!
-//! - `into(lv)` -> fallback is to create a rvalue with `as_rvalue` and assign it to `lv`
+//! - `into(place)` -> fallback is to create an rvalue with `as_rvalue` and assign it to `place`
 //! - `as_rvalue` -> fallback is to create an Operand with `as_operand` and use `Rvalue::Use`
 //! - `as_operand` -> either invokes `as_constant` or `as_temp`
 //! - `as_constant` -> (no fallback)
-//! - `as_temp` -> creates a temporary and either calls `as_lvalue` or `into`
-//! - `as_lvalue` -> for rvalues, falls back to `as_temp` and returns that
+//! - `as_temp` -> creates a temporary and either calls `as_place` or `into`
+//! - `as_place` -> for rvalues, falls back to `as_temp` and returns that
 //!
 //! As you can see, there is a cycle where `into` can (in theory) fall back to `as_temp`
 //! which can fall back to `into`. So if one of the `ExprKind` variants is not, in fact,
 //! Of those fallbacks, the most interesting one is `as_temp`, because
 //! it discriminates based on the category of the expression. This is
 //! basically the point where the "by value" operations are bridged
-//! over to the "by reference" mode (`as_lvalue`).
+//! over to the "by reference" mode (`as_place`).
 
 mod as_constant;
-mod as_lvalue;
+mod as_place;
 mod as_rvalue;
 mod as_operand;
 mod as_temp;
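
The handoff cycle listed in the doc comment above can be hard to keep straight. The toy program below (a sketch for illustration only, not part of this commit; `ToyExpr`, `ToyBuilder` and friends are invented names) mirrors the shape of that cycle: place-category expressions are handled in `as_place`, rvalue-category expressions in `into`, and `as_temp` is the bridge between the two.

    enum ToyExpr {
        Place(&'static str),  // e.g. `x.f` -- Category::Place
        Rvalue(&'static str), // e.g. `a + b` -- Category::Rvalue
    }

    #[derive(Default)]
    struct ToyBuilder {
        stmts: Vec<String>,
        temps: usize,
    }

    impl ToyBuilder {
        // into(dest): rvalues are handled here; places fall back through as_operand.
        fn into(&mut self, dest: &str, e: &ToyExpr) {
            match e {
                ToyExpr::Rvalue(src) => self.stmts.push(format!("{} = {}", dest, src)),
                ToyExpr::Place(_) => {
                    let operand = self.as_operand(e); // stands in for as_rvalue -> as_operand
                    self.stmts.push(format!("{} = {}", dest, operand));
                }
            }
        }

        // as_operand: constants aside, falls back to as_temp.
        fn as_operand(&mut self, e: &ToyExpr) -> String {
            self.as_temp(e)
        }

        // as_temp: the bridge -- by-reference (as_place) for places, by-value (into) otherwise.
        fn as_temp(&mut self, e: &ToyExpr) -> String {
            self.temps += 1;
            let tmp = format!("tmp{}", self.temps);
            match e {
                ToyExpr::Place(_) => {
                    let place = self.as_place(e);
                    self.stmts.push(format!("{} = copy {}", tmp, place));
                }
                ToyExpr::Rvalue(_) => self.into(&tmp, e),
            }
            tmp
        }

        // as_place: places are handled here; rvalues fall back to as_temp.
        fn as_place(&mut self, e: &ToyExpr) -> String {
            match e {
                ToyExpr::Place(p) => p.to_string(),
                ToyExpr::Rvalue(_) => self.as_temp(e),
            }
        }
    }

    fn main() {
        let mut b = ToyBuilder::default();
        b.into("ret", &ToyExpr::Place("x.f"));         // place: routed via as_temp/as_place
        let t = b.as_place(&ToyExpr::Rvalue("a + b")); // rvalue: gets a temporary via into
        println!("{} / {:?}", t, b.stmts);
    }

The recursion terminates only because each category is handled directly somewhere in the cycle, which is exactly the property the doc comment above warns about.
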
index 3a3d65327f2d740691f9b0a73978f2f2a107e36c..6f1fe8335780db3ff8e1f921adae6906daad6e2d 100644 (file)
@@ -41,14 +41,14 @@ pub fn stmt_expr(&mut self, mut block: BasicBlock, expr: Expr<'tcx>) -> BlockAnd
                 // dropped.
                 if this.hir.needs_drop(lhs.ty) {
                     let rhs = unpack!(block = this.as_local_operand(block, rhs));
-                    let lhs = unpack!(block = this.as_lvalue(block, lhs));
+                    let lhs = unpack!(block = this.as_place(block, lhs));
                     unpack!(block = this.build_drop_and_replace(
                         block, lhs_span, lhs, rhs
                     ));
                     block.unit()
                 } else {
                     let rhs = unpack!(block = this.as_local_rvalue(block, rhs));
-                    let lhs = unpack!(block = this.as_lvalue(block, lhs));
+                    let lhs = unpack!(block = this.as_place(block, lhs));
                     this.cfg.push_assign(block, source_info, &lhs, rhs);
                     block.unit()
                 }
@@ -67,7 +67,7 @@ pub fn stmt_expr(&mut self, mut block: BasicBlock, expr: Expr<'tcx>) -> BlockAnd
 
                 // As above, RTL.
                 let rhs = unpack!(block = this.as_local_operand(block, rhs));
-                let lhs = unpack!(block = this.as_lvalue(block, lhs));
+                let lhs = unpack!(block = this.as_place(block, lhs));
 
                 // we don't have to drop prior contents or anything
                 // because AssignOp is only legal for Copy types
@@ -107,12 +107,12 @@ pub fn stmt_expr(&mut self, mut block: BasicBlock, expr: Expr<'tcx>) -> BlockAnd
             ExprKind::Return { value } => {
                 block = match value {
                     Some(value) => {
-                        unpack!(this.into(&Place::Local(RETURN_POINTER), block, value))
+                        unpack!(this.into(&Place::Local(RETURN_PLACE), block, value))
                     }
                     None => {
                         this.cfg.push_assign_unit(block,
                                                   source_info,
-                                                  &Place::Local(RETURN_POINTER));
+                                                  &Place::Local(RETURN_PLACE));
                         block
                     }
                 };
@@ -123,7 +123,7 @@ pub fn stmt_expr(&mut self, mut block: BasicBlock, expr: Expr<'tcx>) -> BlockAnd
             }
             ExprKind::InlineAsm { asm, outputs, inputs } => {
                 let outputs = outputs.into_iter().map(|output| {
-                    unpack!(block = this.as_lvalue(block, output))
+                    unpack!(block = this.as_place(block, output))
                 }).collect();
                 let inputs = inputs.into_iter().map(|input| {
                     unpack!(block = this.as_local_operand(block, input))
index e748c35f4bdce43935c29a92be2a2504ff6db059..23095bc4269b5e645c33ca66d9218e2cf85172c9 100644 (file)
@@ -36,7 +36,7 @@ pub fn match_expr(&mut self,
                       discriminant: ExprRef<'tcx>,
                       arms: Vec<Arm<'tcx>>)
                       -> BlockAnd<()> {
-        let discriminant_lvalue = unpack!(block = self.as_lvalue(block, discriminant));
+        let discriminant_place = unpack!(block = self.as_place(block, discriminant));
 
         let mut arm_blocks = ArmBlocks {
             blocks: arms.iter()
@@ -77,7 +77,7 @@ pub fn match_expr(&mut self,
                        (pre_binding_block, next_candidate_pre_binding_block))| {
                     Candidate {
                         span: pattern.span,
-                        match_pairs: vec![MatchPair::new(discriminant_lvalue.clone(), pattern)],
+                        match_pairs: vec![MatchPair::new(discriminant_place.clone(), pattern)],
                         bindings: vec![],
                         guard,
                         arm_index,
@@ -91,7 +91,7 @@ pub fn match_expr(&mut self,
         self.cfg.terminate(*pre_binding_blocks.last().unwrap(),
                            outer_source_info, TerminatorKind::Unreachable);
 
-        // this will generate code to test discriminant_lvalue and
+        // this will generate code to test discriminant_place and
         // branch to the appropriate arm block
         let otherwise = self.match_candidates(span, &mut arm_blocks, candidates, block);
 
@@ -139,19 +139,19 @@ pub fn expr_into_pattern(&mut self,
             PatternKind::Binding { mode: BindingMode::ByValue,
                                    var,
                                    subpattern: None, .. } => {
-                let lvalue = self.storage_live_binding(block, var, irrefutable_pat.span);
-                unpack!(block = self.into(&lvalue, block, initializer));
+                let place = self.storage_live_binding(block, var, irrefutable_pat.span);
+                unpack!(block = self.into(&place, block, initializer));
                 self.schedule_drop_for_binding(var, irrefutable_pat.span);
                 block.unit()
             }
             _ => {
-                let lvalue = unpack!(block = self.as_lvalue(block, initializer));
-                self.lvalue_into_pattern(block, irrefutable_pat, &lvalue)
+                let place = unpack!(block = self.as_place(block, initializer));
+                self.place_into_pattern(block, irrefutable_pat, &place)
             }
         }
     }
 
-    pub fn lvalue_into_pattern(&mut self,
+    pub fn place_into_pattern(&mut self,
                                mut block: BasicBlock,
                                irrefutable_pat: Pattern<'tcx>,
                                initializer: &Place<'tcx>)
@@ -315,8 +315,8 @@ struct Binding<'tcx> {
 
 #[derive(Clone, Debug)]
 pub struct MatchPair<'pat, 'tcx:'pat> {
-    // this lvalue...
-    lvalue: Place<'tcx>,
+    // this place...
+    place: Place<'tcx>,
 
     // ... must match this pattern.
     pattern: &'pat Pattern<'tcx>,
@@ -635,7 +635,7 @@ fn test_candidates<'pat>(&mut self,
         match test.kind {
             TestKind::SwitchInt { switch_ty, ref mut options, ref mut indices } => {
                 for candidate in candidates.iter() {
-                    if !self.add_cases_to_switch(&match_pair.lvalue,
+                    if !self.add_cases_to_switch(&match_pair.place,
                                                  candidate,
                                                  switch_ty,
                                                  options,
@@ -646,7 +646,7 @@ fn test_candidates<'pat>(&mut self,
             }
             TestKind::Switch { adt_def: _, ref mut variants} => {
                 for candidate in candidates.iter() {
-                    if !self.add_variants_to_switch(&match_pair.lvalue,
+                    if !self.add_variants_to_switch(&match_pair.place,
                                                     candidate,
                                                     variants) {
                         break;
@@ -661,7 +661,7 @@ fn test_candidates<'pat>(&mut self,
         // vector of candidates. Those are the candidates that still
         // apply if the test has that particular outcome.
         debug!("match_candidates: test={:?} match_pair={:?}", test, match_pair);
-        let target_blocks = self.perform_test(block, &match_pair.lvalue, &test);
+        let target_blocks = self.perform_test(block, &match_pair.place, &test);
         let mut target_candidates: Vec<_> = (0..target_blocks.len()).map(|_| vec![]).collect();
 
         // Sort the candidates into the appropriate vector in
@@ -670,7 +670,7 @@ fn test_candidates<'pat>(&mut self,
         // that point, we stop sorting.
         let tested_candidates =
             candidates.iter()
-                      .take_while(|c| self.sort_candidate(&match_pair.lvalue,
+                      .take_while(|c| self.sort_candidate(&match_pair.place,
                                                           &test,
                                                           c,
                                                           &mut target_candidates))
index a7599f19244c215df39ef654b64adafff34e81f0..4ae373c7c8223c15e0edb37edf51d4fd7a984edd 100644 (file)
 
 //! Simplifying Candidates
 //!
-//! *Simplifying* a match pair `lvalue @ pattern` means breaking it down
+//! *Simplifying* a match pair `place @ pattern` means breaking it down
 //! into bindings or other, simpler match pairs. For example:
 //!
-//! - `lvalue @ (P1, P2)` can be simplified to `[lvalue.0 @ P1, lvalue.1 @ P2]`
-//! - `lvalue @ x` can be simplified to `[]` by binding `x` to `lvalue`
+//! - `place @ (P1, P2)` can be simplified to `[place.0 @ P1, place.1 @ P2]`
+//! - `place @ x` can be simplified to `[]` by binding `x` to `place`
 //!
 //! The `simplify_candidate` routine just repeatedly applies these
 //! sorts of simplifications until there is nothing left to
@@ -73,7 +73,7 @@ fn simplify_match_pair<'pat>(&mut self,
                     name,
                     mutability,
                     span: match_pair.pattern.span,
-                    source: match_pair.lvalue.clone(),
+                    source: match_pair.place.clone(),
                     var_id: var,
                     var_ty: ty,
                     binding_mode: mode,
@@ -81,7 +81,7 @@ fn simplify_match_pair<'pat>(&mut self,
 
                 if let Some(subpattern) = subpattern.as_ref() {
                     // this is the `x @ P` case; have to keep matching against `P` now
-                    candidate.match_pairs.push(MatchPair::new(match_pair.lvalue, subpattern));
+                    candidate.match_pairs.push(MatchPair::new(match_pair.place, subpattern));
                 }
 
                 Ok(())
@@ -105,8 +105,8 @@ fn simplify_match_pair<'pat>(&mut self,
                     }
                 });
                 if irrefutable {
-                    let lvalue = match_pair.lvalue.downcast(adt_def, variant_index);
-                    candidate.match_pairs.extend(self.field_match_pairs(lvalue, subpatterns));
+                    let place = match_pair.place.downcast(adt_def, variant_index);
+                    candidate.match_pairs.extend(self.field_match_pairs(place, subpatterns));
                     Ok(())
                 } else {
                     Err(match_pair)
@@ -115,7 +115,7 @@ fn simplify_match_pair<'pat>(&mut self,
 
             PatternKind::Array { ref prefix, ref slice, ref suffix } => {
                 self.prefix_slice_suffix(&mut candidate.match_pairs,
-                                         &match_pair.lvalue,
+                                         &match_pair.place,
                                          prefix,
                                          slice.as_ref(),
                                          suffix);
@@ -125,13 +125,13 @@ fn simplify_match_pair<'pat>(&mut self,
             PatternKind::Leaf { ref subpatterns } => {
                 // tuple struct, match subpats (if any)
                 candidate.match_pairs
-                         .extend(self.field_match_pairs(match_pair.lvalue, subpatterns));
+                         .extend(self.field_match_pairs(match_pair.place, subpatterns));
                 Ok(())
             }
 
             PatternKind::Deref { ref subpattern } => {
-                let lvalue = match_pair.lvalue.deref();
-                candidate.match_pairs.push(MatchPair::new(lvalue, subpattern));
+                let place = match_pair.place.deref();
+                candidate.match_pairs.push(MatchPair::new(place, subpattern));
                 Ok(())
             }
         }
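
To make the simplification rules described at the top of this file concrete, here is a small worked example (an editor's addition for illustration; the pattern is invented and not taken from the commit). Matching `place @ (a, Some(b))` simplifies roughly as:

    place @ (a, Some(b))
      => [ place.0 @ a, place.1 @ Some(b) ]   tuple patterns are Leaf: recurse into the fields
      => [ place.1 @ Some(b) ]                `a` becomes a Binding, not a match pair
      => `Some(b)` is a refutable Variant, so `simplify_match_pair` returns it as Err
         and leaves it for testing; once the variant test succeeds it is rewritten to
         [ (place.1 as Some).0 @ b ] by `candidate_after_variant_switch`.
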
index 2e866013fd33924a0c5f9e6a676b172866efa6a7..7c9f190670ba8bcc97445ba8e1ba2fdff54516bc 100644 (file)
@@ -109,21 +109,21 @@ pub fn test<'pat>(&mut self, match_pair: &MatchPair<'pat, 'tcx>) -> Test<'tcx> {
     }
 
     pub fn add_cases_to_switch<'pat>(&mut self,
-                                     test_lvalue: &Place<'tcx>,
+                                     test_place: &Place<'tcx>,
                                      candidate: &Candidate<'pat, 'tcx>,
                                      switch_ty: Ty<'tcx>,
                                      options: &mut Vec<&'tcx ty::Const<'tcx>>,
                                      indices: &mut FxHashMap<&'tcx ty::Const<'tcx>, usize>)
                                      -> bool
     {
-        let match_pair = match candidate.match_pairs.iter().find(|mp| mp.lvalue == *test_lvalue) {
+        let match_pair = match candidate.match_pairs.iter().find(|mp| mp.place == *test_place) {
             Some(match_pair) => match_pair,
             _ => { return false; }
         };
 
         match *match_pair.pattern.kind {
             PatternKind::Constant { value } => {
-                // if the lvalues match, the type should match
+                // if the places match, the type should match
                 assert_eq!(match_pair.pattern.ty, switch_ty);
 
                 indices.entry(value)
@@ -150,12 +150,12 @@ pub fn add_cases_to_switch<'pat>(&mut self,
     }
 
     pub fn add_variants_to_switch<'pat>(&mut self,
-                                        test_lvalue: &Place<'tcx>,
+                                        test_place: &Place<'tcx>,
                                         candidate: &Candidate<'pat, 'tcx>,
                                         variants: &mut BitVector)
                                         -> bool
     {
-        let match_pair = match candidate.match_pairs.iter().find(|mp| mp.lvalue == *test_lvalue) {
+        let match_pair = match candidate.match_pairs.iter().find(|mp| mp.place == *test_place) {
             Some(match_pair) => match_pair,
             _ => { return false; }
         };
@@ -177,7 +177,7 @@ pub fn add_variants_to_switch<'pat>(&mut self,
     /// Generates the code to perform a test.
     pub fn perform_test(&mut self,
                         block: BasicBlock,
-                        lvalue: &Place<'tcx>,
+                        place: &Place<'tcx>,
                         test: &Test<'tcx>)
                         -> Vec<BasicBlock> {
         let source_info = self.source_info(test.span);
@@ -212,7 +212,7 @@ pub fn perform_test(&mut self,
                 let discr_ty = adt_def.repr.discr_type().to_ty(tcx);
                 let discr = self.temp(discr_ty, test.span);
                 self.cfg.push_assign(block, source_info, &discr,
-                                     Rvalue::Discriminant(lvalue.clone()));
+                                     Rvalue::Discriminant(place.clone()));
                 assert_eq!(values.len() + 1, targets.len());
                 self.cfg.terminate(block, source_info, TerminatorKind::SwitchInt {
                     discr: Operand::Move(discr),
@@ -233,7 +233,7 @@ pub fn perform_test(&mut self,
                         ConstVal::Bool(false) => vec![false_bb, true_bb],
                         v => span_bug!(test.span, "expected boolean value but got {:?}", v)
                     };
-                    (ret, TerminatorKind::if_(self.hir.tcx(), Operand::Copy(lvalue.clone()),
+                    (ret, TerminatorKind::if_(self.hir.tcx(), Operand::Copy(place.clone()),
                                               true_bb, false_bb))
                 } else {
                     // The switch may be inexhaustive so we
@@ -248,7 +248,7 @@ pub fn perform_test(&mut self,
                         v.val.to_const_int().expect("switching on integral")
                     ).collect();
                     (targets.clone(), TerminatorKind::SwitchInt {
-                        discr: Operand::Copy(lvalue.clone()),
+                        discr: Operand::Copy(place.clone()),
                         switch_ty,
                         values: From::from(values),
                         targets,
@@ -259,14 +259,14 @@ pub fn perform_test(&mut self,
             }
 
             TestKind::Eq { value, mut ty } => {
-                let mut val = Operand::Copy(lvalue.clone());
+                let mut val = Operand::Copy(place.clone());
 
                 // If we're using b"..." as a pattern, we need to insert an
                 // unsizing coercion, as the byte string has the type &[u8; N].
                 let expect = if let ConstVal::ByteStr(bytes) = value.val {
                     let tcx = self.hir.tcx();
 
-                    // Unsize the lvalue to &[u8], too, if necessary.
+                    // Unsize the place to &[u8], too, if necessary.
                     if let ty::TyRef(region, mt) = ty.sty {
                         if let ty::TyArray(_, _) = mt.ty.sty {
                             ty = tcx.mk_imm_ref(region, tcx.mk_slice(tcx.types.u8));
@@ -335,7 +335,7 @@ pub fn perform_test(&mut self,
                 // Test `val` by computing `lo <= val && val <= hi`, using primitive comparisons.
                 let lo = self.literal_operand(test.span, ty.clone(), lo.clone());
                 let hi = self.literal_operand(test.span, ty.clone(), hi.clone());
-                let val = Operand::Copy(lvalue.clone());
+                let val = Operand::Copy(place.clone());
 
                 let fail = self.cfg.start_new_block();
                 let block = self.compare(block, fail, test.span, BinOp::Le, lo, val.clone());
@@ -352,9 +352,9 @@ pub fn perform_test(&mut self,
                 let (actual, result) = (self.temp(usize_ty, test.span),
                                         self.temp(bool_ty, test.span));
 
-                // actual = len(lvalue)
+                // actual = len(place)
                 self.cfg.push_assign(block, source_info,
-                                     &actual, Rvalue::Len(lvalue.clone()));
+                                     &actual, Rvalue::Len(place.clone()));
 
                 // expected = <N>
                 let expected = self.push_usize(block, source_info, len);
@@ -399,7 +399,7 @@ fn compare(&mut self,
         target_block
     }
 
-    /// Given that we are performing `test` against `test_lvalue`,
+    /// Given that we are performing `test` against `test_place`,
     /// this job sorts out what the status of `candidate` will be
     /// after the test. The `resulting_candidates` vector stores, for
     /// each possible outcome of `test`, a vector of the candidates
@@ -430,12 +430,12 @@ fn compare(&mut self,
     /// not apply to this candidate, but it might be we can get
     /// tighter match code if we do something a bit different.
     pub fn sort_candidate<'pat>(&mut self,
-                                test_lvalue: &Place<'tcx>,
+                                test_place: &Place<'tcx>,
                                 test: &Test<'tcx>,
                                 candidate: &Candidate<'pat, 'tcx>,
                                 resulting_candidates: &mut [Vec<Candidate<'pat, 'tcx>>])
                                 -> bool {
-        // Find the match_pair for this lvalue (if any). At present,
+        // Find the match_pair for this place (if any). At present,
         // afaik, there can be at most one. (In the future, if we
         // adopted a more general `@` operator, there might be more
         // than one, but it'd be very unusual to have two sides that
@@ -443,12 +443,12 @@ pub fn sort_candidate<'pat>(&mut self,
         // away.)
         let tested_match_pair = candidate.match_pairs.iter()
                                                      .enumerate()
-                                                     .filter(|&(_, mp)| mp.lvalue == *test_lvalue)
+                                                     .filter(|&(_, mp)| mp.place == *test_place)
                                                      .next();
         let (match_pair_index, match_pair) = match tested_match_pair {
             Some(pair) => pair,
             None => {
-                // We are not testing this lvalue. Therefore, this
+                // We are not testing this place. Therefore, this
                 // candidate applies to ALL outcomes.
                 return false;
             }
@@ -614,7 +614,7 @@ fn candidate_after_slice_test<'pat>(&mut self,
             self.candidate_without_match_pair(match_pair_index, candidate);
         self.prefix_slice_suffix(
             &mut new_candidate.match_pairs,
-            &candidate.match_pairs[match_pair_index].lvalue,
+            &candidate.match_pairs[match_pair_index].place,
             prefix,
             opt_slice,
             suffix);
@@ -635,15 +635,15 @@ fn candidate_after_variant_switch<'pat>(&mut self,
         // we want to create a set of derived match-patterns like
         // `(x as Variant).0 @ P1` and `(x as Variant).1 @ P2`.
         let elem = ProjectionElem::Downcast(adt_def, variant_index);
-        let downcast_lvalue = match_pair.lvalue.clone().elem(elem); // `(x as Variant)`
+        let downcast_place = match_pair.place.clone().elem(elem); // `(x as Variant)`
         let consequent_match_pairs =
             subpatterns.iter()
                        .map(|subpattern| {
                            // e.g., `(x as Variant).0`
-                           let lvalue = downcast_lvalue.clone().field(subpattern.field,
+                           let place = downcast_place.clone().field(subpattern.field,
                                                                       subpattern.pattern.ty);
                            // e.g., `(x as Variant).0 @ P1`
-                           MatchPair::new(lvalue, &subpattern.pattern)
+                           MatchPair::new(place, &subpattern.pattern)
                        });
 
         // In addition, we need all the other match pairs from the old candidate.
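
The `TestKind::Range` arm above reads naturally back into surface Rust; the sketch below is an editor's illustration only, not code from the commit.

    fn range_test(val: i32, lo: i32, hi: i32) -> bool {
        let lo_ok = lo <= val; // first compare(.., BinOp::Le, lo, val)
        let hi_ok = val <= hi; // second compare(.., BinOp::Le, val, hi)
        lo_ok && hi_ok         // only when both hold do we reach the "matched" target block
    }

    fn main() {
        assert!(range_test(5, 1, 9));
        assert!(!range_test(0, 1, 9));
    }

In the generated MIR each comparison gets its own block and the failing edge of either comparison jumps straight to the shared `fail` block, so no boolean `&&` is ever materialized.
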
index cf8e4fd167c5e8338eb36417737513b2dae9d0ab..cfd9100fc6ae7c8482b3a1667fd3a01f5d28b010 100644 (file)
 
 impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
     pub fn field_match_pairs<'pat>(&mut self,
-                                   lvalue: Place<'tcx>,
+                                   place: Place<'tcx>,
                                    subpatterns: &'pat [FieldPattern<'tcx>])
                                    -> Vec<MatchPair<'pat, 'tcx>> {
         subpatterns.iter()
                    .map(|fieldpat| {
-                       let lvalue = lvalue.clone().field(fieldpat.field,
+                       let place = place.clone().field(fieldpat.field,
                                                          fieldpat.pattern.ty);
-                       MatchPair::new(lvalue, &fieldpat.pattern)
+                       MatchPair::new(place, &fieldpat.pattern)
                    })
                    .collect()
     }
 
     pub fn prefix_slice_suffix<'pat>(&mut self,
                                      match_pairs: &mut Vec<MatchPair<'pat, 'tcx>>,
-                                     lvalue: &Place<'tcx>,
+                                     place: &Place<'tcx>,
                                      prefix: &'pat [Pattern<'tcx>],
                                      opt_slice: Option<&'pat Pattern<'tcx>>,
                                      suffix: &'pat [Pattern<'tcx>]) {
@@ -47,13 +47,13 @@ pub fn prefix_slice_suffix<'pat>(&mut self,
                           min_length,
                           from_end: false,
                       };
-                      let lvalue = lvalue.clone().elem(elem);
-                      MatchPair::new(lvalue, subpattern)
+                      let place = place.clone().elem(elem);
+                      MatchPair::new(place, subpattern)
                   })
         );
 
         if let Some(subslice_pat) = opt_slice {
-            let subslice = lvalue.clone().elem(ProjectionElem::Subslice {
+            let subslice = place.clone().elem(ProjectionElem::Subslice {
                 from: prefix.len() as u32,
                 to: suffix.len() as u32
             });
@@ -70,17 +70,17 @@ pub fn prefix_slice_suffix<'pat>(&mut self,
                           min_length,
                           from_end: true,
                       };
-                      let lvalue = lvalue.clone().elem(elem);
-                      MatchPair::new(lvalue, subpattern)
+                      let place = place.clone().elem(elem);
+                      MatchPair::new(place, subpattern)
                   })
         );
     }
 }
 
 impl<'pat, 'tcx> MatchPair<'pat, 'tcx> {
-    pub fn new(lvalue: Place<'tcx>, pattern: &'pat Pattern<'tcx>) -> MatchPair<'pat, 'tcx> {
+    pub fn new(place: Place<'tcx>, pattern: &'pat Pattern<'tcx>) -> MatchPair<'pat, 'tcx> {
         MatchPair {
-            lvalue,
+            place,
             pattern,
             slice_len_checked: false,
         }
index d24ee4bf153f15824a85ade1ffea9c6d72513902..8486c63baac66624875a978d4622a78010fdff69 100644 (file)
@@ -29,10 +29,10 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
     /// call `schedule_drop` once the temporary is initialized.
     pub fn temp(&mut self, ty: Ty<'tcx>, span: Span) -> Place<'tcx> {
         let temp = self.local_decls.push(LocalDecl::new_temp(ty, span));
-        let lvalue = Place::Local(temp);
+        let place = Place::Local(temp);
         debug!("temp: created temp {:?} with type {:?}",
-               lvalue, self.local_decls[temp].ty);
-        lvalue
+               place, self.local_decls[temp].ty);
+        place
     }
 
     pub fn literal_operand(&mut self,
@@ -134,13 +134,13 @@ pub fn push_usize(&mut self,
         temp
     }
 
-    pub fn consume_by_copy_or_move(&self, lvalue: Place<'tcx>) -> Operand<'tcx> {
+    pub fn consume_by_copy_or_move(&self, place: Place<'tcx>) -> Operand<'tcx> {
         let tcx = self.hir.tcx();
-        let ty = lvalue.ty(&self.local_decls, tcx).to_ty(tcx);
+        let ty = place.ty(&self.local_decls, tcx).to_ty(tcx);
         if self.hir.type_moves_by_default(ty, DUMMY_SP) {
-            Operand::Move(lvalue)
+            Operand::Move(place)
         } else {
-            Operand::Copy(lvalue)
+            Operand::Copy(place)
         }
     }
 }
index b65c1036eb8d9a27e2aad72b1a9040cc3172616f..4349820dbe9e1255225558dbd08474888fe3a998 100644 (file)
@@ -480,7 +480,7 @@ fn construct_const<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>,
 
     let mut block = START_BLOCK;
     let expr = builder.hir.mirror(ast_expr);
-    unpack!(block = builder.into_expr(&Place::Local(RETURN_POINTER), block, expr));
+    unpack!(block = builder.into_expr(&Place::Local(RETURN_PLACE), block, expr));
 
     let source_info = builder.source_info(span);
     builder.cfg.terminate(block, source_info, TerminatorKind::Return);
@@ -523,7 +523,7 @@ fn new(hir: Cx<'a, 'gcx, 'tcx>,
             push_unsafe_count: 0,
             unpushed_unsafe: safety,
             breakable_scopes: vec![],
-            local_decls: IndexVec::from_elem_n(LocalDecl::new_return_pointer(return_ty,
+            local_decls: IndexVec::from_elem_n(LocalDecl::new_return_place(return_ty,
                                                                              span), 1),
             var_indices: NodeMap(),
             unit_temp: None,
@@ -597,9 +597,9 @@ fn args_and_body(&mut self,
         let mut scope = None;
         // Bind the argument patterns
         for (index, &(ty, pattern)) in arguments.iter().enumerate() {
-            // Function arguments always get the first Local indices after the return pointer
+            // Function arguments always get the first Local indices after the return place
             let local = Local::new(index + 1);
-            let lvalue = Place::Local(local);
+            let place = Place::Local(local);
 
             if let Some(pattern) = pattern {
                 let pattern = self.hir.pattern_from_hir(pattern);
@@ -613,14 +613,14 @@ fn args_and_body(&mut self,
                     _ => {
                         scope = self.declare_bindings(scope, ast_body.span,
                                                       LintLevel::Inherited, &pattern);
-                        unpack!(block = self.lvalue_into_pattern(block, pattern, &lvalue));
+                        unpack!(block = self.place_into_pattern(block, pattern, &place));
                     }
                 }
             }
 
             // Make sure we drop (parts of) the argument even when not matched on.
             self.schedule_drop(pattern.as_ref().map_or(ast_body.span, |pat| pat.span),
-                               argument_scope, &lvalue, ty);
+                               argument_scope, &place, ty);
 
         }
 
@@ -630,7 +630,7 @@ fn args_and_body(&mut self,
         }
 
         let body = self.hir.mirror(ast_body);
-        self.into(&Place::Local(RETURN_POINTER), block, body)
+        self.into(&Place::Local(RETURN_PLACE), block, body)
     }
 
     fn get_unit_temp(&mut self) -> Place<'tcx> {
index aeda34a7a574d10c080c2babd77bd4368aa17fae..3814dde17bb679339c44f18970b2a14ead563eaa 100644 (file)
 ### Drops
 
 The primary purpose for scopes is to insert drops: while translating
-the contents, we also accumulate lvalues that need to be dropped upon
+the contents, we also accumulate places that need to be dropped upon
 exit from each scope. This is done by calling `schedule_drop`. Once a
 drop is scheduled, whenever we branch out we will insert drops of all
-those lvalues onto the outgoing edge. Note that we don't know the full
+those places onto the outgoing edge. Note that we don't know the full
 set of scheduled drops up front, and so whenever we exit from the
 scope we only drop the values scheduled thus far. For example, consider
 the scope S corresponding to this loop:
@@ -120,7 +120,7 @@ pub struct Scope<'tcx> {
     ///  * freeing up stack space has no effect during unwinding
     needs_cleanup: bool,
 
-    /// set of lvalues to drop when exiting this scope. This starts
+    /// set of places to drop when exiting this scope. This starts
     /// out empty but grows as variables are declared during the
     /// building process. This is a stack, so we always drop from the
     /// end of the vector (top of the stack) first.
@@ -138,10 +138,10 @@ pub struct Scope<'tcx> {
 
 #[derive(Debug)]
 struct DropData<'tcx> {
-    /// span where drop obligation was incurred (typically where lvalue was declared)
+    /// span where drop obligation was incurred (typically where place was declared)
     span: Span,
 
-    /// lvalue to drop
+    /// place to drop
     location: Place<'tcx>,
 
     /// Whether this is a full value Drop, or just a StorageDead.
@@ -608,19 +608,19 @@ pub fn local_scope(&self) -> Option<region::Scope> {
 
     // Scheduling drops
     // ================
-    /// Indicates that `lvalue` should be dropped on exit from
+    /// Indicates that `place` should be dropped on exit from
     /// `region_scope`.
     pub fn schedule_drop(&mut self,
                          span: Span,
                          region_scope: region::Scope,
-                         lvalue: &Place<'tcx>,
-                         lvalue_ty: Ty<'tcx>) {
-        let needs_drop = self.hir.needs_drop(lvalue_ty);
+                         place: &Place<'tcx>,
+                         place_ty: Ty<'tcx>) {
+        let needs_drop = self.hir.needs_drop(place_ty);
         let drop_kind = if needs_drop {
             DropKind::Value { cached_block: CachedBlock::default() }
         } else {
             // Only temps and vars need their storage dead.
-            match *lvalue {
+            match *place {
                 Place::Local(index) if index.index() > self.arg_count => DropKind::Storage,
                 _ => return
             }
@@ -685,13 +685,13 @@ pub fn schedule_drop(&mut self,
                 let scope_end = region_scope_span.with_lo(region_scope_span.hi());
                 scope.drops.push(DropData {
                     span: scope_end,
-                    location: lvalue.clone(),
+                    location: place.clone(),
                     kind: drop_kind
                 });
                 return;
             }
         }
-        span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, lvalue);
+        span_bug!(span, "region scope {:?} not in scope to drop {:?}", region_scope, place);
     }
 
     // Other
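
To connect the `schedule_drop` machinery above back to source code, here is what it means for a small function (an editor's illustration; the function is invented, not part of the commit):

    fn scheduled_drops(cond: bool) {
        let x = String::from("x"); // schedule_drop(x) in the enclosing scope
        if cond {
            return;                // this exit edge drops only what is scheduled so far: x
        }
        let y = String::from("y"); // schedule_drop(y)
        println!("{} {}", x, y);
        // the normal exit pops the drop stack from the top: drop(y), then drop(x)
    }

    fn main() {
        scheduled_drops(true);
        scheduled_drops(false);
    }
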
index 981d0b9f0d978d7cc5641b6296ebb2d9469b9d9a..f0ecaea15649c71e4eeae8016fe8e522999e5d7d 100644 (file)
@@ -24,7 +24,7 @@ pub fn move_path_children_matching<'tcx, F>(move_data: &MoveData<'tcx>,
 {
     let mut next_child = move_data.move_paths[path].first_child;
     while let Some(child_index) = next_child {
-        match move_data.move_paths[child_index].lvalue {
+        match move_data.move_paths[child_index].place {
             mir::Place::Projection(ref proj) => {
                 if cond(proj) {
                     return Some(child_index)
@@ -56,19 +56,19 @@ pub fn move_path_children_matching<'tcx, F>(move_data: &MoveData<'tcx>,
 /// is no need to maintain separate drop flags to track such state.
 ///
 /// FIXME: we have to do something for moving slice patterns.
-fn lvalue_contents_drop_state_cannot_differ<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
+fn place_contents_drop_state_cannot_differ<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
                                                             mir: &Mir<'tcx>,
-                                                            lv: &mir::Place<'tcx>) -> bool {
-    let ty = lv.ty(mir, tcx).to_ty(tcx);
+                                                            place: &mir::Place<'tcx>) -> bool {
+    let ty = place.ty(mir, tcx).to_ty(tcx);
     match ty.sty {
         ty::TyArray(..) | ty::TySlice(..) | ty::TyRef(..) | ty::TyRawPtr(..) => {
-            debug!("lvalue_contents_drop_state_cannot_differ lv: {:?} ty: {:?} refd => true",
-                   lv, ty);
+            debug!("place_contents_drop_state_cannot_differ place: {:?} ty: {:?} refd => true",
+                   place, ty);
             true
         }
         ty::TyAdt(def, _) if (def.has_dtor(tcx) && !def.is_box()) || def.is_union() => {
-            debug!("lvalue_contents_drop_state_cannot_differ lv: {:?} ty: {:?} Drop => true",
-                   lv, ty);
+            debug!("place_contents_drop_state_cannot_differ place: {:?} ty: {:?} Drop => true",
+                   place, ty);
             true
         }
         _ => {
@@ -109,8 +109,8 @@ fn is_terminal_path<'a, 'gcx, 'tcx>(
         move_data: &MoveData<'tcx>,
         path: MovePathIndex) -> bool
     {
-        lvalue_contents_drop_state_cannot_differ(
-            tcx, mir, &move_data.move_paths[path].lvalue)
+        place_contents_drop_state_cannot_differ(
+            tcx, mir, &move_data.move_paths[path].place)
     }
 
     fn on_all_children_bits<'a, 'gcx, 'tcx, F>(
@@ -145,9 +145,9 @@ pub(crate) fn on_all_drop_children_bits<'a, 'gcx, 'tcx, F>(
     where F: FnMut(MovePathIndex)
 {
     on_all_children_bits(tcx, mir, &ctxt.move_data, path, |child| {
-        let lvalue = &ctxt.move_data.move_paths[path].lvalue;
-        let ty = lvalue.ty(mir, tcx).to_ty(tcx);
-        debug!("on_all_drop_children_bits({:?}, {:?} : {:?})", path, lvalue, ty);
+        let place = &ctxt.move_data.move_paths[path].place;
+        let ty = place.ty(mir, tcx).to_ty(tcx);
+        debug!("on_all_drop_children_bits({:?}, {:?} : {:?})", path, place, ty);
 
         let gcx = tcx.global_tcx();
         let erased_ty = gcx.lift(&tcx.erase_regions(&ty)).unwrap();
@@ -168,8 +168,8 @@ pub(crate) fn drop_flag_effects_for_function_entry<'a, 'gcx, 'tcx, F>(
 {
     let move_data = &ctxt.move_data;
     for arg in mir.args_iter() {
-        let lvalue = mir::Place::Local(arg);
-        let lookup_result = move_data.rev_lookup.find(&lvalue);
+        let place = mir::Place::Local(arg);
+        let lookup_result = move_data.rev_lookup.find(&place);
         on_lookup_result_bits(tcx, mir, move_data,
                               lookup_result,
                               |mpi| callback(mpi, DropFlagState::Present));
index 83a988760c4d1fed72bb8770ba6076a8a02d9b3b..32dcf28cd948376a86de0242cbe1fa139026072e 100644 (file)
@@ -42,7 +42,7 @@ pub struct Borrows<'a, 'gcx: 'tcx, 'tcx: 'a> {
 }
 
 // temporarily allow some dead fields: `kind` and `region` will be
-// needed by borrowck; `lvalue` will probably be a MovePathIndex when
+// needed by borrowck; `place` will probably be a MovePathIndex when
 // that is extended to include borrowed data paths.
 #[allow(dead_code)]
 #[derive(Debug)]
@@ -50,7 +50,7 @@ pub struct BorrowData<'tcx> {
     pub(crate) location: Location,
     pub(crate) kind: mir::BorrowKind,
     pub(crate) region: Region<'tcx>,
-    pub(crate) lvalue: mir::Place<'tcx>,
+    pub(crate) place: mir::Place<'tcx>,
 }
 
 impl<'tcx> fmt::Display for BorrowData<'tcx> {
@@ -62,7 +62,7 @@ fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
         };
         let region = format!("{}", self.region);
         let region = if region.len() > 0 { format!("{} ", region) } else { region };
-        write!(w, "&{}{}{:?}", region, kind, self.lvalue)
+        write!(w, "&{}{}{:?}", region, kind, self.place)
     }
 }
 
@@ -101,11 +101,11 @@ impl<'a, 'gcx, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'gcx, 'tcx> {
             fn visit_rvalue(&mut self,
                             rvalue: &mir::Rvalue<'tcx>,
                             location: mir::Location) {
-                if let mir::Rvalue::Ref(region, kind, ref lvalue) = *rvalue {
-                    if is_unsafe_lvalue(self.tcx, self.mir, lvalue) { return; }
+                if let mir::Rvalue::Ref(region, kind, ref place) = *rvalue {
+                    if is_unsafe_place(self.tcx, self.mir, place) { return; }
 
                     let borrow = BorrowData {
-                        location: location, kind: kind, region: region, lvalue: lvalue.clone(),
+                        location: location, kind: kind, region: region, place: place.clone(),
                     };
                     let idx = self.idx_vec.push(borrow);
                     self.location_map.insert(location, idx);
@@ -206,8 +206,8 @@ fn statement_effect(&self,
             }
 
             mir::StatementKind::Assign(_, ref rhs) => {
-                if let mir::Rvalue::Ref(region, _, ref lvalue) = *rhs {
-                    if is_unsafe_lvalue(self.tcx, self.mir, lvalue) { return; }
+                if let mir::Rvalue::Ref(region, _, ref place) = *rhs {
+                    if is_unsafe_place(self.tcx, self.mir, place) { return; }
                     let index = self.location_map.get(&location).unwrap_or_else(|| {
                         panic!("could not find BorrowIndex for location {:?}", location);
                     });
@@ -269,7 +269,7 @@ fn propagate_call_return(&self,
                              _in_out: &mut IdxSet<BorrowIndex>,
                              _call_bb: mir::BasicBlock,
                              _dest_bb: mir::BasicBlock,
-                             _dest_lval: &mir::Place) {
+                             _dest_place: &mir::Place) {
         // there are no effects on the region scopes from method calls.
     }
 }
@@ -288,15 +288,15 @@ fn bottom_value() -> bool {
     }
 }
 
-fn is_unsafe_lvalue<'a, 'gcx: 'tcx, 'tcx: 'a>(
+fn is_unsafe_place<'a, 'gcx: 'tcx, 'tcx: 'a>(
     tcx: TyCtxt<'a, 'gcx, 'tcx>,
     mir: &'a Mir<'tcx>,
-    lvalue: &mir::Place<'tcx>
+    place: &mir::Place<'tcx>
 ) -> bool {
     use self::mir::Place::*;
     use self::mir::ProjectionElem;
 
-    match *lvalue {
+    match *place {
         Local(_) => false,
         Static(ref static_) => tcx.is_static_mut(static_.def_id),
         Projection(ref proj) => {
@@ -306,13 +306,13 @@ fn is_unsafe_lvalue<'a, 'gcx: 'tcx, 'tcx: 'a>(
                 ProjectionElem::Subslice { .. } |
                 ProjectionElem::ConstantIndex { .. } |
                 ProjectionElem::Index(_) => {
-                    is_unsafe_lvalue(tcx, mir, &proj.base)
+                    is_unsafe_place(tcx, mir, &proj.base)
                 }
                 ProjectionElem::Deref => {
                     let ty = proj.base.ty(mir, tcx).to_ty(tcx);
                     match ty.sty {
                         ty::TyRawPtr(..) => true,
-                        _ => is_unsafe_lvalue(tcx, mir, &proj.base),
+                        _ => is_unsafe_place(tcx, mir, &proj.base),
                     }
                 }
             }
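
A quick reading aid for `is_unsafe_place` above (an editor's note; the example places are hypothetical and not from the commit):

    x.field             -> false  (Local base with an ordinary projection)
    STATIC_MUT          -> true   (a `static mut`; plain statics are false)
    (*raw_ptr).field    -> true   (Deref whose base type is a raw pointer)
    (*boxed_val).field  -> false  (Deref of a non-raw-pointer base just recurses)

Borrows of places classified as unsafe here are skipped both by `GatherBorrows` and by the per-statement `Assign` handling above.
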
index 94c928f5565dac489f3aaed051fd94fd8d1b9758..50c8df3c2e3d01665a63bde683944baa7712d0d1 100644 (file)
@@ -368,11 +368,11 @@ fn propagate_call_return(&self,
                              in_out: &mut IdxSet<MovePathIndex>,
                              _call_bb: mir::BasicBlock,
                              _dest_bb: mir::BasicBlock,
-                             dest_lval: &mir::Place) {
+                             dest_place: &mir::Place) {
         // when a call returns successfully, that means we need to set
-        // the bits for that dest_lval to 1 (initialized).
+        // the bits for that dest_place to 1 (initialized).
         on_lookup_result_bits(self.tcx, self.mir, self.move_data(),
-                              self.move_data().rev_lookup.find(dest_lval),
+                              self.move_data().rev_lookup.find(dest_place),
                               |mpi| { in_out.add(&mpi); });
     }
 }
@@ -384,7 +384,7 @@ fn bits_per_block(&self) -> usize {
         self.move_data().move_paths.len()
     }
 
-    // sets on_entry bits for Arg lvalues
+    // sets on_entry bits for Arg places
     fn start_block_effect(&self, sets: &mut BlockSets<MovePathIndex>) {
         // set all bits to 1 (uninit) before gathering counterevidence
         for e in sets.on_entry.words_mut() { *e = !0; }
@@ -423,11 +423,11 @@ fn propagate_call_return(&self,
                              in_out: &mut IdxSet<MovePathIndex>,
                              _call_bb: mir::BasicBlock,
                              _dest_bb: mir::BasicBlock,
-                             dest_lval: &mir::Place) {
+                             dest_place: &mir::Place) {
         // when a call returns successfully, that means we need to set
-        // the bits for that dest_lval to 0 (initialized).
+        // the bits for that dest_place to 0 (initialized).
         on_lookup_result_bits(self.tcx, self.mir, self.move_data(),
-                              self.move_data().rev_lookup.find(dest_lval),
+                              self.move_data().rev_lookup.find(dest_place),
                               |mpi| { in_out.remove(&mpi); });
     }
 }
@@ -439,7 +439,7 @@ fn bits_per_block(&self) -> usize {
         self.move_data().move_paths.len()
     }
 
-    // sets on_entry bits for Arg lvalues
+    // sets on_entry bits for Arg places
     fn start_block_effect(&self, sets: &mut BlockSets<MovePathIndex>) {
         for e in sets.on_entry.words_mut() { *e = 0; }
 
@@ -477,11 +477,11 @@ fn propagate_call_return(&self,
                              in_out: &mut IdxSet<MovePathIndex>,
                              _call_bb: mir::BasicBlock,
                              _dest_bb: mir::BasicBlock,
-                             dest_lval: &mir::Place) {
+                             dest_place: &mir::Place) {
         // when a call returns successfully, that means we need to set
-        // the bits for that dest_lval to 1 (initialized).
+        // the bits for that dest_place to 1 (initialized).
         on_lookup_result_bits(self.tcx, self.mir, self.move_data(),
-                              self.move_data().rev_lookup.find(dest_lval),
+                              self.move_data().rev_lookup.find(dest_place),
                               |mpi| { in_out.add(&mpi); });
     }
 }
@@ -561,7 +561,7 @@ fn propagate_call_return(&self,
                              in_out: &mut IdxSet<MoveOutIndex>,
                              _call_bb: mir::BasicBlock,
                              _dest_bb: mir::BasicBlock,
-                             dest_lval: &mir::Place) {
+                             dest_place: &mir::Place) {
         let move_data = self.move_data();
         let bits_per_block = self.bits_per_block();
 
@@ -569,7 +569,7 @@ fn propagate_call_return(&self,
         on_lookup_result_bits(self.tcx,
                               self.mir,
                               move_data,
-                              move_data.rev_lookup.find(dest_lval),
+                              move_data.rev_lookup.find(dest_place),
                               |mpi| for moi in &path_map[mpi] {
                                   assert!(moi.index() < bits_per_block);
                                   in_out.remove(&moi);
@@ -647,7 +647,7 @@ fn propagate_call_return(&self,
                              in_out: &mut IdxSet<InitIndex>,
                              call_bb: mir::BasicBlock,
                              _dest_bb: mir::BasicBlock,
-                             _dest_lval: &mir::Place) {
+                             _dest_place: &mir::Place) {
         let move_data = self.move_data();
         let bits_per_block = self.bits_per_block();
         let init_loc_map = &move_data.init_loc_map;
index 484dcf99a9b6c3b225209e37633e7ebd92cd8da1..fe6cd660b1e83647daffc68c6f9f44f42fdad490 100644 (file)
@@ -62,7 +62,7 @@ fn propagate_call_return(&self,
                              _in_out: &mut IdxSet<Local>,
                              _call_bb: mir::BasicBlock,
                              _dest_bb: mir::BasicBlock,
-                             _dest_lval: &mir::Place) {
+                             _dest_place: &mir::Place) {
         // Nothing to do when a call returns successfully
     }
 }
index 4851f5c96683addcb2f30acd612dbed7870dd93b..6be006b1ea972437de0af167d873abafc17de2cd 100644 (file)
@@ -610,7 +610,7 @@ fn propagate_call_return(&self,
                              in_out: &mut IdxSet<Self::Idx>,
                              call_bb: mir::BasicBlock,
                              dest_bb: mir::BasicBlock,
-                             dest_lval: &mir::Place);
+                             dest_place: &mir::Place);
 }
 
 impl<'a, 'gcx, 'tcx: 'a, D> DataflowAnalysis<'a, 'tcx, D> where D: BitDenotation
@@ -714,11 +714,11 @@ fn propagate_bits_into_graph_successors_of(
                         self.propagate_bits_into_entry_set_for(in_out, changed, unwind);
                     }
                 }
-                if let Some((ref dest_lval, ref dest_bb)) = *destination {
+                if let Some((ref dest_place, ref dest_bb)) = *destination {
                     // N.B.: This must be done *last*, after all other
                     // propagation, as documented in comment above.
                     self.flow_state.operator.propagate_call_return(
-                        in_out, bb, *dest_bb, dest_lval);
+                        in_out, bb, *dest_bb, dest_place);
                     self.propagate_bits_into_entry_set_for(in_out, changed, dest_bb);
                 }
             }
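
As an aside on the hook these hunks rename: `propagate_call_return` is where a call's successful return edge is treated like an assignment to the destination place, flipping that place's bits before the state flows into the destination block. A toy, self-contained sketch of that idea (hypothetical bitset and path indices, not the rustc dataflow framework):

    // Toy transfer function for the "initialized places" analyses:
    // on the successful-return edge of a call, every move path covered
    // by the destination place becomes initialized.
    fn propagate_call_return(in_out: &mut [bool], dest_paths: &[usize]) {
        for &path in dest_paths {
            in_out[path] = true; // gen: initialized once the call has returned
        }
    }

    fn main() {
        // One bit per move path; only path 0 is initialized before the call.
        let mut state = vec![true, false, false];
        // The call writes its return value into a place covering paths 1 and 2.
        propagate_call_return(&mut state, &[1, 2]);
        assert_eq!(state, vec![true, true, true]);
        println!("after call return: {:?}", state);
    }
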
index 3d623acb415ce13370ebf2938d90fcdacdeef87c..c20beb7d8c2a72a4c1edce1b816a152ff46861cc 100644 (file)
@@ -68,14 +68,14 @@ fn new_move_path(move_paths: &mut IndexVec<MovePathIndex, MovePath<'tcx>>,
                      path_map: &mut IndexVec<MovePathIndex, Vec<MoveOutIndex>>,
                      init_path_map: &mut IndexVec<MovePathIndex, Vec<InitIndex>>,
                      parent: Option<MovePathIndex>,
-                     lvalue: Place<'tcx>)
+                     place: Place<'tcx>)
                      -> MovePathIndex
     {
         let move_path = move_paths.push(MovePath {
             next_sibling: None,
             first_child: None,
             parent,
-            lvalue,
+            place,
         });
 
         if let Some(parent) = parent {
@@ -95,52 +95,52 @@ fn new_move_path(move_paths: &mut IndexVec<MovePathIndex, MovePath<'tcx>>,
 }
 
 impl<'b, 'a, 'gcx, 'tcx> Gatherer<'b, 'a, 'gcx, 'tcx> {
-    /// This creates a MovePath for a given lvalue, returning an `MovePathError`
-    /// if that lvalue can't be moved from.
+    /// This creates a MovePath for a given place, returning a `MovePathError`
+    /// if that place can't be moved from.
     ///
-    /// NOTE: lvalues behind references *do not* get a move path, which is
+    /// NOTE: places behind references *do not* get a move path, which is
     /// problematic for borrowck.
     ///
     /// Maybe we should have separate "borrowck" and "moveck" modes.
-    fn move_path_for(&mut self, lval: &Place<'tcx>)
+    fn move_path_for(&mut self, place: &Place<'tcx>)
                      -> Result<MovePathIndex, MoveError<'tcx>>
     {
-        debug!("lookup({:?})", lval);
-        match *lval {
+        debug!("lookup({:?})", place);
+        match *place {
             Place::Local(local) => Ok(self.builder.data.rev_lookup.locals[local]),
             Place::Static(..) => {
                 let span = self.builder.mir.source_info(self.loc).span;
                 Err(MoveError::cannot_move_out_of(span, Static))
             }
             Place::Projection(ref proj) => {
-                self.move_path_for_projection(lval, proj)
+                self.move_path_for_projection(place, proj)
             }
         }
     }
 
-    fn create_move_path(&mut self, lval: &Place<'tcx>) {
+    fn create_move_path(&mut self, place: &Place<'tcx>) {
         // This is an assignment, not a move, so this not being a valid
         // move path is OK.
-        let _ = self.move_path_for(lval);
+        let _ = self.move_path_for(place);
     }
 
     fn move_path_for_projection(&mut self,
-                                lval: &Place<'tcx>,
+                                place: &Place<'tcx>,
                                 proj: &PlaceProjection<'tcx>)
                                 -> Result<MovePathIndex, MoveError<'tcx>>
     {
         let base = try!(self.move_path_for(&proj.base));
         let mir = self.builder.mir;
         let tcx = self.builder.tcx;
-        let lv_ty = proj.base.ty(mir, tcx).to_ty(tcx);
-        match lv_ty.sty {
+        let place_ty = proj.base.ty(mir, tcx).to_ty(tcx);
+        match place_ty.sty {
             ty::TyRef(..) | ty::TyRawPtr(..) =>
                 return Err(MoveError::cannot_move_out_of(mir.source_info(self.loc).span,
                                                          BorrowedContent)),
             ty::TyAdt(adt, _) if adt.has_dtor(tcx) && !adt.is_box() =>
                 return Err(MoveError::cannot_move_out_of(mir.source_info(self.loc).span,
                                                          InteriorOfTypeWithDestructor {
-                    container_ty: lv_ty
+                    container_ty: place_ty
                 })),
             // move out of union - always move the entire union
             ty::TyAdt(adt, _) if adt.is_union() =>
@@ -149,7 +149,7 @@ fn move_path_for_projection(&mut self,
                 return Err(MoveError::cannot_move_out_of(
                     mir.source_info(self.loc).span,
                     InteriorOfSliceOrArray {
-                        ty: lv_ty, is_index: match proj.elem {
+                        ty: place_ty, is_index: match proj.elem {
                             ProjectionElem::Index(..) => true,
                             _ => false
                         },
@@ -159,7 +159,7 @@ fn move_path_for_projection(&mut self,
                     return Err(MoveError::cannot_move_out_of(
                         mir.source_info(self.loc).span,
                         InteriorOfSliceOrArray {
-                            ty: lv_ty, is_index: true
+                            ty: place_ty, is_index: true
                         })),
                 _ => {
                     // FIXME: still badly broken
@@ -175,7 +175,7 @@ fn move_path_for_projection(&mut self,
                     &mut self.builder.data.path_map,
                     &mut self.builder.data.init_path_map,
                     Some(base),
-                    lval.clone()
+                    place.clone()
                 );
                 ent.insert(path);
                 Ok(path)
@@ -265,16 +265,16 @@ struct Gatherer<'b, 'a: 'b, 'gcx: 'tcx, 'tcx: 'a> {
 impl<'b, 'a, 'gcx, 'tcx> Gatherer<'b, 'a, 'gcx, 'tcx> {
     fn gather_statement(&mut self, stmt: &Statement<'tcx>) {
         match stmt.kind {
-            StatementKind::Assign(ref lval, ref rval) => {
-                self.create_move_path(lval);
+            StatementKind::Assign(ref place, ref rval) => {
+                self.create_move_path(place);
                 if let RvalueInitializationState::Shallow = rval.initialization_state() {
                     // Box starts out uninitialized - need to create a separate
                     // move-path for the interior so it will be separate from
                     // the exterior.
-                    self.create_move_path(&lval.clone().deref());
-                    self.gather_init(lval, InitKind::Shallow);
+                    self.create_move_path(&place.clone().deref());
+                    self.gather_init(place, InitKind::Shallow);
                 } else {
-                    self.gather_init(lval, InitKind::Deep);
+                    self.gather_init(place, InitKind::Deep);
                 }
                 self.gather_rvalue(rval);
             }
@@ -318,7 +318,7 @@ fn gather_rvalue(&mut self, rvalue: &Rvalue<'tcx>) {
             Rvalue::NullaryOp(NullOp::Box, _) => {
                 // This returns an rvalue with uninitialized contents. We can't
                 // move out of it here because it is an rvalue - assignments always
-                // completely initialize their lvalue.
+                // completely initialize their place.
                 //
                 // However, this does not matter - MIR building is careful to
                 // only emit a shallow free for the partially-initialized
@@ -339,7 +339,7 @@ fn gather_terminator(&mut self, term: &Terminator<'tcx>) {
             TerminatorKind::Unreachable => { }
 
             TerminatorKind::Return => {
-                self.gather_move(&Place::Local(RETURN_POINTER));
+                self.gather_move(&Place::Local(RETURN_PLACE));
             }
 
             TerminatorKind::Assert { .. } |
@@ -376,16 +376,16 @@ fn gather_operand(&mut self, operand: &Operand<'tcx>) {
         match *operand {
             Operand::Constant(..) |
             Operand::Copy(..) => {} // not-a-move
-            Operand::Move(ref lval) => { // a move
-                self.gather_move(lval);
+            Operand::Move(ref place) => { // a move
+                self.gather_move(place);
             }
         }
     }
 
-    fn gather_move(&mut self, lval: &Place<'tcx>) {
-        debug!("gather_move({:?}, {:?})", self.loc, lval);
+    fn gather_move(&mut self, place: &Place<'tcx>) {
+        debug!("gather_move({:?}, {:?})", self.loc, place);
 
-        let path = match self.move_path_for(lval) {
+        let path = match self.move_path_for(place) {
             Ok(path) | Err(MoveError::UnionMove { path }) => path,
             Err(error @ MoveError::IllegalMove { .. }) => {
                 self.builder.errors.push(error);
@@ -395,16 +395,16 @@ fn gather_move(&mut self, lval: &Place<'tcx>) {
         let move_out = self.builder.data.moves.push(MoveOut { path: path, source: self.loc });
 
         debug!("gather_move({:?}, {:?}): adding move {:?} of {:?}",
-               self.loc, lval, move_out, path);
+               self.loc, place, move_out, path);
 
         self.builder.data.path_map[path].push(move_out);
         self.builder.data.loc_map[self.loc].push(move_out);
     }
 
-    fn gather_init(&mut self, lval: &Place<'tcx>, kind: InitKind) {
-        debug!("gather_init({:?}, {:?})", self.loc, lval);
+    fn gather_init(&mut self, place: &Place<'tcx>, kind: InitKind) {
+        debug!("gather_init({:?}, {:?})", self.loc, place);
 
-        if let LookupResult::Exact(path) = self.builder.data.rev_lookup.find(lval) {
+        if let LookupResult::Exact(path) = self.builder.data.rev_lookup.find(place) {
             let init = self.builder.data.inits.push(Init {
                 span: self.builder.mir.source_info(self.loc).span,
                 path,
@@ -412,7 +412,7 @@ fn gather_init(&mut self, lval: &Place<'tcx>, kind: InitKind) {
             });
 
             debug!("gather_init({:?}, {:?}): adding init {:?} of {:?}",
-               self.loc, lval, init, path);
+               self.loc, place, init, path);
 
             self.builder.data.init_path_map[path].push(init);
             self.builder.data.init_loc_map[self.loc].push(init);
index 5e20d9dafd1f7e6be1e9c0bac7be298996d626e1..294f48178a8ac6d98630ef1b6fad808152d43ddf 100644 (file)
@@ -94,7 +94,7 @@ pub struct MovePath<'tcx> {
     pub next_sibling: Option<MovePathIndex>,
     pub first_child: Option<MovePathIndex>,
     pub parent: Option<MovePathIndex>,
-    pub lvalue: Place<'tcx>,
+    pub place: Place<'tcx>,
 }
 
 impl<'tcx> fmt::Debug for MovePath<'tcx> {
@@ -109,13 +109,13 @@ fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
         if let Some(next_sibling) = self.next_sibling {
             write!(w, " next_sibling: {:?}", next_sibling)?;
         }
-        write!(w, " lvalue: {:?} }}", self.lvalue)
+        write!(w, " place: {:?} }}", self.place)
     }
 }
 
 impl<'tcx> fmt::Display for MovePath<'tcx> {
     fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
-        write!(w, "{:?}", self.lvalue)
+        write!(w, "{:?}", self.place)
     }
 }
 
@@ -224,11 +224,11 @@ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
 pub struct MovePathLookup<'tcx> {
     locals: IndexVec<Local, MovePathIndex>,
 
-    /// projections are made from a base-lvalue and a projection
-    /// elem. The base-lvalue will have a unique MovePathIndex; we use
+    /// projections are made from a base-place and a projection
+    /// elem. The base-place will have a unique MovePathIndex; we use
     /// the latter as the index into the outer vector (narrowing
     /// subsequent search so that it is solely relative to that
-    /// base-lvalue). For the remaining lookup, we map the projection
+    /// base-place). For the remaining lookup, we map the projection
     /// elem to the associated MovePathIndex.
     projections: FxHashMap<(MovePathIndex, AbstractElem<'tcx>), MovePathIndex>
 }
@@ -246,8 +246,8 @@ impl<'tcx> MovePathLookup<'tcx> {
     // alternative will *not* create a MovePath on the fly for an
     // unknown l-value, but will rather return the nearest available
     // parent.
-    pub fn find(&self, lval: &Place<'tcx>) -> LookupResult {
-        match *lval {
+    pub fn find(&self, place: &Place<'tcx>) -> LookupResult {
+        match *place {
             Place::Local(local) => LookupResult::Exact(self.locals[local]),
             Place::Static(..) => LookupResult::Parent(None),
             Place::Projection(ref proj) => {
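
The `MovePathLookup` comment above describes a reverse lookup keyed on a base place's move path plus one projection element. A minimal sketch of that scheme, assuming hypothetical simplified stand-ins for `MovePathIndex`/`AbstractElem` (not the rustc implementation):

    use std::collections::HashMap;

    type MovePathIndex = usize;

    #[derive(Clone, PartialEq, Eq, Hash)]
    enum Elem { Deref, Field(usize) }

    // A place: a local plus a chain of projection elements.
    struct Place { local: usize, projection: Vec<Elem> }

    enum LookupResult { Exact(MovePathIndex), Parent(Option<MovePathIndex>) }

    struct Lookup {
        locals: Vec<MovePathIndex>, // move path of each local
        projections: HashMap<(MovePathIndex, Elem), MovePathIndex>,
    }

    impl Lookup {
        // Walk the projection chain from the base local's move path; if some
        // element has no recorded child path, return the nearest known parent.
        fn find(&self, place: &Place) -> LookupResult {
            let mut result = self.locals[place.local];
            for elem in &place.projection {
                match self.projections.get(&(result, elem.clone())) {
                    Some(&child) => result = child,
                    None => return LookupResult::Parent(Some(result)),
                }
            }
            LookupResult::Exact(result)
        }
    }

    fn main() {
        let lookup = Lookup {
            locals: vec![0],
            projections: HashMap::from([((0, Elem::Field(1)), 1)]),
        };
        // `local0.1` has an exact path; `(*local0.1)` only has a parent.
        let place = Place { local: 0, projection: vec![Elem::Field(1), Elem::Deref] };
        match lookup.find(&place) {
            LookupResult::Exact(p) => println!("exact move path {}", p),
            LookupResult::Parent(p) => println!("nearest parent {:?}", p),
        }
    }
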
index 00d7cdc0ff7a424c0035aba338e6d86293ca7646..848c2d3c811e99495a83aade112bffb9a4b100c6 100644 (file)
@@ -116,7 +116,7 @@ fn apply_adjustment<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
                 },
             };
 
-            overloaded_lvalue(cx, hir_expr, adjustment.target, Some(call), vec![expr.to_ref()])
+            overloaded_place(cx, hir_expr, adjustment.target, Some(call), vec![expr.to_ref()])
         }
         Adjust::Borrow(AutoBorrow::Ref(r, m)) => {
             ExprKind::Borrow {
@@ -335,7 +335,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
 
         hir::ExprIndex(ref lhs, ref index) => {
             if cx.tables().is_method_call(expr) {
-                overloaded_lvalue(cx, expr, expr_ty, None, vec![lhs.to_ref(), index.to_ref()])
+                overloaded_place(cx, expr, expr_ty, None, vec![lhs.to_ref(), index.to_ref()])
             } else {
                 ExprKind::Index {
                     lhs: lhs.to_ref(),
@@ -346,7 +346,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
 
         hir::ExprUnary(hir::UnOp::UnDeref, ref arg) => {
             if cx.tables().is_method_call(expr) {
-                overloaded_lvalue(cx, expr, expr_ty, None, vec![arg.to_ref()])
+                overloaded_place(cx, expr, expr_ty, None, vec![arg.to_ref()])
             } else {
                 ExprKind::Deref { arg: arg.to_ref() }
             }
@@ -844,15 +844,15 @@ fn overloaded_operator<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
     }
 }
 
-fn overloaded_lvalue<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
+fn overloaded_place<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
                                      expr: &'tcx hir::Expr,
-                                     lvalue_ty: Ty<'tcx>,
+                                     place_ty: Ty<'tcx>,
                                      custom_callee: Option<(DefId, &'tcx Substs<'tcx>)>,
                                      args: Vec<ExprRef<'tcx>>)
                                      -> ExprKind<'tcx> {
     // For an overloaded *x or x[y] expression of type T, the method
     // call returns an &T and we must add the deref so that the types
-    // line up (this is because `*x` and `x[y]` represent lvalues):
+    // line up (this is because `*x` and `x[y]` represent places):
 
     let recv_ty = match args[0] {
         ExprRef::Hair(e) => cx.tables().expr_ty_adjusted(e),
@@ -864,10 +864,10 @@ fn overloaded_lvalue<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
     // `Deref(Mut)::Deref(_mut)` and `Index(Mut)::index(_mut)`.
     let (region, mt) = match recv_ty.sty {
         ty::TyRef(region, mt) => (region, mt),
-        _ => span_bug!(expr.span, "overloaded_lvalue: receiver is not a reference"),
+        _ => span_bug!(expr.span, "overloaded_place: receiver is not a reference"),
     };
     let ref_ty = cx.tcx.mk_ref(region, ty::TypeAndMut {
-        ty: lvalue_ty,
+        ty: place_ty,
         mutbl: mt.mutbl,
     });
 
index 767ccda67416af215836b1c9ec5d8b34333255a0..ad228e24e6b13a249c20f5e3980579b4b32a0e0b 100644 (file)
@@ -384,14 +384,14 @@ fn copy_shim(&mut self) {
         let rcvr = Place::Local(Local::new(1+0)).deref();
         let ret_statement = self.make_statement(
             StatementKind::Assign(
-                Place::Local(RETURN_POINTER),
+                Place::Local(RETURN_PLACE),
                 Rvalue::Use(Operand::Copy(rcvr))
             )
         );
         self.block(vec![ret_statement], TerminatorKind::Return, false);
     }
 
-    fn make_lvalue(&mut self, mutability: Mutability, ty: Ty<'tcx>) -> Place<'tcx> {
+    fn make_place(&mut self, mutability: Mutability, ty: Ty<'tcx>) -> Place<'tcx> {
         let span = self.span;
         Place::Local(
             self.local_decls.push(temp_decl(mutability, ty, span))
@@ -427,7 +427,7 @@ fn make_clone_call(
             },
         });
 
-        let ref_loc = self.make_lvalue(
+        let ref_loc = self.make_place(
             Mutability::Not,
             tcx.mk_ref(tcx.types.re_erased, ty::TypeAndMut {
                 ty,
@@ -435,7 +435,7 @@ fn make_clone_call(
             })
         );
 
-        let loc = self.make_lvalue(Mutability::Not, ty);
+        let loc = self.make_place(Mutability::Not, ty);
 
         // `let ref_loc: &ty = &rcvr_field;`
         let statement = self.make_statement(
@@ -466,7 +466,7 @@ fn loop_header(
     ) {
         let tcx = self.tcx;
 
-        let cond = self.make_lvalue(Mutability::Mut, tcx.types.bool);
+        let cond = self.make_place(Mutability::Mut, tcx.types.bool);
         let compute_cond = self.make_statement(
             StatementKind::Assign(
                 cond.clone(),
@@ -502,8 +502,8 @@ fn array_shim(&mut self, ty: Ty<'tcx>, len: u64) {
         let rcvr = Place::Local(Local::new(1+0)).deref();
 
         let beg = self.local_decls.push(temp_decl(Mutability::Mut, tcx.types.usize, span));
-        let end = self.make_lvalue(Mutability::Not, tcx.types.usize);
-        let ret = self.make_lvalue(Mutability::Mut, tcx.mk_array(ty, len));
+        let end = self.make_place(Mutability::Not, tcx.types.usize);
+        let ret = self.make_place(Mutability::Mut, tcx.mk_array(ty, len));
 
         // BB #0
         // `let mut beg = 0;`
@@ -567,7 +567,7 @@ fn array_shim(&mut self, ty: Ty<'tcx>, len: u64) {
         // `return ret;`
         let ret_statement = self.make_statement(
             StatementKind::Assign(
-                Place::Local(RETURN_POINTER),
+                Place::Local(RETURN_PLACE),
                 Rvalue::Use(Operand::Move(ret.clone())),
             )
         );
@@ -663,7 +663,7 @@ fn tuple_like_shim(&mut self, tys: &[ty::Ty<'tcx>], kind: AggregateKind<'tcx>) {
         // `return kind(returns[0], returns[1], ..., returns[tys.len() - 1]);`
         let ret_statement = self.make_statement(
             StatementKind::Assign(
-                Place::Local(RETURN_POINTER),
+                Place::Local(RETURN_PLACE),
                 Rvalue::Aggregate(
                     box kind,
                     returns.into_iter().map(Operand::Move).collect()
@@ -749,8 +749,8 @@ fn build_call_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 
     if let Some(untuple_args) = untuple_args {
         args.extend(untuple_args.iter().enumerate().map(|(i, ity)| {
-            let arg_lv = Place::Local(Local::new(1+1));
-            Operand::Move(arg_lv.field(Field::new(i), *ity))
+            let arg_place = Place::Local(Local::new(1+1));
+            Operand::Move(arg_place.field(Field::new(i), *ity))
         }));
     } else {
         args.extend((1..sig.inputs().len()).map(|i| {
@@ -771,7 +771,7 @@ fn build_call_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     block(&mut blocks, statements, TerminatorKind::Call {
         func: callee,
         args,
-        destination: Some((Place::Local(RETURN_POINTER),
+        destination: Some((Place::Local(RETURN_PLACE),
                            BasicBlock::new(1))),
         cleanup: if let Adjustment::RefMut = rcvr_adjustment {
             Some(BasicBlock::new(3))
@@ -864,7 +864,7 @@ pub fn build_adt_ctor<'a, 'gcx, 'tcx>(infcx: &infer::InferCtxt<'a, 'gcx, 'tcx>,
         statements: vec![Statement {
             source_info,
             kind: StatementKind::Assign(
-                Place::Local(RETURN_POINTER),
+                Place::Local(RETURN_PLACE),
                 Rvalue::Aggregate(
                     box AggregateKind::Adt(adt_def, variant_no, substs, None),
                     (1..sig.inputs().len()+1).map(|i| {
index fa90d511d943a9cc7a54972df8ef4eabc1c4e962..f0f6add3f6f81c399bdb785eb8994aa44101e102 100644 (file)
@@ -22,9 +22,9 @@
 
 pub struct AddValidation;
 
-/// Determine the "context" of the lval: Mutability and region.
-fn lval_context<'a, 'tcx, D>(
-    lval: &Place<'tcx>,
+/// Determine the "context" of the place: Mutability and region.
+fn place_context<'a, 'tcx, D>(
+    place: &Place<'tcx>,
     local_decls: &D,
     tcx: TyCtxt<'a, 'tcx, 'tcx>
 ) -> (Option<region::Scope>, hir::Mutability)
@@ -32,7 +32,7 @@ fn lval_context<'a, 'tcx, D>(
 {
     use rustc::mir::Place::*;
 
-    match *lval {
+    match *place {
         Local { .. } => (None, hir::MutMutable),
         Static(_) => (None, hir::MutImmutable),
         Projection(ref proj) => {
@@ -66,7 +66,7 @@ fn lval_context<'a, 'tcx, D>(
                         // This is already as restricted as it gets, no need to even recurse
                         context
                     } else {
-                        let base_context = lval_context(&proj.base, local_decls, tcx);
+                        let base_context = place_context(&proj.base, local_decls, tcx);
                         // The region of the outermost Deref is always most restrictive.
                         let re = context.0.or(base_context.0);
                         let mutbl = context.1.and(base_context.1);
@@ -74,7 +74,7 @@ fn lval_context<'a, 'tcx, D>(
                     }
 
                 }
-                _ => lval_context(&proj.base, local_decls, tcx),
+                _ => place_context(&proj.base, local_decls, tcx),
             }
         }
     }
@@ -198,11 +198,11 @@ fn run_pass<'a, 'tcx>(&self,
         let restricted_validation = emit_validate == 1 && fn_contains_unsafe(tcx, src);
         let local_decls = mir.local_decls.clone(); // FIXME: Find a way to get rid of this clone.
 
-        // Convert an lvalue to a validation operand.
-        let lval_to_operand = |lval: Place<'tcx>| -> ValidationOperand<'tcx, Place<'tcx>> {
-            let (re, mutbl) = lval_context(&lval, &local_decls, tcx);
-            let ty = lval.ty(&local_decls, tcx).to_ty(tcx);
-            ValidationOperand { lval, ty, re, mutbl }
+        // Convert a place to a validation operand.
+        let place_to_operand = |place: Place<'tcx>| -> ValidationOperand<'tcx, Place<'tcx>> {
+            let (re, mutbl) = place_context(&place, &local_decls, tcx);
+            let ty = place.ty(&local_decls, tcx).to_ty(tcx);
+            ValidationOperand { place, ty, re, mutbl }
         };
 
         // Emit an Acquire at the beginning of the given block.  If we are in restricted emission
@@ -237,7 +237,7 @@ fn run_pass<'a, 'tcx>(&self,
             };
             // Gather all arguments, skip return value.
             let operands = mir.local_decls.iter_enumerated().skip(1).take(mir.arg_count)
-                    .map(|(local, _)| lval_to_operand(Place::Local(local))).collect();
+                    .map(|(local, _)| place_to_operand(Place::Local(local))).collect();
             emit_acquire(&mut mir.basic_blocks_mut()[START_BLOCK], source_info, operands);
         }
 
@@ -256,13 +256,13 @@ fn run_pass<'a, 'tcx>(&self,
                         let release_stmt = Statement {
                             source_info,
                             kind: StatementKind::Validate(ValidationOp::Release,
-                                destination.iter().map(|dest| lval_to_operand(dest.0.clone()))
+                                destination.iter().map(|dest| place_to_operand(dest.0.clone()))
                                 .chain(
                                     args.iter().filter_map(|op| {
                                         match op {
-                                            &Operand::Copy(ref lval) |
-                                            &Operand::Move(ref lval) =>
-                                                Some(lval_to_operand(lval.clone())),
+                                            &Operand::Copy(ref place) |
+                                            &Operand::Move(ref place) =>
+                                                Some(place_to_operand(place.clone())),
                                             &Operand::Constant(..) => { None },
                                         }
                                     })
@@ -275,16 +275,16 @@ fn run_pass<'a, 'tcx>(&self,
                         returns.push((source_info, destination.0.clone(), destination.1));
                     }
                 }
-                Some(Terminator { kind: TerminatorKind::Drop { location: ref lval, .. },
+                Some(Terminator { kind: TerminatorKind::Drop { location: ref place, .. },
                                   source_info }) |
-                Some(Terminator { kind: TerminatorKind::DropAndReplace { location: ref lval, .. },
+                Some(Terminator { kind: TerminatorKind::DropAndReplace { location: ref place, .. },
                                   source_info }) => {
                     // Before the call: Release all arguments
                     if !restricted_validation {
                         let release_stmt = Statement {
                             source_info,
                             kind: StatementKind::Validate(ValidationOp::Release,
-                                    vec![lval_to_operand(lval.clone())]),
+                                    vec![place_to_operand(place.clone())]),
                         };
                         block_data.statements.push(release_stmt);
                     }
@@ -296,11 +296,11 @@ fn run_pass<'a, 'tcx>(&self,
             }
         }
         // Now we go over the returns we collected to acquire the return values.
-        for (source_info, dest_lval, dest_block) in returns {
+        for (source_info, dest_place, dest_block) in returns {
             emit_acquire(
                 &mut mir.basic_blocks_mut()[dest_block],
                 source_info,
-                vec![lval_to_operand(dest_lval)]
+                vec![place_to_operand(dest_place)]
             );
         }
 
@@ -321,22 +321,20 @@ fn run_pass<'a, 'tcx>(&self,
                     StatementKind::Assign(_, Rvalue::Ref(_, _, _)) => {
                         // Due to a lack of NLL; we can't capture anything directly here.
                         // Instead, we have to re-match and clone there.
-                        let (dest_lval, re, src_lval) = match block_data.statements[i].kind {
-                            StatementKind::Assign(ref dest_lval,
-                                                  Rvalue::Ref(re, _, ref src_lval)) => {
-                                (dest_lval.clone(), re, src_lval.clone())
+                        let (dest_place, re, src_place) = match block_data.statements[i].kind {
+                            StatementKind::Assign(ref dest_place,
+                                                  Rvalue::Ref(re, _, ref src_place)) => {
+                                (dest_place.clone(), re, src_place.clone())
                             },
                             _ => bug!("We already matched this."),
                         };
                         // So this is a ref, and we got all the data we wanted.
                         // Do an acquire of the result -- but only what it points to, so add a Deref
                         // projection.
-                        let dest_lval = Projection { base: dest_lval, elem: ProjectionElem::Deref };
-                        let dest_lval = Place::Projection(Box::new(dest_lval));
                         let acquire_stmt = Statement {
                             source_info: block_data.statements[i].source_info,
                             kind: StatementKind::Validate(ValidationOp::Acquire,
-                                    vec![lval_to_operand(dest_lval)]),
+                                    vec![place_to_operand(dest_place.deref())]),
                         };
                         block_data.statements.insert(i+1, acquire_stmt);
 
@@ -349,7 +347,7 @@ fn run_pass<'a, 'tcx>(&self,
                         };
                         let release_stmt = Statement {
                             source_info: block_data.statements[i].source_info,
-                            kind: StatementKind::Validate(op, vec![lval_to_operand(src_lval)]),
+                            kind: StatementKind::Validate(op, vec![place_to_operand(src_place)]),
                         };
                         block_data.statements.insert(i, release_stmt);
                     }
@@ -360,13 +358,13 @@ fn run_pass<'a, 'tcx>(&self,
                     {
                         // Due to a lack of NLL; we can't capture anything directly here.
                         // Instead, we have to re-match and clone there.
-                        let (dest_lval, src_lval) = match block_data.statements[i].kind {
-                            StatementKind::Assign(ref dest_lval,
-                                    Rvalue::Cast(_, Operand::Copy(ref src_lval), _)) |
-                            StatementKind::Assign(ref dest_lval,
-                                    Rvalue::Cast(_, Operand::Move(ref src_lval), _)) =>
+                        let (dest_place, src_place) = match block_data.statements[i].kind {
+                            StatementKind::Assign(ref dest_place,
+                                    Rvalue::Cast(_, Operand::Copy(ref src_place), _)) |
+                            StatementKind::Assign(ref dest_place,
+                                    Rvalue::Cast(_, Operand::Move(ref src_place), _)) =>
                             {
-                                (dest_lval.clone(), src_lval.clone())
+                                (dest_place.clone(), src_place.clone())
                             },
                             _ => bug!("We already matched this."),
                         };
@@ -375,7 +373,7 @@ fn run_pass<'a, 'tcx>(&self,
                         let acquire_stmt = Statement {
                             source_info: block_data.statements[i].source_info,
                             kind: StatementKind::Validate(ValidationOp::Acquire,
-                                    vec![lval_to_operand(dest_lval)]),
+                                    vec![place_to_operand(dest_place)]),
                         };
                         block_data.statements.insert(i+1, acquire_stmt);
 
@@ -383,7 +381,7 @@ fn run_pass<'a, 'tcx>(&self,
                         let release_stmt = Statement {
                             source_info: block_data.statements[i].source_info,
                             kind: StatementKind::Validate(ValidationOp::Release,
-                                                            vec![lval_to_operand(src_lval)]),
+                                                            vec![place_to_operand(src_place)]),
                         };
                         block_data.statements.insert(i, release_stmt);
                     }
index c1baf537ff92325a5801f6134a0aa83e257d4c15..7833f4bbac7aaff50c3748e8611f5505cd4a2f90 100644 (file)
@@ -135,12 +135,12 @@ fn visit_rvalue(&mut self,
         self.super_rvalue(rvalue, location);
     }
 
-    fn visit_lvalue(&mut self,
-                    lvalue: &Place<'tcx>,
+    fn visit_place(&mut self,
+                    place: &Place<'tcx>,
                     context: PlaceContext<'tcx>,
                     location: Location) {
         if let PlaceContext::Borrow { .. } = context {
-            if util::is_disaligned(self.tcx, self.mir, self.param_env, lvalue) {
+            if util::is_disaligned(self.tcx, self.mir, self.param_env, place) {
                 let source_info = self.source_info;
                 let lint_root =
                     self.visibility_scope_info[source_info.scope].lint_root;
@@ -152,7 +152,7 @@ fn visit_lvalue(&mut self,
             }
         }
 
-        match lvalue {
+        match place {
             &Place::Projection(box Projection {
                 ref base, ref elem
             }) => {
@@ -180,7 +180,7 @@ fn visit_lvalue(&mut self,
                                     _ => span_bug!(
                                         self.source_info.span,
                                         "non-field projection {:?} from union?",
-                                        lvalue)
+                                        place)
                                 };
                                 if elem_ty.moves_by_default(self.tcx, self.param_env,
                                                             self.source_info.span) {
@@ -216,7 +216,7 @@ fn visit_lvalue(&mut self,
                 }
             }
         };
-        self.super_lvalue(lvalue, context, location);
+        self.super_place(place, context, location);
     }
 }
 
index 2371cbff08c1833382265d3745768dc34194f613..6047b4e174a23216d668e116ba961b8e56f32534 100644 (file)
@@ -108,8 +108,8 @@ fn run_pass<'a, 'tcx>(&self,
                             dest_local);
                         continue;
                     }
-                    let dest_lvalue_def = dest_use_info.defs_not_including_drop().next().unwrap();
-                    location = dest_lvalue_def.location;
+                    let dest_place_def = dest_use_info.defs_not_including_drop().next().unwrap();
+                    location = dest_place_def.location;
 
                     let basic_block = &mir[location.block];
                     let statement_index = location.statement_index;
@@ -126,9 +126,9 @@ fn run_pass<'a, 'tcx>(&self,
                         StatementKind::Assign(Place::Local(local), Rvalue::Use(ref operand)) if
                                 local == dest_local => {
                             let maybe_action = match *operand {
-                                Operand::Copy(ref src_lvalue) |
-                                Operand::Move(ref src_lvalue) => {
-                                    Action::local_copy(&mir, &def_use_analysis, src_lvalue)
+                                Operand::Copy(ref src_place) |
+                                Operand::Move(ref src_place) => {
+                                    Action::local_copy(&mir, &def_use_analysis, src_place)
                                 }
                                 Operand::Constant(ref src_constant) => {
                                     Action::constant(src_constant)
@@ -202,10 +202,10 @@ enum Action<'tcx> {
 }
 
 impl<'tcx> Action<'tcx> {
-    fn local_copy(mir: &Mir<'tcx>, def_use_analysis: &DefUseAnalysis, src_lvalue: &Place<'tcx>)
+    fn local_copy(mir: &Mir<'tcx>, def_use_analysis: &DefUseAnalysis, src_place: &Place<'tcx>)
                   -> Option<Action<'tcx>> {
         // The source must be a local.
-        let src_local = if let Place::Local(local) = *src_lvalue {
+        let src_local = if let Place::Local(local) = *src_place {
             local
         } else {
             debug!("  Can't copy-propagate local: source is not a local");
@@ -269,14 +269,14 @@ fn perform(self,
                 debug!("  Replacing all uses of {:?} with {:?} (local)",
                        dest_local,
                        src_local);
-                for lvalue_use in &def_use_analysis.local_info(dest_local).defs_and_uses {
-                    if lvalue_use.context.is_storage_marker() {
-                        mir.make_statement_nop(lvalue_use.location)
+                for place_use in &def_use_analysis.local_info(dest_local).defs_and_uses {
+                    if place_use.context.is_storage_marker() {
+                        mir.make_statement_nop(place_use.location)
                     }
                 }
-                for lvalue_use in &def_use_analysis.local_info(src_local).defs_and_uses {
-                    if lvalue_use.context.is_storage_marker() {
-                        mir.make_statement_nop(lvalue_use.location)
+                for place_use in &def_use_analysis.local_info(src_local).defs_and_uses {
+                    if place_use.context.is_storage_marker() {
+                        mir.make_statement_nop(place_use.location)
                     }
                 }
 
@@ -297,22 +297,22 @@ fn perform(self,
                        dest_local,
                        src_constant);
                 let dest_local_info = def_use_analysis.local_info(dest_local);
-                for lvalue_use in &dest_local_info.defs_and_uses {
-                    if lvalue_use.context.is_storage_marker() {
-                        mir.make_statement_nop(lvalue_use.location)
+                for place_use in &dest_local_info.defs_and_uses {
+                    if place_use.context.is_storage_marker() {
+                        mir.make_statement_nop(place_use.location)
                     }
                 }
 
                 // Replace all uses of the destination local with the constant.
                 let mut visitor = ConstantPropagationVisitor::new(dest_local,
                                                                   src_constant);
-                for dest_lvalue_use in &dest_local_info.defs_and_uses {
-                    visitor.visit_location(mir, dest_lvalue_use.location)
+                for dest_place_use in &dest_local_info.defs_and_uses {
+                    visitor.visit_location(mir, dest_place_use.location)
                 }
 
                 // Zap the assignment instruction if we eliminated all the uses. We won't have been
                 // able to do that if the destination was used in a projection, because projections
-                // must have lvalues on their LHS.
+                // must have places on their LHS.
                 let use_count = dest_local_info.use_count();
                 if visitor.uses_replaced == use_count {
                     debug!("  {} of {} use(s) replaced; deleting assignment",
index 74e03c2dce8236f644fdcbc4b8a096a9f9f16d35..eccb0d231b89d4a5f2d3d4bceadb934a2340988f 100644 (file)
@@ -92,7 +92,7 @@ fn run_pass<'a, 'tcx>(&self,
                 if adt_def.is_enum() {
                     let set_discriminant = Statement {
                         kind: StatementKind::SetDiscriminant {
-                            lvalue: lhs.clone(),
+                            place: lhs.clone(),
                             variant_index: variant,
                         },
                         source_info: src_info,
index 8cf3929db6a69b5499698152d4d96724c0d2e7a1..cb79fc8d7eb27e360337c5b51ba39fa62a92c9ee 100644 (file)
@@ -358,7 +358,7 @@ fn collect_drop_flags(&mut self)
             });
 
             let path = self.move_data().rev_lookup.find(location);
-            debug!("collect_drop_flags: {:?}, lv {:?} ({:?})",
+            debug!("collect_drop_flags: {:?}, place {:?} ({:?})",
                    bb, location, path);
 
             let path = match path {
@@ -368,7 +368,7 @@ fn collect_drop_flags(&mut self)
                     let (_maybe_live, maybe_dead) = init_data.state(parent);
                     if maybe_dead {
                         span_bug!(terminator.source_info.span,
-                                  "drop of untracked, uninitialized value {:?}, lv {:?} ({:?})",
+                                  "drop of untracked, uninitialized value {:?}, place {:?} ({:?})",
                                   bb, location, path);
                     }
                     continue
@@ -443,7 +443,7 @@ fn elaborate_drops(&mut self)
     /// The desugaring drops the location if needed, and then writes
     /// the value (including setting the drop flag) over it in *both* arms.
     ///
-    /// The `replace` terminator can also be called on lvalues that
+    /// The `replace` terminator can also be called on places that
     /// are not tracked by elaboration (for example,
     /// `replace x[i] <- tmp0`). The borrow checker requires that
     /// these locations are initialized before the assignment,
@@ -554,12 +554,12 @@ fn drop_flags_on_init(&mut self) {
     fn drop_flags_for_fn_rets(&mut self) {
         for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
             if let TerminatorKind::Call {
-                destination: Some((ref lv, tgt)), cleanup: Some(_), ..
+                destination: Some((ref place, tgt)), cleanup: Some(_), ..
             } = data.terminator().kind {
                 assert!(!self.patch.is_patched(bb));
 
                 let loc = Location { block: tgt, statement_index: 0 };
-                let path = self.move_data().rev_lookup.find(lv);
+                let path = self.move_data().rev_lookup.find(place);
                 on_lookup_result_bits(
                     self.tcx, self.mir, self.move_data(), path,
                     |child| self.set_drop_flag(loc, child, DropFlagState::Present)
@@ -628,12 +628,12 @@ fn drop_flags_for_locs(&mut self) {
             // so mark the return as initialized *before* the
             // call.
             if let TerminatorKind::Call {
-                destination: Some((ref lv, _)), cleanup: None, ..
+                destination: Some((ref place, _)), cleanup: None, ..
             } = data.terminator().kind {
                 assert!(!self.patch.is_patched(bb));
 
                 let loc = Location { block: bb, statement_index: data.statements.len() };
-                let path = self.move_data().rev_lookup.find(lv);
+                let path = self.move_data().rev_lookup.find(place);
                 on_lookup_result_bits(
                     self.tcx, self.mir, self.move_data(), path,
                     |child| self.set_drop_flag(loc, child, DropFlagState::Present)
index 470ce847e021a385b46ee2b7ebc7565984aa86c2..aaa28634eb82c99d56fa16116c8e38f77cfdfdf3 100644 (file)
@@ -108,17 +108,17 @@ fn visit_local(&mut self,
         assert_ne!(*local, self_arg());
     }
 
-    fn visit_lvalue(&mut self,
-                    lvalue: &mut Place<'tcx>,
+    fn visit_place(&mut self,
+                    place: &mut Place<'tcx>,
                     context: PlaceContext<'tcx>,
                     location: Location) {
-        if *lvalue == Place::Local(self_arg()) {
-            *lvalue = Place::Projection(Box::new(Projection {
-                base: lvalue.clone(),
+        if *place == Place::Local(self_arg()) {
+            *place = Place::Projection(Box::new(Projection {
+                base: place.clone(),
                 elem: ProjectionElem::Deref,
             }));
         } else {
-            self.super_lvalue(lvalue, context, location);
+            self.super_place(place, context, location);
         }
     }
 }
@@ -151,7 +151,7 @@ struct TransformVisitor<'a, 'tcx: 'a> {
     // A list of suspension points, generated during the transform
     suspension_points: Vec<SuspensionPoint>,
 
-    // The original RETURN_POINTER local
+    // The original RETURN_PLACE local
     new_ret_local: Local,
 }
 
@@ -200,17 +200,17 @@ fn visit_local(&mut self,
         assert_eq!(self.remap.get(local), None);
     }
 
-    fn visit_lvalue(&mut self,
-                    lvalue: &mut Place<'tcx>,
+    fn visit_place(&mut self,
+                    place: &mut Place<'tcx>,
                     context: PlaceContext<'tcx>,
                     location: Location) {
-        if let Place::Local(l) = *lvalue {
+        if let Place::Local(l) = *place {
             // Replace an Local in the remap with a generator struct access
             if let Some(&(ty, idx)) = self.remap.get(&l) {
-                *lvalue = self.make_field(idx, ty);
+                *place = self.make_field(idx, ty);
             }
         } else {
-            self.super_lvalue(lvalue, context, location);
+            self.super_place(place, context, location);
         }
     }
 
@@ -244,7 +244,7 @@ fn visit_basic_block_data(&mut self,
             // We must assign the value first in case it gets declared dead below
             data.statements.push(Statement {
                 source_info,
-                kind: StatementKind::Assign(Place::Local(RETURN_POINTER),
+                kind: StatementKind::Assign(Place::Local(RETURN_PLACE),
                     self.make_state(state_idx, v)),
             });
             let state = if let Some(resume) = resume { // Yield
@@ -310,7 +310,7 @@ fn replace_result_variable<'tcx>(ret_ty: Ty<'tcx>,
     mir.local_decls.swap(0, new_ret_local.index());
 
     RenameLocalVisitor {
-        from: RETURN_POINTER,
+        from: RETURN_PLACE,
         to: new_ret_local,
     }.visit_mir(mir);
 
@@ -557,7 +557,7 @@ fn create_generator_drop_shim<'a, 'tcx>(
     }
 
     // Replace the return variable
-    mir.local_decls[RETURN_POINTER] = LocalDecl {
+    mir.local_decls[RETURN_PLACE] = LocalDecl {
         mutability: Mutability::Mut,
         ty: tcx.mk_nil(),
         name: None,
@@ -783,8 +783,8 @@ fn run_pass<'a, 'tcx>(&self,
             Kind::from(mir.return_ty())].iter());
         let ret_ty = tcx.mk_adt(state_adt_ref, state_substs);
 
-        // We rename RETURN_POINTER which has type mir.return_ty to new_ret_local
-        // RETURN_POINTER then is a fresh unused local with type ret_ty.
+        // We rename RETURN_PLACE which has type mir.return_ty to new_ret_local
+        // RETURN_PLACE then is a fresh unused local with type ret_ty.
         let new_ret_local = replace_result_variable(ret_ty, mir);
 
         // Extract locals which are live across suspension point into `layout`
index f6e4e9538b378eef5955a60113e0e5739de91e3f..7216384795278915cf5d256500729916824c074a 100644 (file)
@@ -406,9 +406,9 @@ fn inline_call(&self,
                 // `i : &mut usize`, then just duplicating the `a[*i]`
                 // Place could result in two different locations if `f`
                 // writes to `i`. To prevent this we need to create a temporary
-                // borrow of the lvalue and pass the destination as `*temp` instead.
-                fn dest_needs_borrow(lval: &Place) -> bool {
-                    match *lval {
+                // borrow of the place and pass the destination as `*temp` instead.
+                fn dest_needs_borrow(place: &Place) -> bool {
+                    match *place {
                         Place::Projection(ref p) => {
                             match p.elem {
                                 ProjectionElem::Deref |
@@ -456,8 +456,8 @@ fn dest_needs_borrow(lval: &Place) -> bool {
                     // needs to generate the cast.
                     // FIXME: we should probably just generate correct MIR in the first place...
 
-                    let arg = if let Operand::Move(ref lval) = args[0] {
-                        lval.clone()
+                    let arg = if let Operand::Move(ref place) = args[0] {
+                        place.clone()
                     } else {
                         bug!("Constant arg to \"box_free\"");
                     };
@@ -715,13 +715,13 @@ fn visit_local(&mut self,
                    local: &mut Local,
                    _ctxt: PlaceContext<'tcx>,
                    _location: Location) {
-        if *local == RETURN_POINTER {
+        if *local == RETURN_PLACE {
             match self.destination {
                 Place::Local(l) => {
                     *local = l;
                     return;
                 },
-                ref lval => bug!("Return lvalue is {:?}, not local", lval)
+                ref place => bug!("Return place is {:?}, not local", place)
             }
         }
         let idx = local.index() - 1;
@@ -732,15 +732,15 @@ fn visit_local(&mut self,
         *local = self.local_map[Local::new(idx - self.args.len())];
     }
 
-    fn visit_lvalue(&mut self,
-                    lvalue: &mut Place<'tcx>,
+    fn visit_place(&mut self,
+                    place: &mut Place<'tcx>,
                     _ctxt: PlaceContext<'tcx>,
                     _location: Location) {
-        if let Place::Local(RETURN_POINTER) = *lvalue {
-            // Return pointer; update the lvalue itself
-            *lvalue = self.destination.clone();
+        if let Place::Local(RETURN_PLACE) = *place {
+            // Return pointer; update the place itself
+            *place = self.destination.clone();
         } else {
-            self.super_lvalue(lvalue, _ctxt, _location);
+            self.super_place(place, _ctxt, _location);
         }
     }
 
index 5fb66edf9441bd9d8c27605d12f76d2581babc51..8856d263864cd19380ea19cdeef0e2a189d8e7c1 100644 (file)
@@ -52,14 +52,14 @@ impl<'tcx> MutVisitor<'tcx> for InstCombineVisitor<'tcx> {
     fn visit_rvalue(&mut self, rvalue: &mut Rvalue<'tcx>, location: Location) {
         if self.optimizations.and_stars.remove(&location) {
             debug!("Replacing `&*`: {:?}", rvalue);
-            let new_lvalue = match *rvalue {
+            let new_place = match *rvalue {
                 Rvalue::Ref(_, _, Place::Projection(ref mut projection)) => {
                     // Replace with dummy
                     mem::replace(&mut projection.base, Place::Local(Local::new(0)))
                 }
                 _ => bug!("Detected `&*` but didn't find `&*`!"),
             };
-            *rvalue = Rvalue::Use(Operand::Copy(new_lvalue))
+            *rvalue = Rvalue::Use(Operand::Copy(new_place))
         }
 
         if let Some(constant) = self.optimizations.arrays_lengths.remove(&location) {
@@ -98,9 +98,9 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
             }
         }
 
-        if let Rvalue::Len(ref lvalue) = *rvalue {
-            let lvalue_ty = lvalue.ty(&self.mir.local_decls, self.tcx).to_ty(self.tcx);
-            if let TypeVariants::TyArray(_, len) = lvalue_ty.sty {
+        if let Rvalue::Len(ref place) = *rvalue {
+            let place_ty = place.ty(&self.mir.local_decls, self.tcx).to_ty(self.tcx);
+            if let TypeVariants::TyArray(_, len) = place_ty.sty {
                 let span = self.mir.source_info(location).span;
                 let ty = self.tcx.types.usize;
                 let literal = Literal::Value { value: len };
index 85109b43602eea399ac9c26930ee9241073c0e9f..7027d827c84f5caf1404c53a3b620dc098f0ffb4 100644 (file)
@@ -75,19 +75,19 @@ fn lower_128bit_ops<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, mir: &mut Mir<
                 };
 
                 let bin_statement = block.statements.pop().unwrap();
-                let (source_info, lvalue, lhs, mut rhs) = match bin_statement {
+                let (source_info, place, lhs, mut rhs) = match bin_statement {
                     Statement {
                         source_info,
                         kind: StatementKind::Assign(
-                            lvalue,
+                            place,
                             Rvalue::BinaryOp(_, lhs, rhs))
-                    } => (source_info, lvalue, lhs, rhs),
+                    } => (source_info, place, lhs, rhs),
                     Statement {
                         source_info,
                         kind: StatementKind::Assign(
-                            lvalue,
+                            place,
                             Rvalue::CheckedBinaryOp(_, lhs, rhs))
-                    } => (source_info, lvalue, lhs, rhs),
+                    } => (source_info, place, lhs, rhs),
                     _ => bug!("Statement doesn't match pattern any more?"),
                 };
 
@@ -109,7 +109,7 @@ fn lower_128bit_ops<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, mir: &mut Mir<
                 }
 
                 let call_did = check_lang_item_type(
-                    lang_item, &lvalue, &lhs, &rhs, local_decls, tcx);
+                    lang_item, &place, &lhs, &rhs, local_decls, tcx);
 
                 let bb = BasicBlock::new(cur_len + new_blocks.len());
                 new_blocks.push(after_call);
@@ -121,7 +121,7 @@ fn lower_128bit_ops<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, mir: &mut Mir<
                             func: Operand::function_handle(tcx, call_did,
                                 Slice::empty(), source_info.span),
                             args: vec![lhs, rhs],
-                            destination: Some((lvalue, bb)),
+                            destination: Some((place, bb)),
                             cleanup: None,
                         },
                     });
@@ -134,7 +134,7 @@ fn lower_128bit_ops<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, mir: &mut Mir<
 
 fn check_lang_item_type<'a, 'tcx, D>(
     lang_item: LangItem,
-    lvalue: &Place<'tcx>,
+    place: &Place<'tcx>,
     lhs: &Operand<'tcx>,
     rhs: &Operand<'tcx>,
     local_decls: &D,
@@ -147,8 +147,8 @@ fn check_lang_item_type<'a, 'tcx, D>(
     let sig = tcx.no_late_bound_regions(&poly_sig).unwrap();
     let lhs_ty = lhs.ty(local_decls, tcx);
     let rhs_ty = rhs.ty(local_decls, tcx);
-    let lvalue_ty = lvalue.ty(local_decls, tcx).to_ty(tcx);
-    let expected = [lhs_ty, rhs_ty, lvalue_ty];
+    let place_ty = place.ty(local_decls, tcx).to_ty(tcx);
+    let expected = [lhs_ty, rhs_ty, place_ty];
     assert_eq!(sig.inputs_and_output[..], expected,
         "lang item {}", tcx.def_symbol_name(did));
     did
index 2986d99a2232083d7b5312578931bc343c661284..73d5a610dbd537ac8e2e2353ad3f34629b386e50 100644 (file)
@@ -189,9 +189,9 @@ fn add_reborrow_constraint(
         &mut self,
         location: Location,
         borrow_region: ty::Region<'tcx>,
-        borrowed_lv: &Place<'tcx>,
+        borrowed_place: &Place<'tcx>,
     ) {
-        if let Projection(ref proj) = *borrowed_lv {
+        if let Projection(ref proj) = *borrowed_place {
             let PlaceProjection { ref base, ref elem } = **proj;
 
             if let ProjectionElem::Deref = *elem {
@@ -232,8 +232,8 @@ fn visit_rvalue(&mut self,
         // where L is the path that is borrowed. In that case, we have
         // to add the reborrow constraints (which don't fall out
         // naturally from the type-checker).
-        if let Rvalue::Ref(region, _bk, ref borrowed_lv) = *rvalue {
-            self.add_reborrow_constraint(location, region, borrowed_lv);
+        if let Rvalue::Ref(region, _bk, ref borrowed_place) = *rvalue {
+            self.add_reborrow_constraint(location, region, borrowed_place);
         }
 
         self.super_rvalue(rvalue, location);
index 57e8de05d479077852aaac122ded7f6833b82a8d..1e5b0bc1392bcf30eb722c893a50f0230b6c3bba 100644 (file)
@@ -316,7 +316,7 @@ fn promote_candidate(mut self, candidate: Candidate) {
             statement_index: usize::MAX
         });
 
-        self.assign(RETURN_POINTER, rvalue, span);
+        self.assign(RETURN_PLACE, rvalue, span);
         self.source.promoted.push(self.promoted);
     }
 }
@@ -373,8 +373,8 @@ pub fn promote_candidates<'a, 'tcx>(mir: &mut Mir<'tcx>,
             }
         };
 
-        // Declare return pointer local
-        let initial_locals = iter::once(LocalDecl::new_return_pointer(ty, span))
+        // Declare return place local
+        let initial_locals = iter::once(LocalDecl::new_return_place(ty, span))
             .collect();
 
         let mut promoter = Promoter {
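
For orientation: a promoted constant is a tiny MIR body whose only job is to write the promoted value into local `_0`, the return place declared just above. A hedged surface-level example of an rvalue that promotion makes `'static`:

fn main() {
    // The temporary holding `42` is promoted, so a 'static reference to it is
    // accepted; the promoted MIR body simply assigns 42 to its return place.
    let x: &'static i32 = &42;
    assert_eq!(*x, 42);
}
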
index 6aa44d64e5b80f7082a37b69a1bb6a7dd9ec97c5..b9b86dd6e840fddd1900fa01bdeb6a1b14c92380 100644 (file)
@@ -51,7 +51,7 @@ struct Qualif: u8 {
         // Function argument.
         const FN_ARGUMENT       = 1 << 2;
 
-        // Static lvalue or move from a static.
+        // Static place or move from a static.
         const STATIC            = 1 << 3;
 
         // Reference to a static.
@@ -261,7 +261,7 @@ fn assign(&mut self, dest: &Place<'tcx>, location: Location) {
                 store(&mut self.temp_qualif[index])
             }
             Place::Local(index) if self.mir.local_kind(index) == LocalKind::ReturnPointer => {
-                debug!("store to return pointer {:?}", index);
+                debug!("store to return place {:?}", index);
                 store(&mut self.return_qualif)
             }
 
@@ -280,7 +280,7 @@ fn assign(&mut self, dest: &Place<'tcx>, location: Location) {
             // This must be an explicit assignment.
             _ => {
                 // Catch more errors in the destination.
-                self.visit_lvalue(dest, PlaceContext::Store, location);
+                self.visit_place(dest, PlaceContext::Store, location);
                 self.statement_like();
             }
         }
@@ -438,11 +438,11 @@ fn visit_local(&mut self,
         }
     }
 
-    fn visit_lvalue(&mut self,
-                    lvalue: &Place<'tcx>,
+    fn visit_place(&mut self,
+                    place: &Place<'tcx>,
                     context: PlaceContext<'tcx>,
                     location: Location) {
-        match *lvalue {
+        match *place {
             Place::Local(ref local) => self.visit_local(local, context, location),
             Place::Static(ref global) => {
                 self.add(Qualif::STATIC);
@@ -467,7 +467,7 @@ fn visit_lvalue(&mut self,
             }
             Place::Projection(ref proj) => {
                 self.nest(|this| {
-                    this.super_lvalue(lvalue, context, location);
+                    this.super_place(place, context, location);
                     match proj.elem {
                         ProjectionElem::Deref => {
                             if !this.try_consume() {
@@ -502,7 +502,7 @@ fn visit_lvalue(&mut self,
                                           "cannot refer to the interior of another \
                                            static, use a constant instead");
                             }
-                            let ty = lvalue.ty(this.mir, this.tcx).to_ty(this.tcx);
+                            let ty = place.ty(this.mir, this.tcx).to_ty(this.tcx);
                             this.qualif.restrict(ty, this.tcx, this.param_env);
                         }
 
@@ -519,15 +519,15 @@ fn visit_lvalue(&mut self,
 
     fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
         match *operand {
-            Operand::Copy(ref lvalue) |
-            Operand::Move(ref lvalue) => {
+            Operand::Copy(ref place) |
+            Operand::Move(ref place) => {
                 self.nest(|this| {
                     this.super_operand(operand, location);
                     this.try_consume();
                 });
 
                 // Mark the consumed locals to indicate later drops are noops.
-                if let Place::Local(local) = *lvalue {
+                if let Place::Local(local) = *place {
                     self.local_needs_drop[local] = None;
                 }
             }
@@ -555,7 +555,7 @@ fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
     }
 
     fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
-        // Recurse through operands and lvalues.
+        // Recurse through operands and places.
         self.super_rvalue(rvalue, location);
 
         match *rvalue {
@@ -572,20 +572,20 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
             Rvalue::Discriminant(..) => {}
 
             Rvalue::Len(_) => {
-                // Static lvalues in consts would have errored already,
+                // Static places in consts would have errored already,
                 // don't treat length checks as reads from statics.
                 self.qualif = self.qualif - Qualif::STATIC;
             }
 
-            Rvalue::Ref(_, kind, ref lvalue) => {
-                // Static lvalues in consts would have errored already,
+            Rvalue::Ref(_, kind, ref place) => {
+                // Static places in consts would have errored already,
                 // only keep track of references to them here.
                 if self.qualif.intersects(Qualif::STATIC) {
                     self.qualif = self.qualif - Qualif::STATIC;
                     self.add(Qualif::STATIC_REF);
                 }
 
-                let ty = lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
+                let ty = place.ty(self.mir, self.tcx).to_ty(self.tcx);
                 if kind == BorrowKind::Mut {
                     // In theory, any zero-sized value could be borrowed
                     // mutably without consequences. However, only &mut []
@@ -635,7 +635,7 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
                 let candidate = Candidate::Ref(location);
                 if !self.qualif.intersects(Qualif::NEVER_PROMOTE) {
                     // We can only promote direct borrows of temps.
-                    if let Place::Local(local) = *lvalue {
+                    if let Place::Local(local) = *place {
                         if self.mir.local_kind(local) == LocalKind::Temp {
                             self.promotion_candidates.push(candidate);
                         }
@@ -829,14 +829,14 @@ struct and enum constructors",
                 }
                 self.assign(dest, location);
             }
-        } else if let TerminatorKind::Drop { location: ref lvalue, .. } = *kind {
+        } else if let TerminatorKind::Drop { location: ref place, .. } = *kind {
             self.super_terminator_kind(bb, kind, location);
 
             // Deny *any* live drops anywhere other than functions.
             if self.mode != Mode::Fn {
                 // HACK(eddyb) Emulate a bit of dataflow analysis,
                 // conservatively, that drop elaboration will do.
-                let needs_drop = if let Place::Local(local) = *lvalue {
+                let needs_drop = if let Place::Local(local) = *place {
                     self.local_needs_drop[local]
                 } else {
                     None
@@ -844,7 +844,7 @@ struct and enum constructors",
 
                 if let Some(span) = needs_drop {
                     // Double-check the type being dropped, to minimize false positives.
-                    let ty = lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
+                    let ty = place.ty(self.mir, self.tcx).to_ty(self.tcx);
                     if ty.needs_drop(self.tcx, self.param_env) {
                         struct_span_err!(self.tcx.sess, span, E0493,
                                          "destructors cannot be evaluated at compile-time")
@@ -905,8 +905,8 @@ fn visit_statement(&mut self, bb: BasicBlock, statement: &Statement<'tcx>, locat
         self.nest(|this| {
             this.visit_source_info(&statement.source_info);
             match statement.kind {
-                StatementKind::Assign(ref lvalue, ref rvalue) => {
-                    this.visit_assign(bb, lvalue, rvalue, location);
+                StatementKind::Assign(ref place, ref rvalue) => {
+                    this.visit_assign(bb, place, rvalue, location);
                 }
                 StatementKind::SetDiscriminant { .. } |
                 StatementKind::StorageLive(_) |
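
One of the rules visible above is that only direct borrows of temporaries become promotion candidates. A small, hedged illustration of the distinction at the source level:

fn main() {
    // `&5` borrows a temporary produced by the literal; that borrow is a
    // promotion candidate, so it may be given a 'static lifetime.
    let promoted: &'static i32 = &5;

    // Borrowing a named local is not a direct borrow of a temp, so it is not
    // promoted; this reference stays tied to the local's scope.
    let local = 5;
    let not_promoted: &i32 = &local;

    assert_eq!(*promoted, *not_promoted);
}
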
index b42fa4262704f9c281c1536f7ef06ba21a4c5a97..08508143976e61aaeed3779cb5200c8e549011f3 100644 (file)
@@ -123,13 +123,13 @@ fn each_block<'a, 'tcx, O>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         None => return,
     };
     assert!(args.len() == 1);
-    let peek_arg_lval = match args[0] {
-        mir::Operand::Copy(ref lval @ mir::Place::Local(_)) |
-        mir::Operand::Move(ref lval @ mir::Place::Local(_)) => Some(lval),
+    let peek_arg_place = match args[0] {
+        mir::Operand::Copy(ref place @ mir::Place::Local(_)) |
+        mir::Operand::Move(ref place @ mir::Place::Local(_)) => Some(place),
         _ => None,
     };
 
-    let peek_arg_lval = match peek_arg_lval {
+    let peek_arg_place = match peek_arg_place {
         Some(arg) => arg,
         None => {
             tcx.sess.diagnostic().span_err(
@@ -143,8 +143,8 @@ fn each_block<'a, 'tcx, O>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let mut kill = results.0.sets.kill_set_for(bb.index()).to_owned();
 
     // Emulate effect of all statements in the block up to (but not
-    // including) the borrow within `peek_arg_lval`. Do *not* include
-    // call to `peek_arg_lval` itself (since we are peeking the state
+    // including) the borrow within `peek_arg_place`. Do *not* include
+    // the call to `peek_arg_place` itself (since we are peeking the state
     // of the argument at time immediate preceding Call to
     // `rustc_peek`).
 
@@ -154,9 +154,9 @@ fn each_block<'a, 'tcx, O>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 
     for (j, stmt) in statements.iter().enumerate() {
         debug!("rustc_peek: ({:?},{}) {:?}", bb, j, stmt);
-        let (lvalue, rvalue) = match stmt.kind {
-            mir::StatementKind::Assign(ref lvalue, ref rvalue) => {
-                (lvalue, rvalue)
+        let (place, rvalue) = match stmt.kind {
+            mir::StatementKind::Assign(ref place, ref rvalue) => {
+                (place, rvalue)
             }
             mir::StatementKind::StorageLive(_) |
             mir::StatementKind::StorageDead(_) |
@@ -169,14 +169,14 @@ fn each_block<'a, 'tcx, O>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           "sanity_check should run before Deaggregator inserts SetDiscriminant"),
         };
 
-        if lvalue == peek_arg_lval {
-            if let mir::Rvalue::Ref(_, mir::BorrowKind::Shared, ref peeking_at_lval) = *rvalue {
+        if place == peek_arg_place {
+            if let mir::Rvalue::Ref(_, mir::BorrowKind::Shared, ref peeking_at_place) = *rvalue {
                 // Okay, our search is over.
-                match move_data.rev_lookup.find(peeking_at_lval) {
+                match move_data.rev_lookup.find(peeking_at_place) {
                     LookupResult::Exact(peek_mpi) => {
                         let bit_state = sets.on_entry.contains(&peek_mpi);
                         debug!("rustc_peek({:?} = &{:?}) bit_state: {}",
-                               lvalue, peeking_at_lval, bit_state);
+                               place, peeking_at_place, bit_state);
                         if !bit_state {
                             tcx.sess.span_err(span, "rustc_peek: bit not set");
                         }
@@ -196,10 +196,10 @@ fn each_block<'a, 'tcx, O>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             }
         }
 
-        let lhs_mpi = move_data.rev_lookup.find(lvalue);
+        let lhs_mpi = move_data.rev_lookup.find(place);
 
-        debug!("rustc_peek: computing effect on lvalue: {:?} ({:?}) in stmt: {:?}",
-               lvalue, lhs_mpi, stmt);
+        debug!("rustc_peek: computing effect on place: {:?} ({:?}) in stmt: {:?}",
+               place, lhs_mpi, stmt);
         // reset GEN and KILL sets before emulating their effect.
         for e in sets.gen_set.words_mut() { *e = 0; }
         for e in sets.kill_set.words_mut() { *e = 0; }
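
The loop that follows replays each statement's effect by resetting and re-applying the GEN and KILL bit sets. As a generic sketch of that style of transfer function over word-packed bit sets (illustrative only, not the rustc_mir dataflow API):

/// One gen/kill step: entry := (entry - kill) | gen.
fn apply_transfer(entry: &mut [u64], gen_set: &[u64], kill_set: &[u64]) {
    for ((word, &g), &k) in entry.iter_mut().zip(gen_set).zip(kill_set) {
        *word = (*word & !k) | g;
    }
}

fn main() {
    let mut entry = [0b1010u64];
    apply_transfer(&mut entry, &[0b0001], &[0b1000]);
    assert_eq!(entry[0], 0b0011);
}
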
index 319bc3db52701e00d0010bc0b2df06473bc6ae2f..3b41b2545bf5597714751f3cec4aacb4f2c59308 100644 (file)
@@ -104,13 +104,13 @@ fn visit_span(&mut self, span: &Span) {
         }
     }
 
-    fn visit_lvalue(
+    fn visit_place(
         &mut self,
-        lvalue: &Place<'tcx>,
+        place: &Place<'tcx>,
         context: PlaceContext,
         location: Location,
     ) {
-        self.sanitize_lvalue(lvalue, location, context);
+        self.sanitize_place(place, location, context);
     }
 
     fn visit_constant(&mut self, constant: &Constant<'tcx>, location: Location) {
@@ -164,18 +164,18 @@ fn sanitize_type(&mut self, parent: &fmt::Debug, ty: Ty<'tcx>) -> Ty<'tcx> {
         }
     }
 
-    fn sanitize_lvalue(&mut self,
-                       lvalue: &Place<'tcx>,
+    fn sanitize_place(&mut self,
+                       place: &Place<'tcx>,
                        location: Location,
                        context: PlaceContext)
                        -> PlaceTy<'tcx> {
-        debug!("sanitize_lvalue: {:?}", lvalue);
-        let lvalue_ty = match *lvalue {
+        debug!("sanitize_place: {:?}", place);
+        let place_ty = match *place {
             Place::Local(index) => PlaceTy::Ty {
                 ty: self.mir.local_decls[index].ty,
             },
             Place::Static(box Static { def_id, ty: sty }) => {
-                let sty = self.sanitize_type(lvalue, sty);
+                let sty = self.sanitize_type(place, sty);
                 let ty = self.tcx().type_of(def_id);
                 let ty = self.cx.normalize(&ty, location);
                 if let Err(terr) = self.cx
@@ -183,7 +183,7 @@ fn sanitize_lvalue(&mut self,
                 {
                     span_mirbug!(
                         self,
-                        lvalue,
+                        place,
                         "bad static type ({:?}: {:?}): {:?}",
                         ty,
                         sty,
@@ -198,7 +198,7 @@ fn sanitize_lvalue(&mut self,
                 } else {
                     PlaceContext::Projection(Mutability::Not)
                 };
-                let base_ty = self.sanitize_lvalue(&proj.base, location, base_context);
+                let base_ty = self.sanitize_place(&proj.base, location, base_context);
                 if let PlaceTy::Ty { ty } = base_ty {
                     if ty.references_error() {
                         assert!(self.errors_reported);
@@ -207,27 +207,27 @@ fn sanitize_lvalue(&mut self,
                         };
                     }
                 }
-                self.sanitize_projection(base_ty, &proj.elem, lvalue, location)
+                self.sanitize_projection(base_ty, &proj.elem, place, location)
             }
         };
         if let PlaceContext::Copy = context {
-            let ty = lvalue_ty.to_ty(self.tcx());
+            let ty = place_ty.to_ty(self.tcx());
             if self.cx.infcx.type_moves_by_default(self.cx.param_env, ty, DUMMY_SP) {
-                span_mirbug!(self, lvalue,
+                span_mirbug!(self, place,
                              "attempted copy of non-Copy type ({:?})", ty);
             }
         }
-        lvalue_ty
+        place_ty
     }
 
     fn sanitize_projection(
         &mut self,
         base: PlaceTy<'tcx>,
         pi: &PlaceElem<'tcx>,
-        lvalue: &Place<'tcx>,
+        place: &Place<'tcx>,
         location: Location,
     ) -> PlaceTy<'tcx> {
-        debug!("sanitize_projection: {:?} {:?} {:?}", base, pi, lvalue);
+        debug!("sanitize_projection: {:?} {:?} {:?}", base, pi, place);
         let tcx = self.tcx();
         let base_ty = base.to_ty(tcx);
         let span = self.last_span;
@@ -236,7 +236,7 @@ fn sanitize_projection(
                 let deref_ty = base_ty.builtin_deref(true, ty::LvaluePreference::NoPreference);
                 PlaceTy::Ty {
                     ty: deref_ty.map(|t| t.ty).unwrap_or_else(|| {
-                        span_mirbug_and_err!(self, lvalue, "deref of non-pointer {:?}", base_ty)
+                        span_mirbug_and_err!(self, place, "deref of non-pointer {:?}", base_ty)
                     }),
                 }
             }
@@ -249,7 +249,7 @@ fn sanitize_projection(
                 } else {
                     PlaceTy::Ty {
                         ty: base_ty.builtin_index().unwrap_or_else(|| {
-                            span_mirbug_and_err!(self, lvalue, "index of non-array {:?}", base_ty)
+                            span_mirbug_and_err!(self, place, "index of non-array {:?}", base_ty)
                         }),
                     }
                 }
@@ -258,7 +258,7 @@ fn sanitize_projection(
                 // consider verifying in-bounds
                 PlaceTy::Ty {
                     ty: base_ty.builtin_index().unwrap_or_else(|| {
-                        span_mirbug_and_err!(self, lvalue, "index of non-array {:?}", base_ty)
+                        span_mirbug_and_err!(self, place, "index of non-array {:?}", base_ty)
                     }),
                 }
             }
@@ -272,14 +272,14 @@ fn sanitize_projection(
                         } else {
                             span_mirbug_and_err!(
                                 self,
-                                lvalue,
+                                place,
                                 "taking too-small slice of {:?}",
                                 base_ty
                             )
                         }
                     }
                     ty::TySlice(..) => base_ty,
-                    _ => span_mirbug_and_err!(self, lvalue, "slice of non-array {:?}", base_ty),
+                    _ => span_mirbug_and_err!(self, place, "slice of non-array {:?}", base_ty),
                 },
             },
             ProjectionElem::Downcast(adt_def1, index) => match base_ty.sty {
@@ -288,7 +288,7 @@ fn sanitize_projection(
                         PlaceTy::Ty {
                             ty: span_mirbug_and_err!(
                                 self,
-                                lvalue,
+                                place,
                                 "cast to variant #{:?} but enum only has {:?}",
                                 index,
                                 adt_def.variants.len()
@@ -305,7 +305,7 @@ fn sanitize_projection(
                 _ => PlaceTy::Ty {
                     ty: span_mirbug_and_err!(
                         self,
-                        lvalue,
+                        place,
                         "can't downcast {:?} as {:?}",
                         base_ty,
                         adt_def1
@@ -313,13 +313,13 @@ fn sanitize_projection(
                 },
             },
             ProjectionElem::Field(field, fty) => {
-                let fty = self.sanitize_type(lvalue, fty);
-                match self.field_ty(lvalue, base, field, location) {
+                let fty = self.sanitize_type(place, fty);
+                match self.field_ty(place, base, field, location) {
                     Ok(ty) => {
                         if let Err(terr) = self.cx.eq_types(span, ty, fty, location.at_self()) {
                             span_mirbug!(
                                 self,
-                                lvalue,
+                                place,
                                 "bad field access ({:?}: {:?}): {:?}",
                                 ty,
                                 fty,
@@ -329,7 +329,7 @@ fn sanitize_projection(
                     }
                     Err(FieldAccessError::OutOfRange { field_count }) => span_mirbug!(
                         self,
-                        lvalue,
+                        place,
                         "accessed field #{} but variant only has {}",
                         field.index(),
                         field_count
@@ -551,17 +551,17 @@ fn check_stmt(&mut self, mir: &Mir<'tcx>, stmt: &Statement<'tcx>, location: Loca
         debug!("check_stmt: {:?}", stmt);
         let tcx = self.tcx();
         match stmt.kind {
-            StatementKind::Assign(ref lv, ref rv) => {
-                let lv_ty = lv.ty(mir, tcx).to_ty(tcx);
+            StatementKind::Assign(ref place, ref rv) => {
+                let place_ty = place.ty(mir, tcx).to_ty(tcx);
                 let rv_ty = rv.ty(mir, tcx);
                 if let Err(terr) =
-                    self.sub_types(rv_ty, lv_ty, location.at_successor_within_block())
+                    self.sub_types(rv_ty, place_ty, location.at_successor_within_block())
                 {
                     span_mirbug!(
                         self,
                         stmt,
                         "bad assignment ({:?} = {:?}): {:?}",
-                        lv_ty,
+                        place_ty,
                         rv_ty,
                         terr
                     );
@@ -569,17 +569,17 @@ fn check_stmt(&mut self, mir: &Mir<'tcx>, stmt: &Statement<'tcx>, location: Loca
                 self.check_rvalue(mir, rv, location);
             }
             StatementKind::SetDiscriminant {
-                ref lvalue,
+                ref place,
                 variant_index,
             } => {
-                let lvalue_type = lvalue.ty(mir, tcx).to_ty(tcx);
-                let adt = match lvalue_type.sty {
+                let place_type = place.ty(mir, tcx).to_ty(tcx);
+                let adt = match place_type.sty {
                     TypeVariants::TyAdt(adt, _) if adt.is_enum() => adt,
                     _ => {
                         span_bug!(
                             stmt.source_info.span,
                             "bad set discriminant ({:?} = {:?}): lhs is not an enum",
-                            lvalue,
+                            place,
                             variant_index
                         );
                     }
@@ -588,7 +588,7 @@ fn check_stmt(&mut self, mir: &Mir<'tcx>, stmt: &Statement<'tcx>, location: Loca
                     span_bug!(
                         stmt.source_info.span,
                         "bad set discriminant ({:?} = {:?}): value of of range",
-                        lvalue,
+                        place,
                         variant_index
                     );
                 };
@@ -627,19 +627,19 @@ fn check_terminator(
                 target,
                 unwind,
             } => {
-                let lv_ty = location.ty(mir, tcx).to_ty(tcx);
+                let place_ty = location.ty(mir, tcx).to_ty(tcx);
                 let rv_ty = value.ty(mir, tcx);
 
                 let locations = Locations {
                     from_location: term_location,
                     at_location: target.start_location(),
                 };
-                if let Err(terr) = self.sub_types(rv_ty, lv_ty, locations) {
+                if let Err(terr) = self.sub_types(rv_ty, place_ty, locations) {
                     span_mirbug!(
                         self,
                         term,
                         "bad DropAndReplace ({:?} = {:?}): {:?}",
-                        lv_ty,
+                        place_ty,
                         rv_ty,
                         terr
                     );
@@ -653,12 +653,12 @@ fn check_terminator(
                         from_location: term_location,
                         at_location: unwind.start_location(),
                     };
-                    if let Err(terr) = self.sub_types(rv_ty, lv_ty, locations) {
+                    if let Err(terr) = self.sub_types(rv_ty, place_ty, locations) {
                         span_mirbug!(
                             self,
                             term,
                             "bad DropAndReplace ({:?} = {:?}): {:?}",
-                            lv_ty,
+                            place_ty,
                             rv_ty,
                             terr
                         );
@@ -1209,8 +1209,8 @@ trait AtLocation {
     /// its successor within the block is the at-location. This means
     /// that any required region relationships must hold only upon
     /// **exiting** the statement/terminator indicated by `self`. This
-    /// is for example used when you have a `lv = rv` statement: it
-    /// indicates that the `typeof(rv) <: typeof(lv)` as of the
+    /// is for example used when you have a `place = rv` statement: it
+    /// indicates that `typeof(rv) <: typeof(place)` holds as of the
     /// **next** statement.
     fn at_successor_within_block(self) -> Locations;
 }
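
The subtyping direction in that comment (`typeof(rv) <: typeof(place)`) is the ordinary assignment rule. A hedged surface-level illustration of why the relation runs that way:

fn pick<'a>(flag: bool, short_lived: &'a str) -> &'a str {
    // The rvalue `"static text"` has type &'static str, the destination has
    // type &'a str, and &'static str <: &'a str, so the assignment is
    // accepted; the reverse direction would not be.
    let mut s: &'a str = short_lived;
    if flag {
        s = "static text";
    }
    s
}

fn main() {
    let owned = String::from("runtime text");
    assert_eq!(pick(true, &owned), "static text");
    assert_eq!(pick(false, &owned), "runtime text");
}
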
index 02792bb2f500395c2e6b6431de73560bbef86c43..d1410210bda96e8653e7e09a1ec53397a8223daa 100644 (file)
 use rustc::ty::{self, TyCtxt};
 use rustc::mir::*;
 
-/// Return `true` if this lvalue is allowed to be less aligned
+/// Return `true` if this place is allowed to be less aligned
 /// than its containing struct (because it is within a packed
 /// struct).
 pub fn is_disaligned<'a, 'tcx, L>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                   local_decls: &L,
                                   param_env: ty::ParamEnv<'tcx>,
-                                  lvalue: &Place<'tcx>)
+                                  place: &Place<'tcx>)
                                   -> bool
     where L: HasLocalDecls<'tcx>
 {
-    debug!("is_disaligned({:?})", lvalue);
-    if !is_within_packed(tcx, local_decls, lvalue) {
-        debug!("is_disaligned({:?}) - not within packed", lvalue);
+    debug!("is_disaligned({:?})", place);
+    if !is_within_packed(tcx, local_decls, place) {
+        debug!("is_disaligned({:?}) - not within packed", place);
         return false
     }
 
-    let ty = lvalue.ty(local_decls, tcx).to_ty(tcx);
+    let ty = place.ty(local_decls, tcx).to_ty(tcx);
     match tcx.layout_raw(param_env.and(ty)) {
         Ok(layout) if layout.align.abi() == 1 => {
             // if the alignment is 1, the type can't be further
             // disaligned.
-            debug!("is_disaligned({:?}) - align = 1", lvalue);
+            debug!("is_disaligned({:?}) - align = 1", place);
             false
         }
         _ => {
-            debug!("is_disaligned({:?}) - true", lvalue);
+            debug!("is_disaligned({:?}) - true", place);
             true
         }
     }
@@ -45,14 +45,14 @@ pub fn is_disaligned<'a, 'tcx, L>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 
 fn is_within_packed<'a, 'tcx, L>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                  local_decls: &L,
-                                 lvalue: &Place<'tcx>)
+                                 place: &Place<'tcx>)
                                  -> bool
     where L: HasLocalDecls<'tcx>
 {
-    let mut lvalue = lvalue;
+    let mut place = place;
     while let &Place::Projection(box Projection {
         ref base, ref elem
-    }) = lvalue {
+    }) = place {
         match *elem {
             // encountered a Deref, which is ABI-aligned
             ProjectionElem::Deref => break,
@@ -67,7 +67,7 @@ fn is_within_packed<'a, 'tcx, L>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             }
             _ => {}
         }
-        lvalue = base;
+        place = base;
     }
 
     false
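
The projection walk above looks for a `#[repr(packed)]` struct among a place's bases, because fields of such a struct may sit at underaligned offsets. A small hedged example of the layout this guards against:

#[repr(packed)]
struct Packed {
    flag: u8,
    value: u32, // stored at offset 1, normally underaligned for a u32
}

fn main() {
    let p = Packed { flag: 1, value: 7 };
    // Reading the fields by value copies out of the possibly-unaligned
    // locations, which is fine; taking a *reference* into `p.value` is the
    // operation that checks like `is_disaligned` exist to catch.
    let (f, v) = (p.flag, p.value);
    assert_eq!((f, v), (1, 7));
}
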
index 16c418617fc699bc604a4e5bf15cfd9bab27ed05..07de346e795f92375b3783f57bc102a371afb97b 100644 (file)
@@ -64,10 +64,10 @@ fn mutate_defs_and_uses<F>(&self, local: Local, mir: &mut Mir<'tcx>, mut callbac
                                where F: for<'a> FnMut(&'a mut Local,
                                                       PlaceContext<'tcx>,
                                                       Location) {
-        for lvalue_use in &self.info[local].defs_and_uses {
+        for place_use in &self.info[local].defs_and_uses {
             MutateUseVisitor::new(local,
                                   &mut callback,
-                                  mir).visit_location(mir, lvalue_use.location)
+                                  mir).visit_location(mir, place_use.location)
         }
     }
 
@@ -108,7 +108,7 @@ fn clear(&mut self) {
     }
 
     pub fn def_count(&self) -> usize {
-        self.defs_and_uses.iter().filter(|lvalue_use| lvalue_use.context.is_mutating_use()).count()
+        self.defs_and_uses.iter().filter(|place_use| place_use.context.is_mutating_use()).count()
     }
 
     pub fn def_count_not_including_drop(&self) -> usize {
@@ -118,14 +118,14 @@ pub fn def_count_not_including_drop(&self) -> usize {
     pub fn defs_not_including_drop(
         &self,
     ) -> iter::Filter<slice::Iter<Use<'tcx>>, fn(&&Use<'tcx>) -> bool> {
-        self.defs_and_uses.iter().filter(|lvalue_use| {
-            lvalue_use.context.is_mutating_use() && !lvalue_use.context.is_drop()
+        self.defs_and_uses.iter().filter(|place_use| {
+            place_use.context.is_mutating_use() && !place_use.context.is_drop()
         })
     }
 
     pub fn use_count(&self) -> usize {
-        self.defs_and_uses.iter().filter(|lvalue_use| {
-            lvalue_use.context.is_nonmutating_use()
+        self.defs_and_uses.iter().filter(|place_use| {
+            place_use.context.is_nonmutating_use()
         }).count()
     }
 }
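
The counts above classify each recorded use of a local by whether its context mutates the local. A self-contained toy version of the same bookkeeping (names invented for illustration, not the rustc types):

#[derive(Clone, Copy, PartialEq, Eq)]
enum Access {
    Def, // mutating use: the local is assigned
    Use, // non-mutating use: the local is read
}

fn def_count(accesses: &[Access]) -> usize {
    accesses.iter().filter(|&&a| a == Access::Def).count()
}

fn use_count(accesses: &[Access]) -> usize {
    accesses.iter().filter(|&&a| a == Access::Use).count()
}

fn main() {
    // For `x = 1; y = x + x;` the local `x` has one def and two uses.
    let x_accesses = [Access::Def, Access::Use, Access::Use];
    assert_eq!(def_count(&x_accesses), 1);
    assert_eq!(use_count(&x_accesses), 2);
}
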
index fc40682c9b6e63d4065999f66d3102be4fc6d8bf..e7d1e5a9ccc74c1834e7e8e1352ed231f9a50f07 100644 (file)
@@ -105,7 +105,7 @@ struct DropCtxt<'l, 'b: 'l, 'tcx: 'b, D>
 
     source_info: SourceInfo,
 
-    lvalue: &'l Place<'tcx>,
+    place: &'l Place<'tcx>,
     path: D::Path,
     succ: BasicBlock,
     unwind: Unwind,
@@ -114,7 +114,7 @@ struct DropCtxt<'l, 'b: 'l, 'tcx: 'b, D>
 pub fn elaborate_drop<'b, 'tcx, D>(
     elaborator: &mut D,
     source_info: SourceInfo,
-    lvalue: &Place<'tcx>,
+    place: &Place<'tcx>,
     path: D::Path,
     succ: BasicBlock,
     unwind: Unwind,
@@ -122,15 +122,15 @@ pub fn elaborate_drop<'b, 'tcx, D>(
     where D: DropElaborator<'b, 'tcx>
 {
     DropCtxt {
-        elaborator, source_info, lvalue, path, succ, unwind
+        elaborator, source_info, place, path, succ, unwind
     }.elaborate_drop(bb)
 }
 
 impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
     where D: DropElaborator<'b, 'tcx>
 {
-    fn lvalue_ty(&self, lvalue: &Place<'tcx>) -> Ty<'tcx> {
-        lvalue.ty(self.elaborator.mir(), self.tcx()).to_ty(self.tcx())
+    fn place_ty(&self, place: &Place<'tcx>) -> Ty<'tcx> {
+        place.ty(self.elaborator.mir(), self.tcx()).to_ty(self.tcx())
     }
 
     fn tcx(&self) -> TyCtxt<'b, 'tcx, 'tcx> {
@@ -169,7 +169,7 @@ pub fn elaborate_drop<'a>(&mut self, bb: BasicBlock) {
                 let loc = self.terminator_loc(bb);
                 self.elaborator.clear_drop_flag(loc, self.path, DropFlagMode::Deep);
                 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Drop {
-                    location: self.lvalue.clone(),
+                    location: self.place.clone(),
                     target: self.succ,
                     unwind: self.unwind.into_option(),
                 });
@@ -191,10 +191,10 @@ pub fn elaborate_drop<'a>(&mut self, bb: BasicBlock) {
         }
     }
 
-    /// Return the lvalue and move path for each field of `variant`,
+    /// Return the place and move path for each field of `variant`,
     /// (the move path is `None` if the field is a rest field).
     fn move_paths_for_fields(&self,
-                             base_lv: &Place<'tcx>,
+                             base_place: &Place<'tcx>,
                              variant_path: D::Path,
                              variant: &'tcx ty::VariantDef,
                              substs: &'tcx Substs<'tcx>)
@@ -209,32 +209,32 @@ fn move_paths_for_fields(&self,
                     &f.ty(self.tcx(), substs),
                     self.elaborator.param_env()
                 );
-            (base_lv.clone().field(field, field_ty), subpath)
+            (base_place.clone().field(field, field_ty), subpath)
         }).collect()
     }
 
     fn drop_subpath(&mut self,
-                    lvalue: &Place<'tcx>,
+                    place: &Place<'tcx>,
                     path: Option<D::Path>,
                     succ: BasicBlock,
                     unwind: Unwind)
                     -> BasicBlock
     {
         if let Some(path) = path {
-            debug!("drop_subpath: for std field {:?}", lvalue);
+            debug!("drop_subpath: for std field {:?}", place);
 
             DropCtxt {
                 elaborator: self.elaborator,
                 source_info: self.source_info,
-                path, lvalue, succ, unwind,
+                path, place, succ, unwind,
             }.elaborated_drop_block()
         } else {
-            debug!("drop_subpath: for rest field {:?}", lvalue);
+            debug!("drop_subpath: for rest field {:?}", place);
 
             DropCtxt {
                 elaborator: self.elaborator,
                 source_info: self.source_info,
-                lvalue, succ, unwind,
+                place, succ, unwind,
                 // Using `self.path` here to condition the drop on
                 // our own drop flag.
                 path: self.path
@@ -256,8 +256,8 @@ fn drop_halfladder(&mut self,
     {
         Some(succ).into_iter().chain(
             fields.iter().rev().zip(unwind_ladder)
-                .map(|(&(ref lv, path), &unwind_succ)| {
-                    succ = self.drop_subpath(lv, path, succ, unwind_succ);
+                .map(|(&(ref place, path), &unwind_succ)| {
+                    succ = self.drop_subpath(place, path, succ, unwind_succ);
                     succ
                 })
         ).collect()
@@ -302,8 +302,8 @@ fn drop_ladder<'a>(&mut self,
         debug!("drop_ladder({:?}, {:?})", self, fields);
 
         let mut fields = fields;
-        fields.retain(|&(ref lvalue, _)| {
-            self.lvalue_ty(lvalue).needs_drop(self.tcx(), self.elaborator.param_env())
+        fields.retain(|&(ref place, _)| {
+            self.place_ty(place).needs_drop(self.tcx(), self.elaborator.param_env())
         });
 
         debug!("drop_ladder - fields needing drop: {:?}", fields);
@@ -328,7 +328,7 @@ fn open_drop_for_tuple<'a>(&mut self, tys: &[Ty<'tcx>])
         debug!("open_drop_for_tuple({:?}, {:?})", self, tys);
 
         let fields = tys.iter().enumerate().map(|(i, &ty)| {
-            (self.lvalue.clone().field(Field::new(i), ty),
+            (self.place.clone().field(Field::new(i), ty),
              self.elaborator.field_subpath(self.path, Field::new(i)))
         }).collect();
 
@@ -340,7 +340,7 @@ fn open_drop_for_box<'a>(&mut self, ty: Ty<'tcx>) -> BasicBlock
     {
         debug!("open_drop_for_box({:?}, {:?})", self, ty);
 
-        let interior = self.lvalue.clone().deref();
+        let interior = self.place.clone().deref();
         let interior_path = self.elaborator.deref_subpath(self.path);
 
         let succ = self.succ; // FIXME(#6393)
@@ -386,7 +386,7 @@ fn open_drop_for_adt_contents(&mut self, adt: &'tcx ty::AdtDef,
         let (succ, unwind) = self.drop_ladder_bottom();
         if !adt.is_enum() {
             let fields = self.move_paths_for_fields(
-                self.lvalue,
+                self.place,
                 self.path,
                 &adt.variants[0],
                 substs
@@ -416,11 +416,11 @@ fn open_drop_for_multivariant(&mut self, adt: &'tcx ty::AdtDef,
             let subpath = self.elaborator.downcast_subpath(
                 self.path, variant_index);
             if let Some(variant_path) = subpath {
-                let base_lv = self.lvalue.clone().elem(
+                let base_place = self.place.clone().elem(
                     ProjectionElem::Downcast(adt, variant_index)
                         );
                 let fields = self.move_paths_for_fields(
-                    &base_lv,
+                    &base_place,
                     variant_path,
                     &adt.variants[variant_index],
                     substs);
@@ -492,7 +492,7 @@ fn adt_switch_block(&mut self,
         // way lies only trouble.
         let discr_ty = adt.repr.discr_type().to_ty(self.tcx());
         let discr = Place::Local(self.new_temp(discr_ty));
-        let discr_rv = Rvalue::Discriminant(self.lvalue.clone());
+        let discr_rv = Rvalue::Discriminant(self.place.clone());
         let switch_block = BasicBlockData {
             statements: vec![self.assign(&discr, discr_rv)],
             terminator: Some(Terminator {
@@ -517,26 +517,26 @@ fn destructor_call_block<'a>(&mut self, (succ, unwind): (BasicBlock, Unwind))
         let tcx = self.tcx();
         let drop_trait = tcx.lang_items().drop_trait().unwrap();
         let drop_fn = tcx.associated_items(drop_trait).next().unwrap();
-        let ty = self.lvalue_ty(self.lvalue);
+        let ty = self.place_ty(self.place);
         let substs = tcx.mk_substs(iter::once(Kind::from(ty)));
 
         let ref_ty = tcx.mk_ref(tcx.types.re_erased, ty::TypeAndMut {
             ty,
             mutbl: hir::Mutability::MutMutable
         });
-        let ref_lvalue = self.new_temp(ref_ty);
+        let ref_place = self.new_temp(ref_ty);
         let unit_temp = Place::Local(self.new_temp(tcx.mk_nil()));
 
         let result = BasicBlockData {
             statements: vec![self.assign(
-                &Place::Local(ref_lvalue),
-                Rvalue::Ref(tcx.types.re_erased, BorrowKind::Mut, self.lvalue.clone())
+                &Place::Local(ref_place),
+                Rvalue::Ref(tcx.types.re_erased, BorrowKind::Mut, self.place.clone())
             )],
             terminator: Some(Terminator {
                 kind: TerminatorKind::Call {
                     func: Operand::function_handle(tcx, drop_fn.def_id, substs,
                                                    self.source_info.span),
-                    args: vec![Operand::Move(Place::Local(ref_lvalue))],
+                    args: vec![Operand::Move(Place::Local(ref_place))],
                     destination: Some((unit_temp, succ)),
                     cleanup: unwind.into_option(),
                 },
@@ -572,8 +572,8 @@ fn drop_loop(&mut self,
                  ptr_based: bool)
                  -> BasicBlock
     {
-        let copy = |lv: &Place<'tcx>| Operand::Copy(lv.clone());
-        let move_ = |lv: &Place<'tcx>| Operand::Move(lv.clone());
+        let copy = |place: &Place<'tcx>| Operand::Copy(place.clone());
+        let move_ = |place: &Place<'tcx>| Operand::Move(place.clone());
         let tcx = self.tcx();
 
         let ref_ty = tcx.mk_ref(tcx.types.re_erased, ty::TypeAndMut {
@@ -591,7 +591,7 @@ fn drop_loop(&mut self,
             (Rvalue::Ref(
                  tcx.types.re_erased,
                  BorrowKind::Mut,
-                 self.lvalue.clone().index(cur)),
+                 self.place.clone().index(cur)),
              Rvalue::BinaryOp(BinOp::Add, copy(&Place::Local(cur)), one))
         };
 
@@ -643,7 +643,7 @@ fn open_drop_for_array(&mut self, ety: Ty<'tcx>) -> BasicBlock {
 
         let tcx = self.tcx();
 
-        let move_ = |lv: &Place<'tcx>| Operand::Move(lv.clone());
+        let move_ = |place: &Place<'tcx>| Operand::Move(place.clone());
         let size = &Place::Local(self.new_temp(tcx.types.usize));
         let size_is_zero = &Place::Local(self.new_temp(tcx.types.bool));
         let base_block = BasicBlockData {
@@ -667,7 +667,7 @@ fn open_drop_for_array(&mut self, ety: Ty<'tcx>) -> BasicBlock {
         self.elaborator.patch().new_block(base_block)
     }
 
-    // create a pair of drop-loops of `lvalue`, which drops its contents
+    // create a pair of drop-loops of `place`, which drops its contents
     // even in the case of 1 panic. If `ptr_based`, create a pointer loop,
     // otherwise create an index loop.
     fn drop_loop_pair(&mut self, ety: Ty<'tcx>, ptr_based: bool) -> BasicBlock {
@@ -708,15 +708,15 @@ fn drop_loop_pair(&mut self, ety: Ty<'tcx>, ptr_based: bool) -> BasicBlock {
         let cur = Place::Local(cur);
         let zero = self.constant_usize(0);
         let mut drop_block_stmts = vec![];
-        drop_block_stmts.push(self.assign(&length, Rvalue::Len(self.lvalue.clone())));
+        drop_block_stmts.push(self.assign(&length, Rvalue::Len(self.place.clone())));
         if ptr_based {
-            let tmp_ty = tcx.mk_mut_ptr(self.lvalue_ty(self.lvalue));
+            let tmp_ty = tcx.mk_mut_ptr(self.place_ty(self.place));
             let tmp = Place::Local(self.new_temp(tmp_ty));
             // tmp = &LV;
             // cur = tmp as *mut T;
             // end = Offset(cur, len);
             drop_block_stmts.push(self.assign(&tmp, Rvalue::Ref(
-                tcx.types.re_erased, BorrowKind::Mut, self.lvalue.clone()
+                tcx.types.re_erased, BorrowKind::Mut, self.place.clone()
             )));
             drop_block_stmts.push(self.assign(&cur, Rvalue::Cast(
                 CastKind::Misc, Operand::Move(tmp.clone()), iter_ty
@@ -752,7 +752,7 @@ fn drop_loop_pair(&mut self, ety: Ty<'tcx>, ptr_based: bool) -> BasicBlock {
     /// This creates a "drop ladder" that drops the needed fields of the
     /// ADT, both in the success case or if one of the destructors fail.
     fn open_drop<'a>(&mut self) -> BasicBlock {
-        let ty = self.lvalue_ty(self.lvalue);
+        let ty = self.place_ty(self.place);
         match ty.sty {
             ty::TyClosure(def_id, substs) |
             // Note that `elaborate_drops` only drops the upvars of a generator,
@@ -786,13 +786,13 @@ fn open_drop<'a>(&mut self) -> BasicBlock {
         }
     }
 
-    /// Return a basic block that drop an lvalue using the context
+    /// Return a basic block that drops a place using the context
     /// and path in `c`. If `mode` is something, also clear `c`
     /// according to it.
     ///
     /// if FLAG(self.path)
     ///     if let Some(mode) = mode: FLAG(self.path)[mode] = false
-    ///     drop(self.lv)
+    ///     drop(self.place)
     fn complete_drop<'a>(&mut self,
                          drop_mode: Option<DropFlagMode>,
                          succ: BasicBlock,
@@ -855,7 +855,7 @@ fn unelaborated_free_block<'a>(
 
         let call = TerminatorKind::Call {
             func: Operand::function_handle(tcx, free_func, substs, self.source_info.span),
-            args: vec![Operand::Move(self.lvalue.clone())],
+            args: vec![Operand::Move(self.place.clone())],
             destination: Some((unit_temp, target)),
             cleanup: None
         }; // FIXME(#6393)
@@ -868,7 +868,7 @@ fn unelaborated_free_block<'a>(
 
     fn drop_block<'a>(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
         let block = TerminatorKind::Drop {
-            location: self.lvalue.clone(),
+            location: self.place.clone(),
             target,
             unwind: unwind.into_option()
         };
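
The "drop ladder" built above chains one block per field so every droppable field still runs its destructor, with a parallel chain for the unwind path. A minimal surface-level sketch of the behaviour it implements:

struct Noisy(&'static str);

impl Drop for Noisy {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

struct Pair {
    first: Noisy,
    second: Noisy,
}

fn main() {
    // Dropping `Pair` drops `first`, then `second`; elaboration encodes that
    // as a chain of per-field drop blocks, plus an unwind chain so the
    // remaining fields are still dropped if one destructor panics.
    let _p = Pair {
        first: Noisy("first"),
        second: Noisy("second"),
    };
}
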
index ed85cc36b05809cecb1b49cc18f5ece200208f65..9da593fb48e3bb858fe1817098e69b63c1b31c61 100644 (file)
@@ -127,8 +127,8 @@ pub fn add_statement(&mut self, loc: Location, stmt: StatementKind<'tcx>) {
         self.new_statements.push((loc, stmt));
     }
 
-    pub fn add_assign(&mut self, loc: Location, lv: Place<'tcx>, rv: Rvalue<'tcx>) {
-        self.add_statement(loc, StatementKind::Assign(lv, rv));
+    pub fn add_assign(&mut self, loc: Location, place: Place<'tcx>, rv: Rvalue<'tcx>) {
+        self.add_statement(loc, StatementKind::Assign(place, rv));
     }
 
     pub fn apply(self, mir: &mut Mir<'tcx>) {
index 5f392e4592630ea7bca0e14465edc420bd7ab59a..4a0090204851c55d1fe8dbe40f10b440471a2637 100644 (file)
@@ -344,12 +344,12 @@ pub fn write_mir_intro<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
         }
     }
 
-    // Print return pointer
+    // Print return place
     let indented_retptr = format!("{}let mut {:?}: {};",
                                   INDENT,
-                                  RETURN_POINTER,
-                                  mir.local_decls[RETURN_POINTER].ty);
-    writeln!(w, "{0:1$} // return pointer",
+                                  RETURN_PLACE,
+                                  mir.local_decls[RETURN_PLACE].ty);
+    writeln!(w, "{0:1$} // return place",
              indented_retptr,
              ALIGN)?;
 
index 4bd5ce0a4afd8f727e5458286d12a53aca25362b..8a9936ecb8bb4fe9a483531bbb07c701cab9df22 100644 (file)
@@ -188,41 +188,41 @@ fn visit_operand(&mut self,
         self.super_operand(operand, location);
     }
 
-    fn visit_lvalue(&mut self,
-                    lvalue: &Place<'tcx>,
+    fn visit_place(&mut self,
+                    place: &Place<'tcx>,
                     context: mir_visit::PlaceContext<'tcx>,
                     location: Location) {
-        self.record("Place", lvalue);
-        self.record(match *lvalue {
+        self.record("Place", place);
+        self.record(match *place {
             Place::Local(..) => "Place::Local",
             Place::Static(..) => "Place::Static",
             Place::Projection(..) => "Place::Projection",
-        }, lvalue);
-        self.super_lvalue(lvalue, context, location);
+        }, place);
+        self.super_place(place, context, location);
     }
 
     fn visit_projection(&mut self,
-                        lvalue: &PlaceProjection<'tcx>,
+                        place: &PlaceProjection<'tcx>,
                         context: mir_visit::PlaceContext<'tcx>,
                         location: Location) {
-        self.record("PlaceProjection", lvalue);
-        self.super_projection(lvalue, context, location);
+        self.record("PlaceProjection", place);
+        self.super_projection(place, context, location);
     }
 
     fn visit_projection_elem(&mut self,
-                             lvalue: &PlaceElem<'tcx>,
+                             place: &PlaceElem<'tcx>,
                              context: mir_visit::PlaceContext<'tcx>,
                              location: Location) {
-        self.record("PlaceElem", lvalue);
-        self.record(match *lvalue {
+        self.record("PlaceElem", place);
+        self.record(match *place {
             ProjectionElem::Deref => "PlaceElem::Deref",
             ProjectionElem::Subslice { .. } => "PlaceElem::Subslice",
             ProjectionElem::Field(..) => "PlaceElem::Field",
             ProjectionElem::Index(..) => "PlaceElem::Index",
             ProjectionElem::ConstantIndex { .. } => "PlaceElem::ConstantIndex",
             ProjectionElem::Downcast(..) => "PlaceElem::Downcast",
-        }, lvalue);
-        self.super_projection_elem(lvalue, context, location);
+        }, place);
+        self.super_projection_elem(place, context, location);
     }
 
     fn visit_constant(&mut self,
index 9c23ffec6c06509552633c7fc5c2ff131cbfdd0c..834558fc16614825af9dd3dde7684502fcde389b 100644 (file)
@@ -30,7 +30,7 @@
 use cabi_nvptx;
 use cabi_nvptx64;
 use cabi_hexagon;
-use mir::lvalue::{Alignment, PlaceRef};
+use mir::place::{Alignment, PlaceRef};
 use mir::operand::OperandValue;
 use type_::Type;
 use type_of::{LayoutLlvmExt, PointerKind};
@@ -545,14 +545,14 @@ pub fn is_ignore(&self) -> bool {
         self.mode == PassMode::Ignore
     }
 
-    /// Get the LLVM type for an lvalue of the original Rust type of
+    /// Get the LLVM type for a place of the original Rust type of
     /// this argument/return, i.e. the result of `type_of::type_of`.
     pub fn memory_ty(&self, ccx: &CrateContext<'a, 'tcx>) -> Type {
         self.layout.llvm_type(ccx)
     }
 
     /// Store a direct/indirect value described by this ArgType into a
-    /// lvalue for the original Rust type of this argument/return.
+    /// place for the original Rust type of this argument/return.
     /// Can be used for both storing formal arguments into Rust variables
     /// or results of call/invoke instructions into their destinations.
     pub fn store(&self, bcx: &Builder<'a, 'tcx>, val: ValueRef, dst: PlaceRef<'tcx>) {
index 5e6dfabc18873a097595bf41e067d94ed98e54a1..ef76fece088fe24af69d2ad116ab6677a6c6adc7 100644 (file)
@@ -18,7 +18,7 @@
 
 use rustc::hir;
 
-use mir::lvalue::PlaceRef;
+use mir::place::PlaceRef;
 use mir::operand::OperandValue;
 
 use std::ffi::CString;
@@ -37,15 +37,15 @@ pub fn trans_inline_asm<'a, 'tcx>(
 
     // Prepare the output operands
     let mut indirect_outputs = vec![];
-    for (i, (out, lvalue)) in ia.outputs.iter().zip(&outputs).enumerate() {
+    for (i, (out, place)) in ia.outputs.iter().zip(&outputs).enumerate() {
         if out.is_rw {
-            inputs.push(lvalue.load(bcx).immediate());
+            inputs.push(place.load(bcx).immediate());
             ext_constraints.push(i.to_string());
         }
         if out.is_indirect {
-            indirect_outputs.push(lvalue.load(bcx).immediate());
+            indirect_outputs.push(place.load(bcx).immediate());
         } else {
-            output_types.push(lvalue.layout.llvm_type(bcx.ccx));
+            output_types.push(place.layout.llvm_type(bcx.ccx));
         }
     }
     if !indirect_outputs.is_empty() {
@@ -100,9 +100,9 @@ pub fn trans_inline_asm<'a, 'tcx>(
 
     // Again, based on how many outputs we have
     let outputs = ia.outputs.iter().zip(&outputs).filter(|&(ref o, _)| !o.is_indirect);
-    for (i, (_, &lvalue)) in outputs.enumerate() {
+    for (i, (_, &place)) in outputs.enumerate() {
         let v = if num_outputs == 1 { r } else { bcx.extract_value(r, i as u64) };
-        OperandValue::Immediate(v).store(bcx, lvalue);
+        OperandValue::Immediate(v).store(bcx, place);
     }
 
     // Store mark in a metadata node so we can map LLVM errors
index ec9ed402ac62299c66b18bea293b0dc380d1e9d6..43e7938e98c78575855dec84f9f086a4b474e30f 100644 (file)
@@ -50,7 +50,7 @@
 use rustc::session::Session;
 use rustc_incremental;
 use allocator;
-use mir::lvalue::PlaceRef;
+use mir::place::PlaceRef;
 use attributes;
 use builder::Builder;
 use callee;
index cfa312606175e9e674f9501ef39fed88c954afb7..a35afb806111c4b807fd43e1da9cf103b6968ee5 100644 (file)
@@ -14,7 +14,7 @@
 use llvm;
 use llvm::{ValueRef};
 use abi::{Abi, FnType, PassMode};
-use mir::lvalue::{PlaceRef, Alignment};
+use mir::place::{PlaceRef, Alignment};
 use mir::operand::{OperandRef, OperandValue};
 use base::*;
 use common::*;
index 6feb056a83cacfca5edd02800bb446c10af33bba..b5e5dd3b9ce166c1ad6f779a58b7870f0a4643d8 100644 (file)
@@ -22,7 +22,7 @@
 use type_of::LayoutLlvmExt;
 use super::MirContext;
 
-pub fn lvalue_locals<'a, 'tcx>(mircx: &MirContext<'a, 'tcx>) -> BitVector {
+pub fn memory_locals<'a, 'tcx>(mircx: &MirContext<'a, 'tcx>) -> BitVector {
     let mir = mircx.mir;
     let mut analyzer = LocalAnalyzer::new(mircx);
 
@@ -44,16 +44,16 @@ pub fn lvalue_locals<'a, 'tcx>(mircx: &MirContext<'a, 'tcx>) -> BitVector {
             // (e.g. structs) into an alloca unconditionally, just so
             // that we don't have to deal with having two pathways
             // (gep vs extractvalue etc).
-            analyzer.mark_as_lvalue(mir::Local::new(index));
+            analyzer.mark_as_memory(mir::Local::new(index));
         }
     }
 
-    analyzer.lvalue_locals
+    analyzer.memory_locals
 }
 
 struct LocalAnalyzer<'mir, 'a: 'mir, 'tcx: 'a> {
     cx: &'mir MirContext<'a, 'tcx>,
-    lvalue_locals: BitVector,
+    memory_locals: BitVector,
     seen_assigned: BitVector
 }
 
@@ -61,7 +61,7 @@ impl<'mir, 'a, 'tcx> LocalAnalyzer<'mir, 'a, 'tcx> {
     fn new(mircx: &'mir MirContext<'a, 'tcx>) -> LocalAnalyzer<'mir, 'a, 'tcx> {
         let mut analyzer = LocalAnalyzer {
             cx: mircx,
-            lvalue_locals: BitVector::new(mircx.mir.local_decls.len()),
+            memory_locals: BitVector::new(mircx.mir.local_decls.len()),
             seen_assigned: BitVector::new(mircx.mir.local_decls.len())
         };
 
@@ -73,14 +73,14 @@ fn new(mircx: &'mir MirContext<'a, 'tcx>) -> LocalAnalyzer<'mir, 'a, 'tcx> {
         analyzer
     }
 
-    fn mark_as_lvalue(&mut self, local: mir::Local) {
-        debug!("marking {:?} as lvalue", local);
-        self.lvalue_locals.insert(local.index());
+    fn mark_as_memory(&mut self, local: mir::Local) {
+        debug!("marking {:?} as memory", local);
+        self.memory_locals.insert(local.index());
     }
 
     fn mark_assigned(&mut self, local: mir::Local) {
         if !self.seen_assigned.insert(local.index()) {
-            self.mark_as_lvalue(local);
+            self.mark_as_memory(local);
         }
     }
 }
@@ -88,18 +88,18 @@ fn mark_assigned(&mut self, local: mir::Local) {
 impl<'mir, 'a, 'tcx> Visitor<'tcx> for LocalAnalyzer<'mir, 'a, 'tcx> {
     fn visit_assign(&mut self,
                     block: mir::BasicBlock,
-                    lvalue: &mir::Place<'tcx>,
+                    place: &mir::Place<'tcx>,
                     rvalue: &mir::Rvalue<'tcx>,
                     location: Location) {
-        debug!("visit_assign(block={:?}, lvalue={:?}, rvalue={:?})", block, lvalue, rvalue);
+        debug!("visit_assign(block={:?}, place={:?}, rvalue={:?})", block, place, rvalue);
 
-        if let mir::Place::Local(index) = *lvalue {
+        if let mir::Place::Local(index) = *place {
             self.mark_assigned(index);
             if !self.cx.rvalue_creates_operand(rvalue) {
-                self.mark_as_lvalue(index);
+                self.mark_as_memory(index);
             }
         } else {
-            self.visit_lvalue(lvalue, PlaceContext::Store, location);
+            self.visit_place(place, PlaceContext::Store, location);
         }
 
         self.visit_rvalue(rvalue, location);
@@ -121,8 +121,8 @@ fn visit_terminator_kind(&mut self,
                 // box_free(x) shares with `drop x` the property that it
                 // is not guaranteed to be statically dominated by the
                 // definition of x, so x must always be in an alloca.
-                if let mir::Operand::Move(ref lvalue) = args[0] {
-                    self.visit_lvalue(lvalue, PlaceContext::Drop, location);
+                if let mir::Operand::Move(ref place) = args[0] {
+                    self.visit_place(place, PlaceContext::Drop, location);
                 }
             }
             _ => {}
@@ -131,14 +131,14 @@ fn visit_terminator_kind(&mut self,
         self.super_terminator_kind(block, kind, location);
     }
 
-    fn visit_lvalue(&mut self,
-                    lvalue: &mir::Place<'tcx>,
+    fn visit_place(&mut self,
+                    place: &mir::Place<'tcx>,
                     context: PlaceContext<'tcx>,
                     location: Location) {
-        debug!("visit_lvalue(lvalue={:?}, context={:?})", lvalue, context);
+        debug!("visit_place(place={:?}, context={:?})", place, context);
         let ccx = self.cx.ccx;
 
-        if let mir::Place::Projection(ref proj) = *lvalue {
+        if let mir::Place::Projection(ref proj) = *place {
             // Allow uses of projections that are ZSTs or from scalar fields.
             let is_consume = match context {
                 PlaceContext::Copy | PlaceContext::Move => true,
@@ -160,20 +160,20 @@ fn visit_lvalue(&mut self,
                     if layout.is_llvm_immediate() || layout.is_llvm_scalar_pair() {
                         // Recurse with the same context, instead of `Projection`,
                         // potentially stopping at non-operand projections,
-                        // which would trigger `mark_as_lvalue` on locals.
-                        self.visit_lvalue(&proj.base, context, location);
+                        // which would trigger `mark_as_memory` on locals.
+                        self.visit_place(&proj.base, context, location);
                         return;
                     }
                 }
             }
 
-            // A deref projection only reads the pointer, never needs the lvalue.
+            // A deref projection only reads the pointer, never needs the place.
             if let mir::ProjectionElem::Deref = proj.elem {
-                return self.visit_lvalue(&proj.base, PlaceContext::Copy, location);
+                return self.visit_place(&proj.base, PlaceContext::Copy, location);
             }
         }
 
-        self.super_lvalue(lvalue, context, location);
+        self.super_place(place, context, location);
     }
 
     fn visit_local(&mut self,
@@ -195,16 +195,16 @@ fn visit_local(&mut self,
             PlaceContext::Store |
             PlaceContext::Borrow { .. } |
             PlaceContext::Projection(..) => {
-                self.mark_as_lvalue(index);
+                self.mark_as_memory(index);
             }
 
             PlaceContext::Drop => {
                 let ty = mir::Place::Local(index).ty(self.cx.mir, self.cx.ccx.tcx());
                 let ty = self.cx.monomorphize(&ty.to_ty(self.cx.ccx.tcx()));
 
-                // Only need the lvalue if we're actually dropping it.
+                // Only need the place if we're actually dropping it.
                 if self.cx.ccx.shared().type_needs_drop(ty) {
-                    self.mark_as_lvalue(index);
+                    self.mark_as_memory(index);
                 }
             }
         }
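
As an aside on the analysis above (an illustrative sketch, not part of this patch): roughly speaking, a local ends up in the memory-locals set, and therefore gets an alloca, when it is borrowed, projected into, or dropped with a type that actually needs dropping, while a local that is only ever read as a plain operand can stay as an immediate SSA value. A minimal Rust example of the distinction, assuming ordinary MIR lowering:

    // Illustrative sketch only; the names below are not from the patch.
    fn main() {
        let a = 1u32;            // used only as a Copy operand: no alloca needed
        let pair = (2u32, 3u32); // borrowed through a field projection below,
                                 // so it must be materialized in memory
        let r = &pair.1;         // Projection + Borrow contexts on `pair`
        let sum = a + *r;
        let _ = sum;
    }
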
index 94a8df0a35bfa2f21f3c6c76a14a572030cc1b37..94c8d469c642d6c947e42bd68d32e085c93a4c27 100644 (file)
@@ -31,7 +31,7 @@
 
 use super::{MirContext, LocalRef};
 use super::constant::Const;
-use super::lvalue::{Alignment, PlaceRef};
+use super::place::{Alignment, PlaceRef};
 use super::operand::OperandRef;
 use super::operand::OperandValue::{Pair, Ref, Immediate};
 
@@ -214,7 +214,7 @@ fn trans_terminator(&mut self,
                     }
 
                     PassMode::Direct(_) | PassMode::Pair(..) => {
-                        let op = self.trans_consume(&bcx, &mir::Place::Local(mir::RETURN_POINTER));
+                        let op = self.trans_consume(&bcx, &mir::Place::Local(mir::RETURN_PLACE));
                         if let Ref(llval, align) = op.val {
                             bcx.load(llval, align.non_abi())
                         } else {
@@ -223,13 +223,13 @@ fn trans_terminator(&mut self,
                     }
 
                     PassMode::Cast(cast_ty) => {
-                        let op = match self.locals[mir::RETURN_POINTER] {
+                        let op = match self.locals[mir::RETURN_PLACE] {
                             LocalRef::Operand(Some(op)) => op,
                             LocalRef::Operand(None) => bug!("use of return before def"),
-                            LocalRef::Place(tr_lvalue) => {
+                            LocalRef::Place(tr_place) => {
                                 OperandRef {
-                                    val: Ref(tr_lvalue.llval, tr_lvalue.alignment),
-                                    layout: tr_lvalue.layout
+                                    val: Ref(tr_place.llval, tr_place.alignment),
+                                    layout: tr_place.layout
                                 }
                             }
                         };
@@ -241,7 +241,7 @@ fn trans_terminator(&mut self,
                             }
                             Ref(llval, align) => {
                                 assert_eq!(align, Alignment::AbiAligned,
-                                           "return pointer is unaligned!");
+                                           "return place is unaligned!");
                                 llval
                             }
                         };
@@ -268,9 +268,9 @@ fn trans_terminator(&mut self,
                     return
                 }
 
-                let lvalue = self.trans_lvalue(&bcx, location);
-                let mut args: &[_] = &[lvalue.llval, lvalue.llextra];
-                args = &args[..1 + lvalue.has_extra() as usize];
+                let place = self.trans_place(&bcx, location);
+                let mut args: &[_] = &[place.llval, place.llextra];
+                args = &args[..1 + place.has_extra() as usize];
                 let (drop_fn, fn_ty) = match ty.sty {
                     ty::TyDynamic(..) => {
                         let fn_ty = common::instance_ty(bcx.ccx.tcx(), &drop_fn);
@@ -278,7 +278,7 @@ fn trans_terminator(&mut self,
                         let sig = bcx.tcx().erase_late_bound_regions_and_normalize(&sig);
                         let fn_ty = FnType::new_vtable(bcx.ccx, sig, &[]);
                         args = &args[..1];
-                        (meth::DESTRUCTOR.get_fn(&bcx, lvalue.llextra, &fn_ty), fn_ty)
+                        (meth::DESTRUCTOR.get_fn(&bcx, place.llextra, &fn_ty), fn_ty)
                     }
                     _ => {
                         (callee::get_fn(bcx.ccx, drop_fn),
@@ -792,7 +792,7 @@ fn make_return_dest(&mut self, bcx: &Builder<'a, 'tcx>,
             match self.locals[index] {
                 LocalRef::Place(dest) => dest,
                 LocalRef::Operand(None) => {
-                    // Handle temporary lvalues, specifically Operand ones, as
+                    // Handle temporary places, specifically Operand ones, as
                     // they don't have allocas
                     return if fn_ret.is_indirect() {
                         // Odd, but possible, case, we have an operand temporary,
@@ -813,11 +813,11 @@ fn make_return_dest(&mut self, bcx: &Builder<'a, 'tcx>,
                     };
                 }
                 LocalRef::Operand(Some(_)) => {
-                    bug!("lvalue local already assigned to");
+                    bug!("place local already assigned to");
                 }
             }
         } else {
-            self.trans_lvalue(bcx, dest)
+            self.trans_place(bcx, dest)
         };
         if fn_ret.is_indirect() {
             match dest.alignment {
@@ -845,15 +845,15 @@ fn trans_transmute(&mut self, bcx: &Builder<'a, 'tcx>,
                        dst: &mir::Place<'tcx>) {
         if let mir::Place::Local(index) = *dst {
             match self.locals[index] {
-                LocalRef::Place(lvalue) => self.trans_transmute_into(bcx, src, lvalue),
+                LocalRef::Place(place) => self.trans_transmute_into(bcx, src, place),
                 LocalRef::Operand(None) => {
-                    let dst_layout = bcx.ccx.layout_of(self.monomorphized_lvalue_ty(dst));
+                    let dst_layout = bcx.ccx.layout_of(self.monomorphized_place_ty(dst));
                     assert!(!dst_layout.ty.has_erasable_regions());
-                    let lvalue = PlaceRef::alloca(bcx, dst_layout, "transmute_temp");
-                    lvalue.storage_live(bcx);
-                    self.trans_transmute_into(bcx, src, lvalue);
-                    let op = lvalue.load(bcx);
-                    lvalue.storage_dead(bcx);
+                    let place = PlaceRef::alloca(bcx, dst_layout, "transmute_temp");
+                    place.storage_live(bcx);
+                    self.trans_transmute_into(bcx, src, place);
+                    let op = place.load(bcx);
+                    place.storage_dead(bcx);
                     self.locals[index] = LocalRef::Operand(Some(op));
                 }
                 LocalRef::Operand(Some(op)) => {
@@ -862,7 +862,7 @@ fn trans_transmute(&mut self, bcx: &Builder<'a, 'tcx>,
                 }
             }
         } else {
-            let dst = self.trans_lvalue(bcx, dst);
+            let dst = self.trans_place(bcx, dst);
             self.trans_transmute_into(bcx, src, dst);
         }
     }
@@ -918,8 +918,8 @@ enum ReturnDest<'tcx> {
     Nothing,
     // Store the return value to the pointer
     Store(PlaceRef<'tcx>),
-    // Stores an indirect return value to an operand local lvalue
+    // Stores an indirect return value to an operand local place
     IndirectOperand(PlaceRef<'tcx>, mir::Local),
-    // Stores a direct return value to an operand local lvalue
+    // Stores a direct return value to an operand local place
     DirectOperand(mir::Local)
 }
index 75fb95ed6e8daa7ad2bae762e696d2686392bf89..764021983e99c02c4aac601ec7760c2a3a3175b2 100644 (file)
@@ -42,7 +42,7 @@
 use std::fmt;
 use std::ptr;
 
-use super::lvalue::Alignment;
+use super::place::Alignment;
 use super::operand::{OperandRef, OperandValue};
 use super::MirContext;
 
@@ -156,7 +156,7 @@ fn get_fat_ptr(&self, ccx: &CrateContext<'a, 'tcx>) -> (ValueRef, ValueRef) {
         self.get_pair(ccx)
     }
 
-    fn as_lvalue(&self) -> ConstPlace<'tcx> {
+    fn as_place(&self) -> ConstPlace<'tcx> {
         ConstPlace {
             base: Base::Value(self.llval),
             llextra: ptr::null_mut(),
@@ -210,7 +210,7 @@ enum Base {
     Static(ValueRef)
 }
 
-/// An lvalue as seen from a constant.
+/// A place as seen from a constant.
 #[derive(Copy, Clone)]
 struct ConstPlace<'tcx> {
     base: Base,
@@ -348,7 +348,7 @@ fn trans(&mut self) -> Result<Const<'tcx>, ConstEvalErr<'tcx>> {
                 mir::TerminatorKind::Goto { target } => target,
                 mir::TerminatorKind::Return => {
                     failure?;
-                    return self.locals[mir::RETURN_POINTER].clone().unwrap_or_else(|| {
+                    return self.locals[mir::RETURN_PLACE].clone().unwrap_or_else(|| {
                         span_bug!(span, "no returned value in constant");
                     });
                 }
@@ -437,17 +437,17 @@ fn store(&mut self,
         }
     }
 
-    fn const_lvalue(&self, lvalue: &mir::Place<'tcx>, span: Span)
+    fn const_place(&self, place: &mir::Place<'tcx>, span: Span)
                     -> Result<ConstPlace<'tcx>, ConstEvalErr<'tcx>> {
         let tcx = self.ccx.tcx();
 
-        if let mir::Place::Local(index) = *lvalue {
+        if let mir::Place::Local(index) = *place {
             return self.locals[index].clone().unwrap_or_else(|| {
-                span_bug!(span, "{:?} not initialized", lvalue)
-            }).map(|v| v.as_lvalue());
+                span_bug!(span, "{:?} not initialized", place)
+            }).map(|v| v.as_place());
         }
 
-        let lvalue = match *lvalue {
+        let place = match *place {
             mir::Place::Local(_)  => bug!(), // handled above
             mir::Place::Static(box mir::Static { def_id, ty }) => {
                 ConstPlace {
@@ -457,7 +457,7 @@ fn const_lvalue(&self, lvalue: &mir::Place<'tcx>, span: Span)
                 }
             }
             mir::Place::Projection(ref projection) => {
-                let tr_base = self.const_lvalue(&projection.base, span)?;
+                let tr_base = self.const_place(&projection.base, span)?;
                 let projected_ty = PlaceTy::Ty { ty: tr_base.ty }
                     .projection_ty(tcx, &projection.elem);
                 let base = tr_base.to_const(span);
@@ -533,16 +533,16 @@ fn const_lvalue(&self, lvalue: &mir::Place<'tcx>, span: Span)
                 }
             }
         };
-        Ok(lvalue)
+        Ok(place)
     }
 
     fn const_operand(&self, operand: &mir::Operand<'tcx>, span: Span)
                      -> Result<Const<'tcx>, ConstEvalErr<'tcx>> {
         debug!("const_operand({:?} @ {:?})", operand, span);
         let result = match *operand {
-            mir::Operand::Copy(ref lvalue) |
-            mir::Operand::Move(ref lvalue) => {
-                Ok(self.const_lvalue(lvalue, span)?.to_const(span))
+            mir::Operand::Copy(ref place) |
+            mir::Operand::Move(ref place) => {
+                Ok(self.const_place(place, span)?.to_const(span))
             }
 
             mir::Operand::Constant(ref constant) => {
@@ -779,14 +779,14 @@ fn const_rvalue(&self, rvalue: &mir::Rvalue<'tcx>,
                 Const::new(val, cast_ty)
             }
 
-            mir::Rvalue::Ref(_, bk, ref lvalue) => {
-                let tr_lvalue = self.const_lvalue(lvalue, span)?;
+            mir::Rvalue::Ref(_, bk, ref place) => {
+                let tr_place = self.const_place(place, span)?;
 
-                let ty = tr_lvalue.ty;
+                let ty = tr_place.ty;
                 let ref_ty = tcx.mk_ref(tcx.types.re_erased,
                     ty::TypeAndMut { ty: ty, mutbl: bk.to_mutbl_lossy() });
 
-                let base = match tr_lvalue.base {
+                let base = match tr_place.base {
                     Base::Value(llval) => {
                         // FIXME: may be wrong for &*(&simd_vec as &fmt::Debug)
                         let align = if self.ccx.shared().type_is_sized(ty) {
@@ -807,14 +807,14 @@ fn const_rvalue(&self, rvalue: &mir::Rvalue<'tcx>,
                 let ptr = if self.ccx.shared().type_is_sized(ty) {
                     base
                 } else {
-                    C_fat_ptr(self.ccx, base, tr_lvalue.llextra)
+                    C_fat_ptr(self.ccx, base, tr_place.llextra)
                 };
                 Const::new(ptr, ref_ty)
             }
 
-            mir::Rvalue::Len(ref lvalue) => {
-                let tr_lvalue = self.const_lvalue(lvalue, span)?;
-                Const::new(tr_lvalue.len(self.ccx), tcx.types.usize)
+            mir::Rvalue::Len(ref place) => {
+                let tr_place = self.const_place(place, span)?;
+                Const::new(tr_place.len(self.ccx), tcx.types.usize)
             }
 
             mir::Rvalue::BinaryOp(op, ref lhs, ref rhs) => {
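
A note on the `Rvalue::Ref` case above, where `tr_place.llextra` is appended for unsized types (illustrative only, not part of this patch): a reference to a sized type is a thin pointer, while a reference to an unsized type such as `[u8]` or a trait object is a fat pointer whose extra word (length or vtable pointer) is what `llextra` carries.

    // Illustrative sketch, not from the patch; sizes assume a typical
    // platform where fat pointers are two words.
    use std::mem::size_of;

    fn main() {
        assert_eq!(size_of::<&u64>(), size_of::<usize>());                      // thin
        assert_eq!(size_of::<&[u8]>(), 2 * size_of::<usize>());                 // ptr + len
        assert_eq!(size_of::<&dyn std::fmt::Debug>(), 2 * size_of::<usize>());  // ptr + vtable
    }
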
diff --git a/src/librustc_trans/mir/lvalue.rs b/src/librustc_trans/mir/lvalue.rs
deleted file mode 100644 (file)
index 8868954..0000000
+++ /dev/null
@@ -1,545 +0,0 @@
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use llvm::{self, ValueRef};
-use rustc::ty::{self, Ty};
-use rustc::ty::layout::{self, Align, TyLayout, LayoutOf};
-use rustc::mir;
-use rustc::mir::tcx::PlaceTy;
-use rustc_data_structures::indexed_vec::Idx;
-use base;
-use builder::Builder;
-use common::{CrateContext, C_usize, C_u8, C_u32, C_uint, C_int, C_null, C_uint_big};
-use consts;
-use type_of::LayoutLlvmExt;
-use type_::Type;
-use value::Value;
-use glue;
-
-use std::ptr;
-use std::ops;
-
-use super::{MirContext, LocalRef};
-use super::operand::{OperandRef, OperandValue};
-
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
-pub enum Alignment {
-    Packed(Align),
-    AbiAligned,
-}
-
-impl ops::BitOr for Alignment {
-    type Output = Self;
-
-    fn bitor(self, rhs: Self) -> Self {
-        match (self, rhs) {
-            (Alignment::Packed(a), Alignment::Packed(b)) => {
-                Alignment::Packed(a.min(b))
-            }
-            (Alignment::Packed(x), _) | (_, Alignment::Packed(x)) => {
-                Alignment::Packed(x)
-            }
-            (Alignment::AbiAligned, Alignment::AbiAligned) => {
-                Alignment::AbiAligned
-            }
-        }
-    }
-}
-
-impl<'a> From<TyLayout<'a>> for Alignment {
-    fn from(layout: TyLayout) -> Self {
-        if layout.is_packed() {
-            Alignment::Packed(layout.align)
-        } else {
-            Alignment::AbiAligned
-        }
-    }
-}
-
-impl Alignment {
-    pub fn non_abi(self) -> Option<Align> {
-        match self {
-            Alignment::Packed(x) => Some(x),
-            Alignment::AbiAligned => None,
-        }
-    }
-}
-
-#[derive(Copy, Clone, Debug)]
-pub struct PlaceRef<'tcx> {
-    /// Pointer to the contents of the lvalue
-    pub llval: ValueRef,
-
-    /// This lvalue's extra data if it is unsized, or null
-    pub llextra: ValueRef,
-
-    /// Monomorphized type of this lvalue, including variant information
-    pub layout: TyLayout<'tcx>,
-
-    /// Whether this lvalue is known to be aligned according to its layout
-    pub alignment: Alignment,
-}
-
-impl<'a, 'tcx> PlaceRef<'tcx> {
-    pub fn new_sized(llval: ValueRef,
-                     layout: TyLayout<'tcx>,
-                     alignment: Alignment)
-                     -> PlaceRef<'tcx> {
-        PlaceRef {
-            llval,
-            llextra: ptr::null_mut(),
-            layout,
-            alignment
-        }
-    }
-
-    pub fn alloca(bcx: &Builder<'a, 'tcx>, layout: TyLayout<'tcx>, name: &str)
-                  -> PlaceRef<'tcx> {
-        debug!("alloca({:?}: {:?})", name, layout);
-        let tmp = bcx.alloca(layout.llvm_type(bcx.ccx), name, layout.align);
-        Self::new_sized(tmp, layout, Alignment::AbiAligned)
-    }
-
-    pub fn len(&self, ccx: &CrateContext<'a, 'tcx>) -> ValueRef {
-        if let layout::FieldPlacement::Array { count, .. } = self.layout.fields {
-            if self.layout.is_unsized() {
-                assert!(self.has_extra());
-                assert_eq!(count, 0);
-                self.llextra
-            } else {
-                C_usize(ccx, count)
-            }
-        } else {
-            bug!("unexpected layout `{:#?}` in PlaceRef::len", self.layout)
-        }
-    }
-
-    pub fn has_extra(&self) -> bool {
-        !self.llextra.is_null()
-    }
-
-    pub fn load(&self, bcx: &Builder<'a, 'tcx>) -> OperandRef<'tcx> {
-        debug!("PlaceRef::load: {:?}", self);
-
-        assert!(!self.has_extra());
-
-        if self.layout.is_zst() {
-            return OperandRef::new_zst(bcx.ccx, self.layout);
-        }
-
-        let scalar_load_metadata = |load, scalar: &layout::Scalar| {
-            let (min, max) = (scalar.valid_range.start, scalar.valid_range.end);
-            let max_next = max.wrapping_add(1);
-            let bits = scalar.value.size(bcx.ccx).bits();
-            assert!(bits <= 128);
-            let mask = !0u128 >> (128 - bits);
-            // For a (max) value of -1, max will be `-1 as usize`, which overflows.
-            // However, that is fine here (it would still represent the full range),
-            // i.e., if the range is everything.  The lo==hi case would be
-            // rejected by the LLVM verifier (it would mean either an
-            // empty set, which is impossible, or the entire range of the
-            // type, which is pointless).
-            match scalar.value {
-                layout::Int(..) if max_next & mask != min & mask => {
-                    // llvm::ConstantRange can deal with ranges that wrap around,
-                    // so an overflow on (max + 1) is fine.
-                    bcx.range_metadata(load, min..max_next);
-                }
-                layout::Pointer if 0 < min && min < max => {
-                    bcx.nonnull_metadata(load);
-                }
-                _ => {}
-            }
-        };
-
-        let val = if self.layout.is_llvm_immediate() {
-            let mut const_llval = ptr::null_mut();
-            unsafe {
-                let global = llvm::LLVMIsAGlobalVariable(self.llval);
-                if !global.is_null() && llvm::LLVMIsGlobalConstant(global) == llvm::True {
-                    const_llval = llvm::LLVMGetInitializer(global);
-                }
-            }
-
-            let llval = if !const_llval.is_null() {
-                const_llval
-            } else {
-                let load = bcx.load(self.llval, self.alignment.non_abi());
-                if let layout::Abi::Scalar(ref scalar) = self.layout.abi {
-                    scalar_load_metadata(load, scalar);
-                }
-                load
-            };
-            OperandValue::Immediate(base::to_immediate(bcx, llval, self.layout))
-        } else if let layout::Abi::ScalarPair(ref a, ref b) = self.layout.abi {
-            let load = |i, scalar: &layout::Scalar| {
-                let mut llptr = bcx.struct_gep(self.llval, i as u64);
-                // Make sure to always load i1 as i8.
-                if scalar.is_bool() {
-                    llptr = bcx.pointercast(llptr, Type::i8p(bcx.ccx));
-                }
-                let load = bcx.load(llptr, self.alignment.non_abi());
-                scalar_load_metadata(load, scalar);
-                if scalar.is_bool() {
-                    bcx.trunc(load, Type::i1(bcx.ccx))
-                } else {
-                    load
-                }
-            };
-            OperandValue::Pair(load(0, a), load(1, b))
-        } else {
-            OperandValue::Ref(self.llval, self.alignment)
-        };
-
-        OperandRef { val, layout: self.layout }
-    }
-
-    /// Access a field, at a point when the value's case is known.
-    pub fn project_field(self, bcx: &Builder<'a, 'tcx>, ix: usize) -> PlaceRef<'tcx> {
-        let ccx = bcx.ccx;
-        let field = self.layout.field(ccx, ix);
-        let offset = self.layout.fields.offset(ix);
-        let alignment = self.alignment | Alignment::from(self.layout);
-
-        let simple = || {
-            // Unions and newtypes only use an offset of 0.
-            let llval = if offset.bytes() == 0 {
-                self.llval
-            } else if let layout::Abi::ScalarPair(ref a, ref b) = self.layout.abi {
-                // Offsets have to match either first or second field.
-                assert_eq!(offset, a.value.size(ccx).abi_align(b.value.align(ccx)));
-                bcx.struct_gep(self.llval, 1)
-            } else {
-                bcx.struct_gep(self.llval, self.layout.llvm_field_index(ix))
-            };
-            PlaceRef {
-                // HACK(eddyb) have to bitcast pointers until LLVM removes pointee types.
-                llval: bcx.pointercast(llval, field.llvm_type(ccx).ptr_to()),
-                llextra: if ccx.shared().type_has_metadata(field.ty) {
-                    self.llextra
-                } else {
-                    ptr::null_mut()
-                },
-                layout: field,
-                alignment,
-            }
-        };
-
-        // Simple case - we can just GEP the field
-        //   * Packed struct - There is no alignment padding
-        //   * Field is sized - pointer is properly aligned already
-        if self.layout.is_packed() || !field.is_unsized() {
-            return simple();
-        }
-
-        // If the type of the last field is [T], str or a foreign type, then we don't need to do
-        // any adjusments
-        match field.ty.sty {
-            ty::TySlice(..) | ty::TyStr | ty::TyForeign(..) => return simple(),
-            _ => ()
-        }
-
-        // There's no metadata available, log the case and just do the GEP.
-        if !self.has_extra() {
-            debug!("Unsized field `{}`, of `{:?}` has no metadata for adjustment",
-                ix, Value(self.llval));
-            return simple();
-        }
-
-        // We need to get the pointer manually now.
-        // We do this by casting to a *i8, then offsetting it by the appropriate amount.
-        // We do this instead of, say, simply adjusting the pointer from the result of a GEP
-        // because the field may have an arbitrary alignment in the LLVM representation
-        // anyway.
-        //
-        // To demonstrate:
-        //   struct Foo<T: ?Sized> {
-        //      x: u16,
-        //      y: T
-        //   }
-        //
-        // The type Foo<Foo<Trait>> is represented in LLVM as { u16, { u16, u8 }}, meaning that
-        // the `y` field has 16-bit alignment.
-
-        let meta = self.llextra;
-
-        let unaligned_offset = C_usize(ccx, offset.bytes());
-
-        // Get the alignment of the field
-        let (_, align) = glue::size_and_align_of_dst(bcx, field.ty, meta);
-
-        // Bump the unaligned offset up to the appropriate alignment using the
-        // following expression:
-        //
-        //   (unaligned offset + (align - 1)) & -align
-
-        // Calculate offset
-        let align_sub_1 = bcx.sub(align, C_usize(ccx, 1u64));
-        let offset = bcx.and(bcx.add(unaligned_offset, align_sub_1),
-        bcx.neg(align));
-
-        debug!("struct_field_ptr: DST field offset: {:?}", Value(offset));
-
-        // Cast and adjust pointer
-        let byte_ptr = bcx.pointercast(self.llval, Type::i8p(ccx));
-        let byte_ptr = bcx.gep(byte_ptr, &[offset]);
-
-        // Finally, cast back to the type expected
-        let ll_fty = field.llvm_type(ccx);
-        debug!("struct_field_ptr: Field type is {:?}", ll_fty);
-
-        PlaceRef {
-            llval: bcx.pointercast(byte_ptr, ll_fty.ptr_to()),
-            llextra: self.llextra,
-            layout: field,
-            alignment,
-        }
-    }
-
-    /// Obtain the actual discriminant of a value.
-    pub fn trans_get_discr(self, bcx: &Builder<'a, 'tcx>, cast_to: Ty<'tcx>) -> ValueRef {
-        let cast_to = bcx.ccx.layout_of(cast_to).immediate_llvm_type(bcx.ccx);
-        match self.layout.variants {
-            layout::Variants::Single { index } => {
-                return C_uint(cast_to, index as u64);
-            }
-            layout::Variants::Tagged { .. } |
-            layout::Variants::NicheFilling { .. } => {},
-        }
-
-        let discr = self.project_field(bcx, 0);
-        let lldiscr = discr.load(bcx).immediate();
-        match self.layout.variants {
-            layout::Variants::Single { .. } => bug!(),
-            layout::Variants::Tagged { ref discr, .. } => {
-                let signed = match discr.value {
-                    layout::Int(_, signed) => signed,
-                    _ => false
-                };
-                bcx.intcast(lldiscr, cast_to, signed)
-            }
-            layout::Variants::NicheFilling {
-                dataful_variant,
-                ref niche_variants,
-                niche_start,
-                ..
-            } => {
-                let niche_llty = discr.layout.immediate_llvm_type(bcx.ccx);
-                if niche_variants.start == niche_variants.end {
-                    // FIXME(eddyb) Check the actual primitive type here.
-                    let niche_llval = if niche_start == 0 {
-                        // HACK(eddyb) Using `C_null` as it works on all types.
-                        C_null(niche_llty)
-                    } else {
-                        C_uint_big(niche_llty, niche_start)
-                    };
-                    bcx.select(bcx.icmp(llvm::IntEQ, lldiscr, niche_llval),
-                        C_uint(cast_to, niche_variants.start as u64),
-                        C_uint(cast_to, dataful_variant as u64))
-                } else {
-                    // Rebase from niche values to discriminant values.
-                    let delta = niche_start.wrapping_sub(niche_variants.start as u128);
-                    let lldiscr = bcx.sub(lldiscr, C_uint_big(niche_llty, delta));
-                    let lldiscr_max = C_uint(niche_llty, niche_variants.end as u64);
-                    bcx.select(bcx.icmp(llvm::IntULE, lldiscr, lldiscr_max),
-                        bcx.intcast(lldiscr, cast_to, false),
-                        C_uint(cast_to, dataful_variant as u64))
-                }
-            }
-        }
-    }
-
-    /// Set the discriminant for a new value of the given case of the given
-    /// representation.
-    pub fn trans_set_discr(&self, bcx: &Builder<'a, 'tcx>, variant_index: usize) {
-        match self.layout.variants {
-            layout::Variants::Single { index } => {
-                if index != variant_index {
-                    // If the layout of an enum is `Single`, all
-                    // other variants are necessarily uninhabited.
-                    assert_eq!(self.layout.for_variant(bcx.ccx, variant_index).abi,
-                               layout::Abi::Uninhabited);
-                }
-            }
-            layout::Variants::Tagged { .. } => {
-                let ptr = self.project_field(bcx, 0);
-                let to = self.layout.ty.ty_adt_def().unwrap()
-                    .discriminant_for_variant(bcx.tcx(), variant_index)
-                    .to_u128_unchecked() as u64;
-                bcx.store(C_int(ptr.layout.llvm_type(bcx.ccx), to as i64),
-                    ptr.llval, ptr.alignment.non_abi());
-            }
-            layout::Variants::NicheFilling {
-                dataful_variant,
-                ref niche_variants,
-                niche_start,
-                ..
-            } => {
-                if variant_index != dataful_variant {
-                    if bcx.sess().target.target.arch == "arm" ||
-                       bcx.sess().target.target.arch == "aarch64" {
-                        // Issue #34427: As workaround for LLVM bug on ARM,
-                        // use memset of 0 before assigning niche value.
-                        let llptr = bcx.pointercast(self.llval, Type::i8(bcx.ccx).ptr_to());
-                        let fill_byte = C_u8(bcx.ccx, 0);
-                        let (size, align) = self.layout.size_and_align();
-                        let size = C_usize(bcx.ccx, size.bytes());
-                        let align = C_u32(bcx.ccx, align.abi() as u32);
-                        base::call_memset(bcx, llptr, fill_byte, size, align, false);
-                    }
-
-                    let niche = self.project_field(bcx, 0);
-                    let niche_llty = niche.layout.immediate_llvm_type(bcx.ccx);
-                    let niche_value = ((variant_index - niche_variants.start) as u128)
-                        .wrapping_add(niche_start);
-                    // FIXME(eddyb) Check the actual primitive type here.
-                    let niche_llval = if niche_value == 0 {
-                        // HACK(eddyb) Using `C_null` as it works on all types.
-                        C_null(niche_llty)
-                    } else {
-                        C_uint_big(niche_llty, niche_value)
-                    };
-                    OperandValue::Immediate(niche_llval).store(bcx, niche);
-                }
-            }
-        }
-    }
-
-    pub fn project_index(&self, bcx: &Builder<'a, 'tcx>, llindex: ValueRef)
-                         -> PlaceRef<'tcx> {
-        PlaceRef {
-            llval: bcx.inbounds_gep(self.llval, &[C_usize(bcx.ccx, 0), llindex]),
-            llextra: ptr::null_mut(),
-            layout: self.layout.field(bcx.ccx, 0),
-            alignment: self.alignment
-        }
-    }
-
-    pub fn project_downcast(&self, bcx: &Builder<'a, 'tcx>, variant_index: usize)
-                            -> PlaceRef<'tcx> {
-        let mut downcast = *self;
-        downcast.layout = self.layout.for_variant(bcx.ccx, variant_index);
-
-        // Cast to the appropriate variant struct type.
-        let variant_ty = downcast.layout.llvm_type(bcx.ccx);
-        downcast.llval = bcx.pointercast(downcast.llval, variant_ty.ptr_to());
-
-        downcast
-    }
-
-    pub fn storage_live(&self, bcx: &Builder<'a, 'tcx>) {
-        bcx.lifetime_start(self.llval, self.layout.size);
-    }
-
-    pub fn storage_dead(&self, bcx: &Builder<'a, 'tcx>) {
-        bcx.lifetime_end(self.llval, self.layout.size);
-    }
-}
-
-impl<'a, 'tcx> MirContext<'a, 'tcx> {
-    pub fn trans_lvalue(&mut self,
-                        bcx: &Builder<'a, 'tcx>,
-                        lvalue: &mir::Place<'tcx>)
-                        -> PlaceRef<'tcx> {
-        debug!("trans_lvalue(lvalue={:?})", lvalue);
-
-        let ccx = bcx.ccx;
-        let tcx = ccx.tcx();
-
-        if let mir::Place::Local(index) = *lvalue {
-            match self.locals[index] {
-                LocalRef::Place(lvalue) => {
-                    return lvalue;
-                }
-                LocalRef::Operand(..) => {
-                    bug!("using operand local {:?} as lvalue", lvalue);
-                }
-            }
-        }
-
-        let result = match *lvalue {
-            mir::Place::Local(_) => bug!(), // handled above
-            mir::Place::Static(box mir::Static { def_id, ty }) => {
-                PlaceRef::new_sized(consts::get_static(ccx, def_id),
-                                     ccx.layout_of(self.monomorphize(&ty)),
-                                     Alignment::AbiAligned)
-            },
-            mir::Place::Projection(box mir::Projection {
-                ref base,
-                elem: mir::ProjectionElem::Deref
-            }) => {
-                // Load the pointer from its location.
-                self.trans_consume(bcx, base).deref(bcx.ccx)
-            }
-            mir::Place::Projection(ref projection) => {
-                let tr_base = self.trans_lvalue(bcx, &projection.base);
-
-                match projection.elem {
-                    mir::ProjectionElem::Deref => bug!(),
-                    mir::ProjectionElem::Field(ref field, _) => {
-                        tr_base.project_field(bcx, field.index())
-                    }
-                    mir::ProjectionElem::Index(index) => {
-                        let index = &mir::Operand::Copy(mir::Place::Local(index));
-                        let index = self.trans_operand(bcx, index);
-                        let llindex = index.immediate();
-                        tr_base.project_index(bcx, llindex)
-                    }
-                    mir::ProjectionElem::ConstantIndex { offset,
-                                                         from_end: false,
-                                                         min_length: _ } => {
-                        let lloffset = C_usize(bcx.ccx, offset as u64);
-                        tr_base.project_index(bcx, lloffset)
-                    }
-                    mir::ProjectionElem::ConstantIndex { offset,
-                                                         from_end: true,
-                                                         min_length: _ } => {
-                        let lloffset = C_usize(bcx.ccx, offset as u64);
-                        let lllen = tr_base.len(bcx.ccx);
-                        let llindex = bcx.sub(lllen, lloffset);
-                        tr_base.project_index(bcx, llindex)
-                    }
-                    mir::ProjectionElem::Subslice { from, to } => {
-                        let mut subslice = tr_base.project_index(bcx,
-                            C_usize(bcx.ccx, from as u64));
-                        let projected_ty = PlaceTy::Ty { ty: tr_base.layout.ty }
-                            .projection_ty(tcx, &projection.elem).to_ty(bcx.tcx());
-                        subslice.layout = bcx.ccx.layout_of(self.monomorphize(&projected_ty));
-
-                        if subslice.layout.is_unsized() {
-                            assert!(tr_base.has_extra());
-                            subslice.llextra = bcx.sub(tr_base.llextra,
-                                C_usize(bcx.ccx, (from as u64) + (to as u64)));
-                        }
-
-                        // Cast the lvalue pointer type to the new
-                        // array or slice type (*[%_; new_len]).
-                        subslice.llval = bcx.pointercast(subslice.llval,
-                            subslice.layout.llvm_type(bcx.ccx).ptr_to());
-
-                        subslice
-                    }
-                    mir::ProjectionElem::Downcast(_, v) => {
-                        tr_base.project_downcast(bcx, v)
-                    }
-                }
-            }
-        };
-        debug!("trans_lvalue(lvalue={:?}) => {:?}", lvalue, result);
-        result
-    }
-
-    pub fn monomorphized_lvalue_ty(&self, lvalue: &mir::Place<'tcx>) -> Ty<'tcx> {
-        let tcx = self.ccx.tcx();
-        let lvalue_ty = lvalue.ty(self.mir, tcx);
-        self.monomorphize(&lvalue_ty.to_ty(tcx))
-    }
-}
-
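
The unsized-field path of `project_field` (deleted above and re-added in place.rs below) rounds the field offset up to the field's dynamic alignment with `(unaligned offset + (align - 1)) & -align`. A standalone sketch of that arithmetic with concrete numbers (illustrative only, not part of this patch):

    // Round `offset` up to the next multiple of `align` (a power of two);
    // `align.wrapping_neg()` plays the role of the `-align` mask above.
    fn round_up(offset: u64, align: u64) -> u64 {
        debug_assert!(align.is_power_of_two());
        (offset + (align - 1)) & align.wrapping_neg()
    }

    fn main() {
        assert_eq!(round_up(10, 8), 16); // (10 + 7) & !7
        assert_eq!(round_up(16, 8), 16); // already aligned
        assert_eq!(round_up(0, 4), 0);
    }
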
index de57b0239e16e034cf24ca3140e612c985187e1e..39e2503081af8033cae9476825a184411701eb83 100644 (file)
@@ -35,7 +35,7 @@
 pub use self::constant::trans_static_initializer;
 
 use self::analyze::CleanupKind;
-use self::lvalue::{Alignment, PlaceRef};
+use self::place::{Alignment, PlaceRef};
 use rustc::mir::traversal;
 
 use self::operand::{OperandRef, OperandValue};
@@ -87,7 +87,7 @@ pub struct MirContext<'a, 'tcx:'a> {
     /// - the type of the local must be judged "immediate" by `is_llvm_immediate`
     /// - the operand must never be referenced indirectly
     ///     - we should not take its address using the `&` operator
-    ///     - nor should it appear in an lvalue path like `tmp.a`
+    ///     - nor should it appear in a place path like `tmp.a`
     /// - the operand must be defined by an rvalue that can generate immediate
     ///   values
     ///
@@ -244,11 +244,11 @@ pub fn trans_mir<'a, 'tcx: 'a>(
         },
     };
 
-    let lvalue_locals = analyze::lvalue_locals(&mircx);
+    let memory_locals = analyze::memory_locals(&mircx);
 
     // Allocate variable and temp allocas
     mircx.locals = {
-        let args = arg_local_refs(&bcx, &mircx, &mircx.scopes, &lvalue_locals);
+        let args = arg_local_refs(&bcx, &mircx, &mircx.scopes, &memory_locals);
 
         let mut allocate_local = |local| {
             let decl = &mir.local_decls[local];
@@ -260,30 +260,30 @@ pub fn trans_mir<'a, 'tcx: 'a>(
                 let debug_scope = mircx.scopes[decl.source_info.scope];
                 let dbg = debug_scope.is_valid() && bcx.sess().opts.debuginfo == FullDebugInfo;
 
-                if !lvalue_locals.contains(local.index()) && !dbg {
+                if !memory_locals.contains(local.index()) && !dbg {
                     debug!("alloc: {:?} ({}) -> operand", local, name);
                     return LocalRef::new_operand(bcx.ccx, layout);
                 }
 
-                debug!("alloc: {:?} ({}) -> lvalue", local, name);
-                let lvalue = PlaceRef::alloca(&bcx, layout, &name.as_str());
+                debug!("alloc: {:?} ({}) -> place", local, name);
+                let place = PlaceRef::alloca(&bcx, layout, &name.as_str());
                 if dbg {
                     let (scope, span) = mircx.debug_loc(decl.source_info);
                     declare_local(&bcx, &mircx.debug_context, name, layout.ty, scope,
-                        VariableAccess::DirectVariable { alloca: lvalue.llval },
+                        VariableAccess::DirectVariable { alloca: place.llval },
                         VariableKind::LocalVariable, span);
                 }
-                LocalRef::Place(lvalue)
+                LocalRef::Place(place)
             } else {
-                // Temporary or return pointer
-                if local == mir::RETURN_POINTER && mircx.fn_ty.ret.is_indirect() {
-                    debug!("alloc: {:?} (return pointer) -> lvalue", local);
+                // Temporary or return place
+                if local == mir::RETURN_PLACE && mircx.fn_ty.ret.is_indirect() {
+                    debug!("alloc: {:?} (return place) -> place", local);
                     let llretptr = llvm::get_param(llfn, 0);
                     LocalRef::Place(PlaceRef::new_sized(llretptr,
                                                           layout,
                                                           Alignment::AbiAligned))
-                } else if lvalue_locals.contains(local.index()) {
-                    debug!("alloc: {:?} -> lvalue", local);
+                } else if memory_locals.contains(local.index()) {
+                    debug!("alloc: {:?} -> place", local);
                     LocalRef::Place(PlaceRef::alloca(&bcx, layout, &format!("{:?}", local)))
                 } else {
                     // If this is an immediate local, we do not create an
@@ -295,7 +295,7 @@ pub fn trans_mir<'a, 'tcx: 'a>(
             }
         };
 
-        let retptr = allocate_local(mir::RETURN_POINTER);
+        let retptr = allocate_local(mir::RETURN_PLACE);
         iter::once(retptr)
             .chain(args.into_iter())
             .chain(mir.vars_and_temps_iter().map(allocate_local))
@@ -355,12 +355,12 @@ fn create_funclets<'a, 'tcx>(
 }
 
 /// Produce, for each argument, a `ValueRef` pointing at the
-/// argument's value. As arguments are lvalues, these are always
+/// argument's value. As arguments are places, these are always
 /// indirect.
 fn arg_local_refs<'a, 'tcx>(bcx: &Builder<'a, 'tcx>,
                             mircx: &MirContext<'a, 'tcx>,
                             scopes: &IndexVec<mir::VisibilityScope, debuginfo::MirDebugScope>,
-                            lvalue_locals: &BitVector)
+                            memory_locals: &BitVector)
                             -> Vec<LocalRef<'tcx>> {
     let mir = mircx.mir;
     let tcx = bcx.tcx();
@@ -400,18 +400,18 @@ fn arg_local_refs<'a, 'tcx>(bcx: &Builder<'a, 'tcx>,
                 _ => bug!("spread argument isn't a tuple?!")
             };
 
-            let lvalue = PlaceRef::alloca(bcx, bcx.ccx.layout_of(arg_ty), &name);
+            let place = PlaceRef::alloca(bcx, bcx.ccx.layout_of(arg_ty), &name);
             for i in 0..tupled_arg_tys.len() {
                 let arg = &mircx.fn_ty.args[idx];
                 idx += 1;
-                arg.store_fn_arg(bcx, &mut llarg_idx, lvalue.project_field(bcx, i));
+                arg.store_fn_arg(bcx, &mut llarg_idx, place.project_field(bcx, i));
             }
 
             // Now that we have one alloca that contains the aggregate value,
             // we can create one debuginfo entry for the argument.
             arg_scope.map(|scope| {
                 let variable_access = VariableAccess::DirectVariable {
-                    alloca: lvalue.llval
+                    alloca: place.llval
                 };
                 declare_local(
                     bcx,
@@ -424,7 +424,7 @@ fn arg_local_refs<'a, 'tcx>(bcx: &Builder<'a, 'tcx>,
                 );
             });
 
-            return LocalRef::Place(lvalue);
+            return LocalRef::Place(place);
         }
 
         let arg = &mircx.fn_ty.args[idx];
@@ -433,7 +433,7 @@ fn arg_local_refs<'a, 'tcx>(bcx: &Builder<'a, 'tcx>,
             llarg_idx += 1;
         }
 
-        if arg_scope.is_none() && !lvalue_locals.contains(local.index()) {
+        if arg_scope.is_none() && !memory_locals.contains(local.index()) {
             // We don't have to cast or keep the argument in the alloca.
             // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead
             // of putting everything in allocas just so we can use llvm.dbg.declare.
@@ -467,7 +467,7 @@ fn arg_local_refs<'a, 'tcx>(bcx: &Builder<'a, 'tcx>,
             }
         }
 
-        let lvalue = if arg.is_indirect() {
+        let place = if arg.is_indirect() {
             // Don't copy an indirect argument to an alloca, the caller
             // already put it in a temporary alloca and gave it up.
             // FIXME: lifetimes
@@ -487,13 +487,13 @@ fn arg_local_refs<'a, 'tcx>(bcx: &Builder<'a, 'tcx>,
                 // need to insert a deref here, but the C ABI uses a pointer and a copy using the
                 // byval attribute, for which LLVM does the deref itself, so we must not add it.
                 let mut variable_access = VariableAccess::DirectVariable {
-                    alloca: lvalue.llval
+                    alloca: place.llval
                 };
 
                 if let PassMode::Indirect(ref attrs) = arg.mode {
                     if !attrs.contains(ArgAttribute::ByVal) {
                         variable_access = VariableAccess::IndirectVariable {
-                            alloca: lvalue.llval,
+                            alloca: place.llval,
                             address_operations: &deref_op,
                         };
                     }
@@ -535,10 +535,10 @@ fn arg_local_refs<'a, 'tcx>(bcx: &Builder<'a, 'tcx>,
                 let alloc = PlaceRef::alloca(bcx,
                     bcx.ccx.layout_of(tcx.mk_mut_ptr(arg.layout.ty)),
                     "__debuginfo_env_ptr");
-                bcx.store(lvalue.llval, alloc.llval, None);
+                bcx.store(place.llval, alloc.llval, None);
                 alloc.llval
             } else {
-                lvalue.llval
+                place.llval
             };
 
             for (i, (decl, ty)) in mir.upvar_decls.iter().zip(upvar_tys).enumerate() {
@@ -580,14 +580,14 @@ fn arg_local_refs<'a, 'tcx>(bcx: &Builder<'a, 'tcx>,
                 );
             }
         });
-        LocalRef::Place(lvalue)
+        LocalRef::Place(place)
     }).collect()
 }
 
 mod analyze;
 mod block;
 mod constant;
-pub mod lvalue;
+pub mod place;
 pub mod operand;
 mod rvalue;
 mod statement;
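
To summarize the allocation decision in `trans_mir` above (a simplified sketch, not part of this patch; in the real code the debuginfo check only applies to named user variables, and the indirect return place reuses the caller-provided pointer rather than a fresh alloca): a local becomes a `PlaceRef` when it has to live in memory and stays an operand otherwise.

    // Simplified stand-in for `allocate_local` above; illustrative only.
    enum Kind { Place, Operand }

    fn allocate(is_return_place: bool, ret_is_indirect: bool,
                in_memory_locals: bool, needs_debuginfo: bool) -> Kind {
        if is_return_place && ret_is_indirect {
            Kind::Place      // backed by the caller-provided return pointer
        } else if in_memory_locals || needs_debuginfo {
            Kind::Place      // needs a real stack slot (alloca)
        } else {
            Kind::Operand    // can live as an immediate value
        }
    }

    fn main() {
        // e.g. the return place of a function with an indirect return.
        match allocate(true, true, false, false) {
            Kind::Place => {}
            Kind::Operand => unreachable!(),
        }
    }
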
index 876b5e7477949300a98c15296578814548ae63a1..9f9710257723145ef02efafe333773f0ab98f5ac 100644 (file)
@@ -25,7 +25,7 @@
 use std::ptr;
 
 use super::{MirContext, LocalRef};
-use super::lvalue::{Alignment, PlaceRef};
+use super::place::{Alignment, PlaceRef};
 
 /// The representation of a Rust value. The enum variant is in fact
 /// uniquely determined by the value's type, but is kept as a
@@ -243,20 +243,20 @@ pub fn store(self, bcx: &Builder<'a, 'tcx>, dest: PlaceRef<'tcx>) {
 impl<'a, 'tcx> MirContext<'a, 'tcx> {
     fn maybe_trans_consume_direct(&mut self,
                                   bcx: &Builder<'a, 'tcx>,
-                                  lvalue: &mir::Place<'tcx>)
+                                  place: &mir::Place<'tcx>)
                                    -> Option<OperandRef<'tcx>>
     {
-        debug!("maybe_trans_consume_direct(lvalue={:?})", lvalue);
+        debug!("maybe_trans_consume_direct(place={:?})", place);
 
         // watch out for locals that do not have an
         // alloca; they are handled somewhat differently
-        if let mir::Place::Local(index) = *lvalue {
+        if let mir::Place::Local(index) = *place {
             match self.locals[index] {
                 LocalRef::Operand(Some(o)) => {
                     return Some(o);
                 }
                 LocalRef::Operand(None) => {
-                    bug!("use of {:?} before def", lvalue);
+                    bug!("use of {:?} before def", place);
                 }
                 LocalRef::Place(..) => {
                     // use path below
@@ -265,7 +265,7 @@ fn maybe_trans_consume_direct(&mut self,
         }
 
         // Moves out of scalar and scalar pair fields are trivial.
-        if let &mir::Place::Projection(ref proj) = lvalue {
+        if let &mir::Place::Projection(ref proj) = place {
             if let mir::ProjectionElem::Field(ref f, _) = proj.elem {
                 if let Some(o) = self.maybe_trans_consume_direct(bcx, &proj.base) {
                     return Some(o.extract_field(bcx, f.index()));
@@ -278,12 +278,12 @@ fn maybe_trans_consume_direct(&mut self,
 
     pub fn trans_consume(&mut self,
                          bcx: &Builder<'a, 'tcx>,
-                         lvalue: &mir::Place<'tcx>)
+                         place: &mir::Place<'tcx>)
                          -> OperandRef<'tcx>
     {
-        debug!("trans_consume(lvalue={:?})", lvalue);
+        debug!("trans_consume(place={:?})", place);
 
-        let ty = self.monomorphized_lvalue_ty(lvalue);
+        let ty = self.monomorphized_place_ty(place);
         let layout = bcx.ccx.layout_of(ty);
 
         // ZSTs don't require any actual memory access.
@@ -291,13 +291,13 @@ pub fn trans_consume(&mut self,
             return OperandRef::new_zst(bcx.ccx, layout);
         }
 
-        if let Some(o) = self.maybe_trans_consume_direct(bcx, lvalue) {
+        if let Some(o) = self.maybe_trans_consume_direct(bcx, place) {
             return o;
         }
 
-        // for most lvalues, to consume them we just load them
+        // for most places, to consume them we just load them
         // out from their home
-        self.trans_lvalue(bcx, lvalue).load(bcx)
+        self.trans_place(bcx, place).load(bcx)
     }
 
     pub fn trans_operand(&mut self,
@@ -308,9 +308,9 @@ pub fn trans_operand(&mut self,
         debug!("trans_operand(operand={:?})", operand);
 
         match *operand {
-            mir::Operand::Copy(ref lvalue) |
-            mir::Operand::Move(ref lvalue) => {
-                self.trans_consume(bcx, lvalue)
+            mir::Operand::Copy(ref place) |
+            mir::Operand::Move(ref place) => {
+                self.trans_consume(bcx, place)
             }
 
             mir::Operand::Constant(ref constant) => {
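
One detail from `trans_consume` above worth spelling out (illustrative only, not part of this patch): the early `OperandRef::new_zst` return is sound because zero-sized types occupy no storage, so consuming one never needs to read from the place at all.

    // Illustrative sketch, not from the patch: all of these are ZSTs.
    use std::mem::size_of;

    struct Unit;

    fn main() {
        let _unit = Unit;
        assert_eq!(size_of::<()>(), 0);
        assert_eq!(size_of::<Unit>(), 0);
        assert_eq!(size_of::<[u64; 0]>(), 0);
    }
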
diff --git a/src/librustc_trans/mir/place.rs b/src/librustc_trans/mir/place.rs
new file mode 100644 (file)
index 0000000..3bcbb7f
--- /dev/null
@@ -0,0 +1,545 @@
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use llvm::{self, ValueRef};
+use rustc::ty::{self, Ty};
+use rustc::ty::layout::{self, Align, TyLayout, LayoutOf};
+use rustc::mir;
+use rustc::mir::tcx::PlaceTy;
+use rustc_data_structures::indexed_vec::Idx;
+use base;
+use builder::Builder;
+use common::{CrateContext, C_usize, C_u8, C_u32, C_uint, C_int, C_null, C_uint_big};
+use consts;
+use type_of::LayoutLlvmExt;
+use type_::Type;
+use value::Value;
+use glue;
+
+use std::ptr;
+use std::ops;
+
+use super::{MirContext, LocalRef};
+use super::operand::{OperandRef, OperandValue};
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum Alignment {
+    Packed(Align),
+    AbiAligned,
+}
+
+impl ops::BitOr for Alignment {
+    type Output = Self;
+
+    fn bitor(self, rhs: Self) -> Self {
+        match (self, rhs) {
+            (Alignment::Packed(a), Alignment::Packed(b)) => {
+                Alignment::Packed(a.min(b))
+            }
+            (Alignment::Packed(x), _) | (_, Alignment::Packed(x)) => {
+                Alignment::Packed(x)
+            }
+            (Alignment::AbiAligned, Alignment::AbiAligned) => {
+                Alignment::AbiAligned
+            }
+        }
+    }
+}
+
+impl<'a> From<TyLayout<'a>> for Alignment {
+    fn from(layout: TyLayout) -> Self {
+        if layout.is_packed() {
+            Alignment::Packed(layout.align)
+        } else {
+            Alignment::AbiAligned
+        }
+    }
+}
+
+impl Alignment {
+    pub fn non_abi(self) -> Option<Align> {
+        match self {
+            Alignment::Packed(x) => Some(x),
+            Alignment::AbiAligned => None,
+        }
+    }
+}
+
+#[derive(Copy, Clone, Debug)]
+pub struct PlaceRef<'tcx> {
+    /// Pointer to the contents of the place
+    pub llval: ValueRef,
+
+    /// This place's extra data if it is unsized, or null
+    pub llextra: ValueRef,
+
+    /// Monomorphized type of this place, including variant information
+    pub layout: TyLayout<'tcx>,
+
+    /// Whether this place is known to be aligned according to its layout
+    pub alignment: Alignment,
+}
+
+impl<'a, 'tcx> PlaceRef<'tcx> {
+    pub fn new_sized(llval: ValueRef,
+                     layout: TyLayout<'tcx>,
+                     alignment: Alignment)
+                     -> PlaceRef<'tcx> {
+        PlaceRef {
+            llval,
+            llextra: ptr::null_mut(),
+            layout,
+            alignment
+        }
+    }
+
+    pub fn alloca(bcx: &Builder<'a, 'tcx>, layout: TyLayout<'tcx>, name: &str)
+                  -> PlaceRef<'tcx> {
+        debug!("alloca({:?}: {:?})", name, layout);
+        let tmp = bcx.alloca(layout.llvm_type(bcx.ccx), name, layout.align);
+        Self::new_sized(tmp, layout, Alignment::AbiAligned)
+    }
+
+    pub fn len(&self, ccx: &CrateContext<'a, 'tcx>) -> ValueRef {
+        if let layout::FieldPlacement::Array { count, .. } = self.layout.fields {
+            if self.layout.is_unsized() {
+                assert!(self.has_extra());
+                assert_eq!(count, 0);
+                self.llextra
+            } else {
+                C_usize(ccx, count)
+            }
+        } else {
+            bug!("unexpected layout `{:#?}` in PlaceRef::len", self.layout)
+        }
+    }
+
+    pub fn has_extra(&self) -> bool {
+        !self.llextra.is_null()
+    }
+
+    pub fn load(&self, bcx: &Builder<'a, 'tcx>) -> OperandRef<'tcx> {
+        debug!("PlaceRef::load: {:?}", self);
+
+        assert!(!self.has_extra());
+
+        if self.layout.is_zst() {
+            return OperandRef::new_zst(bcx.ccx, self.layout);
+        }
+
+        let scalar_load_metadata = |load, scalar: &layout::Scalar| {
+            let (min, max) = (scalar.valid_range.start, scalar.valid_range.end);
+            let max_next = max.wrapping_add(1);
+            let bits = scalar.value.size(bcx.ccx).bits();
+            assert!(bits <= 128);
+            let mask = !0u128 >> (128 - bits);
+            // For a (max) value of -1, max will be `-1 as usize`, which overflows.
+            // However, that is fine here (it would still represent the full range),
+            // i.e., if the range is everything.  The lo==hi case would be
+            // rejected by the LLVM verifier (it would mean either an
+            // empty set, which is impossible, or the entire range of the
+            // type, which is pointless).
+            match scalar.value {
+                layout::Int(..) if max_next & mask != min & mask => {
+                    // llvm::ConstantRange can deal with ranges that wrap around,
+                    // so an overflow on (max + 1) is fine.
+                    bcx.range_metadata(load, min..max_next);
+                }
+                layout::Pointer if 0 < min && min < max => {
+                    bcx.nonnull_metadata(load);
+                }
+                _ => {}
+            }
+        };
+
+        let val = if self.layout.is_llvm_immediate() {
+            let mut const_llval = ptr::null_mut();
+            unsafe {
+                let global = llvm::LLVMIsAGlobalVariable(self.llval);
+                if !global.is_null() && llvm::LLVMIsGlobalConstant(global) == llvm::True {
+                    const_llval = llvm::LLVMGetInitializer(global);
+                }
+            }
+
+            let llval = if !const_llval.is_null() {
+                const_llval
+            } else {
+                let load = bcx.load(self.llval, self.alignment.non_abi());
+                if let layout::Abi::Scalar(ref scalar) = self.layout.abi {
+                    scalar_load_metadata(load, scalar);
+                }
+                load
+            };
+            OperandValue::Immediate(base::to_immediate(bcx, llval, self.layout))
+        } else if let layout::Abi::ScalarPair(ref a, ref b) = self.layout.abi {
+            let load = |i, scalar: &layout::Scalar| {
+                let mut llptr = bcx.struct_gep(self.llval, i as u64);
+                // Make sure to always load i1 as i8.
+                if scalar.is_bool() {
+                    llptr = bcx.pointercast(llptr, Type::i8p(bcx.ccx));
+                }
+                let load = bcx.load(llptr, self.alignment.non_abi());
+                scalar_load_metadata(load, scalar);
+                if scalar.is_bool() {
+                    bcx.trunc(load, Type::i1(bcx.ccx))
+                } else {
+                    load
+                }
+            };
+            OperandValue::Pair(load(0, a), load(1, b))
+        } else {
+            OperandValue::Ref(self.llval, self.alignment)
+        };
+
+        OperandRef { val, layout: self.layout }
+    }
+
+    /// Access a field, at a point when the value's case is known.
+    pub fn project_field(self, bcx: &Builder<'a, 'tcx>, ix: usize) -> PlaceRef<'tcx> {
+        let ccx = bcx.ccx;
+        let field = self.layout.field(ccx, ix);
+        let offset = self.layout.fields.offset(ix);
+        let alignment = self.alignment | Alignment::from(self.layout);
+
+        let simple = || {
+            // Unions and newtypes only use an offset of 0.
+            let llval = if offset.bytes() == 0 {
+                self.llval
+            } else if let layout::Abi::ScalarPair(ref a, ref b) = self.layout.abi {
+                // Offsets have to match either first or second field.
+                assert_eq!(offset, a.value.size(ccx).abi_align(b.value.align(ccx)));
+                bcx.struct_gep(self.llval, 1)
+            } else {
+                bcx.struct_gep(self.llval, self.layout.llvm_field_index(ix))
+            };
+            PlaceRef {
+                // HACK(eddyb) have to bitcast pointers until LLVM removes pointee types.
+                llval: bcx.pointercast(llval, field.llvm_type(ccx).ptr_to()),
+                llextra: if ccx.shared().type_has_metadata(field.ty) {
+                    self.llextra
+                } else {
+                    ptr::null_mut()
+                },
+                layout: field,
+                alignment,
+            }
+        };
+
+        // Simple case - we can just GEP the field
+        //   * Packed struct - There is no alignment padding
+        //   * Field is sized - pointer is properly aligned already
+        if self.layout.is_packed() || !field.is_unsized() {
+            return simple();
+        }
+
+        // If the type of the last field is [T], str or a foreign type, then we don't need to do
+        // any adjustments
+        match field.ty.sty {
+            ty::TySlice(..) | ty::TyStr | ty::TyForeign(..) => return simple(),
+            _ => ()
+        }
+
+        // There's no metadata available, log the case and just do the GEP.
+        if !self.has_extra() {
+            debug!("Unsized field `{}`, of `{:?}` has no metadata for adjustment",
+                ix, Value(self.llval));
+            return simple();
+        }
+
+        // We need to get the pointer manually now.
+        // We do this by casting to a *i8, then offsetting it by the appropriate amount.
+        // We do this instead of, say, simply adjusting the pointer from the result of a GEP
+        // because the field may have an arbitrary alignment in the LLVM representation
+        // anyway.
+        //
+        // To demonstrate:
+        //   struct Foo<T: ?Sized> {
+        //      x: u16,
+        //      y: T
+        //   }
+        //
+        // The type Foo<Foo<Trait>> is represented in LLVM as { u16, { u16, u8 }}, meaning that
+        // the `y` field has 16-bit alignment.
+
+        let meta = self.llextra;
+
+        let unaligned_offset = C_usize(ccx, offset.bytes());
+
+        // Get the alignment of the field
+        let (_, align) = glue::size_and_align_of_dst(bcx, field.ty, meta);
+
+        // Bump the unaligned offset up to the appropriate alignment using the
+        // following expression:
+        //
+        //   (unaligned offset + (align - 1)) & -align
+
+        // Calculate offset
+        let align_sub_1 = bcx.sub(align, C_usize(ccx, 1u64));
+        let offset = bcx.and(bcx.add(unaligned_offset, align_sub_1),
+                             bcx.neg(align));
+
+        debug!("struct_field_ptr: DST field offset: {:?}", Value(offset));
+
+        // Cast and adjust pointer
+        let byte_ptr = bcx.pointercast(self.llval, Type::i8p(ccx));
+        let byte_ptr = bcx.gep(byte_ptr, &[offset]);
+
+        // Finally, cast back to the type expected
+        let ll_fty = field.llvm_type(ccx);
+        debug!("struct_field_ptr: Field type is {:?}", ll_fty);
+
+        PlaceRef {
+            llval: bcx.pointercast(byte_ptr, ll_fty.ptr_to()),
+            llextra: self.llextra,
+            layout: field,
+            alignment,
+        }
+    }
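A minimal standalone sketch of the offset rounding used above, assuming a power-of-two alignment (the `align_up` helper below is purely illustrative and not part of the patch):

    // Round `offset` up to the next multiple of `align`, mirroring
    // `(unaligned offset + (align - 1)) & -align` from project_field.
    // `align` is assumed to be a power of two, as layout alignments are.
    fn align_up(offset: u64, align: u64) -> u64 {
        assert!(align.is_power_of_two());
        // In two's complement, `-align` is exactly the mask that clears the
        // low bits, which is what `bcx.neg(align)` produces in the IR.
        (offset + (align - 1)) & align.wrapping_neg()
    }

    fn main() {
        assert_eq!(align_up(2, 8), 8);   // a u16 followed by an 8-aligned DST tail
        assert_eq!(align_up(16, 8), 16); // already-aligned offsets are unchanged
    }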
+
+    /// Obtain the actual discriminant of a value.
+    pub fn trans_get_discr(self, bcx: &Builder<'a, 'tcx>, cast_to: Ty<'tcx>) -> ValueRef {
+        let cast_to = bcx.ccx.layout_of(cast_to).immediate_llvm_type(bcx.ccx);
+        match self.layout.variants {
+            layout::Variants::Single { index } => {
+                return C_uint(cast_to, index as u64);
+            }
+            layout::Variants::Tagged { .. } |
+            layout::Variants::NicheFilling { .. } => {},
+        }
+
+        let discr = self.project_field(bcx, 0);
+        let lldiscr = discr.load(bcx).immediate();
+        match self.layout.variants {
+            layout::Variants::Single { .. } => bug!(),
+            layout::Variants::Tagged { ref discr, .. } => {
+                let signed = match discr.value {
+                    layout::Int(_, signed) => signed,
+                    _ => false
+                };
+                bcx.intcast(lldiscr, cast_to, signed)
+            }
+            layout::Variants::NicheFilling {
+                dataful_variant,
+                ref niche_variants,
+                niche_start,
+                ..
+            } => {
+                let niche_llty = discr.layout.immediate_llvm_type(bcx.ccx);
+                if niche_variants.start == niche_variants.end {
+                    // FIXME(eddyb) Check the actual primitive type here.
+                    let niche_llval = if niche_start == 0 {
+                        // HACK(eddyb) Using `C_null` as it works on all types.
+                        C_null(niche_llty)
+                    } else {
+                        C_uint_big(niche_llty, niche_start)
+                    };
+                    bcx.select(bcx.icmp(llvm::IntEQ, lldiscr, niche_llval),
+                        C_uint(cast_to, niche_variants.start as u64),
+                        C_uint(cast_to, dataful_variant as u64))
+                } else {
+                    // Rebase from niche values to discriminant values.
+                    let delta = niche_start.wrapping_sub(niche_variants.start as u128);
+                    let lldiscr = bcx.sub(lldiscr, C_uint_big(niche_llty, delta));
+                    let lldiscr_max = C_uint(niche_llty, niche_variants.end as u64);
+                    bcx.select(bcx.icmp(llvm::IntULE, lldiscr, lldiscr_max),
+                        bcx.intcast(lldiscr, cast_to, false),
+                        C_uint(cast_to, dataful_variant as u64))
+                }
+            }
+        }
+    }
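To illustrate the niche decoding above with made-up numbers (variants 1..=2 stored in a niche whose tags start at 5, every other tag meaning the dataful variant 0), the arithmetic reduces to the following sketch; the concrete values are hypothetical:

    // Mirrors the `wrapping_sub` + unsigned-`<=` select emitted above.
    fn decode_discr(tag: u128) -> u64 {
        let (niche_start, niche_variants_start, niche_variants_end, dataful_variant) =
            (5u128, 1u64, 2u64, 0u64);
        // Rebase from niche tag values to discriminant values.
        let delta = niche_start.wrapping_sub(niche_variants_start as u128);
        let rebased = tag.wrapping_sub(delta);
        if rebased <= niche_variants_end as u128 {
            rebased as u64      // tag was inside the niche range
        } else {
            dataful_variant     // any other tag encodes the dataful variant
        }
    }
    // decode_discr(5) == 1, decode_discr(6) == 2, decode_discr(0) == 0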
+
+    /// Set the discriminant for a new value of the given case of the given
+    /// representation.
+    pub fn trans_set_discr(&self, bcx: &Builder<'a, 'tcx>, variant_index: usize) {
+        match self.layout.variants {
+            layout::Variants::Single { index } => {
+                if index != variant_index {
+                    // If the layout of an enum is `Single`, all
+                    // other variants are necessarily uninhabited.
+                    assert_eq!(self.layout.for_variant(bcx.ccx, variant_index).abi,
+                               layout::Abi::Uninhabited);
+                }
+            }
+            layout::Variants::Tagged { .. } => {
+                let ptr = self.project_field(bcx, 0);
+                let to = self.layout.ty.ty_adt_def().unwrap()
+                    .discriminant_for_variant(bcx.tcx(), variant_index)
+                    .to_u128_unchecked() as u64;
+                bcx.store(C_int(ptr.layout.llvm_type(bcx.ccx), to as i64),
+                    ptr.llval, ptr.alignment.non_abi());
+            }
+            layout::Variants::NicheFilling {
+                dataful_variant,
+                ref niche_variants,
+                niche_start,
+                ..
+            } => {
+                if variant_index != dataful_variant {
+                    if bcx.sess().target.target.arch == "arm" ||
+                       bcx.sess().target.target.arch == "aarch64" {
+                        // Issue #34427: As a workaround for an LLVM bug on ARM,
+                        // use a memset of 0 before assigning the niche value.
+                        let llptr = bcx.pointercast(self.llval, Type::i8(bcx.ccx).ptr_to());
+                        let fill_byte = C_u8(bcx.ccx, 0);
+                        let (size, align) = self.layout.size_and_align();
+                        let size = C_usize(bcx.ccx, size.bytes());
+                        let align = C_u32(bcx.ccx, align.abi() as u32);
+                        base::call_memset(bcx, llptr, fill_byte, size, align, false);
+                    }
+
+                    let niche = self.project_field(bcx, 0);
+                    let niche_llty = niche.layout.immediate_llvm_type(bcx.ccx);
+                    let niche_value = ((variant_index - niche_variants.start) as u128)
+                        .wrapping_add(niche_start);
+                    // FIXME(eddyb) Check the actual primitive type here.
+                    let niche_llval = if niche_value == 0 {
+                        // HACK(eddyb) Using `C_null` as it works on all types.
+                        C_null(niche_llty)
+                    } else {
+                        C_uint_big(niche_llty, niche_value)
+                    };
+                    OperandValue::Immediate(niche_llval).store(bcx, niche);
+                }
+            }
+        }
+    }
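The store path is the inverse mapping; with the same hypothetical values as the decoding sketch above, a non-dataful variant index is rebased onto the niche's tag range before being written:

    // Inverse of decode_discr: map a non-dataful variant index to its tag,
    // as trans_set_discr does before storing into the niche field.
    fn encode_niche(variant_index: u64) -> u128 {
        let (niche_start, niche_variants_start) = (5u128, 1u64);
        ((variant_index - niche_variants_start) as u128).wrapping_add(niche_start)
    }
    // encode_niche(1) == 5, encode_niche(2) == 6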
+
+    pub fn project_index(&self, bcx: &Builder<'a, 'tcx>, llindex: ValueRef)
+                         -> PlaceRef<'tcx> {
+        PlaceRef {
+            llval: bcx.inbounds_gep(self.llval, &[C_usize(bcx.ccx, 0), llindex]),
+            llextra: ptr::null_mut(),
+            layout: self.layout.field(bcx.ccx, 0),
+            alignment: self.alignment
+        }
+    }
+
+    pub fn project_downcast(&self, bcx: &Builder<'a, 'tcx>, variant_index: usize)
+                            -> PlaceRef<'tcx> {
+        let mut downcast = *self;
+        downcast.layout = self.layout.for_variant(bcx.ccx, variant_index);
+
+        // Cast to the appropriate variant struct type.
+        let variant_ty = downcast.layout.llvm_type(bcx.ccx);
+        downcast.llval = bcx.pointercast(downcast.llval, variant_ty.ptr_to());
+
+        downcast
+    }
+
+    pub fn storage_live(&self, bcx: &Builder<'a, 'tcx>) {
+        bcx.lifetime_start(self.llval, self.layout.size);
+    }
+
+    pub fn storage_dead(&self, bcx: &Builder<'a, 'tcx>) {
+        bcx.lifetime_end(self.llval, self.layout.size);
+    }
+}
+
+impl<'a, 'tcx> MirContext<'a, 'tcx> {
+    pub fn trans_place(&mut self,
+                        bcx: &Builder<'a, 'tcx>,
+                        place: &mir::Place<'tcx>)
+                        -> PlaceRef<'tcx> {
+        debug!("trans_place(place={:?})", place);
+
+        let ccx = bcx.ccx;
+        let tcx = ccx.tcx();
+
+        if let mir::Place::Local(index) = *place {
+            match self.locals[index] {
+                LocalRef::Place(place) => {
+                    return place;
+                }
+                LocalRef::Operand(..) => {
+                    bug!("using operand local {:?} as place", place);
+                }
+            }
+        }
+
+        let result = match *place {
+            mir::Place::Local(_) => bug!(), // handled above
+            mir::Place::Static(box mir::Static { def_id, ty }) => {
+                PlaceRef::new_sized(consts::get_static(ccx, def_id),
+                                     ccx.layout_of(self.monomorphize(&ty)),
+                                     Alignment::AbiAligned)
+            },
+            mir::Place::Projection(box mir::Projection {
+                ref base,
+                elem: mir::ProjectionElem::Deref
+            }) => {
+                // Load the pointer from its location.
+                self.trans_consume(bcx, base).deref(bcx.ccx)
+            }
+            mir::Place::Projection(ref projection) => {
+                let tr_base = self.trans_place(bcx, &projection.base);
+
+                match projection.elem {
+                    mir::ProjectionElem::Deref => bug!(),
+                    mir::ProjectionElem::Field(ref field, _) => {
+                        tr_base.project_field(bcx, field.index())
+                    }
+                    mir::ProjectionElem::Index(index) => {
+                        let index = &mir::Operand::Copy(mir::Place::Local(index));
+                        let index = self.trans_operand(bcx, index);
+                        let llindex = index.immediate();
+                        tr_base.project_index(bcx, llindex)
+                    }
+                    mir::ProjectionElem::ConstantIndex { offset,
+                                                         from_end: false,
+                                                         min_length: _ } => {
+                        let lloffset = C_usize(bcx.ccx, offset as u64);
+                        tr_base.project_index(bcx, lloffset)
+                    }
+                    mir::ProjectionElem::ConstantIndex { offset,
+                                                         from_end: true,
+                                                         min_length: _ } => {
+                        let lloffset = C_usize(bcx.ccx, offset as u64);
+                        let lllen = tr_base.len(bcx.ccx);
+                        let llindex = bcx.sub(lllen, lloffset);
+                        tr_base.project_index(bcx, llindex)
+                    }
+                    mir::ProjectionElem::Subslice { from, to } => {
+                        let mut subslice = tr_base.project_index(bcx,
+                            C_usize(bcx.ccx, from as u64));
+                        let projected_ty = PlaceTy::Ty { ty: tr_base.layout.ty }
+                            .projection_ty(tcx, &projection.elem).to_ty(bcx.tcx());
+                        subslice.layout = bcx.ccx.layout_of(self.monomorphize(&projected_ty));
+
+                        if subslice.layout.is_unsized() {
+                            assert!(tr_base.has_extra());
+                            subslice.llextra = bcx.sub(tr_base.llextra,
+                                C_usize(bcx.ccx, (from as u64) + (to as u64)));
+                        }
+
+                        // Cast the place pointer type to the new
+                        // array or slice type (*[%_; new_len]).
+                        subslice.llval = bcx.pointercast(subslice.llval,
+                            subslice.layout.llvm_type(bcx.ccx).ptr_to());
+
+                        subslice
+                    }
+                    mir::ProjectionElem::Downcast(_, v) => {
+                        tr_base.project_downcast(bcx, v)
+                    }
+                }
+            }
+        };
+        debug!("trans_place(place={:?}) => {:?}", place, result);
+        result
+    }
+
+    pub fn monomorphized_place_ty(&self, place: &mir::Place<'tcx>) -> Ty<'tcx> {
+        let tcx = self.ccx.tcx();
+        let place_ty = place.ty(self.mir, tcx);
+        self.monomorphize(&place_ty.to_ty(tcx))
+    }
+}
+
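To relate the match arms in trans_place to surface Rust, here is a hypothetical source-level fragment (not taken from this patch) together with the projection chain its place produces; each element corresponds to one arm above:

    // The place `(*r).1[i]` is built from projections over the local `r`:
    //   Place::Local(r) -> Deref -> Field(1) -> Index(i)
    // Deref goes through trans_consume(..).deref(..), Field(1) through
    // PlaceRef::project_field, and Index(i) through PlaceRef::project_index.
    fn read_elem(r: &(u8, [u32; 4]), i: usize) -> u32 {
        (*r).1[i]
    }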
index 431c34eb9e677a371ae550449927202e7e689a42..a93c0cea11869a724782a8714a4644a507bb2d57 100644 (file)
@@ -32,7 +32,7 @@
 use super::{MirContext, LocalRef};
 use super::constant::const_scalar_checked_binop;
 use super::operand::{OperandRef, OperandValue};
-use super::lvalue::PlaceRef;
+use super::place::PlaceRef;
 
 impl<'a, 'tcx> MirContext<'a, 'tcx> {
     pub fn trans_rvalue(&mut self,
@@ -334,17 +334,17 @@ pub fn trans_rvalue_operand(&mut self,
                 })
             }
 
-            mir::Rvalue::Ref(_, bk, ref lvalue) => {
-                let tr_lvalue = self.trans_lvalue(&bcx, lvalue);
+            mir::Rvalue::Ref(_, bk, ref place) => {
+                let tr_place = self.trans_place(&bcx, place);
 
-                let ty = tr_lvalue.layout.ty;
+                let ty = tr_place.layout.ty;
 
-                // Note: lvalues are indirect, so storing the `llval` into the
+                // Note: places are indirect, so storing the `llval` into the
                 // destination effectively creates a reference.
                 let val = if !bcx.ccx.shared().type_has_metadata(ty) {
-                    OperandValue::Immediate(tr_lvalue.llval)
+                    OperandValue::Immediate(tr_place.llval)
                 } else {
-                    OperandValue::Pair(tr_lvalue.llval, tr_lvalue.llextra)
+                    OperandValue::Pair(tr_place.llval, tr_place.llextra)
                 };
                 (bcx, OperandRef {
                     val,
@@ -355,8 +355,8 @@ pub fn trans_rvalue_operand(&mut self,
                 })
             }
 
-            mir::Rvalue::Len(ref lvalue) => {
-                let size = self.evaluate_array_len(&bcx, lvalue);
+            mir::Rvalue::Len(ref place) => {
+                let size = self.evaluate_array_len(&bcx, place);
                 let operand = OperandRef {
                     val: OperandValue::Immediate(size),
                     layout: bcx.ccx.layout_of(bcx.tcx().types.usize),
@@ -424,9 +424,9 @@ pub fn trans_rvalue_operand(&mut self,
                 })
             }
 
-            mir::Rvalue::Discriminant(ref lvalue) => {
+            mir::Rvalue::Discriminant(ref place) => {
                 let discr_ty = rvalue.ty(&*self.mir, bcx.tcx());
-                let discr =  self.trans_lvalue(&bcx, lvalue)
+                let discr =  self.trans_place(&bcx, place)
                     .trans_get_discr(&bcx, discr_ty);
                 (bcx, OperandRef {
                     val: OperandValue::Immediate(discr),
@@ -486,11 +486,11 @@ pub fn trans_rvalue_operand(&mut self,
 
     fn evaluate_array_len(&mut self,
                           bcx: &Builder<'a, 'tcx>,
-                          lvalue: &mir::Place<'tcx>) -> ValueRef
+                          place: &mir::Place<'tcx>) -> ValueRef
     {
         // ZSTs are passed as operands and require special handling
-        // because trans_lvalue() panics if Local is operand.
-        if let mir::Place::Local(index) = *lvalue {
+        // because trans_place() panics if the Local is an operand.
+        if let mir::Place::Local(index) = *place {
             if let LocalRef::Operand(Some(op)) = self.locals[index] {
                 if let ty::TyArray(_, n) = op.layout.ty.sty {
                     let n = n.val.to_const_int().unwrap().to_u64().unwrap();
@@ -499,7 +499,7 @@ fn evaluate_array_len(&mut self,
             }
         }
         // use common size calculation for non-zero-sized types
-        let tr_value = self.trans_lvalue(&bcx, lvalue);
+        let tr_value = self.trans_place(&bcx, place);
         return tr_value.len(bcx.ccx);
     }
 
index 20843db2e6451e085472612879cadf33472450f4..e0ca5dcc9d08293cb122d778c37bf137343c3c89 100644 (file)
@@ -25,8 +25,8 @@ pub fn trans_statement(&mut self,
 
         self.set_debug_loc(&bcx, statement.source_info);
         match statement.kind {
-            mir::StatementKind::Assign(ref lvalue, ref rvalue) => {
-                if let mir::Place::Local(index) = *lvalue {
+            mir::StatementKind::Assign(ref place, ref rvalue) => {
+                if let mir::Place::Local(index) = *place {
                     match self.locals[index] {
                         LocalRef::Place(tr_dest) => {
                             self.trans_rvalue(bcx, tr_dest, rvalue)
@@ -49,30 +49,30 @@ pub fn trans_statement(&mut self,
                         }
                     }
                 } else {
-                    let tr_dest = self.trans_lvalue(&bcx, lvalue);
+                    let tr_dest = self.trans_place(&bcx, place);
                     self.trans_rvalue(bcx, tr_dest, rvalue)
                 }
             }
-            mir::StatementKind::SetDiscriminant{ref lvalue, variant_index} => {
-                self.trans_lvalue(&bcx, lvalue)
+            mir::StatementKind::SetDiscriminant{ref place, variant_index} => {
+                self.trans_place(&bcx, place)
                     .trans_set_discr(&bcx, variant_index);
                 bcx
             }
             mir::StatementKind::StorageLive(local) => {
-                if let LocalRef::Place(tr_lval) = self.locals[local] {
-                    tr_lval.storage_live(&bcx);
+                if let LocalRef::Place(tr_place) = self.locals[local] {
+                    tr_place.storage_live(&bcx);
                 }
                 bcx
             }
             mir::StatementKind::StorageDead(local) => {
-                if let LocalRef::Place(tr_lval) = self.locals[local] {
-                    tr_lval.storage_dead(&bcx);
+                if let LocalRef::Place(tr_place) = self.locals[local] {
+                    tr_place.storage_dead(&bcx);
                 }
                 bcx
             }
             mir::StatementKind::InlineAsm { ref asm, ref outputs, ref inputs } => {
                 let outputs = outputs.iter().map(|output| {
-                    self.trans_lvalue(&bcx, output)
+                    self.trans_place(&bcx, output)
                 }).collect();
 
                 let input_vals = inputs.iter().map(|input| {