1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
12 use rustc::ty::{self, Ty, TypeFoldable};
13 use rustc::mir::repr as mir;
14 use rustc::mir::tcx::LvalueTy;
15 use rustc_data_structures::indexed_vec::Idx;
20 use common::{self, BlockAndBuilder, CrateContext, C_uint, C_undef};
29 use super::{MirContext, LocalRef};
30 use super::operand::OperandValue;
/// The translated location of an MIR lvalue: an LLVM pointer to the value,
/// optional metadata for unsized values, and the lvalue's monomorphized type.
32 #[derive(Copy, Clone, Debug)]
33 pub struct LvalueRef<'tcx> {
34 /// Pointer to the contents of the lvalue
37 /// This lvalue's extra data if it is unsized, or null
38 pub llextra: ValueRef,
40 /// Monomorphized type of this lvalue, including variant information
41 pub ty: LvalueTy<'tcx>,
44 impl<'tcx> LvalueRef<'tcx> {
45 pub fn new_sized(llval: ValueRef, lvalue_ty: LvalueTy<'tcx>) -> LvalueRef<'tcx> {
// Sized values carry no metadata, so `llextra` is left null.
46 LvalueRef { llval: llval, llextra: ptr::null_mut(), ty: lvalue_ty }
/// Allocates a stack slot for a value of type `ty` and wraps it in a
/// sized `LvalueRef`. The type must be fully monomorphized — the assert
/// rejects types that still contain erasable regions.
49 pub fn alloca<'bcx>(bcx: &BlockAndBuilder<'bcx, 'tcx>,
54 assert!(!ty.has_erasable_regions());
55 let lltemp = bcx.with_block(|bcx| base::alloc_ty(bcx, ty, name));
56 LvalueRef::new_sized(lltemp, LvalueTy::from_ty(ty))
/// Computes the length of the value at this lvalue as an LLVM value.
59 pub fn len<'a>(&self, ccx: &CrateContext<'a, 'tcx>) -> ValueRef {
60 let ty = self.ty.to_ty(ccx.tcx())
// Fixed-size arrays have a compile-time length; slices and str carry
// theirs in `llextra` (which must therefore be non-null here).
62 ty::TyArray(_, n) => common::C_uint(ccx, n),
63 ty::TySlice(_) | ty::TyStr => {
64 assert!(self.llextra != ptr::null_mut());
// Any other type has no notion of length for this purpose.
67 _ => bug!("unexpected type `{}` in LvalueRef::len", ty)
/// Returns a pointer to the metadata ("extra") field of a fat pointer
/// (struct GEP at index `abi::FAT_PTR_EXTRA`).
72 pub fn get_meta(b: &Builder, fat_ptr: ValueRef) -> ValueRef {
73 b.struct_gep(fat_ptr, abi::FAT_PTR_EXTRA)
/// Returns a pointer to the data-pointer field of a fat pointer
/// (struct GEP at index `abi::FAT_PTR_ADDR`).
76 pub fn get_dataptr(b: &Builder, fat_ptr: ValueRef) -> ValueRef {
77 b.struct_gep(fat_ptr, abi::FAT_PTR_ADDR)
/// Loads both halves of a fat pointer, returning (data pointer, metadata).
80 pub fn load_fat_ptr(b: &Builder, fat_ptr: ValueRef) -> (ValueRef, ValueRef) {
81 (b.load(get_dataptr(b, fat_ptr)), b.load(get_meta(b, fat_ptr)))
84 impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
/// Translates a MIR `Lvalue` into an `LvalueRef`: an LLVM pointer to the
/// value, plus unsized metadata (or null) and the monomorphized type.
85 pub fn trans_lvalue(&mut self,
86 bcx: &BlockAndBuilder<'bcx, 'tcx>,
87 lvalue: &mir::Lvalue<'tcx>)
89 debug!("trans_lvalue(lvalue={:?})", lvalue);
// Locals (vars, temps, args, return pointer) are resolved through the
// per-local table; only memory-resident locals are legal as lvalues.
94 if let Some(index) = self.mir.local_index(lvalue) {
95 match self.locals[index] {
96 LocalRef::Lvalue(lvalue) => {
99 LocalRef::Operand(..) => {
100 bug!("using operand local {:?} as lvalue", lvalue);
105 let result = match *lvalue {
106 mir::Lvalue::Var(_) |
107 mir::Lvalue::Temp(_) |
108 mir::Lvalue::Arg(_) |
109 mir::Lvalue::ReturnPointer => bug!(), // handled above
110 mir::Lvalue::Static(def_id) => {
// Statics are sized: take the global's address and its
// monomorphized type.
111 let const_ty = self.monomorphized_lvalue_ty(lvalue);
112 LvalueRef::new_sized(consts::get_static(ccx, def_id).val,
113 LvalueTy::from_ty(const_ty))
115 mir::Lvalue::Projection(box mir::Projection {
117 elem: mir::ProjectionElem::Deref
119 // Load the pointer from its location.
120 let ptr = self.trans_consume(bcx, base);
121 let projected_ty = LvalueTy::from_ty(ptr.ty)
122 .projection_ty(tcx, &mir::ProjectionElem::Deref);
123 let projected_ty = bcx.monomorphize(&projected_ty);
// A thin (Immediate) pointer dereferences to a sized value
// (null extra); a fat (Pair) pointer carries its metadata.
124 let (llptr, llextra) = match ptr.val {
125 OperandValue::Immediate(llptr) => (llptr, ptr::null_mut()),
126 OperandValue::Pair(llptr, llextra) => (llptr, llextra),
127 OperandValue::Ref(_) => bug!("Deref of by-Ref type {:?}", ptr.ty)
135 mir::Lvalue::Projection(ref projection) => {
// All remaining projections are computed relative to the
// recursively-translated base lvalue.
136 let tr_base = self.trans_lvalue(bcx, &projection.base);
137 let projected_ty = tr_base.ty.projection_ty(tcx, &projection.elem);
138 let projected_ty = bcx.monomorphize(&projected_ty);
// GEP helper for indexing: a slice pointer already points at
// the element type, while an array pointer needs a leading
// zero index to step inside the array value.
140 let project_index = |llindex| {
141 let element = if let ty::TySlice(_) = tr_base.ty.to_ty(tcx).sty {
142 // Slices already point to the array element type.
143 bcx.inbounds_gep(tr_base.llval, &[llindex])
145 let zero = common::C_uint(bcx.ccx(), 0u64);
146 bcx.inbounds_gep(tr_base.llval, &[zero, llindex])
151 let (llprojected, llextra) = match projection.elem {
152 mir::ProjectionElem::Deref => bug!(),
153 mir::ProjectionElem::Field(ref field, _) => {
154 let base_ty = tr_base.ty.to_ty(tcx);
155 let base_repr = adt::represent_type(ccx, base_ty);
// A field of a downcast projects into that enum
// variant; a plain type uses discriminant 0.
156 let discr = match tr_base.ty {
157 LvalueTy::Ty { .. } => 0,
158 LvalueTy::Downcast { adt_def: _, substs: _, variant_index: v } => v,
160 let discr = discr as u64;
161 let is_sized = common::type_is_sized(tcx, projected_ty.to_ty(tcx));
162 let base = if is_sized {
163 adt::MaybeSizedValue::sized(tr_base.llval)
165 adt::MaybeSizedValue::unsized_(tr_base.llval, tr_base.llextra)
167 let llprojected = adt::trans_field_ptr_builder(bcx, &base_repr, base,
168 Disr(discr), field.index());
169 let llextra = if is_sized {
174 (llprojected, llextra)
176 mir::ProjectionElem::Index(ref index) => {
// Dynamic index: normalize its bit width first
// (see `prepare_index`), then GEP.
177 let index = self.trans_operand(bcx, index);
178 (project_index(self.prepare_index(bcx, index.immediate())), ptr::null_mut())
180 mir::ProjectionElem::ConstantIndex { offset,
// Constant index counted from the start of the sequence.
183 let lloffset = C_uint(bcx.ccx(), offset);
184 (project_index(lloffset), ptr::null_mut())
186 mir::ProjectionElem::ConstantIndex { offset,
// Constant index counted from the end: index = len - offset.
189 let lloffset = C_uint(bcx.ccx(), offset);
190 let lllen = tr_base.len(bcx.ccx());
191 let llindex = bcx.sub(lllen, lloffset);
192 (project_index(llindex), ptr::null_mut())
194 mir::ProjectionElem::Subslice { from, to } => {
// Subslice starts `from` elements into the base.
195 let llindex = C_uint(bcx.ccx(), from);
196 let llbase = project_index(llindex);
198 let base_ty = tr_base.ty.to_ty(bcx.tcx());
201 // must cast the lvalue pointer type to the new
202 // array type (*[%_; new_len]).
203 let base_ty = self.monomorphized_lvalue_ty(lvalue);
204 let llbasety = type_of::type_of(bcx.ccx(), base_ty).ptr_to();
205 let llbase = bcx.pointercast(llbase, llbasety);
206 (llbase, ptr::null_mut())
// Slice base: new length metadata is len - (from + to),
// so the fat-pointer extra must be present.
209 assert!(tr_base.llextra != ptr::null_mut());
210 let lllen = bcx.sub(tr_base.llextra,
211 C_uint(bcx.ccx(), from+to));
214 _ => bug!("unexpected type {:?} in Subslice", base_ty)
217 mir::ProjectionElem::Downcast(..) => {
// Same memory location; only the LvalueTy (variant info)
// changes, which was computed above via projection_ty.
218 (tr_base.llval, tr_base.llextra)
228 debug!("trans_lvalue(lvalue={:?}) => {:?}", lvalue, result);
232 // Perform an action using the given Lvalue.
233 // If the Lvalue is an empty LocalRef::Operand, then a temporary stack slot
234 // is created first, then used as an operand to update the Lvalue.
235 pub fn with_lvalue_ref<F, U>(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>,
236 lvalue: &mir::Lvalue<'tcx>, f: F) -> U
237 where F: FnOnce(&mut Self, LvalueRef<'tcx>) -> U
239 if let Some(index) = self.mir.local_index(lvalue) {
240 match self.locals[index] {
// Local already lives in memory: hand it to the closure directly.
241 LocalRef::Lvalue(lvalue) => f(self, lvalue),
242 LocalRef::Operand(None) => {
// Operand local not yet materialized: allocate a temporary
// stack slot, run the closure against it, then load the slot
// back and record it as the local's operand value.
243 let lvalue_ty = self.monomorphized_lvalue_ty(lvalue);
244 let lvalue = LvalueRef::alloca(bcx,
247 let ret = f(self, lvalue);
248 let op = self.trans_load(bcx, lvalue.llval, lvalue_ty);
249 self.locals[index] = LocalRef::Operand(Some(op));
252 LocalRef::Operand(Some(_)) => {
253 // See comments in LocalRef::new_operand as to why
254 // we always have Some in a ZST LocalRef::Operand.
255 let ty = self.monomorphized_lvalue_ty(lvalue);
256 if common::type_is_zero_size(bcx.ccx(), ty) {
257 // Pass an undef pointer as no stores can actually occur.
258 let llptr = C_undef(type_of(bcx.ccx(), ty).ptr_to());
259 f(self, LvalueRef::new_sized(llptr, LvalueTy::from_ty(ty)))
// A non-ZST operand local must not be overwritten through
// an lvalue reference.
261 bug!("Lvalue local already set");
// Not a local at all: translate the lvalue to memory and use that.
266 let lvalue = self.trans_lvalue(bcx, lvalue);
271 /// Adjust the bitwidth of an index since LLVM is less forgiving
274 /// nmatsakis: is this still necessary? Not sure.
275 fn prepare_index(&mut self,
276 bcx: &BlockAndBuilder<'bcx, 'tcx>,
// Compare the index's actual bit width against the target's int type
// and zero-extend or truncate to match; equal widths presumably pass
// the value through unchanged (that branch is elided here).
281 let index_size = machine::llbitsize_of_real(bcx.ccx(), common::val_ty(llindex));
282 let int_size = machine::llbitsize_of_real(bcx.ccx(), ccx.int_type());
283 if index_size < int_size {
284 bcx.zext(llindex, ccx.int_type())
285 } else if index_size > int_size {
286 bcx.trunc(llindex, ccx.int_type())
/// Computes the type of `lvalue` within this MIR body and monomorphizes
/// it (substituting the concrete type parameters of the current instance).
292 pub fn monomorphized_lvalue_ty(&self, lvalue: &mir::Lvalue<'tcx>) -> Ty<'tcx> {
293 let tcx = self.fcx.ccx.tcx();
294 let lvalue_ty = lvalue.ty(&self.mir, tcx);
295 self.fcx.monomorphize(&lvalue_ty.to_ty(tcx))