1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 #![allow(non_camel_case_types, non_snake_case)]
13 //! Code that is useful in various trans modules.
15 pub use self::ExprOrMethodCall::*;
19 use llvm::{ValueRef, BasicBlockRef, BuilderRef, ContextRef};
20 use llvm::{True, False, Bool};
24 use middle::lang_items::LangItem;
25 use middle::mem_categorization as mc;
27 use middle::subst::{self, Subst, Substs};
35 use trans::monomorphize;
36 use trans::type_::Type;
39 use middle::ty::{self, HasProjectionTypes, Ty};
41 use middle::ty_fold::{TypeFolder, TypeFoldable};
42 use util::ppaux::Repr;
43 use util::nodemap::{FnvHashMap, NodeMap};
45 use arena::TypedArena;
46 use libc::{c_uint, c_char};
47 use std::ffi::CString;
48 use std::cell::{Cell, RefCell};
50 use syntax::ast::Ident;
52 use syntax::ast_map::{PathElem, PathName};
53 use syntax::codemap::{DUMMY_SP, Span};
54 use syntax::parse::token::InternedString;
55 use syntax::parse::token;
56 use util::common::memoized;
57 use util::nodemap::FnvHashSet;
59 pub use trans::context::CrateContext;
/// Returns an equivalent value with all free regions removed (note
/// that late-bound regions remain, because they are important for
/// subtyping, but they are anonymized and normalized as well). This
/// is a stronger, caching version of `ty_fold::erase_regions`.
pub fn erase_regions<'tcx,T>(cx: &ty::ctxt<'tcx>, value: &T) -> T
    where T : TypeFoldable<'tcx> + Repr<'tcx>
    // Fold with a RegionEraser, which consults/populates the tcx's
    // normalized-type cache (see `fold_ty` below).
    let value1 = value.fold_with(&mut RegionEraser(cx));
    debug!("erase_regions({}) = {}",
           value.repr(cx), value1.repr(cx));
    // NOTE(review): the function's opening brace and the trailing
    // `value1` return appear to be missing from this excerpt.
// Type folder that erases free regions; wraps a borrow of the type context.
struct RegionEraser<'a, 'tcx: 'a>(&'a ty::ctxt<'tcx>);
impl<'a, 'tcx> TypeFolder<'tcx> for RegionEraser<'a, 'tcx> {
    fn tcx(&self) -> &ty::ctxt<'tcx> { self.0 }

    // Folds a type, going through the tcx's `normalized_cache` so each
    // distinct type is folded only once.
    fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
        match self.tcx().normalized_cache.borrow().get(&ty).cloned() {
        // Cache miss: fold the type's components, then record the result.
        let t_norm = ty_fold::super_fold_ty(self, ty);
        self.tcx().normalized_cache.borrow_mut().insert(ty, t_norm);

    // Anonymizes late-bound regions before folding the binder's contents,
    // so binders differing only in bound-region names become identical.
    fn fold_binder<T>(&mut self, t: &ty::Binder<T>) -> ty::Binder<T>
        where T : TypeFoldable<'tcx> + Repr<'tcx>
        let u = ty::anonymize_late_bound_regions(self.tcx(), t);
        ty_fold::super_fold_binder(self, &u)

    fn fold_region(&mut self, r: ty::Region) -> ty::Region {
        // because late-bound regions affect subtyping, we can't
        // erase the bound/free distinction, but we can replace
        // all free regions with 'static.
        //
        // Note that we *CAN* replace early-bound regions -- the
        // type system never "sees" those, they get substituted
        // away. In trans, they will always be erased to 'static
        // whenever a substitution occurs.
        ty::ReLateBound(..) => r,

    // Regions in substitutions are dropped entirely (ErasedRegions).
    fn fold_substs(&mut self,
                   substs: &subst::Substs<'tcx>)
                   -> subst::Substs<'tcx> {
        subst::Substs { regions: subst::ErasedRegions,
                        types: substs.types.fold_with(self) }
120 // Is the type's representation size known at compile time?
121 pub fn type_is_sized<'tcx>(tcx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool {
122 let param_env = ty::empty_parameter_environment(tcx);
123 ty::type_is_sized(¶m_env, DUMMY_SP, ty)
// Is the type sized from LLVM's point of view?
pub fn lltype_is_sized<'tcx>(cx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool {
        // `ty_open` is treated as sized here; anything else defers to the
        // ordinary sizedness check.
        ty::ty_open(_) => true,
        _ => type_is_sized(cx, ty),

// A pointer or reference is "fat" when its pointee is unsized: the value
// then carries extra metadata alongside the address.
pub fn type_is_fat_ptr<'tcx>(cx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool {
        ty::ty_ptr(ty::mt{ty, ..}) |
        ty::ty_rptr(_, ty::mt{ty, ..}) |
        !type_is_sized(cx, ty)
// Return the smallest part of `ty` which is unsized. Fails if `ty` is sized.
// 'Smallest' here means component of the static representation of the type; not
// the size of an object at runtime.
pub fn unsized_part_of_type<'tcx>(cx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
        // str, trait objects and slices are themselves the unsized part.
        ty::ty_str | ty::ty_trait(..) | ty::ty_vec(..) => ty,
        ty::ty_struct(def_id, substs) => {
            // Collect the struct's unsized field types.
            let unsized_fields: Vec<_> =
                ty::struct_fields(cx, def_id, substs)
                .filter(|ty| !type_is_sized(cx, *ty))

            // Exactly one of the fields must be unsized.
            assert!(unsized_fields.len() == 1);

            // The answer is the unsized part of that single field.
            unsized_part_of_type(cx, unsized_fields[0])

        // Any other type must be sized; reaching here is a caller bug.
        assert!(type_is_sized(cx, ty),
                "unsized_part_of_type failed even though ty is unsized");
        panic!("called unsized_part_of_type with sized ty");
// Some things don't need cleanups during unwinding because the
// task can free them all at once later. Currently only things
// that only contain scalars and shared boxes can avoid unwind
// cleanups.
pub fn type_needs_unwind_cleanup<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
    // Memoize per-type answers in the crate context's cache.
    return memoized(ccx.needs_unwind_cleanup_cache(), ty, |ty| {
        type_needs_unwind_cleanup_(ccx.tcx(), ty, &mut FnvHashSet::new())

    // Recursive worker; `tycache` records the types already visited.
    fn type_needs_unwind_cleanup_<'tcx>(tcx: &ty::ctxt<'tcx>,
                                        tycache: &mut FnvHashSet<Ty<'tcx>>)

        // Prevent infinite recursion
        if !tycache.insert(ty) {

        let mut needs_unwind_cleanup = false;
        ty::maybe_walk_ty(ty, |ty| {
            needs_unwind_cleanup |= match ty.sty {
                // Scalars, tuples of them, and raw pointers: no cleanup.
                ty::ty_bool | ty::ty_int(_) | ty::ty_uint(_) |
                ty::ty_float(_) | ty::ty_tup(_) | ty::ty_ptr(_) => false,

                // An enum needs cleanup if any variant argument does.
                ty::ty_enum(did, substs) =>
                    ty::enum_variants(tcx, did).iter().any(|v|
                        v.args.iter().any(|&aty| {
                            let t = aty.subst(tcx, substs);
                            type_needs_unwind_cleanup_(tcx, t, tycache)

        !needs_unwind_cleanup
// True when dropping a value of this type runs meaningful code,
// per the type-contents analysis.
pub fn type_needs_drop<'tcx>(cx: &ty::ctxt<'tcx>,
    ty::type_contents(cx, ty).needs_drop(cx)

// A struct with exactly one field is "immediate" iff that field is.
fn type_is_newtype_immediate<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                       ty: Ty<'tcx>) -> bool {
        ty::ty_struct(def_id, substs) => {
            let fields = ty::struct_fields(ccx.tcx(), def_id, substs);
            fields.len() == 1 && type_is_immediate(ccx, fields[0].mt.ty)
// Can values of this type be passed around as bare LLVM values
// ("immediates"), rather than via a pointer to memory?
pub fn type_is_immediate<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
    use trans::machine::llsize_of_alloc;
    use trans::type_of::sizing_type_of;

    // Scalars, unique/region pointers, immediate newtypes and SIMD types
    // are candidates for immediate treatment...
    let simple = ty::type_is_scalar(ty) ||
        ty::type_is_unique(ty) || ty::type_is_region_ptr(ty) ||
        type_is_newtype_immediate(ccx, ty) ||
        ty::type_is_simd(tcx, ty);
    // ...but a fat pointer is two words and is not treated as immediate.
    if simple && !type_is_fat_ptr(tcx, ty) {
    if !type_is_sized(tcx, ty) {
        // Small aggregates (no bigger than a machine word) also qualify.
        ty::ty_struct(..) | ty::ty_enum(..) | ty::ty_tup(..) | ty::ty_vec(_, Some(_)) |
        ty::ty_unboxed_closure(..) => {
            let llty = sizing_type_of(ccx, ty);
            llsize_of_alloc(ccx, llty) <= llsize_of_alloc(ccx, ccx.int_type())

        _ => type_is_zero_size(ccx, ty)
/// Identify types which have size zero at runtime.
pub fn type_is_zero_size<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
    use trans::machine::llsize_of_alloc;
    use trans::type_of::sizing_type_of;
    let llty = sizing_type_of(ccx, ty);
    llsize_of_alloc(ccx, llty) == 0

/// Identifies types which we declare to be equivalent to `void` in C for the purpose of function
/// return types. These are `()`, bot, and uninhabited enums. Note that all such types are also
/// zero-size, but not all zero-size types use a `void` return type (in order to aid with C ABI
/// compatibility).
pub fn return_type_is_void(ccx: &CrateContext, ty: Ty) -> bool {
    ty::type_is_nil(ty) || ty::type_is_empty(ccx.tcx(), ty)
/// Generates a unique symbol based off the name given. This is used to create
/// unique symbols for things like closures.
pub fn gensym_name(name: &str) -> PathElem {
    // `gensym` interns a fresh copy of `name`; its index is unique.
    let num = token::gensym(name).uint();
    // use one colon which will get translated to a period by the mangler, and
    // we're guaranteed that `num` is globally unique for this crate.
    PathName(token::gensym(&format!("{}:{}", name, num)[]))

// Runtime type-descriptor record. Only the LLVM `tydesc` value is
// visible in this excerpt; further fields appear to have been elided.
pub struct tydesc_info<'tcx> {
    pub tydesc: ValueRef,
291 * A note on nomenclature of linking: "extern", "foreign", and "upcall".
293 * An "extern" is an LLVM symbol we wind up emitting an undefined external
294 * reference to. This means "we don't have the thing in this compilation unit,
295 * please make sure you link it in at runtime". This could be a reference to
296 * C code found in a C library, or rust code found in a rust crate.
298 * Most "externs" are implicitly declared (automatically) as a result of a
299 * user declaring an extern _module_ dependency; this causes the rust driver
300 * to locate an extern crate, scan its compilation metadata, and emit extern
301 * declarations for any symbols used by the declaring crate.
303 * A "foreign" is an extern that references C (or other non-rust ABI) code.
304 * There is no metadata to scan for extern references so in these cases either
305 * a header-digester like bindgen, or manual function prototypes, have to
306 * serve as declarators. So these are usually given explicitly as prototype
 * declarations, in rust code, with ABI attributes on them noting which ABI to use.
310 * An "upcall" is a foreign call generated by the compiler (not corresponding
311 * to any user-written call in the code) into the runtime library, to perform
312 * some helper task such as bringing a task to life, allocating memory, etc.
// Source-location information (node id + span) for an AST node.
pub struct NodeInfo {

// Builds a NodeInfo from an expression's id and span.
pub fn expr_info(expr: &ast::Expr) -> NodeInfo {
    NodeInfo { id: expr.id, span: expr.span }

// Owns an LLVM builder; the Drop impl below disposes it (RAII).
pub struct BuilderRef_res {

impl Drop for BuilderRef_res {
        llvm::LLVMDisposeBuilder(self.b);

// Constructor for BuilderRef_res.
pub fn BuilderRef_res(b: BuilderRef) -> BuilderRef_res {

// Map from symbol name to its declared LLVM value.
pub type ExternMap = FnvHashMap<String, ValueRef>;

// Sanity check: substitutions reaching trans must contain no inference types.
pub fn validate_substs(substs: &Substs) {
    assert!(substs.types.all(|t| !ty::type_needs_infer(*t)));

// work around bizarre resolve errors
type RvalueDatum<'tcx> = datum::Datum<'tcx, datum::Rvalue>;
type LvalueDatum<'tcx> = datum::Datum<'tcx, datum::Lvalue>;
// Function context. Every LLVM function we create will have one of
// these.
pub struct FunctionContext<'a, 'tcx: 'a> {
    // The ValueRef returned from a call to llvm::LLVMAddFunction; the
    // address of the first instruction in the sequence of
    // instructions for this function that will go in the .text
    // section of the executable we're generating.

    // always an empty parameter-environment
    pub param_env: ty::ParameterEnvironment<'a, 'tcx>,

    // The environment argument in a closure.
    pub llenv: Option<ValueRef>,

    // A pointer to where to store the return value. If the return type is
    // immediate, this points to an alloca in the function. Otherwise, it's a
    // pointer to the hidden first parameter of the function. After function
    // construction, this should always be Some.
    pub llretslotptr: Cell<Option<ValueRef>>,

    // These pub elements: "hoisted basic blocks" containing
    // administrative activities that have to happen in only one place in
    // the function, due to LLVM's quirks.
    // A marker for the place where we want to insert the function's static
    // allocas, so that LLVM will coalesce them into a single alloca call.
    pub alloca_insert_pt: Cell<Option<ValueRef>>,
    pub llreturn: Cell<Option<BasicBlockRef>>,

    // If the function has any nested return's, including something like:
    // fn foo() -> Option<Foo> { Some(Foo { x: return None }) }, then
    // we use a separate alloca for each return
    pub needs_ret_allocas: bool,

    // A value alloca'd for calls to upcalls.rust_personality. Used when
    // outputting the resume instruction.
    pub personality: Cell<Option<ValueRef>>,

    // True if the caller expects this fn to use the out pointer to
    // return. Either way, your code should write into the slot llretslotptr
    // points to, but if this value is false, that slot will be a local alloca.
    pub caller_expects_out_pointer: bool,

    // Maps the DefId's for local variables to the allocas created for
    // them in llallocas.
    pub lllocals: RefCell<NodeMap<LvalueDatum<'tcx>>>,

    // Same as above, but for closure upvars
    pub llupvars: RefCell<NodeMap<ValueRef>>,

    // The NodeId of the function, or -1 if it doesn't correspond to
    // a user-defined function.

    // If this function is being monomorphized, this contains the type
    // substitutions used.
    pub param_substs: &'a Substs<'tcx>,

    // The source span and nesting context where this function comes from, for
    // error reporting and symbol generation.
    pub span: Option<Span>,

    // The arena that blocks are allocated from.
    pub block_arena: &'a TypedArena<BlockS<'a, 'tcx>>,

    // This function's enclosing crate context.
    pub ccx: &'a CrateContext<'a, 'tcx>,

    // Used and maintained by the debuginfo module.
    pub debug_context: debuginfo::FunctionDebugContext,

    // Cleanup scopes for this function.
    pub scopes: RefCell<Vec<cleanup::CleanupScope<'a, 'tcx>>>,

    // Control-flow graph, when one has been computed.
    pub cfg: Option<cfg::CFG>,
impl<'a, 'tcx> FunctionContext<'a, 'tcx> {
    // Index of a user argument in the LLVM argument list, offset by the
    // environment argument when one is present.
    pub fn arg_pos(&self, arg: uint) -> uint {
        let arg = self.env_arg_pos() + arg;
        if self.llenv.is_some() {

    // Index of the environment argument; it follows the out pointer
    // when the caller expects one.
    pub fn env_arg_pos(&self) -> uint {
        if self.caller_expects_out_pointer {

    // Tears down translation-time scaffolding (e.g. erases the alloca
    // insertion marker instruction).
    pub fn cleanup(&self) {
            llvm::LLVMInstructionEraseFromParent(self.alloca_insert_pt

    // Lazily creates, then returns, the function's shared return block.
    pub fn get_llreturn(&self) -> BasicBlockRef {
        if self.llreturn.get().is_none() {
            self.llreturn.set(Some(unsafe {
                llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(), self.llfn,
                                                    "return\0".as_ptr() as *const _)

        self.llreturn.get().unwrap()

    // Returns the slot to write the return value into: a fresh alloca when
    // nested returns require separate slots, otherwise the common ret slot.
    pub fn get_ret_slot(&self, bcx: Block<'a, 'tcx>,
                        output: ty::FnOutput<'tcx>,
                        name: &str) -> ValueRef {
        if self.needs_ret_allocas {
            base::alloca_no_lifetime(bcx, match output {
                ty::FnConverging(output_type) => type_of::type_of(bcx.ccx(), output_type),
                ty::FnDiverging => Type::void(bcx.ccx())

            self.llretslotptr.get().unwrap()

    // Appends a new basic block to this function and wraps it in a BlockS.
    pub fn new_block(&'a self,
                     opt_node_id: Option<ast::NodeId>)
        let name = CString::from_slice(name.as_bytes());
        let llbb = llvm::LLVMAppendBasicBlockInContext(self.ccx.llcx(),

        BlockS::new(llbb, is_lpad, opt_node_id, self)

    // Non-landing-pad block associated with an AST node id.
    pub fn new_id_block(&'a self,
                        node_id: ast::NodeId)
        self.new_block(false, name, Some(node_id))

    // Non-landing-pad block with no associated node id.
    pub fn new_temp_block(&'a self,
        self.new_block(false, name, None)

    // Joins the given predecessor blocks into a fresh "join" block,
    // branching each reachable predecessor into it; the join block is
    // marked unreachable when no predecessor reaches it.
    pub fn join_blocks(&'a self,
                       in_cxs: &[Block<'a, 'tcx>])
        let out = self.new_id_block("join", id);
        let mut reachable = false;
        for bcx in in_cxs.iter() {
            if !bcx.unreachable.get() {
                build::Br(*bcx, out.llbb);

        build::Unreachable(out);

    // Applies this function's monomorphization substitutions to `value`.
    pub fn monomorphize<T>(&self, value: &T) -> T
        where T : TypeFoldable<'tcx> + Repr<'tcx> + HasProjectionTypes + Clone
        monomorphize::apply_param_substs(self.ccx.tcx(),
// Basic block context. We create a block context for each basic block
// (single-entry, single-exit sequence of instructions) we generate from Rust
// code. Each basic block we generate is attached to a function, typically
// with many basic blocks per function. All the basic blocks attached to a
// function are organized as a directed graph.
pub struct BlockS<'blk, 'tcx: 'blk> {
    // The BasicBlockRef returned from a call to
    // llvm::LLVMAppendBasicBlock(llfn, name), which adds a basic
    // block to the function pointed to by llfn. We insert
    // instructions into that block by way of this block context.
    // The block pointing to this one in the function's digraph.
    pub llbb: BasicBlockRef,
    pub terminated: Cell<bool>,
    pub unreachable: Cell<bool>,

    // Is this block part of a landing pad?

    // AST node-id associated with this block, if any. Used for
    // debugging purposes only.
    pub opt_node_id: Option<ast::NodeId>,

    // The function context for the function to which this block is
    // attached.
    pub fcx: &'blk FunctionContext<'blk, 'tcx>,

// Blocks are passed by reference; the arena in the FunctionContext owns
// the BlockS values themselves.
pub type Block<'blk, 'tcx> = &'blk BlockS<'blk, 'tcx>;
impl<'blk, 'tcx> BlockS<'blk, 'tcx> {
    // Allocates a new block context in the function's arena.
    pub fn new(llbb: BasicBlockRef,
               opt_node_id: Option<ast::NodeId>,
               fcx: &'blk FunctionContext<'blk, 'tcx>)
               -> Block<'blk, 'tcx> {
        fcx.block_arena.alloc(BlockS {
            terminated: Cell::new(false),
            unreachable: Cell::new(false),
            opt_node_id: opt_node_id,

    // Convenient accessors for the enclosing contexts.
    pub fn ccx(&self) -> &'blk CrateContext<'blk, 'tcx> {
    pub fn tcx(&self) -> &'blk ty::ctxt<'tcx> {
    pub fn sess(&self) -> &'blk Session { self.fcx.ccx.sess() }

    // Pretty-printing helpers, used mainly for debug output.
    pub fn ident(&self, ident: Ident) -> String {
        token::get_ident(ident).get().to_string()

    pub fn node_id_to_string(&self, id: ast::NodeId) -> String {
        self.tcx().map.node_to_string(id).to_string()

    pub fn expr_to_string(&self, e: &ast::Expr) -> String {

    // Looks up the definition a node id resolves to; reports a compiler
    // bug when no def is recorded for it.
    pub fn def(&self, nid: ast::NodeId) -> def::Def {
        match self.tcx().def_map.borrow().get(&nid) {
            Some(v) => v.clone(),
                self.tcx().sess.bug(&format!(
                    "no def associated with node id {}", nid)[]);

    pub fn val_to_string(&self, val: ValueRef) -> String {
        self.ccx().tn().val_to_string(val)

    pub fn llty_str(&self, ty: Type) -> String {
        self.ccx().tn().type_to_string(ty)

    pub fn ty_to_string(&self, t: Ty<'tcx>) -> String {

    pub fn to_str(&self) -> String {
        format!("[block {:p}]", self)

    // Applies the enclosing function's substitutions to `value`.
    pub fn monomorphize<T>(&self, value: &T) -> T
        where T : TypeFoldable<'tcx> + Repr<'tcx> + HasProjectionTypes + Clone
        monomorphize::apply_param_substs(self.tcx(),
                                         self.fcx.param_substs,
// Lets a block serve as a `Typer` for mem_categorization: node types are
// read from the tcx tables and then monomorphized with the enclosing
// function's substitutions.
impl<'blk, 'tcx> mc::Typer<'tcx> for BlockS<'blk, 'tcx> {
    fn tcx<'a>(&'a self) -> &'a ty::ctxt<'tcx> {

    fn node_ty(&self, id: ast::NodeId) -> mc::McResult<Ty<'tcx>> {
        Ok(node_id_type(self, id))

    fn expr_ty_adjusted(&self, expr: &ast::Expr) -> mc::McResult<Ty<'tcx>> {
        Ok(expr_ty_adjusted(self, expr))

    fn node_method_ty(&self, method_call: ty::MethodCall) -> Option<Ty<'tcx>> {
            .map(|method| monomorphize_type(self, method.ty))

    fn node_method_origin(&self, method_call: ty::MethodCall)
                          -> Option<ty::MethodOrigin<'tcx>>
            .map(|method| method.origin.clone())

    fn adjustments<'a>(&'a self) -> &'a RefCell<NodeMap<ty::AutoAdjustment<'tcx>>> {
        &self.tcx().adjustments

    fn is_method_call(&self, id: ast::NodeId) -> bool {
        self.tcx().method_map.borrow().contains_key(&ty::MethodCall::expr(id))

    fn temporary_scope(&self, rvalue_id: ast::NodeId) -> Option<region::CodeExtent> {
        self.tcx().region_maps.temporary_scope(rvalue_id)

    fn upvar_borrow(&self, upvar_id: ty::UpvarId) -> Option<ty::UpvarBorrow> {
        Some(self.tcx().upvar_borrow_map.borrow()[upvar_id].clone())

    fn capture_mode(&self, closure_expr_id: ast::NodeId)
                    -> ast::CaptureClause {
        self.tcx().capture_modes.borrow()[closure_expr_id].clone()

    fn type_moves_by_default(&self, span: Span, ty: Ty<'tcx>) -> bool {
        self.fcx.param_env.type_moves_by_default(span, ty)
// Unboxed-closure queries on a block delegate to a freshly constructed
// NormalizingUnboxedClosureTyper (defined later in this module).
impl<'blk, 'tcx> ty::UnboxedClosureTyper<'tcx> for BlockS<'blk, 'tcx> {
    fn param_env<'a>(&'a self) -> &'a ty::ParameterEnvironment<'a, 'tcx> {

    fn unboxed_closure_kind(&self,
                            -> ty::UnboxedClosureKind
        let typer = NormalizingUnboxedClosureTyper::new(self.tcx());
        typer.unboxed_closure_kind(def_id)

    fn unboxed_closure_type(&self,
                            substs: &subst::Substs<'tcx>)
                            -> ty::ClosureTy<'tcx>
        let typer = NormalizingUnboxedClosureTyper::new(self.tcx());
        typer.unboxed_closure_type(def_id, substs)

    fn unboxed_closure_upvars(&self,
                              substs: &Substs<'tcx>)
                              -> Option<Vec<ty::UnboxedClosureUpvar<'tcx>>>
        let typer = NormalizingUnboxedClosureTyper::new(self.tcx());
        typer.unboxed_closure_upvars(def_id, substs)

// (block, LLVM value) pair returned by many translation helpers.
pub struct Result<'blk, 'tcx: 'blk> {
    pub bcx: Block<'blk, 'tcx>,

impl<'b, 'tcx> Result<'b, 'tcx> {
    pub fn new(bcx: Block<'b, 'tcx>, val: ValueRef) -> Result<'b, 'tcx> {
// Returns the LLVM type of a value.
pub fn val_ty(v: ValueRef) -> Type {
        Type::from_ref(llvm::LLVMTypeOf(v))

// LLVM constant constructors.
pub fn C_null(t: Type) -> ValueRef {
        llvm::LLVMConstNull(t.to_ref())

pub fn C_undef(t: Type) -> ValueRef {
        llvm::LLVMGetUndef(t.to_ref())

pub fn C_integral(t: Type, u: u64, sign_extend: bool) -> ValueRef {
        llvm::LLVMConstInt(t.to_ref(), u, sign_extend as Bool)

// Floating constant parsed from its string spelling.
pub fn C_floating(s: &str, t: Type) -> ValueRef {
        let s = CString::from_slice(s.as_bytes());
        llvm::LLVMConstRealOfString(t.to_ref(), s.as_ptr())

// `()` is represented as an empty, unpacked struct constant.
pub fn C_nil(ccx: &CrateContext) -> ValueRef {
    C_struct(ccx, &[], false)

// bools are i1 at the LLVM level.
pub fn C_bool(ccx: &CrateContext, val: bool) -> ValueRef {
    C_integral(Type::i1(ccx), val as u64, false)

pub fn C_i32(ccx: &CrateContext, i: i32) -> ValueRef {
    C_integral(Type::i32(ccx), i as u64, true)

pub fn C_i64(ccx: &CrateContext, i: i64) -> ValueRef {
    C_integral(Type::i64(ccx), i as u64, true)

pub fn C_u64(ccx: &CrateContext, i: u64) -> ValueRef {
    C_integral(Type::i64(ccx), i, false)

// Constant of the target's `int` type; asserts the value fits the
// target's pointer width.
pub fn C_int<I: AsI64>(ccx: &CrateContext, i: I) -> ValueRef {
    match machine::llbitsize_of_real(ccx, ccx.int_type()) {
        32 => assert!(v < (1<<31) && v >= -(1<<31)),
        n => panic!("unsupported target size: {}", n)

    C_integral(ccx.int_type(), v as u64, true)

// Constant of the target's `uint` type; asserts the value fits the
// target's pointer width.
pub fn C_uint<I: AsU64>(ccx: &CrateContext, i: I) -> ValueRef {
    match machine::llbitsize_of_real(ccx, ccx.int_type()) {
        32 => assert!(v < (1<<32)),
        n => panic!("unsupported target size: {}", n)

    C_integral(ccx.int_type(), v, false)
// Widening-conversion helpers used by C_int / C_uint above.
pub trait AsI64 { fn as_i64(self) -> i64; }
pub trait AsU64 { fn as_u64(self) -> u64; }

// FIXME: remove the intptr conversions, because they
// are host-architecture-dependent
impl AsI64 for i64 { fn as_i64(self) -> i64 { self as i64 }}
impl AsI64 for i32 { fn as_i64(self) -> i64 { self as i64 }}
impl AsI64 for int { fn as_i64(self) -> i64 { self as i64 }}

impl AsU64 for u64 { fn as_u64(self) -> u64 { self as u64 }}
impl AsU64 for u32 { fn as_u64(self) -> u64 { self as u64 }}
impl AsU64 for uint { fn as_u64(self) -> u64 { self as u64 }}

// i8 constant (takes a uint and truncates to the low 8 bits via LLVM).
pub fn C_u8(ccx: &CrateContext, i: uint) -> ValueRef {
    C_integral(Type::i8(ccx), i as u64, false)
// This is a 'c-like' raw string, which differs from
// our boxed-and-length-annotated strings.
pub fn C_cstr(cx: &CrateContext, s: InternedString, null_terminated: bool) -> ValueRef {
        // Return the cached global when this string was emitted before.
        match cx.const_cstr_cache().borrow().get(&s) {
            Some(&llval) => return llval,

        let sc = llvm::LLVMConstStringInContext(cx.llcx(),
                                                s.get().as_ptr() as *const c_char,
                                                s.get().len() as c_uint,
                                                !null_terminated as Bool);

        // Emit the bytes as an internal, constant global with a gensym'd name.
        let gsym = token::gensym("str");
        let buf = CString::from_vec(format!("str{}", gsym.uint()).into_bytes());
        let g = llvm::LLVMAddGlobal(cx.llmod(), val_ty(sc).to_ref(), buf.as_ptr());
        llvm::LLVMSetInitializer(g, sc);
        llvm::LLVMSetGlobalConstant(g, True);
        llvm::SetLinkage(g, llvm::InternalLinkage);

        // Cache for reuse across calls.
        cx.const_cstr_cache().borrow_mut().insert(s, g);

// NB: Do not use `do_spill_noroot` to make this into a constant string, or
// you will be kicked off fast isel. See issue #4352 for an example of this.
pub fn C_str_slice(cx: &CrateContext, s: InternedString) -> ValueRef {
    // A `&str` constant is a (pointer, length) pair.
    let len = s.get().len();
    let cs = consts::ptrcast(C_cstr(cx, s, false), Type::i8p(cx));
    C_named_struct(cx.tn().find_type("str_slice").unwrap(), &[cs, C_uint(cx, len)])

// Constant byte slice: an internal global holding the bytes, paired with
// the length.
pub fn C_binary_slice(cx: &CrateContext, data: &[u8]) -> ValueRef {
        let len = data.len();
        let lldata = C_bytes(cx, data);

        let gsym = token::gensym("binary");
        let name = format!("binary{}", gsym.uint());
        let name = CString::from_vec(name.into_bytes());
        let g = llvm::LLVMAddGlobal(cx.llmod(), val_ty(lldata).to_ref(),

        llvm::LLVMSetInitializer(g, lldata);
        llvm::LLVMSetGlobalConstant(g, True);
        llvm::SetLinkage(g, llvm::InternalLinkage);

        let cs = consts::ptrcast(g, Type::i8p(cx));
        C_struct(cx, &[cs, C_uint(cx, len)], false)
// Anonymous struct constant, in the crate's LLVM context.
pub fn C_struct(cx: &CrateContext, elts: &[ValueRef], packed: bool) -> ValueRef {
    C_struct_in_context(cx.llcx(), elts, packed)

pub fn C_struct_in_context(llcx: ContextRef, elts: &[ValueRef], packed: bool) -> ValueRef {
        llvm::LLVMConstStructInContext(llcx,
                                       elts.as_ptr(), elts.len() as c_uint,

// Constant for a named (identified) struct type.
pub fn C_named_struct(t: Type, elts: &[ValueRef]) -> ValueRef {
        llvm::LLVMConstNamedStruct(t.to_ref(), elts.as_ptr(), elts.len() as c_uint)

pub fn C_array(ty: Type, elts: &[ValueRef]) -> ValueRef {
        return llvm::LLVMConstArray(ty.to_ref(), elts.as_ptr(), elts.len() as c_uint);

// Constant byte array, in the crate's LLVM context.
pub fn C_bytes(cx: &CrateContext, bytes: &[u8]) -> ValueRef {
    C_bytes_in_context(cx.llcx(), bytes)

pub fn C_bytes_in_context(llcx: ContextRef, bytes: &[u8]) -> ValueRef {
        let ptr = bytes.as_ptr() as *const c_char;
        return llvm::LLVMConstStringInContext(llcx, ptr, bytes.len() as c_uint, True);
// Extracts an element (by index path) from an aggregate constant.
pub fn const_get_elt(cx: &CrateContext, v: ValueRef, us: &[c_uint])
        let r = llvm::LLVMConstExtractValue(v, us.as_ptr(), us.len() as c_uint);

        debug!("const_get_elt(v={}, us={:?}, r={})",
               cx.tn().val_to_string(v), us, cx.tn().val_to_string(r));

// Predicates and accessors over LLVM constant values.
pub fn is_const(v: ValueRef) -> bool {
        llvm::LLVMIsConstant(v) == True

pub fn const_to_int(v: ValueRef) -> i64 {
        llvm::LLVMConstIntGetSExtValue(v)

pub fn const_to_uint(v: ValueRef) -> u64 {
        llvm::LLVMConstIntGetZExtValue(v)

pub fn is_undef(val: ValueRef) -> bool {
        llvm::LLVMIsUndef(val) != False

#[allow(dead_code)] // potentially useful
pub fn is_null(val: ValueRef) -> bool {
        llvm::LLVMIsNull(val) != False
// Applies the enclosing function's type substitutions to `t`.
pub fn monomorphize_type<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, t: Ty<'tcx>) -> Ty<'tcx> {
    bcx.fcx.monomorphize(&t)

// Monomorphized type of the AST node `id`.
pub fn node_id_type<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, id: ast::NodeId) -> Ty<'tcx> {
    let t = ty::node_id_to_type(tcx, id);
    monomorphize_type(bcx, t)

// Monomorphized type of an expression (before adjustments).
pub fn expr_ty<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, ex: &ast::Expr) -> Ty<'tcx> {
    node_id_type(bcx, ex.id)

// Monomorphized type of an expression after adjustments are applied.
pub fn expr_ty_adjusted<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, ex: &ast::Expr) -> Ty<'tcx> {
    monomorphize_type(bcx, ty::expr_ty_adjusted(bcx.tcx(), ex))
/// Attempts to resolve an obligation. The result is a shallow vtable resolution -- meaning that we
/// do not (necessarily) resolve all nested obligations on the impl. Note that type check should
/// guarantee to us that all nested obligations *could be* resolved if we wanted to.
pub fn fulfill_obligation<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                    trait_ref: ty::PolyTraitRef<'tcx>)
                                    -> traits::Vtable<'tcx, ()>
    // Remove any references to regions; this helps improve caching.
    let trait_ref = erase_regions(tcx, &trait_ref);

    // First check the cache.
    match ccx.trait_cache().borrow().get(&trait_ref) {
            info!("Cache hit: {}", trait_ref.repr(ccx.tcx()));
            return (*vtable).clone();

    debug!("trans fulfill_obligation: trait_ref={}", trait_ref.repr(ccx.tcx()));

    ty::populate_implementations_for_trait_if_necessary(tcx, trait_ref.def_id());
    let infcx = infer::new_infer_ctxt(tcx);

    // Do the initial selection for the obligation. This yields the
    // shallow result we are looking for -- that is, what specific impl.
    let typer = NormalizingUnboxedClosureTyper::new(tcx);
    let mut selcx = traits::SelectionContext::new(&infcx, &typer);
    let obligation = traits::Obligation::new(traits::ObligationCause::dummy(),
                                             trait_ref.to_poly_trait_predicate());
    let selection = match selcx.select(&obligation) {
        Ok(Some(selection)) => selection,
            // Ambiguity can happen when monomorphizing during trans
            // expands to some humongo type that never occurred
            // statically -- this humongo type can then overflow,
            // leading to an ambiguous result. So report this as an
            // overflow bug, since I believe this is the only case
            // where ambiguity can result.
            debug!("Encountered ambiguity selecting `{}` during trans, \
                    presuming due to overflow",
                   trait_ref.repr(tcx));
            ccx.sess().span_fatal(
                "reached the recursion limit during monomorphization");

            // Any other selection error is a compiler bug at this stage.
            &format!("Encountered error `{}` selecting `{}` during trans",
                     trait_ref.repr(tcx))[])

    // Currently, we use a fulfillment context to completely resolve
    // all nested obligations. This is because they can inform the
    // inference of the impl's type parameters.
    let mut fulfill_cx = traits::FulfillmentContext::new();
    let vtable = selection.map_move_nested(|predicate| {
        fulfill_cx.register_predicate_obligation(&infcx, predicate);

    let vtable = drain_fulfillment_cx(span, &infcx, &mut fulfill_cx, &vtable);

    // Record the resolved vtable for future lookups.
    info!("Cache miss: {}", trait_ref.repr(ccx.tcx()));
    ccx.trait_cache().borrow_mut().insert(trait_ref,
// An UnboxedClosureTyper that normalizes associated types in its query
// results; used throughout trans, where everything is monomorphized.
pub struct NormalizingUnboxedClosureTyper<'a,'tcx:'a> {
    param_env: ty::ParameterEnvironment<'a, 'tcx>
}

impl<'a,'tcx> NormalizingUnboxedClosureTyper<'a,'tcx> {
    pub fn new(tcx: &'a ty::ctxt<'tcx>) -> NormalizingUnboxedClosureTyper<'a,'tcx> {
        // Parameter environment is used to give details about type parameters,
        // but since we are in trans, everything is fully monomorphized.
        NormalizingUnboxedClosureTyper { param_env: ty::empty_parameter_environment(tcx) }

impl<'a,'tcx> ty::UnboxedClosureTyper<'tcx> for NormalizingUnboxedClosureTyper<'a,'tcx> {
    fn param_env<'b>(&'b self) -> &'b ty::ParameterEnvironment<'b,'tcx> {

    fn unboxed_closure_kind(&self,
                            -> ty::UnboxedClosureKind
        self.param_env.tcx.unboxed_closure_kind(def_id)

    fn unboxed_closure_type(&self,
                            substs: &subst::Substs<'tcx>)
                            -> ty::ClosureTy<'tcx>
        // the substitutions in `substs` are already monomorphized,
        // but we still must normalize associated types
        let closure_ty = self.param_env.tcx.unboxed_closure_type(def_id, substs);
        monomorphize::normalize_associated_type(self.param_env.tcx, &closure_ty)

    fn unboxed_closure_upvars(&self,
                              substs: &Substs<'tcx>)
                              -> Option<Vec<ty::UnboxedClosureUpvar<'tcx>>>
        // the substitutions in `substs` are already monomorphized,
        // but we still must normalize associated types
        let result = ty::unboxed_closure_upvars(&self.param_env, def_id, substs);
        monomorphize::normalize_associated_type(self.param_env.tcx, &result)
// Fully resolves all pending obligations in the fulfillment context,
// then folds `result` through the inference context's freshener to
// substitute any type variables bound along the way.
pub fn drain_fulfillment_cx<'a,'tcx,T>(span: Span,
                                       infcx: &infer::InferCtxt<'a,'tcx>,
                                       fulfill_cx: &mut traits::FulfillmentContext<'tcx>,
    where T : TypeFoldable<'tcx> + Repr<'tcx>
    debug!("drain_fulfillment_cx(result={})",
           result.repr(infcx.tcx));

    // In principle, we only need to do this so long as `result`
    // contains unbound type parameters. It could be a slight
    // optimization to stop iterating early.
    let typer = NormalizingUnboxedClosureTyper::new(infcx.tcx);
    match fulfill_cx.select_all_or_error(infcx, &typer) {
            if errors.iter().all(|e| e.is_overflow()) {
                // See Ok(None) case above.
                infcx.tcx.sess.span_fatal(
                    "reached the recursion limit during monomorphization");

                // Non-overflow fulfillment errors are compiler bugs here.
                infcx.tcx.sess.span_bug(
                    &format!("Encountered errors `{}` fulfilling during trans",
                             errors.repr(infcx.tcx))[]);

    // Use freshen to simultaneously replace all type variables with
    // their bindings and replace all regions with 'static. This is
    // sort of overkill because we do not expect there to be any
    // unbound type variables, hence no `TyFresh` types should ever be
    // created.
    result.fold_with(&mut infcx.freshener())
// Key used to lookup values supplied for type parameters in an expr.
#[derive(Copy, PartialEq, Show)]
pub enum ExprOrMethodCall {
    // Type parameters for a path like `None::<int>`
    ExprId(ast::NodeId),

    // Type parameters for a method call like `a.foo::<int>()`
    MethodCallKey(ty::MethodCall)

// Looks up, sanity-checks, region-erases and monomorphizes the
// substitutions recorded for `node`.
pub fn node_id_substs<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                node: ExprOrMethodCall,
                                param_substs: &subst::Substs<'tcx>)
                                -> subst::Substs<'tcx> {
    let tcx = ccx.tcx();

    let substs = match node {
            ty::node_id_item_substs(tcx, id).substs

        MethodCallKey(method_call) => {
            (*tcx.method_map.borrow())[method_call].substs.clone()

    // Trans must never see inference types; report a bug if it does.
    if substs.types.any(|t| ty::type_needs_infer(*t)) {
        tcx.sess.bug(&format!("type parameters for node {:?} include inference types: {:?}",
                              node, substs.repr(tcx))[]);

    monomorphize::apply_param_substs(tcx,
                                     &substs.erase_regions())
1177 pub fn langcall(bcx: Block,
1182 match bcx.tcx().lang_items.require(li) {
1185 let msg = format!("{} {}", msg, s);
1187 Some(span) => bcx.tcx().sess.span_fatal(span, &msg[]),
1188 None => bcx.tcx().sess.fatal(&msg[]),