1 // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 #![allow(dead_code)] // FFI wrappers
14 use llvm::{AtomicRmwBinOp, AtomicOrdering, SynchronizationScope, AsmDialect};
15 use llvm::{Opcode, IntPredicate, RealPredicate, False, OperandBundleDef};
16 use llvm::{ValueRef, BasicBlockRef, BuilderRef, ModuleRef};
18 use machine::llalign_of_pref;
21 use libc::{c_uint, c_char};
22 use rustc::ty::TyCtxt;
23 use rustc::session::Session;
26 use std::ffi::CString;
30 // All Builders must have an llfn associated with them
// Wrapper pairing a raw LLVM instruction builder with the crate codegen
// context that owns the LLVM context it builds into.
// NOTE(review): this listing is elided (the embedded original line numbers
// skip values), so the struct's closing brace is missing from view.
32 pub struct Builder<'a, 'tcx: 'a> {
// Raw FFI handle to the LLVM IRBuilder; released in the Drop impl below.
33 pub llbuilder: BuilderRef,
// Borrowed crate-wide codegen context; bounds the builder's lifetime.
34 pub ccx: &'a CrateContext<'a, 'tcx>,
// Dispose of the raw LLVM builder on drop so the FFI object is not leaked.
37 impl<'a, 'tcx> Drop for Builder<'a, 'tcx> {
// FFI call — presumably inside an `unsafe { .. }` block in the elided lines.
40 llvm::LLVMDisposeBuilder(self.llbuilder);
45 // This is a really awful way to get a zero-length c-string, but better (and a
46 // lot more efficient) than doing str::as_c_str("", ...) every time.
47 fn noname() -> *const c_char {
// A single static NUL byte doubles as an empty C string passed to LLVM as
// the "name" of unnamed values; the elided lines presumably return `&CNULL`.
48 static CNULL: c_char = 0;
52 impl<'a, 'tcx> Builder<'a, 'tcx> {
// Create a builder positioned at the end of a fresh basic block named
// `name`, appended to function `llfn`.
53 pub fn new_block<'b>(ccx: &'a CrateContext<'a, 'tcx>, llfn: ValueRef, name: &'b str) -> Self {
54 let builder = Builder::with_ccx(ccx);
// Panics if `name` contains an interior NUL byte (CString::new fails).
56 let name = CString::new(name).unwrap();
57 llvm::LLVMAppendBasicBlockInContext(
// `llbb` is bound in the elided lines above to the freshly appended block.
63 builder.position_at_end(llbb);
// Create an unpositioned builder from the crate context.
67 pub fn with_ccx(ccx: &'a CrateContext<'a, 'tcx>) -> Self {
68 // Create a fresh builder from the crate context.
69 let llbuilder = unsafe {
70 llvm::LLVMCreateBuilderInContext(ccx.llcx())
// New builder in a new block of the same function this builder is in.
78 pub fn build_sibling_block<'b>(&self, name: &'b str) -> Builder<'a, 'tcx> {
79 Builder::new_block(self.ccx, self.llfn(), name)
// Session accessor (body elided from this listing).
82 pub fn sess(&self) -> &Session {
// Type-context accessor (body elided from this listing).
86 pub fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {
// The function containing the block the builder is positioned in.
90 pub fn llfn(&self) -> ValueRef {
92 llvm::LLVMGetBasicBlockParent(self.llbb())
// The basic block the builder currently inserts into.
96 pub fn llbb(&self) -> BasicBlockRef {
98 llvm::LLVMGetInsertBlock(self.llbuilder)
// Bump crate-wide LLVM-instruction statistics when the relevant session
// options are enabled; otherwise effectively a no-op.
102 fn count_insn(&self, category: &str) {
103 if self.ccx.sess().trans_stats() {
104 self.ccx.stats().borrow_mut().n_llvm_insns += 1;
// Per-category histogram keyed by the string callers pass; the map update
// itself is elided from this listing.
106 if self.ccx.sess().count_llvm_insns() {
110 .entry(category.to_string())
// Position the builder immediately before an existing instruction.
115 pub fn position_before(&self, insn: ValueRef) {
117 llvm::LLVMPositionBuilderBefore(self.llbuilder, insn);
// Position the builder at the end of `llbb`.
121 pub fn position_at_end(&self, llbb: BasicBlockRef) {
123 llvm::LLVMPositionBuilderAtEnd(self.llbuilder, llbb);
// Position the builder at the start of `llbb`, via the rustc-specific shim.
127 pub fn position_at_start(&self, llbb: BasicBlockRef) {
129 llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
// --- Terminator instructions -------------------------------------------
// Build `ret void`.
133 pub fn ret_void(&self) {
134 self.count_insn("retvoid");
136 llvm::LLVMBuildRetVoid(self.llbuilder);
// Build `ret <v>`.
140 pub fn ret(&self, v: ValueRef) {
141 self.count_insn("ret");
143 llvm::LLVMBuildRet(self.llbuilder, v);
// Build an aggregate return of several values.
147 pub fn aggregate_ret(&self, ret_vals: &[ValueRef]) {
149 llvm::LLVMBuildAggregateRet(self.llbuilder,
151 ret_vals.len() as c_uint);
// Unconditional branch to `dest`.
155 pub fn br(&self, dest: BasicBlockRef) {
156 self.count_insn("br");
158 llvm::LLVMBuildBr(self.llbuilder, dest);
// Conditional branch on `cond`.
162 pub fn cond_br(&self, cond: ValueRef, then_llbb: BasicBlockRef, else_llbb: BasicBlockRef) {
163 self.count_insn("condbr");
165 llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
// Build a `switch`; cases are added later via `add_case`.
169 pub fn switch(&self, v: ValueRef, else_llbb: BasicBlockRef, num_cases: usize) -> ValueRef {
171 llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, num_cases as c_uint)
// Build an `indirectbr` to a computed address.
175 pub fn indirect_br(&self, addr: ValueRef, num_dests: usize) {
176 self.count_insn("indirectbr");
178 llvm::LLVMBuildIndirectBr(self.llbuilder, addr, num_dests as c_uint);
// `invoke` — the signature start (llfn/args/then params) is elided from
// this listing; these are its trailing parameters.
186 catch: BasicBlockRef,
187 bundle: Option<&OperandBundleDef>) -> ValueRef {
188 self.count_insn("invoke");
190 debug!("Invoke {:?} with args ({})",
193 .map(|&v| format!("{:?}", Value(v)))
194 .collect::<Vec<String>>()
// Bitcast any mismatched argument types before emitting the invoke.
197 let args = self.check_call("invoke", llfn, args);
// A null bundle pointer means "no operand bundle" on the FFI side.
198 let bundle = bundle.as_ref().map(|b| b.raw()).unwrap_or(ptr::null_mut());
201 llvm::LLVMRustBuildInvoke(self.llbuilder,
204 args.len() as c_uint,
// Build `unreachable`.
212 pub fn unreachable(&self) {
213 self.count_insn("unreachable");
215 llvm::LLVMBuildUnreachable(self.llbuilder);
// --- Arithmetic: add / sub / mul ---------------------------------------
// `nsw*`/`nuw*` variants carry LLVM's no-signed-wrap / no-unsigned-wrap
// flags; `*_fast` variants set the unsafe-algebra (fast-math) flag.
220 pub fn add(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
221 self.count_insn("add");
223 llvm::LLVMBuildAdd(self.llbuilder, lhs, rhs, noname())
227 pub fn nswadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
228 self.count_insn("nswadd");
230 llvm::LLVMBuildNSWAdd(self.llbuilder, lhs, rhs, noname())
234 pub fn nuwadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
235 self.count_insn("nuwadd");
237 llvm::LLVMBuildNUWAdd(self.llbuilder, lhs, rhs, noname())
241 pub fn fadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
242 self.count_insn("fadd");
244 llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname())
// Counted under the same "fadd" category as the non-fast variant.
248 pub fn fadd_fast(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
249 self.count_insn("fadd");
251 let instr = llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname());
252 llvm::LLVMRustSetHasUnsafeAlgebra(instr);
257 pub fn sub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
258 self.count_insn("sub");
260 llvm::LLVMBuildSub(self.llbuilder, lhs, rhs, noname())
264 pub fn nswsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
// NOTE(review): stat category is "nwsub" — looks like a typo for "nswsub";
// it only affects -Z count-llvm-insns bookkeeping, not codegen.
265 self.count_insn("nwsub");
267 llvm::LLVMBuildNSWSub(self.llbuilder, lhs, rhs, noname())
271 pub fn nuwsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
272 self.count_insn("nuwsub");
274 llvm::LLVMBuildNUWSub(self.llbuilder, lhs, rhs, noname())
277 pub fn fsub?
// --- Arithmetic: division / remainder ----------------------------------
// `*_fast` variants set the unsafe-algebra (fast-math) flag on the result.
332 pub fn udiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
333 self.count_insn("udiv");
335 llvm::LLVMBuildUDiv(self.llbuilder, lhs, rhs, noname())
339 pub fn sdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
340 self.count_insn("sdiv");
342 llvm::LLVMBuildSDiv(self.llbuilder, lhs, rhs, noname())
// `exact sdiv` — UB in LLVM if the division has a remainder.
346 pub fn exactsdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
347 self.count_insn("exactsdiv");
349 llvm::LLVMBuildExactSDiv(self.llbuilder, lhs, rhs, noname())
353 pub fn fdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
354 self.count_insn("fdiv");
356 llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname())
360 pub fn fdiv_fast(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
361 self.count_insn("fdiv");
363 let instr = llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname());
364 llvm::LLVMRustSetHasUnsafeAlgebra(instr);
369 pub fn urem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
370 self.count_insn("urem");
372 llvm::LLVMBuildURem(self.llbuilder, lhs, rhs, noname())
376 pub fn srem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
377 self.count_insn("srem");
379 llvm::LLVMBuildSRem(self.llbuilder, lhs, rhs, noname())
383 pub fn frem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
384 self.count_insn("frem");
386 llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname())
390 pub fn frem_fast(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
391 self.count_insn("frem");
393 let instr = llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname());
394 llvm::LLVMRustSetHasUnsafeAlgebra(instr);
// --- Shifts, bitwise ops, generic binop, unary ops ----------------------
399 pub fn shl(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
400 self.count_insn("shl");
402 llvm::LLVMBuildShl(self.llbuilder, lhs, rhs, noname())
// Logical shift right (zero-filling).
406 pub fn lshr(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
407 self.count_insn("lshr");
409 llvm::LLVMBuildLShr(self.llbuilder, lhs, rhs, noname())
// Arithmetic shift right (sign-extending).
413 pub fn ashr(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
414 self.count_insn("ashr");
416 llvm::LLVMBuildAShr(self.llbuilder, lhs, rhs, noname())
420 pub fn and(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
421 self.count_insn("and");
423 llvm::LLVMBuildAnd(self.llbuilder, lhs, rhs, noname())
427 pub fn or(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
428 self.count_insn("or");
430 llvm::LLVMBuildOr(self.llbuilder, lhs, rhs, noname())
434 pub fn xor(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
435 self.count_insn("xor");
437 llvm::LLVMBuildXor(self.llbuilder, lhs, rhs, noname())
// Generic binary op selected by an LLVM `Opcode` value.
441 pub fn binop(&self, op: Opcode, lhs: ValueRef, rhs: ValueRef)
443 self.count_insn("binop");
445 llvm::LLVMBuildBinOp(self.llbuilder, op, lhs, rhs, noname())
// Integer negation (and nsw/nuw variants below).
449 pub fn neg(&self, v: ValueRef) -> ValueRef {
450 self.count_insn("neg");
452 llvm::LLVMBuildNeg(self.llbuilder, v, noname())
456 pub fn nswneg(&self, v: ValueRef) -> ValueRef {
457 self.count_insn("nswneg");
459 llvm::LLVMBuildNSWNeg(self.llbuilder, v, noname())
463 pub fn nuwneg(&self, v: ValueRef) -> ValueRef {
464 self.count_insn("nuwneg");
466 llvm::LLVMBuildNUWNeg(self.llbuilder, v, noname())
469 pub fn fneg(&self, v: ValueRef) -> ValueRef {
470 self.count_insn("fneg");
472 llvm::LLVMBuildFNeg(self.llbuilder, v, noname())
// Bitwise complement.
476 pub fn not(&self, v: ValueRef) -> ValueRef {
477 self.count_insn("not");
479 llvm::LLVMBuildNot(self.llbuilder, v, noname())
// Emit an alloca in the function's *entry* block (LLVM folds entry-block
// allocas into the stack frame), regardless of current position.
483 pub fn alloca(&self, ty: Type, name: &str, align: Option<u32>) -> ValueRef {
// A temporary builder so `self`'s insertion point is left untouched.
484 let builder = Builder::with_ccx(self.ccx);
485 builder.position_at_start(unsafe {
486 llvm::LLVMGetFirstBasicBlock(self.llfn())
488 builder.dynamic_alloca(ty, name, align)
// Emit an alloca at the current insertion point (may be mid-function).
491 pub fn dynamic_alloca(&self, ty: Type, name: &str, align: Option<u32>) -> ValueRef {
492 self.count_insn("alloca");
// Empty name means "unnamed value"; otherwise panics on interior NULs.
494 let alloca = if name.is_empty() {
495 llvm::LLVMBuildAlloca(self.llbuilder, ty.to_ref(), noname())
497 let name = CString::new(name).unwrap();
498 llvm::LLVMBuildAlloca(self.llbuilder, ty.to_ref(),
// Override the default alignment only when explicitly requested.
501 if let Some(align) = align {
502 llvm::LLVMSetAlignment(alloca, align as c_uint);
// Emit a `free` for `ptr`.
508 pub fn free(&self, ptr: ValueRef) {
509 self.count_insn("free");
511 llvm::LLVMBuildFree(self.llbuilder, ptr);
// Plain load; alignment is set only when the caller supplies one.
515 pub fn load(&self, ptr: ValueRef, align: Option<u32>) -> ValueRef {
516 self.count_insn("load");
518 let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
519 if let Some(align) = align {
520 llvm::LLVMSetAlignment(load, align as c_uint);
// Load with the volatile flag set (never elided/reordered by LLVM).
526 pub fn volatile_load(&self, ptr: ValueRef) -> ValueRef {
527 self.count_insn("load.volatile");
529 let insn = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
530 llvm::LLVMSetVolatile(insn, llvm::True);
// Atomic load with the given ordering; alignment comes from the ABI
// preferred alignment of the pointee type.
535 pub fn atomic_load(&self, ptr: ValueRef, order: AtomicOrdering) -> ValueRef {
536 self.count_insn("load.atomic");
538 let ty = Type::from_ref(llvm::LLVMTypeOf(ptr));
539 let align = llalign_of_pref(self.ccx, ty.element_type());
540 llvm::LLVMRustBuildAtomicLoad(self.llbuilder, ptr, noname(), order,
// Load annotated with `!range` metadata asserting the value lies in
// [lo, hi) — lets LLVM optimize based on the known value range.
546 pub fn load_range_assert(&self, ptr: ValueRef, lo: u64,
547 hi: u64, signed: llvm::Bool,
548 align: Option<u32>) -> ValueRef {
549 let value = self.load(ptr, align);
552 let t = llvm::LLVMGetElementType(llvm::LLVMTypeOf(ptr));
553 let min = llvm::LLVMConstInt(t, lo, signed);
554 let max = llvm::LLVMConstInt(t, hi, signed);
558 llvm::LLVMSetMetadata(value, llvm::MD_range as c_uint,
559 llvm::LLVMMDNodeInContext(self.ccx.llcx(),
// Load annotated with `!nonnull` metadata (empty metadata node).
567 pub fn load_nonnull(&self, ptr: ValueRef, align: Option<u32>) -> ValueRef {
568 let value = self.load(ptr, align);
570 llvm::LLVMSetMetadata(value, llvm::MD_nonnull as c_uint,
571 llvm::LLVMMDNodeInContext(self.ccx.llcx(), ptr::null(), 0));
// Plain store; `check_store` bitcasts `ptr` if its pointee type does not
// match the stored value's type. Alignment set only when supplied.
577 pub fn store(&self, val: ValueRef, ptr: ValueRef, align: Option<u32>) -> ValueRef {
578 debug!("Store {:?} -> {:?}", Value(val), Value(ptr));
579 assert!(!self.llbuilder.is_null());
580 self.count_insn("store");
581 let ptr = self.check_store(val, ptr);
583 let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
584 if let Some(align) = align {
585 llvm::LLVMSetAlignment(store, align as c_uint);
// Store with the volatile flag set.
591 pub fn volatile_store(&self, val: ValueRef, ptr: ValueRef) -> ValueRef {
592 debug!("Store {:?} -> {:?}", Value(val), Value(ptr));
593 assert!(!self.llbuilder.is_null());
594 self.count_insn("store.volatile");
595 let ptr = self.check_store(val, ptr);
597 let insn = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
598 llvm::LLVMSetVolatile(insn, llvm::True);
// Atomic store; alignment is the ABI preferred alignment of the pointee.
603 pub fn atomic_store(&self, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) {
604 debug!("Store {:?} -> {:?}", Value(val), Value(ptr));
605 self.count_insn("store.atomic");
606 let ptr = self.check_store(val, ptr);
608 let ty = Type::from_ref(llvm::LLVMTypeOf(ptr));
609 let align = llalign_of_pref(self.ccx, ty.element_type());
610 llvm::LLVMRustBuildAtomicStore(self.llbuilder, val, ptr, order, align as c_uint);
// Plain getelementptr with explicit index values.
614 pub fn gep(&self, ptr: ValueRef, indices: &[ValueRef]) -> ValueRef {
615 self.count_insn("gep");
617 llvm::LLVMBuildGEP(self.llbuilder, ptr, indices.as_ptr(),
618 indices.len() as c_uint, noname())
622 // Simple wrapper around GEP that takes an array of ints and wraps them
// Integer-index GEP: uses a fixed-size stack array for up to 16 indices
// to avoid a heap allocation, falling back to a Vec otherwise.
625 pub fn gepi(&self, base: ValueRef, ixs: &[usize]) -> ValueRef {
626 // Small vector optimization. This should catch 100% of the cases that
629 let mut small_vec = [ C_i32(self.ccx, 0); 16 ];
630 for (small_vec_e, &ix) in small_vec.iter_mut().zip(ixs) {
631 *small_vec_e = C_i32(self.ccx, ix as i32);
633 self.inbounds_gep(base, &small_vec[..ixs.len()])
// Fallback path for more than 16 indices.
635 let v = ixs.iter().map(|i| C_i32(self.ccx, *i as i32)).collect::<Vec<ValueRef>>();
636 self.count_insn("gepi");
637 self.inbounds_gep(base, &v)
// GEP with the `inbounds` flag (UB if the result is out of bounds).
641 pub fn inbounds_gep(&self, ptr: ValueRef, indices: &[ValueRef]) -> ValueRef {
642 self.count_insn("inboundsgep");
644 llvm::LLVMBuildInBoundsGEP(
645 self.llbuilder, ptr, indices.as_ptr(), indices.len() as c_uint, noname())
// GEP to struct field `idx`.
649 pub fn struct_gep(&self, ptr: ValueRef, idx: usize) -> ValueRef {
650 self.count_insn("structgep");
652 llvm::LLVMBuildStructGEP(self.llbuilder, ptr, idx as c_uint, noname())
// Emit a global string constant; `_str` must be a NUL-terminated C string.
656 pub fn global_string(&self, _str: *const c_char) -> ValueRef {
657 self.count_insn("globalstring");
659 llvm::LLVMBuildGlobalString(self.llbuilder, _str, noname())
// As above, but yields a pointer to the first character.
663 pub fn global_string_ptr(&self, _str: *const c_char) -> ValueRef {
664 self.count_insn("globalstringptr");
666 llvm::LLVMBuildGlobalStringPtr(self.llbuilder, _str, noname())
// --- Casts: thin wrappers over the corresponding LLVMBuild* cast ops ----
671 pub fn trunc(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
672 self.count_insn("trunc");
674 llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty.to_ref(), noname())
678 pub fn zext(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
679 self.count_insn("zext");
681 llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty.to_ref(), noname())
685 pub fn sext(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
686 self.count_insn("sext");
688 llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty.to_ref(), noname())
692 pub fn fptoui(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
693 self.count_insn("fptoui");
695 llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty.to_ref(), noname())
699 pub fn fptosi(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
700 self.count_insn("fptosi");
702 llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty.to_ref(),noname())
706 pub fn uitofp(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
707 self.count_insn("uitofp");
709 llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty.to_ref(), noname())
713 pub fn sitofp(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
714 self.count_insn("sitofp");
716 llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty.to_ref(), noname())
720 pub fn fptrunc(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
721 self.count_insn("fptrunc");
723 llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty.to_ref(), noname())
727 pub fn fpext(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
728 self.count_insn("fpext");
730 llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty.to_ref(), noname())
734 pub fn ptrtoint(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
735 self.count_insn("ptrtoint");
737 llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty.to_ref(), noname())
741 pub fn inttoptr(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
742 self.count_insn("inttoptr");
744 llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty.to_ref(), noname())
748 pub fn bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
749 self.count_insn("bitcast");
751 llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
// The *_or_bitcast variants let LLVM pick ext/trunc vs. plain bitcast.
755 pub fn zext_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
756 self.count_insn("zextorbitcast");
758 llvm::LLVMBuildZExtOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
762 pub fn sext_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
763 self.count_insn("sextorbitcast");
765 llvm::LLVMBuildSExtOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
769 pub fn trunc_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
770 self.count_insn("truncorbitcast");
772 llvm::LLVMBuildTruncOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
// Generic cast selected by an LLVM `Opcode` value.
776 pub fn cast(&self, op: Opcode, val: ValueRef, dest_ty: Type) -> ValueRef {
777 self.count_insn("cast");
779 llvm::LLVMBuildCast(self.llbuilder, op, val, dest_ty.to_ref(), noname())
783 pub fn pointercast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
784 self.count_insn("pointercast");
786 llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty.to_ref(), noname())
// Integer cast choosing sext vs. zext based on `is_signed`.
790 pub fn intcast(&self, val: ValueRef, dest_ty: Type, is_signed: bool) -> ValueRef {
791 self.count_insn("intcast");
793 llvm::LLVMRustBuildIntCast(self.llbuilder, val, dest_ty.to_ref(), is_signed)
797 pub fn fpcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
798 self.count_insn("fpcast");
800 llvm::LLVMBuildFPCast(self.llbuilder, val, dest_ty.to_ref(), noname())
// Integer comparison with the given predicate.
806 pub fn icmp(&self, op: IntPredicate, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
807 self.count_insn("icmp");
809 llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
// Floating-point comparison with the given predicate.
813 pub fn fcmp(&self, op: RealPredicate, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
814 self.count_insn("fcmp");
816 llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
820 /* Miscellaneous instructions */
// A phi node with no incoming edges yet; edges are added via
// `add_incoming_to_phi` or `phi` below.
821 pub fn empty_phi(&self, ty: Type) -> ValueRef {
822 self.count_insn("emptyphi");
824 llvm::LLVMBuildPhi(self.llbuilder, ty.to_ref(), noname())
// Phi node with one incoming value per predecessor block; `vals` and
// `bbs` must be the same length (asserted).
828 pub fn phi(&self, ty: Type, vals: &[ValueRef], bbs: &[BasicBlockRef]) -> ValueRef {
829 assert_eq!(vals.len(), bbs.len());
830 let phi = self.empty_phi(ty);
831 self.count_insn("addincoming");
833 llvm::LLVMAddIncoming(phi, vals.as_ptr(),
835 vals.len() as c_uint);
// Attach a human-readable comment (with source-span info) to the emitted
// assembly; only active under `-Z asm-comments`.
840 pub fn add_span_comment(&self, sp: Span, text: &str) {
841 if self.ccx.sess().asm_comments() {
842 let s = format!("{} ({})",
844 self.ccx.sess().codemap().span_to_string(sp));
846 self.add_comment(&s);
// Emit `text` as a no-op inline-asm comment. `$` is stripped because it is
// special in LLVM asm strings; newlines get a "# " continuation prefix.
850 pub fn add_comment(&self, text: &str) {
851 if self.ccx.sess().asm_comments() {
852 let sanitized = text.replace("$", "");
853 let comment_text = format!("{} {}", "#",
854 sanitized.replace("\n", "\n\t# "));
855 self.count_insn("inlineasm");
856 let comment_text = CString::new(comment_text).unwrap();
// A void()->void inline-asm "function" carrying only the comment text.
858 llvm::LLVMConstInlineAsm(Type::func(&[], &Type::void(self.ccx)).to_ref(),
859 comment_text.as_ptr(), noname(), False,
862 self.call(asm, &[], None);
// Build a call to an inline-asm blob: `asm` is the template, `cons` the
// constraint string; both are NUL-terminated C strings.
866 pub fn inline_asm_call(&self, asm: *const c_char, cons: *const c_char,
867 inputs: &[ValueRef], output: Type,
868 volatile: bool, alignstack: bool,
869 dia: AsmDialect) -> ValueRef {
870 self.count_insn("inlineasm");
// Convert Rust bools to LLVM's Bool representation.
872 let volatile = if volatile { llvm::True }
873 else { llvm::False };
874 let alignstack = if alignstack { llvm::True }
875 else { llvm::False };
// The asm "function" type is derived from the input value types.
877 let argtys = inputs.iter().map(|v| {
878 debug!("Asm Input Type: {:?}", Value(*v));
880 }).collect::<Vec<_>>();
882 debug!("Asm Output Type: {:?}", output);
883 let fty = Type::func(&argtys[..], &output);
885 let v = llvm::LLVMRustInlineAsm(
886 fty.to_ref(), asm, cons, volatile, alignstack, dia);
887 self.call(v, inputs, None)
// Build a call to `llfn`; argument types are checked and bitcast to the
// callee's parameter types where they mismatch (see `check_call`).
891 pub fn call(&self, llfn: ValueRef, args: &[ValueRef],
892 bundle: Option<&OperandBundleDef>) -> ValueRef {
893 self.count_insn("call");
895 debug!("Call {:?} with args ({})",
898 .map(|&v| format!("{:?}", Value(v)))
899 .collect::<Vec<String>>()
902 let args = self.check_call("call", llfn, args);
// Null bundle pointer means "no operand bundle" on the FFI side.
903 let bundle = bundle.as_ref().map(|b| b.raw()).unwrap_or(ptr::null_mut());
906 llvm::LLVMRustBuildCall(self.llbuilder, llfn, args.as_ptr(),
907 args.len() as c_uint, bundle, noname())
// `select cond, then_val, else_val`.
911 pub fn select(&self, cond: ValueRef, then_val: ValueRef, else_val: ValueRef) -> ValueRef {
912 self.count_insn("select");
914 llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, noname())
// `va_arg` from a va_list value.
918 pub fn va_arg(&self, list: ValueRef, ty: Type) -> ValueRef {
919 self.count_insn("vaarg");
921 llvm::LLVMBuildVAArg(self.llbuilder, list, ty.to_ref(), noname())
925 pub fn extract_element(&self, vec: ValueRef, idx: ValueRef) -> ValueRef {
926 self.count_insn("extractelement");
928 llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, noname())
932 pub fn insert_element(&self, vec: ValueRef, elt: ValueRef, idx: ValueRef) -> ValueRef {
933 self.count_insn("insertelement");
935 llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, noname())
939 pub fn shuffle_vector(&self, v1: ValueRef, v2: ValueRef, mask: ValueRef) -> ValueRef {
940 self.count_insn("shufflevector");
942 llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, noname())
// Broadcast `elt` into all lanes: insert into lane 0 of an undef vector,
// then shuffle with an all-zeros mask.
946 pub fn vector_splat(&self, num_elts: usize, elt: ValueRef) -> ValueRef {
948 let elt_ty = val_ty(elt);
949 let undef = llvm::LLVMGetUndef(Type::vector(&elt_ty, num_elts as u64).to_ref());
950 let vec = self.insert_element(undef, elt, C_i32(self.ccx, 0));
951 let vec_i32_ty = Type::vector(&Type::i32(self.ccx), num_elts as u64);
952 self.shuffle_vector(vec, undef, C_null(vec_i32_ty))
// Extract field `idx` from an aggregate (struct/array) value.
956 pub fn extract_value(&self, agg_val: ValueRef, idx: usize) -> ValueRef {
957 self.count_insn("extractvalue");
959 llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, noname())
// Insert `elt` at field `idx` of an aggregate value.
963 pub fn insert_value(&self, agg_val: ValueRef, elt: ValueRef,
964 idx: usize) -> ValueRef {
965 self.count_insn("insertvalue");
967 llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint,
// Pointer null test (== null).
972 pub fn is_null(&self, val: ValueRef) -> ValueRef {
973 self.count_insn("isnull");
975 llvm::LLVMBuildIsNull(self.llbuilder, val, noname())
// Pointer non-null test (!= null).
979 pub fn is_not_null(&self, val: ValueRef) -> ValueRef {
980 self.count_insn("isnotnull");
982 llvm::LLVMBuildIsNotNull(self.llbuilder, val, noname())
// Pointer difference in elements (LLVM divides by element size).
986 pub fn ptrdiff(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
987 self.count_insn("ptrdiff");
989 llvm::LLVMBuildPtrDiff(self.llbuilder, lhs, rhs, noname())
// `trap` — its `pub fn trap(&self)` signature line is elided from this
// listing. Looks up the `llvm.trap` intrinsic in the current module by
// walking block -> function -> module, then calls it with no arguments.
995 let bb: BasicBlockRef = llvm::LLVMGetInsertBlock(self.llbuilder);
996 let fn_: ValueRef = llvm::LLVMGetBasicBlockParent(bb);
997 let m: ModuleRef = llvm::LLVMGetGlobalParent(fn_);
// Embedded NUL terminator makes this usable directly as a C string.
998 let p = "llvm.trap\0".as_ptr();
999 let t: ValueRef = llvm::LLVMGetNamedFunction(m, p as *const _);
// The intrinsic must already be declared in the module.
1000 assert!((t as isize != 0));
1001 let args: &[ValueRef] = &[];
1002 self.count_insn("trap");
1003 llvm::LLVMRustBuildCall(self.llbuilder, t,
1004 args.as_ptr(), args.len() as c_uint,
// --- Exception-handling constructs --------------------------------------
// Landing pad for DWARF-style unwinding; `pers_fn` is the personality
// function, `llfn` the enclosing function.
1010 pub fn landing_pad(&self, ty: Type, pers_fn: ValueRef,
1012 llfn: ValueRef) -> ValueRef {
1013 self.count_insn("landingpad");
1015 llvm::LLVMRustBuildLandingPad(self.llbuilder, ty.to_ref(), pers_fn,
1016 num_clauses as c_uint, noname(), llfn)
// Add a catch/filter clause to an existing landing pad.
1020 pub fn add_clause(&self, landing_pad: ValueRef, clause: ValueRef) {
1022 llvm::LLVMAddClause(landing_pad, clause);
// Mark a landing pad as a cleanup (runs on any unwind path).
1026 pub fn set_cleanup(&self, landing_pad: ValueRef) {
1027 self.count_insn("setcleanup");
1029 llvm::LLVMSetCleanup(landing_pad, llvm::True);
// Resume propagation of an in-flight exception.
1033 pub fn resume(&self, exn: ValueRef) -> ValueRef {
1034 self.count_insn("resume");
1036 llvm::LLVMBuildResume(self.llbuilder, exn)
// MSVC-style cleanuppad; `None` parent maps to a null pointer over FFI.
1040 pub fn cleanup_pad(&self,
1041 parent: Option<ValueRef>,
1042 args: &[ValueRef]) -> ValueRef {
1043 self.count_insn("cleanuppad");
1044 let parent = parent.unwrap_or(ptr::null_mut());
1045 let name = CString::new("cleanuppad").unwrap();
1047 llvm::LLVMRustBuildCleanupPad(self.llbuilder,
1049 args.len() as c_uint,
// Null return signals the linked LLVM lacks cleanuppad support.
1053 assert!(!ret.is_null(), "LLVM does not have support for cleanuppad");
// cleanupret; `None` unwind target maps to a null pointer over FFI.
1057 pub fn cleanup_ret(&self, cleanup: ValueRef,
1058 unwind: Option<BasicBlockRef>) -> ValueRef {
1059 self.count_insn("cleanupret");
1060 let unwind = unwind.unwrap_or(ptr::null_mut());
1062 llvm::LLVMRustBuildCleanupRet(self.llbuilder, cleanup, unwind)
1064 assert!(!ret.is_null(), "LLVM does not have support for cleanupret");
// MSVC-style catchpad (parent parameter line elided from this listing).
1068 pub fn catch_pad(&self,
1070 args: &[ValueRef]) -> ValueRef {
1071 self.count_insn("catchpad");
1072 let name = CString::new("catchpad").unwrap();
1074 llvm::LLVMRustBuildCatchPad(self.llbuilder, parent,
1075 args.len() as c_uint, args.as_ptr(),
1078 assert!(!ret.is_null(), "LLVM does not have support for catchpad");
// catchret to the normal continuation block.
1082 pub fn catch_ret(&self, pad: ValueRef, unwind: BasicBlockRef) -> ValueRef {
1083 self.count_insn("catchret");
1085 llvm::LLVMRustBuildCatchRet(self.llbuilder, pad, unwind)
1087 assert!(!ret.is_null(), "LLVM does not have support for catchret");
// catchswitch; handlers are added afterwards via `add_handler`.
1091 pub fn catch_switch(&self,
1092 parent: Option<ValueRef>,
1093 unwind: Option<BasicBlockRef>,
1094 num_handlers: usize) -> ValueRef {
1095 self.count_insn("catchswitch");
1096 let parent = parent.unwrap_or(ptr::null_mut());
1097 let unwind = unwind.unwrap_or(ptr::null_mut());
1098 let name = CString::new("catchswitch").unwrap();
1100 llvm::LLVMRustBuildCatchSwitch(self.llbuilder, parent, unwind,
1101 num_handlers as c_uint,
1104 assert!(!ret.is_null(), "LLVM does not have support for catchswitch");
// Register a handler block on a catchswitch.
1108 pub fn add_handler(&self, catch_switch: ValueRef, handler: BasicBlockRef) {
1110 llvm::LLVMRustAddHandler(catch_switch, handler);
// Set the personality function on the current function.
1114 pub fn set_personality_fn(&self, personality: ValueRef) {
1116 llvm::LLVMSetPersonalityFn(self.llfn(), personality);
1120 // Atomic Operations
// cmpxchg with separate success/failure orderings; `weak` allows spurious
// failure (maps to LLVM's weak cmpxchg form).
1121 pub fn atomic_cmpxchg(&self, dst: ValueRef,
1122 cmp: ValueRef, src: ValueRef,
1123 order: AtomicOrdering,
1124 failure_order: AtomicOrdering,
1125 weak: llvm::Bool) -> ValueRef {
1127 llvm::LLVMRustBuildAtomicCmpXchg(self.llbuilder, dst, cmp, src,
1128 order, failure_order, weak)
// Atomic read-modify-write; final `False` is the single-thread flag, so
// the operation is cross-thread.
1131 pub fn atomic_rmw(&self, op: AtomicRmwBinOp,
1132 dst: ValueRef, src: ValueRef,
1133 order: AtomicOrdering) -> ValueRef {
1135 llvm::LLVMBuildAtomicRMW(self.llbuilder, op, dst, src, order, False)
// Memory fence with the given ordering and synchronization scope.
1139 pub fn atomic_fence(&self, order: AtomicOrdering, scope: SynchronizationScope) {
1141 llvm::LLVMRustBuildAtomicFence(self.llbuilder, order, scope);
// Add a case to a switch; silently ignored when the switch value is
// `undef` (can happen for dead code).
1145 pub fn add_case(&self, s: ValueRef, on_val: ValueRef, dest: BasicBlockRef) {
1147 if llvm::LLVMIsUndef(s) == llvm::True { return; }
1148 llvm::LLVMAddCase(s, on_val, dest)
// Add one incoming (value, block) edge to a phi; ignored if the phi
// itself is `undef`.
1152 pub fn add_incoming_to_phi(&self, phi: ValueRef, val: ValueRef, bb: BasicBlockRef) {
1154 if llvm::LLVMIsUndef(phi) == llvm::True { return; }
1155 llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
// Tag a load with `!invariant.load` metadata (empty node): the loaded
// location never changes, enabling extra LLVM optimizations.
1159 pub fn set_invariant_load(&self, load: ValueRef) {
1161 llvm::LLVMSetMetadata(load, llvm::MD_invariant_load as c_uint,
1162 llvm::LLVMMDNodeInContext(self.ccx.llcx(), ptr::null(), 0));
1166 /// Returns the ptr value that should be used for storing `val`.
// If the destination pointer's pointee type does not match `val`'s type,
// the pointer (not the value) is bitcast so the store type-checks.
1167 fn check_store<'b>(&self,
1169 ptr: ValueRef) -> ValueRef {
1170 let dest_ptr_ty = val_ty(ptr);
1171 let stored_ty = val_ty(val);
1172 let stored_ptr_ty = stored_ty.ptr_to();
// The destination must at least be some pointer type.
1174 assert_eq!(dest_ptr_ty.kind(), llvm::TypeKind::Pointer);
// Fast path: types already agree (return of `ptr` is in elided lines).
1176 if dest_ptr_ty == stored_ptr_ty {
1179 debug!("Type mismatch in store. \
1180 Expected {:?}, got {:?}; inserting bitcast",
1181 dest_ptr_ty, stored_ptr_ty);
1182 self.bitcast(ptr, stored_ptr_ty)
1186 /// Returns the args that should be used for a call to `llfn`.
// Borrows `args` unchanged when every argument type already matches the
// callee's parameter types; otherwise returns an owned vector with
// mismatched arguments bitcast to the expected types. `typ` names the
// calling builder method ("call"/"invoke") for the assertion message.
// NOTE(review): the body continues past the end of this listing.
1187 fn check_call<'b>(&self,
1190 args: &'b [ValueRef]) -> Cow<'b, [ValueRef]> {
1191 let mut fn_ty = val_ty(llfn);
1192 // Strip off pointers
1193 while fn_ty.kind() == llvm::TypeKind::Pointer {
1194 fn_ty = fn_ty.element_type();
// After stripping pointers we must be left with a function type.
1197 assert!(fn_ty.kind() == llvm::TypeKind::Function,
1198 "builder::{} not passed a function, but {:?}", typ, fn_ty);
1200 let param_tys = fn_ty.func_params();
// Cheap pre-check so the common all-matching case avoids allocation.
1202 let all_args_match = param_tys.iter()
1203 .zip(args.iter().map(|&v| val_ty(v)))
1204 .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty);
1207 return Cow::Borrowed(args);
// Slow path: rebuild the argument list, bitcasting mismatches.
1210 let casted_args: Vec<_> = param_tys.into_iter()
1213 .map(|(i, (expected_ty, &actual_val))| {
1214 let actual_ty = val_ty(actual_val);
1215 if expected_ty != actual_ty {
1216 debug!("Type mismatch in function call of {:?}. \
1217 Expected {:?} for param {}, got {:?}; injecting bitcast",
1219 expected_ty, i, actual_ty);
1220 self.bitcast(actual_val, expected_ty)
1227 return Cow::Owned(casted_args);