// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
11 #![allow(dead_code)] // FFI wrappers
12 #![allow(non_snake_case_functions)]
15 use lib::llvm::{CallConv, AtomicBinOp, AtomicOrdering, AsmDialect};
16 use lib::llvm::{Opcode, IntPredicate, RealPredicate};
17 use lib::llvm::{ValueRef, BasicBlockRef};
19 use middle::trans::common::*;
20 use syntax::codemap::Span;
22 use middle::trans::builder::Builder;
23 use middle::trans::type_::Type;
25 use libc::{c_uint, c_ulonglong, c_char};
27 pub fn terminate(cx: &Block, _: &str) {
28 debug!("terminate({})", cx.to_str());
29 cx.terminated.set(true);
32 pub fn check_not_terminated(cx: &Block) {
33 if cx.terminated.get() {
34 fail!("already terminated!");
38 pub fn B<'a>(cx: &'a Block) -> Builder<'a> {
39 let b = cx.fcx.ccx.builder();
40 b.position_at_end(cx.llbb);
// The difference between a block being unreachable and being terminated is
// somewhat obscure, and has to do with error checking. When a block is
// terminated, we're saying that trying to add any further statements in the
// block is an error. On the other hand, if something is unreachable, that
// means that the block was terminated in some way that we don't want to check
// for (fail/break/return statements, call to diverging functions, etc), and
// further instructions to the block should simply be ignored.
52 pub fn RetVoid(cx: &Block) {
53 if cx.unreachable.get() { return; }
54 check_not_terminated(cx);
55 terminate(cx, "RetVoid");
59 pub fn Ret(cx: &Block, v: ValueRef) {
60 if cx.unreachable.get() { return; }
61 check_not_terminated(cx);
66 pub fn AggregateRet(cx: &Block, ret_vals: &[ValueRef]) {
67 if cx.unreachable.get() { return; }
68 check_not_terminated(cx);
69 terminate(cx, "AggregateRet");
70 B(cx).aggregate_ret(ret_vals);
73 pub fn Br(cx: &Block, dest: BasicBlockRef) {
74 if cx.unreachable.get() { return; }
75 check_not_terminated(cx);
80 pub fn CondBr(cx: &Block,
83 else_: BasicBlockRef) {
84 if cx.unreachable.get() { return; }
85 check_not_terminated(cx);
86 terminate(cx, "CondBr");
87 B(cx).cond_br(if_, then, else_);
90 pub fn Switch(cx: &Block, v: ValueRef, else_: BasicBlockRef, num_cases: uint)
92 if cx.unreachable.get() { return _Undef(v); }
93 check_not_terminated(cx);
94 terminate(cx, "Switch");
95 B(cx).switch(v, else_, num_cases)
98 pub fn AddCase(s: ValueRef, on_val: ValueRef, dest: BasicBlockRef) {
100 if llvm::LLVMIsUndef(s) == lib::llvm::True { return; }
101 llvm::LLVMAddCase(s, on_val, dest);
105 pub fn IndirectBr(cx: &Block, addr: ValueRef, num_dests: uint) {
106 if cx.unreachable.get() { return; }
107 check_not_terminated(cx);
108 terminate(cx, "IndirectBr");
109 B(cx).indirect_br(addr, num_dests);
112 pub fn Invoke(cx: &Block,
116 catch: BasicBlockRef,
117 attributes: &[(uint, u64)])
119 if cx.unreachable.get() {
120 return C_null(Type::i8(cx.ccx()));
122 check_not_terminated(cx);
123 terminate(cx, "Invoke");
124 debug!("Invoke({} with arguments ({}))",
126 args.iter().map(|a| cx.val_to_str(*a)).collect::<Vec<String>>().connect(", "));
127 B(cx).invoke(fn_, args, then, catch, attributes)
130 pub fn Unreachable(cx: &Block) {
131 if cx.unreachable.get() {
134 cx.unreachable.set(true);
135 if !cx.terminated.get() {
140 pub fn _Undef(val: ValueRef) -> ValueRef {
142 return llvm::LLVMGetUndef(val_ty(val).to_ref());
147 pub fn Add(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
148 if cx.unreachable.get() { return _Undef(lhs); }
152 pub fn NSWAdd(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
153 if cx.unreachable.get() { return _Undef(lhs); }
154 B(cx).nswadd(lhs, rhs)
157 pub fn NUWAdd(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
158 if cx.unreachable.get() { return _Undef(lhs); }
159 B(cx).nuwadd(lhs, rhs)
162 pub fn FAdd(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
163 if cx.unreachable.get() { return _Undef(lhs); }
167 pub fn Sub(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
168 if cx.unreachable.get() { return _Undef(lhs); }
172 pub fn NSWSub(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
173 if cx.unreachable.get() { return _Undef(lhs); }
174 B(cx).nswsub(lhs, rhs)
177 pub fn NUWSub(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
178 if cx.unreachable.get() { return _Undef(lhs); }
179 B(cx).nuwsub(lhs, rhs)
182 pub fn FSub(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
183 if cx.unreachable.get() { return _Undef(lhs); }
187 pub fn Mul(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
188 if cx.unreachable.get() { return _Undef(lhs); }
192 pub fn NSWMul(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
193 if cx.unreachable.get() { return _Undef(lhs); }
194 B(cx).nswmul(lhs, rhs)
197 pub fn NUWMul(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
198 if cx.unreachable.get() { return _Undef(lhs); }
199 B(cx).nuwmul(lhs, rhs)
202 pub fn FMul(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
203 if cx.unreachable.get() { return _Undef(lhs); }
207 pub fn UDiv(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
208 if cx.unreachable.get() { return _Undef(lhs); }
212 pub fn SDiv(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
213 if cx.unreachable.get() { return _Undef(lhs); }
217 pub fn ExactSDiv(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
218 if cx.unreachable.get() { return _Undef(lhs); }
219 B(cx).exactsdiv(lhs, rhs)
222 pub fn FDiv(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
223 if cx.unreachable.get() { return _Undef(lhs); }
227 pub fn URem(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
228 if cx.unreachable.get() { return _Undef(lhs); }
232 pub fn SRem(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
233 if cx.unreachable.get() { return _Undef(lhs); }
237 pub fn FRem(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
238 if cx.unreachable.get() { return _Undef(lhs); }
242 pub fn Shl(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
243 if cx.unreachable.get() { return _Undef(lhs); }
247 pub fn LShr(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
248 if cx.unreachable.get() { return _Undef(lhs); }
252 pub fn AShr(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
253 if cx.unreachable.get() { return _Undef(lhs); }
257 pub fn And(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
258 if cx.unreachable.get() { return _Undef(lhs); }
262 pub fn Or(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
263 if cx.unreachable.get() { return _Undef(lhs); }
267 pub fn Xor(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
268 if cx.unreachable.get() { return _Undef(lhs); }
272 pub fn BinOp(cx: &Block, op: Opcode, lhs: ValueRef, rhs: ValueRef)
274 if cx.unreachable.get() { return _Undef(lhs); }
275 B(cx).binop(op, lhs, rhs)
278 pub fn Neg(cx: &Block, v: ValueRef) -> ValueRef {
279 if cx.unreachable.get() { return _Undef(v); }
283 pub fn NSWNeg(cx: &Block, v: ValueRef) -> ValueRef {
284 if cx.unreachable.get() { return _Undef(v); }
288 pub fn NUWNeg(cx: &Block, v: ValueRef) -> ValueRef {
289 if cx.unreachable.get() { return _Undef(v); }
292 pub fn FNeg(cx: &Block, v: ValueRef) -> ValueRef {
293 if cx.unreachable.get() { return _Undef(v); }
297 pub fn Not(cx: &Block, v: ValueRef) -> ValueRef {
298 if cx.unreachable.get() { return _Undef(v); }
303 pub fn Malloc(cx: &Block, ty: Type) -> ValueRef {
305 if cx.unreachable.get() {
306 return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
312 pub fn ArrayMalloc(cx: &Block, ty: Type, val: ValueRef) -> ValueRef {
314 if cx.unreachable.get() {
315 return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
317 B(cx).array_malloc(ty, val)
321 pub fn Alloca(cx: &Block, ty: Type, name: &str) -> ValueRef {
323 if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.ptr_to().to_ref()); }
324 AllocaFcx(cx.fcx, ty, name)
328 pub fn AllocaFcx(fcx: &FunctionContext, ty: Type, name: &str) -> ValueRef {
329 let b = fcx.ccx.builder();
330 b.position_before(fcx.alloca_insert_pt.get().unwrap());
334 pub fn ArrayAlloca(cx: &Block, ty: Type, val: ValueRef) -> ValueRef {
336 if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.ptr_to().to_ref()); }
337 let b = cx.fcx.ccx.builder();
338 b.position_before(cx.fcx.alloca_insert_pt.get().unwrap());
339 b.array_alloca(ty, val)
343 pub fn Free(cx: &Block, pointer_val: ValueRef) {
344 if cx.unreachable.get() { return; }
345 B(cx).free(pointer_val)
348 pub fn Load(cx: &Block, pointer_val: ValueRef) -> ValueRef {
350 let ccx = cx.fcx.ccx;
351 if cx.unreachable.get() {
352 let ty = val_ty(pointer_val);
353 let eltty = if ty.kind() == lib::llvm::Array {
358 return llvm::LLVMGetUndef(eltty.to_ref());
360 B(cx).load(pointer_val)
364 pub fn VolatileLoad(cx: &Block, pointer_val: ValueRef) -> ValueRef {
366 if cx.unreachable.get() {
367 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
369 B(cx).volatile_load(pointer_val)
373 pub fn AtomicLoad(cx: &Block, pointer_val: ValueRef, order: AtomicOrdering) -> ValueRef {
375 let ccx = cx.fcx.ccx;
376 if cx.unreachable.get() {
377 return llvm::LLVMGetUndef(ccx.int_type.to_ref());
379 B(cx).atomic_load(pointer_val, order)
384 pub fn LoadRangeAssert(cx: &Block, pointer_val: ValueRef, lo: c_ulonglong,
385 hi: c_ulonglong, signed: lib::llvm::Bool) -> ValueRef {
386 if cx.unreachable.get() {
387 let ccx = cx.fcx.ccx;
388 let ty = val_ty(pointer_val);
389 let eltty = if ty.kind() == lib::llvm::Array {
395 llvm::LLVMGetUndef(eltty.to_ref())
398 B(cx).load_range_assert(pointer_val, lo, hi, signed)
402 pub fn Store(cx: &Block, val: ValueRef, ptr: ValueRef) {
403 if cx.unreachable.get() { return; }
404 B(cx).store(val, ptr)
407 pub fn VolatileStore(cx: &Block, val: ValueRef, ptr: ValueRef) {
408 if cx.unreachable.get() { return; }
409 B(cx).volatile_store(val, ptr)
412 pub fn AtomicStore(cx: &Block, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) {
413 if cx.unreachable.get() { return; }
414 B(cx).atomic_store(val, ptr, order)
417 pub fn GEP(cx: &Block, pointer: ValueRef, indices: &[ValueRef]) -> ValueRef {
419 if cx.unreachable.get() {
420 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
422 B(cx).gep(pointer, indices)
426 // Simple wrapper around GEP that takes an array of ints and wraps them
429 pub fn GEPi(cx: &Block, base: ValueRef, ixs: &[uint]) -> ValueRef {
431 if cx.unreachable.get() {
432 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
434 B(cx).gepi(base, ixs)
438 pub fn InBoundsGEP(cx: &Block, pointer: ValueRef, indices: &[ValueRef]) -> ValueRef {
440 if cx.unreachable.get() {
441 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
443 B(cx).inbounds_gep(pointer, indices)
447 pub fn StructGEP(cx: &Block, pointer: ValueRef, idx: uint) -> ValueRef {
449 if cx.unreachable.get() {
450 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
452 B(cx).struct_gep(pointer, idx)
456 pub fn GlobalString(cx: &Block, _str: *c_char) -> ValueRef {
458 if cx.unreachable.get() {
459 return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
461 B(cx).global_string(_str)
465 pub fn GlobalStringPtr(cx: &Block, _str: *c_char) -> ValueRef {
467 if cx.unreachable.get() {
468 return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
470 B(cx).global_string_ptr(_str)
475 pub fn Trunc(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
477 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
478 B(cx).trunc(val, dest_ty)
482 pub fn ZExt(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
484 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
485 B(cx).zext(val, dest_ty)
489 pub fn SExt(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
491 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
492 B(cx).sext(val, dest_ty)
496 pub fn FPToUI(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
498 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
499 B(cx).fptoui(val, dest_ty)
503 pub fn FPToSI(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
505 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
506 B(cx).fptosi(val, dest_ty)
510 pub fn UIToFP(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
512 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
513 B(cx).uitofp(val, dest_ty)
517 pub fn SIToFP(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
519 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
520 B(cx).sitofp(val, dest_ty)
524 pub fn FPTrunc(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
526 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
527 B(cx).fptrunc(val, dest_ty)
531 pub fn FPExt(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
533 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
534 B(cx).fpext(val, dest_ty)
538 pub fn PtrToInt(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
540 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
541 B(cx).ptrtoint(val, dest_ty)
545 pub fn IntToPtr(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
547 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
548 B(cx).inttoptr(val, dest_ty)
552 pub fn BitCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
554 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
555 B(cx).bitcast(val, dest_ty)
559 pub fn ZExtOrBitCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
561 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
562 B(cx).zext_or_bitcast(val, dest_ty)
566 pub fn SExtOrBitCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
568 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
569 B(cx).sext_or_bitcast(val, dest_ty)
573 pub fn TruncOrBitCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
575 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
576 B(cx).trunc_or_bitcast(val, dest_ty)
580 pub fn Cast(cx: &Block, op: Opcode, val: ValueRef, dest_ty: Type, _: *u8)
583 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
584 B(cx).cast(op, val, dest_ty)
588 pub fn PointerCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
590 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
591 B(cx).pointercast(val, dest_ty)
595 pub fn IntCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
597 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
598 B(cx).intcast(val, dest_ty)
602 pub fn FPCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
604 if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
605 B(cx).fpcast(val, dest_ty)
611 pub fn ICmp(cx: &Block, op: IntPredicate, lhs: ValueRef, rhs: ValueRef)
614 if cx.unreachable.get() {
615 return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
617 B(cx).icmp(op, lhs, rhs)
621 pub fn FCmp(cx: &Block, op: RealPredicate, lhs: ValueRef, rhs: ValueRef)
624 if cx.unreachable.get() {
625 return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
627 B(cx).fcmp(op, lhs, rhs)
631 /* Miscellaneous instructions */
632 pub fn EmptyPhi(cx: &Block, ty: Type) -> ValueRef {
634 if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
639 pub fn Phi(cx: &Block, ty: Type, vals: &[ValueRef], bbs: &[BasicBlockRef]) -> ValueRef {
641 if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
642 B(cx).phi(ty, vals, bbs)
646 pub fn AddIncomingToPhi(phi: ValueRef, val: ValueRef, bb: BasicBlockRef) {
648 if llvm::LLVMIsUndef(phi) == lib::llvm::True { return; }
649 llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
653 pub fn _UndefReturn(cx: &Block, fn_: ValueRef) -> ValueRef {
655 let ccx = cx.fcx.ccx;
656 let ty = val_ty(fn_);
657 let retty = if ty.kind() == lib::llvm::Integer {
662 B(cx).count_insn("ret_undef");
663 llvm::LLVMGetUndef(retty.to_ref())
667 pub fn add_span_comment(cx: &Block, sp: Span, text: &str) {
668 B(cx).add_span_comment(sp, text)
671 pub fn add_comment(cx: &Block, text: &str) {
672 B(cx).add_comment(text)
675 pub fn InlineAsmCall(cx: &Block, asm: *c_char, cons: *c_char,
676 inputs: &[ValueRef], output: Type,
677 volatile: bool, alignstack: bool,
678 dia: AsmDialect) -> ValueRef {
679 B(cx).inline_asm_call(asm, cons, inputs, output, volatile, alignstack, dia)
682 pub fn Call(cx: &Block, fn_: ValueRef, args: &[ValueRef],
683 attributes: &[(uint, u64)]) -> ValueRef {
684 if cx.unreachable.get() { return _UndefReturn(cx, fn_); }
685 B(cx).call(fn_, args, attributes)
688 pub fn CallWithConv(cx: &Block, fn_: ValueRef, args: &[ValueRef], conv: CallConv,
689 attributes: &[(uint, u64)]) -> ValueRef {
690 if cx.unreachable.get() { return _UndefReturn(cx, fn_); }
691 B(cx).call_with_conv(fn_, args, conv, attributes)
694 pub fn AtomicFence(cx: &Block, order: AtomicOrdering) {
695 if cx.unreachable.get() { return; }
696 B(cx).atomic_fence(order)
699 pub fn Select(cx: &Block, if_: ValueRef, then: ValueRef, else_: ValueRef) -> ValueRef {
700 if cx.unreachable.get() { return _Undef(then); }
701 B(cx).select(if_, then, else_)
704 pub fn VAArg(cx: &Block, list: ValueRef, ty: Type) -> ValueRef {
706 if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
707 B(cx).va_arg(list, ty)
711 pub fn ExtractElement(cx: &Block, vec_val: ValueRef, index: ValueRef) -> ValueRef {
713 if cx.unreachable.get() {
714 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
716 B(cx).extract_element(vec_val, index)
720 pub fn InsertElement(cx: &Block, vec_val: ValueRef, elt_val: ValueRef,
721 index: ValueRef) -> ValueRef {
723 if cx.unreachable.get() {
724 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
726 B(cx).insert_element(vec_val, elt_val, index)
730 pub fn ShuffleVector(cx: &Block, v1: ValueRef, v2: ValueRef,
731 mask: ValueRef) -> ValueRef {
733 if cx.unreachable.get() {
734 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
736 B(cx).shuffle_vector(v1, v2, mask)
740 pub fn VectorSplat(cx: &Block, num_elts: uint, elt_val: ValueRef) -> ValueRef {
742 if cx.unreachable.get() {
743 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
745 B(cx).vector_splat(num_elts, elt_val)
749 pub fn ExtractValue(cx: &Block, agg_val: ValueRef, index: uint) -> ValueRef {
751 if cx.unreachable.get() {
752 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
754 B(cx).extract_value(agg_val, index)
758 pub fn InsertValue(cx: &Block, agg_val: ValueRef, elt_val: ValueRef, index: uint) -> ValueRef {
760 if cx.unreachable.get() {
761 return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
763 B(cx).insert_value(agg_val, elt_val, index)
767 pub fn IsNull(cx: &Block, val: ValueRef) -> ValueRef {
769 if cx.unreachable.get() {
770 return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
776 pub fn IsNotNull(cx: &Block, val: ValueRef) -> ValueRef {
778 if cx.unreachable.get() {
779 return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
781 B(cx).is_not_null(val)
785 pub fn PtrDiff(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
787 let ccx = cx.fcx.ccx;
788 if cx.unreachable.get() { return llvm::LLVMGetUndef(ccx.int_type.to_ref()); }
789 B(cx).ptrdiff(lhs, rhs)
793 pub fn Trap(cx: &Block) {
794 if cx.unreachable.get() { return; }
798 pub fn LandingPad(cx: &Block, ty: Type, pers_fn: ValueRef,
799 num_clauses: uint) -> ValueRef {
800 check_not_terminated(cx);
801 assert!(!cx.unreachable.get());
802 B(cx).landing_pad(ty, pers_fn, num_clauses)
805 pub fn SetCleanup(cx: &Block, landing_pad: ValueRef) {
806 B(cx).set_cleanup(landing_pad)
809 pub fn Resume(cx: &Block, exn: ValueRef) -> ValueRef {
810 check_not_terminated(cx);
811 terminate(cx, "Resume");
816 pub fn AtomicCmpXchg(cx: &Block, dst: ValueRef,
817 cmp: ValueRef, src: ValueRef,
818 order: AtomicOrdering,
819 failure_order: AtomicOrdering) -> ValueRef {
820 B(cx).atomic_cmpxchg(dst, cmp, src, order, failure_order)
822 pub fn AtomicRMW(cx: &Block, op: AtomicBinOp,
823 dst: ValueRef, src: ValueRef,
824 order: AtomicOrdering) -> ValueRef {
825 B(cx).atomic_rmw(op, dst, src, order)