// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![allow(dead_code)] // FFI wrappers

use llvm;
use llvm::{AtomicRmwBinOp, AtomicOrdering, SynchronizationScope, AsmDialect};
use llvm::{Opcode, IntPredicate, RealPredicate, False, OperandBundleDef};
use llvm::{ValueRef, BasicBlockRef, BuilderRef};
use common::*;
use type_::Type;
use value::Value;
use libc::{c_uint, c_char};
use rustc::ty::TyCtxt;
use rustc::ty::layout::{Align, Size};
use rustc::session::{config, Session};

use std::borrow::Cow;
use std::ffi::CString;
use std::ops::Range;
use std::ptr;
use std::ptr::NonNull;
use syntax_pos::Span;

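// A thin RAII wrapper over LLVM's `BuilderRef`: each `Builder` owns an LLVM
// IR builder positioned somewhere inside the function it was created for,
// and the `Drop` impl below disposes of the underlying builder handle.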
// All Builders must have an llfn associated with them
#[must_use]
pub struct Builder<'a, 'tcx: 'a> {
    pub llbuilder: BuilderRef,
    pub cx: &'a CodegenCx<'a, 'tcx>,
}

impl<'a, 'tcx> Drop for Builder<'a, 'tcx> {
    fn drop(&mut self) {
        unsafe {
            llvm::LLVMDisposeBuilder(self.llbuilder);
        }
    }
}

// A cheap way to get a zero-length C string: hand out a pointer to a static
// NUL byte instead of allocating a fresh CString for "" on every call.
fn noname() -> *const c_char {
    static CNULL: c_char = 0;
    &CNULL
}

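// Flags that adjust how `store_with_flags` lowers a store: VOLATILE marks the
// access volatile, NONTEMPORAL attaches `!nontemporal` metadata, and UNALIGNED
// forces an alignment of 1 on the access.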
bitflags! {
    pub struct MemFlags: u8 {
        const VOLATILE = 1 << 0;
        const NONTEMPORAL = 1 << 1;
        const UNALIGNED = 1 << 2;
    }
}

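// Typical use, sketched for illustration only (the surrounding codegen code is
// what actually drives this API; `cx`, `llfn`, `lhs` and `rhs` here are assumed
// to come from the caller):
//
//     let bx = Builder::new_block(cx, llfn, "entry");
//     let sum = bx.add(lhs, rhs);
//     bx.ret(sum);
//
// Each helper below emits one LLVM instruction at the builder's current
// insertion point and, when enabled, bumps the -Z count-llvm-insns statistics.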
impl<'a, 'tcx> Builder<'a, 'tcx> {
    pub fn new_block<'b>(cx: &'a CodegenCx<'a, 'tcx>, llfn: ValueRef, name: &'b str) -> Self {
        let bx = Builder::with_cx(cx);
        let llbb = unsafe {
            let name = CString::new(name).unwrap();
            llvm::LLVMAppendBasicBlockInContext(
                cx.llcx,
                llfn,
                name.as_ptr()
            )
        };
        bx.position_at_end(llbb);
        bx
    }

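    /// Creates a builder that is not yet positioned at any basic block;
    /// callers must use one of the `position_*` methods before emitting
    /// instructions.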
    pub fn with_cx(cx: &'a CodegenCx<'a, 'tcx>) -> Self {
        // Create a fresh builder from the crate context.
        let llbuilder = unsafe {
            llvm::LLVMCreateBuilderInContext(cx.llcx)
        };
        Builder {
            llbuilder,
            cx,
        }
    }

    pub fn build_sibling_block<'b>(&self, name: &'b str) -> Builder<'a, 'tcx> {
        Builder::new_block(self.cx, self.llfn(), name)
    }

    pub fn sess(&self) -> &Session {
        self.cx.sess()
    }

    pub fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {
        self.cx.tcx
    }

    pub fn llfn(&self) -> ValueRef {
        unsafe {
            llvm::LLVMGetBasicBlockParent(self.llbb())
        }
    }

    pub fn llbb(&self) -> BasicBlockRef {
        unsafe {
            llvm::LLVMGetInsertBlock(self.llbuilder)
        }
    }

    fn count_insn(&self, category: &str) {
        if self.cx.sess().codegen_stats() {
            self.cx.stats.borrow_mut().n_llvm_insns += 1;
        }
        if self.cx.sess().count_llvm_insns() {
            *self.cx.stats
                .borrow_mut()
                .llvm_insns
                .entry(category.to_string())
                .or_insert(0) += 1;
        }
    }

    pub fn set_value_name(&self, value: ValueRef, name: &str) {
        let cname = CString::new(name.as_bytes()).unwrap();
        unsafe {
            llvm::LLVMSetValueName(value, cname.as_ptr());
        }
    }

    pub fn position_before(&self, insn: ValueRef) {
        unsafe {
            llvm::LLVMPositionBuilderBefore(self.llbuilder, insn);
        }
    }

    pub fn position_at_end(&self, llbb: BasicBlockRef) {
        unsafe {
            llvm::LLVMPositionBuilderAtEnd(self.llbuilder, llbb);
        }
    }

    pub fn position_at_start(&self, llbb: BasicBlockRef) {
        unsafe {
            llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
        }
    }

    pub fn ret_void(&self) {
        self.count_insn("retvoid");
        unsafe {
            llvm::LLVMBuildRetVoid(self.llbuilder);
        }
    }

    pub fn ret(&self, v: ValueRef) {
        self.count_insn("ret");
        unsafe {
            llvm::LLVMBuildRet(self.llbuilder, v);
        }
    }

    pub fn aggregate_ret(&self, ret_vals: &[ValueRef]) {
        unsafe {
            llvm::LLVMBuildAggregateRet(self.llbuilder,
                                        ret_vals.as_ptr(),
                                        ret_vals.len() as c_uint);
        }
    }

    pub fn br(&self, dest: BasicBlockRef) {
        self.count_insn("br");
        unsafe {
            llvm::LLVMBuildBr(self.llbuilder, dest);
        }
    }

    pub fn cond_br(&self, cond: ValueRef, then_llbb: BasicBlockRef, else_llbb: BasicBlockRef) {
        self.count_insn("condbr");
        unsafe {
            llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
        }
    }

    pub fn switch(&self, v: ValueRef, else_llbb: BasicBlockRef, num_cases: usize) -> ValueRef {
        unsafe {
            llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, num_cases as c_uint)
        }
    }

    pub fn indirect_br(&self, addr: ValueRef, num_dests: usize) {
        self.count_insn("indirectbr");
        unsafe {
            llvm::LLVMBuildIndirectBr(self.llbuilder, addr, num_dests as c_uint);
        }
    }

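    /// Like `call`, but with explicit normal (`then`) and unwind (`catch`)
    /// destinations. Arguments are bitcast to the callee's parameter types
    /// by `check_call` when they do not match exactly.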
    pub fn invoke(&self,
                  llfn: ValueRef,
                  args: &[ValueRef],
                  then: BasicBlockRef,
                  catch: BasicBlockRef,
                  bundle: Option<&OperandBundleDef>) -> ValueRef {
        self.count_insn("invoke");

        debug!("Invoke {:?} with args ({})",
               Value(llfn),
               args.iter()
                   .map(|&v| format!("{:?}", Value(v)))
                   .collect::<Vec<String>>()
                   .join(", "));

        let args = self.check_call("invoke", llfn, args);
        let bundle = bundle.as_ref().and_then(|b| NonNull::new(b.raw()));

        unsafe {
            llvm::LLVMRustBuildInvoke(self.llbuilder,
                                      llfn,
                                      args.as_ptr(),
                                      args.len() as c_uint,
                                      then,
                                      catch,
                                      bundle,
                                      noname())
        }
    }

    pub fn unreachable(&self) {
        self.count_insn("unreachable");
        unsafe {
            llvm::LLVMBuildUnreachable(self.llbuilder);
        }
    }

    /* Arithmetic */
    pub fn add(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("add");
        unsafe {
            llvm::LLVMBuildAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn nswadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("nswadd");
        unsafe {
            llvm::LLVMBuildNSWAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn nuwadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("nuwadd");
        unsafe {
            llvm::LLVMBuildNUWAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn fadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("fadd");
        unsafe {
            llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn fadd_fast(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("fadd");
        unsafe {
            let instr = llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    pub fn sub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("sub");
        unsafe {
            llvm::LLVMBuildSub(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn nswsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("nswsub");
        unsafe {
            llvm::LLVMBuildNSWSub(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn nuwsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("nuwsub");
        unsafe {
            llvm::LLVMBuildNUWSub(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn fsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("fsub");
        unsafe {
            llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn fsub_fast(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("fsub");
        unsafe {
            let instr = llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    pub fn mul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("mul");
        unsafe {
            llvm::LLVMBuildMul(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn nswmul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("nswmul");
        unsafe {
            llvm::LLVMBuildNSWMul(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn nuwmul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("nuwmul");
        unsafe {
            llvm::LLVMBuildNUWMul(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn fmul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("fmul");
        unsafe {
            llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn fmul_fast(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("fmul");
        unsafe {
            let instr = llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }


    pub fn udiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("udiv");
        unsafe {
            llvm::LLVMBuildUDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn exactudiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("exactudiv");
        unsafe {
            llvm::LLVMBuildExactUDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn sdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("sdiv");
        unsafe {
            llvm::LLVMBuildSDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn exactsdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("exactsdiv");
        unsafe {
            llvm::LLVMBuildExactSDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn fdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("fdiv");
        unsafe {
            llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn fdiv_fast(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("fdiv");
        unsafe {
            let instr = llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    pub fn urem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("urem");
        unsafe {
            llvm::LLVMBuildURem(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn srem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("srem");
        unsafe {
            llvm::LLVMBuildSRem(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn frem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("frem");
        unsafe {
            llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn frem_fast(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("frem");
        unsafe {
            let instr = llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    pub fn shl(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("shl");
        unsafe {
            llvm::LLVMBuildShl(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn lshr(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("lshr");
        unsafe {
            llvm::LLVMBuildLShr(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn ashr(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("ashr");
        unsafe {
            llvm::LLVMBuildAShr(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn and(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("and");
        unsafe {
            llvm::LLVMBuildAnd(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn or(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("or");
        unsafe {
            llvm::LLVMBuildOr(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn xor(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("xor");
        unsafe {
            llvm::LLVMBuildXor(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn binop(&self, op: Opcode, lhs: ValueRef, rhs: ValueRef)
              -> ValueRef {
        self.count_insn("binop");
        unsafe {
            llvm::LLVMBuildBinOp(self.llbuilder, op, lhs, rhs, noname())
        }
    }

    pub fn neg(&self, v: ValueRef) -> ValueRef {
        self.count_insn("neg");
        unsafe {
            llvm::LLVMBuildNeg(self.llbuilder, v, noname())
        }
    }

    pub fn nswneg(&self, v: ValueRef) -> ValueRef {
        self.count_insn("nswneg");
        unsafe {
            llvm::LLVMBuildNSWNeg(self.llbuilder, v, noname())
        }
    }

    pub fn nuwneg(&self, v: ValueRef) -> ValueRef {
        self.count_insn("nuwneg");
        unsafe {
            llvm::LLVMBuildNUWNeg(self.llbuilder, v, noname())
        }
    }
    pub fn fneg(&self, v: ValueRef) -> ValueRef {
        self.count_insn("fneg");
        unsafe {
            llvm::LLVMBuildFNeg(self.llbuilder, v, noname())
        }
    }

    pub fn not(&self, v: ValueRef) -> ValueRef {
        self.count_insn("not");
        unsafe {
            llvm::LLVMBuildNot(self.llbuilder, v, noname())
        }
    }

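    /// Emits an `alloca` in the *entry* block of the current function (via a
    /// temporary builder positioned at the start of the first basic block),
    /// so the stack slot is visible to LLVM's mem2reg/SROA passes. Use
    /// `dynamic_alloca` to emit the alloca at the current insertion point
    /// instead.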
    pub fn alloca(&self, ty: Type, name: &str, align: Align) -> ValueRef {
        let bx = Builder::with_cx(self.cx);
        bx.position_at_start(unsafe {
            llvm::LLVMGetFirstBasicBlock(self.llfn())
        });
        bx.dynamic_alloca(ty, name, align)
    }

    pub fn dynamic_alloca(&self, ty: Type, name: &str, align: Align) -> ValueRef {
        self.count_insn("alloca");
        unsafe {
            let alloca = if name.is_empty() {
                llvm::LLVMBuildAlloca(self.llbuilder, ty.to_ref(), noname())
            } else {
                let name = CString::new(name).unwrap();
                llvm::LLVMBuildAlloca(self.llbuilder, ty.to_ref(),
                                      name.as_ptr())
            };
            llvm::LLVMSetAlignment(alloca, align.abi() as c_uint);
            alloca
        }
    }

    pub fn free(&self, ptr: ValueRef) {
        self.count_insn("free");
        unsafe {
            llvm::LLVMBuildFree(self.llbuilder, ptr);
        }
    }

    pub fn load(&self, ptr: ValueRef, align: Align) -> ValueRef {
        self.count_insn("load");
        unsafe {
            let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
            llvm::LLVMSetAlignment(load, align.abi() as c_uint);
            load
        }
    }

    pub fn volatile_load(&self, ptr: ValueRef) -> ValueRef {
        self.count_insn("load.volatile");
        unsafe {
            let insn = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
            llvm::LLVMSetVolatile(insn, llvm::True);
            insn
        }
    }

    pub fn atomic_load(&self, ptr: ValueRef, order: AtomicOrdering, align: Align) -> ValueRef {
        self.count_insn("load.atomic");
        unsafe {
            let load = llvm::LLVMRustBuildAtomicLoad(self.llbuilder, ptr, noname(), order);
            // FIXME(eddyb) Isn't it UB to use `pref` instead of `abi` here?
            // However, 64-bit atomic loads on `i686-apple-darwin` appear to
            // require `___atomic_load` with ABI-alignment, so it's staying.
            llvm::LLVMSetAlignment(load, align.pref() as c_uint);
            load
        }
    }


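    /// Attaches `!range` metadata to `load`, asserting that the loaded value
    /// lies in the half-open interval `[range.start, range.end)`.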
    pub fn range_metadata(&self, load: ValueRef, range: Range<u128>) {
        unsafe {
            let llty = val_ty(load);
            let v = [
                C_uint_big(llty, range.start),
                C_uint_big(llty, range.end)
            ];

            llvm::LLVMSetMetadata(load, llvm::MD_range as c_uint,
                                  llvm::LLVMMDNodeInContext(self.cx.llcx,
                                                            v.as_ptr(),
                                                            v.len() as c_uint));
        }
    }

    pub fn nonnull_metadata(&self, load: ValueRef) {
        unsafe {
            llvm::LLVMSetMetadata(load, llvm::MD_nonnull as c_uint,
                                  llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0));
        }
    }

    pub fn store(&self, val: ValueRef, ptr: ValueRef, align: Align) -> ValueRef {
        self.store_with_flags(val, ptr, align, MemFlags::empty())
    }

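    /// Emits a store of `val` to `ptr`, honoring `MemFlags`: UNALIGNED drops
    /// the alignment to 1, VOLATILE marks the store volatile, and NONTEMPORAL
    /// attaches the `!nontemporal` metadata node LLVM expects.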
    pub fn store_with_flags(
        &self,
        val: ValueRef,
        ptr: ValueRef,
        align: Align,
        flags: MemFlags,
    ) -> ValueRef {
        debug!("Store {:?} -> {:?} ({:?})", Value(val), Value(ptr), flags);
        assert!(!self.llbuilder.is_null());
        self.count_insn("store");
        let ptr = self.check_store(val, ptr);
        unsafe {
            let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
            let align = if flags.contains(MemFlags::UNALIGNED) {
                1
            } else {
                align.abi() as c_uint
            };
            llvm::LLVMSetAlignment(store, align);
            if flags.contains(MemFlags::VOLATILE) {
                llvm::LLVMSetVolatile(store, llvm::True);
            }
            if flags.contains(MemFlags::NONTEMPORAL) {
                // According to LLVM [1], the !nontemporal metadata on a store
                // must *always* point to a metadata node holding the single
                // integer value 1.
                //
                // [1]: http://llvm.org/docs/LangRef.html#store-instruction
                let one = C_i32(self.cx, 1);
                let node = llvm::LLVMMDNodeInContext(self.cx.llcx, &one, 1);
                llvm::LLVMSetMetadata(store, llvm::MD_nontemporal as c_uint, node);
            }
            store
        }
    }

    pub fn atomic_store(&self, val: ValueRef, ptr: ValueRef,
                        order: AtomicOrdering, align: Align) {
        debug!("Store {:?} -> {:?}", Value(val), Value(ptr));
        self.count_insn("store.atomic");
        let ptr = self.check_store(val, ptr);
        unsafe {
            let store = llvm::LLVMRustBuildAtomicStore(self.llbuilder, val, ptr, order);
            // FIXME(eddyb) Isn't it UB to use `pref` instead of `abi` here?
            // Also see `atomic_load` for more context.
            llvm::LLVMSetAlignment(store, align.pref() as c_uint);
        }
    }

    pub fn gep(&self, ptr: ValueRef, indices: &[ValueRef]) -> ValueRef {
        self.count_insn("gep");
        unsafe {
            llvm::LLVMBuildGEP(self.llbuilder, ptr, indices.as_ptr(),
                               indices.len() as c_uint, noname())
        }
    }

    pub fn inbounds_gep(&self, ptr: ValueRef, indices: &[ValueRef]) -> ValueRef {
        self.count_insn("inboundsgep");
        unsafe {
            llvm::LLVMBuildInBoundsGEP(
                self.llbuilder, ptr, indices.as_ptr(), indices.len() as c_uint, noname())
        }
    }

    pub fn struct_gep(&self, ptr: ValueRef, idx: u64) -> ValueRef {
        self.count_insn("structgep");
        assert_eq!(idx as c_uint as u64, idx);
        unsafe {
            llvm::LLVMBuildStructGEP(self.llbuilder, ptr, idx as c_uint, noname())
        }
    }

    pub fn global_string(&self, _str: *const c_char) -> ValueRef {
        self.count_insn("globalstring");
        unsafe {
            llvm::LLVMBuildGlobalString(self.llbuilder, _str, noname())
        }
    }

    pub fn global_string_ptr(&self, _str: *const c_char) -> ValueRef {
        self.count_insn("globalstringptr");
        unsafe {
            llvm::LLVMBuildGlobalStringPtr(self.llbuilder, _str, noname())
        }
    }

    /* Casts */
    pub fn trunc(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("trunc");
        unsafe {
            llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn zext(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("zext");
        unsafe {
            llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn sext(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("sext");
        unsafe {
            llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn fptoui(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("fptoui");
        unsafe {
            llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn fptosi(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("fptosi");
        unsafe {
            llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn uitofp(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("uitofp");
        unsafe {
            llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn sitofp(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("sitofp");
        unsafe {
            llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn fptrunc(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("fptrunc");
        unsafe {
            llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn fpext(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("fpext");
        unsafe {
            llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn ptrtoint(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("ptrtoint");
        unsafe {
            llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn inttoptr(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("inttoptr");
        unsafe {
            llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("bitcast");
        unsafe {
            llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn zext_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("zextorbitcast");
        unsafe {
            llvm::LLVMBuildZExtOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn sext_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("sextorbitcast");
        unsafe {
            llvm::LLVMBuildSExtOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn trunc_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("truncorbitcast");
        unsafe {
            llvm::LLVMBuildTruncOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn cast(&self, op: Opcode, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("cast");
        unsafe {
            llvm::LLVMBuildCast(self.llbuilder, op, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn pointercast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("pointercast");
        unsafe {
            llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }

    pub fn intcast(&self, val: ValueRef, dest_ty: Type, is_signed: bool) -> ValueRef {
        self.count_insn("intcast");
        unsafe {
            llvm::LLVMRustBuildIntCast(self.llbuilder, val, dest_ty.to_ref(), is_signed)
        }
    }

    pub fn fpcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
        self.count_insn("fpcast");
        unsafe {
            llvm::LLVMBuildFPCast(self.llbuilder, val, dest_ty.to_ref(), noname())
        }
    }


    /* Comparisons */
    pub fn icmp(&self, op: IntPredicate, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("icmp");
        unsafe {
            llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
        }
    }

    pub fn fcmp(&self, op: RealPredicate, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("fcmp");
        unsafe {
            llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
        }
    }

    /* Miscellaneous instructions */
    pub fn empty_phi(&self, ty: Type) -> ValueRef {
        self.count_insn("emptyphi");
        unsafe {
            llvm::LLVMBuildPhi(self.llbuilder, ty.to_ref(), noname())
        }
    }

    pub fn phi(&self, ty: Type, vals: &[ValueRef], bbs: &[BasicBlockRef]) -> ValueRef {
        assert_eq!(vals.len(), bbs.len());
        let phi = self.empty_phi(ty);
        self.count_insn("addincoming");
        unsafe {
            llvm::LLVMAddIncoming(phi, vals.as_ptr(),
                                  bbs.as_ptr(),
                                  vals.len() as c_uint);
            phi
        }
    }

    pub fn add_span_comment(&self, sp: Span, text: &str) {
        if self.cx.sess().asm_comments() {
            let s = format!("{} ({})",
                            text,
                            self.cx.sess().codemap().span_to_string(sp));
            debug!("{}", s);
            self.add_comment(&s);
        }
    }

    pub fn add_comment(&self, text: &str) {
        if self.cx.sess().asm_comments() {
            let sanitized = text.replace("$", "");
            let comment_text = format!("{} {}", "#",
                                       sanitized.replace("\n", "\n\t# "));
            self.count_insn("inlineasm");
            let comment_text = CString::new(comment_text).unwrap();
            let asm = unsafe {
                llvm::LLVMConstInlineAsm(Type::func(&[], &Type::void(self.cx)).to_ref(),
                                         comment_text.as_ptr(), noname(), False,
                                         False)
            };
            self.call(asm, &[], None);
        }
    }

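    /// Emits a call to an inline-asm blob: the function type is derived from
    /// the types of `inputs` and the requested `output` type, and the asm is
    /// wrapped via `LLVMRustInlineAsm` before being called like any other
    /// value.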
    pub fn inline_asm_call(&self, asm: *const c_char, cons: *const c_char,
                         inputs: &[ValueRef], output: Type,
                         volatile: bool, alignstack: bool,
                         dia: AsmDialect) -> ValueRef {
        self.count_insn("inlineasm");

        let volatile = if volatile { llvm::True }
                       else        { llvm::False };
        let alignstack = if alignstack { llvm::True }
                         else          { llvm::False };

        let argtys = inputs.iter().map(|v| {
            debug!("Asm Input Type: {:?}", Value(*v));
            val_ty(*v)
        }).collect::<Vec<_>>();

        debug!("Asm Output Type: {:?}", output);
        let fty = Type::func(&argtys[..], &output);
        unsafe {
            let v = llvm::LLVMRustInlineAsm(
                fty.to_ref(), asm, cons, volatile, alignstack, dia);
            self.call(v, inputs, None)
        }
    }

    pub fn call(&self, llfn: ValueRef, args: &[ValueRef],
                bundle: Option<&OperandBundleDef>) -> ValueRef {
        self.count_insn("call");

        debug!("Call {:?} with args ({})",
               Value(llfn),
               args.iter()
                   .map(|&v| format!("{:?}", Value(v)))
                   .collect::<Vec<String>>()
                   .join(", "));

        let args = self.check_call("call", llfn, args);
        let bundle = bundle.as_ref().and_then(|b| NonNull::new(b.raw()));

        unsafe {
            llvm::LLVMRustBuildCall(self.llbuilder, llfn, args.as_ptr(),
                                    args.len() as c_uint, bundle, noname())
        }
    }

    pub fn minnum(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("minnum");
        unsafe {
            let instr = llvm::LLVMRustBuildMinNum(self.llbuilder, lhs, rhs);
            if instr.is_null() {
                bug!("LLVMRustBuildMinNum is not available in LLVM version < 6.0");
            }
            instr
        }
    }
    pub fn maxnum(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("maxnum");
        unsafe {
            let instr = llvm::LLVMRustBuildMaxNum(self.llbuilder, lhs, rhs);
            if instr.is_null() {
                bug!("LLVMRustBuildMaxNum is not available in LLVM version < 6.0");
            }
            instr
        }
    }

    pub fn select(&self, cond: ValueRef, then_val: ValueRef, else_val: ValueRef) -> ValueRef {
        self.count_insn("select");
        unsafe {
            llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, noname())
        }
    }

    pub fn va_arg(&self, list: ValueRef, ty: Type) -> ValueRef {
        self.count_insn("vaarg");
        unsafe {
            llvm::LLVMBuildVAArg(self.llbuilder, list, ty.to_ref(), noname())
        }
    }

    pub fn extract_element(&self, vec: ValueRef, idx: ValueRef) -> ValueRef {
        self.count_insn("extractelement");
        unsafe {
            llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, noname())
        }
    }

    pub fn insert_element(&self, vec: ValueRef, elt: ValueRef, idx: ValueRef) -> ValueRef {
        self.count_insn("insertelement");
        unsafe {
            llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, noname())
        }
    }

    pub fn shuffle_vector(&self, v1: ValueRef, v2: ValueRef, mask: ValueRef) -> ValueRef {
        self.count_insn("shufflevector");
        unsafe {
            llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, noname())
        }
    }

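    /// Broadcasts `elt` into a vector of `num_elts` lanes using the usual
    /// LLVM idiom: insert the scalar into lane 0 of an undef vector, then
    /// shufflevector with an all-zeros mask so every lane reads lane 0.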
    pub fn vector_splat(&self, num_elts: usize, elt: ValueRef) -> ValueRef {
        unsafe {
            let elt_ty = val_ty(elt);
            let undef = llvm::LLVMGetUndef(Type::vector(&elt_ty, num_elts as u64).to_ref());
            let vec = self.insert_element(undef, elt, C_i32(self.cx, 0));
            let vec_i32_ty = Type::vector(&Type::i32(self.cx), num_elts as u64);
            self.shuffle_vector(vec, undef, C_null(vec_i32_ty))
        }
    }

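    // The vector reduction helpers below lean on LLVMRust* wrappers that
    // return a null value when the underlying reduction intrinsics are not
    // available in the linked LLVM, hence the `bug!` checks on each result.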
    pub fn vector_reduce_fadd_fast(&self, acc: ValueRef, src: ValueRef) -> ValueRef {
        self.count_insn("vector.reduce.fadd_fast");
        unsafe {
            // FIXME: add a non-fast math version once
            // https://bugs.llvm.org/show_bug.cgi?id=36732
            // is fixed.
            let instr = llvm::LLVMRustBuildVectorReduceFAdd(self.llbuilder, acc, src);
            if instr.is_null() {
                bug!("LLVMRustBuildVectorReduceFAdd is not available in LLVM version < 5.0");
            }
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_fmul_fast(&self, acc: ValueRef, src: ValueRef) -> ValueRef {
        self.count_insn("vector.reduce.fmul_fast");
        unsafe {
            // FIXME: add a non-fast math version once
            // https://bugs.llvm.org/show_bug.cgi?id=36732
            // is fixed.
            let instr = llvm::LLVMRustBuildVectorReduceFMul(self.llbuilder, acc, src);
            if instr.is_null() {
                bug!("LLVMRustBuildVectorReduceFMul is not available in LLVM version < 5.0");
            }
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_add(&self, src: ValueRef) -> ValueRef {
        self.count_insn("vector.reduce.add");
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceAdd(self.llbuilder, src);
            if instr.is_null() {
                bug!("LLVMRustBuildVectorReduceAdd is not available in LLVM version < 5.0");
            }
            instr
        }
    }
    pub fn vector_reduce_mul(&self, src: ValueRef) -> ValueRef {
        self.count_insn("vector.reduce.mul");
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceMul(self.llbuilder, src);
            if instr.is_null() {
                bug!("LLVMRustBuildVectorReduceMul is not available in LLVM version < 5.0");
            }
            instr
        }
    }
    pub fn vector_reduce_and(&self, src: ValueRef) -> ValueRef {
        self.count_insn("vector.reduce.and");
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceAnd(self.llbuilder, src);
            if instr.is_null() {
                bug!("LLVMRustBuildVectorReduceAnd is not available in LLVM version < 5.0");
            }
            instr
        }
    }
    pub fn vector_reduce_or(&self, src: ValueRef) -> ValueRef {
        self.count_insn("vector.reduce.or");
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceOr(self.llbuilder, src);
            if instr.is_null() {
                bug!("LLVMRustBuildVectorReduceOr is not available in LLVM version < 5.0");
            }
            instr
        }
    }
    pub fn vector_reduce_xor(&self, src: ValueRef) -> ValueRef {
        self.count_insn("vector.reduce.xor");
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceXor(self.llbuilder, src);
            if instr.is_null() {
                bug!("LLVMRustBuildVectorReduceXor is not available in LLVM version < 5.0");
            }
            instr
        }
    }
    pub fn vector_reduce_fmin(&self, src: ValueRef) -> ValueRef {
        self.count_insn("vector.reduce.fmin");
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ false);
            if instr.is_null() {
                bug!("LLVMRustBuildVectorReduceFMin is not available in LLVM version < 5.0");
            }
            instr
        }
    }
    pub fn vector_reduce_fmax(&self, src: ValueRef) -> ValueRef {
        self.count_insn("vector.reduce.fmax");
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ false);
            if instr.is_null() {
                bug!("LLVMRustBuildVectorReduceFMax is not available in LLVM version < 5.0");
            }
            instr
        }
    }
    pub fn vector_reduce_fmin_fast(&self, src: ValueRef) -> ValueRef {
        self.count_insn("vector.reduce.fmin_fast");
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ true);
            if instr.is_null() {
                bug!("LLVMRustBuildVectorReduceFMin is not available in LLVM version < 5.0");
            }
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_fmax_fast(&self, src: ValueRef) -> ValueRef {
        self.count_insn("vector.reduce.fmax_fast");
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ true);
            if instr.is_null() {
                bug!("LLVMRustBuildVectorReduceFMax is not available in LLVM version < 5.0");
            }
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_min(&self, src: ValueRef, is_signed: bool) -> ValueRef {
        self.count_insn("vector.reduce.min");
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceMin(self.llbuilder, src, is_signed);
            if instr.is_null() {
                bug!("LLVMRustBuildVectorReduceMin is not available in LLVM version < 5.0");
            }
            instr
        }
    }
    pub fn vector_reduce_max(&self, src: ValueRef, is_signed: bool) -> ValueRef {
        self.count_insn("vector.reduce.max");
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceMax(self.llbuilder, src, is_signed);
            if instr.is_null() {
                bug!("LLVMRustBuildVectorReduceMax is not available in LLVM version < 5.0");
            }
            instr
        }
    }

    pub fn extract_value(&self, agg_val: ValueRef, idx: u64) -> ValueRef {
        self.count_insn("extractvalue");
        assert_eq!(idx as c_uint as u64, idx);
        unsafe {
            llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, noname())
        }
    }

    pub fn insert_value(&self, agg_val: ValueRef, elt: ValueRef,
                       idx: u64) -> ValueRef {
        self.count_insn("insertvalue");
        assert_eq!(idx as c_uint as u64, idx);
        unsafe {
            llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint,
                                       noname())
        }
    }

    pub fn is_null(&self, val: ValueRef) -> ValueRef {
        self.count_insn("isnull");
        unsafe {
            llvm::LLVMBuildIsNull(self.llbuilder, val, noname())
        }
    }

    pub fn is_not_null(&self, val: ValueRef) -> ValueRef {
        self.count_insn("isnotnull");
        unsafe {
            llvm::LLVMBuildIsNotNull(self.llbuilder, val, noname())
        }
    }

    pub fn ptrdiff(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
        self.count_insn("ptrdiff");
        unsafe {
            llvm::LLVMBuildPtrDiff(self.llbuilder, lhs, rhs, noname())
        }
    }

    pub fn landing_pad(&self, ty: Type, pers_fn: ValueRef,
                       num_clauses: usize) -> ValueRef {
        self.count_insn("landingpad");
        unsafe {
            llvm::LLVMBuildLandingPad(self.llbuilder, ty.to_ref(), pers_fn,
                                      num_clauses as c_uint, noname())
        }
    }

    pub fn add_clause(&self, landing_pad: ValueRef, clause: ValueRef) {
        unsafe {
            llvm::LLVMAddClause(landing_pad, clause);
        }
    }

    pub fn set_cleanup(&self, landing_pad: ValueRef) {
        self.count_insn("setcleanup");
        unsafe {
            llvm::LLVMSetCleanup(landing_pad, llvm::True);
        }
    }

    pub fn resume(&self, exn: ValueRef) -> ValueRef {
        self.count_insn("resume");
        unsafe {
            llvm::LLVMBuildResume(self.llbuilder, exn)
        }
    }

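    // The cleanuppad/catchpad/catchswitch helpers below drive MSVC-style
    // (funclet-based) exception handling; the LLVMRust* wrappers return null
    // when the linked LLVM lacks support, which the asserts turn into ICEs.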
    pub fn cleanup_pad(&self,
                       parent: Option<ValueRef>,
                       args: &[ValueRef]) -> ValueRef {
        self.count_insn("cleanuppad");
        let parent = parent.and_then(NonNull::new);
        let name = CString::new("cleanuppad").unwrap();
        let ret = unsafe {
            llvm::LLVMRustBuildCleanupPad(self.llbuilder,
                                          parent,
                                          args.len() as c_uint,
                                          args.as_ptr(),
                                          name.as_ptr())
        };
        assert!(!ret.is_null(), "LLVM does not have support for cleanuppad");
        return ret
    }

    pub fn cleanup_ret(&self, cleanup: ValueRef,
                       unwind: Option<BasicBlockRef>) -> ValueRef {
        self.count_insn("cleanupret");
        let unwind = unwind.and_then(NonNull::new);
        let ret = unsafe {
            llvm::LLVMRustBuildCleanupRet(self.llbuilder, cleanup, unwind)
        };
        assert!(!ret.is_null(), "LLVM does not have support for cleanupret");
        return ret
    }

    pub fn catch_pad(&self,
                     parent: ValueRef,
                     args: &[ValueRef]) -> ValueRef {
        self.count_insn("catchpad");
        let name = CString::new("catchpad").unwrap();
        let ret = unsafe {
            llvm::LLVMRustBuildCatchPad(self.llbuilder, parent,
                                        args.len() as c_uint, args.as_ptr(),
                                        name.as_ptr())
        };
        assert!(!ret.is_null(), "LLVM does not have support for catchpad");
        return ret
    }

    pub fn catch_ret(&self, pad: ValueRef, unwind: BasicBlockRef) -> ValueRef {
        self.count_insn("catchret");
        let ret = unsafe {
            llvm::LLVMRustBuildCatchRet(self.llbuilder, pad, unwind)
        };
        assert!(!ret.is_null(), "LLVM does not have support for catchret");
        return ret
    }

    pub fn catch_switch(&self,
                        parent: Option<ValueRef>,
                        unwind: Option<BasicBlockRef>,
                        num_handlers: usize) -> ValueRef {
        self.count_insn("catchswitch");
        let parent = parent.and_then(NonNull::new);
        let unwind = unwind.and_then(NonNull::new);
        let name = CString::new("catchswitch").unwrap();
        let ret = unsafe {
            llvm::LLVMRustBuildCatchSwitch(self.llbuilder, parent, unwind,
                                           num_handlers as c_uint,
                                           name.as_ptr())
        };
        assert!(!ret.is_null(), "LLVM does not have support for catchswitch");
        return ret
    }

    pub fn add_handler(&self, catch_switch: ValueRef, handler: BasicBlockRef) {
        unsafe {
            llvm::LLVMRustAddHandler(catch_switch, handler);
        }
    }

    pub fn set_personality_fn(&self, personality: ValueRef) {
        unsafe {
            llvm::LLVMSetPersonalityFn(self.llfn(), personality);
        }
    }

    // Atomic Operations
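    /// Emits a `cmpxchg`: atomically replaces the value at `dst` with `src`
    /// if it currently equals `cmp`, using `order` on success and
    /// `failure_order` on failure; `weak` selects the spuriously-failing
    /// variant.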
    pub fn atomic_cmpxchg(&self, dst: ValueRef,
                         cmp: ValueRef, src: ValueRef,
                         order: AtomicOrdering,
                         failure_order: AtomicOrdering,
                         weak: llvm::Bool) -> ValueRef {
        unsafe {
            llvm::LLVMRustBuildAtomicCmpXchg(self.llbuilder, dst, cmp, src,
                                         order, failure_order, weak)
        }
    }
    pub fn atomic_rmw(&self, op: AtomicRmwBinOp,
                     dst: ValueRef, src: ValueRef,
                     order: AtomicOrdering) -> ValueRef {
        unsafe {
            llvm::LLVMBuildAtomicRMW(self.llbuilder, op, dst, src, order, False)
        }
    }

    pub fn atomic_fence(&self, order: AtomicOrdering, scope: SynchronizationScope) {
        unsafe {
            llvm::LLVMRustBuildAtomicFence(self.llbuilder, order, scope);
        }
    }

    pub fn add_case(&self, s: ValueRef, on_val: ValueRef, dest: BasicBlockRef) {
        unsafe {
            llvm::LLVMAddCase(s, on_val, dest)
        }
    }

    pub fn add_incoming_to_phi(&self, phi: ValueRef, val: ValueRef, bb: BasicBlockRef) {
        self.count_insn("addincoming");
        unsafe {
            llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
        }
    }

    pub fn set_invariant_load(&self, load: ValueRef) {
        unsafe {
            llvm::LLVMSetMetadata(load, llvm::MD_invariant_load as c_uint,
                                  llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0));
        }
    }

    /// Returns the ptr value that should be used for storing `val`.
    fn check_store<'b>(&self,
                       val: ValueRef,
                       ptr: ValueRef) -> ValueRef {
        let dest_ptr_ty = val_ty(ptr);
        let stored_ty = val_ty(val);
        let stored_ptr_ty = stored_ty.ptr_to();

        assert_eq!(dest_ptr_ty.kind(), llvm::TypeKind::Pointer);

        if dest_ptr_ty == stored_ptr_ty {
            ptr
        } else {
            debug!("Type mismatch in store. \
                    Expected {:?}, got {:?}; inserting bitcast",
                   dest_ptr_ty, stored_ptr_ty);
            self.bitcast(ptr, stored_ptr_ty)
        }
    }

    /// Returns the args that should be used for a call to `llfn`.
    fn check_call<'b>(&self,
                      typ: &str,
                      llfn: ValueRef,
                      args: &'b [ValueRef]) -> Cow<'b, [ValueRef]> {
        let mut fn_ty = val_ty(llfn);
        // Strip off pointers
        while fn_ty.kind() == llvm::TypeKind::Pointer {
            fn_ty = fn_ty.element_type();
        }

        assert!(fn_ty.kind() == llvm::TypeKind::Function,
                "builder::{} not passed a function, but {:?}", typ, fn_ty);

        let param_tys = fn_ty.func_params();

        let all_args_match = param_tys.iter()
            .zip(args.iter().map(|&v| val_ty(v)))
            .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty);

        if all_args_match {
            return Cow::Borrowed(args);
        }

        let casted_args: Vec<_> = param_tys.into_iter()
            .zip(args.iter())
            .enumerate()
            .map(|(i, (expected_ty, &actual_val))| {
                let actual_ty = val_ty(actual_val);
                if expected_ty != actual_ty {
                    debug!("Type mismatch in function call of {:?}. \
                            Expected {:?} for param {}, got {:?}; injecting bitcast",
                           Value(llfn),
                           expected_ty, i, actual_ty);
                    self.bitcast(actual_val, expected_ty)
                } else {
                    actual_val
                }
            })
            .collect();

        return Cow::Owned(casted_args);
    }

    pub fn lifetime_start(&self, ptr: ValueRef, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.start", ptr, size);
    }

    pub fn lifetime_end(&self, ptr: ValueRef, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.end", ptr, size);
    }

    /// Emits a call to the named lifetime `intrinsic` (`llvm.lifetime.start`
    /// or `llvm.lifetime.end`) for `ptr`, passing `size` in bytes and the
    /// pointer cast to `i8*`.
    ///
    /// This is a no-op when optimizations are disabled (lifetime markers are
    /// only useful to the optimizer) or when `size` is zero.
    fn call_lifetime_intrinsic(&self, intrinsic: &str, ptr: ValueRef, size: Size) {
        if self.cx.sess().opts.optimize == config::OptLevel::No {
            return;
        }

        let size = size.bytes();
        if size == 0 {
            return;
        }

        let lifetime_intrinsic = self.cx.get_intrinsic(intrinsic);

        let ptr = self.pointercast(ptr, Type::i8p(self.cx));
        self.call(lifetime_intrinsic, &[C_u64(self.cx, size), ptr], None);
    }
}