]> git.lizzy.rs Git - rust.git/blob - src/librustc_codegen_llvm/builder.rs
601c97d0c791fba8c176523beb4eddf22f4e3503
[rust.git] / src / librustc_codegen_llvm / builder.rs
1 // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 use llvm::{AtomicRmwBinOp, AtomicOrdering, SynchronizationScope, AsmDialect};
12 use llvm::{self, False, OperandBundleDef};
13 use common::*;
14 use type_;
15 use value;
16 use libc::{c_uint, c_char};
17 use rustc::ty::TyCtxt;
18 use rustc::ty::layout::{Align, Size};
19 use rustc::session::{config, Session};
20 use rustc_data_structures::small_c_str::SmallCStr;
21 use traits::{self, BuilderMethods};
22 use syntax;
23
24 use std::borrow::Cow;
25 use std::ops::Range;
26 use std::ptr;
27
// All Builders must have an llfn associated with them
#[must_use]
pub struct Builder<'a, 'll: 'a, 'tcx: 'll, V: 'll = &'ll value::Value> {
    // Raw LLVM IR builder handle; exclusively owned and disposed in `Drop`.
    pub llbuilder: &'ll mut llvm::Builder<'ll>,
    // Codegen context this builder emits IR into.
    pub cx: &'a CodegenCx<'ll, 'tcx, V>,
}
34
impl<V> Drop for Builder<'a, 'll, 'tcx, V> {
    fn drop(&mut self) {
        // SAFETY: `llbuilder` is exclusively owned by this `Builder`, so the
        // underlying LLVM builder is disposed exactly once, here.
        unsafe {
            llvm::LLVMDisposeBuilder(&mut *(self.llbuilder as *mut _));
        }
    }
}
42
// This is a really awful way to get a zero-length c-string, but better (and a
// lot more efficient) than doing str::as_c_str("", ...) every time.
fn noname() -> *const c_char {
    // A single static NUL byte serves as the shared empty name for all
    // instructions built without an explicit name.
    static CNULL: c_char = 0;
    &CNULL
}
49
bitflags! {
    // Flags modifying how a memory access (load/store) is emitted.
    pub struct MemFlags: u8 {
        const VOLATILE = 1 << 0;
        const NONTEMPORAL = 1 << 1;
        const UNALIGNED = 1 << 2;
    }
}
57
58 impl BuilderMethods<'a, 'll, 'tcx> for Builder<'a, 'll, 'tcx> {
59     type Value = &'ll value::Value;
60     type BasicBlock = &'ll llvm::BasicBlock;
61     type Type = &'ll type_::Type;
62
63     fn new_block<'b>(
64         cx: &'a CodegenCx<'ll, 'tcx>,
65         llfn: Self::Value,
66         name: &'b str
67     ) -> Self {
68         let bx = Builder::with_cx(cx);
69         let llbb = unsafe {
70             let name = SmallCStr::new(name);
71             llvm::LLVMAppendBasicBlockInContext(
72                 cx.llcx,
73                 llfn,
74                 name.as_ptr()
75             )
76         };
77         bx.position_at_end(llbb);
78         bx
79     }
80
81     fn with_cx(cx: &'a CodegenCx<'ll, 'tcx>) -> Self {
82         // Create a fresh builder from the crate context.
83         let llbuilder = unsafe {
84             llvm::LLVMCreateBuilderInContext(cx.llcx)
85         };
86         Builder {
87             llbuilder,
88             cx,
89         }
90     }
91
    /// Creates a new block in the same function as this builder's current block.
    fn build_sibling_block<'b>(&self, name: &'b str) -> Self {
        Builder::new_block(self.cx, self.llfn(), name)
    }

    fn sess(&self) -> &Session {
        self.cx.sess()
    }

    fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {
        self.cx.tcx
    }

    /// The function containing the block this builder is positioned in.
    fn llfn(&self) -> Self::Value {
        unsafe {
            llvm::LLVMGetBasicBlockParent(self.llbb())
        }
    }

    /// The basic block this builder is currently positioned in.
    fn llbb(&self) -> Self::BasicBlock {
        unsafe {
            llvm::LLVMGetInsertBlock(self.llbuilder)
        }
    }
115
116     fn count_insn(&self, category: &str) {
117         if self.cx().sess().codegen_stats() {
118             self.cx().stats.borrow_mut().n_llvm_insns += 1;
119         }
120         if self.cx().sess().count_llvm_insns() {
121             *self.cx().stats
122                       .borrow_mut()
123                       .llvm_insns
124                       .entry(category.to_string())
125                       .or_insert(0) += 1;
126         }
127     }
128
    /// Sets the IR-level name of `value` (for readable LLVM IR output).
    fn set_value_name(&self, value: Self::Value, name: &str) {
        let cname = SmallCStr::new(name);
        unsafe {
            llvm::LLVMSetValueName(value, cname.as_ptr());
        }
    }

    /// Positions the builder at the end of `llbb`.
    fn position_at_end(&self, llbb: Self::BasicBlock) {
        unsafe {
            llvm::LLVMPositionBuilderAtEnd(self.llbuilder, llbb);
        }
    }

    /// Positions the builder at the start of `llbb`.
    fn position_at_start(&self, llbb: Self::BasicBlock) {
        unsafe {
            llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
        }
    }
147
    /// Emits `ret void`.
    fn ret_void(&self) {
        self.count_insn("retvoid");
        unsafe {
            llvm::LLVMBuildRetVoid(self.llbuilder);
        }
    }

    /// Emits `ret v`.
    fn ret(&self, v: Self::Value) {
        self.count_insn("ret");
        unsafe {
            llvm::LLVMBuildRet(self.llbuilder, v);
        }
    }

    /// Emits an unconditional branch to `dest`.
    fn br(&self, dest: Self::BasicBlock) {
        self.count_insn("br");
        unsafe {
            llvm::LLVMBuildBr(self.llbuilder, dest);
        }
    }

    /// Emits a conditional branch on `cond` to `then_llbb` / `else_llbb`.
    fn cond_br(
        &self,
        cond: Self::Value,
        then_llbb: Self::BasicBlock,
        else_llbb: Self::BasicBlock,
    ) {
        self.count_insn("condbr");
        unsafe {
            llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
        }
    }
180
181     fn switch(
182         &self,
183         v: Self::Value,
184         else_llbb: Self::BasicBlock,
185         num_cases: usize,
186     ) -> Self::Value {
187         unsafe {
188             llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, num_cases as c_uint)
189         }
190     }
191
    /// Emits an `invoke` of `llfn` with `args`: control transfers to `then`
    /// on normal return and to `catch` on unwind. `bundle` carries an
    /// optional operand bundle (e.g. for MSVC-style funclets — TODO confirm
    /// against callers).
    fn invoke(&self,
                  llfn: Self::Value,
                  args: &[Self::Value],
                  then: Self::BasicBlock,
                  catch: Self::BasicBlock,
                  bundle: Option<&traits::OperandBundleDef<'ll, Self::Value>>) -> Self::Value {
        self.count_insn("invoke");

        debug!("Invoke {:?} with args ({:?})",
               llfn,
               args);

        // May cast arguments to match the callee's declared signature.
        let args = self.check_call("invoke", llfn, args);
        let bundle = bundle.map(OperandBundleDef::from_generic);
        let bundle = bundle.as_ref().map(|b| &*b.raw);

        unsafe {
            llvm::LLVMRustBuildInvoke(self.llbuilder,
                                      llfn,
                                      args.as_ptr(),
                                      args.len() as c_uint,
                                      then,
                                      catch,
                                      bundle,
                                      noname())
        }
    }

    /// Emits an `unreachable` terminator.
    fn unreachable(&self) {
        self.count_insn("unreachable");
        unsafe {
            llvm::LLVMBuildUnreachable(self.llbuilder);
        }
    }
226
    /* Arithmetic */
    // Thin wrappers over the corresponding LLVM `Build*` instructions.
    // The `*_fast` float variants additionally set the "unsafe algebra"
    // (fast-math) flag on the built instruction. Note: the fast variants
    // record stats under the base category name (e.g. "fadd", not
    // "fadd_fast").
    fn add(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("add");
        unsafe {
            llvm::LLVMBuildAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fadd(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("fadd");
        unsafe {
            llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fadd_fast(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("fadd");
        unsafe {
            let instr = llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn sub(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("sub");
        unsafe {
            llvm::LLVMBuildSub(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fsub(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("fsub");
        unsafe {
            llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fsub_fast(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("fsub");
        unsafe {
            let instr = llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn mul(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("mul");
        unsafe {
            llvm::LLVMBuildMul(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fmul(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("fmul");
        unsafe {
            llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fmul_fast(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("fmul");
        unsafe {
            let instr = llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }


    fn udiv(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("udiv");
        unsafe {
            llvm::LLVMBuildUDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    // `exact` division: the result is poison if the division has a remainder.
    fn exactudiv(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("exactudiv");
        unsafe {
            llvm::LLVMBuildExactUDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn sdiv(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("sdiv");
        unsafe {
            llvm::LLVMBuildSDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn exactsdiv(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("exactsdiv");
        unsafe {
            llvm::LLVMBuildExactSDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fdiv(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("fdiv");
        unsafe {
            llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fdiv_fast(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("fdiv");
        unsafe {
            let instr = llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn urem(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("urem");
        unsafe {
            llvm::LLVMBuildURem(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn srem(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("srem");
        unsafe {
            llvm::LLVMBuildSRem(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn frem(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("frem");
        unsafe {
            llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn frem_fast(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("frem");
        unsafe {
            let instr = llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn shl(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("shl");
        unsafe {
            llvm::LLVMBuildShl(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn lshr(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("lshr");
        unsafe {
            llvm::LLVMBuildLShr(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn ashr(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("ashr");
        unsafe {
            llvm::LLVMBuildAShr(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn and(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("and");
        unsafe {
            llvm::LLVMBuildAnd(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn or(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("or");
        unsafe {
            llvm::LLVMBuildOr(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn xor(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("xor");
        unsafe {
            llvm::LLVMBuildXor(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn neg(&self, v: Self::Value) -> Self::Value {
        self.count_insn("neg");
        unsafe {
            llvm::LLVMBuildNeg(self.llbuilder, v, noname())
        }
    }

    fn fneg(&self, v: Self::Value) -> Self::Value {
        self.count_insn("fneg");
        unsafe {
            llvm::LLVMBuildFNeg(self.llbuilder, v, noname())
        }
    }

    fn not(&self, v: Self::Value) -> Self::Value {
        self.count_insn("not");
        unsafe {
            llvm::LLVMBuildNot(self.llbuilder, v, noname())
        }
    }
434
    /// Emits an `alloca` in the *first* basic block of the current function
    /// (via a temporary builder), so stack slots are grouped at function
    /// entry rather than inside loops.
    fn alloca(&self, ty: Self::Type, name: &str, align: Align) -> Self::Value {
        let bx = Builder::with_cx(self.cx);
        bx.position_at_start(unsafe {
            llvm::LLVMGetFirstBasicBlock(self.llfn())
        });
        bx.dynamic_alloca(ty, name, align)
    }

    /// Emits an `alloca` at the current insertion point with the given
    /// alignment; the name is omitted entirely when empty.
    fn dynamic_alloca(&self, ty: Self::Type, name: &str, align: Align) -> Self::Value {
        self.count_insn("alloca");
        unsafe {
            let alloca = if name.is_empty() {
                llvm::LLVMBuildAlloca(self.llbuilder, ty, noname())
            } else {
                let name = SmallCStr::new(name);
                llvm::LLVMBuildAlloca(self.llbuilder, ty,
                                      name.as_ptr())
            };
            llvm::LLVMSetAlignment(alloca, align.abi() as c_uint);
            alloca
        }
    }

    /// Emits an `alloca` of `len` elements of `ty` at the current insertion
    /// point, with the given alignment.
    fn array_alloca(&self,
                        ty: Self::Type,
                        len: Self::Value,
                        name: &str,
                        align: Align) -> Self::Value {
        self.count_insn("alloca");
        unsafe {
            let alloca = if name.is_empty() {
                llvm::LLVMBuildArrayAlloca(self.llbuilder, ty, len, noname())
            } else {
                let name = SmallCStr::new(name);
                llvm::LLVMBuildArrayAlloca(self.llbuilder, ty, len,
                                           name.as_ptr())
            };
            llvm::LLVMSetAlignment(alloca, align.abi() as c_uint);
            alloca
        }
    }
476
    /// Emits a `load` from `ptr` with the given alignment.
    fn load(&self, ptr: Self::Value, align: Align) -> Self::Value {
        self.count_insn("load");
        unsafe {
            let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
            llvm::LLVMSetAlignment(load, align.abi() as c_uint);
            load
        }
    }

    /// Emits a volatile `load` from `ptr`. Note: no explicit alignment is
    /// set here, unlike `load`.
    fn volatile_load(&self, ptr: Self::Value) -> Self::Value {
        self.count_insn("load.volatile");
        unsafe {
            let insn = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
            llvm::LLVMSetVolatile(insn, llvm::True);
            insn
        }
    }

    /// Emits an atomic `load` from `ptr` with the given memory ordering.
    fn atomic_load(
        &self,
        ptr: Self::Value,
        order: traits::AtomicOrdering,
        size: Size,
    ) -> Self::Value {
        self.count_insn("load.atomic");
        unsafe {
            let load = llvm::LLVMRustBuildAtomicLoad(
                self.llbuilder,
                ptr,
                noname(),
                AtomicOrdering::from_generic(order),
            );
            // LLVM requires the alignment of atomic loads to be at least the size of the type.
            llvm::LLVMSetAlignment(load, size.bytes() as c_uint);
            load
        }
    }
514
515
    /// Attaches `!range` metadata `[range.start, range.end)` to a `load`
    /// instruction, letting LLVM assume the loaded value is in that range.
    fn range_metadata(&self, load: Self::Value, range: Range<u128>) {
        if self.sess().target.target.arch == "amdgpu" {
            // amdgpu/LLVM does something weird and thinks a i64 value is
            // split into a v2i32, halving the bitwidth LLVM expects,
            // tripping an assertion. So, for now, just disable this
            // optimization.
            return;
        }

        unsafe {
            let llty = val_ty(load);
            // Range bounds must have the same type as the loaded value.
            let v = [
                C_uint_big(llty, range.start),
                C_uint_big(llty, range.end)
            ];

            llvm::LLVMSetMetadata(load, llvm::MD_range as c_uint,
                                  llvm::LLVMMDNodeInContext(self.cx.llcx,
                                                            v.as_ptr(),
                                                            v.len() as c_uint));
        }
    }

    /// Attaches `!nonnull` metadata (an empty node) to a `load` instruction.
    fn nonnull_metadata(&self, load: Self::Value) {
        unsafe {
            llvm::LLVMSetMetadata(load, llvm::MD_nonnull as c_uint,
                                  llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0));
        }
    }
545
    /// Emits a plain `store` of `val` to `ptr` (no volatile/nontemporal flags).
    fn store(&self, val: Self::Value, ptr: Self::Value, align: Align) -> Self::Value {
        self.store_with_flags(val, ptr, align, MemFlags::empty())
    }

    /// Emits a `store` of `val` to `ptr`, honoring `MemFlags`:
    /// UNALIGNED forces alignment 1, VOLATILE marks the store volatile,
    /// NONTEMPORAL attaches `!nontemporal` metadata.
    fn store_with_flags(
        &self,
        val: Self::Value,
        ptr: Self::Value,
        align: Align,
        flags: MemFlags,
    ) -> Self::Value {
        debug!("Store {:?} -> {:?} ({:?})", val, ptr, flags);
        self.count_insn("store");
        // May cast `ptr` so its pointee type matches `val`'s type.
        let ptr = self.check_store(val, ptr);
        unsafe {
            let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
            let align = if flags.contains(MemFlags::UNALIGNED) {
                1
            } else {
                align.abi() as c_uint
            };
            llvm::LLVMSetAlignment(store, align);
            if flags.contains(MemFlags::VOLATILE) {
                llvm::LLVMSetVolatile(store, llvm::True);
            }
            if flags.contains(MemFlags::NONTEMPORAL) {
                // According to LLVM [1] building a nontemporal store must
                // *always* point to a metadata value of the integer 1.
                //
                // [1]: http://llvm.org/docs/LangRef.html#store-instruction
                let one = C_i32(self.cx, 1);
                let node = llvm::LLVMMDNodeInContext(self.cx.llcx, &one, 1);
                llvm::LLVMSetMetadata(store, llvm::MD_nontemporal as c_uint, node);
            }
            store
        }
    }

   /// Emits an atomic `store` of `val` to `ptr` with the given ordering.
   fn atomic_store(&self, val: Self::Value, ptr: Self::Value,
                   order: traits::AtomicOrdering, size: Size) {
        debug!("Store {:?} -> {:?}", val, ptr);
        self.count_insn("store.atomic");
        let ptr = self.check_store(val, ptr);
        unsafe {
            let store = llvm::LLVMRustBuildAtomicStore(
                self.llbuilder,
                val,
                ptr,
                AtomicOrdering::from_generic(order),
            );
            // LLVM requires the alignment of atomic stores to be at least the size of the type.
            llvm::LLVMSetAlignment(store, size.bytes() as c_uint);
        }
    }
600
    /// Emits a `getelementptr` with the given indices.
    fn gep(&self, ptr: Self::Value, indices: &[Self::Value]) -> Self::Value {
        self.count_insn("gep");
        unsafe {
            llvm::LLVMBuildGEP(self.llbuilder, ptr, indices.as_ptr(),
                               indices.len() as c_uint, noname())
        }
    }

    /// Emits a `getelementptr inbounds` with the given indices.
    fn inbounds_gep(&self, ptr: Self::Value, indices: &[Self::Value]) -> Self::Value {
        self.count_insn("inboundsgep");
        unsafe {
            llvm::LLVMBuildInBoundsGEP(
                self.llbuilder, ptr, indices.as_ptr(), indices.len() as c_uint, noname())
        }
    }
616
    /* Casts */
    // Thin wrappers over the corresponding LLVM cast instructions; each
    // converts `val` to `dest_ty`.
    fn trunc(&self, val: Self::Value, dest_ty: Self::Type) -> Self::Value {
        self.count_insn("trunc");
        unsafe {
            llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn sext(&self, val: Self::Value, dest_ty: Self::Type) -> Self::Value {
        self.count_insn("sext");
        unsafe {
            llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn fptoui(&self, val: Self::Value, dest_ty: Self::Type) -> Self::Value {
        self.count_insn("fptoui");
        unsafe {
            llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn fptosi(&self, val: Self::Value, dest_ty: Self::Type) -> Self::Value {
        self.count_insn("fptosi");
        unsafe {
            llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn uitofp(&self, val: Self::Value, dest_ty: Self::Type) -> Self::Value {
        self.count_insn("uitofp");
        unsafe {
            llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn sitofp(&self, val: Self::Value, dest_ty: Self::Type) -> Self::Value {
        self.count_insn("sitofp");
        unsafe {
            llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn fptrunc(&self, val: Self::Value, dest_ty: Self::Type) -> Self::Value {
        self.count_insn("fptrunc");
        unsafe {
            llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn fpext(&self, val: Self::Value, dest_ty: Self::Type) -> Self::Value {
        self.count_insn("fpext");
        unsafe {
            llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn ptrtoint(&self, val: Self::Value, dest_ty: Self::Type) -> Self::Value {
        self.count_insn("ptrtoint");
        unsafe {
            llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn inttoptr(&self, val: Self::Value, dest_ty: Self::Type) -> Self::Value {
        self.count_insn("inttoptr");
        unsafe {
            llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn bitcast(&self, val: Self::Value, dest_ty: Self::Type) -> Self::Value {
        self.count_insn("bitcast");
        unsafe {
            llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty, noname())
        }
    }


    // Integer cast that picks trunc/ext based on the types; `is_signed`
    // selects sign- vs zero-extension when widening.
    fn intcast(&self, val: Self::Value, dest_ty: Self::Type, is_signed: bool) -> Self::Value {
        self.count_insn("intcast");
        unsafe {
            llvm::LLVMRustBuildIntCast(self.llbuilder, val, dest_ty, is_signed)
        }
    }

    fn pointercast(&self, val: Self::Value, dest_ty: Self::Type) -> Self::Value {
        self.count_insn("pointercast");
        unsafe {
            llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty, noname())
        }
    }
709
    /* Comparisons */
    /// Emits an integer/pointer comparison with predicate `op`.
    fn icmp(&self, op: traits::IntPredicate, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("icmp");
        let op = llvm::IntPredicate::from_generic(op);
        unsafe {
            llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
        }
    }

    /// Emits a floating-point comparison with predicate `op`. Note: unlike
    /// `icmp`, `op` is cast directly rather than going through
    /// `from_generic` — presumably the discriminants already match; verify.
    fn fcmp(&self, op: traits::RealPredicate, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("fcmp");
        unsafe {
            llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
        }
    }
725
    /* Miscellaneous instructions */
    /// Emits a `phi` of type `ty` with no incoming values yet.
    fn empty_phi(&self, ty: Self::Type) -> Self::Value {
        self.count_insn("emptyphi");
        unsafe {
            llvm::LLVMBuildPhi(self.llbuilder, ty, noname())
        }
    }

    /// Emits a `phi` of type `ty` with incoming value `vals[i]` from block
    /// `bbs[i]`; the two slices must be the same length.
    fn phi(&self, ty: Self::Type, vals: &[Self::Value], bbs: &[Self::BasicBlock]) -> Self::Value {
        assert_eq!(vals.len(), bbs.len());
        let phi = self.empty_phi(ty);
        self.count_insn("addincoming");
        unsafe {
            llvm::LLVMAddIncoming(phi, vals.as_ptr(),
                                  bbs.as_ptr(),
                                  vals.len() as c_uint);
            phi
        }
    }
745
    /// Builds a call to an inline-asm expression with constraint string
    /// `cons`. Returns `None` if LLVM rejects the constraints as malformed.
    fn inline_asm_call(&self, asm: *const c_char, cons: *const c_char,
                       inputs: &[Self::Value], output: Self::Type,
                       volatile: bool, alignstack: bool,
                       dia: syntax::ast::AsmDialect) -> Option<Self::Value> {
        self.count_insn("inlineasm");

        let volatile = if volatile { llvm::True }
                       else        { llvm::False };
        let alignstack = if alignstack { llvm::True }
                         else          { llvm::False };

        // The asm expression's function type: input value types -> output.
        let argtys = inputs.iter().map(|v| {
            debug!("Asm Input Type: {:?}", *v);
            val_ty(*v)
        }).collect::<Vec<_>>();

        debug!("Asm Output Type: {:?}", output);
        let fty = type_::Type::func(&argtys[..], output);
        unsafe {
            // Ask LLVM to verify that the constraints are well-formed.
            let constraints_ok = llvm::LLVMRustInlineAsmVerify(fty, cons);
            debug!("Constraint verification result: {:?}", constraints_ok);
            if constraints_ok {
                let v = llvm::LLVMRustInlineAsm(
                    fty, asm, cons, volatile, alignstack, AsmDialect::from_generic(dia));
                Some(self.call(v, inputs, None))
            } else {
                // LLVM has detected an issue with our constraints, bail out
                None
            }
        }
    }
778
    /// Emits a `memcpy` intrinsic call of `size` bytes from `src` to `dst`
    /// with the given alignments.
    fn memcpy(&self, dst: Self::Value, dst_align: u64,
                  src: Self::Value, src_align: u64,
                  size: Self::Value, is_volatile: bool) -> Self::Value {
        unsafe {
            llvm::LLVMRustBuildMemCpy(self.llbuilder, dst, dst_align as c_uint,
                                      src, src_align as c_uint, size, is_volatile)
        }
    }

    /// Emits a `memmove` intrinsic call (like `memcpy`, but the ranges may
    /// overlap).
    fn memmove(&self, dst: Self::Value, dst_align: u64,
                  src: Self::Value, src_align: u64,
                  size: Self::Value, is_volatile: bool) -> Self::Value {
        unsafe {
            llvm::LLVMRustBuildMemMove(self.llbuilder, dst, dst_align as c_uint,
                                      src, src_align as c_uint, size, is_volatile)
        }
    }
796
    /// Emits a `minnum` intrinsic call; panics if the underlying LLVM is too
    /// old to provide it.
    fn minnum(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("minnum");
        unsafe {
            let instr = llvm::LLVMRustBuildMinNum(self.llbuilder, lhs, rhs);
            instr.expect("LLVMRustBuildMinNum is not available in LLVM version < 6.0")
        }
    }
    /// Emits a `maxnum` intrinsic call; panics if the underlying LLVM is too
    /// old to provide it.
    fn maxnum(&self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
        self.count_insn("maxnum");
        unsafe {
            let instr = llvm::LLVMRustBuildMaxNum(self.llbuilder, lhs, rhs);
            instr.expect("LLVMRustBuildMaxNum is not available in LLVM version < 6.0")
        }
    }

    /// Emits a `select cond, then_val, else_val`.
    fn select(
        &self, cond: Self::Value,
        then_val: Self::Value,
        else_val: Self::Value,
    ) -> Self::Value {
        self.count_insn("select");
        unsafe {
            llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, noname())
        }
    }

    /// Emits a `va_arg` instruction reading a value of type `ty` from `list`.
    #[allow(dead_code)]
    fn va_arg(&self, list: Self::Value, ty: Self::Type) -> Self::Value {
        self.count_insn("vaarg");
        unsafe {
            llvm::LLVMBuildVAArg(self.llbuilder, list, ty, noname())
        }
    }
830
    /// Emits `extractelement` of `vec` at index `idx`.
    fn extract_element(&self, vec: Self::Value, idx: Self::Value) -> Self::Value {
        self.count_insn("extractelement");
        unsafe {
            llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, noname())
        }
    }

    /// Emits `insertelement` of `elt` into `vec` at index `idx`.
    fn insert_element(
        &self, vec: Self::Value,
        elt: Self::Value,
        idx: Self::Value,
    ) -> Self::Value {
        self.count_insn("insertelement");
        unsafe {
            llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, noname())
        }
    }

    /// Emits `shufflevector` of `v1`/`v2` with shuffle `mask`.
    fn shuffle_vector(&self, v1: Self::Value, v2: Self::Value, mask: Self::Value) -> Self::Value {
        self.count_insn("shufflevector");
        unsafe {
            llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, noname())
        }
    }

    /// Broadcasts `elt` into a vector of `num_elts` lanes: insert into lane 0
    /// of an undef vector, then shuffle with an all-zero mask.
    fn vector_splat(&self, num_elts: usize, elt: Self::Value) -> Self::Value {
        unsafe {
            let elt_ty = val_ty(elt);
            let undef = llvm::LLVMGetUndef(type_::Type::vector(elt_ty, num_elts as u64));
            let vec = self.insert_element(undef, elt, C_i32(self.cx, 0));
            // A null (all-zero) i32 vector mask selects lane 0 for every lane.
            let vec_i32_ty = type_::Type::vector(type_::Type::i32(self.cx), num_elts as u64);
            self.shuffle_vector(vec, undef, C_null(vec_i32_ty))
        }
    }
865
866     fn vector_reduce_fadd_fast(&self, acc: Self::Value, src: Self::Value) -> Self::Value {
867         self.count_insn("vector.reduce.fadd_fast");
868         unsafe {
869             // FIXME: add a non-fast math version once
870             // https://bugs.llvm.org/show_bug.cgi?id=36732
871             // is fixed.
872             let instr = llvm::LLVMRustBuildVectorReduceFAdd(self.llbuilder, acc, src);
873             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
874             instr
875         }
876     }
877     fn vector_reduce_fmul_fast(&self, acc: Self::Value, src: Self::Value) -> Self::Value {
878         self.count_insn("vector.reduce.fmul_fast");
879         unsafe {
880             // FIXME: add a non-fast math version once
881             // https://bugs.llvm.org/show_bug.cgi?id=36732
882             // is fixed.
883             let instr = llvm::LLVMRustBuildVectorReduceFMul(self.llbuilder, acc, src);
884             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
885             instr
886         }
887     }
    /// Builds an integer add reduction over the lanes of the vector `src`.
    fn vector_reduce_add(&self, src: Self::Value) -> Self::Value {
        self.count_insn("vector.reduce.add");
        unsafe { llvm::LLVMRustBuildVectorReduceAdd(self.llbuilder, src) }
    }
    /// Builds an integer multiply reduction over the lanes of the vector `src`.
    fn vector_reduce_mul(&self, src: Self::Value) -> Self::Value {
        self.count_insn("vector.reduce.mul");
        unsafe { llvm::LLVMRustBuildVectorReduceMul(self.llbuilder, src) }
    }
    /// Builds a bitwise AND reduction over the lanes of the vector `src`.
    fn vector_reduce_and(&self, src: Self::Value) -> Self::Value {
        self.count_insn("vector.reduce.and");
        unsafe { llvm::LLVMRustBuildVectorReduceAnd(self.llbuilder, src) }
    }
    /// Builds a bitwise OR reduction over the lanes of the vector `src`.
    fn vector_reduce_or(&self, src: Self::Value) -> Self::Value {
        self.count_insn("vector.reduce.or");
        unsafe { llvm::LLVMRustBuildVectorReduceOr(self.llbuilder, src) }
    }
    /// Builds a bitwise XOR reduction over the lanes of the vector `src`.
    fn vector_reduce_xor(&self, src: Self::Value) -> Self::Value {
        self.count_insn("vector.reduce.xor");
        unsafe { llvm::LLVMRustBuildVectorReduceXor(self.llbuilder, src) }
    }
    /// Builds a floating-point min reduction over the lanes of `src`.
    /// `NoNaNs` is passed as false, i.e. NaN inputs are not assumed absent.
    fn vector_reduce_fmin(&self, src: Self::Value) -> Self::Value {
        self.count_insn("vector.reduce.fmin");
        unsafe { llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ false) }
    }
    /// Builds a floating-point max reduction over the lanes of `src`.
    /// `NoNaNs` is passed as false, i.e. NaN inputs are not assumed absent.
    fn vector_reduce_fmax(&self, src: Self::Value) -> Self::Value {
        self.count_insn("vector.reduce.fmax");
        unsafe { llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ false) }
    }
916     fn vector_reduce_fmin_fast(&self, src: Self::Value) -> Self::Value {
917         self.count_insn("vector.reduce.fmin_fast");
918         unsafe {
919             let instr = llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ true);
920             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
921             instr
922         }
923     }
924     fn vector_reduce_fmax_fast(&self, src: Self::Value) -> Self::Value {
925         self.count_insn("vector.reduce.fmax_fast");
926         unsafe {
927             let instr = llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ true);
928             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
929             instr
930         }
931     }
    /// Builds an integer min reduction over the lanes of `src`;
    /// `is_signed` selects signed vs. unsigned lane comparison.
    fn vector_reduce_min(&self, src: Self::Value, is_signed: bool) -> Self::Value {
        self.count_insn("vector.reduce.min");
        unsafe { llvm::LLVMRustBuildVectorReduceMin(self.llbuilder, src, is_signed) }
    }
    /// Builds an integer max reduction over the lanes of `src`;
    /// `is_signed` selects signed vs. unsigned lane comparison.
    fn vector_reduce_max(&self, src: Self::Value, is_signed: bool) -> Self::Value {
        self.count_insn("vector.reduce.max");
        unsafe { llvm::LLVMRustBuildVectorReduceMax(self.llbuilder, src, is_signed) }
    }
940
    /// Builds an `extractvalue` instruction reading aggregate element `idx`
    /// out of `agg_val`.
    fn extract_value(&self, agg_val: Self::Value, idx: u64) -> Self::Value {
        self.count_insn("extractvalue");
        // LLVM takes the index as a c_uint; make sure the u64 round-trips.
        assert_eq!(idx as c_uint as u64, idx);
        unsafe {
            llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, noname())
        }
    }
948
    /// Builds an `insertvalue` instruction: yields `agg_val` with `elt`
    /// stored at aggregate index `idx`.
    fn insert_value(&self, agg_val: Self::Value, elt: Self::Value,
                       idx: u64) -> Self::Value {
        self.count_insn("insertvalue");
        // LLVM takes the index as a c_uint; make sure the u64 round-trips.
        assert_eq!(idx as c_uint as u64, idx);
        unsafe {
            llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint,
                                       noname())
        }
    }
958
    /// Builds a `landingpad` instruction of type `ty` for personality
    /// function `pers_fn`, sized for `num_clauses` clauses (which are
    /// attached afterwards via `add_clause`).
    fn landing_pad(&self, ty: Self::Type, pers_fn: Self::Value,
                       num_clauses: usize) -> Self::Value {
        self.count_insn("landingpad");
        unsafe {
            llvm::LLVMBuildLandingPad(self.llbuilder, ty, pers_fn,
                                      num_clauses as c_uint, noname())
        }
    }
967
    /// Appends a catch/filter `clause` to an existing `landingpad`
    /// instruction.
    fn add_clause(&self, landing_pad: Self::Value, clause: Self::Value) {
        unsafe {
            llvm::LLVMAddClause(landing_pad, clause);
        }
    }
973
    /// Sets the cleanup flag (to true) on the given `landingpad` instruction.
    fn set_cleanup(&self, landing_pad: Self::Value) {
        self.count_insn("setcleanup");
        unsafe {
            llvm::LLVMSetCleanup(landing_pad, llvm::True);
        }
    }
980
    /// Builds a `resume` instruction that continues unwinding with the
    /// exception value `exn`.
    fn resume(&self, exn: Self::Value) -> Self::Value {
        self.count_insn("resume");
        unsafe {
            llvm::LLVMBuildResume(self.llbuilder, exn)
        }
    }
987
988     fn cleanup_pad(&self,
989                        parent: Option<Self::Value>,
990                        args: &[Self::Value]) -> Self::Value {
991         self.count_insn("cleanuppad");
992         let name = const_cstr!("cleanuppad");
993         let ret = unsafe {
994             llvm::LLVMRustBuildCleanupPad(self.llbuilder,
995                                           parent,
996                                           args.len() as c_uint,
997                                           args.as_ptr(),
998                                           name.as_ptr())
999         };
1000         ret.expect("LLVM does not have support for cleanuppad")
1001     }
1002
1003     fn cleanup_ret(
1004         &self, cleanup: Self::Value,
1005         unwind: Option<Self::BasicBlock>,
1006     ) -> Self::Value {
1007         self.count_insn("cleanupret");
1008         let ret = unsafe {
1009             llvm::LLVMRustBuildCleanupRet(self.llbuilder, cleanup, unwind)
1010         };
1011         ret.expect("LLVM does not have support for cleanupret")
1012     }
1013
1014     fn catch_pad(&self,
1015                      parent: Self::Value,
1016                      args: &[Self::Value]) -> Self::Value {
1017         self.count_insn("catchpad");
1018         let name = const_cstr!("catchpad");
1019         let ret = unsafe {
1020             llvm::LLVMRustBuildCatchPad(self.llbuilder, parent,
1021                                         args.len() as c_uint, args.as_ptr(),
1022                                         name.as_ptr())
1023         };
1024         ret.expect("LLVM does not have support for catchpad")
1025     }
1026
1027     fn catch_ret(&self, pad: Self::Value, unwind: Self::BasicBlock) -> Self::Value {
1028         self.count_insn("catchret");
1029         let ret = unsafe {
1030             llvm::LLVMRustBuildCatchRet(self.llbuilder, pad, unwind)
1031         };
1032         ret.expect("LLVM does not have support for catchret")
1033     }
1034
1035     fn catch_switch(
1036         &self,
1037         parent: Option<Self::Value>,
1038         unwind: Option<Self::BasicBlock>,
1039         num_handlers: usize,
1040     ) -> Self::Value {
1041         self.count_insn("catchswitch");
1042         let name = const_cstr!("catchswitch");
1043         let ret = unsafe {
1044             llvm::LLVMRustBuildCatchSwitch(self.llbuilder, parent, unwind,
1045                                            num_handlers as c_uint,
1046                                            name.as_ptr())
1047         };
1048         ret.expect("LLVM does not have support for catchswitch")
1049     }
1050
    /// Registers `handler` as a handler block of the `catchswitch`
    /// instruction `catch_switch`.
    fn add_handler(&self, catch_switch: Self::Value, handler: Self::BasicBlock) {
        unsafe {
            llvm::LLVMRustAddHandler(catch_switch, handler);
        }
    }
1056
    /// Sets the exception-handling personality function on the function
    /// currently being built (the builder's `llfn`).
    fn set_personality_fn(&self, personality: Self::Value) {
        unsafe {
            llvm::LLVMSetPersonalityFn(self.llfn(), personality);
        }
    }
1062
1063     // Atomic Operations
1064     fn atomic_cmpxchg(
1065         &self,
1066         dst: Self::Value,
1067         cmp: Self::Value,
1068         src: Self::Value,
1069         order: traits::AtomicOrdering,
1070         failure_order: traits::AtomicOrdering,
1071         weak: bool,
1072     ) -> Self::Value {
1073         let weak = if weak { llvm::True } else { llvm::False };
1074         unsafe {
1075             llvm::LLVMRustBuildAtomicCmpXchg(
1076                 self.llbuilder,
1077                 dst,
1078                 cmp,
1079                 src,
1080                 AtomicOrdering::from_generic(order),
1081                 AtomicOrdering::from_generic(failure_order),
1082                 weak
1083             )
1084         }
1085     }
    /// Builds an `atomicrmw` instruction applying `op` to `*dst` with
    /// operand `src` under the given memory `order`; returns the old value.
    fn atomic_rmw(
        &self,
        op: traits::AtomicRmwBinOp,
        dst: Self::Value,
        src: Self::Value,
        order: traits::AtomicOrdering,
    ) -> Self::Value {
        unsafe {
            llvm::LLVMBuildAtomicRMW(
                self.llbuilder,
                AtomicRmwBinOp::from_generic(op),
                dst,
                src,
                AtomicOrdering::from_generic(order),
                // NOTE(review): trailing flag is hard-coded False — presumably
                // LLVM's single-threaded flag; confirm against the FFI binding.
                False)
        }
    }
1103
    /// Builds a memory `fence` with the given ordering and synchronization
    /// scope, translating the backend-agnostic enums into their LLVM forms.
    fn atomic_fence(&self, order: traits::AtomicOrdering, scope: traits::SynchronizationScope) {
        unsafe {
            llvm::LLVMRustBuildAtomicFence(
                self.llbuilder,
                AtomicOrdering::from_generic(order),
                SynchronizationScope::from_generic(scope)
            );
        }
    }
1113
    /// Adds a `on_val => dest` arm to the `switch` instruction `s`.
    fn add_case(&self, s: Self::Value, on_val: Self::Value, dest: Self::BasicBlock) {
        unsafe {
            llvm::LLVMAddCase(s, on_val, dest)
        }
    }
1119
    /// Appends a single (value, predecessor-block) incoming pair to the
    /// `phi` instruction.
    fn add_incoming_to_phi(&self, phi: Self::Value, val: Self::Value, bb: Self::BasicBlock) {
        self.count_insn("addincoming");
        unsafe {
            llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
        }
    }
1126
    /// Attaches `!invariant.load` metadata (an empty MD node) to `load`,
    /// telling LLVM the loaded memory location does not change.
    fn set_invariant_load(&self, load: Self::Value) {
        unsafe {
            llvm::LLVMSetMetadata(load, llvm::MD_invariant_load as c_uint,
                                  llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0));
        }
    }
1133
1134     /// Returns the ptr value that should be used for storing `val`.
1135     fn check_store<'b>(&self,
1136                        val: Self::Value,
1137                        ptr: Self::Value) -> Self::Value {
1138         let dest_ptr_ty = val_ty(ptr);
1139         let stored_ty = val_ty(val);
1140         let stored_ptr_ty = stored_ty.ptr_to();
1141
1142         assert_eq!(dest_ptr_ty.kind(), llvm::TypeKind::Pointer);
1143
1144         if dest_ptr_ty == stored_ptr_ty {
1145             ptr
1146         } else {
1147             debug!("Type mismatch in store. \
1148                     Expected {:?}, got {:?}; inserting bitcast",
1149                    dest_ptr_ty, stored_ptr_ty);
1150             self.bitcast(ptr, stored_ptr_ty)
1151         }
1152     }
1153
1154     /// Returns the args that should be used for a call to `llfn`.
1155     fn check_call<'b>(&self,
1156                       typ: &str,
1157                       llfn: Self::Value,
1158                       args: &'b [Self::Value]) -> Cow<'b, [Self::Value]> {
1159         let mut fn_ty = val_ty(llfn);
1160         // Strip off pointers
1161         while fn_ty.kind() == llvm::TypeKind::Pointer {
1162             fn_ty = fn_ty.element_type();
1163         }
1164
1165         assert!(fn_ty.kind() == llvm::TypeKind::Function,
1166                 "builder::{} not passed a function, but {:?}", typ, fn_ty);
1167
1168         let param_tys = fn_ty.func_params();
1169
1170         let all_args_match = param_tys.iter()
1171             .zip(args.iter().map(|&v| val_ty(v)))
1172             .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty);
1173
1174         if all_args_match {
1175             return Cow::Borrowed(args);
1176         }
1177
1178         let casted_args: Vec<_> = param_tys.into_iter()
1179             .zip(args.iter())
1180             .enumerate()
1181             .map(|(i, (expected_ty, &actual_val))| {
1182                 let actual_ty = val_ty(actual_val);
1183                 if expected_ty != actual_ty {
1184                     debug!("Type mismatch in function call of {:?}. \
1185                             Expected {:?} for param {}, got {:?}; injecting bitcast",
1186                            llfn, expected_ty, i, actual_ty);
1187                     self.bitcast(actual_val, expected_ty)
1188                 } else {
1189                     actual_val
1190                 }
1191             })
1192             .collect();
1193
1194         Cow::Owned(casted_args)
1195     }
1196
    /// Emits an `llvm.lifetime.start` marker for `size` bytes at `ptr`.
    /// No-op at -O0 or for zero sizes; see `call_lifetime_intrinsic`.
    fn lifetime_start(&self, ptr: Self::Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.start", ptr, size);
    }
1200
    /// Emits an `llvm.lifetime.end` marker for `size` bytes at `ptr`.
    /// No-op at -O0 or for zero sizes; see `call_lifetime_intrinsic`.
    fn lifetime_end(&self, ptr: Self::Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.end", ptr, size);
    }
1204
1205     /// If LLVM lifetime intrinsic support is enabled (i.e. optimizations
1206     /// on), and `ptr` is nonzero-sized, then extracts the size of `ptr`
1207     /// and the intrinsic for `lt` and passes them to `emit`, which is in
1208     /// charge of generating code to call the passed intrinsic on whatever
1209     /// block of generated code is targeted for the intrinsic.
1210     ///
1211     /// If LLVM lifetime intrinsic support is disabled (i.e.  optimizations
1212     /// off) or `ptr` is zero-sized, then no-op (does not call `emit`).
1213     fn call_lifetime_intrinsic(&self, intrinsic: &str, ptr: Self::Value, size: Size) {
1214         if self.cx.sess().opts.optimize == config::OptLevel::No {
1215             return;
1216         }
1217
1218         let size = size.bytes();
1219         if size == 0 {
1220             return;
1221         }
1222
1223         let lifetime_intrinsic = self.cx.get_intrinsic(intrinsic);
1224
1225         let ptr = self.pointercast(ptr, type_::Type::i8p(self.cx));
1226         self.call(lifetime_intrinsic, &[C_u64(self.cx, size), ptr], None);
1227     }
1228
1229     fn call(&self, llfn: Self::Value, args: &[Self::Value],
1230                 bundle: Option<&traits::OperandBundleDef<'ll, Self::Value>>) -> Self::Value {
1231         self.count_insn("call");
1232
1233         debug!("Call {:?} with args ({:?})",
1234                llfn,
1235                args);
1236
1237         let args = self.check_call("call", llfn, args);
1238         let bundle = bundle.map(OperandBundleDef::from_generic);
1239         let bundle = bundle.as_ref().map(|b| &*b.raw);
1240
1241         unsafe {
1242             llvm::LLVMRustBuildCall(
1243                 self.llbuilder,
1244                 llfn,
1245                 args.as_ptr() as *const &llvm::Value,
1246                 args.len() as c_uint,
1247                 bundle, noname()
1248             )
1249         }
1250     }
1251
    /// Builds a `zext` instruction zero-extending `val` to the wider
    /// integer type `dest_ty`.
    fn zext(&self, val: Self::Value, dest_ty: Self::Type) -> Self::Value {
        self.count_insn("zext");
        unsafe {
            llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty, noname())
        }
    }
1258
    /// Builds a struct GEP: a pointer to field `idx` of the struct that
    /// `ptr` points to.
    fn struct_gep(&self, ptr: Self::Value, idx: u64) -> Self::Value {
        self.count_insn("structgep");
        // LLVM takes the field index as a c_uint; make sure the u64 round-trips.
        assert_eq!(idx as c_uint as u64, idx);
        unsafe {
            llvm::LLVMBuildStructGEP(self.llbuilder, ptr, idx as c_uint, noname())
        }
    }
1266
1267     fn cx(&self) -> &'a CodegenCx<'ll, 'tcx> {
1268         &self.cx
1269     }
1270 }