src/librustc_trans/builder.rs
1 // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 #![allow(dead_code)] // FFI wrappers
12
13 use llvm;
14 use llvm::{AtomicRmwBinOp, AtomicOrdering, SynchronizationScope, AsmDialect};
15 use llvm::{Opcode, IntPredicate, RealPredicate, False, OperandBundleDef};
16 use llvm::{ValueRef, BasicBlockRef, BuilderRef, ModuleRef};
17 use common::*;
18 use machine::llalign_of_pref;
19 use type_::Type;
20 use value::Value;
21 use libc::{c_uint, c_char};
22 use rustc::ty::{Ty, TyCtxt, TypeFoldable};
23 use rustc::session::Session;
24 use type_of;
25
26 use std::borrow::Cow;
27 use std::ffi::CString;
28 use std::ptr;
29 use syntax_pos::Span;
30
31 // All Builders must have an llfn associated with them
32 #[must_use]
33 pub struct Builder<'a, 'tcx: 'a> {
34     pub llbuilder: BuilderRef,
35     pub ccx: &'a CrateContext<'a, 'tcx>,
36 }
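// Illustrative sketch of typical use (the bindings here are hypothetical):
//
//     let bcx = Builder::new_block(ccx, llfn, "entry");
//     let sum = bcx.add(lhs, rhs);
//     bcx.ret(sum);
//
// `new_block` appends a basic block to `llfn` and leaves the builder positioned
// at its end, so every instruction built through `bcx` lands in that block.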
37
38 impl<'a, 'tcx> Drop for Builder<'a, 'tcx> {
39     fn drop(&mut self) {
40         unsafe {
41             llvm::LLVMDisposeBuilder(self.llbuilder);
42         }
43     }
44 }
45
46 // A cheap way to get a zero-length C string: a pointer to a static NUL byte.
47 // This avoids allocating a fresh CString for the name argument on every call.
48 fn noname() -> *const c_char {
49     static CNULL: c_char = 0;
50     &CNULL
51 }
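// Passing `noname()` as the name argument of the LLVMBuild* calls below leaves
// the produced value unnamed, so LLVM assigns it an automatically numbered name
// (%0, %1, ...) when the IR is printed.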
52
53 impl<'a, 'tcx> Builder<'a, 'tcx> {
54     pub fn new_block<'b>(ccx: &'a CrateContext<'a, 'tcx>, llfn: ValueRef, name: &'b str) -> Self {
55         let builder = Builder::with_ccx(ccx);
56         let llbb = unsafe {
57             let name = CString::new(name).unwrap();
58             llvm::LLVMAppendBasicBlockInContext(
59                 ccx.llcx(),
60                 llfn,
61                 name.as_ptr()
62             )
63         };
64         builder.position_at_end(llbb);
65         builder
66     }
67
68     pub fn with_ccx(ccx: &'a CrateContext<'a, 'tcx>) -> Self {
69         // Create a fresh builder from the crate context.
70         let llbuilder = unsafe {
71             llvm::LLVMCreateBuilderInContext(ccx.llcx())
72         };
73         Builder {
74             llbuilder: llbuilder,
75             ccx: ccx,
76         }
77     }
78
79     pub fn build_sibling_block<'b>(&self, name: &'b str) -> Builder<'a, 'tcx> {
80         Builder::new_block(self.ccx, self.llfn(), name)
81     }
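    // A sibling block is another basic block appended to the same llfn as
    // `self`. Sketch (hypothetical names):
    //
    //     let then_bcx = bcx.build_sibling_block("then");
    //     bcx.cond_br(cond, then_bcx.llbb(), else_llbb);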
82
83     pub fn sess(&self) -> &Session {
84         self.ccx.sess()
85     }
86
87     pub fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {
88         self.ccx.tcx()
89     }
90
91     pub fn llfn(&self) -> ValueRef {
92         unsafe {
93             llvm::LLVMGetBasicBlockParent(self.llbb())
94         }
95     }
96
97     pub fn llbb(&self) -> BasicBlockRef {
98         unsafe {
99             llvm::LLVMGetInsertBlock(self.llbuilder)
100         }
101     }
102
103     fn count_insn(&self, category: &str) {
104         if self.ccx.sess().trans_stats() {
105             self.ccx.stats().n_llvm_insns.set(self.ccx.stats().n_llvm_insns.get() + 1);
106         }
107         if self.ccx.sess().count_llvm_insns() {
108             let mut h = self.ccx.stats().llvm_insns.borrow_mut();
109             *h.entry(category.to_string()).or_insert(0) += 1;
110         }
111     }
112
113     pub fn position_before(&self, insn: ValueRef) {
114         unsafe {
115             llvm::LLVMPositionBuilderBefore(self.llbuilder, insn);
116         }
117     }
118
119     pub fn position_at_end(&self, llbb: BasicBlockRef) {
120         unsafe {
121             llvm::LLVMPositionBuilderAtEnd(self.llbuilder, llbb);
122         }
123     }
124
125     pub fn position_at_start(&self, llbb: BasicBlockRef) {
126         unsafe {
127             llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
128         }
129     }
130
131     pub fn ret_void(&self) {
132         self.count_insn("retvoid");
133         unsafe {
134             llvm::LLVMBuildRetVoid(self.llbuilder);
135         }
136     }
137
138     pub fn ret(&self, v: ValueRef) {
139         self.count_insn("ret");
140         unsafe {
141             llvm::LLVMBuildRet(self.llbuilder, v);
142         }
143     }
144
145     pub fn aggregate_ret(&self, ret_vals: &[ValueRef]) {
146         unsafe {
147             llvm::LLVMBuildAggregateRet(self.llbuilder,
148                                         ret_vals.as_ptr(),
149                                         ret_vals.len() as c_uint);
150         }
151     }
152
153     pub fn br(&self, dest: BasicBlockRef) {
154         self.count_insn("br");
155         unsafe {
156             llvm::LLVMBuildBr(self.llbuilder, dest);
157         }
158     }
159
160     pub fn cond_br(&self, cond: ValueRef, then_llbb: BasicBlockRef, else_llbb: BasicBlockRef) {
161         self.count_insn("condbr");
162         unsafe {
163             llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
164         }
165     }
166
167     pub fn switch(&self, v: ValueRef, else_llbb: BasicBlockRef, num_cases: usize) -> ValueRef {
168         unsafe {
169             llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, num_cases as c_uint)
170         }
171     }
172
173     pub fn indirect_br(&self, addr: ValueRef, num_dests: usize) {
174         self.count_insn("indirectbr");
175         unsafe {
176             llvm::LLVMBuildIndirectBr(self.llbuilder, addr, num_dests as c_uint);
177         }
178     }
179
180     pub fn invoke(&self,
181                   llfn: ValueRef,
182                   args: &[ValueRef],
183                   then: BasicBlockRef,
184                   catch: BasicBlockRef,
185                   bundle: Option<&OperandBundleDef>) -> ValueRef {
186         self.count_insn("invoke");
187
188         debug!("Invoke {:?} with args ({})",
189                Value(llfn),
190                args.iter()
191                    .map(|&v| format!("{:?}", Value(v)))
192                    .collect::<Vec<String>>()
193                    .join(", "));
194
195         let args = self.check_call("invoke", llfn, args);
196         let bundle = bundle.as_ref().map(|b| b.raw()).unwrap_or(ptr::null_mut());
197
198         unsafe {
199             llvm::LLVMRustBuildInvoke(self.llbuilder,
200                                       llfn,
201                                       args.as_ptr(),
202                                       args.len() as c_uint,
203                                       then,
204                                       catch,
205                                       bundle,
206                                       noname())
207         }
208     }
209
210     pub fn unreachable(&self) {
211         self.count_insn("unreachable");
212         unsafe {
213             llvm::LLVMBuildUnreachable(self.llbuilder);
214         }
215     }
216
217     /* Arithmetic */
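    // Note on the variants below: the `nsw*`/`nuw*` wrappers emit LLVM's
    // "no signed wrap"/"no unsigned wrap" forms (overflow yields poison), and
    // the `*_fast` wrappers additionally mark the result as having unsafe
    // algebra (fast-math) via LLVMRustSetHasUnsafeAlgebra.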
218     pub fn add(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
219         self.count_insn("add");
220         unsafe {
221             llvm::LLVMBuildAdd(self.llbuilder, lhs, rhs, noname())
222         }
223     }
224
225     pub fn nswadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
226         self.count_insn("nswadd");
227         unsafe {
228             llvm::LLVMBuildNSWAdd(self.llbuilder, lhs, rhs, noname())
229         }
230     }
231
232     pub fn nuwadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
233         self.count_insn("nuwadd");
234         unsafe {
235             llvm::LLVMBuildNUWAdd(self.llbuilder, lhs, rhs, noname())
236         }
237     }
238
239     pub fn fadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
240         self.count_insn("fadd");
241         unsafe {
242             llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname())
243         }
244     }
245
246     pub fn fadd_fast(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
247         self.count_insn("fadd");
248         unsafe {
249             let instr = llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname());
250             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
251             instr
252         }
253     }
254
255     pub fn sub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
256         self.count_insn("sub");
257         unsafe {
258             llvm::LLVMBuildSub(self.llbuilder, lhs, rhs, noname())
259         }
260     }
261
262     pub fn nswsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
263         self.count_insn("nswsub");
264         unsafe {
265             llvm::LLVMBuildNSWSub(self.llbuilder, lhs, rhs, noname())
266         }
267     }
268
269     pub fn nuwsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
270         self.count_insn("nuwsub");
271         unsafe {
272             llvm::LLVMBuildNUWSub(self.llbuilder, lhs, rhs, noname())
273         }
274     }
275
276     pub fn fsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
277         self.count_insn("fsub");
278         unsafe {
279             llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname())
280         }
281     }
282
283     pub fn fsub_fast(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
284         self.count_insn("fsub");
285         unsafe {
286             let instr = llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname());
287             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
288             instr
289         }
290     }
291
292     pub fn mul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
293         self.count_insn("mul");
294         unsafe {
295             llvm::LLVMBuildMul(self.llbuilder, lhs, rhs, noname())
296         }
297     }
298
299     pub fn nswmul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
300         self.count_insn("nswmul");
301         unsafe {
302             llvm::LLVMBuildNSWMul(self.llbuilder, lhs, rhs, noname())
303         }
304     }
305
306     pub fn nuwmul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
307         self.count_insn("nuwmul");
308         unsafe {
309             llvm::LLVMBuildNUWMul(self.llbuilder, lhs, rhs, noname())
310         }
311     }
312
313     pub fn fmul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
314         self.count_insn("fmul");
315         unsafe {
316             llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname())
317         }
318     }
319
320     pub fn fmul_fast(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
321         self.count_insn("fmul");
322         unsafe {
323             let instr = llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname());
324             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
325             instr
326         }
327     }
328
329
330     pub fn udiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
331         self.count_insn("udiv");
332         unsafe {
333             llvm::LLVMBuildUDiv(self.llbuilder, lhs, rhs, noname())
334         }
335     }
336
337     pub fn sdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
338         self.count_insn("sdiv");
339         unsafe {
340             llvm::LLVMBuildSDiv(self.llbuilder, lhs, rhs, noname())
341         }
342     }
343
344     pub fn exactsdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
345         self.count_insn("exactsdiv");
346         unsafe {
347             llvm::LLVMBuildExactSDiv(self.llbuilder, lhs, rhs, noname())
348         }
349     }
350
351     pub fn fdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
352         self.count_insn("fdiv");
353         unsafe {
354             llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname())
355         }
356     }
357
358     pub fn fdiv_fast(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
359         self.count_insn("fdiv");
360         unsafe {
361             let instr = llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname());
362             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
363             instr
364         }
365     }
366
367     pub fn urem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
368         self.count_insn("urem");
369         unsafe {
370             llvm::LLVMBuildURem(self.llbuilder, lhs, rhs, noname())
371         }
372     }
373
374     pub fn srem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
375         self.count_insn("srem");
376         unsafe {
377             llvm::LLVMBuildSRem(self.llbuilder, lhs, rhs, noname())
378         }
379     }
380
381     pub fn frem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
382         self.count_insn("frem");
383         unsafe {
384             llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname())
385         }
386     }
387
388     pub fn frem_fast(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
389         self.count_insn("frem");
390         unsafe {
391             let instr = llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname());
392             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
393             instr
394         }
395     }
396
397     pub fn shl(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
398         self.count_insn("shl");
399         unsafe {
400             llvm::LLVMBuildShl(self.llbuilder, lhs, rhs, noname())
401         }
402     }
403
404     pub fn lshr(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
405         self.count_insn("lshr");
406         unsafe {
407             llvm::LLVMBuildLShr(self.llbuilder, lhs, rhs, noname())
408         }
409     }
410
411     pub fn ashr(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
412         self.count_insn("ashr");
413         unsafe {
414             llvm::LLVMBuildAShr(self.llbuilder, lhs, rhs, noname())
415         }
416     }
417
418     pub fn and(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
419         self.count_insn("and");
420         unsafe {
421             llvm::LLVMBuildAnd(self.llbuilder, lhs, rhs, noname())
422         }
423     }
424
425     pub fn or(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
426         self.count_insn("or");
427         unsafe {
428             llvm::LLVMBuildOr(self.llbuilder, lhs, rhs, noname())
429         }
430     }
431
432     pub fn xor(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
433         self.count_insn("xor");
434         unsafe {
435             llvm::LLVMBuildXor(self.llbuilder, lhs, rhs, noname())
436         }
437     }
438
439     pub fn binop(&self, op: Opcode, lhs: ValueRef, rhs: ValueRef)
440               -> ValueRef {
441         self.count_insn("binop");
442         unsafe {
443             llvm::LLVMBuildBinOp(self.llbuilder, op, lhs, rhs, noname())
444         }
445     }
446
447     pub fn neg(&self, v: ValueRef) -> ValueRef {
448         self.count_insn("neg");
449         unsafe {
450             llvm::LLVMBuildNeg(self.llbuilder, v, noname())
451         }
452     }
453
454     pub fn nswneg(&self, v: ValueRef) -> ValueRef {
455         self.count_insn("nswneg");
456         unsafe {
457             llvm::LLVMBuildNSWNeg(self.llbuilder, v, noname())
458         }
459     }
460
461     pub fn nuwneg(&self, v: ValueRef) -> ValueRef {
462         self.count_insn("nuwneg");
463         unsafe {
464             llvm::LLVMBuildNUWNeg(self.llbuilder, v, noname())
465         }
466     }
467     pub fn fneg(&self, v: ValueRef) -> ValueRef {
468         self.count_insn("fneg");
469         unsafe {
470             llvm::LLVMBuildFNeg(self.llbuilder, v, noname())
471         }
472     }
473
474     pub fn not(&self, v: ValueRef) -> ValueRef {
475         self.count_insn("not");
476         unsafe {
477             llvm::LLVMBuildNot(self.llbuilder, v, noname())
478         }
479     }
480
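    /// Emits an `alloca` of type `ty` in the function's entry block: a fresh
    /// builder is positioned at the start of the first basic block, so the
    /// alloca lands there no matter where `self` is currently inserting.
    /// Keeping allocas in the entry block is what lets LLVM's mem2reg promote
    /// them to SSA values; use `dynamic_alloca` to allocate at the current
    /// insertion point instead.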
481     pub fn alloca(&self, ty: Type, name: &str) -> ValueRef {
482         let builder = Builder::with_ccx(self.ccx);
483         builder.position_at_start(unsafe {
484             llvm::LLVMGetFirstBasicBlock(self.llfn())
485         });
486         builder.dynamic_alloca(ty, name)
487     }
488
489     pub fn alloca_ty(&self, ty: Ty<'tcx>, name: &str) -> ValueRef {
490         assert!(!ty.has_param_types());
491         self.alloca(type_of::type_of(self.ccx, ty), name)
492     }
493
494     pub fn dynamic_alloca(&self, ty: Type, name: &str) -> ValueRef {
495         self.count_insn("alloca");
496         unsafe {
497             if name.is_empty() {
498                 llvm::LLVMBuildAlloca(self.llbuilder, ty.to_ref(), noname())
499             } else {
500                 let name = CString::new(name).unwrap();
501                 llvm::LLVMBuildAlloca(self.llbuilder, ty.to_ref(),
502                                       name.as_ptr())
503             }
504         }
505     }
506
507     pub fn free(&self, ptr: ValueRef) {
508         self.count_insn("free");
509         unsafe {
510             llvm::LLVMBuildFree(self.llbuilder, ptr);
511         }
512     }
513
514     pub fn load(&self, ptr: ValueRef) -> ValueRef {
515         self.count_insn("load");
516         unsafe {
517             llvm::LLVMBuildLoad(self.llbuilder, ptr, noname())
518         }
519     }
520
521     pub fn volatile_load(&self, ptr: ValueRef) -> ValueRef {
522         self.count_insn("load.volatile");
523         unsafe {
524             let insn = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
525             llvm::LLVMSetVolatile(insn, llvm::True);
526             insn
527         }
528     }
529
530     pub fn atomic_load(&self, ptr: ValueRef, order: AtomicOrdering) -> ValueRef {
531         self.count_insn("load.atomic");
532         unsafe {
533             let ty = Type::from_ref(llvm::LLVMTypeOf(ptr));
534             let align = llalign_of_pref(self.ccx, ty.element_type());
535             llvm::LLVMRustBuildAtomicLoad(self.llbuilder, ptr, noname(), order,
536                                           align as c_uint)
537         }
538     }
539
540
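    /// Emits a load and attaches `!range` metadata asserting that the loaded
    /// value lies in the half-open range `[lo, hi)` (LLVM range metadata is
    /// half-open), letting LLVM optimize under that assumption.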
541     pub fn load_range_assert(&self, ptr: ValueRef, lo: u64,
542                              hi: u64, signed: llvm::Bool) -> ValueRef {
543         let value = self.load(ptr);
544
545         unsafe {
546             let t = llvm::LLVMGetElementType(llvm::LLVMTypeOf(ptr));
547             let min = llvm::LLVMConstInt(t, lo, signed);
548             let max = llvm::LLVMConstInt(t, hi, signed);
549
550             let v = [min, max];
551
552             llvm::LLVMSetMetadata(value, llvm::MD_range as c_uint,
553                                   llvm::LLVMMDNodeInContext(self.ccx.llcx(),
554                                                             v.as_ptr(),
555                                                             v.len() as c_uint));
556         }
557
558         value
559     }
560
561     pub fn load_nonnull(&self, ptr: ValueRef) -> ValueRef {
562         let value = self.load(ptr);
563         unsafe {
564             llvm::LLVMSetMetadata(value, llvm::MD_nonnull as c_uint,
565                                   llvm::LLVMMDNodeInContext(self.ccx.llcx(), ptr::null(), 0));
566         }
567
568         value
569     }
570
571     pub fn store(&self, val: ValueRef, ptr: ValueRef, align: Option<u32>) -> ValueRef {
572         debug!("Store {:?} -> {:?}", Value(val), Value(ptr));
573         assert!(!self.llbuilder.is_null());
574         self.count_insn("store");
575         let ptr = self.check_store(val, ptr);
576         unsafe {
577             let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
578             if let Some(align) = align {
579                 llvm::LLVMSetAlignment(store, align as c_uint);
580             }
581             store
582         }
583     }
584
585     pub fn volatile_store(&self, val: ValueRef, ptr: ValueRef) -> ValueRef {
586         debug!("Store {:?} -> {:?}", Value(val), Value(ptr));
587         assert!(!self.llbuilder.is_null());
588         self.count_insn("store.volatile");
589         let ptr = self.check_store(val, ptr);
590         unsafe {
591             let insn = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
592             llvm::LLVMSetVolatile(insn, llvm::True);
593             insn
594         }
595     }
596
597     pub fn atomic_store(&self, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) {
598         debug!("Store {:?} -> {:?}", Value(val), Value(ptr));
599         self.count_insn("store.atomic");
600         let ptr = self.check_store(val, ptr);
601         unsafe {
602             let ty = Type::from_ref(llvm::LLVMTypeOf(ptr));
603             let align = llalign_of_pref(self.ccx, ty.element_type());
604             llvm::LLVMRustBuildAtomicStore(self.llbuilder, val, ptr, order, align as c_uint);
605         }
606     }
607
608     pub fn gep(&self, ptr: ValueRef, indices: &[ValueRef]) -> ValueRef {
609         self.count_insn("gep");
610         unsafe {
611             llvm::LLVMBuildGEP(self.llbuilder, ptr, indices.as_ptr(),
612                                indices.len() as c_uint, noname())
613         }
614     }
615
616     // Simple wrapper around inbounds GEP that takes an array of integer
617     // indices and wraps each one in C_i32().
618     #[inline]
619     pub fn gepi(&self, base: ValueRef, ixs: &[usize]) -> ValueRef {
620         // Small-vector optimization: fewer than 16 indices covers essentially
621         // every GEP we emit, so build the constants in a fixed-size stack array.
622         if ixs.len() < 16 {
623             let mut small_vec = [ C_i32(self.ccx, 0); 16 ];
624             for (small_vec_e, &ix) in small_vec.iter_mut().zip(ixs) {
625                 *small_vec_e = C_i32(self.ccx, ix as i32);
626             }
627             self.inbounds_gep(base, &small_vec[..ixs.len()])
628         } else {
629             let v = ixs.iter().map(|i| C_i32(self.ccx, *i as i32)).collect::<Vec<ValueRef>>();
630             self.count_insn("gepi");
631             self.inbounds_gep(base, &v[..])
632         }
633     }
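    // For example (sketch), `bcx.gepi(base, &[0, 2])` emits the same
    // instruction as
    // `bcx.inbounds_gep(base, &[C_i32(ccx, 0), C_i32(ccx, 2)])`, without the
    // caller having to build the i32 constants by hand.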
634
635     pub fn inbounds_gep(&self, ptr: ValueRef, indices: &[ValueRef]) -> ValueRef {
636         self.count_insn("inboundsgep");
637         unsafe {
638             llvm::LLVMBuildInBoundsGEP(
639                 self.llbuilder, ptr, indices.as_ptr(), indices.len() as c_uint, noname())
640         }
641     }
642
643     pub fn struct_gep(&self, ptr: ValueRef, idx: usize) -> ValueRef {
644         self.count_insn("structgep");
645         unsafe {
646             llvm::LLVMBuildStructGEP(self.llbuilder, ptr, idx as c_uint, noname())
647         }
648     }
649
650     pub fn global_string(&self, _str: *const c_char) -> ValueRef {
651         self.count_insn("globalstring");
652         unsafe {
653             llvm::LLVMBuildGlobalString(self.llbuilder, _str, noname())
654         }
655     }
656
657     pub fn global_string_ptr(&self, _str: *const c_char) -> ValueRef {
658         self.count_insn("globalstringptr");
659         unsafe {
660             llvm::LLVMBuildGlobalStringPtr(self.llbuilder, _str, noname())
661         }
662     }
663
664     /* Casts */
665     pub fn trunc(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
666         self.count_insn("trunc");
667         unsafe {
668             llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty.to_ref(), noname())
669         }
670     }
671
672     pub fn zext(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
673         self.count_insn("zext");
674         unsafe {
675             llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty.to_ref(), noname())
676         }
677     }
678
679     pub fn sext(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
680         self.count_insn("sext");
681         unsafe {
682             llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty.to_ref(), noname())
683         }
684     }
685
686     pub fn fptoui(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
687         self.count_insn("fptoui");
688         unsafe {
689             llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty.to_ref(), noname())
690         }
691     }
692
693     pub fn fptosi(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
694         self.count_insn("fptosi");
695         unsafe {
696             llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty.to_ref(), noname())
697         }
698     }
699
700     pub fn uitofp(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
701         self.count_insn("uitofp");
702         unsafe {
703             llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty.to_ref(), noname())
704         }
705     }
706
707     pub fn sitofp(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
708         self.count_insn("sitofp");
709         unsafe {
710             llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty.to_ref(), noname())
711         }
712     }
713
714     pub fn fptrunc(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
715         self.count_insn("fptrunc");
716         unsafe {
717             llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty.to_ref(), noname())
718         }
719     }
720
721     pub fn fpext(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
722         self.count_insn("fpext");
723         unsafe {
724             llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty.to_ref(), noname())
725         }
726     }
727
728     pub fn ptrtoint(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
729         self.count_insn("ptrtoint");
730         unsafe {
731             llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty.to_ref(), noname())
732         }
733     }
734
735     pub fn inttoptr(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
736         self.count_insn("inttoptr");
737         unsafe {
738             llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty.to_ref(), noname())
739         }
740     }
741
742     pub fn bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
743         self.count_insn("bitcast");
744         unsafe {
745             llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
746         }
747     }
748
749     pub fn zext_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
750         self.count_insn("zextorbitcast");
751         unsafe {
752             llvm::LLVMBuildZExtOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
753         }
754     }
755
756     pub fn sext_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
757         self.count_insn("sextorbitcast");
758         unsafe {
759             llvm::LLVMBuildSExtOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
760         }
761     }
762
763     pub fn trunc_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
764         self.count_insn("truncorbitcast");
765         unsafe {
766             llvm::LLVMBuildTruncOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
767         }
768     }
769
770     pub fn cast(&self, op: Opcode, val: ValueRef, dest_ty: Type) -> ValueRef {
771         self.count_insn("cast");
772         unsafe {
773             llvm::LLVMBuildCast(self.llbuilder, op, val, dest_ty.to_ref(), noname())
774         }
775     }
776
777     pub fn pointercast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
778         self.count_insn("pointercast");
779         unsafe {
780             llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty.to_ref(), noname())
781         }
782     }
783
784     pub fn intcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
785         self.count_insn("intcast");
786         unsafe {
787             llvm::LLVMBuildIntCast(self.llbuilder, val, dest_ty.to_ref(), noname())
788         }
789     }
790
791     pub fn fpcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
792         self.count_insn("fpcast");
793         unsafe {
794             llvm::LLVMBuildFPCast(self.llbuilder, val, dest_ty.to_ref(), noname())
795         }
796     }
797
798
799     /* Comparisons */
800     pub fn icmp(&self, op: IntPredicate, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
801         self.count_insn("icmp");
802         unsafe {
803             llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
804         }
805     }
806
807     pub fn fcmp(&self, op: RealPredicate, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
808         self.count_insn("fcmp");
809         unsafe {
810             llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
811         }
812     }
813
814     /* Miscellaneous instructions */
815     pub fn empty_phi(&self, ty: Type) -> ValueRef {
816         self.count_insn("emptyphi");
817         unsafe {
818             llvm::LLVMBuildPhi(self.llbuilder, ty.to_ref(), noname())
819         }
820     }
821
822     pub fn phi(&self, ty: Type, vals: &[ValueRef], bbs: &[BasicBlockRef]) -> ValueRef {
823         assert_eq!(vals.len(), bbs.len());
824         let phi = self.empty_phi(ty);
825         self.count_insn("addincoming");
826         unsafe {
827             llvm::LLVMAddIncoming(phi, vals.as_ptr(),
828                                   bbs.as_ptr(),
829                                   vals.len() as c_uint);
830             phi
831         }
832     }
833
834     pub fn add_span_comment(&self, sp: Span, text: &str) {
835         if self.ccx.sess().asm_comments() {
836             let s = format!("{} ({})",
837                             text,
838                             self.ccx.sess().codemap().span_to_string(sp));
839             debug!("{}", &s[..]);
840             self.add_comment(&s[..]);
841         }
842     }
843
844     pub fn add_comment(&self, text: &str) {
845         if self.ccx.sess().asm_comments() {
846             let sanitized = text.replace("$", "");
847             let comment_text = format!("{} {}", "#",
848                                        sanitized.replace("\n", "\n\t# "));
849             self.count_insn("inlineasm");
850             let comment_text = CString::new(comment_text).unwrap();
851             let asm = unsafe {
852                 llvm::LLVMConstInlineAsm(Type::func(&[], &Type::void(self.ccx)).to_ref(),
853                                          comment_text.as_ptr(), noname(), False,
854                                          False)
855             };
856             self.call(asm, &[], None);
857         }
858     }
859
860     pub fn inline_asm_call(&self, asm: *const c_char, cons: *const c_char,
861                          inputs: &[ValueRef], output: Type,
862                          volatile: bool, alignstack: bool,
863                          dia: AsmDialect) -> ValueRef {
864         self.count_insn("inlineasm");
865
866         let volatile = if volatile { llvm::True }
867                        else        { llvm::False };
868         let alignstack = if alignstack { llvm::True }
869                          else          { llvm::False };
870
871         let argtys = inputs.iter().map(|v| {
872             debug!("Asm Input Type: {:?}", Value(*v));
873             val_ty(*v)
874         }).collect::<Vec<_>>();
875
876         debug!("Asm Output Type: {:?}", output);
877         let fty = Type::func(&argtys[..], &output);
878         unsafe {
879             let v = llvm::LLVMRustInlineAsm(
880                 fty.to_ref(), asm, cons, volatile, alignstack, dia);
881             self.call(v, inputs, None)
882         }
883     }
884
885     pub fn call(&self, llfn: ValueRef, args: &[ValueRef],
886                 bundle: Option<&OperandBundleDef>) -> ValueRef {
887         self.count_insn("call");
888
889         debug!("Call {:?} with args ({})",
890                Value(llfn),
891                args.iter()
892                    .map(|&v| format!("{:?}", Value(v)))
893                    .collect::<Vec<String>>()
894                    .join(", "));
895
896         let args = self.check_call("call", llfn, args);
897         let bundle = bundle.as_ref().map(|b| b.raw()).unwrap_or(ptr::null_mut());
898
899         unsafe {
900             llvm::LLVMRustBuildCall(self.llbuilder, llfn, args.as_ptr(),
901                                     args.len() as c_uint, bundle, noname())
902         }
903     }
904
905     pub fn select(&self, cond: ValueRef, then_val: ValueRef, else_val: ValueRef) -> ValueRef {
906         self.count_insn("select");
907         unsafe {
908             llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, noname())
909         }
910     }
911
912     pub fn va_arg(&self, list: ValueRef, ty: Type) -> ValueRef {
913         self.count_insn("vaarg");
914         unsafe {
915             llvm::LLVMBuildVAArg(self.llbuilder, list, ty.to_ref(), noname())
916         }
917     }
918
919     pub fn extract_element(&self, vec: ValueRef, idx: ValueRef) -> ValueRef {
920         self.count_insn("extractelement");
921         unsafe {
922             llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, noname())
923         }
924     }
925
926     pub fn insert_element(&self, vec: ValueRef, elt: ValueRef, idx: ValueRef) -> ValueRef {
927         self.count_insn("insertelement");
928         unsafe {
929             llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, noname())
930         }
931     }
932
933     pub fn shuffle_vector(&self, v1: ValueRef, v2: ValueRef, mask: ValueRef) -> ValueRef {
934         self.count_insn("shufflevector");
935         unsafe {
936             llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, noname())
937         }
938     }
939
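    /// Splats `elt` across a vector of `num_elts` lanes using the standard
    /// insertelement + shufflevector idiom: `elt` is inserted into lane 0 of an
    /// undef vector, which is then shuffled with an all-zero mask (the
    /// `C_null` i32 vector), broadcasting lane 0 into every lane.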
940     pub fn vector_splat(&self, num_elts: usize, elt: ValueRef) -> ValueRef {
941         unsafe {
942             let elt_ty = val_ty(elt);
943             let undef = llvm::LLVMGetUndef(Type::vector(&elt_ty, num_elts as u64).to_ref());
944             let vec = self.insert_element(undef, elt, C_i32(self.ccx, 0));
945             let vec_i32_ty = Type::vector(&Type::i32(self.ccx), num_elts as u64);
946             self.shuffle_vector(vec, undef, C_null(vec_i32_ty))
947         }
948     }
949
950     pub fn extract_value(&self, agg_val: ValueRef, idx: usize) -> ValueRef {
951         self.count_insn("extractvalue");
952         unsafe {
953             llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, noname())
954         }
955     }
956
957     pub fn insert_value(&self, agg_val: ValueRef, elt: ValueRef,
958                        idx: usize) -> ValueRef {
959         self.count_insn("insertvalue");
960         unsafe {
961             llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint,
962                                        noname())
963         }
964     }
965
966     pub fn is_null(&self, val: ValueRef) -> ValueRef {
967         self.count_insn("isnull");
968         unsafe {
969             llvm::LLVMBuildIsNull(self.llbuilder, val, noname())
970         }
971     }
972
973     pub fn is_not_null(&self, val: ValueRef) -> ValueRef {
974         self.count_insn("isnotnull");
975         unsafe {
976             llvm::LLVMBuildIsNotNull(self.llbuilder, val, noname())
977         }
978     }
979
980     pub fn ptrdiff(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
981         self.count_insn("ptrdiff");
982         unsafe {
983             llvm::LLVMBuildPtrDiff(self.llbuilder, lhs, rhs, noname())
984         }
985     }
986
987     pub fn trap(&self) {
988         unsafe {
989             let bb: BasicBlockRef = llvm::LLVMGetInsertBlock(self.llbuilder);
990             let fn_: ValueRef = llvm::LLVMGetBasicBlockParent(bb);
991             let m: ModuleRef = llvm::LLVMGetGlobalParent(fn_);
992             let p = "llvm.trap\0".as_ptr();
993             let t: ValueRef = llvm::LLVMGetNamedFunction(m, p as *const _);
994             assert!(!t.is_null());
995             let args: &[ValueRef] = &[];
996             self.count_insn("trap");
997             llvm::LLVMRustBuildCall(self.llbuilder, t,
998                                     args.as_ptr(), args.len() as c_uint,
999                                     ptr::null_mut(),
1000                                     noname());
1001         }
1002     }
1003
1004     pub fn landing_pad(&self, ty: Type, pers_fn: ValueRef,
1005                        num_clauses: usize,
1006                        llfn: ValueRef) -> ValueRef {
1007         self.count_insn("landingpad");
1008         unsafe {
1009             llvm::LLVMRustBuildLandingPad(self.llbuilder, ty.to_ref(), pers_fn,
1010                                           num_clauses as c_uint, noname(), llfn)
1011         }
1012     }
1013
1014     pub fn add_clause(&self, landing_pad: ValueRef, clause: ValueRef) {
1015         unsafe {
1016             llvm::LLVMAddClause(landing_pad, clause);
1017         }
1018     }
1019
1020     pub fn set_cleanup(&self, landing_pad: ValueRef) {
1021         self.count_insn("setcleanup");
1022         unsafe {
1023             llvm::LLVMSetCleanup(landing_pad, llvm::True);
1024         }
1025     }
1026
1027     pub fn resume(&self, exn: ValueRef) -> ValueRef {
1028         self.count_insn("resume");
1029         unsafe {
1030             llvm::LLVMBuildResume(self.llbuilder, exn)
1031         }
1032     }
1033
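    // The cleanup_pad/cleanup_ret/catch_pad/catch_ret/catch_switch wrappers
    // below correspond to LLVM's funclet-based exception-handling instructions
    // (used for MSVC-style unwinding), whereas landing_pad/resume above map to
    // the Itanium-style landingpad/resume scheme.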
1034     pub fn cleanup_pad(&self,
1035                        parent: Option<ValueRef>,
1036                        args: &[ValueRef]) -> ValueRef {
1037         self.count_insn("cleanuppad");
1038         let parent = parent.unwrap_or(ptr::null_mut());
1039         let name = CString::new("cleanuppad").unwrap();
1040         let ret = unsafe {
1041             llvm::LLVMRustBuildCleanupPad(self.llbuilder,
1042                                           parent,
1043                                           args.len() as c_uint,
1044                                           args.as_ptr(),
1045                                           name.as_ptr())
1046         };
1047         assert!(!ret.is_null(), "LLVM does not have support for cleanuppad");
1048         return ret
1049     }
1050
1051     pub fn cleanup_ret(&self, cleanup: ValueRef,
1052                        unwind: Option<BasicBlockRef>) -> ValueRef {
1053         self.count_insn("cleanupret");
1054         let unwind = unwind.unwrap_or(ptr::null_mut());
1055         let ret = unsafe {
1056             llvm::LLVMRustBuildCleanupRet(self.llbuilder, cleanup, unwind)
1057         };
1058         assert!(!ret.is_null(), "LLVM does not have support for cleanupret");
1059         return ret
1060     }
1061
1062     pub fn catch_pad(&self,
1063                      parent: ValueRef,
1064                      args: &[ValueRef]) -> ValueRef {
1065         self.count_insn("catchpad");
1066         let name = CString::new("catchpad").unwrap();
1067         let ret = unsafe {
1068             llvm::LLVMRustBuildCatchPad(self.llbuilder, parent,
1069                                         args.len() as c_uint, args.as_ptr(),
1070                                         name.as_ptr())
1071         };
1072         assert!(!ret.is_null(), "LLVM does not have support for catchpad");
1073         return ret
1074     }
1075
1076     pub fn catch_ret(&self, pad: ValueRef, unwind: BasicBlockRef) -> ValueRef {
1077         self.count_insn("catchret");
1078         let ret = unsafe {
1079             llvm::LLVMRustBuildCatchRet(self.llbuilder, pad, unwind)
1080         };
1081         assert!(!ret.is_null(), "LLVM does not have support for catchret");
1082         return ret
1083     }
1084
1085     pub fn catch_switch(&self,
1086                         parent: Option<ValueRef>,
1087                         unwind: Option<BasicBlockRef>,
1088                         num_handlers: usize) -> ValueRef {
1089         self.count_insn("catchswitch");
1090         let parent = parent.unwrap_or(ptr::null_mut());
1091         let unwind = unwind.unwrap_or(ptr::null_mut());
1092         let name = CString::new("catchswitch").unwrap();
1093         let ret = unsafe {
1094             llvm::LLVMRustBuildCatchSwitch(self.llbuilder, parent, unwind,
1095                                            num_handlers as c_uint,
1096                                            name.as_ptr())
1097         };
1098         assert!(!ret.is_null(), "LLVM does not have support for catchswitch");
1099         return ret
1100     }
1101
1102     pub fn add_handler(&self, catch_switch: ValueRef, handler: BasicBlockRef) {
1103         unsafe {
1104             llvm::LLVMRustAddHandler(catch_switch, handler);
1105         }
1106     }
1107
1108     pub fn set_personality_fn(&self, personality: ValueRef) {
1109         unsafe {
1110             llvm::LLVMSetPersonalityFn(self.llfn(), personality);
1111         }
1112     }
1113
1114     // Atomic Operations
1115     pub fn atomic_cmpxchg(&self, dst: ValueRef,
1116                          cmp: ValueRef, src: ValueRef,
1117                          order: AtomicOrdering,
1118                          failure_order: AtomicOrdering,
1119                          weak: llvm::Bool) -> ValueRef {
1120         unsafe {
1121             llvm::LLVMRustBuildAtomicCmpXchg(self.llbuilder, dst, cmp, src,
1122                                          order, failure_order, weak)
1123         }
1124     }
1125     pub fn atomic_rmw(&self, op: AtomicRmwBinOp,
1126                      dst: ValueRef, src: ValueRef,
1127                      order: AtomicOrdering) -> ValueRef {
1128         unsafe {
1129             llvm::LLVMBuildAtomicRMW(self.llbuilder, op, dst, src, order, False)
1130         }
1131     }
1132
1133     pub fn atomic_fence(&self, order: AtomicOrdering, scope: SynchronizationScope) {
1134         unsafe {
1135             llvm::LLVMRustBuildAtomicFence(self.llbuilder, order, scope);
1136         }
1137     }
1138
1139     pub fn add_case(&self, s: ValueRef, on_val: ValueRef, dest: BasicBlockRef) {
1140         unsafe {
1141             if llvm::LLVMIsUndef(s) == llvm::True { return; }
1142             llvm::LLVMAddCase(s, on_val, dest)
1143         }
1144     }
1145
1146     pub fn add_incoming_to_phi(&self, phi: ValueRef, val: ValueRef, bb: BasicBlockRef) {
1147         unsafe {
1148             if llvm::LLVMIsUndef(phi) == llvm::True { return; }
1149             llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
1150         }
1151     }
1152
1153     /// Returns the ptr value that should be used for storing `val`.
1154     fn check_store<'b>(&self,
1155                        val: ValueRef,
1156                        ptr: ValueRef) -> ValueRef {
1157         let dest_ptr_ty = val_ty(ptr);
1158         let stored_ty = val_ty(val);
1159         let stored_ptr_ty = stored_ty.ptr_to();
1160
1161         assert_eq!(dest_ptr_ty.kind(), llvm::TypeKind::Pointer);
1162
1163         if dest_ptr_ty == stored_ptr_ty {
1164             ptr
1165         } else {
1166             debug!("Type mismatch in store. \
1167                     Expected {:?}, got {:?}; inserting bitcast",
1168                    dest_ptr_ty, stored_ptr_ty);
1169             self.bitcast(ptr, stored_ptr_ty)
1170         }
1171     }
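    // For example, storing an i8 value through a pointer of type i32*:
    // `dest_ptr_ty` is i32*, `stored_ptr_ty` is i8*, so the pointer is bitcast
    // to i8* before the store is emitted.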
1172
1173     /// Returns the args that should be used for a call to `llfn`.
1174     fn check_call<'b>(&self,
1175                       typ: &str,
1176                       llfn: ValueRef,
1177                       args: &'b [ValueRef]) -> Cow<'b, [ValueRef]> {
1178         let mut fn_ty = val_ty(llfn);
1179         // Strip off pointers
1180         while fn_ty.kind() == llvm::TypeKind::Pointer {
1181             fn_ty = fn_ty.element_type();
1182         }
1183
1184         assert!(fn_ty.kind() == llvm::TypeKind::Function,
1185                 "builder::{} not passed a function", typ);
1186
1187         let param_tys = fn_ty.func_params();
1188
1189         let all_args_match = param_tys.iter()
1190             .zip(args.iter().map(|&v| val_ty(v)))
1191             .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty);
1192
1193         if all_args_match {
1194             return Cow::Borrowed(args);
1195         }
1196
1197         let casted_args: Vec<_> = param_tys.into_iter()
1198             .zip(args.iter())
1199             .enumerate()
1200             .map(|(i, (expected_ty, &actual_val))| {
1201                 let actual_ty = val_ty(actual_val);
1202                 if expected_ty != actual_ty {
1203                     debug!("Type mismatch in function call of {:?}. \
1204                             Expected {:?} for param {}, got {:?}; injecting bitcast",
1205                            Value(llfn),
1206                            expected_ty, i, actual_ty);
1207                     self.bitcast(actual_val, expected_ty)
1208                 } else {
1209                     actual_val
1210                 }
1211             })
1212             .collect();
1213
1214         return Cow::Owned(casted_args);
1215     }
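    // For example, if `llfn` is declared to take an i8* but the caller passes
    // a pointer of some other type, that argument is bitcast to i8* before the
    // call is emitted (each mismatch is also logged via `debug!`).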
1216 }