]> git.lizzy.rs Git - rust.git/blob - src/librustc_codegen_llvm/builder.rs
Use the method form for CodegenCx everywhere
[rust.git] / src / librustc_codegen_llvm / builder.rs
1 // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 use llvm::{AtomicRmwBinOp, AtomicOrdering, SynchronizationScope, AsmDialect};
12 use llvm::{self, False, OperandBundleDef, BasicBlock};
13 use common::{self, *};
14 use context::CodegenCx;
15 use type_;
16 use value::Value;
17 use libc::{c_uint, c_char};
18 use rustc::ty::TyCtxt;
19 use rustc::ty::layout::{Align, Size};
20 use rustc::session::{config, Session};
21 use rustc_data_structures::small_c_str::SmallCStr;
22 use interfaces::{BuilderMethods, Backend, CommonMethods, CommonWriteMethods};
23 use syntax;
24
25 use std::borrow::Cow;
26 use std::ops::Range;
27 use std::ptr;
28
// All Builders must have an llfn associated with them
#[must_use]
pub struct Builder<'a, 'll: 'a, 'tcx: 'll, V: 'll = &'ll Value> {
    // Exclusively-owned LLVM IR builder handle; disposed in the `Drop` impl below.
    pub llbuilder: &'ll mut llvm::Builder<'ll>,
    // The per-codegen-unit context this builder emits into.
    pub cx: &'a CodegenCx<'ll, 'tcx, V>,
}
35
impl<V> Drop for Builder<'a, 'll, 'tcx, V> {
    fn drop(&mut self) {
        unsafe {
            // SAFETY: `llbuilder` is the uniquely-owned handle created in
            // `with_cx` via `LLVMCreateBuilderInContext`; it is disposed
            // exactly once, here.
            llvm::LLVMDisposeBuilder(&mut *(self.llbuilder as *mut _));
        }
    }
}
43
// This is a really awful way to get a zero-length c-string, but better (and a
// lot more efficient) than doing str::as_c_str("", ...) every time.
fn noname() -> *const c_char {
    // A single static NUL byte; its address is a valid, 'static,
    // NUL-terminated empty C string.
    static CNULL: c_char = 0;
    &CNULL
}
50
bitflags! {
    /// Flags modifying how a memory access (load/store) is emitted.
    pub struct MemFlags: u8 {
        /// Mark the access `volatile` so LLVM won't elide or reorder it.
        const VOLATILE = 1 << 0;
        /// Attach `!nontemporal` metadata (data not expected to be reused soon).
        const NONTEMPORAL = 1 << 1;
        /// Force alignment 1 on the emitted access.
        const UNALIGNED = 1 << 2;
    }
}
58
59 impl Backend for Builder<'a, 'll, 'tcx>  {
60         type Value = &'ll Value;
61         type BasicBlock = &'ll BasicBlock;
62         type Type = &'ll type_::Type;
63         type Context = &'ll llvm::Context;
64 }
65
66 impl BuilderMethods<'a, 'll, 'tcx> for Builder<'a, 'll, 'tcx> {
67     fn new_block<'b>(
68         cx: &'a CodegenCx<'ll, 'tcx>,
69         llfn: &'ll Value,
70         name: &'b str
71     ) -> Self {
72         let bx = Builder::with_cx(cx);
73         let llbb = unsafe {
74             let name = SmallCStr::new(name);
75             llvm::LLVMAppendBasicBlockInContext(
76                 cx.llcx,
77                 llfn,
78                 name.as_ptr()
79             )
80         };
81         bx.position_at_end(llbb);
82         bx
83     }
84
85     fn with_cx(cx: &'a CodegenCx<'ll, 'tcx>) -> Self {
86         // Create a fresh builder from the crate context.
87         let llbuilder = unsafe {
88             llvm::LLVMCreateBuilderInContext(cx.llcx)
89         };
90         Builder {
91             llbuilder,
92             cx,
93         }
94     }
95
    /// Creates a new block inside the same function this builder is
    /// currently positioned in.
    fn build_sibling_block<'b>(&self, name: &'b str) -> Self {
        Builder::new_block(self.cx, self.llfn(), name)
    }

    /// The compiler session of the underlying codegen context.
    fn sess(&self) -> &Session {
        self.cx.sess()
    }

    /// The type context of the underlying codegen context.
    fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {
        self.cx.tcx
    }

    /// The function containing the current insertion block.
    fn llfn(&self) -> &'ll Value {
        unsafe {
            llvm::LLVMGetBasicBlockParent(self.llbb())
        }
    }

    /// The basic block this builder is currently positioned in.
    fn llbb(&self) -> &'ll BasicBlock {
        unsafe {
            llvm::LLVMGetInsertBlock(self.llbuilder)
        }
    }
119
    /// Bumps the instruction-count statistics for one emitted instruction in
    /// `category`. No-op unless `-Z codegen-stats` / `-Z count-llvm-insns`
    /// is enabled, so this is essentially free in normal compilation.
    fn count_insn(&self, category: &str) {
        if self.cx().sess().codegen_stats() {
            self.cx().stats.borrow_mut().n_llvm_insns += 1;
        }
        if self.cx().sess().count_llvm_insns() {
            // Per-category histogram, keyed by the instruction name.
            *self.cx().stats
                      .borrow_mut()
                      .llvm_insns
                      .entry(category.to_string())
                      .or_insert(0) += 1;
        }
    }
132
    /// Sets the IR-level name of `value` (cosmetic; affects only IR dumps).
    fn set_value_name(&self, value: &'ll Value, name: &str) {
        let cname = SmallCStr::new(name);
        unsafe {
            llvm::LLVMSetValueName(value, cname.as_ptr());
        }
    }

    /// Positions the builder after the last instruction of `llbb`.
    fn position_at_end(&self, llbb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMPositionBuilderAtEnd(self.llbuilder, llbb);
        }
    }

    /// Positions the builder before the first instruction of `llbb`.
    fn position_at_start(&self, llbb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
        }
    }
151
    /// Emits `ret void`.
    fn ret_void(&self) {
        self.count_insn("retvoid");
        unsafe {
            llvm::LLVMBuildRetVoid(self.llbuilder);
        }
    }

    /// Emits `ret v`.
    fn ret(&self, v: &'ll Value) {
        self.count_insn("ret");
        unsafe {
            llvm::LLVMBuildRet(self.llbuilder, v);
        }
    }

    /// Emits an unconditional branch to `dest`.
    fn br(&self, dest: &'ll BasicBlock) {
        self.count_insn("br");
        unsafe {
            llvm::LLVMBuildBr(self.llbuilder, dest);
        }
    }

    /// Emits a conditional branch: to `then_llbb` if `cond` is true,
    /// otherwise to `else_llbb`.
    fn cond_br(
        &self,
        cond: &'ll Value,
        then_llbb: &'ll BasicBlock,
        else_llbb: &'ll BasicBlock,
    ) {
        self.count_insn("condbr");
        unsafe {
            llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
        }
    }
184
185     fn switch(
186         &self,
187         v: &'ll Value,
188         else_llbb: &'ll BasicBlock,
189         num_cases: usize,
190     ) -> &'ll Value {
191         unsafe {
192             llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, num_cases as c_uint)
193         }
194     }
195
    /// Emits an `invoke` of `llfn`: on normal return control resumes at
    /// `then`, on unwind at `catch`. `bundle` optionally carries operand
    /// bundles (used e.g. for funclet-based exception handling).
    fn invoke(&self,
                  llfn: &'ll Value,
                  args: &[&'ll Value],
                  then: &'ll BasicBlock,
                  catch: &'ll BasicBlock,
                  bundle: Option<&common::OperandBundleDef<'ll, &'ll Value>>) -> &'ll Value {
        self.count_insn("invoke");

        debug!("Invoke {:?} with args ({:?})",
               llfn,
               args);

        // Delegates to `check_call` (defined elsewhere in this impl) to
        // validate/adjust the arguments against the callee's signature.
        let args = self.check_call("invoke", llfn, args);
        // Convert the generic bundle to the LLVM representation; the first
        // binding keeps the converted bundle alive across the FFI call.
        let bundle = bundle.map(OperandBundleDef::from_generic);
        let bundle = bundle.as_ref().map(|b| &*b.raw);

        unsafe {
            llvm::LLVMRustBuildInvoke(self.llbuilder,
                                      llfn,
                                      args.as_ptr(),
                                      args.len() as c_uint,
                                      then,
                                      catch,
                                      bundle,
                                      noname())
        }
    }

    /// Emits an `unreachable` terminator.
    fn unreachable(&self) {
        self.count_insn("unreachable");
        unsafe {
            llvm::LLVMBuildUnreachable(self.llbuilder);
        }
    }
230
231     /* Arithmetic */
232     fn add(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
233         self.count_insn("add");
234         unsafe {
235             llvm::LLVMBuildAdd(self.llbuilder, lhs, rhs, noname())
236         }
237     }
238
239     fn fadd(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
240         self.count_insn("fadd");
241         unsafe {
242             llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname())
243         }
244     }
245
246     fn fadd_fast(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
247         self.count_insn("fadd");
248         unsafe {
249             let instr = llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname());
250             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
251             instr
252         }
253     }
254
255     fn sub(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
256         self.count_insn("sub");
257         unsafe {
258             llvm::LLVMBuildSub(self.llbuilder, lhs, rhs, noname())
259         }
260     }
261
262     fn fsub(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
263         self.count_insn("fsub");
264         unsafe {
265             llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname())
266         }
267     }
268
269     fn fsub_fast(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
270         self.count_insn("fsub");
271         unsafe {
272             let instr = llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname());
273             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
274             instr
275         }
276     }
277
278     fn mul(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
279         self.count_insn("mul");
280         unsafe {
281             llvm::LLVMBuildMul(self.llbuilder, lhs, rhs, noname())
282         }
283     }
284
285     fn fmul(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
286         self.count_insn("fmul");
287         unsafe {
288             llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname())
289         }
290     }
291
292     fn fmul_fast(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
293         self.count_insn("fmul");
294         unsafe {
295             let instr = llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname());
296             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
297             instr
298         }
299     }
300
301
302     fn udiv(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
303         self.count_insn("udiv");
304         unsafe {
305             llvm::LLVMBuildUDiv(self.llbuilder, lhs, rhs, noname())
306         }
307     }
308
309     fn exactudiv(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
310         self.count_insn("exactudiv");
311         unsafe {
312             llvm::LLVMBuildExactUDiv(self.llbuilder, lhs, rhs, noname())
313         }
314     }
315
316     fn sdiv(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
317         self.count_insn("sdiv");
318         unsafe {
319             llvm::LLVMBuildSDiv(self.llbuilder, lhs, rhs, noname())
320         }
321     }
322
323     fn exactsdiv(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
324         self.count_insn("exactsdiv");
325         unsafe {
326             llvm::LLVMBuildExactSDiv(self.llbuilder, lhs, rhs, noname())
327         }
328     }
329
330     fn fdiv(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
331         self.count_insn("fdiv");
332         unsafe {
333             llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname())
334         }
335     }
336
337     fn fdiv_fast(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
338         self.count_insn("fdiv");
339         unsafe {
340             let instr = llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname());
341             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
342             instr
343         }
344     }
345
346     fn urem(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
347         self.count_insn("urem");
348         unsafe {
349             llvm::LLVMBuildURem(self.llbuilder, lhs, rhs, noname())
350         }
351     }
352
353     fn srem(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
354         self.count_insn("srem");
355         unsafe {
356             llvm::LLVMBuildSRem(self.llbuilder, lhs, rhs, noname())
357         }
358     }
359
360     fn frem(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
361         self.count_insn("frem");
362         unsafe {
363             llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname())
364         }
365     }
366
367     fn frem_fast(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
368         self.count_insn("frem");
369         unsafe {
370             let instr = llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname());
371             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
372             instr
373         }
374     }
375
    /// Emits a left shift (`shl`).
    fn shl(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("shl");
        unsafe {
            llvm::LLVMBuildShl(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits a logical (zero-filling) right shift (`lshr`).
    fn lshr(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("lshr");
        unsafe {
            llvm::LLVMBuildLShr(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits an arithmetic (sign-extending) right shift (`ashr`).
    fn ashr(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("ashr");
        unsafe {
            llvm::LLVMBuildAShr(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits a bitwise `and`.
    fn and(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("and");
        unsafe {
            llvm::LLVMBuildAnd(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits a bitwise `or`.
    fn or(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("or");
        unsafe {
            llvm::LLVMBuildOr(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits a bitwise `xor`.
    fn xor(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("xor");
        unsafe {
            llvm::LLVMBuildXor(self.llbuilder, lhs, rhs, noname())
        }
    }

    /// Emits an integer negation (`0 - v`).
    fn neg(&self, v: &'ll Value) -> &'ll Value {
        self.count_insn("neg");
        unsafe {
            llvm::LLVMBuildNeg(self.llbuilder, v, noname())
        }
    }

    /// Emits a floating-point negation.
    fn fneg(&self, v: &'ll Value) -> &'ll Value {
        self.count_insn("fneg");
        unsafe {
            llvm::LLVMBuildFNeg(self.llbuilder, v, noname())
        }
    }

    /// Emits a bitwise `not` (xor with all-ones).
    fn not(&self, v: &'ll Value) -> &'ll Value {
        self.count_insn("not");
        unsafe {
            llvm::LLVMBuildNot(self.llbuilder, v, noname())
        }
    }
438
    /// Emits an `alloca` in the function's *entry* block (so it becomes a
    /// static stack slot rather than a dynamic allocation), regardless of
    /// where this builder is currently positioned.
    fn alloca(&self, ty: Self::Type, name: &str, align: Align) -> &'ll Value {
        // Use a temporary builder positioned at the start of the first basic
        // block; `self`'s position is left untouched.
        let bx = Builder::with_cx(self.cx);
        bx.position_at_start(unsafe {
            llvm::LLVMGetFirstBasicBlock(self.llfn())
        });
        bx.dynamic_alloca(ty, name, align)
    }

    /// Emits an `alloca` at the current insertion point with the given
    /// name (empty `name` = anonymous) and alignment.
    fn dynamic_alloca(&self, ty: Self::Type, name: &str, align: Align) -> &'ll Value {
        self.count_insn("alloca");
        unsafe {
            let alloca = if name.is_empty() {
                llvm::LLVMBuildAlloca(self.llbuilder, ty, noname())
            } else {
                let name = SmallCStr::new(name);
                llvm::LLVMBuildAlloca(self.llbuilder, ty,
                                      name.as_ptr())
            };
            llvm::LLVMSetAlignment(alloca, align.abi() as c_uint);
            alloca
        }
    }

    /// Emits an array `alloca` of `len` elements of `ty` at the current
    /// insertion point.
    fn array_alloca(&self,
                        ty: Self::Type,
                        len: &'ll Value,
                        name: &str,
                        align: Align) -> &'ll Value {
        self.count_insn("alloca");
        unsafe {
            let alloca = if name.is_empty() {
                llvm::LLVMBuildArrayAlloca(self.llbuilder, ty, len, noname())
            } else {
                let name = SmallCStr::new(name);
                llvm::LLVMBuildArrayAlloca(self.llbuilder, ty, len,
                                           name.as_ptr())
            };
            llvm::LLVMSetAlignment(alloca, align.abi() as c_uint);
            alloca
        }
    }
480
    /// Emits a `load` from `ptr` with the given alignment.
    fn load(&self, ptr: &'ll Value, align: Align) -> &'ll Value {
        self.count_insn("load");
        unsafe {
            let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
            llvm::LLVMSetAlignment(load, align.abi() as c_uint);
            load
        }
    }

    /// Emits a `volatile` load from `ptr` (default/ABI alignment).
    fn volatile_load(&self, ptr: &'ll Value) -> &'ll Value {
        self.count_insn("load.volatile");
        unsafe {
            let insn = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
            llvm::LLVMSetVolatile(insn, llvm::True);
            insn
        }
    }

    /// Emits an atomic load from `ptr` with the given ordering; `size` is
    /// the size of the loaded type.
    fn atomic_load(
        &self,
        ptr: &'ll Value,
        order: common::AtomicOrdering,
        size: Size,
    ) -> &'ll Value {
        self.count_insn("load.atomic");
        unsafe {
            let load = llvm::LLVMRustBuildAtomicLoad(
                self.llbuilder,
                ptr,
                noname(),
                AtomicOrdering::from_generic(order),
            );
            // LLVM requires the alignment of atomic loads to be at least the size of the type.
            llvm::LLVMSetAlignment(load, size.bytes() as c_uint);
            load
        }
    }
518
519
    /// Attaches `!range [start, end)` metadata to a `load`, telling LLVM the
    /// loaded value falls within that half-open range.
    fn range_metadata(&self, load: &'ll Value, range: Range<u128>) {
        if self.sess().target.target.arch == "amdgpu" {
            // amdgpu/LLVM does something weird and thinks a i64 value is
            // split into a v2i32, halving the bitwidth LLVM expects,
            // tripping an assertion. So, for now, just disable this
            // optimization.
            return;
        }

        unsafe {
            let llty = self.cx.val_ty(load);
            // Range metadata is a pair of constants of the loaded type.
            let v = [
                self.cx.c_uint_big(llty, range.start),
                self.cx.c_uint_big(llty, range.end)
            ];

            llvm::LLVMSetMetadata(load, llvm::MD_range as c_uint,
                                  llvm::LLVMMDNodeInContext(self.cx.llcx,
                                                            v.as_ptr(),
                                                            v.len() as c_uint));
        }
    }

    /// Attaches `!nonnull` metadata to a `load` of a pointer, asserting the
    /// loaded pointer is never null.
    fn nonnull_metadata(&self, load: &'ll Value) {
        unsafe {
            llvm::LLVMSetMetadata(load, llvm::MD_nonnull as c_uint,
                                  llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0));
        }
    }
549
    /// Emits a plain `store` of `val` to `ptr` with the given alignment.
    fn store(&self, val: &'ll Value, ptr: &'ll Value, align: Align) -> &'ll Value {
        self.store_with_flags(val, ptr, align, MemFlags::empty())
    }

    /// Emits a `store` honoring `MemFlags` (volatile / nontemporal /
    /// unaligned). `UNALIGNED` forces alignment 1, overriding `align`.
    fn store_with_flags(
        &self,
        val: &'ll Value,
        ptr: &'ll Value,
        align: Align,
        flags: MemFlags,
    ) -> &'ll Value {
        debug!("Store {:?} -> {:?} ({:?})", val, ptr, flags);
        self.count_insn("store");
        // Delegates to `check_call`-style validation of the destination
        // pointer type (see `check_store` elsewhere in this impl).
        let ptr = self.check_store(val, ptr);
        unsafe {
            let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
            let align = if flags.contains(MemFlags::UNALIGNED) {
                1
            } else {
                align.abi() as c_uint
            };
            llvm::LLVMSetAlignment(store, align);
            if flags.contains(MemFlags::VOLATILE) {
                llvm::LLVMSetVolatile(store, llvm::True);
            }
            if flags.contains(MemFlags::NONTEMPORAL) {
                // According to LLVM [1] building a nontemporal store must
                // *always* point to a metadata value of the integer 1.
                //
                // [1]: http://llvm.org/docs/LangRef.html#store-instruction
                let one = self.cx.c_i32(1);
                let node = llvm::LLVMMDNodeInContext(self.cx.llcx, &one, 1);
                llvm::LLVMSetMetadata(store, llvm::MD_nontemporal as c_uint, node);
            }
            store
        }
    }

    /// Emits an atomic store of `val` to `ptr` with the given ordering;
    /// `size` is the size of the stored type.
    fn atomic_store(&self, val: &'ll Value, ptr: &'ll Value,
                    order: common::AtomicOrdering, size: Size) {
        debug!("Store {:?} -> {:?}", val, ptr);
        self.count_insn("store.atomic");
        let ptr = self.check_store(val, ptr);
        unsafe {
            let store = llvm::LLVMRustBuildAtomicStore(
                self.llbuilder,
                val,
                ptr,
                AtomicOrdering::from_generic(order),
            );
            // LLVM requires the alignment of atomic stores to be at least the size of the type.
            llvm::LLVMSetAlignment(store, size.bytes() as c_uint);
        }
    }
604
    /// Emits a `getelementptr` with the given indices.
    fn gep(&self, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
        self.count_insn("gep");
        unsafe {
            llvm::LLVMBuildGEP(self.llbuilder, ptr, indices.as_ptr(),
                               indices.len() as c_uint, noname())
        }
    }

    /// Emits an `inbounds getelementptr` (UB if the result is out of bounds
    /// of the underlying allocation).
    fn inbounds_gep(&self, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
        self.count_insn("inboundsgep");
        unsafe {
            llvm::LLVMBuildInBoundsGEP(
                self.llbuilder, ptr, indices.as_ptr(), indices.len() as c_uint, noname())
        }
    }
620
    /* Casts */
    /// Integer truncation.
    fn trunc(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("trunc");
        unsafe {
            llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty, noname())
        }
    }

    /// Sign-extending integer widening.
    fn sext(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("sext");
        unsafe {
            llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty, noname())
        }
    }

    /// Float to unsigned integer conversion.
    fn fptoui(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("fptoui");
        unsafe {
            llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty, noname())
        }
    }

    /// Float to signed integer conversion.
    fn fptosi(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("fptosi");
        unsafe {
            llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty, noname())
        }
    }

    /// Unsigned integer to float conversion.
    fn uitofp(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("uitofp");
        unsafe {
            llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty, noname())
        }
    }

    /// Signed integer to float conversion.
    fn sitofp(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("sitofp");
        unsafe {
            llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty, noname())
        }
    }

    /// Float narrowing (e.g. f64 -> f32).
    fn fptrunc(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("fptrunc");
        unsafe {
            llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty, noname())
        }
    }

    /// Float widening (e.g. f32 -> f64).
    fn fpext(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("fpext");
        unsafe {
            llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty, noname())
        }
    }

    /// Pointer to integer conversion.
    fn ptrtoint(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("ptrtoint");
        unsafe {
            llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty, noname())
        }
    }

    /// Integer to pointer conversion.
    fn inttoptr(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("inttoptr");
        unsafe {
            llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty, noname())
        }
    }

    /// Bit-preserving reinterpretation between same-sized types.
    fn bitcast(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("bitcast");
        unsafe {
            llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty, noname())
        }
    }

    /// Integer cast that picks trunc/zext/sext based on sizes and `is_signed`.
    fn intcast(&self, val: &'ll Value, dest_ty: Self::Type, is_signed: bool) -> &'ll Value {
        self.count_insn("intcast");
        unsafe {
            llvm::LLVMRustBuildIntCast(self.llbuilder, val, dest_ty, is_signed)
        }
    }

    /// Pointer-to-pointer cast (possibly across address spaces).
    fn pointercast(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("pointercast");
        unsafe {
            llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty, noname())
        }
    }
713
    /* Comparisons */
    /// Emits an integer/pointer comparison with the given predicate.
    fn icmp(&self, op: IntPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("icmp");
        // Translate the backend-generic predicate into LLVM's enum.
        let op = llvm::IntPredicate::from_generic(op);
        unsafe {
            llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
        }
    }

    /// Emits a floating-point comparison with the given predicate.
    fn fcmp(&self, op: RealPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("fcmp");
        // NOTE(review): unlike `icmp` above, the generic `RealPredicate` is
        // cast straight to `c_uint` with no `from_generic` translation; this
        // is only sound while the generic enum's discriminants match LLVM's
        // `LLVMRealPredicate` — worth confirming or converting explicitly.
        unsafe {
            llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
        }
    }
729
    /* Miscellaneous instructions */
    /// Emits a `phi` node of type `ty` with no incoming edges yet.
    fn empty_phi(&self, ty: Self::Type) -> &'ll Value {
        self.count_insn("emptyphi");
        unsafe {
            llvm::LLVMBuildPhi(self.llbuilder, ty, noname())
        }
    }

    /// Emits a `phi` node of type `ty` with incoming value `vals[i]` from
    /// predecessor block `bbs[i]`; the two slices must be the same length.
    fn phi(&self, ty: Self::Type, vals: &[&'ll Value], bbs: &[&'ll BasicBlock]) -> &'ll Value {
        assert_eq!(vals.len(), bbs.len());
        let phi = self.empty_phi(ty);
        self.count_insn("addincoming");
        unsafe {
            llvm::LLVMAddIncoming(phi, vals.as_ptr(),
                                  bbs.as_ptr(),
                                  vals.len() as c_uint);
            phi
        }
    }
749
    /// Emits a call to an inline-assembly blob. `asm` is the assembly
    /// template and `cons` the constraint string (both NUL-terminated).
    /// Returns `None` if LLVM rejects the constraint string.
    fn inline_asm_call(&self, asm: *const c_char, cons: *const c_char,
                       inputs: &[&'ll Value], output: Self::Type,
                       volatile: bool, alignstack: bool,
                       dia: syntax::ast::AsmDialect) -> Option<&'ll Value> {
        self.count_insn("inlineasm");

        let volatile = if volatile { llvm::True }
                       else        { llvm::False };
        let alignstack = if alignstack { llvm::True }
                         else          { llvm::False };

        // The asm blob is typed as a function from the input types to `output`.
        let argtys = inputs.iter().map(|v| {
            debug!("Asm Input Type: {:?}", *v);
            self.cx.val_ty(*v)
        }).collect::<Vec<_>>();

        debug!("Asm Output Type: {:?}", output);
        let fty = type_::Type::func(&argtys[..], output);
        unsafe {
            // Ask LLVM to verify that the constraints are well-formed.
            let constraints_ok = llvm::LLVMRustInlineAsmVerify(fty, cons);
            debug!("Constraint verification result: {:?}", constraints_ok);
            if constraints_ok {
                let v = llvm::LLVMRustInlineAsm(
                    fty, asm, cons, volatile, alignstack, AsmDialect::from_generic(dia));
                Some(self.call(v, inputs, None))
            } else {
                // LLVM has detected an issue with our constraints, bail out
                None
            }
        }
    }
782
783     fn memcpy(&self, dst: &'ll Value, dst_align: u64,
784                   src: &'ll Value, src_align: u64,
785                   size: &'ll Value, is_volatile: bool) -> &'ll Value {
786         unsafe {
787             llvm::LLVMRustBuildMemCpy(self.llbuilder, dst, dst_align as c_uint,
788                                       src, src_align as c_uint, size, is_volatile)
789         }
790     }
791
792     fn memmove(&self, dst: &'ll Value, dst_align: u64,
793                   src: &'ll Value, src_align: u64,
794                   size: &'ll Value, is_volatile: bool) -> &'ll Value {
795         unsafe {
796             llvm::LLVMRustBuildMemMove(self.llbuilder, dst, dst_align as c_uint,
797                                       src, src_align as c_uint, size, is_volatile)
798         }
799     }
800
    /// Emits a call to the `llvm.minnum` intrinsic.
    /// Panics if the host LLVM is too old to provide it.
    fn minnum(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("minnum");
        unsafe {
            let instr = llvm::LLVMRustBuildMinNum(self.llbuilder, lhs, rhs);
            instr.expect("LLVMRustBuildMinNum is not available in LLVM version < 6.0")
        }
    }

    /// Emits a call to the `llvm.maxnum` intrinsic.
    /// Panics if the host LLVM is too old to provide it.
    fn maxnum(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("maxnum");
        unsafe {
            let instr = llvm::LLVMRustBuildMaxNum(self.llbuilder, lhs, rhs);
            instr.expect("LLVMRustBuildMaxNum is not available in LLVM version < 6.0")
        }
    }
815
    /// Emits a `select cond, then_val, else_val`.
    fn select(
        &self, cond: &'ll Value,
        then_val: &'ll Value,
        else_val: &'ll Value,
    ) -> &'ll Value {
        self.count_insn("select");
        unsafe {
            llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, noname())
        }
    }

    /// Emits a `va_arg` instruction reading the next argument of type `ty`
    /// from the va_list `list`.
    #[allow(dead_code)]
    fn va_arg(&self, list: &'ll Value, ty: Self::Type) -> &'ll Value {
        self.count_insn("vaarg");
        unsafe {
            llvm::LLVMBuildVAArg(self.llbuilder, list, ty, noname())
        }
    }

    /// Emits an `extractelement` of lane `idx` from vector `vec`.
    fn extract_element(&self, vec: &'ll Value, idx: &'ll Value) -> &'ll Value {
        self.count_insn("extractelement");
        unsafe {
            llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, noname())
        }
    }

    /// Emits an `insertelement` placing `elt` into lane `idx` of `vec`.
    fn insert_element(
        &self, vec: &'ll Value,
        elt: &'ll Value,
        idx: &'ll Value,
    ) -> &'ll Value {
        self.count_insn("insertelement");
        unsafe {
            llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, noname())
        }
    }

    /// Emits a `shufflevector` combining `v1` and `v2` according to `mask`.
    fn shuffle_vector(&self, v1: &'ll Value, v2: &'ll Value, mask: &'ll Value) -> &'ll Value {
        self.count_insn("shufflevector");
        unsafe {
            llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, noname())
        }
    }
859
    /// Builds a vector of `num_elts` lanes all equal to `elt`, via the usual
    /// insertelement-into-undef + zero-mask shufflevector idiom.
    fn vector_splat(&self, num_elts: usize, elt: &'ll Value) -> &'ll Value {
        unsafe {
            let elt_ty = self.cx.val_ty(elt);
            let undef = llvm::LLVMGetUndef(type_::Type::vector(elt_ty, num_elts as u64));
            // Put `elt` into lane 0, then broadcast it with an all-zeros mask.
            let vec = self.insert_element(undef, elt, self.cx.c_i32(0));
            let vec_i32_ty = type_::Type::vector(type_::Type::i32(self.cx), num_elts as u64);
            self.shuffle_vector(vec, undef, self.cx.c_null(vec_i32_ty))
        }
    }
869
870     fn vector_reduce_fadd_fast(&self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
871         self.count_insn("vector.reduce.fadd_fast");
872         unsafe {
873             // FIXME: add a non-fast math version once
874             // https://bugs.llvm.org/show_bug.cgi?id=36732
875             // is fixed.
876             let instr = llvm::LLVMRustBuildVectorReduceFAdd(self.llbuilder, acc, src);
877             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
878             instr
879         }
880     }
    /// Float-multiply reduction of `src` into accumulator `acc`, with the
    /// unsafe-algebra (fast-math) flag set so LLVM may reassociate.
    fn vector_reduce_fmul_fast(&self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.fmul_fast");
        unsafe {
            // FIXME: add a non-fast math version once
            // https://bugs.llvm.org/show_bug.cgi?id=36732
            // is fixed.
            let instr = llvm::LLVMRustBuildVectorReduceFMul(self.llbuilder, acc, src);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    /// Integer-add reduction across all lanes of `src`.
    fn vector_reduce_add(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.add");
        unsafe { llvm::LLVMRustBuildVectorReduceAdd(self.llbuilder, src) }
    }
    /// Integer-multiply reduction across all lanes of `src`.
    fn vector_reduce_mul(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.mul");
        unsafe { llvm::LLVMRustBuildVectorReduceMul(self.llbuilder, src) }
    }
    /// Bitwise-AND reduction across all lanes of `src`.
    fn vector_reduce_and(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.and");
        unsafe { llvm::LLVMRustBuildVectorReduceAnd(self.llbuilder, src) }
    }
    /// Bitwise-OR reduction across all lanes of `src`.
    fn vector_reduce_or(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.or");
        unsafe { llvm::LLVMRustBuildVectorReduceOr(self.llbuilder, src) }
    }
    /// Bitwise-XOR reduction across all lanes of `src`.
    fn vector_reduce_xor(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.xor");
        unsafe { llvm::LLVMRustBuildVectorReduceXor(self.llbuilder, src) }
    }
    /// Float-minimum reduction across all lanes of `src`.
    /// NoNaNs is false here — presumably NaN-aware semantics; confirm
    /// against the LLVMRustBuildVectorReduceFMin binding.
    fn vector_reduce_fmin(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.fmin");
        unsafe { llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ false) }
    }
    /// Float-maximum reduction across all lanes of `src`.
    /// NoNaNs is false here — presumably NaN-aware semantics; confirm
    /// against the LLVMRustBuildVectorReduceFMax binding.
    fn vector_reduce_fmax(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.fmax");
        unsafe { llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ false) }
    }
    /// Float-minimum reduction with NoNaNs and the unsafe-algebra
    /// (fast-math) flag set on the emitted instruction.
    fn vector_reduce_fmin_fast(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.fmin_fast");
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ true);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    /// Float-maximum reduction with NoNaNs and the unsafe-algebra
    /// (fast-math) flag set on the emitted instruction.
    fn vector_reduce_fmax_fast(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.fmax_fast");
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ true);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    /// Integer-minimum reduction; `is_signed` selects signed vs unsigned compare.
    fn vector_reduce_min(&self, src: &'ll Value, is_signed: bool) -> &'ll Value {
        self.count_insn("vector.reduce.min");
        unsafe { llvm::LLVMRustBuildVectorReduceMin(self.llbuilder, src, is_signed) }
    }
    /// Integer-maximum reduction; `is_signed` selects signed vs unsigned compare.
    fn vector_reduce_max(&self, src: &'ll Value, is_signed: bool) -> &'ll Value {
        self.count_insn("vector.reduce.max");
        unsafe { llvm::LLVMRustBuildVectorReduceMax(self.llbuilder, src, is_signed) }
    }
944
    /// Builds an `extractvalue` pulling member `idx` out of aggregate `agg_val`.
    fn extract_value(&self, agg_val: &'ll Value, idx: u64) -> &'ll Value {
        self.count_insn("extractvalue");
        // Guard against silent truncation when narrowing u64 -> c_uint.
        assert_eq!(idx as c_uint as u64, idx);
        unsafe {
            llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, noname())
        }
    }
952
    /// Builds an `insertvalue` writing `elt` into member `idx` of aggregate
    /// `agg_val`, yielding the updated aggregate.
    fn insert_value(&self, agg_val: &'ll Value, elt: &'ll Value,
                       idx: u64) -> &'ll Value {
        self.count_insn("insertvalue");
        // Guard against silent truncation when narrowing u64 -> c_uint.
        assert_eq!(idx as c_uint as u64, idx);
        unsafe {
            llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint,
                                       noname())
        }
    }
962
    /// Builds a `landingpad` of type `ty` with personality function
    /// `pers_fn`, reserving space for `num_clauses` clauses (added later
    /// via `add_clause`).
    fn landing_pad(&self, ty: Self::Type, pers_fn: &'ll Value,
                       num_clauses: usize) -> &'ll Value {
        self.count_insn("landingpad");
        unsafe {
            llvm::LLVMBuildLandingPad(self.llbuilder, ty, pers_fn,
                                      num_clauses as c_uint, noname())
        }
    }
971
    /// Appends a catch/filter clause to an existing `landingpad` instruction.
    fn add_clause(&self, landing_pad: &'ll Value, clause: &'ll Value) {
        unsafe {
            llvm::LLVMAddClause(landing_pad, clause);
        }
    }
977
    /// Marks a `landingpad` as a cleanup pad (sets its cleanup flag to true).
    fn set_cleanup(&self, landing_pad: &'ll Value) {
        self.count_insn("setcleanup");
        unsafe {
            llvm::LLVMSetCleanup(landing_pad, llvm::True);
        }
    }
984
    /// Builds a `resume` instruction re-raising the in-flight exception `exn`.
    fn resume(&self, exn: &'ll Value) -> &'ll Value {
        self.count_insn("resume");
        unsafe {
            llvm::LLVMBuildResume(self.llbuilder, exn)
        }
    }
991
    /// Builds a `cleanuppad` instruction (MSVC-style EH) with the given
    /// optional parent pad and funclet arguments.
    ///
    /// The binding returns `Option` because older LLVM versions lacked
    /// this instruction; panics if unsupported.
    fn cleanup_pad(&self,
                       parent: Option<&'ll Value>,
                       args: &[&'ll Value]) -> &'ll Value {
        self.count_insn("cleanuppad");
        let name = const_cstr!("cleanuppad");
        let ret = unsafe {
            llvm::LLVMRustBuildCleanupPad(self.llbuilder,
                                          parent,
                                          args.len() as c_uint,
                                          args.as_ptr(),
                                          name.as_ptr())
        };
        ret.expect("LLVM does not have support for cleanuppad")
    }
1006
    /// Builds a `cleanupret` terminating funclet `cleanup`, optionally
    /// continuing unwinding at `unwind`. Panics if LLVM lacks support.
    fn cleanup_ret(
        &self, cleanup: &'ll Value,
        unwind: Option<&'ll BasicBlock>,
    ) -> &'ll Value {
        self.count_insn("cleanupret");
        let ret = unsafe {
            llvm::LLVMRustBuildCleanupRet(self.llbuilder, cleanup, unwind)
        };
        ret.expect("LLVM does not have support for cleanupret")
    }
1017
    /// Builds a `catchpad` instruction (MSVC-style EH) under `parent`
    /// (a catchswitch) with the given funclet arguments.
    /// Panics if LLVM lacks support.
    fn catch_pad(&self,
                     parent: &'ll Value,
                     args: &[&'ll Value]) -> &'ll Value {
        self.count_insn("catchpad");
        let name = const_cstr!("catchpad");
        let ret = unsafe {
            llvm::LLVMRustBuildCatchPad(self.llbuilder, parent,
                                        args.len() as c_uint, args.as_ptr(),
                                        name.as_ptr())
        };
        ret.expect("LLVM does not have support for catchpad")
    }
1030
    /// Builds a `catchret` leaving funclet `pad` and resuming normal
    /// execution at `unwind`. Panics if LLVM lacks support.
    fn catch_ret(&self, pad: &'ll Value, unwind: &'ll BasicBlock) -> &'ll Value {
        self.count_insn("catchret");
        let ret = unsafe {
            llvm::LLVMRustBuildCatchRet(self.llbuilder, pad, unwind)
        };
        ret.expect("LLVM does not have support for catchret")
    }
1038
    /// Builds a `catchswitch` with optional parent pad and unwind
    /// destination, reserving space for `num_handlers` handlers (added
    /// later via `add_handler`). Panics if LLVM lacks support.
    fn catch_switch(
        &self,
        parent: Option<&'ll Value>,
        unwind: Option<&'ll BasicBlock>,
        num_handlers: usize,
    ) -> &'ll Value {
        self.count_insn("catchswitch");
        let name = const_cstr!("catchswitch");
        let ret = unsafe {
            llvm::LLVMRustBuildCatchSwitch(self.llbuilder, parent, unwind,
                                           num_handlers as c_uint,
                                           name.as_ptr())
        };
        ret.expect("LLVM does not have support for catchswitch")
    }
1054
    /// Appends a handler basic block to an existing `catchswitch`.
    fn add_handler(&self, catch_switch: &'ll Value, handler: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMRustAddHandler(catch_switch, handler);
        }
    }
1060
    /// Sets the EH personality function on the function currently being built.
    fn set_personality_fn(&self, personality: &'ll Value) {
        unsafe {
            llvm::LLVMSetPersonalityFn(self.llfn(), personality);
        }
    }
1066
1067     // Atomic Operations
1068     fn atomic_cmpxchg(
1069         &self,
1070         dst: &'ll Value,
1071         cmp: &'ll Value,
1072         src: &'ll Value,
1073         order: common::AtomicOrdering,
1074         failure_order: common::AtomicOrdering,
1075         weak: bool,
1076     ) -> &'ll Value {
1077         let weak = if weak { llvm::True } else { llvm::False };
1078         unsafe {
1079             llvm::LLVMRustBuildAtomicCmpXchg(
1080                 self.llbuilder,
1081                 dst,
1082                 cmp,
1083                 src,
1084                 AtomicOrdering::from_generic(order),
1085                 AtomicOrdering::from_generic(failure_order),
1086                 weak
1087             )
1088         }
1089     }
    /// Builds an `atomicrmw` applying `op` to `*dst` with operand `src`
    /// under the given memory ordering.
    fn atomic_rmw(
        &self,
        op: common::AtomicRmwBinOp,
        dst: &'ll Value,
        src: &'ll Value,
        order: common::AtomicOrdering,
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildAtomicRMW(
                self.llbuilder,
                AtomicRmwBinOp::from_generic(op),
                dst,
                src,
                AtomicOrdering::from_generic(order),
                // Trailing `False` — presumably LLVM-C's singleThread flag;
                // confirm against the LLVMBuildAtomicRMW API docs.
                False)
        }
    }
1107
    /// Builds a `fence` with the given memory ordering and synchronization scope.
    fn atomic_fence(&self, order: common::AtomicOrdering, scope: common::SynchronizationScope) {
        unsafe {
            llvm::LLVMRustBuildAtomicFence(
                self.llbuilder,
                AtomicOrdering::from_generic(order),
                SynchronizationScope::from_generic(scope)
            );
        }
    }
1117
    /// Appends a `(on_val -> dest)` case to an existing `switch` instruction `s`.
    fn add_case(&self, s: &'ll Value, on_val: &'ll Value, dest: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMAddCase(s, on_val, dest)
        }
    }
1123
    /// Appends one incoming edge `(val, bb)` to an existing `phi` node.
    fn add_incoming_to_phi(&self, phi: &'ll Value, val: &'ll Value, bb: &'ll BasicBlock) {
        self.count_insn("addincoming");
        unsafe {
            // Single-element "arrays" via references to the locals.
            llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
        }
    }
1130
    /// Attaches empty `!invariant.load` metadata to `load`, telling LLVM
    /// the loaded memory location never changes.
    fn set_invariant_load(&self, load: &'ll Value) {
        unsafe {
            llvm::LLVMSetMetadata(load, llvm::MD_invariant_load as c_uint,
                                  llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0));
        }
    }
1137
1138     /// Returns the ptr value that should be used for storing `val`.
1139     fn check_store<'b>(&self,
1140                        val: &'ll Value,
1141                        ptr: &'ll Value) -> &'ll Value {
1142         let dest_ptr_ty = self.cx.val_ty(ptr);
1143         let stored_ty = self.cx.val_ty(val);
1144         let stored_ptr_ty = stored_ty.ptr_to();
1145
1146         assert_eq!(dest_ptr_ty.kind(), llvm::TypeKind::Pointer);
1147
1148         if dest_ptr_ty == stored_ptr_ty {
1149             ptr
1150         } else {
1151             debug!("Type mismatch in store. \
1152                     Expected {:?}, got {:?}; inserting bitcast",
1153                    dest_ptr_ty, stored_ptr_ty);
1154             self.bitcast(ptr, stored_ptr_ty)
1155         }
1156     }
1157
1158     /// Returns the args that should be used for a call to `llfn`.
1159     fn check_call<'b>(&self,
1160                       typ: &str,
1161                       llfn: &'ll Value,
1162                       args: &'b [&'ll Value]) -> Cow<'b, [&'ll Value]> {
1163         let mut fn_ty = self.cx.val_ty(llfn);
1164         // Strip off pointers
1165         while fn_ty.kind() == llvm::TypeKind::Pointer {
1166             fn_ty = fn_ty.element_type();
1167         }
1168
1169         assert!(fn_ty.kind() == llvm::TypeKind::Function,
1170                 "builder::{} not passed a function, but {:?}", typ, fn_ty);
1171
1172         let param_tys = fn_ty.func_params();
1173
1174         let all_args_match = param_tys.iter()
1175             .zip(args.iter().map(|&v| self.cx().val_ty(v)))
1176             .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty);
1177
1178         if all_args_match {
1179             return Cow::Borrowed(args);
1180         }
1181
1182         let casted_args: Vec<_> = param_tys.into_iter()
1183             .zip(args.iter())
1184             .enumerate()
1185             .map(|(i, (expected_ty, &actual_val))| {
1186                 let actual_ty = self.cx().val_ty(actual_val);
1187                 if expected_ty != actual_ty {
1188                     debug!("Type mismatch in function call of {:?}. \
1189                             Expected {:?} for param {}, got {:?}; injecting bitcast",
1190                            llfn, expected_ty, i, actual_ty);
1191                     self.bitcast(actual_val, expected_ty)
1192                 } else {
1193                     actual_val
1194                 }
1195             })
1196             .collect();
1197
1198         Cow::Owned(casted_args)
1199     }
1200
    /// Marks the start of the live range of the `size`-byte allocation at `ptr`.
    fn lifetime_start(&self, ptr: &'ll Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.start", ptr, size);
    }
1204
    /// Marks the end of the live range of the `size`-byte allocation at `ptr`.
    fn lifetime_end(&self, ptr: &'ll Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.end", ptr, size);
    }
1208
1209     /// If LLVM lifetime intrinsic support is enabled (i.e. optimizations
1210     /// on), and `ptr` is nonzero-sized, then extracts the size of `ptr`
1211     /// and the intrinsic for `lt` and passes them to `emit`, which is in
1212     /// charge of generating code to call the passed intrinsic on whatever
1213     /// block of generated code is targeted for the intrinsic.
1214     ///
1215     /// If LLVM lifetime intrinsic support is disabled (i.e.  optimizations
1216     /// off) or `ptr` is zero-sized, then no-op (does not call `emit`).
1217     fn call_lifetime_intrinsic(&self, intrinsic: &str, ptr: &'ll Value, size: Size) {
1218         if self.cx.sess().opts.optimize == config::OptLevel::No {
1219             return;
1220         }
1221
1222         let size = size.bytes();
1223         if size == 0 {
1224             return;
1225         }
1226
1227         let lifetime_intrinsic = self.cx.get_intrinsic(intrinsic);
1228
1229         let ptr = self.pointercast(ptr, type_::Type::i8p(self.cx));
1230         self.call(lifetime_intrinsic, &[self.cx.c_u64(size), ptr], None);
1231     }
1232
    /// Builds a call to `llfn` with `args` (bitcast as needed via
    /// `check_call`) and an optional operand bundle (used for funclets).
    fn call(&self, llfn: &'ll Value, args: &[&'ll Value],
                bundle: Option<&common::OperandBundleDef<'ll, &'ll Value>>) -> &'ll Value {
        self.count_insn("call");

        debug!("Call {:?} with args ({:?})",
               llfn,
               args);

        let args = self.check_call("call", llfn, args);
        // The converted bundle must stay alive in a local so the raw
        // pointer passed to LLVM remains valid for the build call.
        let bundle = bundle.map(OperandBundleDef::from_generic);
        let bundle = bundle.as_ref().map(|b| &*b.raw);

        unsafe {
            llvm::LLVMRustBuildCall(
                self.llbuilder,
                llfn,
                args.as_ptr() as *const &llvm::Value,
                args.len() as c_uint,
                bundle, noname()
            )
        }
    }
1255
    /// Builds a `zext` (zero-extension) of `val` to `dest_ty`.
    fn zext(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("zext");
        unsafe {
            llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty, noname())
        }
    }
1262
    /// Builds a struct-member GEP addressing field `idx` of the struct at `ptr`.
    fn struct_gep(&self, ptr: &'ll Value, idx: u64) -> &'ll Value {
        self.count_insn("structgep");
        // Guard against silent truncation when narrowing u64 -> c_uint.
        assert_eq!(idx as c_uint as u64, idx);
        unsafe {
            llvm::LLVMBuildStructGEP(self.llbuilder, ptr, idx as c_uint, noname())
        }
    }
1270
    /// Returns the codegen context this builder was created for.
    fn cx(&self) -> &'a CodegenCx<'ll, 'tcx> {
        &self.cx
    }
1274 }