]> git.lizzy.rs Git - rust.git/blob - src/librustc_codegen_llvm/builder.rs
1d3ef94f1552c156bd4ea1eaa174a90a08df5acb
[rust.git] / src / librustc_codegen_llvm / builder.rs
1 // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 use llvm::{AtomicRmwBinOp, AtomicOrdering, SynchronizationScope, AsmDialect};
12 use llvm::{self, False, OperandBundleDef, BasicBlock};
13 use common::{self, *};
14 use context::CodegenCx;
15 use type_;
16 use value::Value;
17 use libc::{c_uint, c_char};
18 use rustc::ty::TyCtxt;
19 use rustc::ty::layout::{Align, Size};
20 use rustc::session::{config, Session};
21 use rustc_data_structures::small_c_str::SmallCStr;
22 use interfaces::{BuilderMethods, Backend, CommonMethods, CommonWriteMethods, TypeMethods};
23 use syntax;
24
25 use std::borrow::Cow;
26 use std::ops::Range;
27 use std::ptr;
28
// All Builders must have an llfn associated with them
#[must_use]
/// A wrapper around an LLVM IR builder positioned inside some function.
///
/// `V` defaults to the LLVM `&'ll Value` type; the builder borrows the
/// codegen context `cx` it was created from.
pub struct Builder<'a, 'll: 'a, 'tcx: 'll, V: 'll = &'ll Value> {
    // Raw LLVM builder handle; disposed in the `Drop` impl.
    pub llbuilder: &'ll mut llvm::Builder<'ll>,
    // The per-codegen-unit context this builder emits into.
    pub cx: &'a CodegenCx<'ll, 'tcx, V>,
}
35
impl<V> Drop for Builder<'a, 'll, 'tcx, V> {
    /// Releases the underlying LLVM builder handle.
    fn drop(&mut self) {
        unsafe {
            // SAFETY: `llbuilder` is owned exclusively by this Builder and is
            // never used again after drop, so disposing it here is sound.
            llvm::LLVMDisposeBuilder(&mut *(self.llbuilder as *mut _));
        }
    }
}
43
// Cheap way to hand LLVM an empty C string ("") for unnamed values,
// avoiding an allocation on every call site that needs one.
fn noname() -> *const c_char {
    // A single static NUL byte is itself a valid, empty, NUL-terminated
    // C string, so its address can be passed wherever a name is expected.
    static EMPTY: c_char = 0;
    &EMPTY as *const c_char
}
50
bitflags! {
    /// Flags that modify how a memory access (load/store/memcpy) is emitted.
    pub struct MemFlags: u8 {
        /// Mark the access `volatile` so LLVM will not elide or reorder it.
        const VOLATILE = 1 << 0;
        /// Attach `!nontemporal` metadata, hinting the data will not be
        /// reused soon.
        const NONTEMPORAL = 1 << 1;
        /// Force alignment 1 instead of the type's natural alignment.
        const UNALIGNED = 1 << 2;
    }
}
58
59 impl Backend for Builder<'a, 'll, 'tcx>  {
60         type Value = &'ll Value;
61         type BasicBlock = &'ll BasicBlock;
62         type Type = &'ll type_::Type;
63         type TypeKind = llvm::TypeKind;
64         type Context = &'ll llvm::Context;
65 }
66
67 impl BuilderMethods<'a, 'll, 'tcx> for Builder<'a, 'll, 'tcx> {
    /// Creates a builder positioned at the end of a fresh basic block
    /// named `name`, appended to the function `llfn`.
    fn new_block<'b>(
        cx: &'a CodegenCx<'ll, 'tcx>,
        llfn: &'ll Value,
        name: &'b str
    ) -> Self {
        let bx = Builder::with_cx(cx);
        let llbb = unsafe {
            // NUL-terminate the block name for the C API.
            let name = SmallCStr::new(name);
            llvm::LLVMAppendBasicBlockInContext(
                cx.llcx,
                llfn,
                name.as_ptr()
            )
        };
        bx.position_at_end(llbb);
        bx
    }

    /// Creates a builder that is not yet positioned in any basic block.
    fn with_cx(cx: &'a CodegenCx<'ll, 'tcx>) -> Self {
        // Create a fresh builder from the crate context.
        let llbuilder = unsafe {
            llvm::LLVMCreateBuilderInContext(cx.llcx)
        };
        Builder {
            llbuilder,
            cx,
        }
    }

    /// Creates a new block, named `name`, in the same function as this
    /// builder's current block.
    fn build_sibling_block<'b>(&self, name: &'b str) -> Self {
        Builder::new_block(self.cx, self.llfn(), name)
    }

    /// Shorthand for the session of the underlying codegen context.
    fn sess(&self) -> &Session {
        self.cx.sess()
    }

    /// Shorthand for the type context of the underlying codegen context.
    fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {
        self.cx.tcx
    }

    /// The function containing this builder's current basic block.
    fn llfn(&self) -> &'ll Value {
        unsafe {
            llvm::LLVMGetBasicBlockParent(self.llbb())
        }
    }

    /// The basic block this builder is currently positioned in.
    fn llbb(&self) -> &'ll BasicBlock {
        unsafe {
            llvm::LLVMGetInsertBlock(self.llbuilder)
        }
    }
120
    /// Records one emitted LLVM instruction in the session's codegen
    /// statistics: a running total when `codegen_stats()` is on, and a
    /// per-`category` tally when `count_llvm_insns()` is on.
    fn count_insn(&self, category: &str) {
        if self.cx().sess().codegen_stats() {
            self.cx().stats.borrow_mut().n_llvm_insns += 1;
        }
        if self.cx().sess().count_llvm_insns() {
            *self.cx().stats
                      .borrow_mut()
                      .llvm_insns
                      .entry(category.to_string())
                      .or_insert(0) += 1;
        }
    }

    /// Assigns the IR-level name `name` to `value`.
    fn set_value_name(&self, value: &'ll Value, name: &str) {
        let cname = SmallCStr::new(name);
        unsafe {
            llvm::LLVMSetValueName(value, cname.as_ptr());
        }
    }

    /// Moves the insertion point to the end of `llbb`.
    fn position_at_end(&self, llbb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMPositionBuilderAtEnd(self.llbuilder, llbb);
        }
    }

    /// Moves the insertion point to the start of `llbb`.
    fn position_at_start(&self, llbb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
        }
    }
152
    /// Emits `ret void`.
    fn ret_void(&self) {
        self.count_insn("retvoid");
        unsafe {
            llvm::LLVMBuildRetVoid(self.llbuilder);
        }
    }

    /// Emits `ret` returning `v`.
    fn ret(&self, v: &'ll Value) {
        self.count_insn("ret");
        unsafe {
            llvm::LLVMBuildRet(self.llbuilder, v);
        }
    }

    /// Emits an unconditional branch to `dest`.
    fn br(&self, dest: &'ll BasicBlock) {
        self.count_insn("br");
        unsafe {
            llvm::LLVMBuildBr(self.llbuilder, dest);
        }
    }

    /// Emits a conditional branch on `cond` to `then_llbb` or `else_llbb`.
    fn cond_br(
        &self,
        cond: &'ll Value,
        then_llbb: &'ll BasicBlock,
        else_llbb: &'ll BasicBlock,
    ) {
        self.count_insn("condbr");
        unsafe {
            llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
        }
    }
185
186     fn switch(
187         &self,
188         v: &'ll Value,
189         else_llbb: &'ll BasicBlock,
190         num_cases: usize,
191     ) -> &'ll Value {
192         unsafe {
193             llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, num_cases as c_uint)
194         }
195     }
196
    /// Emits an `invoke` of `llfn`, branching to `then` on normal return
    /// and to `catch` on unwind; `bundle` optionally attaches an operand
    /// bundle to the call site. Arguments are validated (and pointer-cast
    /// where needed) by `check_call` first.
    fn invoke(&self,
                  llfn: &'ll Value,
                  args: &[&'ll Value],
                  then: &'ll BasicBlock,
                  catch: &'ll BasicBlock,
                  bundle: Option<&common::OperandBundleDef<'ll, &'ll Value>>) -> &'ll Value {
        self.count_insn("invoke");

        debug!("Invoke {:?} with args ({:?})",
               llfn,
               args);

        let args = self.check_call("invoke", llfn, args);
        // Convert the backend-generic bundle to the raw LLVM form.
        let bundle = bundle.map(OperandBundleDef::from_generic);
        let bundle = bundle.as_ref().map(|b| &*b.raw);

        unsafe {
            llvm::LLVMRustBuildInvoke(self.llbuilder,
                                      llfn,
                                      args.as_ptr(),
                                      args.len() as c_uint,
                                      then,
                                      catch,
                                      bundle,
                                      noname())
        }
    }

    /// Emits `unreachable`, terminating the current block.
    fn unreachable(&self) {
        self.count_insn("unreachable");
        unsafe {
            llvm::LLVMBuildUnreachable(self.llbuilder);
        }
    }
231
    /* Arithmetic */
    // Each wrapper below emits exactly one LLVM instruction of the same
    // name and records it via `count_insn`. The `*_fast` float variants
    // additionally set the "unsafe algebra" (fast-math) flag on the
    // emitted instruction; note they share the plain variant's counter
    // category (e.g. "fadd"), so stats do not distinguish them.
    fn add(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("add");
        unsafe {
            llvm::LLVMBuildAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fadd(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("fadd");
        unsafe {
            llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fadd_fast(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("fadd");
        unsafe {
            let instr = llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn sub(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("sub");
        unsafe {
            llvm::LLVMBuildSub(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fsub(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("fsub");
        unsafe {
            llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fsub_fast(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("fsub");
        unsafe {
            let instr = llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn mul(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("mul");
        unsafe {
            llvm::LLVMBuildMul(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fmul(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("fmul");
        unsafe {
            llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fmul_fast(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("fmul");
        unsafe {
            let instr = llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }


    fn udiv(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("udiv");
        unsafe {
            llvm::LLVMBuildUDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    // "exact" division: the caller asserts the division has no remainder.
    fn exactudiv(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("exactudiv");
        unsafe {
            llvm::LLVMBuildExactUDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn sdiv(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("sdiv");
        unsafe {
            llvm::LLVMBuildSDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn exactsdiv(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("exactsdiv");
        unsafe {
            llvm::LLVMBuildExactSDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fdiv(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("fdiv");
        unsafe {
            llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fdiv_fast(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("fdiv");
        unsafe {
            let instr = llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn urem(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("urem");
        unsafe {
            llvm::LLVMBuildURem(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn srem(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("srem");
        unsafe {
            llvm::LLVMBuildSRem(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn frem(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("frem");
        unsafe {
            llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn frem_fast(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("frem");
        unsafe {
            let instr = llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn shl(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("shl");
        unsafe {
            llvm::LLVMBuildShl(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn lshr(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("lshr");
        unsafe {
            llvm::LLVMBuildLShr(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn ashr(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("ashr");
        unsafe {
            llvm::LLVMBuildAShr(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn and(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("and");
        unsafe {
            llvm::LLVMBuildAnd(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn or(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("or");
        unsafe {
            llvm::LLVMBuildOr(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn xor(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("xor");
        unsafe {
            llvm::LLVMBuildXor(self.llbuilder, lhs, rhs, noname())
        }
    }

    // Integer negation.
    fn neg(&self, v: &'ll Value) -> &'ll Value {
        self.count_insn("neg");
        unsafe {
            llvm::LLVMBuildNeg(self.llbuilder, v, noname())
        }
    }

    // Floating-point negation.
    fn fneg(&self, v: &'ll Value) -> &'ll Value {
        self.count_insn("fneg");
        unsafe {
            llvm::LLVMBuildFNeg(self.llbuilder, v, noname())
        }
    }

    // Bitwise complement.
    fn not(&self, v: &'ll Value) -> &'ll Value {
        self.count_insn("not");
        unsafe {
            llvm::LLVMBuildNot(self.llbuilder, v, noname())
        }
    }
439
    /// Emits an `alloca` at the *start of the entry block* of the current
    /// function (via a temporary builder), so it is treated as a static
    /// stack slot rather than a dynamic allocation.
    fn alloca(&self, ty: Self::Type, name: &str, align: Align) -> &'ll Value {
        let bx = Builder::with_cx(self.cx);
        bx.position_at_start(unsafe {
            llvm::LLVMGetFirstBasicBlock(self.llfn())
        });
        bx.dynamic_alloca(ty, name, align)
    }

    /// Emits an `alloca` of `ty` at the current insertion point with the
    /// given name (empty name → unnamed) and alignment.
    fn dynamic_alloca(&self, ty: Self::Type, name: &str, align: Align) -> &'ll Value {
        self.count_insn("alloca");
        unsafe {
            let alloca = if name.is_empty() {
                llvm::LLVMBuildAlloca(self.llbuilder, ty, noname())
            } else {
                let name = SmallCStr::new(name);
                llvm::LLVMBuildAlloca(self.llbuilder, ty,
                                      name.as_ptr())
            };
            llvm::LLVMSetAlignment(alloca, align.abi() as c_uint);
            alloca
        }
    }

    /// Emits an `alloca` of a runtime-sized array of `len` elements of
    /// `ty`, at the current insertion point.
    fn array_alloca(&self,
                        ty: Self::Type,
                        len: &'ll Value,
                        name: &str,
                        align: Align) -> &'ll Value {
        self.count_insn("alloca");
        unsafe {
            let alloca = if name.is_empty() {
                llvm::LLVMBuildArrayAlloca(self.llbuilder, ty, len, noname())
            } else {
                let name = SmallCStr::new(name);
                llvm::LLVMBuildArrayAlloca(self.llbuilder, ty, len,
                                           name.as_ptr())
            };
            llvm::LLVMSetAlignment(alloca, align.abi() as c_uint);
            alloca
        }
    }
481
    /// Emits a `load` from `ptr` with the given ABI alignment.
    fn load(&self, ptr: &'ll Value, align: Align) -> &'ll Value {
        self.count_insn("load");
        unsafe {
            let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
            llvm::LLVMSetAlignment(load, align.abi() as c_uint);
            load
        }
    }

    /// Emits a `volatile load` from `ptr` (no explicit alignment is set).
    fn volatile_load(&self, ptr: &'ll Value) -> &'ll Value {
        self.count_insn("load.volatile");
        unsafe {
            let insn = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
            llvm::LLVMSetVolatile(insn, llvm::True);
            insn
        }
    }

    /// Emits an atomic load from `ptr` with the given ordering; the
    /// alignment is set from `size` to satisfy LLVM's requirement below.
    fn atomic_load(
        &self,
        ptr: &'ll Value,
        order: common::AtomicOrdering,
        size: Size,
    ) -> &'ll Value {
        self.count_insn("load.atomic");
        unsafe {
            let load = llvm::LLVMRustBuildAtomicLoad(
                self.llbuilder,
                ptr,
                noname(),
                AtomicOrdering::from_generic(order),
            );
            // LLVM requires the alignment of atomic loads to be at least the size of the type.
            llvm::LLVMSetAlignment(load, size.bytes() as c_uint);
            load
        }
    }
519
520
    /// Attaches `!range` metadata `[range.start, range.end)` to a `load`
    /// instruction, letting LLVM assume the loaded value lies in range.
    fn range_metadata(&self, load: &'ll Value, range: Range<u128>) {
        if self.sess().target.target.arch == "amdgpu" {
            // amdgpu/LLVM does something weird and thinks a i64 value is
            // split into a v2i32, halving the bitwidth LLVM expects,
            // tripping an assertion. So, for now, just disable this
            // optimization.
            return;
        }

        unsafe {
            // The range bounds must be constants of the loaded value's type.
            let llty = self.cx.val_ty(load);
            let v = [
                self.cx.c_uint_big(llty, range.start),
                self.cx.c_uint_big(llty, range.end)
            ];

            llvm::LLVMSetMetadata(load, llvm::MD_range as c_uint,
                                  llvm::LLVMMDNodeInContext(self.cx.llcx,
                                                            v.as_ptr(),
                                                            v.len() as c_uint));
        }
    }

    /// Attaches `!nonnull` metadata to a pointer `load`, asserting the
    /// loaded pointer is never null.
    fn nonnull_metadata(&self, load: &'ll Value) {
        unsafe {
            llvm::LLVMSetMetadata(load, llvm::MD_nonnull as c_uint,
                                  llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0));
        }
    }
550
    /// Emits a plain aligned `store` of `val` to `ptr` (no flags).
    fn store(&self, val: &'ll Value, ptr: &'ll Value, align: Align) -> &'ll Value {
        self.store_with_flags(val, ptr, align, MemFlags::empty())
    }

    /// Emits a `store` of `val` to `ptr`, honoring `MemFlags`:
    /// UNALIGNED forces alignment 1, VOLATILE marks the store volatile,
    /// NONTEMPORAL attaches `!nontemporal` metadata.
    fn store_with_flags(
        &self,
        val: &'ll Value,
        ptr: &'ll Value,
        align: Align,
        flags: MemFlags,
    ) -> &'ll Value {
        debug!("Store {:?} -> {:?} ({:?})", val, ptr, flags);
        self.count_insn("store");
        // `check_store` may pointer-cast `ptr` to match `val`'s type.
        let ptr = self.check_store(val, ptr);
        unsafe {
            let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
            let align = if flags.contains(MemFlags::UNALIGNED) {
                1
            } else {
                align.abi() as c_uint
            };
            llvm::LLVMSetAlignment(store, align);
            if flags.contains(MemFlags::VOLATILE) {
                llvm::LLVMSetVolatile(store, llvm::True);
            }
            if flags.contains(MemFlags::NONTEMPORAL) {
                // According to LLVM [1] building a nontemporal store must
                // *always* point to a metadata value of the integer 1.
                //
                // [1]: http://llvm.org/docs/LangRef.html#store-instruction
                let one = self.cx.c_i32(1);
                let node = llvm::LLVMMDNodeInContext(self.cx.llcx, &one, 1);
                llvm::LLVMSetMetadata(store, llvm::MD_nontemporal as c_uint, node);
            }
            store
        }
    }

    /// Emits an atomic store of `val` to `ptr` with the given ordering;
    /// alignment is set from `size` (see note below).
   fn atomic_store(&self, val: &'ll Value, ptr: &'ll Value,
                   order: common::AtomicOrdering, size: Size) {
        debug!("Store {:?} -> {:?}", val, ptr);
        self.count_insn("store.atomic");
        let ptr = self.check_store(val, ptr);
        unsafe {
            let store = llvm::LLVMRustBuildAtomicStore(
                self.llbuilder,
                val,
                ptr,
                AtomicOrdering::from_generic(order),
            );
            // LLVM requires the alignment of atomic stores to be at least the size of the type.
            llvm::LLVMSetAlignment(store, size.bytes() as c_uint);
        }
    }
605
    /// Emits a `getelementptr` of `ptr` with the given indices.
    fn gep(&self, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
        self.count_insn("gep");
        unsafe {
            llvm::LLVMBuildGEP(self.llbuilder, ptr, indices.as_ptr(),
                               indices.len() as c_uint, noname())
        }
    }

    /// Emits a `getelementptr inbounds`, asserting the result stays
    /// within the bounds of the underlying allocation.
    fn inbounds_gep(&self, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
        self.count_insn("inboundsgep");
        unsafe {
            llvm::LLVMBuildInBoundsGEP(
                self.llbuilder, ptr, indices.as_ptr(), indices.len() as c_uint, noname())
        }
    }
621
    /* Casts */
    // One wrapper per LLVM cast instruction; each emits a single cast of
    // `val` to `dest_ty` and records it via `count_insn`.
    fn trunc(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("trunc");
        unsafe {
            llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn sext(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("sext");
        unsafe {
            llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn fptoui(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("fptoui");
        unsafe {
            llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn fptosi(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("fptosi");
        unsafe {
            llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty,noname())
        }
    }

    fn uitofp(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("uitofp");
        unsafe {
            llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn sitofp(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("sitofp");
        unsafe {
            llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn fptrunc(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("fptrunc");
        unsafe {
            llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn fpext(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("fpext");
        unsafe {
            llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn ptrtoint(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("ptrtoint");
        unsafe {
            llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn inttoptr(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("inttoptr");
        unsafe {
            llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn bitcast(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("bitcast");
        unsafe {
            llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty, noname())
        }
    }


    // Integer cast that picks trunc/ext based on widths; `is_signed`
    // selects sign- vs zero-extension when widening.
    fn intcast(&self, val: &'ll Value, dest_ty: Self::Type, is_signed: bool) -> &'ll Value {
        self.count_insn("intcast");
        unsafe {
            llvm::LLVMRustBuildIntCast(self.llbuilder, val, dest_ty, is_signed)
        }
    }

    fn pointercast(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
        self.count_insn("pointercast");
        unsafe {
            llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty, noname())
        }
    }
714
    /* Comparisons */
    /// Emits an integer/pointer comparison with predicate `op`.
    fn icmp(&self, op: IntPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("icmp");
        // Translate the backend-generic predicate to the LLVM enum.
        let op = llvm::IntPredicate::from_generic(op);
        unsafe {
            llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
        }
    }

    /// Emits a floating-point comparison with predicate `op`.
    fn fcmp(&self, op: RealPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("fcmp");
        unsafe {
            llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
        }
    }
730
    /* Miscellaneous instructions */
    /// Emits a `phi` of type `ty` with no incoming values yet.
    fn empty_phi(&self, ty: Self::Type) -> &'ll Value {
        self.count_insn("emptyphi");
        unsafe {
            llvm::LLVMBuildPhi(self.llbuilder, ty, noname())
        }
    }

    /// Emits a `phi` of type `ty` with incoming value `vals[i]` from
    /// predecessor block `bbs[i]`; the two slices must be equal length.
    fn phi(&self, ty: Self::Type, vals: &[&'ll Value], bbs: &[&'ll BasicBlock]) -> &'ll Value {
        assert_eq!(vals.len(), bbs.len());
        let phi = self.empty_phi(ty);
        self.count_insn("addincoming");
        unsafe {
            llvm::LLVMAddIncoming(phi, vals.as_ptr(),
                                  bbs.as_ptr(),
                                  vals.len() as c_uint);
            phi
        }
    }
750
    /// Emits a call to an inline-asm expression with the given template
    /// (`asm`) and constraint string (`cons`), both NUL-terminated.
    /// Returns `None` if LLVM rejects the constraint string as malformed.
    fn inline_asm_call(&self, asm: *const c_char, cons: *const c_char,
                       inputs: &[&'ll Value], output: Self::Type,
                       volatile: bool, alignstack: bool,
                       dia: syntax::ast::AsmDialect) -> Option<&'ll Value> {
        self.count_insn("inlineasm");

        let volatile = if volatile { llvm::True }
                       else        { llvm::False };
        let alignstack = if alignstack { llvm::True }
                         else          { llvm::False };

        // The asm value's function type is derived from the input values'
        // LLVM types and the requested output type.
        let argtys = inputs.iter().map(|v| {
            debug!("Asm Input Type: {:?}", *v);
            self.cx.val_ty(*v)
        }).collect::<Vec<_>>();

        debug!("Asm Output Type: {:?}", output);
        let fty = &self.cx().func(&argtys[..], output);
        unsafe {
            // Ask LLVM to verify that the constraints are well-formed.
            let constraints_ok = llvm::LLVMRustInlineAsmVerify(fty, cons);
            debug!("Constraint verification result: {:?}", constraints_ok);
            if constraints_ok {
                let v = llvm::LLVMRustInlineAsm(
                    fty, asm, cons, volatile, alignstack, AsmDialect::from_generic(dia));
                Some(self.call(v, inputs, None))
            } else {
                // LLVM has detected an issue with our constraints, bail out
                None
            }
        }
    }
783
    /// Emits a memcpy of `size` bytes from `src` to `dst` with the given
    /// per-pointer alignments.
    fn memcpy(&self, dst: &'ll Value, dst_align: u64,
                  src: &'ll Value, src_align: u64,
                  size: &'ll Value, is_volatile: bool) -> &'ll Value {
        unsafe {
            llvm::LLVMRustBuildMemCpy(self.llbuilder, dst, dst_align as c_uint,
                                      src, src_align as c_uint, size, is_volatile)
        }
    }

    /// Like `memcpy`, but the ranges may overlap (memmove semantics).
    fn memmove(&self, dst: &'ll Value, dst_align: u64,
                  src: &'ll Value, src_align: u64,
                  size: &'ll Value, is_volatile: bool) -> &'ll Value {
        unsafe {
            llvm::LLVMRustBuildMemMove(self.llbuilder, dst, dst_align as c_uint,
                                      src, src_align as c_uint, size, is_volatile)
        }
    }

    /// Emits a float minimum; panics if the LLVM in use predates the
    /// intrinsic (see the expect message).
    fn minnum(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("minnum");
        unsafe {
            let instr = llvm::LLVMRustBuildMinNum(self.llbuilder, lhs, rhs);
            instr.expect("LLVMRustBuildMinNum is not available in LLVM version < 6.0")
        }
    }
    /// Emits a float maximum; panics if the LLVM in use predates the
    /// intrinsic (see the expect message).
    fn maxnum(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("maxnum");
        unsafe {
            let instr = llvm::LLVMRustBuildMaxNum(self.llbuilder, lhs, rhs);
            instr.expect("LLVMRustBuildMaxNum is not available in LLVM version < 6.0")
        }
    }
816
    /// Emits `select cond, then_val, else_val`.
    fn select(
        &self, cond: &'ll Value,
        then_val: &'ll Value,
        else_val: &'ll Value,
    ) -> &'ll Value {
        self.count_insn("select");
        unsafe {
            llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, noname())
        }
    }

    /// Emits a `va_arg` read of type `ty` from the va_list `list`.
    #[allow(dead_code)]
    fn va_arg(&self, list: &'ll Value, ty: Self::Type) -> &'ll Value {
        self.count_insn("vaarg");
        unsafe {
            llvm::LLVMBuildVAArg(self.llbuilder, list, ty, noname())
        }
    }

    /// Emits `extractelement` of lane `idx` from vector `vec`.
    fn extract_element(&self, vec: &'ll Value, idx: &'ll Value) -> &'ll Value {
        self.count_insn("extractelement");
        unsafe {
            llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, noname())
        }
    }

    /// Emits `insertelement` of `elt` into lane `idx` of vector `vec`.
    fn insert_element(
        &self, vec: &'ll Value,
        elt: &'ll Value,
        idx: &'ll Value,
    ) -> &'ll Value {
        self.count_insn("insertelement");
        unsafe {
            llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, noname())
        }
    }

    /// Emits `shufflevector` of `v1`/`v2` with the constant `mask`.
    fn shuffle_vector(&self, v1: &'ll Value, v2: &'ll Value, mask: &'ll Value) -> &'ll Value {
        self.count_insn("shufflevector");
        unsafe {
            llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, noname())
        }
    }
860
    /// Broadcasts `elt` into all `num_elts` lanes of a new vector:
    /// insert into lane 0 of an undef vector, then shuffle with an
    /// all-zero mask so every result lane reads lane 0.
    fn vector_splat(&self, num_elts: usize, elt: &'ll Value) -> &'ll Value {
        unsafe {
            let elt_ty = self.cx.val_ty(elt);
            let undef = llvm::LLVMGetUndef(&self.cx().vector(elt_ty, num_elts as u64));
            let vec = self.insert_element(undef, elt, self.cx.c_i32(0));
            // `c_null` of a vector type is the zeroinitializer mask.
            let vec_i32_ty = &self.cx().vector(&self.cx().i32(), num_elts as u64);
            self.shuffle_vector(vec, undef, self.cx().c_null(vec_i32_ty))
        }
    }
870
    /// Horizontal fast-math float add reduction of `src`, seeded with
    /// accumulator `acc`.
    fn vector_reduce_fadd_fast(&self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.fadd_fast");
        unsafe {
            // FIXME: add a non-fast math version once
            // https://bugs.llvm.org/show_bug.cgi?id=36732
            // is fixed.
            let instr = llvm::LLVMRustBuildVectorReduceFAdd(self.llbuilder, acc, src);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    /// Horizontal fast-math float multiply reduction of `src`, seeded
    /// with accumulator `acc`.
    fn vector_reduce_fmul_fast(&self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.fmul_fast");
        unsafe {
            // FIXME: add a non-fast math version once
            // https://bugs.llvm.org/show_bug.cgi?id=36732
            // is fixed.
            let instr = llvm::LLVMRustBuildVectorReduceFMul(self.llbuilder, acc, src);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    // Horizontal integer/bitwise reductions over the lanes of `src`.
    fn vector_reduce_add(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.add");
        unsafe { llvm::LLVMRustBuildVectorReduceAdd(self.llbuilder, src) }
    }
    fn vector_reduce_mul(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.mul");
        unsafe { llvm::LLVMRustBuildVectorReduceMul(self.llbuilder, src) }
    }
    fn vector_reduce_and(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.and");
        unsafe { llvm::LLVMRustBuildVectorReduceAnd(self.llbuilder, src) }
    }
    fn vector_reduce_or(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.or");
        unsafe { llvm::LLVMRustBuildVectorReduceOr(self.llbuilder, src) }
    }
    fn vector_reduce_xor(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.xor");
        unsafe { llvm::LLVMRustBuildVectorReduceXor(self.llbuilder, src) }
    }
    // Float min/max reductions; the strict versions pass NoNaNs = false.
    fn vector_reduce_fmin(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.fmin");
        unsafe { llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ false) }
    }
    fn vector_reduce_fmax(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.fmax");
        unsafe { llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ false) }
    }
    /// Fast-math min reduction: NoNaNs = true plus the unsafe-algebra flag.
    fn vector_reduce_fmin_fast(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.fmin_fast");
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ true);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
929     fn vector_reduce_fmax_fast(&self, src: &'ll Value) -> &'ll Value {
930         self.count_insn("vector.reduce.fmax_fast");
931         unsafe {
932             let instr = llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ true);
933             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
934             instr
935         }
936     }
937     fn vector_reduce_min(&self, src: &'ll Value, is_signed: bool) -> &'ll Value {
938         self.count_insn("vector.reduce.min");
939         unsafe { llvm::LLVMRustBuildVectorReduceMin(self.llbuilder, src, is_signed) }
940     }
941     fn vector_reduce_max(&self, src: &'ll Value, is_signed: bool) -> &'ll Value {
942         self.count_insn("vector.reduce.max");
943         unsafe { llvm::LLVMRustBuildVectorReduceMax(self.llbuilder, src, is_signed) }
944     }
945
946     fn extract_value(&self, agg_val: &'ll Value, idx: u64) -> &'ll Value {
947         self.count_insn("extractvalue");
948         assert_eq!(idx as c_uint as u64, idx);
949         unsafe {
950             llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, noname())
951         }
952     }
953
954     fn insert_value(&self, agg_val: &'ll Value, elt: &'ll Value,
955                        idx: u64) -> &'ll Value {
956         self.count_insn("insertvalue");
957         assert_eq!(idx as c_uint as u64, idx);
958         unsafe {
959             llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint,
960                                        noname())
961         }
962     }
963
964     fn landing_pad(&self, ty: Self::Type, pers_fn: &'ll Value,
965                        num_clauses: usize) -> &'ll Value {
966         self.count_insn("landingpad");
967         unsafe {
968             llvm::LLVMBuildLandingPad(self.llbuilder, ty, pers_fn,
969                                       num_clauses as c_uint, noname())
970         }
971     }
972
973     fn add_clause(&self, landing_pad: &'ll Value, clause: &'ll Value) {
974         unsafe {
975             llvm::LLVMAddClause(landing_pad, clause);
976         }
977     }
978
979     fn set_cleanup(&self, landing_pad: &'ll Value) {
980         self.count_insn("setcleanup");
981         unsafe {
982             llvm::LLVMSetCleanup(landing_pad, llvm::True);
983         }
984     }
985
986     fn resume(&self, exn: &'ll Value) -> &'ll Value {
987         self.count_insn("resume");
988         unsafe {
989             llvm::LLVMBuildResume(self.llbuilder, exn)
990         }
991     }
992
993     fn cleanup_pad(&self,
994                        parent: Option<&'ll Value>,
995                        args: &[&'ll Value]) -> &'ll Value {
996         self.count_insn("cleanuppad");
997         let name = const_cstr!("cleanuppad");
998         let ret = unsafe {
999             llvm::LLVMRustBuildCleanupPad(self.llbuilder,
1000                                           parent,
1001                                           args.len() as c_uint,
1002                                           args.as_ptr(),
1003                                           name.as_ptr())
1004         };
1005         ret.expect("LLVM does not have support for cleanuppad")
1006     }
1007
1008     fn cleanup_ret(
1009         &self, cleanup: &'ll Value,
1010         unwind: Option<&'ll BasicBlock>,
1011     ) -> &'ll Value {
1012         self.count_insn("cleanupret");
1013         let ret = unsafe {
1014             llvm::LLVMRustBuildCleanupRet(self.llbuilder, cleanup, unwind)
1015         };
1016         ret.expect("LLVM does not have support for cleanupret")
1017     }
1018
1019     fn catch_pad(&self,
1020                      parent: &'ll Value,
1021                      args: &[&'ll Value]) -> &'ll Value {
1022         self.count_insn("catchpad");
1023         let name = const_cstr!("catchpad");
1024         let ret = unsafe {
1025             llvm::LLVMRustBuildCatchPad(self.llbuilder, parent,
1026                                         args.len() as c_uint, args.as_ptr(),
1027                                         name.as_ptr())
1028         };
1029         ret.expect("LLVM does not have support for catchpad")
1030     }
1031
1032     fn catch_ret(&self, pad: &'ll Value, unwind: &'ll BasicBlock) -> &'ll Value {
1033         self.count_insn("catchret");
1034         let ret = unsafe {
1035             llvm::LLVMRustBuildCatchRet(self.llbuilder, pad, unwind)
1036         };
1037         ret.expect("LLVM does not have support for catchret")
1038     }
1039
1040     fn catch_switch(
1041         &self,
1042         parent: Option<&'ll Value>,
1043         unwind: Option<&'ll BasicBlock>,
1044         num_handlers: usize,
1045     ) -> &'ll Value {
1046         self.count_insn("catchswitch");
1047         let name = const_cstr!("catchswitch");
1048         let ret = unsafe {
1049             llvm::LLVMRustBuildCatchSwitch(self.llbuilder, parent, unwind,
1050                                            num_handlers as c_uint,
1051                                            name.as_ptr())
1052         };
1053         ret.expect("LLVM does not have support for catchswitch")
1054     }
1055
1056     fn add_handler(&self, catch_switch: &'ll Value, handler: &'ll BasicBlock) {
1057         unsafe {
1058             llvm::LLVMRustAddHandler(catch_switch, handler);
1059         }
1060     }
1061
1062     fn set_personality_fn(&self, personality: &'ll Value) {
1063         unsafe {
1064             llvm::LLVMSetPersonalityFn(self.llfn(), personality);
1065         }
1066     }
1067
1068     // Atomic Operations
1069     fn atomic_cmpxchg(
1070         &self,
1071         dst: &'ll Value,
1072         cmp: &'ll Value,
1073         src: &'ll Value,
1074         order: common::AtomicOrdering,
1075         failure_order: common::AtomicOrdering,
1076         weak: bool,
1077     ) -> &'ll Value {
1078         let weak = if weak { llvm::True } else { llvm::False };
1079         unsafe {
1080             llvm::LLVMRustBuildAtomicCmpXchg(
1081                 self.llbuilder,
1082                 dst,
1083                 cmp,
1084                 src,
1085                 AtomicOrdering::from_generic(order),
1086                 AtomicOrdering::from_generic(failure_order),
1087                 weak
1088             )
1089         }
1090     }
1091     fn atomic_rmw(
1092         &self,
1093         op: common::AtomicRmwBinOp,
1094         dst: &'ll Value,
1095         src: &'ll Value,
1096         order: common::AtomicOrdering,
1097     ) -> &'ll Value {
1098         unsafe {
1099             llvm::LLVMBuildAtomicRMW(
1100                 self.llbuilder,
1101                 AtomicRmwBinOp::from_generic(op),
1102                 dst,
1103                 src,
1104                 AtomicOrdering::from_generic(order),
1105                 False)
1106         }
1107     }
1108
1109     fn atomic_fence(&self, order: common::AtomicOrdering, scope: common::SynchronizationScope) {
1110         unsafe {
1111             llvm::LLVMRustBuildAtomicFence(
1112                 self.llbuilder,
1113                 AtomicOrdering::from_generic(order),
1114                 SynchronizationScope::from_generic(scope)
1115             );
1116         }
1117     }
1118
1119     fn add_case(&self, s: &'ll Value, on_val: &'ll Value, dest: &'ll BasicBlock) {
1120         unsafe {
1121             llvm::LLVMAddCase(s, on_val, dest)
1122         }
1123     }
1124
1125     fn add_incoming_to_phi(&self, phi: &'ll Value, val: &'ll Value, bb: &'ll BasicBlock) {
1126         self.count_insn("addincoming");
1127         unsafe {
1128             llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
1129         }
1130     }
1131
1132     fn set_invariant_load(&self, load: &'ll Value) {
1133         unsafe {
1134             llvm::LLVMSetMetadata(load, llvm::MD_invariant_load as c_uint,
1135                                   llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0));
1136         }
1137     }
1138
1139     /// Returns the ptr value that should be used for storing `val`.
1140     fn check_store<'b>(&self,
1141                        val: &'ll Value,
1142                        ptr: &'ll Value) -> &'ll Value {
1143         let dest_ptr_ty = self.cx.val_ty(ptr);
1144         let stored_ty = self.cx.val_ty(val);
1145         let stored_ptr_ty = self.cx.ptr_to(stored_ty);
1146
1147         assert_eq!(self.cx.kind(dest_ptr_ty), llvm::TypeKind::Pointer);
1148
1149         if dest_ptr_ty == stored_ptr_ty {
1150             ptr
1151         } else {
1152             debug!("Type mismatch in store. \
1153                     Expected {:?}, got {:?}; inserting bitcast",
1154                    dest_ptr_ty, stored_ptr_ty);
1155             self.bitcast(ptr, stored_ptr_ty)
1156         }
1157     }
1158
1159     /// Returns the args that should be used for a call to `llfn`.
1160     fn check_call<'b>(&self,
1161                       typ: &str,
1162                       llfn: &'ll Value,
1163                       args: &'b [&'ll Value]) -> Cow<'b, [&'ll Value]> {
1164         let mut fn_ty = self.cx.val_ty(llfn);
1165         // Strip off pointers
1166         while self.cx.kind(fn_ty) == llvm::TypeKind::Pointer {
1167             fn_ty = self.cx.element_type(fn_ty);
1168         }
1169
1170         assert!(self.cx.kind(fn_ty) == llvm::TypeKind::Function,
1171                 "builder::{} not passed a function, but {:?}", typ, fn_ty);
1172
1173         let param_tys = self.cx.func_params(fn_ty);
1174
1175         let all_args_match = param_tys.iter()
1176             .zip(args.iter().map(|&v| self.cx().val_ty(v)))
1177             .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty);
1178
1179         if all_args_match {
1180             return Cow::Borrowed(args);
1181         }
1182
1183         let casted_args: Vec<_> = param_tys.into_iter()
1184             .zip(args.iter())
1185             .enumerate()
1186             .map(|(i, (expected_ty, &actual_val))| {
1187                 let actual_ty = self.cx().val_ty(actual_val);
1188                 if expected_ty != actual_ty {
1189                     debug!("Type mismatch in function call of {:?}. \
1190                             Expected {:?} for param {}, got {:?}; injecting bitcast",
1191                            llfn, expected_ty, i, actual_ty);
1192                     self.bitcast(actual_val, expected_ty)
1193                 } else {
1194                     actual_val
1195                 }
1196             })
1197             .collect();
1198
1199         Cow::Owned(casted_args)
1200     }
1201
1202     fn lifetime_start(&self, ptr: &'ll Value, size: Size) {
1203         self.call_lifetime_intrinsic("llvm.lifetime.start", ptr, size);
1204     }
1205
1206     fn lifetime_end(&self, ptr: &'ll Value, size: Size) {
1207         self.call_lifetime_intrinsic("llvm.lifetime.end", ptr, size);
1208     }
1209
    /// Emits a call to the lifetime intrinsic named by `intrinsic`
    /// ("llvm.lifetime.start" or "llvm.lifetime.end") for the `size`-byte
    /// allocation at `ptr`.
    ///
    /// No-ops (emits nothing) when optimizations are off — lifetime markers
    /// only matter to the optimizer — or when `size` is zero.
    ///
    /// NOTE(review): the previous doc comment referred to `lt`/`emit`
    /// parameters from an older signature; it has been updated to match
    /// the code.
    fn call_lifetime_intrinsic(&self, intrinsic: &str, ptr: &'ll Value, size: Size) {
        if self.cx.sess().opts.optimize == config::OptLevel::No {
            return;
        }

        let size = size.bytes();
        if size == 0 {
            return;
        }

        let lifetime_intrinsic = self.cx.get_intrinsic(intrinsic);

        // The lifetime intrinsics take an i8* operand.
        let ptr = self.pointercast(ptr, self.cx.i8p());
        self.call(lifetime_intrinsic, &[self.cx.c_u64(size), ptr], None);
    }
1233
1234     fn call(&self, llfn: &'ll Value, args: &[&'ll Value],
1235                 bundle: Option<&common::OperandBundleDef<'ll, &'ll Value>>) -> &'ll Value {
1236         self.count_insn("call");
1237
1238         debug!("Call {:?} with args ({:?})",
1239                llfn,
1240                args);
1241
1242         let args = self.check_call("call", llfn, args);
1243         let bundle = bundle.map(OperandBundleDef::from_generic);
1244         let bundle = bundle.as_ref().map(|b| &*b.raw);
1245
1246         unsafe {
1247             llvm::LLVMRustBuildCall(
1248                 self.llbuilder,
1249                 llfn,
1250                 args.as_ptr() as *const &llvm::Value,
1251                 args.len() as c_uint,
1252                 bundle, noname()
1253             )
1254         }
1255     }
1256
1257     fn zext(&self, val: &'ll Value, dest_ty: Self::Type) -> &'ll Value {
1258         self.count_insn("zext");
1259         unsafe {
1260             llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty, noname())
1261         }
1262     }
1263
1264     fn struct_gep(&self, ptr: &'ll Value, idx: u64) -> &'ll Value {
1265         self.count_insn("structgep");
1266         assert_eq!(idx as c_uint as u64, idx);
1267         unsafe {
1268             llvm::LLVMBuildStructGEP(self.llbuilder, ptr, idx as c_uint, noname())
1269         }
1270     }
1271
1272     fn cx(&self) -> &'a CodegenCx<'ll, 'tcx> {
1273         &self.cx
1274     }
1275 }