1 // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 #![allow(dead_code)] // FFI wrappers
12
13 use llvm;
14 use llvm::{AtomicRmwBinOp, AtomicOrdering, SynchronizationScope, AsmDialect};
15 use llvm::{Opcode, IntPredicate, RealPredicate, False, OperandBundleDef};
16 use llvm::{ValueRef, BasicBlockRef, BuilderRef, ModuleRef};
17 use common::*;
18 use type_::Type;
19 use value::Value;
20 use libc::{c_uint, c_char};
21 use rustc::ty::TyCtxt;
22 use rustc::ty::layout::{Align, Size};
23 use rustc::session::{config, Session};
24
25 use std::borrow::Cow;
26 use std::ffi::CString;
27 use std::ops::Range;
28 use std::ptr;
29 use syntax_pos::Span;
30
31 // All Builders must have an llfn associated with them
32 #[must_use]
33 pub struct Builder<'a, 'tcx: 'a> {
34     pub llbuilder: BuilderRef,
35     pub ccx: &'a CrateContext<'a, 'tcx>,
36 }
37
38 impl<'a, 'tcx> Drop for Builder<'a, 'tcx> {
39     fn drop(&mut self) {
40         unsafe {
41             llvm::LLVMDisposeBuilder(self.llbuilder);
42         }
43     }
44 }
45
46 // This is a really awful way to get a zero-length c-string, but better (and a
47 // lot more efficient) than doing str::as_c_str("", ...) every time.
48 fn noname() -> *const c_char {
49     static CNULL: c_char = 0;
50     &CNULL
51 }
52
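// A rough sketch of how trans code typically drives this wrapper, assuming a
// `CrateContext` (`ccx`) and an LLVM function value (`llfn`) are already in
// scope; the value names are placeholders:
//
//     let bcx = Builder::new_block(ccx, llfn, "entry");
//     let sum = bcx.add(lhs, rhs);   // emit an `add` into the "entry" block
//     bcx.ret(sum);                  // terminate the block with `ret`
//
// Dropping a `Builder` disposes the underlying `BuilderRef` (see the `Drop`
// impl above), so no manual cleanup is needed.
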
53 impl<'a, 'tcx> Builder<'a, 'tcx> {
54     pub fn new_block<'b>(ccx: &'a CrateContext<'a, 'tcx>, llfn: ValueRef, name: &'b str) -> Self {
55         let builder = Builder::with_ccx(ccx);
56         let llbb = unsafe {
57             let name = CString::new(name).unwrap();
58             llvm::LLVMAppendBasicBlockInContext(
59                 ccx.llcx(),
60                 llfn,
61                 name.as_ptr()
62             )
63         };
64         builder.position_at_end(llbb);
65         builder
66     }
67
68     pub fn with_ccx(ccx: &'a CrateContext<'a, 'tcx>) -> Self {
69         // Create a fresh builder from the crate context.
70         let llbuilder = unsafe {
71             llvm::LLVMCreateBuilderInContext(ccx.llcx())
72         };
73         Builder {
74             llbuilder,
75             ccx,
76         }
77     }
78
79     pub fn build_sibling_block<'b>(&self, name: &'b str) -> Builder<'a, 'tcx> {
80         Builder::new_block(self.ccx, self.llfn(), name)
81     }
82
83     pub fn sess(&self) -> &Session {
84         self.ccx.sess()
85     }
86
87     pub fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {
88         self.ccx.tcx()
89     }
90
91     pub fn llfn(&self) -> ValueRef {
92         unsafe {
93             llvm::LLVMGetBasicBlockParent(self.llbb())
94         }
95     }
96
97     pub fn llbb(&self) -> BasicBlockRef {
98         unsafe {
99             llvm::LLVMGetInsertBlock(self.llbuilder)
100         }
101     }
102
103     fn count_insn(&self, category: &str) {
104         if self.ccx.sess().trans_stats() {
105             self.ccx.stats().borrow_mut().n_llvm_insns += 1;
106         }
107         if self.ccx.sess().count_llvm_insns() {
108             *self.ccx.stats()
109                 .borrow_mut()
110                 .llvm_insns
111                 .entry(category.to_string())
112                 .or_insert(0) += 1;
113         }
114     }
115
116     pub fn set_value_name(&self, value: ValueRef, name: &str) {
117         let cname = CString::new(name.as_bytes()).unwrap();
118         unsafe {
119             llvm::LLVMSetValueName(value, cname.as_ptr());
120         }
121     }
122
123     pub fn position_before(&self, insn: ValueRef) {
124         unsafe {
125             llvm::LLVMPositionBuilderBefore(self.llbuilder, insn);
126         }
127     }
128
129     pub fn position_at_end(&self, llbb: BasicBlockRef) {
130         unsafe {
131             llvm::LLVMPositionBuilderAtEnd(self.llbuilder, llbb);
132         }
133     }
134
135     pub fn position_at_start(&self, llbb: BasicBlockRef) {
136         unsafe {
137             llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
138         }
139     }
140
141     pub fn ret_void(&self) {
142         self.count_insn("retvoid");
143         unsafe {
144             llvm::LLVMBuildRetVoid(self.llbuilder);
145         }
146     }
147
148     pub fn ret(&self, v: ValueRef) {
149         self.count_insn("ret");
150         unsafe {
151             llvm::LLVMBuildRet(self.llbuilder, v);
152         }
153     }
154
155     pub fn aggregate_ret(&self, ret_vals: &[ValueRef]) {
156         unsafe {
157             llvm::LLVMBuildAggregateRet(self.llbuilder,
158                                         ret_vals.as_ptr(),
159                                         ret_vals.len() as c_uint);
160         }
161     }
162
163     pub fn br(&self, dest: BasicBlockRef) {
164         self.count_insn("br");
165         unsafe {
166             llvm::LLVMBuildBr(self.llbuilder, dest);
167         }
168     }
169
170     pub fn cond_br(&self, cond: ValueRef, then_llbb: BasicBlockRef, else_llbb: BasicBlockRef) {
171         self.count_insn("condbr");
172         unsafe {
173             llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
174         }
175     }
176
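    /// Builds a `switch` terminator with room for `num_cases` arms; the arms
    /// themselves are attached afterwards with `add_case`. An illustrative
    /// sketch, assuming `discr` and the destination blocks were built earlier:
    ///
    /// ```ignore
    /// let switch = bcx.switch(discr, default_bb, 2);
    /// bcx.add_case(switch, C_i32(bcx.ccx, 0), zero_bb);
    /// bcx.add_case(switch, C_i32(bcx.ccx, 1), one_bb);
    /// ```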
177     pub fn switch(&self, v: ValueRef, else_llbb: BasicBlockRef, num_cases: usize) -> ValueRef {
178         unsafe {
179             llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, num_cases as c_uint)
180         }
181     }
182
183     pub fn indirect_br(&self, addr: ValueRef, num_dests: usize) {
184         self.count_insn("indirectbr");
185         unsafe {
186             llvm::LLVMBuildIndirectBr(self.llbuilder, addr, num_dests as c_uint);
187         }
188     }
189
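    /// Builds an `invoke`: like `call`, except that control transfers to `then`
    /// on a normal return and to `catch` if the callee unwinds. A hedged sketch
    /// of the usual shape during cleanup-aware codegen (all names below are
    /// placeholders):
    ///
    /// ```ignore
    /// let normal = bcx.build_sibling_block("normal");
    /// let unwind = bcx.build_sibling_block("unwind");
    /// let ret = bcx.invoke(callee, &[arg], normal.llbb(), unwind.llbb(), None);
    /// // `unwind` would then build a landing pad; see `landing_pad` below.
    /// ```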
190     pub fn invoke(&self,
191                   llfn: ValueRef,
192                   args: &[ValueRef],
193                   then: BasicBlockRef,
194                   catch: BasicBlockRef,
195                   bundle: Option<&OperandBundleDef>) -> ValueRef {
196         self.count_insn("invoke");
197
198         debug!("Invoke {:?} with args ({})",
199                Value(llfn),
200                args.iter()
201                    .map(|&v| format!("{:?}", Value(v)))
202                    .collect::<Vec<String>>()
203                    .join(", "));
204
205         let args = self.check_call("invoke", llfn, args);
206         let bundle = bundle.as_ref().map(|b| b.raw()).unwrap_or(ptr::null_mut());
207
208         unsafe {
209             llvm::LLVMRustBuildInvoke(self.llbuilder,
210                                       llfn,
211                                       args.as_ptr(),
212                                       args.len() as c_uint,
213                                       then,
214                                       catch,
215                                       bundle,
216                                       noname())
217         }
218     }
219
220     pub fn unreachable(&self) {
221         self.count_insn("unreachable");
222         unsafe {
223             llvm::LLVMBuildUnreachable(self.llbuilder);
224         }
225     }
226
227     /* Arithmetic */
228     pub fn add(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
229         self.count_insn("add");
230         unsafe {
231             llvm::LLVMBuildAdd(self.llbuilder, lhs, rhs, noname())
232         }
233     }
234
235     pub fn nswadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
236         self.count_insn("nswadd");
237         unsafe {
238             llvm::LLVMBuildNSWAdd(self.llbuilder, lhs, rhs, noname())
239         }
240     }
241
242     pub fn nuwadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
243         self.count_insn("nuwadd");
244         unsafe {
245             llvm::LLVMBuildNUWAdd(self.llbuilder, lhs, rhs, noname())
246         }
247     }
248
249     pub fn fadd(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
250         self.count_insn("fadd");
251         unsafe {
252             llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname())
253         }
254     }
255
256     pub fn fadd_fast(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
257         self.count_insn("fadd");
258         unsafe {
259             let instr = llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname());
260             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
261             instr
262         }
263     }
264
265     pub fn sub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
266         self.count_insn("sub");
267         unsafe {
268             llvm::LLVMBuildSub(self.llbuilder, lhs, rhs, noname())
269         }
270     }
271
272     pub fn nswsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
273         self.count_insn("nswsub");
274         unsafe {
275             llvm::LLVMBuildNSWSub(self.llbuilder, lhs, rhs, noname())
276         }
277     }
278
279     pub fn nuwsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
280         self.count_insn("nuwsub");
281         unsafe {
282             llvm::LLVMBuildNUWSub(self.llbuilder, lhs, rhs, noname())
283         }
284     }
285
286     pub fn fsub(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
287         self.count_insn("fsub");
288         unsafe {
289             llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname())
290         }
291     }
292
293     pub fn fsub_fast(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
294         self.count_insn("fsub");
295         unsafe {
296             let instr = llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname());
297             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
298             instr
299         }
300     }
301
302     pub fn mul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
303         self.count_insn("mul");
304         unsafe {
305             llvm::LLVMBuildMul(self.llbuilder, lhs, rhs, noname())
306         }
307     }
308
309     pub fn nswmul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
310         self.count_insn("nswmul");
311         unsafe {
312             llvm::LLVMBuildNSWMul(self.llbuilder, lhs, rhs, noname())
313         }
314     }
315
316     pub fn nuwmul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
317         self.count_insn("nuwmul");
318         unsafe {
319             llvm::LLVMBuildNUWMul(self.llbuilder, lhs, rhs, noname())
320         }
321     }
322
323     pub fn fmul(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
324         self.count_insn("fmul");
325         unsafe {
326             llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname())
327         }
328     }
329
330     pub fn fmul_fast(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
331         self.count_insn("fmul");
332         unsafe {
333             let instr = llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname());
334             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
335             instr
336         }
337     }
338
339
340     pub fn udiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
341         self.count_insn("udiv");
342         unsafe {
343             llvm::LLVMBuildUDiv(self.llbuilder, lhs, rhs, noname())
344         }
345     }
346
347     pub fn sdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
348         self.count_insn("sdiv");
349         unsafe {
350             llvm::LLVMBuildSDiv(self.llbuilder, lhs, rhs, noname())
351         }
352     }
353
354     pub fn exactsdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
355         self.count_insn("exactsdiv");
356         unsafe {
357             llvm::LLVMBuildExactSDiv(self.llbuilder, lhs, rhs, noname())
358         }
359     }
360
361     pub fn fdiv(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
362         self.count_insn("fdiv");
363         unsafe {
364             llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname())
365         }
366     }
367
368     pub fn fdiv_fast(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
369         self.count_insn("fdiv");
370         unsafe {
371             let instr = llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname());
372             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
373             instr
374         }
375     }
376
377     pub fn urem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
378         self.count_insn("urem");
379         unsafe {
380             llvm::LLVMBuildURem(self.llbuilder, lhs, rhs, noname())
381         }
382     }
383
384     pub fn srem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
385         self.count_insn("srem");
386         unsafe {
387             llvm::LLVMBuildSRem(self.llbuilder, lhs, rhs, noname())
388         }
389     }
390
391     pub fn frem(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
392         self.count_insn("frem");
393         unsafe {
394             llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname())
395         }
396     }
397
398     pub fn frem_fast(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
399         self.count_insn("frem");
400         unsafe {
401             let instr = llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname());
402             llvm::LLVMRustSetHasUnsafeAlgebra(instr);
403             instr
404         }
405     }
406
407     pub fn shl(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
408         self.count_insn("shl");
409         unsafe {
410             llvm::LLVMBuildShl(self.llbuilder, lhs, rhs, noname())
411         }
412     }
413
414     pub fn lshr(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
415         self.count_insn("lshr");
416         unsafe {
417             llvm::LLVMBuildLShr(self.llbuilder, lhs, rhs, noname())
418         }
419     }
420
421     pub fn ashr(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
422         self.count_insn("ashr");
423         unsafe {
424             llvm::LLVMBuildAShr(self.llbuilder, lhs, rhs, noname())
425         }
426     }
427
428     pub fn and(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
429         self.count_insn("and");
430         unsafe {
431             llvm::LLVMBuildAnd(self.llbuilder, lhs, rhs, noname())
432         }
433     }
434
435     pub fn or(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
436         self.count_insn("or");
437         unsafe {
438             llvm::LLVMBuildOr(self.llbuilder, lhs, rhs, noname())
439         }
440     }
441
442     pub fn xor(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
443         self.count_insn("xor");
444         unsafe {
445             llvm::LLVMBuildXor(self.llbuilder, lhs, rhs, noname())
446         }
447     }
448
449     pub fn binop(&self, op: Opcode, lhs: ValueRef, rhs: ValueRef)
450               -> ValueRef {
451         self.count_insn("binop");
452         unsafe {
453             llvm::LLVMBuildBinOp(self.llbuilder, op, lhs, rhs, noname())
454         }
455     }
456
457     pub fn neg(&self, v: ValueRef) -> ValueRef {
458         self.count_insn("neg");
459         unsafe {
460             llvm::LLVMBuildNeg(self.llbuilder, v, noname())
461         }
462     }
463
464     pub fn nswneg(&self, v: ValueRef) -> ValueRef {
465         self.count_insn("nswneg");
466         unsafe {
467             llvm::LLVMBuildNSWNeg(self.llbuilder, v, noname())
468         }
469     }
470
471     pub fn nuwneg(&self, v: ValueRef) -> ValueRef {
472         self.count_insn("nuwneg");
473         unsafe {
474             llvm::LLVMBuildNUWNeg(self.llbuilder, v, noname())
475         }
476     }
477     pub fn fneg(&self, v: ValueRef) -> ValueRef {
478         self.count_insn("fneg");
479         unsafe {
480             llvm::LLVMBuildFNeg(self.llbuilder, v, noname())
481         }
482     }
483
484     pub fn not(&self, v: ValueRef) -> ValueRef {
485         self.count_insn("not");
486         unsafe {
487             llvm::LLVMBuildNot(self.llbuilder, v, noname())
488         }
489     }
490
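    /// Emits an `alloca` in the *entry* block of the current function, since
    /// LLVM's mem2reg pass only promotes entry-block allocas to registers;
    /// `dynamic_alloca` emits at the current position instead. Illustrative
    /// sketch, assuming an `align: Align` computed from the value's layout:
    ///
    /// ```ignore
    /// let slot = bcx.alloca(Type::i64(bcx.ccx), "tmp", align);
    /// bcx.store(value, slot, Some(align));
    /// ```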
491     pub fn alloca(&self, ty: Type, name: &str, align: Align) -> ValueRef {
492         let builder = Builder::with_ccx(self.ccx);
493         builder.position_at_start(unsafe {
494             llvm::LLVMGetFirstBasicBlock(self.llfn())
495         });
496         builder.dynamic_alloca(ty, name, align)
497     }
498
499     pub fn dynamic_alloca(&self, ty: Type, name: &str, align: Align) -> ValueRef {
500         self.count_insn("alloca");
501         unsafe {
502             let alloca = if name.is_empty() {
503                 llvm::LLVMBuildAlloca(self.llbuilder, ty.to_ref(), noname())
504             } else {
505                 let name = CString::new(name).unwrap();
506                 llvm::LLVMBuildAlloca(self.llbuilder, ty.to_ref(),
507                                       name.as_ptr())
508             };
509             llvm::LLVMSetAlignment(alloca, align.abi() as c_uint);
510             alloca
511         }
512     }
513
514     pub fn free(&self, ptr: ValueRef) {
515         self.count_insn("free");
516         unsafe {
517             llvm::LLVMBuildFree(self.llbuilder, ptr);
518         }
519     }
520
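    /// Builds a `load`, optionally tagging it with an explicit alignment;
    /// `None` leaves LLVM's default ABI alignment in place. A short sketch,
    /// with `ptr` and `align` standing in for values computed by the caller:
    ///
    /// ```ignore
    /// let value = bcx.load(ptr, Some(align));
    /// bcx.nonnull_metadata(value); // only if the loaded pointer is known non-null
    /// ```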
521     pub fn load(&self, ptr: ValueRef, align: Option<Align>) -> ValueRef {
522         self.count_insn("load");
523         unsafe {
524             let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
525             if let Some(align) = align {
526                 llvm::LLVMSetAlignment(load, align.abi() as c_uint);
527             }
528             load
529         }
530     }
531
532     pub fn volatile_load(&self, ptr: ValueRef) -> ValueRef {
533         self.count_insn("load.volatile");
534         unsafe {
535             let insn = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
536             llvm::LLVMSetVolatile(insn, llvm::True);
537             insn
538         }
539     }
540
541     pub fn atomic_load(&self, ptr: ValueRef, order: AtomicOrdering, align: Align) -> ValueRef {
542         self.count_insn("load.atomic");
543         unsafe {
544             let load = llvm::LLVMRustBuildAtomicLoad(self.llbuilder, ptr, noname(), order);
545             // FIXME(eddyb) Isn't it UB to use `pref` instead of `abi` here?
546             // However, 64-bit atomic loads on `i686-apple-darwin` appear to
547             // require `___atomic_load` with ABI-alignment, so it's staying.
548             llvm::LLVMSetAlignment(load, align.pref() as c_uint);
549             load
550         }
551     }
552
553
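    /// Attaches `!range` metadata to `load`, promising LLVM that the loaded
    /// value lies in the half-open range `[range.start, range.end)`. For
    /// example, a load of a `bool` can be annotated with `0..2`; sketch only,
    /// with `bool_ptr` assumed to have been produced elsewhere:
    ///
    /// ```ignore
    /// let b = bcx.load(bool_ptr, None);
    /// bcx.range_metadata(b, 0..2);
    /// ```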
554     pub fn range_metadata(&self, load: ValueRef, range: Range<u128>) {
555         unsafe {
556             let llty = val_ty(load);
557             let v = [
558                 C_uint_big(llty, range.start),
559                 C_uint_big(llty, range.end)
560             ];
561
562             llvm::LLVMSetMetadata(load, llvm::MD_range as c_uint,
563                                   llvm::LLVMMDNodeInContext(self.ccx.llcx(),
564                                                             v.as_ptr(),
565                                                             v.len() as c_uint));
566         }
567     }
568
569     pub fn nonnull_metadata(&self, load: ValueRef) {
570         unsafe {
571             llvm::LLVMSetMetadata(load, llvm::MD_nonnull as c_uint,
572                                   llvm::LLVMMDNodeInContext(self.ccx.llcx(), ptr::null(), 0));
573         }
574     }
575
576     pub fn store(&self, val: ValueRef, ptr: ValueRef, align: Option<Align>) -> ValueRef {
577         debug!("Store {:?} -> {:?}", Value(val), Value(ptr));
578         assert!(!self.llbuilder.is_null());
579         self.count_insn("store");
580         let ptr = self.check_store(val, ptr);
581         unsafe {
582             let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
583             if let Some(align) = align {
584                 llvm::LLVMSetAlignment(store, align.abi() as c_uint);
585             }
586             store
587         }
588     }
589
590     pub fn volatile_store(&self, val: ValueRef, ptr: ValueRef) -> ValueRef {
591         debug!("Store {:?} -> {:?}", Value(val), Value(ptr));
592         assert!(!self.llbuilder.is_null());
593         self.count_insn("store.volatile");
594         let ptr = self.check_store(val, ptr);
595         unsafe {
596             let insn = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
597             llvm::LLVMSetVolatile(insn, llvm::True);
598             insn
599         }
600     }
601
602     pub fn atomic_store(&self, val: ValueRef, ptr: ValueRef,
603                         order: AtomicOrdering, align: Align) {
604         debug!("Store {:?} -> {:?}", Value(val), Value(ptr));
605         self.count_insn("store.atomic");
606         let ptr = self.check_store(val, ptr);
607         unsafe {
608             let store = llvm::LLVMRustBuildAtomicStore(self.llbuilder, val, ptr, order);
609             // FIXME(eddyb) Isn't it UB to use `pref` instead of `abi` here?
610             // Also see `atomic_load` for more context.
611             llvm::LLVMSetAlignment(store, align.pref() as c_uint);
612         }
613     }
614
615     pub fn nontemporal_store(&self, val: ValueRef, ptr: ValueRef) -> ValueRef {
616         debug!("Store {:?} -> {:?}", Value(val), Value(ptr));
617         assert!(!self.llbuilder.is_null());
618         self.count_insn("store.nontemporal");
619         let ptr = self.check_store(val, ptr);
620         unsafe {
621             let insn = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
622
623             // According to LLVM [1], the `!nontemporal` metadata attached to a
624             // store must *always* point to a metadata node holding the integer 1. Who knew?
625             //
626             // [1]: http://llvm.org/docs/LangRef.html#store-instruction
627             let one = C_i32(self.ccx, 1);
628             let node = llvm::LLVMMDNodeInContext(self.ccx.llcx(),
629                                                  &one,
630                                                  1);
631             llvm::LLVMSetMetadata(insn,
632                                   llvm::MD_nontemporal as c_uint,
633                                   node);
634             insn
635         }
636     }
637
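    /// Builds a `getelementptr` from `ptr` and an arbitrary list of indices.
    /// GEP only performs address arithmetic; it never dereferences `ptr`. A
    /// sketch that indexes element `index` of an array of structs and then
    /// selects field 1 of that element (`array_ptr` and `index` are
    /// placeholders):
    ///
    /// ```ignore
    /// let elt = bcx.inbounds_gep(array_ptr, &[C_i32(bcx.ccx, 0), index]);
    /// let field = bcx.struct_gep(elt, 1);
    /// ```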
638     pub fn gep(&self, ptr: ValueRef, indices: &[ValueRef]) -> ValueRef {
639         self.count_insn("gep");
640         unsafe {
641             llvm::LLVMBuildGEP(self.llbuilder, ptr, indices.as_ptr(),
642                                indices.len() as c_uint, noname())
643         }
644     }
645
646     pub fn inbounds_gep(&self, ptr: ValueRef, indices: &[ValueRef]) -> ValueRef {
647         self.count_insn("inboundsgep");
648         unsafe {
649             llvm::LLVMBuildInBoundsGEP(
650                 self.llbuilder, ptr, indices.as_ptr(), indices.len() as c_uint, noname())
651         }
652     }
653
654     pub fn struct_gep(&self, ptr: ValueRef, idx: u64) -> ValueRef {
655         self.count_insn("structgep");
656         assert_eq!(idx as c_uint as u64, idx);
657         unsafe {
658             llvm::LLVMBuildStructGEP(self.llbuilder, ptr, idx as c_uint, noname())
659         }
660     }
661
662     pub fn global_string(&self, _str: *const c_char) -> ValueRef {
663         self.count_insn("globalstring");
664         unsafe {
665             llvm::LLVMBuildGlobalString(self.llbuilder, _str, noname())
666         }
667     }
668
669     pub fn global_string_ptr(&self, _str: *const c_char) -> ValueRef {
670         self.count_insn("globalstringptr");
671         unsafe {
672             llvm::LLVMBuildGlobalStringPtr(self.llbuilder, _str, noname())
673         }
674     }
675
676     /* Casts */
677     pub fn trunc(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
678         self.count_insn("trunc");
679         unsafe {
680             llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty.to_ref(), noname())
681         }
682     }
683
684     pub fn zext(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
685         self.count_insn("zext");
686         unsafe {
687             llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty.to_ref(), noname())
688         }
689     }
690
691     pub fn sext(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
692         self.count_insn("sext");
693         unsafe {
694             llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty.to_ref(), noname())
695         }
696     }
697
698     pub fn fptoui(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
699         self.count_insn("fptoui");
700         unsafe {
701             llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty.to_ref(), noname())
702         }
703     }
704
705     pub fn fptosi(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
706         self.count_insn("fptosi");
707         unsafe {
708             llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty.to_ref(),noname())
709         }
710     }
711
712     pub fn uitofp(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
713         self.count_insn("uitofp");
714         unsafe {
715             llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty.to_ref(), noname())
716         }
717     }
718
719     pub fn sitofp(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
720         self.count_insn("sitofp");
721         unsafe {
722             llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty.to_ref(), noname())
723         }
724     }
725
726     pub fn fptrunc(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
727         self.count_insn("fptrunc");
728         unsafe {
729             llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty.to_ref(), noname())
730         }
731     }
732
733     pub fn fpext(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
734         self.count_insn("fpext");
735         unsafe {
736             llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty.to_ref(), noname())
737         }
738     }
739
740     pub fn ptrtoint(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
741         self.count_insn("ptrtoint");
742         unsafe {
743             llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty.to_ref(), noname())
744         }
745     }
746
747     pub fn inttoptr(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
748         self.count_insn("inttoptr");
749         unsafe {
750             llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty.to_ref(), noname())
751         }
752     }
753
754     pub fn bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
755         self.count_insn("bitcast");
756         unsafe {
757             llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
758         }
759     }
760
761     pub fn zext_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
762         self.count_insn("zextorbitcast");
763         unsafe {
764             llvm::LLVMBuildZExtOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
765         }
766     }
767
768     pub fn sext_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
769         self.count_insn("sextorbitcast");
770         unsafe {
771             llvm::LLVMBuildSExtOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
772         }
773     }
774
775     pub fn trunc_or_bitcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
776         self.count_insn("truncorbitcast");
777         unsafe {
778             llvm::LLVMBuildTruncOrBitCast(self.llbuilder, val, dest_ty.to_ref(), noname())
779         }
780     }
781
782     pub fn cast(&self, op: Opcode, val: ValueRef, dest_ty: Type) -> ValueRef {
783         self.count_insn("cast");
784         unsafe {
785             llvm::LLVMBuildCast(self.llbuilder, op, val, dest_ty.to_ref(), noname())
786         }
787     }
788
789     pub fn pointercast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
790         self.count_insn("pointercast");
791         unsafe {
792             llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty.to_ref(), noname())
793         }
794     }
795
796     pub fn intcast(&self, val: ValueRef, dest_ty: Type, is_signed: bool) -> ValueRef {
797         self.count_insn("intcast");
798         unsafe {
799             llvm::LLVMRustBuildIntCast(self.llbuilder, val, dest_ty.to_ref(), is_signed)
800         }
801     }
802
803     pub fn fpcast(&self, val: ValueRef, dest_ty: Type) -> ValueRef {
804         self.count_insn("fpcast");
805         unsafe {
806             llvm::LLVMBuildFPCast(self.llbuilder, val, dest_ty.to_ref(), noname())
807         }
808     }
809
810
811     /* Comparisons */
812     pub fn icmp(&self, op: IntPredicate, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
813         self.count_insn("icmp");
814         unsafe {
815             llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
816         }
817     }
818
819     pub fn fcmp(&self, op: RealPredicate, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
820         self.count_insn("fcmp");
821         unsafe {
822             llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
823         }
824     }
825
826     /* Miscellaneous instructions */
827     pub fn empty_phi(&self, ty: Type) -> ValueRef {
828         self.count_insn("emptyphi");
829         unsafe {
830             llvm::LLVMBuildPhi(self.llbuilder, ty.to_ref(), noname())
831         }
832     }
833
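    /// Builds a `phi` of type `ty` whose incoming value `vals[i]` arrives from
    /// the corresponding predecessor block `bbs[i]`. A sketch of merging the
    /// result of a then/else diamond (all names are placeholders):
    ///
    /// ```ignore
    /// let result = merge_bcx.phi(Type::i32(ccx), &[then_val, else_val],
    ///                            &[then_bcx.llbb(), else_bcx.llbb()]);
    /// ```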
834     pub fn phi(&self, ty: Type, vals: &[ValueRef], bbs: &[BasicBlockRef]) -> ValueRef {
835         assert_eq!(vals.len(), bbs.len());
836         let phi = self.empty_phi(ty);
837         self.count_insn("addincoming");
838         unsafe {
839             llvm::LLVMAddIncoming(phi, vals.as_ptr(),
840                                   bbs.as_ptr(),
841                                   vals.len() as c_uint);
842             phi
843         }
844     }
845
846     pub fn add_span_comment(&self, sp: Span, text: &str) {
847         if self.ccx.sess().asm_comments() {
848             let s = format!("{} ({})",
849                             text,
850                             self.ccx.sess().codemap().span_to_string(sp));
851             debug!("{}", s);
852             self.add_comment(&s);
853         }
854     }
855
856     pub fn add_comment(&self, text: &str) {
857         if self.ccx.sess().asm_comments() {
858             let sanitized = text.replace("$", "");
859             let comment_text = format!("{} {}", "#",
860                                        sanitized.replace("\n", "\n\t# "));
861             self.count_insn("inlineasm");
862             let comment_text = CString::new(comment_text).unwrap();
863             let asm = unsafe {
864                 llvm::LLVMConstInlineAsm(Type::func(&[], &Type::void(self.ccx)).to_ref(),
865                                          comment_text.as_ptr(), noname(), False,
866                                          False)
867             };
868             self.call(asm, &[], None);
869         }
870     }
871
872     pub fn inline_asm_call(&self, asm: *const c_char, cons: *const c_char,
873                          inputs: &[ValueRef], output: Type,
874                          volatile: bool, alignstack: bool,
875                          dia: AsmDialect) -> ValueRef {
876         self.count_insn("inlineasm");
877
878         let volatile = if volatile { llvm::True }
879                        else        { llvm::False };
880         let alignstack = if alignstack { llvm::True }
881                          else          { llvm::False };
882
883         let argtys = inputs.iter().map(|v| {
884             debug!("Asm Input Type: {:?}", Value(*v));
885             val_ty(*v)
886         }).collect::<Vec<_>>();
887
888         debug!("Asm Output Type: {:?}", output);
889         let fty = Type::func(&argtys[..], &output);
890         unsafe {
891             let v = llvm::LLVMRustInlineAsm(
892                 fty.to_ref(), asm, cons, volatile, alignstack, dia);
893             self.call(v, inputs, None)
894         }
895     }
896
897     pub fn call(&self, llfn: ValueRef, args: &[ValueRef],
898                 bundle: Option<&OperandBundleDef>) -> ValueRef {
899         self.count_insn("call");
900
901         debug!("Call {:?} with args ({})",
902                Value(llfn),
903                args.iter()
904                    .map(|&v| format!("{:?}", Value(v)))
905                    .collect::<Vec<String>>()
906                    .join(", "));
907
908         let args = self.check_call("call", llfn, args);
909         let bundle = bundle.as_ref().map(|b| b.raw()).unwrap_or(ptr::null_mut());
910
911         unsafe {
912             llvm::LLVMRustBuildCall(self.llbuilder, llfn, args.as_ptr(),
913                                     args.len() as c_uint, bundle, noname())
914         }
915     }
916
917     pub fn select(&self, cond: ValueRef, then_val: ValueRef, else_val: ValueRef) -> ValueRef {
918         self.count_insn("select");
919         unsafe {
920             llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, noname())
921         }
922     }
923
924     pub fn va_arg(&self, list: ValueRef, ty: Type) -> ValueRef {
925         self.count_insn("vaarg");
926         unsafe {
927             llvm::LLVMBuildVAArg(self.llbuilder, list, ty.to_ref(), noname())
928         }
929     }
930
931     pub fn extract_element(&self, vec: ValueRef, idx: ValueRef) -> ValueRef {
932         self.count_insn("extractelement");
933         unsafe {
934             llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, noname())
935         }
936     }
937
938     pub fn insert_element(&self, vec: ValueRef, elt: ValueRef, idx: ValueRef) -> ValueRef {
939         self.count_insn("insertelement");
940         unsafe {
941             llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, noname())
942         }
943     }
944
945     pub fn shuffle_vector(&self, v1: ValueRef, v2: ValueRef, mask: ValueRef) -> ValueRef {
946         self.count_insn("shufflevector");
947         unsafe {
948             llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, noname())
949         }
950     }
951
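    /// Splats `elt` into a vector of `num_elts` lanes using the standard LLVM
    /// idiom: insert the scalar into lane 0 of an undef vector, then shuffle
    /// with an all-zeros mask so every lane copies lane 0. Sketch, assuming a
    /// scalar `x` built earlier:
    ///
    /// ```ignore
    /// let v4 = bcx.vector_splat(4, x); // <4 x T> with every lane equal to `x`
    /// ```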
952     pub fn vector_splat(&self, num_elts: usize, elt: ValueRef) -> ValueRef {
953         unsafe {
954             let elt_ty = val_ty(elt);
955             let undef = llvm::LLVMGetUndef(Type::vector(&elt_ty, num_elts as u64).to_ref());
956             let vec = self.insert_element(undef, elt, C_i32(self.ccx, 0));
957             let vec_i32_ty = Type::vector(&Type::i32(self.ccx), num_elts as u64);
958             self.shuffle_vector(vec, undef, C_null(vec_i32_ty))
959         }
960     }
961
962     pub fn extract_value(&self, agg_val: ValueRef, idx: u64) -> ValueRef {
963         self.count_insn("extractvalue");
964         assert_eq!(idx as c_uint as u64, idx);
965         unsafe {
966             llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, noname())
967         }
968     }
969
970     pub fn insert_value(&self, agg_val: ValueRef, elt: ValueRef,
971                        idx: u64) -> ValueRef {
972         self.count_insn("insertvalue");
973         assert_eq!(idx as c_uint as u64, idx);
974         unsafe {
975             llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint,
976                                        noname())
977         }
978     }
979
980     pub fn is_null(&self, val: ValueRef) -> ValueRef {
981         self.count_insn("isnull");
982         unsafe {
983             llvm::LLVMBuildIsNull(self.llbuilder, val, noname())
984         }
985     }
986
987     pub fn is_not_null(&self, val: ValueRef) -> ValueRef {
988         self.count_insn("isnotnull");
989         unsafe {
990             llvm::LLVMBuildIsNotNull(self.llbuilder, val, noname())
991         }
992     }
993
994     pub fn ptrdiff(&self, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
995         self.count_insn("ptrdiff");
996         unsafe {
997             llvm::LLVMBuildPtrDiff(self.llbuilder, lhs, rhs, noname())
998         }
999     }
1000
1001     pub fn trap(&self) {
1002         unsafe {
1003             let bb: BasicBlockRef = llvm::LLVMGetInsertBlock(self.llbuilder);
1004             let fn_: ValueRef = llvm::LLVMGetBasicBlockParent(bb);
1005             let m: ModuleRef = llvm::LLVMGetGlobalParent(fn_);
1006             let p = "llvm.trap\0".as_ptr();
1007             let t: ValueRef = llvm::LLVMGetNamedFunction(m, p as *const _);
1008             assert!(!t.is_null());
1009             let args: &[ValueRef] = &[];
1010             self.count_insn("trap");
1011             llvm::LLVMRustBuildCall(self.llbuilder, t,
1012                                     args.as_ptr(), args.len() as c_uint,
1013                                     ptr::null_mut(),
1014                                     noname());
1015         }
1016     }
1017
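    /// Builds a `landingpad` in the current block, which must be the unwind
    /// destination of some `invoke`. A hedged sketch of the usual cleanup
    /// shape on landing-pad platforms (the `{ i8*, i32 }` payload type matches
    /// what the personality function produces; all names are placeholders):
    ///
    /// ```ignore
    /// let lp_ty = Type::struct_(ccx, &[Type::i8p(ccx), Type::i32(ccx)], false);
    /// let lp = unwind_bcx.landing_pad(lp_ty, pers_fn, 1, unwind_bcx.llfn());
    /// unwind_bcx.set_cleanup(lp);
    /// // ... run cleanups ...
    /// unwind_bcx.resume(lp);
    /// ```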
1018     pub fn landing_pad(&self, ty: Type, pers_fn: ValueRef,
1019                        num_clauses: usize,
1020                        llfn: ValueRef) -> ValueRef {
1021         self.count_insn("landingpad");
1022         unsafe {
1023             llvm::LLVMRustBuildLandingPad(self.llbuilder, ty.to_ref(), pers_fn,
1024                                           num_clauses as c_uint, noname(), llfn)
1025         }
1026     }
1027
1028     pub fn add_clause(&self, landing_pad: ValueRef, clause: ValueRef) {
1029         unsafe {
1030             llvm::LLVMAddClause(landing_pad, clause);
1031         }
1032     }
1033
1034     pub fn set_cleanup(&self, landing_pad: ValueRef) {
1035         self.count_insn("setcleanup");
1036         unsafe {
1037             llvm::LLVMSetCleanup(landing_pad, llvm::True);
1038         }
1039     }
1040
1041     pub fn resume(&self, exn: ValueRef) -> ValueRef {
1042         self.count_insn("resume");
1043         unsafe {
1044             llvm::LLVMBuildResume(self.llbuilder, exn)
1045         }
1046     }
1047
1048     pub fn cleanup_pad(&self,
1049                        parent: Option<ValueRef>,
1050                        args: &[ValueRef]) -> ValueRef {
1051         self.count_insn("cleanuppad");
1052         let parent = parent.unwrap_or(ptr::null_mut());
1053         let name = CString::new("cleanuppad").unwrap();
1054         let ret = unsafe {
1055             llvm::LLVMRustBuildCleanupPad(self.llbuilder,
1056                                           parent,
1057                                           args.len() as c_uint,
1058                                           args.as_ptr(),
1059                                           name.as_ptr())
1060         };
1061         assert!(!ret.is_null(), "LLVM does not have support for cleanuppad");
1062         return ret
1063     }
1064
1065     pub fn cleanup_ret(&self, cleanup: ValueRef,
1066                        unwind: Option<BasicBlockRef>) -> ValueRef {
1067         self.count_insn("cleanupret");
1068         let unwind = unwind.unwrap_or(ptr::null_mut());
1069         let ret = unsafe {
1070             llvm::LLVMRustBuildCleanupRet(self.llbuilder, cleanup, unwind)
1071         };
1072         assert!(!ret.is_null(), "LLVM does not have support for cleanupret");
1073         return ret
1074     }
1075
1076     pub fn catch_pad(&self,
1077                      parent: ValueRef,
1078                      args: &[ValueRef]) -> ValueRef {
1079         self.count_insn("catchpad");
1080         let name = CString::new("catchpad").unwrap();
1081         let ret = unsafe {
1082             llvm::LLVMRustBuildCatchPad(self.llbuilder, parent,
1083                                         args.len() as c_uint, args.as_ptr(),
1084                                         name.as_ptr())
1085         };
1086         assert!(!ret.is_null(), "LLVM does not have support for catchpad");
1087         return ret
1088     }
1089
1090     pub fn catch_ret(&self, pad: ValueRef, unwind: BasicBlockRef) -> ValueRef {
1091         self.count_insn("catchret");
1092         let ret = unsafe {
1093             llvm::LLVMRustBuildCatchRet(self.llbuilder, pad, unwind)
1094         };
1095         assert!(!ret.is_null(), "LLVM does not have support for catchret");
1096         return ret
1097     }
1098
1099     pub fn catch_switch(&self,
1100                         parent: Option<ValueRef>,
1101                         unwind: Option<BasicBlockRef>,
1102                         num_handlers: usize) -> ValueRef {
1103         self.count_insn("catchswitch");
1104         let parent = parent.unwrap_or(ptr::null_mut());
1105         let unwind = unwind.unwrap_or(ptr::null_mut());
1106         let name = CString::new("catchswitch").unwrap();
1107         let ret = unsafe {
1108             llvm::LLVMRustBuildCatchSwitch(self.llbuilder, parent, unwind,
1109                                            num_handlers as c_uint,
1110                                            name.as_ptr())
1111         };
1112         assert!(!ret.is_null(), "LLVM does not have support for catchswitch");
1113         return ret
1114     }
1115
1116     pub fn add_handler(&self, catch_switch: ValueRef, handler: BasicBlockRef) {
1117         unsafe {
1118             llvm::LLVMRustAddHandler(catch_switch, handler);
1119         }
1120     }
1121
1122     pub fn set_personality_fn(&self, personality: ValueRef) {
1123         unsafe {
1124             llvm::LLVMSetPersonalityFn(self.llfn(), personality);
1125         }
1126     }
1127
1128     // Atomic Operations
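    /// Builds a `cmpxchg` on `dst`: atomically replaces the stored value with
    /// `src` if it currently equals `cmp`, yielding an aggregate of the loaded
    /// value and an `i1` success flag. Sketch of unpacking that result
    /// (operand names are placeholders):
    ///
    /// ```ignore
    /// let pair = bcx.atomic_cmpxchg(dst, expected, new, order, failure_order, llvm::False);
    /// let old = bcx.extract_value(pair, 0);
    /// let ok = bcx.extract_value(pair, 1);
    /// ```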
1129     pub fn atomic_cmpxchg(&self, dst: ValueRef,
1130                          cmp: ValueRef, src: ValueRef,
1131                          order: AtomicOrdering,
1132                          failure_order: AtomicOrdering,
1133                          weak: llvm::Bool) -> ValueRef {
1134         unsafe {
1135             llvm::LLVMRustBuildAtomicCmpXchg(self.llbuilder, dst, cmp, src,
1136                                          order, failure_order, weak)
1137         }
1138     }
1139     pub fn atomic_rmw(&self, op: AtomicRmwBinOp,
1140                      dst: ValueRef, src: ValueRef,
1141                      order: AtomicOrdering) -> ValueRef {
1142         unsafe {
1143             llvm::LLVMBuildAtomicRMW(self.llbuilder, op, dst, src, order, False)
1144         }
1145     }
1146
1147     pub fn atomic_fence(&self, order: AtomicOrdering, scope: SynchronizationScope) {
1148         unsafe {
1149             llvm::LLVMRustBuildAtomicFence(self.llbuilder, order, scope);
1150         }
1151     }
1152
1153     pub fn add_case(&self, s: ValueRef, on_val: ValueRef, dest: BasicBlockRef) {
1154         unsafe {
1155             llvm::LLVMAddCase(s, on_val, dest)
1156         }
1157     }
1158
1159     pub fn add_incoming_to_phi(&self, phi: ValueRef, val: ValueRef, bb: BasicBlockRef) {
1160         unsafe {
1161             llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
1162         }
1163     }
1164
1165     pub fn set_invariant_load(&self, load: ValueRef) {
1166         unsafe {
1167             llvm::LLVMSetMetadata(load, llvm::MD_invariant_load as c_uint,
1168                                   llvm::LLVMMDNodeInContext(self.ccx.llcx(), ptr::null(), 0));
1169         }
1170     }
1171
1172     /// Returns the ptr value that should be used for storing `val`.
1173     fn check_store<'b>(&self,
1174                        val: ValueRef,
1175                        ptr: ValueRef) -> ValueRef {
1176         let dest_ptr_ty = val_ty(ptr);
1177         let stored_ty = val_ty(val);
1178         let stored_ptr_ty = stored_ty.ptr_to();
1179
1180         assert_eq!(dest_ptr_ty.kind(), llvm::TypeKind::Pointer);
1181
1182         if dest_ptr_ty == stored_ptr_ty {
1183             ptr
1184         } else {
1185             debug!("Type mismatch in store. \
1186                     Expected {:?}, got {:?}; inserting bitcast",
1187                    dest_ptr_ty, stored_ptr_ty);
1188             self.bitcast(ptr, stored_ptr_ty)
1189         }
1190     }
1191
1192     /// Returns the args that should be used for a call to `llfn`.
1193     fn check_call<'b>(&self,
1194                       typ: &str,
1195                       llfn: ValueRef,
1196                       args: &'b [ValueRef]) -> Cow<'b, [ValueRef]> {
1197         let mut fn_ty = val_ty(llfn);
1198         // Strip off pointers
1199         while fn_ty.kind() == llvm::TypeKind::Pointer {
1200             fn_ty = fn_ty.element_type();
1201         }
1202
1203         assert!(fn_ty.kind() == llvm::TypeKind::Function,
1204                 "builder::{} not passed a function, but {:?}", typ, fn_ty);
1205
1206         let param_tys = fn_ty.func_params();
1207
1208         let all_args_match = param_tys.iter()
1209             .zip(args.iter().map(|&v| val_ty(v)))
1210             .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty);
1211
1212         if all_args_match {
1213             return Cow::Borrowed(args);
1214         }
1215
1216         let casted_args: Vec<_> = param_tys.into_iter()
1217             .zip(args.iter())
1218             .enumerate()
1219             .map(|(i, (expected_ty, &actual_val))| {
1220                 let actual_ty = val_ty(actual_val);
1221                 if expected_ty != actual_ty {
1222                     debug!("Type mismatch in function call of {:?}. \
1223                             Expected {:?} for param {}, got {:?}; injecting bitcast",
1224                            Value(llfn),
1225                            expected_ty, i, actual_ty);
1226                     self.bitcast(actual_val, expected_ty)
1227                 } else {
1228                     actual_val
1229                 }
1230             })
1231             .collect();
1232
1233         return Cow::Owned(casted_args);
1234     }
1235
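    /// Emits `llvm.lifetime.start` for the `size`-byte allocation behind `ptr`
    /// (a no-op when optimizations are off or `size` is zero; see
    /// `call_lifetime_intrinsic` below). Pairing it with `lifetime_end`
    /// tightens the live range LLVM assumes for a stack slot. Sketch with
    /// placeholder names:
    ///
    /// ```ignore
    /// bcx.lifetime_start(slot, size);
    /// // ... use `slot` ...
    /// bcx.lifetime_end(slot, size);
    /// ```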
1236     pub fn lifetime_start(&self, ptr: ValueRef, size: Size) {
1237         self.call_lifetime_intrinsic("llvm.lifetime.start", ptr, size);
1238     }
1239
1240     pub fn lifetime_end(&self, ptr: ValueRef, size: Size) {
1241         self.call_lifetime_intrinsic("llvm.lifetime.end", ptr, size);
1242     }
1243
1244     /// If LLVM lifetime intrinsics are enabled (i.e. optimizations are
1245     /// on) and `size` is nonzero, emits a call to the named lifetime
1246     /// `intrinsic`, passing the allocation size in bytes and `ptr` cast
1247     /// to `i8*`. This marks the start or end of the live range that
1248     /// LLVM's optimizer may assume for the allocation behind `ptr`.
1249     ///
1250     /// If LLVM lifetime intrinsics are disabled (i.e. optimizations are
1251     /// off) or `size` is zero, this is a no-op.
1252     fn call_lifetime_intrinsic(&self, intrinsic: &str, ptr: ValueRef, size: Size) {
1253         if self.ccx.sess().opts.optimize == config::OptLevel::No {
1254             return;
1255         }
1256
1257         let size = size.bytes();
1258         if size == 0 {
1259             return;
1260         }
1261
1262         let lifetime_intrinsic = self.ccx.get_intrinsic(intrinsic);
1263
1264         let ptr = self.pointercast(ptr, Type::i8p(self.ccx));
1265         self.call(lifetime_intrinsic, &[C_u64(self.ccx, size), ptr], None);
1266     }
1267 }