1 // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 /*! rustc compiler intrinsics.
13 The corresponding definitions are in librustc/middle/trans/foreign.rs.
17 The volatile intrinsics provide operations intended to act on I/O
18 memory, which are guaranteed to not be reordered by the compiler
19 across other volatile intrinsics. See the LLVM documentation on
22 [volatile]: http://llvm.org/docs/LangRef.html#volatile-memory-accesses
26 The atomic intrinsics provide common atomic operations on machine
27 words, with multiple possible memory orderings. They obey the same
semantics as C++11. See the LLVM documentation on [atomics].
30 [atomics]: http://llvm.org/docs/Atomics.html
32 A quick refresher on memory ordering:
34 * Acquire - a barrier for acquiring a lock. Subsequent reads and writes
35 take place after the barrier.
36 * Release - a barrier for releasing a lock. Preceding reads and writes
37 take place before the barrier.
38 * Sequentially consistent - sequentially consistent operations are
39 guaranteed to happen in order. This is the standard mode for working
40 with atomic types and is equivalent to Java's `volatile`.
44 #[allow(missing_doc)];
46 // This is needed to prevent duplicate lang item definitions.
48 pub use realstd::intrinsics::{TyDesc, Opaque, TyVisitor, TypeId};
/// Signature of compiler-generated "glue" functions (e.g. drop glue,
/// per the field comments below): they take a raw pointer to the value
/// they operate on.
pub type GlueFn = extern "Rust" fn(*i8);

// NOTE(review): the comments below document fields of the `TyDesc`
// struct, whose declaration is not visible in this view.
// Called when a value of type `T` is no longer needed
// Called by reflection visitor to visit a value of type `T`
// Name corresponding to the type
    // Interior of the `TyVisitor` trait (the trait header is not visible
    // in this view). Every callback returns `bool`; judging from the
    // uniform shape, `true` presumably means "continue the traversal" —
    // confirm against the driver behind `visit_tydesc`.

    // Primitive / nullary types.
    fn visit_bot(&mut self) -> bool;
    fn visit_nil(&mut self) -> bool;
    fn visit_bool(&mut self) -> bool;

    // Signed integer types.
    fn visit_int(&mut self) -> bool;
    fn visit_i8(&mut self) -> bool;
    fn visit_i16(&mut self) -> bool;
    fn visit_i32(&mut self) -> bool;
    fn visit_i64(&mut self) -> bool;

    // Unsigned integer types.
    fn visit_uint(&mut self) -> bool;
    fn visit_u8(&mut self) -> bool;
    fn visit_u16(&mut self) -> bool;
    fn visit_u32(&mut self) -> bool;
    fn visit_u64(&mut self) -> bool;

    // Floating-point types.
    fn visit_f32(&mut self) -> bool;
    fn visit_f64(&mut self) -> bool;

    fn visit_char(&mut self) -> bool;

    // String representations: boxed, owned, slice, fixed-size.
    fn visit_estr_box(&mut self) -> bool;
    fn visit_estr_uniq(&mut self) -> bool;
    fn visit_estr_slice(&mut self) -> bool;
    fn visit_estr_fixed(&mut self, n: uint, sz: uint, align: uint) -> bool;

    // Pointer-like types. `inner` is the pointee's type descriptor;
    // `mtbl` presumably encodes mutability — confirm against the driver.
    fn visit_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
    fn visit_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
    fn visit_ptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
    fn visit_rptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool;

    // Vector representations.
    fn visit_vec(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
    fn visit_unboxed_vec(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
    fn visit_evec_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
    fn visit_evec_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
    fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
    fn visit_evec_fixed(&mut self, n: uint, sz: uint, align: uint,
                        mtbl: uint, inner: *TyDesc) -> bool;

    // Records: an enter/field.../leave bracket per record value.
    fn visit_enter_rec(&mut self, n_fields: uint,
                       sz: uint, align: uint) -> bool;
    fn visit_rec_field(&mut self, i: uint, name: &str,
                       mtbl: uint, inner: *TyDesc) -> bool;
    fn visit_leave_rec(&mut self, n_fields: uint,
                       sz: uint, align: uint) -> bool;

    // Structs ("classes"), same enter/field/leave protocol.
    fn visit_enter_class(&mut self, name: &str, named_fields: bool, n_fields: uint,
                         sz: uint, align: uint) -> bool;
    fn visit_class_field(&mut self, i: uint, name: &str, named: bool,
                         mtbl: uint, inner: *TyDesc) -> bool;
    fn visit_leave_class(&mut self, name: &str, named_fields: bool, n_fields: uint,
                         sz: uint, align: uint) -> bool;

    // Tuples.
    fn visit_enter_tup(&mut self, n_fields: uint,
                       sz: uint, align: uint) -> bool;
    fn visit_tup_field(&mut self, i: uint, inner: *TyDesc) -> bool;
    fn visit_leave_tup(&mut self, n_fields: uint,
                       sz: uint, align: uint) -> bool;

    // Enums. `get_disr` extracts the discriminant from a raw value
    // pointer. NOTE(review): the `visit_enter_enum_variant` and
    // `visit_leave_enum_variant` signatures are truncated in this view —
    // their continuation lines are not visible.
    fn visit_enter_enum(&mut self, n_variants: uint,
                        get_disr: extern unsafe fn(ptr: *Opaque) -> Disr,
                        sz: uint, align: uint) -> bool;
    fn visit_enter_enum_variant(&mut self, variant: uint,
    fn visit_enum_variant_field(&mut self, i: uint, offset: uint, inner: *TyDesc) -> bool;
    fn visit_leave_enum_variant(&mut self, variant: uint,
    fn visit_leave_enum(&mut self, n_variants: uint,
                        get_disr: extern unsafe fn(ptr: *Opaque) -> Disr,
                        sz: uint, align: uint) -> bool;

    // Function types: enter, one call per input, one for the output, leave.
    fn visit_enter_fn(&mut self, purity: uint, proto: uint,
                      n_inputs: uint, retstyle: uint) -> bool;
    fn visit_fn_input(&mut self, i: uint, mode: uint, inner: *TyDesc) -> bool;
    fn visit_fn_output(&mut self, retstyle: uint, variadic: bool, inner: *TyDesc) -> bool;
    fn visit_leave_fn(&mut self, purity: uint, proto: uint,
                      n_inputs: uint, retstyle: uint) -> bool;

    // Trait objects, type parameters, and `self`.
    fn visit_trait(&mut self, name: &str) -> bool;
    fn visit_param(&mut self, i: uint) -> bool;
    fn visit_self(&mut self) -> bool;
// Atomic intrinsics taking `&mut T` / `&T` arguments.
//
// NOTE(review): this block declares the same set of intrinsics as the
// `*mut T`-based block that follows; the two are presumably selected by
// `cfg` attributes not visible in this view — confirm against the full
// file before changing either set. Naming convention throughout: the
// unsuffixed form is the strongest (sequentially consistent) ordering
// per the module docs above; `_acq`, `_rel`, `_acqrel`, `_relaxed`
// select weaker orderings. All value-returning forms return the value
// previously stored at `dst`.
extern "rust-intrinsic" {
    // Compare-and-exchange: if `*dst == old`, store `src`; returns the
    // previous value either way.
    pub fn atomic_cxchg<T>(dst: &mut T, old: T, src: T) -> T;
    pub fn atomic_cxchg_acq<T>(dst: &mut T, old: T, src: T) -> T;
    pub fn atomic_cxchg_rel<T>(dst: &mut T, old: T, src: T) -> T;
    pub fn atomic_cxchg_acqrel<T>(dst: &mut T, old: T, src: T) -> T;
    pub fn atomic_cxchg_relaxed<T>(dst: &mut T, old: T, src: T) -> T;

    // Atomic load (no release/acqrel variants: loads cannot release).
    pub fn atomic_load<T>(src: &T) -> T;
    pub fn atomic_load_acq<T>(src: &T) -> T;
    pub fn atomic_load_relaxed<T>(src: &T) -> T;

    // Atomic store (no acquire/acqrel variants: stores cannot acquire).
    pub fn atomic_store<T>(dst: &mut T, val: T);
    pub fn atomic_store_rel<T>(dst: &mut T, val: T);
    pub fn atomic_store_relaxed<T>(dst: &mut T, val: T);

    // Atomic exchange: store `src`, return the old value.
    pub fn atomic_xchg<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_xchg_acq<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_xchg_rel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_xchg_acqrel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_xchg_relaxed<T>(dst: &mut T, src: T) -> T;

    // Atomic fetch-and-add.
    pub fn atomic_xadd<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_xadd_acq<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_xadd_rel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_xadd_acqrel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_xadd_relaxed<T>(dst: &mut T, src: T) -> T;

    // Atomic fetch-and-subtract.
    pub fn atomic_xsub<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_xsub_acq<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_xsub_rel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_xsub_acqrel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_xsub_relaxed<T>(dst: &mut T, src: T) -> T;

    // Atomic fetch-and-AND.
    pub fn atomic_and<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_and_acq<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_and_rel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_and_acqrel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_and_relaxed<T>(dst: &mut T, src: T) -> T;

    // Atomic fetch-and-NAND.
    pub fn atomic_nand<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_nand_acq<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_nand_rel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_nand_acqrel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_nand_relaxed<T>(dst: &mut T, src: T) -> T;

    // Atomic fetch-and-OR.
    pub fn atomic_or<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_or_acq<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_or_rel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_or_acqrel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_or_relaxed<T>(dst: &mut T, src: T) -> T;

    // Atomic fetch-and-XOR.
    pub fn atomic_xor<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_xor_acq<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_xor_rel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_xor_acqrel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_xor_relaxed<T>(dst: &mut T, src: T) -> T;

    // Atomic signed max.
    pub fn atomic_max<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_max_acq<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_max_rel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_max_acqrel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_max_relaxed<T>(dst: &mut T, src: T) -> T;

    // Atomic signed min.
    pub fn atomic_min<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_min_acq<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_min_rel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_min_acqrel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_min_relaxed<T>(dst: &mut T, src: T) -> T;

    // Atomic unsigned min.
    pub fn atomic_umin<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_umin_acq<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_umin_rel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_umin_acqrel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_umin_relaxed<T>(dst: &mut T, src: T) -> T;

    // Atomic unsigned max.
    pub fn atomic_umax<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_umax_acq<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_umax_rel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_umax_acqrel<T>(dst: &mut T, src: T) -> T;
    pub fn atomic_umax_relaxed<T>(dst: &mut T, src: T) -> T;
// Atomic intrinsics taking raw-pointer arguments — same operations and
// ordering-suffix convention as the `&mut T` block above, presumably
// gated by `cfg` attributes not visible in this view.
extern "rust-intrinsic" {
    // NB: These intrinsics take unsafe pointers because they mutate aliased
    // memory, which is not valid for either `&` or `&mut`.

    // Compare-and-exchange: if `*dst == old`, store `src`; returns the
    // previous value either way.
    pub fn atomic_cxchg<T>(dst: *mut T, old: T, src: T) -> T;
    pub fn atomic_cxchg_acq<T>(dst: *mut T, old: T, src: T) -> T;
    pub fn atomic_cxchg_rel<T>(dst: *mut T, old: T, src: T) -> T;
    pub fn atomic_cxchg_acqrel<T>(dst: *mut T, old: T, src: T) -> T;
    pub fn atomic_cxchg_relaxed<T>(dst: *mut T, old: T, src: T) -> T;

    // Atomic load (no release/acqrel variants: loads cannot release).
    pub fn atomic_load<T>(src: *T) -> T;
    pub fn atomic_load_acq<T>(src: *T) -> T;
    pub fn atomic_load_relaxed<T>(src: *T) -> T;

    // Atomic store (no acquire/acqrel variants: stores cannot acquire).
    pub fn atomic_store<T>(dst: *mut T, val: T);
    pub fn atomic_store_rel<T>(dst: *mut T, val: T);
    pub fn atomic_store_relaxed<T>(dst: *mut T, val: T);

    // Atomic exchange: store `src`, return the old value.
    pub fn atomic_xchg<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_xchg_acq<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_xchg_rel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_xchg_acqrel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_xchg_relaxed<T>(dst: *mut T, src: T) -> T;

    // Atomic fetch-and-add.
    pub fn atomic_xadd<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_xadd_acq<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_xadd_rel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_xadd_acqrel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_xadd_relaxed<T>(dst: *mut T, src: T) -> T;

    // Atomic fetch-and-subtract.
    pub fn atomic_xsub<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_xsub_acq<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_xsub_rel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_xsub_acqrel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_xsub_relaxed<T>(dst: *mut T, src: T) -> T;

    // Atomic fetch-and-AND.
    pub fn atomic_and<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_and_acq<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_and_rel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_and_acqrel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_and_relaxed<T>(dst: *mut T, src: T) -> T;

    // Atomic fetch-and-NAND.
    pub fn atomic_nand<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_nand_acq<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_nand_rel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_nand_acqrel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_nand_relaxed<T>(dst: *mut T, src: T) -> T;

    // Atomic fetch-and-OR.
    pub fn atomic_or<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_or_acq<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_or_rel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_or_acqrel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_or_relaxed<T>(dst: *mut T, src: T) -> T;

    // Atomic fetch-and-XOR.
    pub fn atomic_xor<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_xor_acq<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_xor_rel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_xor_acqrel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_xor_relaxed<T>(dst: *mut T, src: T) -> T;

    // Atomic signed max.
    pub fn atomic_max<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_max_acq<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_max_rel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_max_acqrel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_max_relaxed<T>(dst: *mut T, src: T) -> T;

    // Atomic signed min.
    pub fn atomic_min<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_min_acq<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_min_rel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_min_acqrel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_min_relaxed<T>(dst: *mut T, src: T) -> T;

    // Atomic unsigned min.
    pub fn atomic_umin<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_umin_acq<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_umin_rel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_umin_acqrel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_umin_relaxed<T>(dst: *mut T, src: T) -> T;

    // Atomic unsigned max.
    pub fn atomic_umax<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_umax_acq<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_umax_rel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_umax_acqrel<T>(dst: *mut T, src: T) -> T;
    pub fn atomic_umax_relaxed<T>(dst: *mut T, src: T) -> T;
// General compiler intrinsics: fences, memory operations, type
// introspection, LLVM math intrinsics, and bit/overflow operations.
extern "rust-intrinsic" {
    // Atomic fences; suffixes select the memory ordering, the
    // unsuffixed form being the strongest per the module docs above.
    pub fn atomic_fence();
    pub fn atomic_fence_acq();
    pub fn atomic_fence_rel();
    pub fn atomic_fence_acqrel();

    // NOTE(review): the two doc comments below belong to declarations
    // (`abort`, `breakpoint`) whose lines are not visible in this view.
    /// Abort the execution of the process.
    /// Execute a breakpoint trap, for inspection by a debugger.

    // Volatile memory accesses — never reordered across other volatile
    // operations by the compiler (see the module docs above).
    pub fn volatile_load<T>(src: *T) -> T;
    pub fn volatile_store<T>(dst: *mut T, val: T);

    /// The size of a type in bytes.
    /// This is the exact number of bytes in memory taken up by a
    /// value of the given type. In other words, a memset of this size
    /// would *exactly* overwrite a value. When laid out in vectors
    /// and structures there may be additional padding between
    pub fn size_of<T>() -> uint;

    /// Move a value to an uninitialized memory location.
    /// Drop glue is not run on the destination.
    pub fn move_val_init<T>(dst: &mut T, src: T);

    // Minimum (ABI-required) and preferred alignment of `T`, in bytes.
    pub fn min_align_of<T>() -> uint;
    pub fn pref_align_of<T>() -> uint;

    /// Get a static pointer to a type descriptor.
    pub fn get_tydesc<T>() -> *TyDesc;

    /// Gets an identifier which is globally unique to the specified type. This
    /// function will return the same value for a type regardless of whichever
    /// crate it is invoked in.
    pub fn type_id<T: 'static>() -> TypeId;

    /// Create a value initialized to zero.
    /// `init` is unsafe because it returns a zeroed-out datum,
    /// which is unsafe unless T is Pod.
    pub fn init<T>() -> T;

    /// Create an uninitialized value.
    pub fn uninit<T>() -> T;

    /// Move a value out of scope without running drop glue.
    /// `forget` is unsafe because the caller is responsible for
    /// ensuring the argument is deallocated already.
    pub fn forget<T>(_: T) -> ();

    // Reinterpret the bits of a value of one type as another type.
    pub fn transmute<T,U>(e: T) -> U;

    /// Returns `true` if a type requires drop glue.
    pub fn needs_drop<T>() -> bool;

    /// Returns `true` if a type is managed (will be allocated on the local heap)
    pub fn owns_managed<T>() -> bool;

    // Walk the type described by `td`, invoking the callbacks of `tv`.
    pub fn visit_tydesc(td: *TyDesc, tv: &mut TyVisitor);

    /// Calculates the offset from a pointer. The offset *must* be in-bounds of
    /// the object, or one-byte-past-the-end. An arithmetic overflow is also
    /// undefined behaviour.
    /// This is implemented as an intrinsic to avoid converting to and from an
    /// integer, since the conversion would throw away aliasing information.
    pub fn offset<T>(dst: *T, offset: int) -> *T;

    /// Equivalent to the appropriate `llvm.memcpy.p0i8.0i8.*` intrinsic, with
    /// a size of `count` * `size_of::<T>()` and an alignment of
    /// `min_align_of::<T>()`
    pub fn copy_nonoverlapping_memory<T>(dst: *mut T, src: *T, count: uint);

    /// Equivalent to the appropriate `llvm.memmove.p0i8.0i8.*` intrinsic, with
    /// a size of `count` * `size_of::<T>()` and an alignment of
    /// `min_align_of::<T>()`
    pub fn copy_memory<T>(dst: *mut T, src: *T, count: uint);

    /// Equivalent to the appropriate `llvm.memset.p0i8.*` intrinsic, with a
    /// size of `count` * `size_of::<T>()` and an alignment of
    /// `min_align_of::<T>()`
    pub fn set_memory<T>(dst: *mut T, val: u8, count: uint);

    // LLVM floating-point math intrinsics, in f32/f64 pairs.
    pub fn sqrtf32(x: f32) -> f32;
    pub fn sqrtf64(x: f64) -> f64;

    // `a` raised to an integer power `x`.
    pub fn powif32(a: f32, x: i32) -> f32;
    pub fn powif64(a: f64, x: i32) -> f64;

    pub fn sinf32(x: f32) -> f32;
    pub fn sinf64(x: f64) -> f64;

    pub fn cosf32(x: f32) -> f32;
    pub fn cosf64(x: f64) -> f64;

    // `a` raised to a floating-point power `x`.
    pub fn powf32(a: f32, x: f32) -> f32;
    pub fn powf64(a: f64, x: f64) -> f64;

    pub fn expf32(x: f32) -> f32;
    pub fn expf64(x: f64) -> f64;

    pub fn exp2f32(x: f32) -> f32;
    pub fn exp2f64(x: f64) -> f64;

    // Natural, base-10, and base-2 logarithms.
    pub fn logf32(x: f32) -> f32;
    pub fn logf64(x: f64) -> f64;

    pub fn log10f32(x: f32) -> f32;
    pub fn log10f64(x: f64) -> f64;

    pub fn log2f32(x: f32) -> f32;
    pub fn log2f64(x: f64) -> f64;

    // Fused multiply-add: `a * b + c` with a single rounding.
    pub fn fmaf32(a: f32, b: f32, c: f32) -> f32;
    pub fn fmaf64(a: f64, b: f64, c: f64) -> f64;

    pub fn fabsf32(x: f32) -> f32;
    pub fn fabsf64(x: f64) -> f64;

    // `x` with the sign of `y`.
    pub fn copysignf32(x: f32, y: f32) -> f32;
    pub fn copysignf64(x: f64, y: f64) -> f64;

    pub fn floorf32(x: f32) -> f32;
    pub fn floorf64(x: f64) -> f64;

    pub fn ceilf32(x: f32) -> f32;
    pub fn ceilf64(x: f64) -> f64;

    pub fn truncf32(x: f32) -> f32;
    pub fn truncf64(x: f64) -> f64;

    pub fn rintf32(x: f32) -> f32;
    pub fn rintf64(x: f64) -> f64;

    pub fn nearbyintf32(x: f32) -> f32;
    pub fn nearbyintf64(x: f64) -> f64;

    pub fn roundf32(x: f32) -> f32;
    pub fn roundf64(x: f64) -> f64;

    // Population count (number of set bits).
    pub fn ctpop8(x: i8) -> i8;
    pub fn ctpop16(x: i16) -> i16;
    pub fn ctpop32(x: i32) -> i32;
    pub fn ctpop64(x: i64) -> i64;

    // Count leading zeros.
    pub fn ctlz8(x: i8) -> i8;
    pub fn ctlz16(x: i16) -> i16;
    pub fn ctlz32(x: i32) -> i32;
    pub fn ctlz64(x: i64) -> i64;

    // Count trailing zeros.
    pub fn cttz8(x: i8) -> i8;
    pub fn cttz16(x: i16) -> i16;
    pub fn cttz32(x: i32) -> i32;
    pub fn cttz64(x: i64) -> i64;

    // Byte-order reversal (no 8-bit form: it would be a no-op).
    pub fn bswap16(x: i16) -> i16;
    pub fn bswap32(x: i32) -> i32;
    pub fn bswap64(x: i64) -> i64;

    // Checked arithmetic: each returns `(wrapped result, overflow flag)`.
    pub fn i8_add_with_overflow(x: i8, y: i8) -> (i8, bool);
    pub fn i16_add_with_overflow(x: i16, y: i16) -> (i16, bool);
    pub fn i32_add_with_overflow(x: i32, y: i32) -> (i32, bool);
    pub fn i64_add_with_overflow(x: i64, y: i64) -> (i64, bool);

    pub fn u8_add_with_overflow(x: u8, y: u8) -> (u8, bool);
    pub fn u16_add_with_overflow(x: u16, y: u16) -> (u16, bool);
    pub fn u32_add_with_overflow(x: u32, y: u32) -> (u32, bool);
    pub fn u64_add_with_overflow(x: u64, y: u64) -> (u64, bool);

    pub fn i8_sub_with_overflow(x: i8, y: i8) -> (i8, bool);
    pub fn i16_sub_with_overflow(x: i16, y: i16) -> (i16, bool);
    pub fn i32_sub_with_overflow(x: i32, y: i32) -> (i32, bool);
    pub fn i64_sub_with_overflow(x: i64, y: i64) -> (i64, bool);

    pub fn u8_sub_with_overflow(x: u8, y: u8) -> (u8, bool);
    pub fn u16_sub_with_overflow(x: u16, y: u16) -> (u16, bool);
    pub fn u32_sub_with_overflow(x: u32, y: u32) -> (u32, bool);
    pub fn u64_sub_with_overflow(x: u64, y: u64) -> (u64, bool);

    pub fn i8_mul_with_overflow(x: i8, y: i8) -> (i8, bool);
    pub fn i16_mul_with_overflow(x: i16, y: i16) -> (i16, bool);
    pub fn i32_mul_with_overflow(x: i32, y: i32) -> (i32, bool);
    pub fn i64_mul_with_overflow(x: i64, y: i64) -> (i64, bool);

    pub fn u8_mul_with_overflow(x: u8, y: u8) -> (u8, bool);
    pub fn u16_mul_with_overflow(x: u16, y: u16) -> (u16, bool);
    pub fn u32_mul_with_overflow(x: u32, y: u32) -> (u32, bool);
    pub fn u64_mul_with_overflow(x: u64, y: u64) -> (u64, bool);
// NOTE(review): this is a fragment — the `TypeId` struct body, the
// `impl TypeId` header, and the closing braces are not visible in
// this view.
/// `TypeId` represents a globally unique identifier for a type
#[lang="type_id"] // This needs to be kept in lockstep with the code in trans/intrinsic.rs and
// middle/lang_items.rs
#[deriving(Eq, Hash, Show)]
/// Returns the `TypeId` of the type this generic function has been instantiated with
pub fn of<T: 'static>() -> TypeId {
    // Delegates to the `type_id` intrinsic declared above.
    unsafe { type_id::<T>() }