unsafe {
llvm::LLVMPointerType(
self.llvm_type(cx),
- cx.data_layout().instruction_address_space as c_uint,
+ cx.data_layout().instruction_address_space.0 as c_uint,
)
}
}
use rustc_middle::mir::interpret::{Allocation, GlobalAlloc, Scalar};
use rustc_middle::ty::layout::TyAndLayout;
use rustc_span::symbol::Symbol;
-use rustc_target::abi::{self, HasDataLayout, LayoutOf, Pointer, Size};
+use rustc_target::abi::{self, AddressSpace, HasDataLayout, LayoutOf, Pointer, Size};
use libc::{c_char, c_uint};
use log::debug;
}
}
Scalar::Ptr(ptr) => {
- let base_addr = match self.tcx.global_alloc(ptr.alloc_id) {
+ let (base_addr, base_addr_space) = match self.tcx.global_alloc(ptr.alloc_id) {
GlobalAlloc::Memory(alloc) => {
let init = const_alloc_to_llvm(self, alloc);
let value = match alloc.mutability {
if !self.sess().fewer_names() {
llvm::set_value_name(value, format!("{:?}", ptr.alloc_id).as_bytes());
}
- value
+ (value, AddressSpace::DATA)
}
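+ // Function pointers are emitted in the target's instruction
+ // address space, which can differ from the data space (e.g. on
+ // AVR, where code and data live in separate memories).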
- GlobalAlloc::Function(fn_instance) => self.get_fn_addr(fn_instance),
+ GlobalAlloc::Function(fn_instance) => (
+ self.get_fn_addr(fn_instance),
+ self.data_layout().instruction_address_space,
+ ),
GlobalAlloc::Static(def_id) => {
assert!(self.tcx.is_static(def_id));
assert!(!self.tcx.is_thread_local_static(def_id));
- self.get_static(def_id)
+ (self.get_static(def_id), AddressSpace::DATA)
}
};
let llval = unsafe {
llvm::LLVMConstInBoundsGEP(
- self.const_bitcast(base_addr, self.type_i8p()),
+ self.const_bitcast(base_addr, self.type_i8p_ext(base_addr_space)),
&self.const_usize(ptr.offset.bytes()),
1,
)
use rustc_hir::Node;
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
use rustc_middle::mir::interpret::{
- read_target_uint, Allocation, ConstValue, ErrorHandled, Pointer,
+ read_target_uint, Allocation, ConstValue, ErrorHandled, GlobalAlloc, Pointer,
};
use rustc_middle::mir::mono::MonoItem;
use rustc_middle::ty::{self, Instance, Ty};
use rustc_middle::{bug, span_bug};
use rustc_span::symbol::sym;
use rustc_span::Span;
-use rustc_target::abi::{Align, HasDataLayout, LayoutOf, Primitive, Scalar, Size};
+use rustc_target::abi::{AddressSpace, Align, HasDataLayout, LayoutOf, Primitive, Scalar, Size};
use std::ffi::CStr;
)
.expect("const_alloc_to_llvm: could not read relocation pointer")
as u64;
+
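+ // Choose the address space from the kind of allocation the
+ // relocation points at: code goes in the instruction address
+ // space, data stays in the default data space.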
+ let address_space = match cx.tcx.global_alloc(alloc_id) {
+ GlobalAlloc::Function(..) => cx.data_layout().instruction_address_space,
+ GlobalAlloc::Static(..) | GlobalAlloc::Memory(..) => AddressSpace::DATA,
+ };
+
llvals.push(cx.scalar_to_backend(
Pointer::new(alloc_id, Size::from_bytes(ptr_offset)).into(),
&Scalar { value: Primitive::Pointer, valid_range: 0..=!0 },
- cx.type_i8p(),
+ cx.type_i8p_ext(address_space),
));
next_offset = offset + pointer_size;
}
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::ty::Ty;
use rustc_target::abi::call::{CastTarget, FnAbi, Reg};
-use rustc_target::abi::{Align, Integer, Size};
+use rustc_target::abi::{AddressSpace, Align, Integer, Size};
use std::fmt;
use std::ptr;
assert_ne!(
self.type_kind(ty),
TypeKind::Function,
- "don't call ptr_to on function types, use ptr_to_llvm_type on FnAbi instead"
+ "don't call ptr_to on function types, use ptr_to_llvm_type on FnAbi instead or explicitly specify an address space if it makes sense"
);
- ty.ptr_to()
+ ty.ptr_to(AddressSpace::DATA)
+ }
+
+ fn type_ptr_to_ext(&self, ty: &'ll Type, address_space: AddressSpace) -> &'ll Type {
+ ty.ptr_to(address_space)
}
fn element_type(&self, ty: &'ll Type) -> &'ll Type {
}
pub fn i8p_llcx(llcx: &llvm::Context) -> &Type {
- Type::i8_llcx(llcx).ptr_to()
+ Type::i8_llcx(llcx).ptr_to(AddressSpace::DATA)
}
- fn ptr_to(&self) -> &Type {
- unsafe { llvm::LLVMPointerType(&self, 0) }
+ fn ptr_to(&self, address_space: AddressSpace) -> &Type {
+ unsafe { llvm::LLVMPointerType(&self, address_space.0) }
}
}
}
// Not in the cache; build it.
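+ // Vtable entries are function pointers, so the null placeholder
+ // must also live in the instruction address space.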
- let nullptr = cx.const_null(cx.type_i8p());
+ let nullptr = cx.const_null(cx.type_i8p_ext(cx.data_layout().instruction_address_space));
let methods_root;
let methods = if let Some(trait_ref) = trait_ref {
use rustc_middle::ty::layout::{FnAbiExt, HasTyCtxt, TyAndLayout};
use rustc_middle::ty::{self, Instance, Ty, TypeFoldable};
use rustc_target::abi::call::{FnAbi, PassMode};
+use rustc_target::abi::HasDataLayout;
use std::iter;
// C++ personality function, but `catch (...)` has no type so
// it's null. The 64 here is actually a bitfield which
// represents that this is a catch-all block.
- let null = bx.const_null(bx.type_i8p());
+ let null = bx.const_null(
+ bx.type_i8p_ext(bx.cx().data_layout().instruction_address_space),
+ );
let sixty_four = bx.const_i32(64);
funclet = cp_bx.catch_pad(cs, &[null, sixty_four, null]);
cp_bx.br(llbb);
use rustc_middle::ty::{self, Ty};
use rustc_span::DUMMY_SP;
use rustc_target::abi::call::{ArgAbi, CastTarget, FnAbi, Reg};
-use rustc_target::abi::Integer;
+use rustc_target::abi::{AddressSpace, Integer};
// This depends on `Backend` and not `BackendTypes`, because consumers will probably want to use
// `LayoutOf` or `HasTyCtxt`. This way, they don't have to add a constraint on it themselves.
fn type_struct(&self, els: &[Self::Type], packed: bool) -> Self::Type;
fn type_kind(&self, ty: Self::Type) -> TypeKind;
fn type_ptr_to(&self, ty: Self::Type) -> Self::Type;
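+ /// Like `type_ptr_to`, but with an explicit address space for targets
+ /// where some pointers (e.g. function pointers) live outside the
+ /// default data space.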
+ fn type_ptr_to_ext(&self, ty: Self::Type, address_space: AddressSpace) -> Self::Type;
fn element_type(&self, ty: Self::Type) -> Self::Type;
/// Returns the number of elements in `self` if it is an LLVM vector type.
pub trait DerivedTypeMethods<'tcx>: BaseTypeMethods<'tcx> + MiscMethods<'tcx> {
fn type_i8p(&self) -> Self::Type {
- self.type_ptr_to(self.type_i8())
+ self.type_i8p_ext(AddressSpace::DATA)
+ }
+
+ fn type_i8p_ext(&self, address_space: AddressSpace) -> Self::Type {
+ self.type_ptr_to_ext(self.type_i8(), address_space)
}
fn type_int(&self) -> Self::Type {
/// Alignments for vector types.
pub vector_align: Vec<(Size, AbiAndPrefAlign)>,
- pub instruction_address_space: u32,
+ pub instruction_address_space: AddressSpace,
}
impl Default for TargetDataLayout {
(Size::from_bits(64), AbiAndPrefAlign::new(align(64))),
(Size::from_bits(128), AbiAndPrefAlign::new(align(128))),
],
- instruction_address_space: 0,
+ instruction_address_space: AddressSpace::DATA,
}
}
}
pub fn parse(target: &Target) -> Result<TargetDataLayout, String> {
// Parse an address space index from a string.
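+ // For example, AVR's data-layout string includes a `P1` component,
+ // marking address space 1 as the program (instruction) space; parsing
+ // that index yields `AddressSpace(1)`.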
let parse_address_space = |s: &str, cause: &str| {
- s.parse::<u32>().map_err(|err| {
+ s.parse::<u32>().map(AddressSpace).map_err(|err| {
format!("invalid address space `{}` for `{}` in \"data-layout\": {}", s, cause, err)
})
};
}
}
+/// An identifier that specifies the address space some operation should
+/// operate in. Special address spaces have an effect on code generation,
+/// depending on the target and the address spaces it implements.
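+///
+/// For example, on AVR code lives in a separate program memory space, so
+/// function pointers there use a non-default address space (`AddressSpace(1)`),
+/// while ordinary data pointers use `AddressSpace::DATA` (`0`).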
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
+pub struct AddressSpace(pub u32);
+
+impl AddressSpace {
+ /// The default address space, corresponding to data space.
+ pub const DATA: Self = AddressSpace(0);
+}
+
/// Describes how values of the type are passed by target ABIs,
/// in terms of categories of C types there are ABI rules for.
#[derive(Clone, PartialEq, Eq, Hash, Debug, HashStable_Generic)]
--- /dev/null
+// compile-flags: -O --target=avr-unknown-unknown --crate-type=rlib
+
+// This test validates that function pointers can be stored in global
+// variables and then called. It ensures that Rust emits function pointers
+// in the correct address space to LLVM so that an assertion failure
+// related to casting is not triggered.
+//
+// It also validates that functions can be called through function
+// pointers obtained via the `Fn` family of traits.
+
+#![feature(no_core, lang_items, unboxed_closures, arbitrary_self_types)]
+#![crate_type = "lib"]
+#![no_core]
+
+#[lang = "sized"]
+pub trait Sized { }
+#[lang = "copy"]
+pub trait Copy { }
+#[lang = "receiver"]
+pub trait Receiver { }
+
+pub struct Result<T, E> { _a: T, _b: E }
+
+impl Copy for usize {}
+
+#[lang = "drop_in_place"]
+pub unsafe fn drop_in_place<T: ?Sized>(_: *mut T) {}
+
+#[lang = "fn_once"]
+pub trait FnOnce<Args> {
+ #[lang = "fn_once_output"]
+ type Output;
+
+ extern "rust-call" fn call_once(self, args: Args) -> Self::Output;
+}
+
+#[lang = "fn_mut"]
+pub trait FnMut<Args> : FnOnce<Args> {
+ extern "rust-call" fn call_mut(&mut self, args: Args) -> Self::Output;
+}
+
+#[lang = "fn"]
+pub trait Fn<Args>: FnOnce<Args> {
+ /// Performs the call operation.
+ extern "rust-call" fn call(&self, args: Args) -> Self::Output;
+}
+
+impl<'a, A, R> FnOnce<A> for &'a fn(A) -> R {
+ type Output = R;
+
+ extern "rust-call" fn call_once(self, args: A) -> R {
+ (*self)(args)
+ }
+}
+
+pub static mut STORAGE_FOO: fn(&usize, &mut u32) -> Result<(), ()> = arbitrary_black_box;
+pub static mut STORAGE_BAR: u32 = 12;
+
+fn arbitrary_black_box(ptr: &usize, _: &mut u32) -> Result<(), ()> {
+ let raw_ptr = ptr as *const usize;
+ let _v: usize = unsafe { *raw_ptr };
+ loop {}
+}
+
+#[inline(never)]
+#[no_mangle]
+fn call_through_fn_trait(a: &mut impl Fn<(), Output=()>) {
+ (*a)()
+}
+
+#[inline(never)]
+fn update_bar_value() {
+ unsafe {
+ STORAGE_BAR = 88;
+ }
+}
+
+// CHECK: define void @test(){{.+}}addrspace(1)
+#[no_mangle]
+pub extern "C" fn test() {
+ let mut buf = 7;
+
+ // A call through the Fn trait must use address space 1.
+ //
+ // CHECK: call{{.+}}addrspace(1) void @call_through_fn_trait()
+ call_through_fn_trait(&mut update_bar_value);
+
+ // A call through a global variable must use address space 1.
+ // CHECK: load {{.*}}addrspace(1){{.+}}FOO
+ unsafe {
+ STORAGE_FOO(&1, &mut buf);
+ }
+}