llfn: &'ll Value,
name: &'b str
) -> Self {
- let bx = Builder::with_cx(cx);
+ let mut bx = Builder::with_cx(cx);
let llbb = unsafe {
let name = SmallCStr::new(name);
llvm::LLVMAppendBasicBlockInContext(
}
}
- fn set_value_name(&self, value: &'ll Value, name: &str) {
+ fn set_value_name(&mut self, value: &'ll Value, name: &str) {
// Assigns an IR-level name to `value` through the LLVM C API.
// Receiver becomes `&mut self` as part of the builder-mutability refactor.
let cname = SmallCStr::new(name);
unsafe {
llvm::LLVMSetValueName(value, cname.as_ptr());
}
}
- fn position_at_end(&self, llbb: &'ll BasicBlock) {
+ fn position_at_end(&mut self, llbb: &'ll BasicBlock) {
// Moves the builder's insertion point to the end of `llbb`.
unsafe {
llvm::LLVMPositionBuilderAtEnd(self.llbuilder, llbb);
}
}
- fn position_at_start(&self, llbb: &'ll BasicBlock) {
+ fn position_at_start(&mut self, llbb: &'ll BasicBlock) {
// Moves the insertion point to the start of `llbb` (uses the
// rustc-specific LLVMRust shim, since the C API has no direct equivalent).
unsafe {
llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
}
}
- fn ret_void(&self) {
+ fn ret_void(&mut self) {
// Emits a `ret void` terminator at the current insertion point.
self.count_insn("retvoid");
unsafe {
llvm::LLVMBuildRetVoid(self.llbuilder);
}
}
- fn ret(&self, v: &'ll Value) {
+ fn ret(&mut self, v: &'ll Value) {
// Emits a `ret <v>` terminator returning `v` from the current function.
self.count_insn("ret");
unsafe {
llvm::LLVMBuildRet(self.llbuilder, v);
}
}
- fn br(&self, dest: &'ll BasicBlock) {
+ fn br(&mut self, dest: &'ll BasicBlock) {
self.count_insn("br");
unsafe {
llvm::LLVMBuildBr(self.llbuilder, dest);
}
fn cond_br(
- &self,
+ &mut self,
cond: &'ll Value,
then_llbb: &'ll BasicBlock,
else_llbb: &'ll BasicBlock,
}
fn alloca(&self, ty: &'ll Type, name: &str, align: Align) -> &'ll Value {
- let bx = Builder::with_cx(self.cx);
+ let mut bx = Builder::with_cx(self.cx);
bx.position_at_start(unsafe {
llvm::LLVMGetFirstBasicBlock(self.llfn())
});
bx.set_personality_fn(bx.cx().eh_personality());
- let normal = bx.build_sibling_block("normal");
+ let mut normal = bx.build_sibling_block("normal");
let catchswitch = bx.build_sibling_block("catchswitch");
let catchpad = bx.build_sibling_block("catchpad");
- let caught = bx.build_sibling_block("caught");
+ let mut caught = bx.build_sibling_block("caught");
let func = llvm::get_param(bx.llfn(), 0);
let data = llvm::get_param(bx.llfn(), 1);
// expected to be `*mut *mut u8` for this to actually work, but that's
// managed by the standard library.
- let then = bx.build_sibling_block("then");
- let catch = bx.build_sibling_block("catch");
+ let mut then = bx.build_sibling_block("then");
+ let mut catch = bx.build_sibling_block("catch");
let func = llvm::get_param(bx.llfn(), 0);
let data = llvm::get_param(bx.llfn(), 1);
pub struct LlvmCodegenBackend(());
-impl BackendMethods for LlvmCodegenBackend {
+impl ExtraBackendMethods for LlvmCodegenBackend {
type Module = ModuleLlvm;
type OngoingCodegen = OngoingCodegen;
cx.set_frame_pointer_elimination(llfn);
cx.apply_target_cpu_attr(llfn);
- let bx = Bx::new_block(&cx, llfn, "top");
+ let mut bx = Bx::new_block(&cx, llfn, "top");
bx.insert_reference_to_gdb_debug_scripts_section_global();
time_graph::WorkPackageKind(&["#DE9597", "#FED1D3", "#FDC5C7", "#B46668", "#88494B"]);
-pub fn codegen_crate<B: BackendMethods>(
+pub fn codegen_crate<B: ExtraBackendMethods>(
backend: B,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
rx: mpsc::Receiver<Box<dyn Any + Send>>
/// If you see this comment in the code, then it means that this workaround
/// worked! We may yet one day track down the mysterious cause of that
/// segfault...
-struct AbortCodegenOnDrop<B: BackendMethods>(Option<B::OngoingCodegen>);
+struct AbortCodegenOnDrop<B: ExtraBackendMethods>(Option<B::OngoingCodegen>);
-impl<B: BackendMethods> AbortCodegenOnDrop<B> {
+impl<B: ExtraBackendMethods> AbortCodegenOnDrop<B> {
// Disarms the guard: takes the ongoing codegen out of the Option so the
// Drop impl (which aborts codegen only when Some) becomes a no-op.
fn into_inner(mut self) -> B::OngoingCodegen {
self.0.take().unwrap()
}
}
-impl<B: BackendMethods> Deref for AbortCodegenOnDrop<B> {
+impl<B: ExtraBackendMethods> Deref for AbortCodegenOnDrop<B> {
type Target = B::OngoingCodegen;
fn deref(&self) -> &B::OngoingCodegen {
}
}
-impl<B: BackendMethods> DerefMut for AbortCodegenOnDrop<B> {
+impl<B: ExtraBackendMethods> DerefMut for AbortCodegenOnDrop<B> {
// Mutable passthrough to the wrapped OngoingCodegen.
// Panics if the value was already taken via `into_inner`/`drop`.
fn deref_mut(&mut self) -> &mut B::OngoingCodegen {
self.0.as_mut().unwrap()
}
}
-impl<B: BackendMethods> Drop for AbortCodegenOnDrop<B> {
+impl<B: ExtraBackendMethods> Drop for AbortCodegenOnDrop<B> {
fn drop(&mut self) {
if let Some(codegen) = self.0.take() {
B::codegen_aborted(codegen);
use rustc::session::Session;
use rustc::ty::TyCtxt;
use rustc::util::time_graph::TimeGraph;
+use rustc_codegen_utils::codegen_backend::CodegenBackend;
use std::any::Any;
use std::sync::mpsc::Receiver;
use syntax_pos::symbol::InternedString;
Self: BackendTypes + HasTyCtxt<'tcx> + LayoutOf<Ty = Ty<'tcx>, TyLayout = TyLayout<'tcx>>
{}
-pub trait BackendMethods {
+pub trait ExtraBackendMethods: CodegenBackend {
type Module;
type OngoingCodegen;
fn llbb(&self) -> Self::BasicBlock;
fn count_insn(&self, category: &str);
- fn set_value_name(&self, value: Self::Value, name: &str);
- fn position_at_end(&self, llbb: Self::BasicBlock);
- fn position_at_start(&self, llbb: Self::BasicBlock);
- fn ret_void(&self);
- fn ret(&self, v: Self::Value);
- fn br(&self, dest: Self::BasicBlock);
- fn cond_br(&self, cond: Self::Value, then_llbb: Self::BasicBlock, else_llbb: Self::BasicBlock);
+ fn set_value_name(&mut self, value: Self::Value, name: &str);
+ fn position_at_end(&mut self, llbb: Self::BasicBlock);
+ fn position_at_start(&mut self, llbb: Self::BasicBlock);
+ fn ret_void(&mut self);
+ fn ret(&mut self, v: Self::Value);
+ fn br(&mut self, dest: Self::BasicBlock);
+ fn cond_br(
+ &mut self,
+ cond: Self::Value,
+ then_llbb: Self::BasicBlock,
+ else_llbb: Self::BasicBlock,
+ );
fn switch(&self, v: Self::Value, else_llbb: Self::BasicBlock, num_cases: usize) -> Self::Value;
fn invoke(
&self,
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+//! Interface of a Rust codegen backend
+//!
+//! This crate defines all the traits that have to be implemented by a codegen backend in order to
+//! use the backend-agnostic codegen code in `rustc_codegen_ssa`.
+//!
+//! The interface is designed around two backend-specific data structures, the codegen context and
+//! the builder. The codegen context is supposed to be read-only after its creation and during the
+//! actual codegen, while the builder stores the information about the function during codegen and
+//! is used to produce the instructions of the backend IR.
+//!
+//! Finally, a third `Backend` structure has to implement methods related to how codegen information
+//! is passed to the backend, especially for asynchronous compilation.
+//!
+//! The traits contain associated types that are backend-specific, such as the backend's value or
+//! basic blocks.
+
mod abi;
mod asm;
mod backend;
pub use self::abi::{AbiBuilderMethods, AbiMethods};
pub use self::asm::{AsmBuilderMethods, AsmMethods};
-pub use self::backend::{Backend, BackendMethods, BackendTypes};
+pub use self::backend::{Backend, BackendTypes, ExtraBackendMethods};
pub use self::builder::BuilderMethods;
pub use self::consts::ConstMethods;
pub use self::debuginfo::{DebugInfoBuilderMethods, DebugInfoMethods};
use rustc::ty::layout::{self, Align, Size};
use rustc::ty::Ty;
use rustc::util::nodemap::FxHashMap;
- use rustc_target::abi::call::{ArgType, CastTarget, FnType, Reg};
+use rustc_target::abi::call::{ArgType, CastTarget, FnType, Reg};
use std::cell::RefCell;
use syntax::ast;
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//! # Note
-//!
-//! This API is completely unstable and subject to change.
-
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
#![allow(dead_code)]
#![feature(quote)]
+//! This crate contains codegen code that is used by all codegen backends (LLVM and others).
+//! The backend-agnostic functions of this crate use functions defined in various traits that
+//! have to be implemented by each backend.
+
#[macro_use] extern crate bitflags;
#[macro_use] extern crate log;
extern crate rustc_apfloat;
};
let funclet_br =
- |this: &mut Self, bx: &Bx, target: mir::BasicBlock| {
+ |this: &mut Self, bx: &mut Bx, target: mir::BasicBlock| {
let (lltarget, is_cleanupret) = lltarget(this, target);
if is_cleanupret {
// micro-optimization: generate a `ret` rather than a jump
let do_call = |
this: &mut Self,
- bx: &Bx,
+ bx: &mut Bx,
fn_ty: FnType<'tcx, Ty<'tcx>>,
fn_ptr: Bx::Value,
llargs: &[Bx::Value],
}
mir::TerminatorKind::Goto { target } => {
- funclet_br(self, &bx, target);
+ funclet_br(self, &mut bx, target);
}
mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref values, ref targets } => {
if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
// we don't actually need to drop anything.
- funclet_br(self, &bx, target);
+ funclet_br(self, &mut bx, target);
return
}
bx.cx().fn_type_of_instance(&drop_fn))
}
};
- do_call(self, &bx, fn_ty, drop_fn, args,
+ do_call(self, &mut bx, fn_ty, drop_fn, args,
Some((ReturnDest::Nothing, target)),
unwind);
}
// Don't codegen the panic block if success if known.
if const_cond == Some(expected) {
- funclet_br(self, &bx, target);
+ funclet_br(self, &mut bx, target);
return;
}
let llfn = bx.cx().get_fn(instance);
// Codegen the actual panic invoke/call.
- do_call(self, &bx, fn_ty, llfn, &args, None, cleanup);
+ do_call(self, &mut bx, fn_ty, llfn, &args, None, cleanup);
}
mir::TerminatorKind::DropAndReplace { .. } => {
if let Some(destination_ref) = destination.as_ref() {
let &(ref dest, target) = destination_ref;
self.codegen_transmute(&bx, &args[0], dest);
- funclet_br(self, &bx, target);
+ funclet_br(self, &mut bx, target);
} else {
// If we are trying to transmute to an uninhabited type,
// it is likely there is no allotted destination. In fact,
Some(ty::InstanceDef::DropGlue(_, None)) => {
// empty drop glue - a nop.
let &(_, target) = destination.as_ref().unwrap();
- funclet_br(self, &bx, target);
+ funclet_br(self, &mut bx, target);
return;
}
_ => bx.cx().new_fn_type(sig, &extra_args)
// Codegen the actual panic invoke/call.
do_call(
self,
- &bx,
+ &mut bx,
fn_ty,
llfn,
&[msg_file_line_col],
}
if let Some((_, target)) = *destination {
- funclet_br(self, &bx, target);
+ funclet_br(self, &mut bx, target);
} else {
bx.unreachable();
}
_ => span_bug!(span, "no llfn for call"),
};
- do_call(self, &bx, fn_ty, fn_ptr, &llargs,
+ do_call(self, &mut bx, fn_ty, fn_ptr, &llargs,
destination.as_ref().map(|&(_, target)| (ret_dest, target)),
cleanup);
}
span_bug!(self.mir.span, "landing pad was not inserted?")
}
- let bx = self.new_block("cleanup");
+ let mut bx = self.new_block("cleanup");
let llpersonality = self.cx.eh_personality();
let llretty = self.landing_pad_type();
&self,
bb: mir::BasicBlock
) -> Bx {
- let bx = Bx::with_cx(self.cx);
+ let mut bx = Bx::with_cx(self.cx);
bx.position_at_end(self.blocks[bb]);
bx
}
debug!("fn_ty: {:?}", fn_ty);
let debug_context =
cx.create_function_debug_context(instance, sig, llfn, mir);
- let bx = Bx::new_block(cx, llfn, "start");
+ let mut bx = Bx::new_block(cx, llfn, "start");
if mir.basic_blocks().iter().any(|bb| bb.is_cleanup) {
bx.set_personality_fn(cx.eh_personality());
// Compute debuginfo scopes from MIR scopes.
let scopes = cx.create_mir_scopes(mir, &debug_context);
- let (landing_pads, funclets) = create_funclets(mir, &bx, &cleanup_kinds, &block_bxs);
+ let (landing_pads, funclets) = create_funclets(mir, &mut bx, &cleanup_kinds, &block_bxs);
let mut fx = FunctionCx {
instance,
// Allocate variable and temp allocas
fx.locals = {
- let args = arg_local_refs(&bx, &fx, &fx.scopes, &memory_locals);
+ let args = arg_local_refs(&mut bx, &fx, &fx.scopes, &memory_locals);
let allocate_local = |local| {
let decl = &mir.local_decls[local];
fn create_funclets<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>(
mir: &'a Mir<'tcx>,
- bx: &Bx,
+ bx: &mut Bx,
cleanup_kinds: &IndexVec<mir::BasicBlock, CleanupKind>,
block_bxs: &IndexVec<mir::BasicBlock, Bx::BasicBlock>)
-> (IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,
// }
Some(&mir::TerminatorKind::Abort) => {
let cs_bx = bx.build_sibling_block(&format!("cs_funclet{:?}", bb));
- let cp_bx = bx.build_sibling_block(&format!("cp_funclet{:?}", bb));
+ let mut cp_bx = bx.build_sibling_block(&format!("cp_funclet{:?}", bb));
ret_llbb = cs_bx.llbb();
let cs = cs_bx.catch_switch(None, None, 1);
cp_bx.br(llbb);
}
_ => {
- let cleanup_bx = bx.build_sibling_block(&format!("funclet_{:?}", bb));
+ let mut cleanup_bx = bx.build_sibling_block(&format!("funclet_{:?}", bb));
ret_llbb = cleanup_bx.llbb();
funclet = cleanup_bx.cleanup_pad(None, &[]);
cleanup_bx.br(llbb);
/// argument's value. As arguments are places, these are always
/// indirect.
fn arg_local_refs<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>(
- bx: &Bx,
+ bx: &mut Bx,
fx: &FunctionCx<'a, 'tcx, Bx>,
scopes: &IndexVec<
mir::SourceScope,
memory_locals: &BitSet<mir::Local>,
) -> Vec<LocalRef<'tcx, Bx::Value>> {
let mir = fx.mir;
- let tcx = bx.tcx();
+ let tcx = fx.cx.tcx();
let mut idx = 0;
let mut llarg_idx = fx.fn_ty.ret.is_indirect() as usize;
impl<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
pub fn codegen_rvalue(
&mut self,
- bx: Bx,
+ mut bx: Bx,
dest: PlaceRef<'tcx, Bx::Value>,
rvalue: &mir::Rvalue<'tcx>
) -> Bx {
let count = bx.cx().const_usize(count);
let end = dest.project_index(&bx, count).llval;
- let header_bx = bx.build_sibling_block("repeat_loop_header");
- let body_bx = bx.build_sibling_block("repeat_loop_body");
+ let mut header_bx = bx.build_sibling_block("repeat_loop_header");
+ let mut body_bx = bx.build_sibling_block("repeat_loop_body");
let next_bx = bx.build_sibling_block("repeat_loop_next");
bx.br(header_bx.llbb());