#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct Statement<'tcx> {
pub span: Span,
+ /// Visibility scope this statement was generated in; threaded through by
+ /// this patch so lints/debuginfo can attribute MIR statements to a scope.
+ pub scope: ScopeId,
pub kind: StatementKind<'tcx>,
}
}
}
}
+
///////////////////////////////////////////////////////////////////////////
// Lvalues
unpack!(block = this.into(destination, block, expr));
} else {
// FIXME(#31472)
- this.cfg.push_assign_unit(block, span, destination);
+ let scope_id = this.innermost_scope_id();
+ this.cfg.push_assign_unit(block, scope_id, span, destination);
}
// Finally, we pop all the let scopes before exiting out from the scope of block
// itself.
+ /// Appends an `Assign` statement (`lvalue = rvalue`) to `block`.
+ /// New in this patch: takes the originating `scope`, which is stored on
+ /// the emitted `Statement` (see the `Statement.scope` field addition).
pub fn push_assign(&mut self,
block: BasicBlock,
+ scope: ScopeId,
span: Span,
lvalue: &Lvalue<'tcx>,
rvalue: Rvalue<'tcx>) {
self.push(block, Statement {
+ scope: scope,
span: span,
kind: StatementKind::Assign(lvalue.clone(), rvalue)
});
+ /// Convenience wrapper over `push_assign`: assigns `constant` into `temp`,
+ /// forwarding the new `scope` parameter unchanged.
pub fn push_assign_constant(&mut self,
block: BasicBlock,
+ scope: ScopeId,
span: Span,
temp: &Lvalue<'tcx>,
constant: Constant<'tcx>) {
- self.push_assign(block, span, temp, Rvalue::Use(Operand::Constant(constant)));
+ self.push_assign(block, scope, span, temp,
+ Rvalue::Use(Operand::Constant(constant)));
}
+ /// Assigns the unit value (an empty tuple aggregate) into `lvalue`,
+ /// forwarding the new `scope` parameter to `push_assign`.
pub fn push_assign_unit(&mut self,
block: BasicBlock,
+ scope: ScopeId,
span: Span,
lvalue: &Lvalue<'tcx>) {
- self.push_assign(block, span, lvalue, Rvalue::Aggregate(
+ self.push_assign(block, scope, span, lvalue, Rvalue::Aggregate(
AggregateKind::Tuple, vec![]
));
}
debug!("expr_as_lvalue(block={:?}, expr={:?})", block, expr);
let this = self;
+ let scope_id = this.innermost_scope_id();
let expr_span = expr.span;
match expr.kind {
ExprKind::Scope { extent, value } => {
// bounds check:
let (len, lt) = (this.temp(usize_ty.clone()), this.temp(bool_ty));
- this.cfg.push_assign(block, expr_span, // len = len(slice)
+ this.cfg.push_assign(block, scope_id, expr_span, // len = len(slice)
&len, Rvalue::Len(slice.clone()));
- this.cfg.push_assign(block, expr_span, // lt = idx < len
+ this.cfg.push_assign(block, scope_id, expr_span, // lt = idx < len
<, Rvalue::BinaryOp(BinOp::Lt,
idx.clone(),
Operand::Consume(len.clone())));
debug!("expr_as_rvalue(block={:?}, expr={:?})", block, expr);
let this = self;
+ let scope_id = this.innermost_scope_id();
let expr_span = expr.span;
match expr.kind {
let value = this.hir.mirror(value);
let result = this.temp(expr.ty);
// to start, malloc some memory of suitable type (thus far, uninitialized):
- this.cfg.push_assign(block, expr_span, &result, Rvalue::Box(value.ty));
+ this.cfg.push_assign(block, scope_id, expr_span, &result, Rvalue::Box(value.ty));
this.in_scope(value_extents, block, |this, _| {
// schedule a shallow free of that memory, lest we unwind:
this.schedule_box_free(expr_span, value_extents, &result, value.ty);
let expr_span = expr.span;
let lvalue = unpack!(block = this.as_lvalue(block, expr));
let rvalue = Rvalue::Use(Operand::Consume(lvalue));
- this.cfg.push_assign(block, expr_span, &temp, rvalue);
+ let scope_id = this.innermost_scope_id();
+ this.cfg.push_assign(block, scope_id, expr_span, &temp, rvalue);
}
_ => {
unpack!(block = this.into(&temp, block, expr));
// just use the name `this` uniformly
let this = self;
let expr_span = expr.span;
+ let scope_id = this.innermost_scope_id();
match expr.kind {
ExprKind::Scope { extent, value } => {
} else {
// Body of the `if` expression without an `else` clause must return `()`, thus
// we implicitly generate a `else {}` if it is not specified.
- this.cfg.push_assign_unit(else_block, expr_span, destination);
+ let scope_id = this.innermost_scope_id();
+ this.cfg.push_assign_unit(else_block, scope_id, expr_span, destination);
else_block
};
});
this.cfg.push_assign_constant(
- true_block, expr_span, destination,
+ true_block, scope_id, expr_span, destination,
Constant {
span: expr_span,
ty: this.hir.bool_ty(),
});
this.cfg.push_assign_constant(
- false_block, expr_span, destination,
+ false_block, scope_id, expr_span, destination,
Constant {
span: expr_span,
ty: this.hir.bool_ty(),
// If the loop may reach its exit_block, we assign an empty tuple to the
// destination to keep the MIR well-formed.
if might_break {
- this.cfg.push_assign_unit(exit_block, expr_span, destination);
+ this.cfg.push_assign_unit(exit_block, scope_id, expr_span, destination);
}
exit_block.unit()
}
let rhs = unpack!(block = this.as_operand(block, rhs));
let lhs = unpack!(block = this.as_lvalue(block, lhs));
unpack!(block = this.build_drop(block, lhs.clone()));
- this.cfg.push_assign(block, expr_span, &lhs, Rvalue::Use(rhs));
+ this.cfg.push_assign(block, scope_id, expr_span, &lhs, Rvalue::Use(rhs));
block.unit()
}
ExprKind::AssignOp { op, lhs, rhs } => {
// we don't have to drop prior contents or anything
// because AssignOp is only legal for Copy types
// (overloaded ops should be desugared into a call).
- this.cfg.push_assign(block, expr_span, &lhs,
+ this.cfg.push_assign(block, scope_id, expr_span, &lhs,
Rvalue::BinaryOp(op,
Operand::Consume(lhs.clone()),
rhs));
block = match value {
Some(value) => unpack!(this.into(&Lvalue::ReturnPointer, block, value)),
None => {
- this.cfg.push_assign_unit(block, expr_span, &Lvalue::ReturnPointer);
+ this.cfg.push_assign_unit(block, scope_id, expr_span, &Lvalue::ReturnPointer);
block
}
};
});
let rvalue = unpack!(block = this.as_rvalue(block, expr));
- this.cfg.push_assign(block, expr_span, destination, rvalue);
+ this.cfg.push_assign(block, scope_id, expr_span, destination, rvalue);
block.unit()
}
}
Rvalue::Ref(region, borrow_kind, binding.source),
};
- self.cfg.push_assign(block, binding.span, &Lvalue::Var(var_index), rvalue);
+ let scope_id = self.innermost_scope_id();
+ self.cfg.push_assign(block, scope_id, binding.span,
+ &Lvalue::Var(var_index), rvalue);
}
}
lvalue: &Lvalue<'tcx>,
test: &Test<'tcx>)
-> Vec<BasicBlock> {
+ let scope_id = self.innermost_scope_id();
match test.kind {
TestKind::Switch { adt_def } => {
let num_enum_variants = self.hir.num_variants(adt_def);
if let ty::TyArray(_, _) = mt.ty.sty {
ty = tcx.mk_imm_ref(region, tcx.mk_slice(tcx.types.u8));
let val_slice = self.temp(ty);
- self.cfg.push_assign(block, test.span, &val_slice,
+ self.cfg.push_assign(block, scope_id, test.span, &val_slice,
Rvalue::Cast(CastKind::Unsize, val, ty));
val = Operand::Consume(val_slice);
}
});
let slice = self.temp(ty);
- self.cfg.push_assign(block, test.span, &slice,
+ self.cfg.push_assign(block, scope_id, test.span, &slice,
Rvalue::Cast(CastKind::Unsize, array, ty));
Operand::Consume(slice)
} else {
let (actual, result) = (self.temp(usize_ty), self.temp(bool_ty));
// actual = len(lvalue)
- self.cfg.push_assign(block, test.span, &actual, Rvalue::Len(lvalue.clone()));
+ self.cfg.push_assign(block, scope_id, test.span, &actual, Rvalue::Len(lvalue.clone()));
// expected = <N>
- let expected = self.push_usize(block, test.span, len);
+ let expected = self.push_usize(block, scope_id, test.span, len);
// result = actual == expected OR result = actual < expected
self.cfg.push_assign(block,
+ scope_id,
test.span,
&result,
Rvalue::BinaryOp(op,
let result = self.temp(bool_ty);
// result = op(left, right)
- self.cfg.push_assign(block, span, &result, Rvalue::BinaryOp(op, left, right));
+ let scope_id = self.innermost_scope_id();
+ self.cfg.push_assign(block, scope_id, span, &result,
+ Rvalue::BinaryOp(op, left, right));
// branch based on result
let target_block = self.cfg.start_new_block();
from_end: suffix_len,
};
let temp = self.temp(slice.ty.clone()); // no need to schedule drop, temp is always copy
- self.cfg.push_assign(block, slice.span, &temp, rvalue);
+ let scope_id = self.innermost_scope_id();
+ self.cfg.push_assign(block, scope_id, slice.span, &temp, rvalue);
match_pairs.push(MatchPair::new(temp, slice));
}
Operand::Constant(constant)
}
- pub fn push_usize(&mut self, block: BasicBlock, span: Span, value: u64) -> Lvalue<'tcx> {
+ pub fn push_usize(&mut self,
+ block: BasicBlock,
+ scope_id: ScopeId,
+ span: Span,
+ value: u64)
+ -> Lvalue<'tcx> {
let usize_ty = self.hir.usize_ty();
let temp = self.temp(usize_ty);
self.cfg.push_assign_constant(
- block, span, &temp,
+ block, scope_id, span, &temp,
Constant {
span: span,
ty: self.hir.usize_ty(),
let (tuple, tuple_ref) = (self.temp(tup_ty), self.temp(ref_ty));
let (file, line) = self.span_to_fileline_args(span);
let elems = vec![Operand::Constant(file), Operand::Constant(line)];
+ let scope_id = self.innermost_scope_id();
// FIXME: We should have this as a constant, rather than a stack variable (to not pollute
// icache with cold branch code), however to achieve that we either have to rely on rvalue
// promotion or have some way, in MIR, to create constants.
- self.cfg.push_assign(block, span, &tuple, // tuple = (file_arg, line_arg);
+ self.cfg.push_assign(block, scope_id, span, &tuple, // tuple = (file_arg, line_arg);
Rvalue::Aggregate(AggregateKind::Tuple, elems));
// FIXME: is this region really correct here?
- self.cfg.push_assign(block, span, &tuple_ref, // tuple_ref = &tuple;
+ self.cfg.push_assign(block, scope_id, span, &tuple_ref, // tuple_ref = &tuple;
Rvalue::Ref(region, BorrowKind::Shared, tuple));
let cleanup = self.diverge_cleanup();
self.cfg.terminate(block, Terminator::Call {
let elems = vec![Operand::Constant(message),
Operand::Constant(file),
Operand::Constant(line)];
+ let scope_id = self.innermost_scope_id();
// FIXME: We should have this as a constant, rather than a stack variable (to not pollute
// icache with cold branch code), however to achieve that we either have to rely on rvalue
// promotion or have some way, in MIR, to create constants.
- self.cfg.push_assign(block, span, &tuple, // tuple = (message_arg, file_arg, line_arg);
+ self.cfg.push_assign(block, scope_id, span, &tuple, // tuple = (message_arg, file_arg, line_arg);
Rvalue::Aggregate(AggregateKind::Tuple, elems));
// FIXME: is this region really correct here?
- self.cfg.push_assign(block, span, &tuple_ref, // tuple_ref = &tuple;
+ self.cfg.push_assign(block, scope_id, span, &tuple_ref, // tuple_ref = &tuple;
Rvalue::Ref(region, BorrowKind::Shared, tuple));
let cleanup = self.diverge_cleanup();
self.cfg.terminate(block, Terminator::Call {