1 //! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.
3 use rustc_errors::{struct_span_err, Applicability, Diagnostic};
4 use rustc_hir::def_id::DefId;
5 use rustc_hir::{self as hir, HirId, LangItem};
6 use rustc_infer::infer::TyCtxtInferExt;
7 use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
8 use rustc_middle::mir::*;
9 use rustc_middle::ty::cast::CastTy;
10 use rustc_middle::ty::subst::GenericArgKind;
11 use rustc_middle::ty::{
12 self, adjustment::PointerCast, Instance, InstanceDef, Ty, TyCtxt, TypeAndMut,
14 use rustc_span::{sym, Span, Symbol};
15 use rustc_trait_selection::traits::error_reporting::InferCtxtExt;
16 use rustc_trait_selection::traits::{self, TraitEngine};
21 use super::ops::{self, NonConstOp, Status};
22 use super::qualifs::{self, CustomEq, HasMutInterior, NeedsDrop};
23 use super::resolver::FlowSensitiveAnalysis;
24 use super::{is_lang_panic_fn, ConstCx, Qualif};
25 use crate::const_eval::is_unstable_const_fn;
26 use crate::dataflow::impls::MaybeMutBorrowedLocals;
27 use crate::dataflow::{self, Analysis};
29 // We are using `MaybeMutBorrowedLocals` as a proxy for whether an item may have been mutated
30 // through a pointer prior to the given point. This is okay even though `MaybeMutBorrowedLocals`
31 // kills locals upon `StorageDead` because a local will never be used after a `StorageDead`.
32 type IndirectlyMutableResults<'mir, 'tcx> =
33 dataflow::ResultsCursor<'mir, 'tcx, MaybeMutBorrowedLocals<'mir, 'tcx>>;
// Cursor over the per-location results of a flow-sensitive qualif analysis `Q`
// (instantiated below with `NeedsDrop`, `HasMutInterior`, and `CustomEq`).
35 type QualifResults<'mir, 'tcx, Q> =
36 dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'mir, 'tcx, Q>>;
// Holds lazily-constructed dataflow cursors for each qualif. Each field starts as
// `None` and is built on first use via `get_or_insert_with` in the `impl` below,
// then reused across subsequent queries.
// NOTE(review): some lines of this extraction are elided (embedded numbering skips),
// so the struct's closing brace and any further fields are not visible here.
39 pub struct Qualifs<'mir, 'tcx> {
40 has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
41 needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
42 indirectly_mutable: Option<IndirectlyMutableResults<'mir, 'tcx>>,
// NOTE(review): this extraction elides interleaved lines (embedded numbering skips),
// so signatures and bodies below are incomplete; comments describe only what is visible.
45 impl Qualifs<'mir, 'tcx> {
// Queries the `MaybeMutBorrowedLocals` dataflow results for `local` at `location`,
// building the cursor lazily on first call.
46 pub fn indirectly_mutable(
48 ccx: &'mir ConstCx<'mir, 'tcx>,
52 let indirectly_mutable = self.indirectly_mutable.get_or_insert_with(|| {
53 let ConstCx { tcx, body, def_id, param_env, .. } = *ccx;
55 // We can use `unsound_ignore_borrow_on_drop` here because custom drop impls are not
56 // allowed in a const.
58 // FIXME(ecstaticmorse): Someday we want to allow custom drop impls. How do we do this
59 // without breaking stable code?
60 MaybeMutBorrowedLocals::mut_borrows_only(tcx, &body, param_env)
61 .unsound_ignore_borrow_on_drop()
62 .into_engine(tcx, &body, def_id.to_def_id())
63 .pass_name("const_qualification")
64 .iterate_to_fixpoint()
65 .into_results_cursor(&body)
// Seek to just before `location`'s primary effect, then test membership of `local`.
68 indirectly_mutable.seek_before_primary_effect(location);
69 indirectly_mutable.get().contains(local)
72 /// Returns `true` if `local` is `NeedsDrop` at the given `Location`.
74 /// Only updates the cursor if absolutely necessary
77 ccx: &'mir ConstCx<'mir, 'tcx>,
// Fast path: if no value of this type can ever need drop, skip dataflow entirely.
81 let ty = ccx.body.local_decls[local].ty;
82 if !NeedsDrop::in_any_value_of_ty(ccx, ty) {
86 let needs_drop = self.needs_drop.get_or_insert_with(|| {
87 let ConstCx { tcx, body, def_id, .. } = *ccx;
89 FlowSensitiveAnalysis::new(NeedsDrop, ccx)
90 .into_engine(tcx, &body, def_id.to_def_id())
91 .iterate_to_fixpoint()
92 .into_results_cursor(&body)
95 needs_drop.seek_before_primary_effect(location);
// A local also counts if it may have been mutated through a pointer, since the
// flow-sensitive result could then be stale.
96 needs_drop.get().contains(local) || self.indirectly_mutable(ccx, local, location)
99 /// Returns `true` if `local` is `HasMutInterior` at the given `Location`.
101 /// Only updates the cursor if absolutely necessary.
102 pub fn has_mut_interior(
104 ccx: &'mir ConstCx<'mir, 'tcx>,
// Fast path mirroring `needs_drop` above: type-based check before running dataflow.
108 let ty = ccx.body.local_decls[local].ty;
109 if !HasMutInterior::in_any_value_of_ty(ccx, ty) {
113 let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| {
114 let ConstCx { tcx, body, def_id, .. } = *ccx;
116 FlowSensitiveAnalysis::new(HasMutInterior, ccx)
117 .into_engine(tcx, &body, def_id.to_def_id())
118 .iterate_to_fixpoint()
119 .into_results_cursor(&body)
122 has_mut_interior.seek_before_primary_effect(location);
123 has_mut_interior.get().contains(local) || self.indirectly_mutable(ccx, local, location)
// Computes the qualifs of the return place at the (single) `Return` terminator.
126 fn in_return_place(&mut self, ccx: &'mir ConstCx<'mir, 'tcx>) -> ConstQualifs {
127 // Find the `Return` terminator if one exists.
129 // If no `Return` terminator exists, this MIR is divergent. Just return the conservative
130 // qualifs for the return type.
131 let return_block = ccx
135 .find(|(_, block)| match block.terminator().kind {
136 TerminatorKind::Return => true,
141 let return_block = match return_block {
142 None => return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty()),
146 let return_loc = ccx.body.terminator_loc(return_block);
148 let custom_eq = match ccx.const_kind() {
149 // We don't care whether a `const fn` returns a value that is not structurally
150 // matchable. Functions calls are opaque and always use type-based qualification, so
151 // this value should never be used.
152 hir::ConstContext::ConstFn => true,
154 // If we know that all values of the return type are structurally matchable, there's no
155 // need to run dataflow.
156 _ if !CustomEq::in_any_value_of_ty(ccx, ccx.body.return_ty()) => false,
158 hir::ConstContext::Const | hir::ConstContext::Static(_) => {
// `CustomEq` is queried only here, so its cursor is built ad hoc rather than cached
// in a `Qualifs` field like the other two analyses.
159 let mut cursor = FlowSensitiveAnalysis::new(CustomEq, ccx)
160 .into_engine(ccx.tcx, &ccx.body, ccx.def_id.to_def_id())
161 .iterate_to_fixpoint()
162 .into_results_cursor(&ccx.body);
164 cursor.seek_after_primary_effect(return_loc);
165 cursor.contains(RETURN_PLACE)
170 needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc),
171 has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc),
// NOTE(review): field list is partially elided in this extraction (numbering skips
// 179→181→185); at least `span` and `error_emitted` exist elsewhere per the impls below.
177 pub struct Validator<'mir, 'tcx> {
178 ccx: &'mir ConstCx<'mir, 'tcx>,
179 qualifs: Qualifs<'mir, 'tcx>,
181 /// The span of the current statement.
// Errors of secondary importance, buffered here and emitted by `check_body` only
// when no primary error was emitted first.
185 secondary_errors: Vec<Diagnostic>,
// Deref to the borrowed `ConstCx` so the checker can write `self.tcx`, `self.body`,
// `self.const_kind()`, etc. directly (as the methods below do).
188 impl Deref for Validator<'mir, 'tcx> {
189 type Target = ConstCx<'mir, 'tcx>;
191 fn deref(&self) -> &Self::Target {
// NOTE(review): this extraction elides interleaved lines (embedded numbering skips),
// so method signatures and bodies below are incomplete; comments describe only what
// is visible here.
196 impl Validator<'mir, 'tcx> {
197 pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self {
201 qualifs: Default::default(),
202 error_emitted: false,
203 secondary_errors: Vec::new(),
// Entry point: walks the whole MIR body and reports every non-const-legal operation.
207 pub fn check_body(&mut self) {
208 let ConstCx { tcx, body, def_id, .. } = *self.ccx;
210 // `async` functions cannot be `const fn`. This is checked during AST lowering, so there's
211 // no need to emit duplicate errors here.
212 if is_async_fn(self.ccx) || body.generator_kind.is_some() {
213 tcx.sess.delay_span_bug(body.span, "`async` functions cannot be `const fn`");
217 // The local type and predicate checks are not free and only relevant for `const fn`s.
218 if self.const_kind() == hir::ConstContext::ConstFn {
219 // Prevent const trait methods from being annotated as `stable`.
220 // FIXME: Do this as part of stability checking.
221 if self.is_const_stable_const_fn() {
222 let hir_id = tcx.hir().local_def_id_to_hir_id(self.def_id);
223 if crate::const_eval::is_parent_const_impl_raw(tcx, hir_id) {
228 "trait methods cannot be stable const fn"
234 self.check_item_predicates();
236 for (idx, local) in body.local_decls.iter_enumerated() {
237 // Handle the return place below.
238 if idx == RETURN_PLACE || local.internal {
242 self.span = local.source_info.span;
243 self.check_local_or_return_ty(local.ty, idx);
246 // impl trait is gone in MIR, so check the return type of a const fn by its signature
247 // instead of the type of the return place.
248 self.span = body.local_decls[RETURN_PLACE].source_info.span;
249 let return_ty = tcx.fn_sig(def_id).output();
250 self.check_local_or_return_ty(return_ty.skip_binder(), RETURN_PLACE);
// Main traversal — dispatches to the `Visitor` impl below.
253 self.visit_body(&body);
255 // Ensure that the end result is `Sync` in a non-thread local `static`.
256 let should_check_for_sync = self.const_kind()
257 == hir::ConstContext::Static(hir::Mutability::Not)
258 && !tcx.is_thread_local_static(def_id.to_def_id());
260 if should_check_for_sync {
261 let hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
262 check_return_ty_is_sync(tcx, &body, hir_id);
265 // If we got through const-checking without emitting any "primary" errors, emit any
266 // "secondary" errors if they occurred.
267 let secondary_errors = mem::take(&mut self.secondary_errors);
268 if !self.error_emitted {
269 for error in secondary_errors {
270 self.tcx.sess.diagnostic().emit_diagnostic(&error);
// Otherwise a primary error was emitted; sanity-check that the session saw it.
273 assert!(self.tcx.sess.has_errors());
277 pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
278 self.qualifs.in_return_place(self.ccx)
281 /// Emits an error if an expression cannot be evaluated in the current context.
282 pub fn check_op(&mut self, op: impl NonConstOp) {
283 self.check_op_spanned(op, self.span);
286 /// Emits an error at the given `span` if an expression cannot be evaluated in the current
288 pub fn check_op_spanned<O: NonConstOp>(&mut self, op: O, span: Span) {
289 let gate = match op.status_in_item(self.ccx) {
290 Status::Allowed => return,
// Gated op whose feature is enabled: allowed, unless a const-stable fn uses the
// gate without `#[allow_internal_unstable]` — that is an error in its own right.
292 Status::Unstable(gate) if self.tcx.features().enabled(gate) => {
293 let unstable_in_stable = self.ccx.is_const_stable_const_fn()
294 && !super::allow_internal_unstable(self.tcx, self.def_id.to_def_id(), gate);
295 if unstable_in_stable {
296 emit_unstable_in_stable_error(self.ccx, span, gate);
302 Status::Unstable(gate) => Some(gate),
303 Status::Forbidden => None,
// `-Zunleash-the-miri-inside-of-you` suppresses const-checking errors (recorded
// via `miri_unleashed_feature` instead).
306 if self.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you {
307 self.tcx.sess.miri_unleashed_feature(span, gate);
311 let mut err = op.build_error(self.ccx, span);
312 assert!(err.is_error());
// Primary errors are emitted eagerly; secondary ones are buffered and only emitted
// by `check_body` if no primary error occurred.
314 match op.importance() {
315 ops::DiagnosticImportance::Primary => {
316 self.error_emitted = true;
320 ops::DiagnosticImportance::Secondary => err.buffer(&mut self.secondary_errors),
324 fn check_static(&mut self, def_id: DefId, span: Span) {
326 !self.tcx.is_thread_local_static(def_id),
327 "tls access is checked in `Rvalue::ThreadLocalRef"
329 self.check_op_spanned(ops::StaticAccess, span)
// Walks every type reachable from `ty`, flagging components that are not allowed
// in a const context (mutable refs, impl Trait, fn pointers, non-Sized dyn bounds).
332 fn check_local_or_return_ty(&mut self, ty: Ty<'tcx>, local: Local) {
333 let kind = self.body.local_kind(local);
335 for ty in ty.walk() {
336 let ty = match ty.unpack() {
337 GenericArgKind::Type(ty) => ty,
339 // No constraints on lifetimes or constants, except potentially
340 // constants' types, but `walk` will get to them as well.
341 GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => continue,
345 ty::Ref(_, _, hir::Mutability::Mut) => self.check_op(ops::ty::MutRef(kind)),
346 ty::Opaque(..) => self.check_op(ops::ty::ImplTrait),
347 ty::FnPtr(..) => self.check_op(ops::ty::FnPtr(kind)),
349 ty::Dynamic(preds, _) => {
350 for pred in preds.iter() {
351 match pred.skip_binder() {
352 ty::ExistentialPredicate::AutoTrait(_)
353 | ty::ExistentialPredicate::Projection(_) => {
354 self.check_op(ops::ty::TraitBound(kind))
356 ty::ExistentialPredicate::Trait(trait_ref) => {
// `dyn Sized`-only bounds are fine; any other trait object bound is flagged.
357 if Some(trait_ref.def_id) != self.tcx.lang_items().sized_trait() {
358 self.check_op(ops::ty::TraitBound(kind))
// Walks the item's predicates (and, via `predicates.parent`, its ancestors'),
// flagging trait bounds that are not permitted on a const fn.
369 fn check_item_predicates(&mut self) {
370 let ConstCx { tcx, def_id, .. } = *self.ccx;
372 let mut current = def_id.to_def_id();
374 let predicates = tcx.predicates_of(current);
375 for (predicate, _) in predicates.predicates {
376 match predicate.skip_binders() {
377 ty::PredicateAtom::RegionOutlives(_)
378 | ty::PredicateAtom::TypeOutlives(_)
379 | ty::PredicateAtom::WellFormed(_)
380 | ty::PredicateAtom::Projection(_)
381 | ty::PredicateAtom::ConstEvaluatable(..)
382 | ty::PredicateAtom::ConstEquate(..)
383 | ty::PredicateAtom::TypeWellFormedFromEnv(..) => continue,
// The following predicate kinds should never appear on a function item.
384 ty::PredicateAtom::ObjectSafe(_) => {
385 bug!("object safe predicate on function: {:#?}", predicate)
387 ty::PredicateAtom::ClosureKind(..) => {
388 bug!("closure kind predicate on function: {:#?}", predicate)
390 ty::PredicateAtom::Subtype(_) => {
391 bug!("subtype predicate on function: {:#?}", predicate)
393 ty::PredicateAtom::Trait(pred, constness) => {
// `Sized` bounds are always permitted.
394 if Some(pred.def_id()) == tcx.lang_items().sized_trait() {
397 match pred.self_ty().kind() {
399 let generics = tcx.generics_of(current);
400 let def = generics.type_param(p, tcx);
401 let span = tcx.def_span(def.def_id);
403 // These are part of the function signature, so treat them like
404 // arguments when determining importance.
405 let kind = LocalKind::Arg;
407 if constness == hir::Constness::Const {
408 self.check_op_spanned(ops::ty::TraitBound(kind), span);
409 } else if !tcx.features().const_fn
410 || self.ccx.is_const_stable_const_fn()
412 // HACK: We shouldn't need the conditional above, but trait
413 // bounds on containing impl blocks are wrongly being marked as
415 self.check_op_spanned(ops::ty::TraitBound(kind), span);
418 // other kinds of bounds are either tautologies
419 // or cause errors in other passes
// Continue up the chain of parent items (e.g. the enclosing impl).
425 match predicates.parent {
426 Some(parent) => current = parent,
// The MIR traversal itself: each `visit_*` override inspects one construct and calls
// `check_op`/`check_op_spanned` for anything not allowed in a const context.
// NOTE(review): this extraction elides interleaved lines (embedded numbering skips),
// so the match arms and bodies below are incomplete; comments describe only what is
// visible here.
433 impl Visitor<'tcx> for Validator<'mir, 'tcx> {
434 fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) {
435 trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup);
437 // Just as the old checker did, we skip const-checking basic blocks on the unwind path.
438 // These blocks often drop locals that would otherwise be returned from the function.
440 // FIXME: This shouldn't be unsound since a panic at compile time will cause a compiler
441 // error anyway, but maybe we should do more here?
442 if block.is_cleanup {
446 self.super_basic_block_data(bb, block);
449 fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
450 trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);
452 // Special-case reborrows to be more like a copy of a reference.
454 Rvalue::Ref(_, kind, place) => {
// A reborrow (`&*x` of a reference) visits the base local and the projection
// minus the trailing deref under a borrow context, instead of the usual path.
455 if let Some(reborrowed_proj) = place_as_reborrow(self.tcx, self.body, place) {
456 let ctx = match kind {
457 BorrowKind::Shared => {
458 PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow)
460 BorrowKind::Shallow => {
461 PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow)
463 BorrowKind::Unique => {
464 PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow)
466 BorrowKind::Mut { .. } => {
467 PlaceContext::MutatingUse(MutatingUseContext::Borrow)
470 self.visit_local(&place.local, ctx, location);
471 self.visit_projection(place.local, reborrowed_proj, ctx, location);
475 Rvalue::AddressOf(mutbl, place) => {
// Same reborrow special-case for raw address-of.
476 if let Some(reborrowed_proj) = place_as_reborrow(self.tcx, self.body, place) {
477 let ctx = match mutbl {
479 PlaceContext::NonMutatingUse(NonMutatingUseContext::AddressOf)
481 Mutability::Mut => PlaceContext::MutatingUse(MutatingUseContext::AddressOf),
483 self.visit_local(&place.local, ctx, location);
484 self.visit_projection(place.local, reborrowed_proj, ctx, location);
491 self.super_rvalue(rvalue, location);
494 Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess),
498 | Rvalue::Discriminant(..)
500 | Rvalue::Aggregate(..) => {}
502 Rvalue::Ref(_, kind @ BorrowKind::Mut { .. }, ref place)
503 | Rvalue::Ref(_, kind @ BorrowKind::Unique, ref place) => {
504 let ty = place.ty(self.body, self.tcx).ty;
505 let is_allowed = match ty.kind() {
506 // Inside a `static mut`, `&mut [...]` is allowed.
507 ty::Array(..) | ty::Slice(_)
508 if self.const_kind() == hir::ConstContext::Static(hir::Mutability::Mut) =>
513 // FIXME(ecstaticmorse): We could allow `&mut []` inside a const context given
514 // that this is merely a ZST and it is already eligible for promotion.
515 // This may require an RFC?
517 ty::Array(_, len) if len.try_eval_usize(cx.tcx, cx.param_env) == Some(0)
// Non-allowed mutable/unique borrows are flagged; which op depends on the kind.
524 if let BorrowKind::Mut { .. } = kind {
525 self.check_op(ops::MutBorrow);
527 self.check_op(ops::CellBorrow);
532 Rvalue::AddressOf(Mutability::Mut, _) => self.check_op(ops::MutAddressOf),
534 Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Shallow, ref place)
535 | Rvalue::AddressOf(Mutability::Not, ref place) => {
// A shared borrow of interior-mutable data (e.g. a `Cell`) is also a cell borrow.
536 let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
538 &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
542 if borrowed_place_has_mut_interior {
543 self.check_op(ops::CellBorrow);
548 CastKind::Pointer(PointerCast::MutToConstPointer | PointerCast::ArrayToPointer),
555 PointerCast::UnsafeFnPointer
556 | PointerCast::ClosureFnPointer(_)
557 | PointerCast::ReifyFnPointer,
561 ) => self.check_op(ops::FnPtrCast),
563 Rvalue::Cast(CastKind::Pointer(PointerCast::Unsize), _, cast_ty) => {
564 if let Some(TypeAndMut { ty, .. }) = cast_ty.builtin_deref(true) {
565 let unsized_ty = self.tcx.struct_tail_erasing_lifetimes(ty, self.param_env);
567 // Casting/coercing things to slices is fine.
568 if let ty::Slice(_) | ty::Str = unsized_ty.kind() {
573 self.check_op(ops::UnsizingCast);
576 Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => {
577 let operand_ty = operand.ty(self.body, self.tcx);
578 let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast");
579 let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
// Pointer-to-integer casts are not const-evaluable.
581 if let (CastTy::Ptr(_) | CastTy::FnPtr, CastTy::Int(_)) = (cast_in, cast_out) {
582 self.check_op(ops::RawPtrToIntCast);
586 Rvalue::NullaryOp(NullOp::SizeOf, _) => {}
587 Rvalue::NullaryOp(NullOp::Box, _) => self.check_op(ops::HeapAllocation),
589 Rvalue::UnaryOp(_, ref operand) => {
590 let ty = operand.ty(self.body, self.tcx);
591 if is_int_bool_or_char(ty) {
592 // Int, bool, and char operations are fine.
593 } else if ty.is_floating_point() {
594 self.check_op(ops::FloatingPointOp);
596 span_bug!(self.span, "non-primitive type in `Rvalue::UnaryOp`: {:?}", ty);
600 Rvalue::BinaryOp(op, ref lhs, ref rhs)
601 | Rvalue::CheckedBinaryOp(op, ref lhs, ref rhs) => {
602 let lhs_ty = lhs.ty(self.body, self.tcx);
603 let rhs_ty = rhs.ty(self.body, self.tcx);
605 if is_int_bool_or_char(lhs_ty) && is_int_bool_or_char(rhs_ty) {
606 // Int, bool, and char operations are fine.
607 } else if lhs_ty.is_fn_ptr() || lhs_ty.is_unsafe_ptr() {
608 assert_eq!(lhs_ty, rhs_ty);
616 || op == BinOp::Offset
619 self.check_op(ops::RawPtrComparison);
620 } else if lhs_ty.is_floating_point() || rhs_ty.is_floating_point() {
621 self.check_op(ops::FloatingPointOp);
625 "non-primitive type in `Rvalue::BinaryOp`: {:?} ⚬ {:?}",
// Constants that are pointers to statics trigger the static-access check.
634 fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
635 self.super_operand(op, location);
636 if let Operand::Constant(c) = op {
637 if let Some(def_id) = c.check_static_ptr(self.tcx) {
638 self.check_static(def_id, self.span);
642 fn visit_projection_elem(
645 proj_base: &[PlaceElem<'tcx>],
646 elem: PlaceElem<'tcx>,
647 context: PlaceContext,
651 "visit_projection_elem: place_local={:?} proj_base={:?} elem={:?} \
652 context={:?} location={:?}",
660 self.super_projection_elem(place_local, proj_base, elem, context, location);
663 ProjectionElem::Deref => {
664 let base_ty = Place::ty_from(place_local, proj_base, self.body, self.tcx).ty;
665 if let ty::RawPtr(_) = base_ty.kind() {
666 if proj_base.is_empty() {
// `*_1` where `_1` is a reference to a static: route through `check_static`
// using the local's declaration span.
667 if let (local, []) = (place_local, proj_base) {
668 let decl = &self.body.local_decls[local];
669 if let Some(box LocalInfo::StaticRef { def_id, .. }) = decl.local_info {
670 let span = decl.source_info.span;
671 self.check_static(def_id, span);
676 self.check_op(ops::RawPtrDeref);
679 if context.is_mutating_use() {
680 self.check_op(ops::MutDeref);
684 ProjectionElem::ConstantIndex { .. }
685 | ProjectionElem::Downcast(..)
686 | ProjectionElem::Subslice { .. }
687 | ProjectionElem::Field(..)
688 | ProjectionElem::Index(_) => {
// Field/index projections into a union count as union accesses.
689 let base_ty = Place::ty_from(place_local, proj_base, self.body, self.tcx).ty;
690 match base_ty.ty_adt_def() {
691 Some(def) if def.is_union() => {
692 self.check_op(ops::UnionAccess);
// Track the current span so `check_op` attributes errors to the right statement.
701 fn visit_source_info(&mut self, source_info: &SourceInfo) {
702 trace!("visit_source_info: source_info={:?}", source_info);
703 self.span = source_info.span;
706 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
707 trace!("visit_statement: statement={:?} location={:?}", statement, location);
709 match statement.kind {
710 StatementKind::Assign(..) | StatementKind::SetDiscriminant { .. } => {
711 self.super_statement(statement, location);
714 StatementKind::LlvmInlineAsm { .. } => {
715 self.super_statement(statement, location);
716 self.check_op(ops::InlineAsm);
719 StatementKind::FakeRead(..)
720 | StatementKind::StorageLive(_)
721 | StatementKind::StorageDead(_)
722 | StatementKind::Retag { .. }
723 | StatementKind::AscribeUserType(..)
724 | StatementKind::Coverage(..)
725 | StatementKind::Nop => {}
729 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
730 use rustc_target::spec::abi::Abi::RustIntrinsic;
732 trace!("visit_terminator: terminator={:?} location={:?}", terminator, location);
733 self.super_terminator(terminator, location);
735 match &terminator.kind {
736 TerminatorKind::Call { func, .. } => {
737 let ConstCx { tcx, body, def_id: caller, param_env, .. } = *self.ccx;
738 let caller = caller.to_def_id();
740 let fn_ty = func.ty(body, tcx);
// Only direct `FnDef` calls can be const; other callee types are flagged
// as indirect calls or are a bug.
742 let (mut callee, substs) = match *fn_ty.kind() {
743 ty::FnDef(def_id, substs) => (def_id, substs),
746 self.check_op(ops::FnCallIndirect);
750 span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)
754 // Resolve a trait method call to its concrete implementation, which may be in a
755 // `const` trait impl.
756 if self.tcx.features().const_trait_impl {
757 let instance = Instance::resolve(tcx, param_env, callee, substs);
758 debug!("Resolving ({:?}) -> {:?}", callee, instance);
759 if let Ok(Some(func)) = instance {
760 if let InstanceDef::Item(def) = func.def {
766 // At this point, we are calling a function, `callee`, whose `DefId` is known...
768 if is_lang_panic_fn(tcx, callee) {
769 self.check_op(ops::Panic);
773 // `async` blocks get lowered to `std::future::from_generator(/* a closure */)`.
774 let is_async_block = Some(callee) == tcx.lang_items().from_generator_fn();
776 let kind = hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Block);
777 self.check_op(ops::Generator(kind));
781 // HACK: This is to "unstabilize" the `transmute` intrinsic
782 // within const fns. `transmute` is allowed in all other const contexts.
783 // This won't really scale to more intrinsics or functions. Let's allow const
784 // transmutes in const fn before we add more hacks to this.
785 if tcx.fn_sig(callee).abi() == RustIntrinsic
786 && tcx.item_name(callee) == sym::transmute
788 self.check_op(ops::Transmute);
792 if !tcx.is_const_fn_raw(callee) {
793 self.check_op(ops::FnCallNonConst(callee));
797 // If the `const fn` we are trying to call is not const-stable, ensure that we have
798 // the proper feature gate enabled.
799 if let Some(gate) = is_unstable_const_fn(tcx, callee) {
800 if self.span.allows_unstable(gate) {
804 // Calling an unstable function *always* requires that the corresponding gate
805 // be enabled, even if the function has `#[allow_internal_unstable(the_gate)]`.
806 if !tcx.features().declared_lib_features.iter().any(|&(sym, _)| sym == gate) {
807 self.check_op(ops::FnCallUnstable(callee, Some(gate)));
811 // If this crate is not using stability attributes, or the caller is not claiming to be a
812 // stable `const fn`, that is all that is required.
813 if !self.ccx.is_const_stable_const_fn() {
817 // Otherwise, we are something const-stable calling a const-unstable fn.
819 if super::allow_internal_unstable(tcx, caller, gate) {
823 self.check_op(ops::FnCallUnstable(callee, Some(gate)));
827 // FIXME(ecstaticmorse); For compatibility, we consider `unstable` callees that
828 // have no `rustc_const_stable` attributes to be const-unstable as well. This
829 // should be fixed later.
830 let callee_is_unstable_unmarked = tcx.lookup_const_stability(callee).is_none()
831 && tcx.lookup_stability(callee).map_or(false, |s| s.level.is_unstable());
832 if callee_is_unstable_unmarked {
833 if self.ccx.is_const_stable_const_fn() {
834 self.check_op(ops::FnCallUnstable(callee, None));
839 // Forbid all `Drop` terminators unless the place being dropped is a local with no
840 // projections that cannot be `NeedsDrop`.
841 TerminatorKind::Drop { place: dropped_place, .. }
842 | TerminatorKind::DropAndReplace { place: dropped_place, .. } => {
843 // If we are checking live drops after drop-elaboration, don't emit duplicate
845 if super::post_drop_elaboration::checking_enabled(self.ccx) {
849 let mut err_span = self.span;
851 // Check to see if the type of this place can ever have a drop impl. If not, this
852 // `Drop` terminator is frivolous.
854 dropped_place.ty(self.body, self.tcx).ty.needs_drop(self.tcx, self.param_env);
// For a bare local, use the flow-sensitive qualif result (and its decl span
// for diagnostics); other places fall back to the conservative answer.
860 let needs_drop = if let Some(local) = dropped_place.as_local() {
861 // Use the span where the local was declared as the span of the drop error.
862 err_span = self.body.local_decls[local].source_info.span;
863 self.qualifs.needs_drop(self.ccx, local, location)
869 self.check_op_spanned(
870 ops::LiveDrop { dropped_at: Some(terminator.source_info.span) },
876 TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm),
877 TerminatorKind::Abort => self.check_op(ops::Abort),
879 TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => {
880 self.check_op(ops::Generator(hir::GeneratorKind::Gen))
883 TerminatorKind::Assert { .. }
884 | TerminatorKind::FalseEdge { .. }
885 | TerminatorKind::FalseUnwind { .. }
886 | TerminatorKind::Goto { .. }
887 | TerminatorKind::Resume
888 | TerminatorKind::Return
889 | TerminatorKind::SwitchInt { .. }
890 | TerminatorKind::Unreachable => {}
// Registers a `Sync` obligation on the body's return type (for non-thread-local
// `static`s — see the call site in `check_body`) and reports fulfillment errors.
895 fn check_return_ty_is_sync(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, hir_id: HirId) {
896 let ty = body.return_ty();
897 tcx.infer_ctxt().enter(|infcx| {
898 let cause = traits::ObligationCause::new(body.span, hir_id, traits::SharedStatic);
899 let mut fulfillment_cx = traits::FulfillmentContext::new();
900 let sync_def_id = tcx.require_lang_item(LangItem::Sync, Some(body.span));
901 fulfillment_cx.register_bound(&infcx, ty::ParamEnv::empty(), ty, sync_def_id, cause);
902 if let Err(err) = fulfillment_cx.select_all_or_error(&infcx) {
903 infcx.report_fulfillment_errors(&err, None, false);
// If `place` is a reborrow — a projection ending in `Deref` whose base is a true
// reference (not a raw pointer, not a reference-to-static) — returns the projection
// with that trailing `Deref` stripped; otherwise `None`.
// NOTE(review): some lines are elided in this extraction (numbering skips).
908 fn place_as_reborrow(
912 ) -> Option<&'a [PlaceElem<'tcx>]> {
913 place.projection.split_last().and_then(|(outermost, inner)| {
914 if outermost != &ProjectionElem::Deref {
918 // A borrow of a `static` also looks like `&(*_1)` in the MIR, but `_1` is a `const`
919 // that points to the allocation for the static. Don't treat these as reborrows.
920 if body.local_decls[place.local].is_ref_to_static() {
924 // Ensure the type being derefed is a reference and not a raw pointer.
926 // This is sufficient to prevent an access to a `static mut` from being marked as a
927 // reborrow, even if the check above were to disappear.
928 let inner_ty = Place::ty_from(place.local, inner, body, tcx).ty;
929 match inner_ty.kind() {
930 ty::Ref(..) => Some(inner),
// `true` for `bool`, integral, and `char` types — the primitives on which unary and
// binary operations are unconditionally allowed in const contexts (see `visit_rvalue`).
936 fn is_int_bool_or_char(ty: Ty<'_>) -> bool {
937 ty.is_bool() || ty.is_integral() || ty.is_char()
// Whether the item being const-checked is declared `async`, judged from its HIR
// function signature (`false` when there is no signature, e.g. for a `const` item).
940 fn is_async_fn(ccx: &ConstCx<'_, '_>) -> bool {
941 ccx.fn_sig().map_or(false, |sig| sig.header.asyncness == hir::IsAsync::Async)
// Error path for a const-stable function using a feature gate (see `check_op_spanned`):
// emits the error plus two suggestions — mark the fn `#[rustc_const_unstable]`, or
// bypass with `#[allow_internal_unstable]`.
// NOTE(review): some lines are elided in this extraction (numbering skips), including
// the builder calls the string arguments below belong to.
944 fn emit_unstable_in_stable_error(ccx: &ConstCx<'_, '_>, span: Span, gate: Symbol) {
945 let attr_span = ccx.fn_sig().map_or(ccx.body.span, |sig| sig.span.shrink_to_lo());
951 &format!("const-stable function cannot use `#[feature({})]`", gate.as_str()),
955 "if it is not part of the public API, make this function unstably const",
956 concat!(r#"#[rustc_const_unstable(feature = "...", issue = "...")]"#, '\n').to_owned(),
957 Applicability::HasPlaceholders,
961 "otherwise `#[allow_internal_unstable]` can be used to bypass stability checks",
962 format!("#[allow_internal_unstable({})]", gate),
963 Applicability::MaybeIncorrect,