1 //! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.
3 use rustc_errors::{Applicability, Diagnostic, ErrorReported};
5 use rustc_hir::def_id::DefId;
6 use rustc_index::bit_set::BitSet;
7 use rustc_infer::infer::TyCtxtInferExt;
8 use rustc_infer::traits::{ImplSource, Obligation, ObligationCause};
9 use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
10 use rustc_middle::mir::*;
11 use rustc_middle::ty::cast::CastTy;
12 use rustc_middle::ty::subst::{GenericArgKind, InternalSubsts};
13 use rustc_middle::ty::{self, adjustment::PointerCast, Instance, InstanceDef, Ty, TyCtxt};
14 use rustc_middle::ty::{Binder, TraitPredicate, TraitRef};
15 use rustc_mir_dataflow::{self, Analysis};
16 use rustc_span::{sym, Span, Symbol};
17 use rustc_trait_selection::traits::error_reporting::InferCtxtExt;
18 use rustc_trait_selection::traits::SelectionContext;
23 use super::ops::{self, NonConstOp, Status};
24 use super::qualifs::{self, CustomEq, HasMutInterior, NeedsDrop, NeedsNonConstDrop};
25 use super::resolver::FlowSensitiveAnalysis;
26 use super::{ConstCx, Qualif};
27 use crate::const_eval::is_unstable_const_fn;
// A dataflow results cursor over the flow-sensitive analysis for a single
// qualif `Q`, used to query qualification state at arbitrary `Location`s.
type QualifResults<'mir, 'tcx, Q> =
    rustc_mir_dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'mir, 'tcx, Q>>;
/// Lazily-initialized dataflow cursors for the value-qualification analyses.
/// Each cursor is `None` until first queried (see the accessors below), so a
/// body that never needs a given qualif never pays for its dataflow fixpoint.
pub struct Qualifs<'mir, 'tcx> {
    has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
    needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
    needs_non_const_drop: Option<QualifResults<'mir, 'tcx, NeedsNonConstDrop>>,
impl<'mir, 'tcx> Qualifs<'mir, 'tcx> {
    /// Returns `true` if `local` is `NeedsDrop` at the given `Location`.
    ///
    /// Only updates the cursor if absolutely necessary
        ccx: &'mir ConstCx<'mir, 'tcx>,
        // Cheap, type-based check first: if no value of this type can ever be
        // `NeedsDrop`, skip building/consulting the dataflow cursor entirely.
        let ty = ccx.body.local_decls[local].ty;
        if !NeedsDrop::in_any_value_of_ty(ccx, ty) {
        // Lazily run the `NeedsDrop` analysis to fixpoint the first time it is needed.
        let needs_drop = self.needs_drop.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;
            FlowSensitiveAnalysis::new(NeedsDrop, ccx)
                .into_engine(tcx, &body)
                .iterate_to_fixpoint()
                .into_results_cursor(&body)
        // Seek to just before `location`'s primary effect and read the state of `local`.
        needs_drop.seek_before_primary_effect(location);
        needs_drop.get().contains(local)
    /// Returns `true` if `local` is `NeedsNonConstDrop` at the given `Location`.
    ///
    /// Only updates the cursor if absolutely necessary
    pub fn needs_non_const_drop(
        ccx: &'mir ConstCx<'mir, 'tcx>,
        // Cheap, type-based check first; skip dataflow if the type can never qualify.
        let ty = ccx.body.local_decls[local].ty;
        if !NeedsNonConstDrop::in_any_value_of_ty(ccx, ty) {
        // Lazily run the `NeedsNonConstDrop` analysis to fixpoint on first use.
        let needs_non_const_drop = self.needs_non_const_drop.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;
            FlowSensitiveAnalysis::new(NeedsNonConstDrop, ccx)
                .into_engine(tcx, &body)
                .iterate_to_fixpoint()
                .into_results_cursor(&body)
        // Seek to just before `location`'s primary effect and read the state of `local`.
        needs_non_const_drop.seek_before_primary_effect(location);
        needs_non_const_drop.get().contains(local)
    /// Returns `true` if `local` is `HasMutInterior` at the given `Location`.
    ///
    /// Only updates the cursor if absolutely necessary.
    pub fn has_mut_interior(
        ccx: &'mir ConstCx<'mir, 'tcx>,
        // Cheap, type-based check first; skip dataflow if the type can never qualify.
        let ty = ccx.body.local_decls[local].ty;
        if !HasMutInterior::in_any_value_of_ty(ccx, ty) {
        // Lazily run the `HasMutInterior` analysis to fixpoint on first use.
        let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;
            FlowSensitiveAnalysis::new(HasMutInterior, ccx)
                .into_engine(tcx, &body)
                .iterate_to_fixpoint()
                .into_results_cursor(&body)
        // Seek to just before `location`'s primary effect and read the state of `local`.
        has_mut_interior.seek_before_primary_effect(location);
        has_mut_interior.get().contains(local)
        // Computes the qualifs of the value in the return place at the body's
        // `Return` terminator (or a conservative type-based answer if the body
        // is divergent and has no `Return`).
        ccx: &'mir ConstCx<'mir, 'tcx>,
        tainted_by_errors: Option<ErrorReported>,
        // Find the `Return` terminator if one exists.
        //
        // If no `Return` terminator exists, this MIR is divergent. Just return the conservative
        // qualifs for the return type.
        let return_block = ccx
            .find(|(_, block)| matches!(block.terminator().kind, TerminatorKind::Return))
        let Some(return_block) = return_block else {
            return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty(), tainted_by_errors);
        let return_loc = ccx.body.terminator_loc(return_block);
        let custom_eq = match ccx.const_kind() {
            // We don't care whether a `const fn` returns a value that is not structurally
            // matchable. Functions calls are opaque and always use type-based qualification, so
            // this value should never be used.
            hir::ConstContext::ConstFn => true,
            // If we know that all values of the return type are structurally matchable, there's no
            // need to run dataflow.
            _ if !CustomEq::in_any_value_of_ty(ccx, ccx.body.return_ty()) => false,
            hir::ConstContext::Const | hir::ConstContext::Static(_) => {
                // For `const`/`static` items, `CustomEq` must be computed
                // flow-sensitively, so run its analysis to fixpoint here.
                let mut cursor = FlowSensitiveAnalysis::new(CustomEq, ccx)
                    .into_engine(ccx.tcx, &ccx.body)
                    .iterate_to_fixpoint()
                    .into_results_cursor(&ccx.body);
                cursor.seek_after_primary_effect(return_loc);
                cursor.get().contains(RETURN_PLACE)
            // The remaining qualifs come from the lazy cursors above, all queried
            // at the return terminator's location.
            needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc),
            needs_non_const_drop: self.needs_non_const_drop(ccx, RETURN_PLACE, return_loc),
            has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc),
/// The visitor that walks a `mir::Body` and reports operations that are not
/// allowed in the current const context.
pub struct Checker<'mir, 'tcx> {
    ccx: &'mir ConstCx<'mir, 'tcx>,
    qualifs: Qualifs<'mir, 'tcx>,

    /// The span of the current statement.

    /// A set that stores for each local whether it has a `StorageDead` for it somewhere.
    local_has_storage_dead: Option<BitSet<Local>>,

    /// Set once a "primary" error has been emitted; used to decide whether the
    /// buffered secondary diagnostics should be emitted (see `check_body`).
    error_emitted: Option<ErrorReported>,
    /// Lower-importance diagnostics, buffered and only emitted if no primary
    /// error was reported for this body.
    secondary_errors: Vec<Diagnostic>,
/// Auto-deref to the underlying `ConstCx` so `Checker` methods can write
/// `self.tcx`, `self.body`, `self.const_kind()`, etc. directly.
impl<'mir, 'tcx> Deref for Checker<'mir, 'tcx> {
    type Target = ConstCx<'mir, 'tcx>;

    fn deref(&self) -> &Self::Target {
impl<'mir, 'tcx> Checker<'mir, 'tcx> {
    /// Creates a `Checker` for `ccx` with all lazily-computed state unset.
    pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self {
            qualifs: Default::default(),
            local_has_storage_dead: None,
            secondary_errors: Vec::new(),
    /// Entry point: const-checks the whole body, emitting an error for each
    /// operation that is not permitted in this const context.
    pub fn check_body(&mut self) {
        let ConstCx { tcx, body, .. } = *self.ccx;
        let def_id = self.ccx.def_id();

        // `async` functions cannot be `const fn`. This is checked during AST lowering, so there's
        // no need to emit duplicate errors here.
        if is_async_fn(self.ccx) || body.generator.is_some() {
            tcx.sess.delay_span_bug(body.span, "`async` functions cannot be `const fn`");

        // The local type and predicate checks are not free and only relevant for `const fn`s.
        if self.const_kind() == hir::ConstContext::ConstFn {
            // Prevent const trait methods from being annotated as `stable`.
            // FIXME: Do this as part of stability checking.
            if self.is_const_stable_const_fn() {
                if crate::const_eval::is_parent_const_impl_raw(tcx, def_id) {
                        .struct_span_err(self.span, "trait methods cannot be stable const fn")

            self.check_item_predicates();

            for (idx, local) in body.local_decls.iter_enumerated() {
                // Handle the return place below.
                if idx == RETURN_PLACE || local.internal {

                self.span = local.source_info.span;
                self.check_local_or_return_ty(local.ty, idx);

            // impl trait is gone in MIR, so check the return type of a const fn by its signature
            // instead of the type of the return place.
            self.span = body.local_decls[RETURN_PLACE].source_info.span;
            let return_ty = tcx.fn_sig(def_id).output();
            self.check_local_or_return_ty(return_ty.skip_binder(), RETURN_PLACE);

        // `#[rustc_do_not_const_check]` opts a body out of the main visitor pass.
        if !tcx.has_attr(def_id.to_def_id(), sym::rustc_do_not_const_check) {
            self.visit_body(&body);

        // If we got through const-checking without emitting any "primary" errors, emit any
        // "secondary" errors if they occurred.
        let secondary_errors = mem::take(&mut self.secondary_errors);
        if self.error_emitted.is_none() {
            for error in secondary_errors {
                self.tcx.sess.diagnostic().emit_diagnostic(&error);
            // A primary error was emitted, so the buffered secondary diagnostics
            // are dropped; sanity-check that something was actually reported.
            assert!(self.tcx.sess.has_errors());
    /// Returns `true` if `local` has a `StorageDead` statement anywhere in the
    /// body. The full set is computed once with a one-shot visitor and cached.
    fn local_has_storage_dead(&mut self, local: Local) -> bool {
        self.local_has_storage_dead
            .get_or_insert_with(|| {
                // Records every local that appears in a `StorageDead` statement.
                struct StorageDeads {
                    locals: BitSet<Local>,
                impl<'tcx> Visitor<'tcx> for StorageDeads {
                    fn visit_statement(&mut self, stmt: &Statement<'tcx>, _: Location) {
                        if let StatementKind::StorageDead(l) = stmt.kind {
                            self.locals.insert(l);
                let mut v = StorageDeads { locals: BitSet::new_empty(ccx.body.local_decls.len()) };
                v.visit_body(ccx.body);
    /// Computes the qualifs of the value in the return place, taking into
    /// account whether a primary error was already emitted for this body.
    pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
        self.qualifs.in_return_place(self.ccx, self.error_emitted)
    /// Emits an error if an expression cannot be evaluated in the current context.
    // Convenience wrapper around `check_op_spanned` using the current statement's span.
    pub fn check_op(&mut self, op: impl NonConstOp<'tcx>) {
        self.check_op_spanned(op, self.span);
    /// Emits an error at the given `span` if an expression cannot be evaluated in the current
    pub fn check_op_spanned<O: NonConstOp<'tcx>>(&mut self, op: O, span: Span) {
        let gate = match op.status_in_item(self.ccx) {
            Status::Allowed => return,

            Status::Unstable(gate) if self.tcx.features().enabled(gate) => {
                // Even with the gate enabled, a const-stable `const fn` may not use
                // the feature unless it opted in via `#[rustc_allow_const_fn_unstable]`.
                let unstable_in_stable = self.ccx.is_const_stable_const_fn()
                    && !super::rustc_allow_const_fn_unstable(
                        self.def_id().to_def_id(),
                if unstable_in_stable {
                    emit_unstable_in_stable_error(self.ccx, span, gate);

            Status::Unstable(gate) => Some(gate),
            Status::Forbidden => None,

        // `-Zunleash-the-miri-inside-of-you` skips const checking and only records
        // that an unleashed feature was used.
        if self.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you {
            self.tcx.sess.miri_unleashed_feature(span, gate);

        let mut err = op.build_error(self.ccx, span);
        assert!(err.is_error());

        match op.importance() {
            ops::DiagnosticImportance::Primary => {
                self.error_emitted = Some(ErrorReported);

            // Secondary errors are buffered and only emitted if no primary error
            // occurred for this body (see `check_body`).
            ops::DiagnosticImportance::Secondary => err.buffer(&mut self.secondary_errors),
    /// Flags an access to the `static` identified by `def_id`. Thread-local
    /// statics are checked elsewhere (at `Rvalue::ThreadLocalRef`).
    fn check_static(&mut self, def_id: DefId, span: Span) {
        if self.tcx.is_thread_local_static(def_id) {
            self.tcx.sess.delay_span_bug(span, "tls access is checked in `Rvalue::ThreadLocalRef");
        self.check_op_spanned(ops::StaticAccess, span)
    /// Walks every component of `ty` (the type of a local or of the return
    /// place) and flags kinds that are not allowed in a `const fn` signature:
    /// `&mut`, `impl Trait`, fn pointers, and non-`Sized`-only trait objects.
    fn check_local_or_return_ty(&mut self, ty: Ty<'tcx>, local: Local) {
        let kind = self.body.local_kind(local);

        for ty in ty.walk() {
            let ty = match ty.unpack() {
                GenericArgKind::Type(ty) => ty,

                // No constraints on lifetimes or constants, except potentially
                // constants' types, but `walk` will get to them as well.
                GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => continue,

                ty::Ref(_, _, hir::Mutability::Mut) => self.check_op(ops::ty::MutRef(kind)),
                ty::Opaque(..) => self.check_op(ops::ty::ImplTrait),
                ty::FnPtr(..) => self.check_op(ops::ty::FnPtr(kind)),

                ty::Dynamic(preds, _) => {
                    for pred in preds.iter() {
                        match pred.skip_binder() {
                            ty::ExistentialPredicate::AutoTrait(_)
                            | ty::ExistentialPredicate::Projection(_) => {
                                self.check_op(ops::ty::DynTrait(kind))

                            ty::ExistentialPredicate::Trait(trait_ref) => {
                                // A trait object mentioning only `Sized` is permitted.
                                if Some(trait_ref.def_id) != self.tcx.lang_items().sized_trait() {
                                    self.check_op(ops::ty::DynTrait(kind))
    /// Checks the `where`-clause predicates of this item (walking up through
    /// parent items) and flags trait bounds that a `const fn` may not have.
    fn check_item_predicates(&mut self) {
        let ConstCx { tcx, .. } = *self.ccx;

        let mut current = self.def_id().to_def_id();
            let predicates = tcx.predicates_of(current);
            for (predicate, _) in predicates.predicates {
                match predicate.kind().skip_binder() {
                    // These predicate kinds place no const-specific restrictions.
                    ty::PredicateKind::RegionOutlives(_)
                    | ty::PredicateKind::TypeOutlives(_)
                    | ty::PredicateKind::WellFormed(_)
                    | ty::PredicateKind::Projection(_)
                    | ty::PredicateKind::ConstEvaluatable(..)
                    | ty::PredicateKind::ConstEquate(..)
                    | ty::PredicateKind::TypeWellFormedFromEnv(..) => continue,
                    ty::PredicateKind::ObjectSafe(_) => {
                        bug!("object safe predicate on function: {:#?}", predicate)
                    ty::PredicateKind::ClosureKind(..) => {
                        bug!("closure kind predicate on function: {:#?}", predicate)
                    ty::PredicateKind::Subtype(_) | ty::PredicateKind::Coerce(_) => {
                        bug!("subtype/coerce predicate on function: {:#?}", predicate)
                    ty::PredicateKind::Trait(pred) => {
                        // `Sized` bounds are always allowed.
                        if Some(pred.def_id()) == tcx.lang_items().sized_trait() {
                        match pred.self_ty().kind() {
                                let generics = tcx.generics_of(current);
                                let def = generics.type_param(p, tcx);
                                let span = tcx.def_span(def.def_id);

                                // These are part of the function signature, so treat them like
                                // arguments when determining importance.
                                let kind = LocalKind::Arg;

                                self.check_op_spanned(ops::ty::TraitBound(kind), span);
                            // other kinds of bounds are either tautologies
                            // or cause errors in other passes
            // Continue with the parent item's predicates, if there is one.
            match predicates.parent {
                Some(parent) => current = parent,
    /// Classifies a mutable borrow of `local` as transient (allowed) or
    /// potentially long-lived (rejected), depending on the const context.
    fn check_mut_borrow(&mut self, local: Local, kind: hir::BorrowKind) {
        match self.const_kind() {
            // In a const fn all borrows are transient or point to the places given via
            // references in the arguments (so we already checked them with
            // TransientMutBorrow/MutBorrow as appropriate).
            // The borrow checker guarantees that no new non-transient borrows are created.
            // NOTE: Once we have heap allocations during CTFE we need to figure out
            // how to prevent `const fn` to create long-lived allocations that point
            // to mutable memory.
            hir::ConstContext::ConstFn => self.check_op(ops::TransientMutBorrow(kind)),

                // Locals with StorageDead do not live beyond the evaluation and can
                // thus safely be borrowed without being able to be leaked to the final
                // value of the constant.
                if self.local_has_storage_dead(local) {
                    self.check_op(ops::TransientMutBorrow(kind));
                    self.check_op(ops::MutBorrow(kind));
impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
    fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) {
        trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup);

        // We don't const-check basic blocks on the cleanup path since we never unwind during
        // const-eval: a panic causes an immediate compile error. In other words, cleanup blocks
        // are unreachable during const-eval.
        //
        // We can't be more conservative (e.g., by const-checking cleanup blocks anyways) because
        // locals that would never be dropped during normal execution are sometimes dropped during
        // unwinding, which means backwards-incompatible live-drop errors.
        if block.is_cleanup {

        self.super_basic_block_data(bb, block);
    /// Checks every rvalue for operations forbidden in const contexts:
    /// thread-local access, (interior-)mutable borrows, pointer casts,
    /// and float/raw-pointer arithmetic.
    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
        trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);

        // Special-case reborrows to be more like a copy of a reference.
            Rvalue::Ref(_, kind, place) => {
                if let Some(reborrowed_place_ref) = place_as_reborrow(self.tcx, self.body, place) {
                    // Visit the reborrowed place with a use context matching the borrow kind,
                    // instead of letting `super_rvalue` treat it as a fresh borrow.
                    let ctx = match kind {
                        BorrowKind::Shared => {
                            PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow)
                        BorrowKind::Shallow => {
                            PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow)
                        BorrowKind::Unique => {
                            PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow)
                        BorrowKind::Mut { .. } => {
                            PlaceContext::MutatingUse(MutatingUseContext::Borrow)
                    self.visit_local(&reborrowed_place_ref.local, ctx, location);
                    self.visit_projection(reborrowed_place_ref, ctx, location);
            Rvalue::AddressOf(mutbl, place) => {
                // Same reborrow special case for raw address-of.
                if let Some(reborrowed_place_ref) = place_as_reborrow(self.tcx, self.body, place) {
                    let ctx = match mutbl {
                            PlaceContext::NonMutatingUse(NonMutatingUseContext::AddressOf)
                        Mutability::Mut => PlaceContext::MutatingUse(MutatingUseContext::AddressOf),
                    self.visit_local(&reborrowed_place_ref.local, ctx, location);
                    self.visit_projection(reborrowed_place_ref, ctx, location);

        self.super_rvalue(rvalue, location);

            Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess),

            | Rvalue::Discriminant(..)
            | Rvalue::Aggregate(..) => {}

            Rvalue::Ref(_, kind @ BorrowKind::Mut { .. }, ref place)
            | Rvalue::Ref(_, kind @ BorrowKind::Unique, ref place) => {
                let ty = place.ty(self.body, self.tcx).ty;
                let is_allowed = match ty.kind() {
                    // Inside a `static mut`, `&mut [...]` is allowed.
                    ty::Array(..) | ty::Slice(_)
                        if self.const_kind() == hir::ConstContext::Static(hir::Mutability::Mut) =>

                    // FIXME(ecstaticmorse): We could allow `&mut []` inside a const context given
                    // that this is merely a ZST and it is already eligible for promotion.
                    // This may require an RFC?
                    ty::Array(_, len) if len.try_eval_usize(cx.tcx, cx.param_env) == Some(0)

                    if let BorrowKind::Mut { .. } = kind {
                        self.check_mut_borrow(place.local, hir::BorrowKind::Ref)
                        self.check_op(ops::CellBorrow);

            Rvalue::AddressOf(Mutability::Mut, ref place) => {
                self.check_mut_borrow(place.local, hir::BorrowKind::Raw)

            Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Shallow, ref place)
            | Rvalue::AddressOf(Mutability::Not, ref place) => {
                // A shared borrow is only a problem if the borrowed place has
                // interior mutability (e.g. contains an `UnsafeCell`).
                let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
                    &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),

                if borrowed_place_has_mut_interior {
                    match self.const_kind() {
                        // In a const fn all borrows are transient or point to the places given via
                        // references in the arguments (so we already checked them with
                        // TransientCellBorrow/CellBorrow as appropriate).
                        // The borrow checker guarantees that no new non-transient borrows are created.
                        // NOTE: Once we have heap allocations during CTFE we need to figure out
                        // how to prevent `const fn` to create long-lived allocations that point
                        // to (interior) mutable memory.
                        hir::ConstContext::ConstFn => self.check_op(ops::TransientCellBorrow),

                            // Locals with StorageDead are definitely not part of the final constant value, and
                            // it is thus inherently safe to permit such locals to have their
                            // address taken as we can't end up with a reference to them in the
                            // Note: This is only sound if every local that has a `StorageDead` has a
                            // `StorageDead` in every control flow path leading to a `return` terminator.
                            if self.local_has_storage_dead(place.local) {
                                self.check_op(ops::TransientCellBorrow);
                                self.check_op(ops::CellBorrow);

                // Benign pointer casts are explicitly allowed.
                CastKind::Pointer(PointerCast::MutToConstPointer | PointerCast::ArrayToPointer),

                    // Creating function pointers is flagged as a fn-pointer cast.
                    PointerCast::UnsafeFnPointer
                    | PointerCast::ClosureFnPointer(_)
                    | PointerCast::ReifyFnPointer,
            ) => self.check_op(ops::FnPtrCast),

            Rvalue::Cast(CastKind::Pointer(PointerCast::Unsize), _, _) => {
                // Nothing to check here (`check_local_or_return_ty` ensures no trait objects occur
                // in the type of any local, which also excludes casts).

            Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => {
                let operand_ty = operand.ty(self.body, self.tcx);
                let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast");
                let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");

                // Casting a (fn) pointer to an integer is not const-legal.
                if let (CastTy::Ptr(_) | CastTy::FnPtr, CastTy::Int(_)) = (cast_in, cast_out) {
                    self.check_op(ops::RawPtrToIntCast);

            Rvalue::NullaryOp(NullOp::SizeOf | NullOp::AlignOf, _) => {}
            Rvalue::ShallowInitBox(_, _) => {}

            Rvalue::UnaryOp(_, ref operand) => {
                let ty = operand.ty(self.body, self.tcx);
                if is_int_bool_or_char(ty) {
                    // Int, bool, and char operations are fine.
                } else if ty.is_floating_point() {
                    self.check_op(ops::FloatingPointOp);
                    span_bug!(self.span, "non-primitive type in `Rvalue::UnaryOp`: {:?}", ty);

            Rvalue::BinaryOp(op, box (ref lhs, ref rhs))
            | Rvalue::CheckedBinaryOp(op, box (ref lhs, ref rhs)) => {
                let lhs_ty = lhs.ty(self.body, self.tcx);
                let rhs_ty = rhs.ty(self.body, self.tcx);

                if is_int_bool_or_char(lhs_ty) && is_int_bool_or_char(rhs_ty) {
                    // Int, bool, and char operations are fine.
                } else if lhs_ty.is_fn_ptr() || lhs_ty.is_unsafe_ptr() {
                    assert_eq!(lhs_ty, rhs_ty);
                        || op == BinOp::Offset

                    self.check_op(ops::RawPtrComparison);
                } else if lhs_ty.is_floating_point() || rhs_ty.is_floating_point() {
                    self.check_op(ops::FloatingPointOp);
    fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
        self.super_operand(op, location);
        // A constant operand that is a pointer into a `static` counts as a static access.
        if let Operand::Constant(c) = op {
            if let Some(def_id) = c.check_static_ptr(self.tcx) {
                self.check_static(def_id, self.span);
    /// Checks place projections; the interesting case is `Deref`, which may be
    /// a raw-pointer dereference, a disguised static access, or a mutating deref.
    fn visit_projection_elem(
        proj_base: &[PlaceElem<'tcx>],
        elem: PlaceElem<'tcx>,
        context: PlaceContext,
            "visit_projection_elem: place_local={:?} proj_base={:?} elem={:?} \
            context={:?} location={:?}",

        self.super_projection_elem(place_local, proj_base, elem, context, location);

            ProjectionElem::Deref => {
                let base_ty = Place::ty_from(place_local, proj_base, self.body, self.tcx).ty;
                if base_ty.is_unsafe_ptr() {
                    if proj_base.is_empty() {
                        // Dereferencing a local that is a reference to a `static`
                        // is really an access to that static.
                        let decl = &self.body.local_decls[place_local];
                        if let Some(box LocalInfo::StaticRef { def_id, .. }) = decl.local_info {
                            let span = decl.source_info.span;
                            self.check_static(def_id, span);

                    // `*const T` is stable, `*mut T` is not
                    if !base_ty.is_mutable_ptr() {

                    self.check_op(ops::RawMutPtrDeref);

                // Dereferencing in a mutating context is a mutable deref.
                if context.is_mutating_use() {
                    self.check_op(ops::MutDeref);

            // These projection kinds need no const-specific checks.
            ProjectionElem::ConstantIndex { .. }
            | ProjectionElem::Downcast(..)
            | ProjectionElem::Subslice { .. }
            | ProjectionElem::Field(..)
            | ProjectionElem::Index(_) => {}
    fn visit_source_info(&mut self, source_info: &SourceInfo) {
        trace!("visit_source_info: source_info={:?}", source_info);
        // Track the current span so errors are reported at the right location.
        self.span = source_info.span;
    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        trace!("visit_statement: statement={:?} location={:?}", statement, location);

        self.super_statement(statement, location);

        // All of the listed statement kinds are themselves permitted in const
        // contexts; their operands were already checked by `super_statement`.
        match statement.kind {
            StatementKind::Assign(..)
            | StatementKind::SetDiscriminant { .. }
            | StatementKind::FakeRead(..)
            | StatementKind::StorageLive(_)
            | StatementKind::StorageDead(_)
            | StatementKind::Retag { .. }
            | StatementKind::AscribeUserType(..)
            | StatementKind::Coverage(..)
            | StatementKind::CopyNonOverlapping(..)
            | StatementKind::Nop => {}
    /// Checks terminators: the bulk of the logic handles `Call` (is the callee
    /// callable in a const context?); the rest handles live drops, inline asm,
    /// generators, and `Abort`.
    #[instrument(level = "debug", skip(self))]
    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        use rustc_target::spec::abi::Abi::RustIntrinsic;

        self.super_terminator(terminator, location);

        match &terminator.kind {
            TerminatorKind::Call { func, args, fn_span, from_hir_call, .. } => {
                let ConstCx { tcx, body, param_env, .. } = *self.ccx;
                let caller = self.def_id().to_def_id();

                let fn_ty = func.ty(body, tcx);

                // Only direct calls to known items can be const-checked; indirect
                // calls through fn pointers are rejected outright.
                let (mut callee, mut substs) = match *fn_ty.kind() {
                    ty::FnDef(def_id, substs) => (def_id, substs),

                        self.check_op(ops::FnCallIndirect);

                        span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)

                let mut nonconst_call_permission = false;

                // Attempting to call a trait method?
                if let Some(trait_id) = tcx.trait_of_item(callee) {
                    trace!("attempting to call a trait method");
                    if !self.tcx.features().const_trait_impl {
                        self.check_op(ops::FnCallNonConst {
                            from_hir_call: *from_hir_call,

                    // Select a `~const` impl source for the call to decide whether a
                    // const implementation is actually available.
                    let trait_ref = TraitRef::from_method(tcx, trait_id, substs);
                    let poly_trait_pred = Binder::dummy(TraitPredicate {
                        constness: ty::BoundConstness::ConstIfConst,
                        polarity: ty::ImplPolarity::Positive,
                        Obligation::new(ObligationCause::dummy(), param_env, poly_trait_pred);

                    let implsrc = tcx.infer_ctxt().enter(|infcx| {
                        let mut selcx = SelectionContext::new(&infcx);
                        selcx.select(&obligation)

                        // The const-ness is provided by a `~const` where-clause bound.
                        Ok(Some(ImplSource::Param(_, ty::BoundConstness::ConstIfConst))) => {
                                "const_trait_impl: provided {:?} via where-clause in {:?}",

                        Ok(Some(ImplSource::UserDefined(data))) => {
                            let callee_name = tcx.item_name(callee);
                            // Redirect the call to the corresponding item in the selected impl.
                            if let Some(&did) = tcx
                                .associated_item_def_ids(data.impl_def_id)
                                .find(|did| tcx.item_name(**did) == callee_name)
                                // using internal substs is ok here, since this is only
                                // used for the `resolve` call below
                                substs = InternalSubsts::identity_for_item(tcx, did);

                            if let hir::Constness::NotConst = tcx.impl_constness(data.impl_def_id) {
                                self.check_op(ops::FnCallNonConst {
                                    from_hir_call: *from_hir_call,

                        _ if !tcx.is_const_fn_raw(callee) => {
                            // At this point, it is only legal when the caller is marked with
                            // #[default_method_body_is_const], and the callee is in the same
                            let callee_trait = tcx.trait_of_item(callee);
                            if callee_trait.is_some()
                                && tcx.has_attr(caller, sym::default_method_body_is_const)
                                && callee_trait == tcx.trait_of_item(caller)
                                // Can only call methods when it's `<Self as TheTrait>::f`.
                                && tcx.types.self_param == substs.type_at(0)
                                nonconst_call_permission = true;

                            if !nonconst_call_permission {
                                let obligation = Obligation::new(
                                    ObligationCause::dummy_with_span(*fn_span),
                                    poly_trait_pred.map_bound(ty::PredicateKind::Trait),

                                // improve diagnostics by showing what failed. Our requirements are stricter this time
                                // as we are going to error again anyways.
                                tcx.infer_ctxt().enter(|infcx| {
                                    if let Err(e) = implsrc {
                                        infcx.report_selection_error(

                                self.check_op(ops::FnCallNonConst {
                                    from_hir_call: *from_hir_call,

                    // Resolve a trait method call to its concrete implementation, which may be in a
                    // `const` trait impl.
                    let instance = Instance::resolve(tcx, param_env, callee, substs);
                    debug!("Resolving ({:?}) -> {:?}", callee, instance);
                    if let Ok(Some(func)) = instance {
                        if let InstanceDef::Item(def) = func.def {

                // At this point, we are calling a function, `callee`, whose `DefId` is known...

                // `begin_panic` and `panic_display` are generic functions that accept
                // types other than str. Check to enforce that only str can be used in

                // const-eval of the `begin_panic` fn assumes the argument is `&str`
                if Some(callee) == tcx.lang_items().begin_panic_fn() {
                    match args[0].ty(&self.ccx.body.local_decls, tcx).kind() {
                        ty::Ref(_, ty, _) if ty.is_str() => return,
                        _ => self.check_op(ops::PanicNonStr),

                // const-eval of the `panic_display` fn assumes the argument is `&&str`
                if Some(callee) == tcx.lang_items().panic_display() {
                    match args[0].ty(&self.ccx.body.local_decls, tcx).kind() {
                        ty::Ref(_, ty, _) if matches!(ty.kind(), ty::Ref(_, ty, _) if ty.is_str()) =>
                        _ => self.check_op(ops::PanicNonStr),

                // `Box` allocation is never const-legal.
                if Some(callee) == tcx.lang_items().exchange_malloc_fn() {
                    self.check_op(ops::HeapAllocation);

                // `async` blocks get lowered to `std::future::from_generator(/* a closure */)`.
                let is_async_block = Some(callee) == tcx.lang_items().from_generator_fn();
                    let kind = hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Block);
                    self.check_op(ops::Generator(kind));

                let is_intrinsic = tcx.fn_sig(callee).abi() == RustIntrinsic;

                if !tcx.is_const_fn_raw(callee) {
                    if tcx.trait_of_item(callee).is_some() {
                        if tcx.has_attr(callee, sym::default_method_body_is_const) {
                            // To get to here we must have already found a const impl for the
                            // trait, but for it to still be non-const can be that the impl is
                            // using default method bodies.
                            nonconst_call_permission = true;

                    if !nonconst_call_permission {
                        self.check_op(ops::FnCallNonConst {
                            from_hir_call: *from_hir_call,

                // If the `const fn` we are trying to call is not const-stable, ensure that we have
                // the proper feature gate enabled.
                if let Some(gate) = is_unstable_const_fn(tcx, callee) {
                    trace!(?gate, "calling unstable const fn");
                    if self.span.allows_unstable(gate) {

                    // Calling an unstable function *always* requires that the corresponding gate
                    // be enabled, even if the function has `#[rustc_allow_const_fn_unstable(the_gate)]`.
                    if !tcx.features().declared_lib_features.iter().any(|&(sym, _)| sym == gate) {
                        self.check_op(ops::FnCallUnstable(callee, Some(gate)));

                    // If this crate is not using stability attributes, or the caller is not claiming to be a
                    // stable `const fn`, that is all that is required.
                    if !self.ccx.is_const_stable_const_fn() {
                        trace!("crate not using stability attributes or caller not stably const");

                    // Otherwise, we are something const-stable calling a const-unstable fn.
                    if super::rustc_allow_const_fn_unstable(tcx, caller, gate) {
                        trace!("rustc_allow_const_fn_unstable gate active");

                    self.check_op(ops::FnCallUnstable(callee, Some(gate)));

                // FIXME(ecstaticmorse); For compatibility, we consider `unstable` callees that
                // have no `rustc_const_stable` attributes to be const-unstable as well. This
                // should be fixed later.
                let callee_is_unstable_unmarked = tcx.lookup_const_stability(callee).is_none()
                    && tcx.lookup_stability(callee).map_or(false, |s| s.level.is_unstable());
                if callee_is_unstable_unmarked {
                    trace!("callee_is_unstable_unmarked");
                    // We do not use `const` modifiers for intrinsic "functions", as intrinsics are
                    // `extern` funtions, and these have no way to get marked `const`. So instead we
                    // use `rustc_const_(un)stable` attributes to mean that the intrinsic is `const`
                    if self.ccx.is_const_stable_const_fn() || is_intrinsic {
                        self.check_op(ops::FnCallUnstable(callee, None));

                trace!("permitting call");

            // Forbid all `Drop` terminators unless the place being dropped is a local with no
            // projections that cannot be `NeedsNonConstDrop`.
            TerminatorKind::Drop { place: dropped_place, .. }
            | TerminatorKind::DropAndReplace { place: dropped_place, .. } => {
                // If we are checking live drops after drop-elaboration, don't emit duplicate
                if super::post_drop_elaboration::checking_enabled(self.ccx) {

                let mut err_span = self.span;
                let ty_of_dropped_place = dropped_place.ty(self.body, self.tcx).ty;

                let ty_needs_non_const_drop =
                    qualifs::NeedsNonConstDrop::in_any_value_of_ty(self.ccx, ty_of_dropped_place);

                debug!(?ty_of_dropped_place, ?ty_needs_non_const_drop);

                if !ty_needs_non_const_drop {

                let needs_non_const_drop = if let Some(local) = dropped_place.as_local() {
                    // Use the span where the local was declared as the span of the drop error.
                    err_span = self.body.local_decls[local].source_info.span;
                    self.qualifs.needs_non_const_drop(self.ccx, local, location)

                if needs_non_const_drop {
                    self.check_op_spanned(
                        ops::LiveDrop { dropped_at: Some(terminator.source_info.span) },

            TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm),

            TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => {
                self.check_op(ops::Generator(hir::GeneratorKind::Gen))

            TerminatorKind::Abort => {
                // Cleanup blocks are skipped for const checking (see `visit_basic_block_data`).
                span_bug!(self.span, "`Abort` terminator outside of cleanup block")

            // These terminators have no const-specific restrictions.
            TerminatorKind::Assert { .. }
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. }
            | TerminatorKind::Goto { .. }
            | TerminatorKind::Resume
            | TerminatorKind::Return
            | TerminatorKind::SwitchInt { .. }
            | TerminatorKind::Unreachable => {}
/// If `place` is a reborrow (a `Deref` projection of a reference-typed base),
/// returns the base place being reborrowed; otherwise returns `None`.
fn place_as_reborrow<'tcx>(
) -> Option<PlaceRef<'tcx>> {
    match place.as_ref().last_projection() {
        Some((place_base, ProjectionElem::Deref)) => {
            // A borrow of a `static` also looks like `&(*_1)` in the MIR, but `_1` is a `const`
            // that points to the allocation for the static. Don't treat these as reborrows.
            if body.local_decls[place_base.local].is_ref_to_static() {
            // Ensure the type being derefed is a reference and not a raw pointer.
            // This is sufficient to prevent an access to a `static mut` from being marked as a
            // reborrow, even if the check above were to disappear.
            let inner_ty = place_base.ty(body, tcx).ty;

            if let ty::Ref(..) = inner_ty.kind() {
                return Some(place_base);
/// Returns `true` for the primitive types whose unary/binary operations are
/// always const-legal: `bool`, any integer type, and `char`.
fn is_int_bool_or_char(ty: Ty<'_>) -> bool {
    ty.is_bool() || ty.is_integral() || ty.is_char()
/// Returns `true` if the item being checked is an `async fn` (per its HIR signature).
fn is_async_fn(ccx: &ConstCx<'_, '_>) -> bool {
    ccx.fn_sig().map_or(false, |sig| sig.header.asyncness == hir::IsAsync::Async)
/// Emits the error for a const-stable `const fn` that uses an unstable feature
/// `gate`, suggesting the two ways to resolve it: mark the function unstably
/// const, or opt in via `#[rustc_allow_const_fn_unstable]`.
fn emit_unstable_in_stable_error(ccx: &ConstCx<'_, '_>, span: Span, gate: Symbol) {
    // Attach the suggested attributes at the start of the function signature.
    let attr_span = ccx.fn_sig().map_or(ccx.body.span, |sig| sig.span.shrink_to_lo());
        &format!("const-stable function cannot use `#[feature({})]`", gate.as_str()),
        "if it is not part of the public API, make this function unstably const",
        concat!(r#"#[rustc_const_unstable(feature = "...", issue = "...")]"#, '\n').to_owned(),
        Applicability::HasPlaceholders,
        "otherwise `#[rustc_allow_const_fn_unstable]` can be used to bypass stability checks",
        format!("#[rustc_allow_const_fn_unstable({})]\n", gate),
        Applicability::MaybeIncorrect,