1 //! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.
3 use rustc_errors::{Applicability, Diagnostic, ErrorReported};
4 use rustc_hir::def_id::DefId;
5 use rustc_hir::{self as hir, HirId, LangItem};
6 use rustc_index::bit_set::BitSet;
7 use rustc_infer::infer::TyCtxtInferExt;
8 use rustc_infer::traits::{ImplSource, Obligation, ObligationCause};
9 use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
10 use rustc_middle::mir::*;
11 use rustc_middle::ty::cast::CastTy;
12 use rustc_middle::ty::subst::GenericArgKind;
13 use rustc_middle::ty::{self, adjustment::PointerCast, Instance, InstanceDef, Ty, TyCtxt};
14 use rustc_middle::ty::{Binder, TraitPredicate, TraitRef};
15 use rustc_span::{sym, Span, Symbol};
16 use rustc_trait_selection::traits::error_reporting::InferCtxtExt;
17 use rustc_trait_selection::traits::{self, SelectionContext, TraitEngine};
22 use super::ops::{self, NonConstOp, Status};
23 use super::qualifs::{self, CustomEq, HasMutInterior, NeedsDrop};
24 use super::resolver::FlowSensitiveAnalysis;
25 use super::{is_lang_panic_fn, ConstCx, Qualif};
26 use crate::const_eval::is_unstable_const_fn;
27 use crate::dataflow::impls::MaybeMutBorrowedLocals;
28 use crate::dataflow::{self, Analysis};
// Cursor type aliases over the dataflow analyses that back `Qualifs` below.
30 // We are using `MaybeMutBorrowedLocals` as a proxy for whether an item may have been mutated
31 // through a pointer prior to the given point. This is okay even though `MaybeMutBorrowedLocals`
32 // kills locals upon `StorageDead` because a local will never be used after a `StorageDead`.
33 type IndirectlyMutableResults<'mir, 'tcx> =
34 dataflow::ResultsCursor<'mir, 'tcx, MaybeMutBorrowedLocals<'mir, 'tcx>>;
// Cursor over a flow-sensitive qualif analysis; `Q` is the qualif being tracked
// (e.g. `HasMutInterior`, `NeedsDrop`, `CustomEq`).
36 type QualifResults<'mir, 'tcx, Q> =
37 dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'mir, 'tcx, Q>>;
// Lazily-initialized dataflow cursors: each field stays `None` until the
// corresponding query method is first called (see `get_or_insert_with` uses below).
40 pub struct Qualifs<'mir, 'tcx> {
41 has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
42 needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
43 indirectly_mutable: Option<IndirectlyMutableResults<'mir, 'tcx>>,
46 impl Qualifs<'mir, 'tcx> {
/// Returns `true` if `local` may have been mutated through a pointer before `location`.
///
/// NOTE(review): parts of this signature (the `&mut self`, `local: Local` and
/// `location: Location` parameter lines) appear to be missing from this chunk; the
/// body clearly uses `local` and `location` in addition to `ccx` — confirm against
/// the full file.
47 pub fn indirectly_mutable(
49 ccx: &'mir ConstCx<'mir, 'tcx>,
// Build the `MaybeMutBorrowedLocals` cursor lazily on first query.
53 let indirectly_mutable = self.indirectly_mutable.get_or_insert_with(|| {
54 let ConstCx { tcx, body, param_env, .. } = *ccx;
56 // We can use `unsound_ignore_borrow_on_drop` here because custom drop impls are not
57 // allowed in a const.
59 // FIXME(ecstaticmorse): Someday we want to allow custom drop impls. How do we do this
60 // without breaking stable code?
61 MaybeMutBorrowedLocals::mut_borrows_only(tcx, &body, param_env)
62 .unsound_ignore_borrow_on_drop()
63 .into_engine(tcx, &body)
64 .pass_name("const_qualification")
65 .iterate_to_fixpoint()
66 .into_results_cursor(&body)
// Seek the cursor to just before `location`'s primary effect and test membership.
69 indirectly_mutable.seek_before_primary_effect(location);
70 indirectly_mutable.get().contains(local)
73 /// Returns `true` if `local` is `NeedsDrop` at the given `Location`.
75 /// Only updates the cursor if absolutely necessary
78 ccx: &'mir ConstCx<'mir, 'tcx>,
// Fast path: if no value of this type can ever need drop, skip dataflow entirely.
82 let ty = ccx.body.local_decls[local].ty;
83 if !NeedsDrop::in_any_value_of_ty(ccx, ty) {
// Lazily build the `NeedsDrop` flow-sensitive cursor on first query.
87 let needs_drop = self.needs_drop.get_or_insert_with(|| {
88 let ConstCx { tcx, body, .. } = *ccx;
90 FlowSensitiveAnalysis::new(NeedsDrop, ccx)
91 .into_engine(tcx, &body)
92 .iterate_to_fixpoint()
93 .into_results_cursor(&body)
// A local that may have been mutated through a pointer is conservatively
// treated as needing drop as well.
96 needs_drop.seek_before_primary_effect(location);
97 needs_drop.get().contains(local) || self.indirectly_mutable(ccx, local, location)
100 /// Returns `true` if `local` is `HasMutInterior` at the given `Location`.
102 /// Only updates the cursor if absolutely necessary.
103 pub fn has_mut_interior(
105 ccx: &'mir ConstCx<'mir, 'tcx>,
// Fast path: skip the dataflow query when the type can never have interior mutability.
109 let ty = ccx.body.local_decls[local].ty;
110 if !HasMutInterior::in_any_value_of_ty(ccx, ty) {
// Lazily build the `HasMutInterior` flow-sensitive cursor on first query.
114 let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| {
115 let ConstCx { tcx, body, .. } = *ccx;
117 FlowSensitiveAnalysis::new(HasMutInterior, ccx)
118 .into_engine(tcx, &body)
119 .iterate_to_fixpoint()
120 .into_results_cursor(&body)
// As in `needs_drop`, indirect mutation is folded in conservatively.
123 has_mut_interior.seek_before_primary_effect(location);
124 has_mut_interior.get().contains(local) || self.indirectly_mutable(ccx, local, location)
// Computes the qualifs of the value stored in the return place at function exit.
// NOTE(review): the method's name/signature line is missing from this chunk; the
// trailing struct literal (`needs_drop: ...`) suggests it builds a `ConstQualifs` —
// confirm against the full file.
129 ccx: &'mir ConstCx<'mir, 'tcx>,
130 error_occured: Option<ErrorReported>,
132 // Find the `Return` terminator if one exists.
134 // If no `Return` terminator exists, this MIR is divergent. Just return the conservative
135 // qualifs for the return type.
136 let return_block = ccx
140 .find(|(_, block)| match block.terminator().kind {
141 TerminatorKind::Return => true,
146 let return_block = match return_block {
147 None => return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty(), error_occured),
// The terminator location of the return block is where we sample the qualifs.
151 let return_loc = ccx.body.terminator_loc(return_block);
153 let custom_eq = match ccx.const_kind() {
154 // We don't care whether a `const fn` returns a value that is not structurally
155 // matchable. Functions calls are opaque and always use type-based qualification, so
156 // this value should never be used.
157 hir::ConstContext::ConstFn => true,
159 // If we know that all values of the return type are structurally matchable, there's no
160 // need to run dataflow.
161 _ if !CustomEq::in_any_value_of_ty(ccx, ccx.body.return_ty()) => false,
163 hir::ConstContext::Const | hir::ConstContext::Static(_) => {
164 let mut cursor = FlowSensitiveAnalysis::new(CustomEq, ccx)
165 .into_engine(ccx.tcx, &ccx.body)
166 .iterate_to_fixpoint()
167 .into_results_cursor(&ccx.body);
169 cursor.seek_after_primary_effect(return_loc);
170 cursor.contains(RETURN_PLACE)
// Sample the remaining qualifs of `RETURN_PLACE` at the return terminator.
175 needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc),
176 has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc),
// The MIR visitor that performs const-checking: it walks a `mir::Body` and emits an
// error (via the `ops` module) for each operation not allowed in the current const context.
183 pub struct Validator<'mir, 'tcx> {
184 ccx: &'mir ConstCx<'mir, 'tcx>,
185 qualifs: Qualifs<'mir, 'tcx>,
187 /// The span of the current statement.
190 /// A set that stores for each local whether it has a `StorageDead` for it somewhere.
// Lazily computed; see `local_has_storage_dead` below.
191 local_has_storage_dead: Option<BitSet<Local>>,
// Set once any "primary" error has been emitted; gates emission of `secondary_errors`.
193 error_emitted: Option<ErrorReported>,
194 secondary_errors: Vec<Diagnostic>,
// Deref to the `ConstCx` so `self.tcx`, `self.const_kind()`, etc. work directly.
197 impl Deref for Validator<'mir, 'tcx> {
198 type Target = ConstCx<'mir, 'tcx>;
200 fn deref(&self) -> &Self::Target {
205 impl Validator<'mir, 'tcx> {
/// Creates a validator for the given const context with empty lazy caches.
206 pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self {
210 qualifs: Default::default(),
211 local_has_storage_dead: None,
213 secondary_errors: Vec::new(),
/// Entry point: const-checks the whole body — local/return types, item predicates,
/// every statement/terminator (via `visit_body`), `Sync`-ness of non-TLS statics,
/// and finally flushes buffered "secondary" diagnostics.
217 pub fn check_body(&mut self) {
218 let ConstCx { tcx, body, .. } = *self.ccx;
219 let def_id = self.ccx.def_id();
221 // `async` functions cannot be `const fn`. This is checked during AST lowering, so there's
222 // no need to emit duplicate errors here.
223 if is_async_fn(self.ccx) || body.generator.is_some() {
224 tcx.sess.delay_span_bug(body.span, "`async` functions cannot be `const fn`");
228 // The local type and predicate checks are not free and only relevant for `const fn`s.
229 if self.const_kind() == hir::ConstContext::ConstFn {
230 // Prevent const trait methods from being annotated as `stable`.
231 // FIXME: Do this as part of stability checking.
232 if self.is_const_stable_const_fn() {
233 let hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
234 if crate::const_eval::is_parent_const_impl_raw(tcx, hir_id) {
238 .struct_span_err(self.span, "trait methods cannot be stable const fn")
243 self.check_item_predicates();
245 for (idx, local) in body.local_decls.iter_enumerated() {
246 // Handle the return place below.
247 if idx == RETURN_PLACE || local.internal {
251 self.span = local.source_info.span;
252 self.check_local_or_return_ty(local.ty, idx);
255 // impl trait is gone in MIR, so check the return type of a const fn by its signature
256 // instead of the type of the return place.
257 self.span = body.local_decls[RETURN_PLACE].source_info.span;
258 let return_ty = tcx.fn_sig(def_id).output();
259 self.check_local_or_return_ty(return_ty.skip_binder(), RETURN_PLACE);
// Walk all statements/terminators; the `Visitor` impl below does the real checking.
262 self.visit_body(&body);
264 // Ensure that the end result is `Sync` in a non-thread local `static`.
265 let should_check_for_sync = self.const_kind()
266 == hir::ConstContext::Static(hir::Mutability::Not)
267 && !tcx.is_thread_local_static(def_id.to_def_id());
269 if should_check_for_sync {
270 let hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
271 check_return_ty_is_sync(tcx, &body, hir_id);
274 // If we got through const-checking without emitting any "primary" errors, emit any
275 // "secondary" errors if they occurred.
276 let secondary_errors = mem::take(&mut self.secondary_errors);
277 if self.error_emitted.is_none() {
278 for error in secondary_errors {
279 self.tcx.sess.diagnostic().emit_diagnostic(&error);
// If a primary error was emitted, the session must already have errors recorded.
282 assert!(self.tcx.sess.has_errors());
/// Returns `true` if `local` has a `StorageDead` statement anywhere in the body.
/// The answer is computed once by a one-shot visitor and cached in
/// `self.local_has_storage_dead`.
286 fn local_has_storage_dead(&mut self, local: Local) -> bool {
288 self.local_has_storage_dead
289 .get_or_insert_with(|| {
// Ad-hoc visitor that records every local mentioned in a `StorageDead`.
290 struct StorageDeads {
291 locals: BitSet<Local>,
293 impl Visitor<'tcx> for StorageDeads {
294 fn visit_statement(&mut self, stmt: &Statement<'tcx>, _: Location) {
295 if let StatementKind::StorageDead(l) = stmt.kind {
296 self.locals.insert(l);
300 let mut v = StorageDeads { locals: BitSet::new_empty(ccx.body.local_decls.len()) };
301 v.visit_body(ccx.body);
/// Public accessor: delegates to `Qualifs::in_return_place`, threading through
/// whether an error has already been emitted for this body.
307 pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
308 self.qualifs.in_return_place(self.ccx, self.error_emitted)
311 /// Emits an error if an expression cannot be evaluated in the current context.
// Convenience wrapper around `check_op_spanned` using the current statement span.
312 pub fn check_op(&mut self, op: impl NonConstOp) {
313 self.check_op_spanned(op, self.span);
316 /// Emits an error at the given `span` if an expression cannot be evaluated in the current
// context. Allowed ops return early; unstable ops with their gate enabled are also
// allowed unless a const-stable fn uses a gate it has not opted into.
318 pub fn check_op_spanned<O: NonConstOp>(&mut self, op: O, span: Span) {
319 let gate = match op.status_in_item(self.ccx) {
320 Status::Allowed => return,
322 Status::Unstable(gate) if self.tcx.features().enabled(gate) => {
323 let unstable_in_stable = self.ccx.is_const_stable_const_fn()
324 && !super::rustc_allow_const_fn_unstable(
326 self.def_id().to_def_id(),
329 if unstable_in_stable {
330 emit_unstable_in_stable_error(self.ccx, span, gate);
336 Status::Unstable(gate) => Some(gate),
337 Status::Forbidden => None,
// `-Zunleash-the-miri-inside-of-you` suppresses the error and just records the feature.
340 if self.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you {
341 self.tcx.sess.miri_unleashed_feature(span, gate);
345 let mut err = op.build_error(self.ccx, span);
346 assert!(err.is_error());
// Primary errors are emitted immediately; secondary ones are buffered and only
// emitted later if no primary error occurred (see `check_body`).
348 match op.importance() {
349 ops::DiagnosticImportance::Primary => {
350 self.error_emitted = Some(ErrorReported);
354 ops::DiagnosticImportance::Secondary => err.buffer(&mut self.secondary_errors),
/// Flags an access to the `static` with the given `DefId`. Thread-local statics are
/// handled elsewhere (`Rvalue::ThreadLocalRef`), so reaching here with one is a bug.
358 fn check_static(&mut self, def_id: DefId, span: Span) {
359 if self.tcx.is_thread_local_static(def_id) {
360 self.tcx.sess.delay_span_bug(span, "tls access is checked in `Rvalue::ThreadLocalRef");
362 self.check_op_spanned(ops::StaticAccess, span)
/// Walks every type nested in `ty` and flags constructs that are not allowed in the
/// types of locals/returns of a const context: `&mut`, `impl Trait`, fn pointers,
/// and trait objects with non-`Sized` bounds.
365 fn check_local_or_return_ty(&mut self, ty: Ty<'tcx>, local: Local) {
366 let kind = self.body.local_kind(local);
368 for ty in ty.walk() {
369 let ty = match ty.unpack() {
370 GenericArgKind::Type(ty) => ty,
372 // No constraints on lifetimes or constants, except potentially
373 // constants' types, but `walk` will get to them as well.
374 GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => continue,
378 ty::Ref(_, _, hir::Mutability::Mut) => self.check_op(ops::ty::MutRef(kind)),
379 ty::Opaque(..) => self.check_op(ops::ty::ImplTrait),
380 ty::FnPtr(..) => self.check_op(ops::ty::FnPtr(kind)),
382 ty::Dynamic(preds, _) => {
383 for pred in preds.iter() {
384 match pred.skip_binder() {
385 ty::ExistentialPredicate::AutoTrait(_)
386 | ty::ExistentialPredicate::Projection(_) => {
387 self.check_op(ops::ty::TraitBound(kind))
389 ty::ExistentialPredicate::Trait(trait_ref) => {
// `dyn Sized`-only objects are permitted; any other trait bound is flagged.
390 if Some(trait_ref.def_id) != self.tcx.lang_items().sized_trait() {
391 self.check_op(ops::ty::TraitBound(kind))
/// Checks the where-clauses of this item (and, via `predicates.parent`, its
/// ancestors) for trait bounds on type parameters that are not allowed in a
/// `const fn` signature (everything except `Sized`).
402 fn check_item_predicates(&mut self) {
403 let ConstCx { tcx, .. } = *self.ccx;
405 let mut current = self.def_id().to_def_id();
407 let predicates = tcx.predicates_of(current);
408 for (predicate, _) in predicates.predicates {
409 match predicate.kind().skip_binder() {
410 ty::PredicateKind::RegionOutlives(_)
411 | ty::PredicateKind::TypeOutlives(_)
412 | ty::PredicateKind::WellFormed(_)
413 | ty::PredicateKind::Projection(_)
414 | ty::PredicateKind::ConstEvaluatable(..)
415 | ty::PredicateKind::ConstEquate(..)
416 | ty::PredicateKind::TypeWellFormedFromEnv(..) => continue,
417 ty::PredicateKind::ObjectSafe(_) => {
418 bug!("object safe predicate on function: {:#?}", predicate)
420 ty::PredicateKind::ClosureKind(..) => {
421 bug!("closure kind predicate on function: {:#?}", predicate)
423 ty::PredicateKind::Subtype(_) => {
424 bug!("subtype predicate on function: {:#?}", predicate)
426 ty::PredicateKind::Trait(pred, _constness) => {
// `Sized` bounds are always fine in const contexts.
427 if Some(pred.def_id()) == tcx.lang_items().sized_trait() {
430 match pred.self_ty().kind() {
// Report at the span of the offending type parameter's declaration.
432 let generics = tcx.generics_of(current);
433 let def = generics.type_param(p, tcx);
434 let span = tcx.def_span(def.def_id);
436 // These are part of the function signature, so treat them like
437 // arguments when determining importance.
438 let kind = LocalKind::Arg;
440 self.check_op_spanned(ops::ty::TraitBound(kind), span);
442 // other kinds of bounds are either tautologies
443 // or cause errors in other passes
// Walk up to the parent item's predicates until there is no parent.
449 match predicates.parent {
450 Some(parent) => current = parent,
/// Classifies a mutable borrow of `local` as transient (allowed behind a feature
/// gate) or long-lived (hard error), depending on the const context and whether the
/// local is known to die (`StorageDead`) before the final value is produced.
456 fn check_mut_borrow(&mut self, local: Local, kind: hir::BorrowKind) {
457 match self.const_kind() {
458 // In a const fn all borrows are transient or point to the places given via
459 // references in the arguments (so we already checked them with
460 // TransientMutBorrow/MutBorrow as appropriate).
461 // The borrow checker guarantees that no new non-transient borrows are created.
462 // NOTE: Once we have heap allocations during CTFE we need to figure out
463 // how to prevent `const fn` to create long-lived allocations that point
464 // to mutable memory.
465 hir::ConstContext::ConstFn => self.check_op(ops::TransientMutBorrow(kind)),
467 // Locals with StorageDead do not live beyond the evaluation and can
468 // thus safely be borrowed without being able to be leaked to the final
469 // value of the constant.
470 if self.local_has_storage_dead(local) {
471 self.check_op(ops::TransientMutBorrow(kind));
473 self.check_op(ops::MutBorrow(kind));
480 impl Visitor<'tcx> for Validator<'mir, 'tcx> {
/// Skips cleanup blocks entirely; otherwise delegates to the default traversal.
481 fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) {
482 trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup);
484 // We don't const-check basic blocks on the cleanup path since we never unwind during
485 // const-eval: a panic causes an immediate compile error. In other words, cleanup blocks
486 // are unreachable during const-eval.
488 // We can't be more conservative (e.g., by const-checking cleanup blocks anyways) because
489 // locals that would never be dropped during normal execution are sometimes dropped during
490 // unwinding, which means backwards-incompatible live-drop errors.
491 if block.is_cleanup {
495 self.super_basic_block_data(bb, block);
/// Const-checks a single rvalue: borrows (mutable, shared, raw), pointer casts,
/// unary/binary operations, and heap allocation (`box`).
498 fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
499 trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);
501 // Special-case reborrows to be more like a copy of a reference.
// A reborrow `&*x` is visited as a use of `x` in the appropriate borrow context,
// rather than as a deref projection, so it is not flagged as a raw-pointer deref.
503 Rvalue::Ref(_, kind, place) => {
504 if let Some(reborrowed_place_ref) = place_as_reborrow(self.tcx, self.body, place) {
505 let ctx = match kind {
506 BorrowKind::Shared => {
507 PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow)
509 BorrowKind::Shallow => {
510 PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow)
512 BorrowKind::Unique => {
513 PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow)
515 BorrowKind::Mut { .. } => {
516 PlaceContext::MutatingUse(MutatingUseContext::Borrow)
519 self.visit_local(&reborrowed_place_ref.local, ctx, location);
520 self.visit_projection(reborrowed_place_ref, ctx, location);
// Same reborrow special-casing for raw-pointer `&raw`/`addr_of` forms.
524 Rvalue::AddressOf(mutbl, place) => {
525 if let Some(reborrowed_place_ref) = place_as_reborrow(self.tcx, self.body, place) {
526 let ctx = match mutbl {
528 PlaceContext::NonMutatingUse(NonMutatingUseContext::AddressOf)
530 Mutability::Mut => PlaceContext::MutatingUse(MutatingUseContext::AddressOf),
532 self.visit_local(&reborrowed_place_ref.local, ctx, location);
533 self.visit_projection(reborrowed_place_ref, ctx, location);
540 self.super_rvalue(rvalue, location);
// Thread-local access is never allowed in a const context.
543 Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess),
547 | Rvalue::Discriminant(..)
549 | Rvalue::Aggregate(..) => {}
551 Rvalue::Ref(_, kind @ BorrowKind::Mut { .. }, ref place)
552 | Rvalue::Ref(_, kind @ BorrowKind::Unique, ref place) => {
553 let ty = place.ty(self.body, self.tcx).ty;
554 let is_allowed = match ty.kind() {
555 // Inside a `static mut`, `&mut [...]` is allowed.
556 ty::Array(..) | ty::Slice(_)
557 if self.const_kind() == hir::ConstContext::Static(hir::Mutability::Mut) =>
562 // FIXME(ecstaticmorse): We could allow `&mut []` inside a const context given
563 // that this is merely a ZST and it is already eligible for promotion.
564 // This may require an RFC?
566 ty::Array(_, len) if len.try_eval_usize(cx.tcx, cx.param_env) == Some(0)
// NOTE(review): the `is_allowed` guard and some match arms around here appear to
// be missing from this chunk — confirm control flow against the full file.
573 if let BorrowKind::Mut { .. } = kind {
574 self.check_mut_borrow(place.local, hir::BorrowKind::Ref)
576 self.check_op(ops::CellBorrow);
581 Rvalue::AddressOf(Mutability::Mut, ref place) => {
582 self.check_mut_borrow(place.local, hir::BorrowKind::Raw)
// Shared borrows are only a problem when the borrowed place has interior mutability.
585 Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Shallow, ref place)
586 | Rvalue::AddressOf(Mutability::Not, ref place) => {
587 let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
589 &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
593 if borrowed_place_has_mut_interior {
594 match self.const_kind() {
595 // In a const fn all borrows are transient or point to the places given via
596 // references in the arguments (so we already checked them with
597 // TransientCellBorrow/CellBorrow as appropriate).
598 // The borrow checker guarantees that no new non-transient borrows are created.
599 // NOTE: Once we have heap allocations during CTFE we need to figure out
600 // how to prevent `const fn` to create long-lived allocations that point
601 // to (interior) mutable memory.
602 hir::ConstContext::ConstFn => self.check_op(ops::TransientCellBorrow),
604 // Locals with StorageDead are definitely not part of the final constant value, and
605 // it is thus inherently safe to permit such locals to have their
606 // address taken as we can't end up with a reference to them in the
608 // Note: This is only sound if every local that has a `StorageDead` has a
609 // `StorageDead` in every control flow path leading to a `return` terminator.
610 if self.local_has_storage_dead(place.local) {
611 self.check_op(ops::TransientCellBorrow);
613 self.check_op(ops::CellBorrow);
// Benign pointer casts (mut→const, array→pointer) are allowed.
621 CastKind::Pointer(PointerCast::MutToConstPointer | PointerCast::ArrayToPointer),
// Casting a function item to a function pointer is flagged as `FnPtrCast`.
628 PointerCast::UnsafeFnPointer
629 | PointerCast::ClosureFnPointer(_)
630 | PointerCast::ReifyFnPointer,
634 ) => self.check_op(ops::FnPtrCast),
636 Rvalue::Cast(CastKind::Pointer(PointerCast::Unsize), _, _) => {
637 // Nothing to check here (`check_local_or_return_ty` ensures no trait objects occur
638 // in the type of any local, which also excludes casts).
641 Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => {
642 let operand_ty = operand.ty(self.body, self.tcx);
643 let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast");
644 let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
// Pointer-to-integer casts are unrepresentable at compile time.
646 if let (CastTy::Ptr(_) | CastTy::FnPtr, CastTy::Int(_)) = (cast_in, cast_out) {
647 self.check_op(ops::RawPtrToIntCast);
651 Rvalue::NullaryOp(NullOp::SizeOf, _) => {}
652 Rvalue::NullaryOp(NullOp::Box, _) => self.check_op(ops::HeapAllocation),
654 Rvalue::UnaryOp(_, ref operand) => {
655 let ty = operand.ty(self.body, self.tcx);
656 if is_int_bool_or_char(ty) {
657 // Int, bool, and char operations are fine.
658 } else if ty.is_floating_point() {
659 self.check_op(ops::FloatingPointOp);
661 span_bug!(self.span, "non-primitive type in `Rvalue::UnaryOp`: {:?}", ty);
665 Rvalue::BinaryOp(op, box (ref lhs, ref rhs))
666 | Rvalue::CheckedBinaryOp(op, box (ref lhs, ref rhs)) => {
667 let lhs_ty = lhs.ty(self.body, self.tcx);
668 let rhs_ty = rhs.ty(self.body, self.tcx);
670 if is_int_bool_or_char(lhs_ty) && is_int_bool_or_char(rhs_ty) {
671 // Int, bool, and char operations are fine.
672 } else if lhs_ty.is_fn_ptr() || lhs_ty.is_unsafe_ptr() {
673 assert_eq!(lhs_ty, rhs_ty);
// Raw-pointer comparison/offset operations are flagged as `RawPtrComparison`.
681 || op == BinOp::Offset
684 self.check_op(ops::RawPtrComparison);
685 } else if lhs_ty.is_floating_point() || rhs_ty.is_floating_point() {
686 self.check_op(ops::FloatingPointOp);
690 "non-primitive type in `Rvalue::BinaryOp`: {:?} ⚬ {:?}",
/// Flags constant operands that are pointers into the memory of a `static`.
699 fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
700 self.super_operand(op, location);
701 if let Operand::Constant(c) = op {
702 if let Some(def_id) = c.check_static_ptr(self.tcx) {
703 self.check_static(def_id, self.span);
/// Const-checks place projections: raw-pointer derefs, mutable derefs, and
/// accesses to union fields.
707 fn visit_projection_elem(
710 proj_base: &[PlaceElem<'tcx>],
711 elem: PlaceElem<'tcx>,
712 context: PlaceContext,
716 "visit_projection_elem: place_local={:?} proj_base={:?} elem={:?} \
717 context={:?} location={:?}",
725 self.super_projection_elem(place_local, proj_base, elem, context, location);
728 ProjectionElem::Deref => {
729 let base_ty = Place::ty_from(place_local, proj_base, self.body, self.tcx).ty;
730 if let ty::RawPtr(_) = base_ty.kind() {
731 if proj_base.is_empty() {
732 if let (local, []) = (place_local, proj_base) {
733 let decl = &self.body.local_decls[local];
// A deref of a `const`-introduced static reference is reported as a static
// access (at the declaration span), not as a raw-pointer deref.
734 if let Some(box LocalInfo::StaticRef { def_id, .. }) = decl.local_info {
735 let span = decl.source_info.span;
736 self.check_static(def_id, span);
741 self.check_op(ops::RawPtrDeref);
744 if context.is_mutating_use() {
745 self.check_op(ops::MutDeref);
749 ProjectionElem::ConstantIndex { .. }
750 | ProjectionElem::Downcast(..)
751 | ProjectionElem::Subslice { .. }
752 | ProjectionElem::Field(..)
753 | ProjectionElem::Index(_) => {
754 let base_ty = Place::ty_from(place_local, proj_base, self.body, self.tcx).ty;
755 if base_ty.is_union() {
756 self.check_op(ops::UnionAccess);
/// Tracks the span of the statement currently being checked (used by `check_op`).
762 fn visit_source_info(&mut self, source_info: &SourceInfo) {
763 trace!("visit_source_info: source_info={:?}", source_info);
764 self.span = source_info.span;
/// Const-checks a statement: only LLVM inline asm is forbidden here; all other
/// statement kinds are either checked via their rvalues/operands or are no-ops.
767 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
768 trace!("visit_statement: statement={:?} location={:?}", statement, location);
770 self.super_statement(statement, location);
772 match statement.kind {
773 StatementKind::LlvmInlineAsm { .. } => {
774 self.check_op(ops::InlineAsm);
777 StatementKind::Assign(..)
778 | StatementKind::SetDiscriminant { .. }
779 | StatementKind::FakeRead(..)
780 | StatementKind::StorageLive(_)
781 | StatementKind::StorageDead(_)
782 | StatementKind::Retag { .. }
783 | StatementKind::AscribeUserType(..)
784 | StatementKind::Coverage(..)
785 | StatementKind::CopyNonOverlapping(..)
786 | StatementKind::Nop => {}
790 #[instrument(level = "debug", skip(self))]
/// Const-checks a terminator. The bulk of the logic handles `Call`: trait-method
/// resolution under `const_trait_impl`, panic fns, intrinsics, const-stability of
/// the callee, and feature gating. Also forbids live `Drop`s, inline asm, and
/// generator-related terminators.
791 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
792 use rustc_target::spec::abi::Abi::RustIntrinsic;
794 self.super_terminator(terminator, location);
796 match &terminator.kind {
797 TerminatorKind::Call { func, args, .. } => {
798 let ConstCx { tcx, body, param_env, .. } = *self.ccx;
799 let caller = self.def_id().to_def_id();
801 let fn_ty = func.ty(body, tcx);
// Only direct `FnDef` calls are allowed; indirect calls (fn pointers) are flagged.
803 let (mut callee, substs) = match *fn_ty.kind() {
804 ty::FnDef(def_id, substs) => (def_id, substs),
807 self.check_op(ops::FnCallIndirect);
811 span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)
815 // Attempting to call a trait method?
816 if let Some(trait_id) = tcx.trait_of_item(callee) {
817 trace!("attempting to call a trait method");
818 if !self.tcx.features().const_trait_impl {
819 self.check_op(ops::FnCallNonConst);
// Select the impl source for the trait obligation to decide whether the
// call resolves to a `const` impl or a const where-clause bound.
823 let trait_ref = TraitRef::from_method(tcx, trait_id, substs);
824 let obligation = Obligation::new(
825 ObligationCause::dummy(),
829 trait_ref: TraitRef::from_method(tcx, trait_id, substs),
835 let implsrc = tcx.infer_ctxt().enter(|infcx| {
836 let mut selcx = SelectionContext::new(&infcx);
837 selcx.select(&obligation).unwrap()
840 // If the method is provided via a where-clause that does not use the `?const`
841 // opt-out, the call is allowed.
842 if let Some(ImplSource::Param(_, hir::Constness::Const)) = implsrc {
844 "const_trait_impl: provided {:?} via where-clause in {:?}",
850 // Resolve a trait method call to its concrete implementation, which may be in a
851 // `const` trait impl.
852 let instance = Instance::resolve(tcx, param_env, callee, substs);
853 debug!("Resolving ({:?}) -> {:?}", callee, instance);
854 if let Ok(Some(func)) = instance {
855 if let InstanceDef::Item(def) = func.def {
861 // At this point, we are calling a function, `callee`, whose `DefId` is known...
862 if is_lang_panic_fn(tcx, callee) {
863 self.check_op(ops::Panic);
865 // const-eval of the `begin_panic` fn assumes the argument is `&str`
866 if Some(callee) == tcx.lang_items().begin_panic_fn() {
867 match args[0].ty(&self.ccx.body.local_decls, tcx).kind() {
868 ty::Ref(_, ty, _) if ty.is_str() => (),
869 _ => self.check_op(ops::PanicNonStr),
876 // `async` blocks get lowered to `std::future::from_generator(/* a closure */)`.
877 let is_async_block = Some(callee) == tcx.lang_items().from_generator_fn();
879 let kind = hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Block);
880 self.check_op(ops::Generator(kind));
884 let is_intrinsic = tcx.fn_sig(callee).abi() == RustIntrinsic;
886 // HACK: This is to "unstabilize" the `transmute` intrinsic
887 // within const fns. `transmute` is allowed in all other const contexts.
888 // This won't really scale to more intrinsics or functions. Let's allow const
889 // transmutes in const fn before we add more hacks to this.
890 if is_intrinsic && tcx.item_name(callee) == sym::transmute {
891 self.check_op(ops::Transmute);
// Anything not marked const-callable is a plain non-const call error.
895 if !tcx.is_const_fn_raw(callee) {
896 self.check_op(ops::FnCallNonConst);
900 // If the `const fn` we are trying to call is not const-stable, ensure that we have
901 // the proper feature gate enabled.
902 if let Some(gate) = is_unstable_const_fn(tcx, callee) {
903 trace!(?gate, "calling unstable const fn");
904 if self.span.allows_unstable(gate) {
908 // Calling an unstable function *always* requires that the corresponding gate
909 // be enabled, even if the function has `#[rustc_allow_const_fn_unstable(the_gate)]`.
910 if !tcx.features().declared_lib_features.iter().any(|&(sym, _)| sym == gate) {
911 self.check_op(ops::FnCallUnstable(callee, Some(gate)));
915 // If this crate is not using stability attributes, or the caller is not claiming to be a
916 // stable `const fn`, that is all that is required.
917 if !self.ccx.is_const_stable_const_fn() {
918 trace!("crate not using stability attributes or caller not stably const");
922 // Otherwise, we are something const-stable calling a const-unstable fn.
924 if super::rustc_allow_const_fn_unstable(tcx, caller, gate) {
925 trace!("rustc_allow_const_fn_unstable gate active");
929 self.check_op(ops::FnCallUnstable(callee, Some(gate)));
933 // FIXME(ecstaticmorse); For compatibility, we consider `unstable` callees that
934 // have no `rustc_const_stable` attributes to be const-unstable as well. This
935 // should be fixed later.
936 let callee_is_unstable_unmarked = tcx.lookup_const_stability(callee).is_none()
937 && tcx.lookup_stability(callee).map_or(false, |s| s.level.is_unstable());
938 if callee_is_unstable_unmarked {
939 trace!("callee_is_unstable_unmarked");
940 // We do not use `const` modifiers for intrinsic "functions", as intrinsics are
941 // `extern` funtions, and these have no way to get marked `const`. So instead we
942 // use `rustc_const_(un)stable` attributes to mean that the intrinsic is `const`
943 if self.ccx.is_const_stable_const_fn() || is_intrinsic {
944 self.check_op(ops::FnCallUnstable(callee, None));
948 trace!("permitting call");
951 // Forbid all `Drop` terminators unless the place being dropped is a local with no
952 // projections that cannot be `NeedsDrop`.
953 TerminatorKind::Drop { place: dropped_place, .. }
954 | TerminatorKind::DropAndReplace { place: dropped_place, .. } => {
955 // If we are checking live drops after drop-elaboration, don't emit duplicate
957 if super::post_drop_elaboration::checking_enabled(self.ccx) {
961 let mut err_span = self.span;
963 // Check to see if the type of this place can ever have a drop impl. If not, this
964 // `Drop` terminator is frivolous.
966 dropped_place.ty(self.body, self.tcx).ty.needs_drop(self.tcx, self.param_env);
972 let needs_drop = if let Some(local) = dropped_place.as_local() {
973 // Use the span where the local was declared as the span of the drop error.
974 err_span = self.body.local_decls[local].source_info.span;
975 self.qualifs.needs_drop(self.ccx, local, location)
// NOTE(review): the non-local-place arm of this `if let` appears to be missing
// from this chunk — confirm against the full file.
981 self.check_op_spanned(
982 ops::LiveDrop { dropped_at: Some(terminator.source_info.span) },
988 TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm),
990 TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => {
991 self.check_op(ops::Generator(hir::GeneratorKind::Gen))
994 TerminatorKind::Abort => {
995 // Cleanup blocks are skipped for const checking (see `visit_basic_block_data`).
996 span_bug!(self.span, "`Abort` terminator outside of cleanup block")
999 TerminatorKind::Assert { .. }
1000 | TerminatorKind::FalseEdge { .. }
1001 | TerminatorKind::FalseUnwind { .. }
1002 | TerminatorKind::Goto { .. }
1003 | TerminatorKind::Resume
1004 | TerminatorKind::Return
1005 | TerminatorKind::SwitchInt { .. }
1006 | TerminatorKind::Unreachable => {}
// Registers a `Sync` obligation on the body's return type and reports fulfillment
// errors; used for non-thread-local `static`s (see `check_body`).
1011 fn check_return_ty_is_sync(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, hir_id: HirId) {
1012 let ty = body.return_ty();
1013 tcx.infer_ctxt().enter(|infcx| {
1014 let cause = traits::ObligationCause::new(body.span, hir_id, traits::SharedStatic);
1015 let mut fulfillment_cx = traits::FulfillmentContext::new();
1016 let sync_def_id = tcx.require_lang_item(LangItem::Sync, Some(body.span));
1017 fulfillment_cx.register_bound(&infcx, ty::ParamEnv::empty(), ty, sync_def_id, cause);
1018 if let Err(err) = fulfillment_cx.select_all_or_error(&infcx) {
1019 infcx.report_fulfillment_errors(&err, None, false);
// If `place` is a reborrow (`&*base` where `base` is a reference), returns the base
// place; otherwise `None`. Used by `visit_rvalue` to treat reborrows like copies.
1024 fn place_as_reborrow(
1028 ) -> Option<PlaceRef<'tcx>> {
1029 match place.as_ref().last_projection() {
1030 Some((place_base, ProjectionElem::Deref)) => {
1031 // A borrow of a `static` also looks like `&(*_1)` in the MIR, but `_1` is a `const`
1032 // that points to the allocation for the static. Don't treat these as reborrows.
1033 if body.local_decls[place_base.local].is_ref_to_static() {
1036 // Ensure the type being derefed is a reference and not a raw pointer.
1037 // This is sufficient to prevent an access to a `static mut` from being marked as a
1038 // reborrow, even if the check above were to disappear.
1039 let inner_ty = place_base.ty(body, tcx).ty;
1041 if let ty::Ref(..) = inner_ty.kind() {
1042 return Some(place_base);
// Returns `true` for the primitive types whose operations are always const-safe.
1052 fn is_int_bool_or_char(ty: Ty<'_>) -> bool {
1053 ty.is_bool() || ty.is_integral() || ty.is_char()
// Returns `true` if the item being checked is an `async fn` (per its HIR signature).
1056 fn is_async_fn(ccx: &ConstCx<'_, '_>) -> bool {
1057 ccx.fn_sig().map_or(false, |sig| sig.header.asyncness == hir::IsAsync::Async)
// Emits the error for a const-stable function using an unstable const feature gate,
// with two suggestions: mark the fn `rustc_const_unstable`, or opt in via
// `#[rustc_allow_const_fn_unstable(...)]`.
1060 fn emit_unstable_in_stable_error(ccx: &ConstCx<'_, '_>, span: Span, gate: Symbol) {
// Suggestions are attached just before the function signature.
1061 let attr_span = ccx.fn_sig().map_or(ccx.body.span, |sig| sig.span.shrink_to_lo());
1067 &format!("const-stable function cannot use `#[feature({})]`", gate.as_str()),
1071 "if it is not part of the public API, make this function unstably const",
1072 concat!(r#"#[rustc_const_unstable(feature = "...", issue = "...")]"#, '\n').to_owned(),
1073 Applicability::HasPlaceholders,
1077 "otherwise `#[rustc_allow_const_fn_unstable]` can be used to bypass stability checks",
1078 format!("#[rustc_allow_const_fn_unstable({})]\n", gate),
1079 Applicability::MaybeIncorrect,