compiler/rustc_mir/src/transform/check_consts/check.rs
1 //! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.
2
3 use rustc_errors::{Applicability, Diagnostic, ErrorReported};
4 use rustc_hir::def_id::DefId;
5 use rustc_hir::{self as hir, HirId, LangItem};
6 use rustc_index::bit_set::BitSet;
7 use rustc_infer::infer::TyCtxtInferExt;
8 use rustc_infer::traits::{ImplSource, Obligation, ObligationCause};
9 use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
10 use rustc_middle::mir::*;
11 use rustc_middle::ty::cast::CastTy;
12 use rustc_middle::ty::subst::GenericArgKind;
13 use rustc_middle::ty::{self, adjustment::PointerCast, Instance, InstanceDef, Ty, TyCtxt};
14 use rustc_middle::ty::{Binder, TraitPredicate, TraitRef};
15 use rustc_span::{sym, Span, Symbol};
16 use rustc_trait_selection::traits::error_reporting::InferCtxtExt;
17 use rustc_trait_selection::traits::{self, SelectionContext, TraitEngine};
18
19 use std::mem;
20 use std::ops::Deref;
21
22 use super::ops::{self, NonConstOp, Status};
23 use super::qualifs::{self, CustomEq, HasMutInterior, NeedsDrop};
24 use super::resolver::FlowSensitiveAnalysis;
25 use super::{is_lang_panic_fn, ConstCx, Qualif};
26 use crate::const_eval::is_unstable_const_fn;
27 use crate::dataflow::impls::MaybeMutBorrowedLocals;
28 use crate::dataflow::{self, Analysis};
29
30 // We are using `MaybeMutBorrowedLocals` as a proxy for whether an item may have been mutated
31 // through a pointer prior to the given point. This is okay even though `MaybeMutBorrowedLocals`
32 // kills locals upon `StorageDead` because a local will never be used after a `StorageDead`.
33 type IndirectlyMutableResults<'mir, 'tcx> =
34     dataflow::ResultsCursor<'mir, 'tcx, MaybeMutBorrowedLocals<'mir, 'tcx>>;
35
36 type QualifResults<'mir, 'tcx, Q> =
37     dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'mir, 'tcx, Q>>;
38
39 #[derive(Default)]
40 pub struct Qualifs<'mir, 'tcx> {
41     has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
42     needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
43     indirectly_mutable: Option<IndirectlyMutableResults<'mir, 'tcx>>,
44 }
45
46 impl Qualifs<'mir, 'tcx> {
47     pub fn indirectly_mutable(
48         &mut self,
49         ccx: &'mir ConstCx<'mir, 'tcx>,
50         local: Local,
51         location: Location,
52     ) -> bool {
53         let indirectly_mutable = self.indirectly_mutable.get_or_insert_with(|| {
54             let ConstCx { tcx, body, param_env, .. } = *ccx;
55
56             // We can use `unsound_ignore_borrow_on_drop` here because custom drop impls are not
57             // allowed in a const.
58             //
59             // FIXME(ecstaticmorse): Someday we want to allow custom drop impls. How do we do this
60             // without breaking stable code?
61             MaybeMutBorrowedLocals::mut_borrows_only(tcx, &body, param_env)
62                 .unsound_ignore_borrow_on_drop()
63                 .into_engine(tcx, &body)
64                 .pass_name("const_qualification")
65                 .iterate_to_fixpoint()
66                 .into_results_cursor(&body)
67         });
68
69         indirectly_mutable.seek_before_primary_effect(location);
70         indirectly_mutable.get().contains(local)
71     }
72
73     /// Returns `true` if `local` is `NeedsDrop` at the given `Location`.
74     ///
75     /// Only updates the cursor if absolutely necessary.
76     pub fn needs_drop(
77         &mut self,
78         ccx: &'mir ConstCx<'mir, 'tcx>,
79         local: Local,
80         location: Location,
81     ) -> bool {
82         let ty = ccx.body.local_decls[local].ty;
83         if !NeedsDrop::in_any_value_of_ty(ccx, ty) {
84             return false;
85         }
86
87         let needs_drop = self.needs_drop.get_or_insert_with(|| {
88             let ConstCx { tcx, body, .. } = *ccx;
89
90             FlowSensitiveAnalysis::new(NeedsDrop, ccx)
91                 .into_engine(tcx, &body)
92                 .iterate_to_fixpoint()
93                 .into_results_cursor(&body)
94         });
95
96         needs_drop.seek_before_primary_effect(location);
97         needs_drop.get().contains(local) || self.indirectly_mutable(ccx, local, location)
98     }
99
100     /// Returns `true` if `local` is `HasMutInterior` at the given `Location`.
101     ///
102     /// Only updates the cursor if absolutely necessary.
103     pub fn has_mut_interior(
104         &mut self,
105         ccx: &'mir ConstCx<'mir, 'tcx>,
106         local: Local,
107         location: Location,
108     ) -> bool {
109         let ty = ccx.body.local_decls[local].ty;
110         if !HasMutInterior::in_any_value_of_ty(ccx, ty) {
111             return false;
112         }
113
114         let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| {
115             let ConstCx { tcx, body, .. } = *ccx;
116
117             FlowSensitiveAnalysis::new(HasMutInterior, ccx)
118                 .into_engine(tcx, &body)
119                 .iterate_to_fixpoint()
120                 .into_results_cursor(&body)
121         });
122
123         has_mut_interior.seek_before_primary_effect(location);
124         has_mut_interior.get().contains(local) || self.indirectly_mutable(ccx, local, location)
125     }
126
127     fn in_return_place(
128         &mut self,
129         ccx: &'mir ConstCx<'mir, 'tcx>,
130         error_occured: Option<ErrorReported>,
131     ) -> ConstQualifs {
132         // Find the `Return` terminator if one exists.
133         //
134         // If no `Return` terminator exists, this MIR is divergent. Just return the conservative
135         // qualifs for the return type.
136         let return_block = ccx
137             .body
138             .basic_blocks()
139             .iter_enumerated()
140             .find(|(_, block)| match block.terminator().kind {
141                 TerminatorKind::Return => true,
142                 _ => false,
143             })
144             .map(|(bb, _)| bb);
145
146         let return_block = match return_block {
147             None => return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty(), error_occured),
148             Some(bb) => bb,
149         };
150
151         let return_loc = ccx.body.terminator_loc(return_block);
152
153         let custom_eq = match ccx.const_kind() {
154             // We don't care whether a `const fn` returns a value that is not structurally
155             // matchable. Function calls are opaque and always use type-based qualification, so
156             // this value should never be used.
157             hir::ConstContext::ConstFn => true,
158
159             // If we know that all values of the return type are structurally matchable, there's no
160             // need to run dataflow.
161             _ if !CustomEq::in_any_value_of_ty(ccx, ccx.body.return_ty()) => false,
162
163             hir::ConstContext::Const | hir::ConstContext::Static(_) => {
164                 let mut cursor = FlowSensitiveAnalysis::new(CustomEq, ccx)
165                     .into_engine(ccx.tcx, &ccx.body)
166                     .iterate_to_fixpoint()
167                     .into_results_cursor(&ccx.body);
168
169                 cursor.seek_after_primary_effect(return_loc);
170                 cursor.contains(RETURN_PLACE)
171             }
172         };
173
174         ConstQualifs {
175             needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc),
176             has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc),
177             custom_eq,
178             error_occured,
179         }
180     }
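    // Illustration only (hypothetical snippet, not part of this pass): `CustomEq`
    // matters because a `const` whose value relies on a user-written `PartialEq`
    // is not structurally matchable when used in a pattern:
    //
    //     #[derive(Eq)]
    //     struct Foo(i32);
    //     impl PartialEq for Foo {
    //         fn eq(&self, other: &Self) -> bool { self.0 == other.0 }
    //     }
    //     const F: Foo = Foo(0);
    //     // `match x { F => {} _ => {} }` is then rejected because `Foo` does not
    //     // `#[derive(PartialEq)]`.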
181 }
182
183 pub struct Checker<'mir, 'tcx> {
184     ccx: &'mir ConstCx<'mir, 'tcx>,
185     qualifs: Qualifs<'mir, 'tcx>,
186
187     /// The span of the current statement.
188     span: Span,
189
190     /// A set containing every local that has a `StorageDead` statement for it somewhere in the body.
191     local_has_storage_dead: Option<BitSet<Local>>,
192
193     error_emitted: Option<ErrorReported>,
194     secondary_errors: Vec<Diagnostic>,
195 }
196
197 impl Deref for Checker<'mir, 'tcx> {
198     type Target = ConstCx<'mir, 'tcx>;
199
200     fn deref(&self) -> &Self::Target {
201         &self.ccx
202     }
203 }
204
205 impl Checker<'mir, 'tcx> {
206     pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self {
207         Checker {
208             span: ccx.body.span,
209             ccx,
210             qualifs: Default::default(),
211             local_has_storage_dead: None,
212             error_emitted: None,
213             secondary_errors: Vec::new(),
214         }
215     }
216
217     pub fn check_body(&mut self) {
218         let ConstCx { tcx, body, .. } = *self.ccx;
219         let def_id = self.ccx.def_id();
220
221         // `async` functions cannot be `const fn`. This is checked during AST lowering, so there's
222         // no need to emit duplicate errors here.
223         if is_async_fn(self.ccx) || body.generator.is_some() {
224             tcx.sess.delay_span_bug(body.span, "`async` functions cannot be `const fn`");
225             return;
226         }
227
228         // The local type and predicate checks are not free and only relevant for `const fn`s.
229         if self.const_kind() == hir::ConstContext::ConstFn {
230             // Prevent const trait methods from being annotated as `stable`.
231             // FIXME: Do this as part of stability checking.
232             if self.is_const_stable_const_fn() {
233                 let hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
234                 if crate::const_eval::is_parent_const_impl_raw(tcx, hir_id) {
235                     self.ccx
236                         .tcx
237                         .sess
238                         .struct_span_err(self.span, "trait methods cannot be stable const fn")
239                         .emit();
240                 }
241             }
242
243             self.check_item_predicates();
244
245             for (idx, local) in body.local_decls.iter_enumerated() {
246                 // Handle the return place below.
247                 if idx == RETURN_PLACE || local.internal {
248                     continue;
249                 }
250
251                 self.span = local.source_info.span;
252                 self.check_local_or_return_ty(local.ty, idx);
253             }
254
255             // `impl Trait` is gone in MIR, so check the return type of a const fn by its signature
256             // instead of the type of the return place.
257             self.span = body.local_decls[RETURN_PLACE].source_info.span;
258             let return_ty = tcx.fn_sig(def_id).output();
259             self.check_local_or_return_ty(return_ty.skip_binder(), RETURN_PLACE);
260         }
261
262         self.visit_body(&body);
263
264         // Ensure that the end result is `Sync` in a non-thread local `static`.
265         let should_check_for_sync = self.const_kind()
266             == hir::ConstContext::Static(hir::Mutability::Not)
267             && !tcx.is_thread_local_static(def_id.to_def_id());
268
269         if should_check_for_sync {
270             let hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
271             check_return_ty_is_sync(tcx, &body, hir_id);
272         }
273
274         // If we got through const-checking without emitting any "primary" errors, emit any
275         // "secondary" errors if they occurred.
276         let secondary_errors = mem::take(&mut self.secondary_errors);
277         if self.error_emitted.is_none() {
278             for error in secondary_errors {
279                 self.tcx.sess.diagnostic().emit_diagnostic(&error);
280             }
281         } else {
282             assert!(self.tcx.sess.has_errors());
283         }
284     }
285
286     fn local_has_storage_dead(&mut self, local: Local) -> bool {
287         let ccx = self.ccx;
288         self.local_has_storage_dead
289             .get_or_insert_with(|| {
290                 struct StorageDeads {
291                     locals: BitSet<Local>,
292                 }
293                 impl Visitor<'tcx> for StorageDeads {
294                     fn visit_statement(&mut self, stmt: &Statement<'tcx>, _: Location) {
295                         if let StatementKind::StorageDead(l) = stmt.kind {
296                             self.locals.insert(l);
297                         }
298                     }
299                 }
300                 let mut v = StorageDeads { locals: BitSet::new_empty(ccx.body.local_decls.len()) };
301                 v.visit_body(ccx.body);
302                 v.locals
303             })
304             .contains(local)
305     }
306
307     pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
308         self.qualifs.in_return_place(self.ccx, self.error_emitted)
309     }
310
311     /// Emits an error if an expression cannot be evaluated in the current context.
312     pub fn check_op(&mut self, op: impl NonConstOp) {
313         self.check_op_spanned(op, self.span);
314     }
315
316     /// Emits an error at the given `span` if an expression cannot be evaluated in the current
317     /// context.
318     pub fn check_op_spanned<O: NonConstOp>(&mut self, op: O, span: Span) {
319         let gate = match op.status_in_item(self.ccx) {
320             Status::Allowed => return,
321
322             Status::Unstable(gate) if self.tcx.features().enabled(gate) => {
323                 let unstable_in_stable = self.ccx.is_const_stable_const_fn()
324                     && !super::rustc_allow_const_fn_unstable(
325                         self.tcx,
326                         self.def_id().to_def_id(),
327                         gate,
328                     );
329                 if unstable_in_stable {
330                     emit_unstable_in_stable_error(self.ccx, span, gate);
331                 }
332
333                 return;
334             }
335
336             Status::Unstable(gate) => Some(gate),
337             Status::Forbidden => None,
338         };
339
340         if self.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you {
341             self.tcx.sess.miri_unleashed_feature(span, gate);
342             return;
343         }
344
345         let mut err = op.build_error(self.ccx, span);
346         assert!(err.is_error());
347
348         match op.importance() {
349             ops::DiagnosticImportance::Primary => {
350                 self.error_emitted = Some(ErrorReported);
351                 err.emit();
352             }
353
354             ops::DiagnosticImportance::Secondary => err.buffer(&mut self.secondary_errors),
355         }
356     }
357
358     fn check_static(&mut self, def_id: DefId, span: Span) {
359         if self.tcx.is_thread_local_static(def_id) {
360             self.tcx.sess.delay_span_bug(span, "tls access is checked in `Rvalue::ThreadLocalRef`");
361         }
362         self.check_op_spanned(ops::StaticAccess, span)
363     }
364
365     fn check_local_or_return_ty(&mut self, ty: Ty<'tcx>, local: Local) {
366         let kind = self.body.local_kind(local);
367
368         for ty in ty.walk(self.tcx) {
369             let ty = match ty.unpack() {
370                 GenericArgKind::Type(ty) => ty,
371
372                 // No constraints on lifetimes or constants, except potentially
373                 // constants' types, but `walk` will get to them as well.
374                 GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => continue,
375             };
376
377             match *ty.kind() {
378                 ty::Ref(_, _, hir::Mutability::Mut) => self.check_op(ops::ty::MutRef(kind)),
379                 ty::Opaque(..) => self.check_op(ops::ty::ImplTrait),
380                 ty::FnPtr(..) => self.check_op(ops::ty::FnPtr(kind)),
381
382                 ty::Dynamic(preds, _) => {
383                     for pred in preds.iter() {
384                         match pred.skip_binder() {
385                             ty::ExistentialPredicate::AutoTrait(_)
386                             | ty::ExistentialPredicate::Projection(_) => {
387                                 self.check_op(ops::ty::TraitBound(kind))
388                             }
389                             ty::ExistentialPredicate::Trait(trait_ref) => {
390                                 if Some(trait_ref.def_id) != self.tcx.lang_items().sized_trait() {
391                                     self.check_op(ops::ty::TraitBound(kind))
392                                 }
393                             }
394                         }
395                     }
396                 }
397                 _ => {}
398             }
399         }
400     }
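    // Illustration only (the feature names are those gating these ops at the time of
    // this code): without the corresponding nightly features, all of these `const fn`
    // signatures are rejected by the check above:
    //
    //     const fn a(x: &mut i32) {}                 // `const_mut_refs`
    //     const fn b(f: fn()) {}                     // `const_fn_fn_ptr_basics`
    //     const fn c(x: &dyn core::fmt::Debug) {}    // `const_fn_trait_bound`
    //     const fn d() -> impl Sized { 0 }           // `const_impl_trait`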
401
402     fn check_item_predicates(&mut self) {
403         let ConstCx { tcx, .. } = *self.ccx;
404
405         let mut current = self.def_id().to_def_id();
406         loop {
407             let predicates = tcx.predicates_of(current);
408             for (predicate, _) in predicates.predicates {
409                 match predicate.kind().skip_binder() {
410                     ty::PredicateKind::RegionOutlives(_)
411                     | ty::PredicateKind::TypeOutlives(_)
412                     | ty::PredicateKind::WellFormed(_)
413                     | ty::PredicateKind::Projection(_)
414                     | ty::PredicateKind::ConstEvaluatable(..)
415                     | ty::PredicateKind::ConstEquate(..)
416                     | ty::PredicateKind::TypeWellFormedFromEnv(..) => continue,
417                     ty::PredicateKind::ObjectSafe(_) => {
418                         bug!("object safe predicate on function: {:#?}", predicate)
419                     }
420                     ty::PredicateKind::ClosureKind(..) => {
421                         bug!("closure kind predicate on function: {:#?}", predicate)
422                     }
423                     ty::PredicateKind::Subtype(_) | ty::PredicateKind::Coerce(_) => {
424                         bug!("subtype/coerce predicate on function: {:#?}", predicate)
425                     }
426                     ty::PredicateKind::Trait(pred) => {
427                         if Some(pred.def_id()) == tcx.lang_items().sized_trait() {
428                             continue;
429                         }
430                         match pred.self_ty().kind() {
431                             ty::Param(p) => {
432                                 let generics = tcx.generics_of(current);
433                                 let def = generics.type_param(p, tcx);
434                                 let span = tcx.def_span(def.def_id);
435
436                                 // These are part of the function signature, so treat them like
437                                 // arguments when determining importance.
438                                 let kind = LocalKind::Arg;
439
440                                 self.check_op_spanned(ops::ty::TraitBound(kind), span);
441                             }
442                             // other kinds of bounds are either tautologies
443                             // or cause errors in other passes
444                             _ => continue,
445                         }
446                     }
447                 }
448             }
449             match predicates.parent {
450                 Some(parent) => current = parent,
451                 None => break,
452             }
453         }
454     }
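    // Illustration only: a trait bound on a `const fn`'s generic parameter, e.g.
    //
    //     const fn pair<T: Clone>(x: T, y: T) -> (T, T) { (x, y) }
    //
    // is reported here with the span of `T` (gated behind `const_fn_trait_bound` at
    // this point), whereas `const fn id<T>(x: T) -> T { x }` is accepted since the
    // only implicit bound is `T: Sized`.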
455
456     fn check_mut_borrow(&mut self, local: Local, kind: hir::BorrowKind) {
457         match self.const_kind() {
458             // In a const fn all borrows are transient or point to the places given via
459             // references in the arguments (so we already checked them with
460             // TransientMutBorrow/MutBorrow as appropriate).
461             // The borrow checker guarantees that no new non-transient borrows are created.
462             // NOTE: Once we have heap allocations during CTFE we need to figure out
463             // how to prevent a `const fn` from creating long-lived allocations that point
464             // to mutable memory.
465             hir::ConstContext::ConstFn => self.check_op(ops::TransientMutBorrow(kind)),
466             _ => {
467                 // Locals with StorageDead do not live beyond the evaluation and can
468                 // thus safely be borrowed without being able to be leaked to the final
469                 // value of the constant.
470                 if self.local_has_storage_dead(local) {
471                     self.check_op(ops::TransientMutBorrow(kind));
472                 } else {
473                     self.check_op(ops::MutBorrow(kind));
474                 }
475             }
476         }
477     }
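    // Illustration only (assumes `#![feature(const_mut_refs)]`): a mutable borrow of
    // a local that has a `StorageDead` never reaches the final value and is treated
    // as `TransientMutBorrow`, while one that could escape into the final value is a
    // hard `MutBorrow` error:
    //
    //     const OK: i32 = { let mut x = 0; let r = &mut x; *r += 1; x };  // transient
    //     const BAD: &'static mut i32 = &mut 0;                           // escapes into the final value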
478 }
479
480 impl Visitor<'tcx> for Checker<'mir, 'tcx> {
481     fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) {
482         trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup);
483
484         // We don't const-check basic blocks on the cleanup path since we never unwind during
485         // const-eval: a panic causes an immediate compile error. In other words, cleanup blocks
486         // are unreachable during const-eval.
487         //
488         // We can't be more conservative (e.g., by const-checking cleanup blocks anyways) because
489         // locals that would never be dropped during normal execution are sometimes dropped during
490         // unwinding, which means backwards-incompatible live-drop errors.
491         if block.is_cleanup {
492             return;
493         }
494
495         self.super_basic_block_data(bb, block);
496     }
497
498     fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
499         trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);
500
501         // Special-case reborrows to be more like a copy of a reference.
502         match *rvalue {
503             Rvalue::Ref(_, kind, place) => {
504                 if let Some(reborrowed_place_ref) = place_as_reborrow(self.tcx, self.body, place) {
505                     let ctx = match kind {
506                         BorrowKind::Shared => {
507                             PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow)
508                         }
509                         BorrowKind::Shallow => {
510                             PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow)
511                         }
512                         BorrowKind::Unique => {
513                             PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow)
514                         }
515                         BorrowKind::Mut { .. } => {
516                             PlaceContext::MutatingUse(MutatingUseContext::Borrow)
517                         }
518                     };
519                     self.visit_local(&reborrowed_place_ref.local, ctx, location);
520                     self.visit_projection(reborrowed_place_ref, ctx, location);
521                     return;
522                 }
523             }
524             Rvalue::AddressOf(mutbl, place) => {
525                 if let Some(reborrowed_place_ref) = place_as_reborrow(self.tcx, self.body, place) {
526                     let ctx = match mutbl {
527                         Mutability::Not => {
528                             PlaceContext::NonMutatingUse(NonMutatingUseContext::AddressOf)
529                         }
530                         Mutability::Mut => PlaceContext::MutatingUse(MutatingUseContext::AddressOf),
531                     };
532                     self.visit_local(&reborrowed_place_ref.local, ctx, location);
533                     self.visit_projection(reborrowed_place_ref, ctx, location);
534                     return;
535                 }
536             }
537             _ => {}
538         }
539
540         self.super_rvalue(rvalue, location);
541
542         match *rvalue {
543             Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess),
544
545             Rvalue::Use(_)
546             | Rvalue::Repeat(..)
547             | Rvalue::Discriminant(..)
548             | Rvalue::Len(_)
549             | Rvalue::Aggregate(..) => {}
550
551             Rvalue::Ref(_, kind @ BorrowKind::Mut { .. }, ref place)
552             | Rvalue::Ref(_, kind @ BorrowKind::Unique, ref place) => {
553                 let ty = place.ty(self.body, self.tcx).ty;
554                 let is_allowed = match ty.kind() {
555                     // Inside a `static mut`, `&mut [...]` is allowed.
556                     ty::Array(..) | ty::Slice(_)
557                         if self.const_kind() == hir::ConstContext::Static(hir::Mutability::Mut) =>
558                     {
559                         true
560                     }
561
562                     // FIXME(ecstaticmorse): We could allow `&mut []` inside a const context given
563                     // that this is merely a ZST and it is already eligible for promotion.
564                     // This may require an RFC?
565                     /*
566                     ty::Array(_, len) if len.try_eval_usize(cx.tcx, cx.param_env) == Some(0)
567                         => true,
568                     */
569                     _ => false,
570                 };
571
572                 if !is_allowed {
573                     if let BorrowKind::Mut { .. } = kind {
574                         self.check_mut_borrow(place.local, hir::BorrowKind::Ref)
575                     } else {
576                         self.check_op(ops::CellBorrow);
577                     }
578                 }
579             }
580
581             Rvalue::AddressOf(Mutability::Mut, ref place) => {
582                 self.check_mut_borrow(place.local, hir::BorrowKind::Raw)
583             }
584
585             Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Shallow, ref place)
586             | Rvalue::AddressOf(Mutability::Not, ref place) => {
587                 let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
588                     &self.ccx,
589                     &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
590                     place.as_ref(),
591                 );
592
593                 if borrowed_place_has_mut_interior {
594                     match self.const_kind() {
595                         // In a const fn all borrows are transient or point to the places given via
596                         // references in the arguments (so we already checked them with
597                         // TransientCellBorrow/CellBorrow as appropriate).
598                         // The borrow checker guarantees that no new non-transient borrows are created.
599                         // NOTE: Once we have heap allocations during CTFE we need to figure out
600                         // how to prevent a `const fn` from creating long-lived allocations that point
601                         // to (interior) mutable memory.
602                         hir::ConstContext::ConstFn => self.check_op(ops::TransientCellBorrow),
603                         _ => {
604                             // Locals with StorageDead are definitely not part of the final constant value, and
605                             // it is thus inherently safe to permit such locals to have their
606                             // address taken as we can't end up with a reference to them in the
607                             // final value.
608                             // Note: This is only sound if every local that has a `StorageDead` has a
609                             // `StorageDead` in every control flow path leading to a `return` terminator.
610                             if self.local_has_storage_dead(place.local) {
611                                 self.check_op(ops::TransientCellBorrow);
612                             } else {
613                                 self.check_op(ops::CellBorrow);
614                             }
615                         }
616                     }
617                 }
618             }
619
620             Rvalue::Cast(
621                 CastKind::Pointer(PointerCast::MutToConstPointer | PointerCast::ArrayToPointer),
622                 _,
623                 _,
624             ) => {}
625
626             Rvalue::Cast(
627                 CastKind::Pointer(
628                     PointerCast::UnsafeFnPointer
629                     | PointerCast::ClosureFnPointer(_)
630                     | PointerCast::ReifyFnPointer,
631                 ),
632                 _,
633                 _,
634             ) => self.check_op(ops::FnPtrCast),
635
636             Rvalue::Cast(CastKind::Pointer(PointerCast::Unsize), _, _) => {
637                 // Nothing to check here (`check_local_or_return_ty` ensures no trait objects occur
638                 // in the type of any local, which also excludes casts).
639             }
640
641             Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => {
642                 let operand_ty = operand.ty(self.body, self.tcx);
643                 let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast");
644                 let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
645
646                 if let (CastTy::Ptr(_) | CastTy::FnPtr, CastTy::Int(_)) = (cast_in, cast_out) {
647                     self.check_op(ops::RawPtrToIntCast);
648                 }
649             }
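            // Illustration only: this is the check that rejects pointer-to-integer
            // casts in a const context (gated behind `const_raw_ptr_to_usize_cast`
            // at this point), e.g.
            //
            //     const ADDR: usize = &42_i32 as *const i32 as usize;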
650
651             Rvalue::NullaryOp(NullOp::SizeOf, _) => {}
652             Rvalue::NullaryOp(NullOp::Box, _) => self.check_op(ops::HeapAllocation),
653
654             Rvalue::UnaryOp(_, ref operand) => {
655                 let ty = operand.ty(self.body, self.tcx);
656                 if is_int_bool_or_char(ty) {
657                     // Int, bool, and char operations are fine.
658                 } else if ty.is_floating_point() {
659                     self.check_op(ops::FloatingPointOp);
660                 } else {
661                     span_bug!(self.span, "non-primitive type in `Rvalue::UnaryOp`: {:?}", ty);
662                 }
663             }
664
665             Rvalue::BinaryOp(op, box (ref lhs, ref rhs))
666             | Rvalue::CheckedBinaryOp(op, box (ref lhs, ref rhs)) => {
667                 let lhs_ty = lhs.ty(self.body, self.tcx);
668                 let rhs_ty = rhs.ty(self.body, self.tcx);
669
670                 if is_int_bool_or_char(lhs_ty) && is_int_bool_or_char(rhs_ty) {
671                     // Int, bool, and char operations are fine.
672                 } else if lhs_ty.is_fn_ptr() || lhs_ty.is_unsafe_ptr() {
673                     assert_eq!(lhs_ty, rhs_ty);
674                     assert!(
675                         op == BinOp::Eq
676                             || op == BinOp::Ne
677                             || op == BinOp::Le
678                             || op == BinOp::Lt
679                             || op == BinOp::Ge
680                             || op == BinOp::Gt
681                             || op == BinOp::Offset
682                     );
683
684                     self.check_op(ops::RawPtrComparison);
685                 } else if lhs_ty.is_floating_point() || rhs_ty.is_floating_point() {
686                     self.check_op(ops::FloatingPointOp);
687                 } else {
688                     span_bug!(
689                         self.span,
690                 "non-primitive type in `Rvalue::BinaryOp`: {:?} ⚬ {:?}",
691                         lhs_ty,
692                         rhs_ty
693                     );
694                 }
695             }
696         }
697     }
698
699     fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
700         self.super_operand(op, location);
701         if let Operand::Constant(c) = op {
702             if let Some(def_id) = c.check_static_ptr(self.tcx) {
703                 self.check_static(def_id, self.span);
704             }
705         }
706     }
707     fn visit_projection_elem(
708         &mut self,
709         place_local: Local,
710         proj_base: &[PlaceElem<'tcx>],
711         elem: PlaceElem<'tcx>,
712         context: PlaceContext,
713         location: Location,
714     ) {
715         trace!(
716             "visit_projection_elem: place_local={:?} proj_base={:?} elem={:?} \
717             context={:?} location={:?}",
718             place_local,
719             proj_base,
720             elem,
721             context,
722             location,
723         );
724
725         self.super_projection_elem(place_local, proj_base, elem, context, location);
726
727         match elem {
728             ProjectionElem::Deref => {
729                 let base_ty = Place::ty_from(place_local, proj_base, self.body, self.tcx).ty;
730                 if let ty::RawPtr(_) = base_ty.kind() {
731                     if proj_base.is_empty() {
732                         let decl = &self.body.local_decls[place_local];
733                         if let Some(box LocalInfo::StaticRef { def_id, .. }) = decl.local_info {
734                             let span = decl.source_info.span;
735                             self.check_static(def_id, span);
736                             return;
737                         }
738                     }
739                     self.check_op(ops::RawPtrDeref);
740                 }
741
742                 if context.is_mutating_use() {
743                     self.check_op(ops::MutDeref);
744                 }
745             }
746
747             ProjectionElem::ConstantIndex { .. }
748             | ProjectionElem::Downcast(..)
749             | ProjectionElem::Subslice { .. }
750             | ProjectionElem::Field(..)
751             | ProjectionElem::Index(_) => {}
752         }
753     }
754
755     fn visit_source_info(&mut self, source_info: &SourceInfo) {
756         trace!("visit_source_info: source_info={:?}", source_info);
757         self.span = source_info.span;
758     }
759
760     fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
761         trace!("visit_statement: statement={:?} location={:?}", statement, location);
762
763         self.super_statement(statement, location);
764
765         match statement.kind {
766             StatementKind::LlvmInlineAsm { .. } => {
767                 self.check_op(ops::InlineAsm);
768             }
769
770             StatementKind::Assign(..)
771             | StatementKind::SetDiscriminant { .. }
772             | StatementKind::FakeRead(..)
773             | StatementKind::StorageLive(_)
774             | StatementKind::StorageDead(_)
775             | StatementKind::Retag { .. }
776             | StatementKind::AscribeUserType(..)
777             | StatementKind::Coverage(..)
778             | StatementKind::CopyNonOverlapping(..)
779             | StatementKind::Nop => {}
780         }
781     }
782
783     #[instrument(level = "debug", skip(self))]
784     fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
785         use rustc_target::spec::abi::Abi::RustIntrinsic;
786
787         self.super_terminator(terminator, location);
788
789         match &terminator.kind {
790             TerminatorKind::Call { func, args, .. } => {
791                 let ConstCx { tcx, body, param_env, .. } = *self.ccx;
792                 let caller = self.def_id().to_def_id();
793
794                 let fn_ty = func.ty(body, tcx);
795
796                 let (mut callee, substs) = match *fn_ty.kind() {
797                     ty::FnDef(def_id, substs) => (def_id, substs),
798
799                     ty::FnPtr(_) => {
800                         self.check_op(ops::FnCallIndirect);
801                         return;
802                     }
803                     _ => {
804                         span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)
805                     }
806                 };
807
808                 let mut nonconst_call_permission = false;
809
810                 // Attempting to call a trait method?
811                 if let Some(trait_id) = tcx.trait_of_item(callee) {
812                     trace!("attempting to call a trait method");
813                     if !self.tcx.features().const_trait_impl {
814                         self.check_op(ops::FnCallNonConst);
815                         return;
816                     }
817
818                     let trait_ref = TraitRef::from_method(tcx, trait_id, substs);
819                     let obligation = Obligation::new(
820                         ObligationCause::dummy(),
821                         param_env,
822                         Binder::dummy(TraitPredicate {
823                             trait_ref,
824                             constness: hir::Constness::Const,
825                         }),
826                     );
827
828                     let implsrc = tcx.infer_ctxt().enter(|infcx| {
829                         let mut selcx = SelectionContext::with_constness(&infcx, hir::Constness::Const);
830                         selcx.select(&obligation)
831                     });
832
833                     match implsrc {
834                         Ok(Some(ImplSource::Param(_, hir::Constness::Const))) => {
835                             debug!(
836                                 "const_trait_impl: provided {:?} via where-clause in {:?}",
837                                  trait_ref, param_env
838                             );
839                             return;
840                         }
841                         Ok(Some(ImplSource::UserDefined(data))) => {
842                             let callee_name = tcx.item_name(callee);
843                             if let Some(&did) = tcx.associated_item_def_ids(data.impl_def_id).iter().find(|did| tcx.item_name(**did) == callee_name) {
844                                 callee = did;
845                             }
846                         }
847                         _ => {
848                             if !tcx.is_const_fn_raw(callee) {
849                                 // At this point, it is only legal when the caller is marked with
850                                 // #[default_method_body_is_const], and the callee is in the same
851                                 // trait.
852                                 let callee_trait = tcx.trait_of_item(callee);
853                                 if callee_trait.is_some() {
854                                     if tcx.has_attr(caller, sym::default_method_body_is_const) {
855                                         if tcx.trait_of_item(caller) == callee_trait {
856                                             nonconst_call_permission = true;
857                                         }
858                                     }
859                                 }
860
861                                 if !nonconst_call_permission {
862                                     self.check_op(ops::FnCallNonConst);
863                                     return;
864                                 }
865                             }
866                         }
867                     }
868
869                     // Resolve a trait method call to its concrete implementation, which may be in a
870                     // `const` trait impl.
871                     let instance = Instance::resolve(tcx, param_env, callee, substs);
872                     debug!("Resolving ({:?}) -> {:?}", callee, instance);
873                     if let Ok(Some(func)) = instance {
874                         if let InstanceDef::Item(def) = func.def {
875                             callee = def.did;
876                         }
877                     }
878                 }
879
880                 // At this point, we are calling a function, `callee`, whose `DefId` is known...
881                 if is_lang_panic_fn(tcx, callee) {
882                     self.check_op(ops::Panic);
883
884                     // const-eval of the `begin_panic` fn assumes the argument is `&str`
885                     if Some(callee) == tcx.lang_items().begin_panic_fn() {
886                         match args[0].ty(&self.ccx.body.local_decls, tcx).kind() {
887                             ty::Ref(_, ty, _) if ty.is_str() => (),
888                             _ => self.check_op(ops::PanicNonStr),
889                         }
890                     }
891
892                     return;
893                 }
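                // Illustration only: at this point `ops::Panic` is gated behind
                // `const_panic`, so without that feature both of these are rejected:
                //
                //     const fn clamp_idx(i: usize) -> usize { assert!(i < 16); i }  // `assert!` panics
                //     const BAD: () = panic!("boom");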
894
895                 // `async` blocks get lowered to `std::future::from_generator(/* a closure */)`.
896                 let is_async_block = Some(callee) == tcx.lang_items().from_generator_fn();
897                 if is_async_block {
898                     let kind = hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Block);
899                     self.check_op(ops::Generator(kind));
900                     return;
901                 }
902
903                 let is_intrinsic = tcx.fn_sig(callee).abi() == RustIntrinsic;
904
905                 if !tcx.is_const_fn_raw(callee) {
906                     if tcx.trait_of_item(callee).is_some() {
907                         if tcx.has_attr(callee, sym::default_method_body_is_const) {
908                             // To get here we must have already found a const impl for the
909                             // trait; if the callee is still not `const`, the impl may simply be
910                             // relying on the trait's default method body.
911                             nonconst_call_permission = true;
912                         }
913                     }
914
915                     if !nonconst_call_permission {
916                         self.check_op(ops::FnCallNonConst);
917                         return;
918                     }
919                 }
920
921                 // If the `const fn` we are trying to call is not const-stable, ensure that we have
922                 // the proper feature gate enabled.
923                 if let Some(gate) = is_unstable_const_fn(tcx, callee) {
924                     trace!(?gate, "calling unstable const fn");
925                     if self.span.allows_unstable(gate) {
926                         return;
927                     }
928
929                     // Calling an unstable function *always* requires that the corresponding gate
930                     // be enabled, even if the function has `#[rustc_allow_const_fn_unstable(the_gate)]`.
931                     if !tcx.features().declared_lib_features.iter().any(|&(sym, _)| sym == gate) {
932                         self.check_op(ops::FnCallUnstable(callee, Some(gate)));
933                         return;
934                     }
935
936                     // If this crate is not using stability attributes, or the caller is not claiming to be a
937                     // stable `const fn`, that is all that is required.
938                     if !self.ccx.is_const_stable_const_fn() {
939                         trace!("crate not using stability attributes or caller not stably const");
940                         return;
941                     }
942
943                     // Otherwise, we are something const-stable calling a const-unstable fn.
944
945                     if super::rustc_allow_const_fn_unstable(tcx, caller, gate) {
946                         trace!("rustc_allow_const_fn_unstable gate active");
947                         return;
948                     }
949
950                     self.check_op(ops::FnCallUnstable(callee, Some(gate)));
951                     return;
952                 }
953
954                 // FIXME(ecstaticmorse): For compatibility, we consider `unstable` callees that
955                 // have no `rustc_const_stable` attributes to be const-unstable as well. This
956                 // should be fixed later.
957                 let callee_is_unstable_unmarked = tcx.lookup_const_stability(callee).is_none()
958                     && tcx.lookup_stability(callee).map_or(false, |s| s.level.is_unstable());
959                 if callee_is_unstable_unmarked {
960                     trace!("callee_is_unstable_unmarked");
961                     // We do not use `const` modifiers for intrinsic "functions", as intrinsics are
962                 // `extern` functions, and these have no way to get marked `const`. So instead we
963                     // use `rustc_const_(un)stable` attributes to mean that the intrinsic is `const`
964                     if self.ccx.is_const_stable_const_fn() || is_intrinsic {
965                         self.check_op(ops::FnCallUnstable(callee, None));
966                         return;
967                     }
968                 }
969                 trace!("permitting call");
970             }
971
972             // Forbid all `Drop` terminators unless the place being dropped is a projection-free
973             // local that is known not to be `NeedsDrop`.
974             TerminatorKind::Drop { place: dropped_place, .. }
975             | TerminatorKind::DropAndReplace { place: dropped_place, .. } => {
976                 // If we are checking live drops after drop-elaboration, don't emit duplicate
977                 // errors here.
978                 if super::post_drop_elaboration::checking_enabled(self.ccx) {
979                     return;
980                 }
981
982                 let mut err_span = self.span;
983
984                 // Check to see if the type of this place can ever have a drop impl. If not, this
985                 // `Drop` terminator is frivolous.
986                 let ty_needs_drop =
987                     dropped_place.ty(self.body, self.tcx).ty.needs_drop(self.tcx, self.param_env);
988
989                 if !ty_needs_drop {
990                     return;
991                 }
992
993                 let needs_drop = if let Some(local) = dropped_place.as_local() {
994                     // Use the span where the local was declared as the span of the drop error.
995                     err_span = self.body.local_decls[local].source_info.span;
996                     self.qualifs.needs_drop(self.ccx, local, location)
997                 } else {
998                     true
999                 };
1000
1001                 if needs_drop {
1002                     self.check_op_spanned(
1003                         ops::LiveDrop { dropped_at: Some(terminator.source_info.span) },
1004                         err_span,
1005                     );
1006                 }
1007             }
1008
1009             TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm),
1010
1011             TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => {
1012                 self.check_op(ops::Generator(hir::GeneratorKind::Gen))
1013             }
1014
1015             TerminatorKind::Abort => {
1016                 // Cleanup blocks are skipped for const checking (see `visit_basic_block_data`).
1017                 span_bug!(self.span, "`Abort` terminator outside of cleanup block")
1018             }
1019
1020             TerminatorKind::Assert { .. }
1021             | TerminatorKind::FalseEdge { .. }
1022             | TerminatorKind::FalseUnwind { .. }
1023             | TerminatorKind::Goto { .. }
1024             | TerminatorKind::Resume
1025             | TerminatorKind::Return
1026             | TerminatorKind::SwitchInt { .. }
1027             | TerminatorKind::Unreachable => {}
1028         }
1029     }
1030 }
1031
1032 fn check_return_ty_is_sync(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, hir_id: HirId) {
1033     let ty = body.return_ty();
1034     tcx.infer_ctxt().enter(|infcx| {
1035         let cause = traits::ObligationCause::new(body.span, hir_id, traits::SharedStatic);
1036         let mut fulfillment_cx = traits::FulfillmentContext::new();
1037         let sync_def_id = tcx.require_lang_item(LangItem::Sync, Some(body.span));
1038         fulfillment_cx.register_bound(&infcx, ty::ParamEnv::empty(), ty, sync_def_id, cause);
1039         if let Err(err) = fulfillment_cx.select_all_or_error(&infcx) {
1040             infcx.report_fulfillment_errors(&err, None, false);
1041         }
1042     });
1043 }
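// Illustration only: the check above is what rejects a non-thread-local, immutable
// `static` whose type is not `Sync`, e.g.
//
//     static COUNTER: std::cell::Cell<u32> = std::cell::Cell::new(0);
//
// `static mut` and `#[thread_local]` statics are exempted by the caller in `check_body`.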
1044
1045 fn place_as_reborrow(
1046     tcx: TyCtxt<'tcx>,
1047     body: &Body<'tcx>,
1048     place: Place<'tcx>,
1049 ) -> Option<PlaceRef<'tcx>> {
1050     match place.as_ref().last_projection() {
1051         Some((place_base, ProjectionElem::Deref)) => {
1052             // A borrow of a `static` also looks like `&(*_1)` in the MIR, but `_1` is a `const`
1053             // that points to the allocation for the static. Don't treat these as reborrows.
1054             if body.local_decls[place_base.local].is_ref_to_static() {
1055                 None
1056             } else {
1057                 // Ensure the type being dereferenced is a reference and not a raw pointer.
1058                 // This is sufficient to prevent an access to a `static mut` from being marked as a
1059                 // reborrow, even if the check above were to disappear.
1060                 let inner_ty = place_base.ty(body, tcx).ty;
1061
1062                 if let ty::Ref(..) = inner_ty.kind() {
1063                     return Some(place_base);
1064                 } else {
1065                     return None;
1066                 }
1067             }
1068         }
1069         _ => None,
1070     }
1071 }
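// Illustration only: a reborrow such as
//
//     fn demo(x: &i32) -> &i32 { &*x }
//
// appears in MIR roughly as `_0 = &(*_1)`; `place_as_reborrow` returns the base place
// `_1`, so the borrow is const-checked like a copy of the existing reference rather
// than a fresh borrow.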
1072
1073 fn is_int_bool_or_char(ty: Ty<'_>) -> bool {
1074     ty.is_bool() || ty.is_integral() || ty.is_char()
1075 }
1076
1077 fn is_async_fn(ccx: &ConstCx<'_, '_>) -> bool {
1078     ccx.fn_sig().map_or(false, |sig| sig.header.asyncness == hir::IsAsync::Async)
1079 }
1080
1081 fn emit_unstable_in_stable_error(ccx: &ConstCx<'_, '_>, span: Span, gate: Symbol) {
1082     let attr_span = ccx.fn_sig().map_or(ccx.body.span, |sig| sig.span.shrink_to_lo());
1083
1084     ccx.tcx
1085         .sess
1086         .struct_span_err(
1087             span,
1088             &format!("const-stable function cannot use `#[feature({})]`", gate.as_str()),
1089         )
1090         .span_suggestion(
1091             attr_span,
1092             "if it is not part of the public API, make this function unstably const",
1093             concat!(r#"#[rustc_const_unstable(feature = "...", issue = "...")]"#, '\n').to_owned(),
1094             Applicability::HasPlaceholders,
1095         )
1096         .span_suggestion(
1097             attr_span,
1098             "otherwise `#[rustc_allow_const_fn_unstable]` can be used to bypass stability checks",
1099             format!("#[rustc_allow_const_fn_unstable({})]\n", gate),
1100             Applicability::MaybeIncorrect,
1101         )
1102         .emit();
1103 }
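// Illustration only (hypothetical item; `const_fn_union` is just an example gate): the
// error above fires when a const-stable function relies on an unstable const feature
// enabled in the crate, e.g.
//
//     #[stable(feature = "demo", since = "1.0.0")]
//     #[rustc_const_stable(feature = "demo", since = "1.0.0")]
//     pub const fn demo() { /* body uses an operation gated on `const_fn_union` */ }
//
// and the two suggestions offer either marking it `#[rustc_const_unstable(...)]` or
// adding `#[rustc_allow_const_fn_unstable(const_fn_union)]`.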