1 //! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.
2
3 use rustc_errors::{Applicability, Diagnostic, ErrorReported};
4 use rustc_hir as hir;
5 use rustc_hir::def_id::DefId;
6 use rustc_index::bit_set::BitSet;
7 use rustc_infer::infer::TyCtxtInferExt;
8 use rustc_infer::traits::{ImplSource, Obligation, ObligationCause};
9 use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
10 use rustc_middle::mir::*;
11 use rustc_middle::ty::cast::CastTy;
12 use rustc_middle::ty::subst::{GenericArgKind, InternalSubsts};
13 use rustc_middle::ty::{self, adjustment::PointerCast, Instance, InstanceDef, Ty, TyCtxt};
14 use rustc_middle::ty::{Binder, TraitPredicate, TraitRef, TypeFoldable};
15 use rustc_mir_dataflow::{self, Analysis};
16 use rustc_span::{sym, Span, Symbol};
17 use rustc_trait_selection::traits::SelectionContext;
18
19 use std::mem;
20 use std::ops::Deref;
21
22 use super::ops::{self, NonConstOp, Status};
23 use super::qualifs::{self, CustomEq, HasMutInterior, NeedsDrop, NeedsNonConstDrop};
24 use super::resolver::FlowSensitiveAnalysis;
25 use super::{ConstCx, Qualif};
26 use crate::const_eval::is_unstable_const_fn;
27
28 type QualifResults<'mir, 'tcx, Q> =
29     rustc_mir_dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'mir, 'tcx, Q>>;
30
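   /// Lazily-constructed dataflow cursors for the value-qualification analyses.
   ///
   /// Each cursor is built on first use (see the `get_or_insert_with` calls below), so a body
   /// that never needs a given qualif does not pay for its fixpoint computation.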
31 #[derive(Default)]
32 pub struct Qualifs<'mir, 'tcx> {
33     has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
34     needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
35     needs_non_const_drop: Option<QualifResults<'mir, 'tcx, NeedsNonConstDrop>>,
36 }
37
38 impl<'mir, 'tcx> Qualifs<'mir, 'tcx> {
39     /// Returns `true` if `local` is `NeedsDrop` at the given `Location`.
40     ///
41     /// Only updates the cursor if absolutely necessary.
42     pub fn needs_drop(
43         &mut self,
44         ccx: &'mir ConstCx<'mir, 'tcx>,
45         local: Local,
46         location: Location,
47     ) -> bool {
48         let ty = ccx.body.local_decls[local].ty;
49         // Peeking into opaque types causes cycles if the current function declares said opaque
50         // type. Thus we avoid short-circuiting on the type and instead run the more expensive
51         // analysis that looks at the actual usage within this function.
52         if !ty.has_opaque_types() && !NeedsDrop::in_any_value_of_ty(ccx, ty) {
53             return false;
54         }
55
56         let needs_drop = self.needs_drop.get_or_insert_with(|| {
57             let ConstCx { tcx, body, .. } = *ccx;
58
59             FlowSensitiveAnalysis::new(NeedsDrop, ccx)
60                 .into_engine(tcx, &body)
61                 .iterate_to_fixpoint()
62                 .into_results_cursor(&body)
63         });
64
65         needs_drop.seek_before_primary_effect(location);
66         needs_drop.get().contains(local)
67     }
68
69     /// Returns `true` if `local` is `NeedsNonConstDrop` at the given `Location`.
70     ///
71     /// Only updates the cursor if absolutely necessary.
72     pub fn needs_non_const_drop(
73         &mut self,
74         ccx: &'mir ConstCx<'mir, 'tcx>,
75         local: Local,
76         location: Location,
77     ) -> bool {
78         let ty = ccx.body.local_decls[local].ty;
79         if !NeedsNonConstDrop::in_any_value_of_ty(ccx, ty) {
80             return false;
81         }
82
83         let needs_non_const_drop = self.needs_non_const_drop.get_or_insert_with(|| {
84             let ConstCx { tcx, body, .. } = *ccx;
85
86             FlowSensitiveAnalysis::new(NeedsNonConstDrop, ccx)
87                 .into_engine(tcx, &body)
88                 .iterate_to_fixpoint()
89                 .into_results_cursor(&body)
90         });
91
92         needs_non_const_drop.seek_before_primary_effect(location);
93         needs_non_const_drop.get().contains(local)
94     }
95
96     /// Returns `true` if `local` is `HasMutInterior` at the given `Location`.
97     ///
98     /// Only updates the cursor if absolutely necessary.
99     pub fn has_mut_interior(
100         &mut self,
101         ccx: &'mir ConstCx<'mir, 'tcx>,
102         local: Local,
103         location: Location,
104     ) -> bool {
105         let ty = ccx.body.local_decls[local].ty;
106         // Peeking into opaque types causes cycles if the current function declares said opaque
107         // type. Thus we avoid short-circuiting on the type and instead run the more expensive
108         // analysis that looks at the actual usage within this function.
109         if !ty.has_opaque_types() && !HasMutInterior::in_any_value_of_ty(ccx, ty) {
110             return false;
111         }
112
113         let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| {
114             let ConstCx { tcx, body, .. } = *ccx;
115
116             FlowSensitiveAnalysis::new(HasMutInterior, ccx)
117                 .into_engine(tcx, &body)
118                 .iterate_to_fixpoint()
119                 .into_results_cursor(&body)
120         });
121
122         has_mut_interior.seek_before_primary_effect(location);
123         has_mut_interior.get().contains(local)
124     }
125
126     fn in_return_place(
127         &mut self,
128         ccx: &'mir ConstCx<'mir, 'tcx>,
129         error_occured: Option<ErrorReported>,
130     ) -> ConstQualifs {
131         // Find the `Return` terminator if one exists.
132         //
133         // If no `Return` terminator exists, this MIR is divergent. Just return the conservative
134         // qualifs for the return type.
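            // (For example, a body that ends by calling `panic!` or that loops forever has no
            // `Return` terminator at all.)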
135         let return_block = ccx
136             .body
137             .basic_blocks()
138             .iter_enumerated()
139             .find(|(_, block)| matches!(block.terminator().kind, TerminatorKind::Return))
140             .map(|(bb, _)| bb);
141
142         let return_block = match return_block {
143             None => return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty(), error_occured),
144             Some(bb) => bb,
145         };
146
147         let return_loc = ccx.body.terminator_loc(return_block);
148
149         let custom_eq = match ccx.const_kind() {
150             // We don't care whether a `const fn` returns a value that is not structurally
151             // matchable. Function calls are opaque and always use type-based qualification, so
152             // this value should never be used.
153             hir::ConstContext::ConstFn => true,
154
155             // If we know that all values of the return type are structurally matchable, there's no
156             // need to run dataflow.
157             // Opaque types do not participate in const generics or pattern matching, so we can safely count them out.
158             _ if ccx.body.return_ty().has_opaque_types()
159                 || !CustomEq::in_any_value_of_ty(ccx, ccx.body.return_ty()) =>
160             {
161                 false
162             }
163
164             hir::ConstContext::Const | hir::ConstContext::Static(_) => {
165                 let mut cursor = FlowSensitiveAnalysis::new(CustomEq, ccx)
166                     .into_engine(ccx.tcx, &ccx.body)
167                     .iterate_to_fixpoint()
168                     .into_results_cursor(&ccx.body);
169
170                 cursor.seek_after_primary_effect(return_loc);
171                 cursor.get().contains(RETURN_PLACE)
172             }
173         };
174
175         ConstQualifs {
176             needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc),
177             needs_non_const_drop: self.needs_non_const_drop(ccx, RETURN_PLACE, return_loc),
178             has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc),
179             custom_eq,
180             error_occured,
181         }
182     }
183 }
184
185 pub struct Checker<'mir, 'tcx> {
186     ccx: &'mir ConstCx<'mir, 'tcx>,
187     qualifs: Qualifs<'mir, 'tcx>,
188
189     /// The span of the current statement.
190     span: Span,
191
192     /// A set storing, for each local, whether a `StorageDead` statement for it exists somewhere in the body.
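        ///
        /// This is used to tell transient borrows (of locals that are dead again before the
        /// final value of the constant is produced) apart from borrows that could end up in
        /// that final value.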
193     local_has_storage_dead: Option<BitSet<Local>>,
194
195     error_emitted: Option<ErrorReported>,
196     secondary_errors: Vec<Diagnostic>,
197 }
198
199 impl<'mir, 'tcx> Deref for Checker<'mir, 'tcx> {
200     type Target = ConstCx<'mir, 'tcx>;
201
202     fn deref(&self) -> &Self::Target {
203         &self.ccx
204     }
205 }
206
207 impl<'mir, 'tcx> Checker<'mir, 'tcx> {
208     pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self {
209         Checker {
210             span: ccx.body.span,
211             ccx,
212             qualifs: Default::default(),
213             local_has_storage_dead: None,
214             error_emitted: None,
215             secondary_errors: Vec::new(),
216         }
217     }
218
219     pub fn check_body(&mut self) {
220         let ConstCx { tcx, body, .. } = *self.ccx;
221         let def_id = self.ccx.def_id();
222
223         // `async` functions cannot be `const fn`. This is checked during AST lowering, so there's
224         // no need to emit duplicate errors here.
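            // (e.g. `const async fn f() {}` has already been rejected by the time we get here,
            // so we just bail out quietly.)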
225         if is_async_fn(self.ccx) || body.generator.is_some() {
226             tcx.sess.delay_span_bug(body.span, "`async` functions cannot be `const fn`");
227             return;
228         }
229
230         // The local type and predicate checks are not free and only relevant for `const fn`s.
231         if self.const_kind() == hir::ConstContext::ConstFn {
232             // Prevent const trait methods from being annotated as `stable`.
233             // FIXME: Do this as part of stability checking.
234             if self.is_const_stable_const_fn() {
235                 if crate::const_eval::is_parent_const_impl_raw(tcx, def_id) {
236                     self.ccx
237                         .tcx
238                         .sess
239                         .struct_span_err(self.span, "trait methods cannot be stable const fn")
240                         .emit();
241                 }
242             }
243
244             self.check_item_predicates();
245
246             for (idx, local) in body.local_decls.iter_enumerated() {
247                 // Handle the return place below.
248                 if idx == RETURN_PLACE || local.internal {
249                     continue;
250                 }
251
252                 self.span = local.source_info.span;
253                 self.check_local_or_return_ty(local.ty, idx);
254             }
255
256             // impl trait is gone in MIR, so check the return type of a const fn by its signature
257             // instead of the type of the return place.
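                // (For illustration: for `const fn f() -> impl Sized { 0u8 }` it is the
                // `impl Sized` in the declared signature that we want
                // `check_local_or_return_ty` to see.)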
258             self.span = body.local_decls[RETURN_PLACE].source_info.span;
259             let return_ty = tcx.fn_sig(def_id).output();
260             self.check_local_or_return_ty(return_ty.skip_binder(), RETURN_PLACE);
261         }
262
263         if !tcx.has_attr(def_id.to_def_id(), sym::rustc_do_not_const_check) {
264             self.visit_body(&body);
265         }
266
267         // If we got through const-checking without emitting any "primary" errors, emit any
268         // "secondary" errors if they occurred.
269         let secondary_errors = mem::take(&mut self.secondary_errors);
270         if self.error_emitted.is_none() {
271             for error in secondary_errors {
272                 self.tcx.sess.diagnostic().emit_diagnostic(&error);
273             }
274         } else {
275             assert!(self.tcx.sess.has_errors());
276         }
277     }
278
279     fn local_has_storage_dead(&mut self, local: Local) -> bool {
280         let ccx = self.ccx;
281         self.local_has_storage_dead
282             .get_or_insert_with(|| {
283                 struct StorageDeads {
284                     locals: BitSet<Local>,
285                 }
286                 impl<'tcx> Visitor<'tcx> for StorageDeads {
287                     fn visit_statement(&mut self, stmt: &Statement<'tcx>, _: Location) {
288                         if let StatementKind::StorageDead(l) = stmt.kind {
289                             self.locals.insert(l);
290                         }
291                     }
292                 }
293                 let mut v = StorageDeads { locals: BitSet::new_empty(ccx.body.local_decls.len()) };
294                 v.visit_body(ccx.body);
295                 v.locals
296             })
297             .contains(local)
298     }
299
300     pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
301         self.qualifs.in_return_place(self.ccx, self.error_emitted)
302     }
303
304     /// Emits an error if an expression cannot be evaluated in the current context.
305     pub fn check_op(&mut self, op: impl NonConstOp) {
306         self.check_op_spanned(op, self.span);
307     }
308
309     /// Emits an error at the given `span` if an expression cannot be evaluated in the current
310     /// context.
311     pub fn check_op_spanned<O: NonConstOp>(&mut self, op: O, span: Span) {
312         let gate = match op.status_in_item(self.ccx) {
313             Status::Allowed => return,
314
315             Status::Unstable(gate) if self.tcx.features().enabled(gate) => {
316                 let unstable_in_stable = self.ccx.is_const_stable_const_fn()
317                     && !super::rustc_allow_const_fn_unstable(
318                         self.tcx,
319                         self.def_id().to_def_id(),
320                         gate,
321                     );
322                 if unstable_in_stable {
323                     emit_unstable_in_stable_error(self.ccx, span, gate);
324                 }
325
326                 return;
327             }
328
329             Status::Unstable(gate) => Some(gate),
330             Status::Forbidden => None,
331         };
332
333         if self.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you {
334             self.tcx.sess.miri_unleashed_feature(span, gate);
335             return;
336         }
337
338         let mut err = op.build_error(self.ccx, span);
339         assert!(err.is_error());
340
341         match op.importance() {
342             ops::DiagnosticImportance::Primary => {
343                 self.error_emitted = Some(ErrorReported);
344                 err.emit();
345             }
346
347             ops::DiagnosticImportance::Secondary => err.buffer(&mut self.secondary_errors),
348         }
349     }
350
351     fn check_static(&mut self, def_id: DefId, span: Span) {
352         if self.tcx.is_thread_local_static(def_id) {
353             self.tcx.sess.delay_span_bug(span, "TLS access is checked in `Rvalue::ThreadLocalRef`");
354         }
355         self.check_op_spanned(ops::StaticAccess, span)
356     }
357
358     fn check_local_or_return_ty(&mut self, ty: Ty<'tcx>, local: Local) {
359         let kind = self.body.local_kind(local);
360
361         for ty in ty.walk() {
362             let ty = match ty.unpack() {
363                 GenericArgKind::Type(ty) => ty,
364
365                 // No constraints on lifetimes or constants, except potentially
366                 // constants' types, but `walk` will get to them as well.
367                 GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => continue,
368             };
369
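                // (For illustration: a `&mut T` local or argument reaches the `MutRef` check
                // below, an opaque `impl Trait` type reaches `ImplTrait`, and most `dyn Trait`
                // types reach `DynTrait`.)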
370             match *ty.kind() {
371                 ty::Ref(_, _, hir::Mutability::Mut) => self.check_op(ops::ty::MutRef(kind)),
372                 ty::Opaque(..) => self.check_op(ops::ty::ImplTrait),
373                 ty::FnPtr(..) => self.check_op(ops::ty::FnPtr(kind)),
374
375                 ty::Dynamic(preds, _) => {
376                     for pred in preds.iter() {
377                         match pred.skip_binder() {
378                             ty::ExistentialPredicate::AutoTrait(_)
379                             | ty::ExistentialPredicate::Projection(_) => {
380                                 self.check_op(ops::ty::DynTrait(kind))
381                             }
382                             ty::ExistentialPredicate::Trait(trait_ref) => {
383                                 if Some(trait_ref.def_id) != self.tcx.lang_items().sized_trait() {
384                                     self.check_op(ops::ty::DynTrait(kind))
385                                 }
386                             }
387                         }
388                     }
389                 }
390                 _ => {}
391             }
392         }
393     }
394
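        /// Checks the `where`-clause predicates of the current item (and of its parents).
        ///
        /// For illustration: `const fn f<T: PartialEq>(x: T)` has a non-`Sized` trait bound on a
        /// type parameter, which is routed through `ops::ty::TraitBound` below.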
395     fn check_item_predicates(&mut self) {
396         let ConstCx { tcx, .. } = *self.ccx;
397
398         let mut current = self.def_id().to_def_id();
399         loop {
400             let predicates = tcx.predicates_of(current);
401             for (predicate, _) in predicates.predicates {
402                 match predicate.kind().skip_binder() {
403                     ty::PredicateKind::RegionOutlives(_)
404                     | ty::PredicateKind::TypeOutlives(_)
405                     | ty::PredicateKind::WellFormed(_)
406                     | ty::PredicateKind::Projection(_)
407                     | ty::PredicateKind::ConstEvaluatable(..)
408                     | ty::PredicateKind::ConstEquate(..)
409                     | ty::PredicateKind::OpaqueType(..)
410                     | ty::PredicateKind::TypeWellFormedFromEnv(..) => continue,
411                     ty::PredicateKind::ObjectSafe(_) => {
412                         bug!("object safe predicate on function: {:#?}", predicate)
413                     }
414                     ty::PredicateKind::ClosureKind(..) => {
415                         bug!("closure kind predicate on function: {:#?}", predicate)
416                     }
417                     ty::PredicateKind::Subtype(_) | ty::PredicateKind::Coerce(_) => {
418                         bug!("subtype/coerce predicate on function: {:#?}", predicate)
419                     }
420                     ty::PredicateKind::Trait(pred) => {
421                         if Some(pred.def_id()) == tcx.lang_items().sized_trait() {
422                             continue;
423                         }
424                         match pred.self_ty().kind() {
425                             ty::Param(p) => {
426                                 let generics = tcx.generics_of(current);
427                                 let def = generics.type_param(p, tcx);
428                                 let span = tcx.def_span(def.def_id);
429
430                                 // These are part of the function signature, so treat them like
431                                 // arguments when determining importance.
432                                 let kind = LocalKind::Arg;
433
434                                 self.check_op_spanned(ops::ty::TraitBound(kind), span);
435                             }
436                             // other kinds of bounds are either tautologies
437                             // or cause errors in other passes
438                             _ => continue,
439                         }
440                     }
441                 }
442             }
443             match predicates.parent {
444                 Some(parent) => current = parent,
445                 None => break,
446             }
447         }
448     }
449
450     fn check_mut_borrow(&mut self, local: Local, kind: hir::BorrowKind) {
451         match self.const_kind() {
452             // In a const fn all borrows are transient or point to the places given via
453             // references in the arguments (so we already checked them with
454             // TransientMutBorrow/MutBorrow as appropriate).
455             // The borrow checker guarantees that no new non-transient borrows are created.
456             // NOTE: Once we have heap allocations during CTFE we need to figure out
457             // how to prevent a `const fn` from creating long-lived allocations that point
458             // to mutable memory.
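                // For illustration (the `const fn` arm below): in
                // `const fn f() { let mut x = 0; let _r = &mut x; }` the borrow of `x` cannot
                // outlive the call, so it is classified as a `TransientMutBorrow`.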
459             hir::ConstContext::ConstFn => self.check_op(ops::TransientMutBorrow(kind)),
460             _ => {
461                 // Locals with StorageDead do not live beyond the evaluation and can
462                 // thus safely be borrowed without being able to be leaked to the final
463                 // value of the constant.
464                 if self.local_has_storage_dead(local) {
465                     self.check_op(ops::TransientMutBorrow(kind));
466                 } else {
467                     self.check_op(ops::MutBorrow(kind));
468                 }
469             }
470         }
471     }
472 }
473
474 impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
475     fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) {
476         trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup);
477
478         // We don't const-check basic blocks on the cleanup path since we never unwind during
479         // const-eval: a panic causes an immediate compile error. In other words, cleanup blocks
480         // are unreachable during const-eval.
481         //
482         // We can't be more conservative (e.g., by const-checking cleanup blocks anyways) because
483         // locals that would never be dropped during normal execution are sometimes dropped during
484         // unwinding, which means backwards-incompatible live-drop errors.
485         if block.is_cleanup {
486             return;
487         }
488
489         self.super_basic_block_data(bb, block);
490     }
491
492     fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
493         trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);
494
495         // Special-case reborrows to be more like a copy of a reference.
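            // (e.g. `&*r` for some `r: &T` reuses the existing borrow instead of creating a new
            // one, so below we only visit the base local and its projections and skip the usual
            // checks for a fresh borrow.)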
496         match *rvalue {
497             Rvalue::Ref(_, kind, place) => {
498                 if let Some(reborrowed_place_ref) = place_as_reborrow(self.tcx, self.body, place) {
499                     let ctx = match kind {
500                         BorrowKind::Shared => {
501                             PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow)
502                         }
503                         BorrowKind::Shallow => {
504                             PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow)
505                         }
506                         BorrowKind::Unique => {
507                             PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow)
508                         }
509                         BorrowKind::Mut { .. } => {
510                             PlaceContext::MutatingUse(MutatingUseContext::Borrow)
511                         }
512                     };
513                     self.visit_local(&reborrowed_place_ref.local, ctx, location);
514                     self.visit_projection(reborrowed_place_ref, ctx, location);
515                     return;
516                 }
517             }
518             Rvalue::AddressOf(mutbl, place) => {
519                 if let Some(reborrowed_place_ref) = place_as_reborrow(self.tcx, self.body, place) {
520                     let ctx = match mutbl {
521                         Mutability::Not => {
522                             PlaceContext::NonMutatingUse(NonMutatingUseContext::AddressOf)
523                         }
524                         Mutability::Mut => PlaceContext::MutatingUse(MutatingUseContext::AddressOf),
525                     };
526                     self.visit_local(&reborrowed_place_ref.local, ctx, location);
527                     self.visit_projection(reborrowed_place_ref, ctx, location);
528                     return;
529                 }
530             }
531             _ => {}
532         }
533
534         self.super_rvalue(rvalue, location);
535
536         match *rvalue {
537             Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess),
538
539             Rvalue::Use(_)
540             | Rvalue::Repeat(..)
541             | Rvalue::Discriminant(..)
542             | Rvalue::Len(_)
543             | Rvalue::Aggregate(..) => {}
544
545             Rvalue::Ref(_, kind @ BorrowKind::Mut { .. }, ref place)
546             | Rvalue::Ref(_, kind @ BorrowKind::Unique, ref place) => {
547                 let ty = place.ty(self.body, self.tcx).ty;
548                 let is_allowed = match ty.kind() {
549                     // Inside a `static mut`, `&mut [...]` is allowed.
550                     ty::Array(..) | ty::Slice(_)
551                         if self.const_kind() == hir::ConstContext::Static(hir::Mutability::Mut) =>
552                     {
553                         true
554                     }
555
556                     // FIXME(ecstaticmorse): We could allow `&mut []` inside a const context given
557                     // that this is merely a ZST and it is already eligible for promotion.
558                     // This may require an RFC?
559                     /*
560                     ty::Array(_, len) if len.try_eval_usize(cx.tcx, cx.param_env) == Some(0)
561                         => true,
562                     */
563                     _ => false,
564                 };
565
566                 if !is_allowed {
567                     if let BorrowKind::Mut { .. } = kind {
568                         self.check_mut_borrow(place.local, hir::BorrowKind::Ref)
569                     } else {
570                         self.check_op(ops::CellBorrow);
571                     }
572                 }
573             }
574
575             Rvalue::AddressOf(Mutability::Mut, ref place) => {
576                 self.check_mut_borrow(place.local, hir::BorrowKind::Raw)
577             }
578
579             Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Shallow, ref place)
580             | Rvalue::AddressOf(Mutability::Not, ref place) => {
581                 let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
582                     &self.ccx,
583                     &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
584                     place.as_ref(),
585                 );
586
587                 if borrowed_place_has_mut_interior {
588                     match self.const_kind() {
589                         // In a const fn all borrows are transient or point to the places given via
590                         // references in the arguments (so we already checked them with
591                         // TransientCellBorrow/CellBorrow as appropriate).
592                         // The borrow checker guarantees that no new non-transient borrows are created.
593                         // NOTE: Once we have heap allocations during CTFE we need to figure out
594                         // how to prevent a `const fn` from creating long-lived allocations that point
595                         // to (interior) mutable memory.
596                         hir::ConstContext::ConstFn => self.check_op(ops::TransientCellBorrow),
597                         _ => {
598                             // Locals with StorageDead are definitely not part of the final constant value, and
599                             // it is thus inherently safe to permit such locals to have their
600                             // address taken as we can't end up with a reference to them in the
601                             // final value.
602                             // Note: This is only sound if every local that has a `StorageDead` has a
603                             // `StorageDead` in every control flow path leading to a `return` terminator.
604                             if self.local_has_storage_dead(place.local) {
605                                 self.check_op(ops::TransientCellBorrow);
606                             } else {
607                                 self.check_op(ops::CellBorrow);
608                             }
609                         }
610                     }
611                 }
612             }
613
614             Rvalue::Cast(
615                 CastKind::Pointer(PointerCast::MutToConstPointer | PointerCast::ArrayToPointer),
616                 _,
617                 _,
618             ) => {}
619
620             Rvalue::Cast(
621                 CastKind::Pointer(
622                     PointerCast::UnsafeFnPointer
623                     | PointerCast::ClosureFnPointer(_)
624                     | PointerCast::ReifyFnPointer,
625                 ),
626                 _,
627                 _,
628             ) => self.check_op(ops::FnPtrCast),
629
630             Rvalue::Cast(CastKind::Pointer(PointerCast::Unsize), _, _) => {
631                 // Nothing to check here (`check_local_or_return_ty` ensures no trait objects occur
632                 // in the type of any local, which also excludes casts).
633             }
634
635             Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => {
636                 let operand_ty = operand.ty(self.body, self.tcx);
637                 let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast");
638                 let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
639
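                    // (For illustration: `some_raw_ptr as usize` lands here and is reported as a
                    // `RawPtrToIntCast`, since a pointer's address is not known at compile time.)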
640                 if let (CastTy::Ptr(_) | CastTy::FnPtr, CastTy::Int(_)) = (cast_in, cast_out) {
641                     self.check_op(ops::RawPtrToIntCast);
642                 }
643             }
644
645             Rvalue::NullaryOp(NullOp::SizeOf | NullOp::AlignOf, _) => {}
646             Rvalue::ShallowInitBox(_, _) => {}
647
648             Rvalue::UnaryOp(_, ref operand) => {
649                 let ty = operand.ty(self.body, self.tcx);
650                 if is_int_bool_or_char(ty) {
651                     // Int, bool, and char operations are fine.
652                 } else if ty.is_floating_point() {
653                     self.check_op(ops::FloatingPointOp);
654                 } else {
655                     span_bug!(self.span, "non-primitive type in `Rvalue::UnaryOp`: {:?}", ty);
656                 }
657             }
658
659             Rvalue::BinaryOp(op, box (ref lhs, ref rhs))
660             | Rvalue::CheckedBinaryOp(op, box (ref lhs, ref rhs)) => {
661                 let lhs_ty = lhs.ty(self.body, self.tcx);
662                 let rhs_ty = rhs.ty(self.body, self.tcx);
663
664                 if is_int_bool_or_char(lhs_ty) && is_int_bool_or_char(rhs_ty) {
665                     // Int, bool, and char operations are fine.
666                 } else if lhs_ty.is_fn_ptr() || lhs_ty.is_unsafe_ptr() {
667                     assert_eq!(lhs_ty, rhs_ty);
668                     assert!(
669                         op == BinOp::Eq
670                             || op == BinOp::Ne
671                             || op == BinOp::Le
672                             || op == BinOp::Lt
673                             || op == BinOp::Ge
674                             || op == BinOp::Gt
675                             || op == BinOp::Offset
676                     );
677
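                        // (e.g. `p == q` for two `*const u8` values ends up here; in general the
                        // result of such a comparison cannot be determined at compile time.)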
678                     self.check_op(ops::RawPtrComparison);
679                 } else if lhs_ty.is_floating_point() || rhs_ty.is_floating_point() {
680                     self.check_op(ops::FloatingPointOp);
681                 } else {
682                     span_bug!(
683                         self.span,
684                         "non-primitive type in `Rvalue::BinaryOp`: {:?} ⚬ {:?}",
685                         lhs_ty,
686                         rhs_ty
687                     );
688                 }
689             }
690         }
691     }
692
693     fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
694         self.super_operand(op, location);
695         if let Operand::Constant(c) = op {
696             if let Some(def_id) = c.check_static_ptr(self.tcx) {
697                 self.check_static(def_id, self.span);
698             }
699         }
700     }
701     fn visit_projection_elem(
702         &mut self,
703         place_local: Local,
704         proj_base: &[PlaceElem<'tcx>],
705         elem: PlaceElem<'tcx>,
706         context: PlaceContext,
707         location: Location,
708     ) {
709         trace!(
710             "visit_projection_elem: place_local={:?} proj_base={:?} elem={:?} \
711             context={:?} location={:?}",
712             place_local,
713             proj_base,
714             elem,
715             context,
716             location,
717         );
718
719         self.super_projection_elem(place_local, proj_base, elem, context, location);
720
721         match elem {
722             ProjectionElem::Deref => {
723                 let base_ty = Place::ty_from(place_local, proj_base, self.body, self.tcx).ty;
724                 if base_ty.is_unsafe_ptr() {
725                     if proj_base.is_empty() {
726                         let decl = &self.body.local_decls[place_local];
727                         if let Some(box LocalInfo::StaticRef { def_id, .. }) = decl.local_info {
728                             let span = decl.source_info.span;
729                             self.check_static(def_id, span);
730                             return;
731                         }
732                     }
733
734                     // `*const T` is stable, `*mut T` is not
735                     if !base_ty.is_mutable_ptr() {
736                         return;
737                     }
738
739                     self.check_op(ops::RawMutPtrDeref);
740                 }
741
742                 if context.is_mutating_use() {
743                     self.check_op(ops::MutDeref);
744                 }
745             }
746
747             ProjectionElem::ConstantIndex { .. }
748             | ProjectionElem::Downcast(..)
749             | ProjectionElem::Subslice { .. }
750             | ProjectionElem::Field(..)
751             | ProjectionElem::Index(_) => {}
752         }
753     }
754
755     fn visit_source_info(&mut self, source_info: &SourceInfo) {
756         trace!("visit_source_info: source_info={:?}", source_info);
757         self.span = source_info.span;
758     }
759
760     fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
761         trace!("visit_statement: statement={:?} location={:?}", statement, location);
762
763         self.super_statement(statement, location);
764
765         match statement.kind {
766             StatementKind::Assign(..)
767             | StatementKind::SetDiscriminant { .. }
768             | StatementKind::FakeRead(..)
769             | StatementKind::StorageLive(_)
770             | StatementKind::StorageDead(_)
771             | StatementKind::Retag { .. }
772             | StatementKind::AscribeUserType(..)
773             | StatementKind::Coverage(..)
774             | StatementKind::CopyNonOverlapping(..)
775             | StatementKind::Nop => {}
776         }
777     }
778
779     #[instrument(level = "debug", skip(self))]
780     fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
781         use rustc_target::spec::abi::Abi::RustIntrinsic;
782
783         self.super_terminator(terminator, location);
784
785         match &terminator.kind {
786             TerminatorKind::Call { func, args, .. } => {
787                 let ConstCx { tcx, body, param_env, .. } = *self.ccx;
788                 let caller = self.def_id().to_def_id();
789
790                 let fn_ty = func.ty(body, tcx);
791
792                 let (mut callee, mut substs) = match *fn_ty.kind() {
793                     ty::FnDef(def_id, substs) => (def_id, substs),
794
795                     ty::FnPtr(_) => {
796                         self.check_op(ops::FnCallIndirect);
797                         return;
798                     }
799                     _ => {
800                         span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)
801                     }
802                 };
803
804                 let mut nonconst_call_permission = false;
805
806                 // Attempting to call a trait method?
807                 if let Some(trait_id) = tcx.trait_of_item(callee) {
808                     trace!("attempting to call a trait method");
809                     if !self.tcx.features().const_trait_impl {
810                         self.check_op(ops::FnCallNonConst(Some((callee, substs))));
811                         return;
812                     }
813
814                     let trait_ref = TraitRef::from_method(tcx, trait_id, substs);
815                     let obligation = Obligation::new(
816                         ObligationCause::dummy(),
817                         param_env,
818                         Binder::dummy(TraitPredicate {
819                             trait_ref,
820                             constness: ty::BoundConstness::NotConst,
821                             polarity: ty::ImplPolarity::Positive,
822                         }),
823                     );
824
825                     let implsrc = tcx.infer_ctxt().enter(|infcx| {
826                         let mut selcx = SelectionContext::new(&infcx);
827                         selcx.select(&obligation)
828                     });
829
830                     match implsrc {
831                         Ok(Some(ImplSource::Param(_, ty::BoundConstness::ConstIfConst))) => {
832                             debug!(
833                                 "const_trait_impl: provided {:?} via where-clause in {:?}",
834                                 trait_ref, param_env
835                             );
836                             return;
837                         }
838                         Ok(Some(ImplSource::UserDefined(data))) => {
839                             if let hir::Constness::NotConst = tcx.impl_constness(data.impl_def_id) {
840                                 self.check_op(ops::FnCallNonConst(None));
841                                 return;
842                             }
843                             let callee_name = tcx.item_name(callee);
844                             if let Some(&did) = tcx
845                                 .associated_item_def_ids(data.impl_def_id)
846                                 .iter()
847                                 .find(|did| tcx.item_name(**did) == callee_name)
848                             {
849                                 // using internal substs is ok here, since this is only
850                                 // used for the `resolve` call below
851                                 substs = InternalSubsts::identity_for_item(tcx, did);
852                                 callee = did;
853                             }
854                         }
855                         _ if !tcx.is_const_fn_raw(callee) => {
856                             // At this point, the call is only legal if the caller is marked with
857                             // #[default_method_body_is_const] and the callee is in the same
858                             // trait.
859                             let callee_trait = tcx.trait_of_item(callee);
860                             if callee_trait.is_some() {
861                                 if tcx.has_attr(caller, sym::default_method_body_is_const) {
862                                     if tcx.trait_of_item(caller) == callee_trait {
863                                         nonconst_call_permission = true;
864                                     }
865                                 }
866                             }
867
868                             if !nonconst_call_permission {
869                                 self.check_op(ops::FnCallNonConst(None));
870                                 return;
871                             }
872                         }
873                         _ => {}
874                     }
875
876                     // Resolve a trait method call to its concrete implementation, which may be in a
877                     // `const` trait impl.
878                     let instance = Instance::resolve(tcx, param_env, callee, substs);
879                     debug!("Resolving ({:?}) -> {:?}", callee, instance);
880                     if let Ok(Some(func)) = instance {
881                         if let InstanceDef::Item(def) = func.def {
882                             callee = def.did;
883                         }
884                     }
885                 }
886
887                 // At this point, we are calling a function, `callee`, whose `DefId` is known...
888
889                 // `begin_panic` and `panic_display` are generic functions that accept
890                 // types other than `str`. Check to enforce that only `str` can be used in
891                 // const-eval.
892
893                 // const-eval of the `begin_panic` fn assumes the argument is `&str`
894                 if Some(callee) == tcx.lang_items().begin_panic_fn() {
895                     match args[0].ty(&self.ccx.body.local_decls, tcx).kind() {
896                         ty::Ref(_, ty, _) if ty.is_str() => return,
897                         _ => self.check_op(ops::PanicNonStr),
898                     }
899                 }
900
901                 // const-eval of the `panic_display` fn assumes the argument is `&&str`
902                 if Some(callee) == tcx.lang_items().panic_display() {
903                     match args[0].ty(&self.ccx.body.local_decls, tcx).kind() {
904                         ty::Ref(_, ty, _) if matches!(ty.kind(), ty::Ref(_, ty, _) if ty.is_str()) =>
905                         {
906                             return;
907                         }
908                         _ => self.check_op(ops::PanicNonStr),
909                     }
910                 }
911
912                 if Some(callee) == tcx.lang_items().exchange_malloc_fn() {
913                     self.check_op(ops::HeapAllocation);
914                     return;
915                 }
916
917                 // `async` blocks get lowered to `std::future::from_generator(/* a closure */)`.
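                    // (e.g. writing `async {}` inside a `const` would otherwise just look like an
                    // ordinary call to `from_generator` here.)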
918                 let is_async_block = Some(callee) == tcx.lang_items().from_generator_fn();
919                 if is_async_block {
920                     let kind = hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Block);
921                     self.check_op(ops::Generator(kind));
922                     return;
923                 }
924
925                 let is_intrinsic = tcx.fn_sig(callee).abi() == RustIntrinsic;
926
927                 if !tcx.is_const_fn_raw(callee) {
928                     if tcx.trait_of_item(callee).is_some() {
929                         if tcx.has_attr(callee, sym::default_method_body_is_const) {
930                             // To get here we must have already found a const impl for the
931                             // trait; the callee can still be non-const if the impl relies on
932                             // default method bodies.
933                             nonconst_call_permission = true;
934                         }
935                     }
936
937                     if !nonconst_call_permission {
938                         self.check_op(ops::FnCallNonConst(None));
939                         return;
940                     }
941                 }
942
943                 // If the `const fn` we are trying to call is not const-stable, ensure that we have
944                 // the proper feature gate enabled.
945                 if let Some(gate) = is_unstable_const_fn(tcx, callee) {
946                     trace!(?gate, "calling unstable const fn");
947                     if self.span.allows_unstable(gate) {
948                         return;
949                     }
950
951                     // Calling an unstable function *always* requires that the corresponding gate
952                     // be enabled, even if the function has `#[rustc_allow_const_fn_unstable(the_gate)]`.
953                     if !tcx.features().declared_lib_features.iter().any(|&(sym, _)| sym == gate) {
954                         self.check_op(ops::FnCallUnstable(callee, Some(gate)));
955                         return;
956                     }
957
958                     // If this crate is not using stability attributes, or the caller is not claiming to be a
959                     // stable `const fn`, that is all that is required.
960                     if !self.ccx.is_const_stable_const_fn() {
961                         trace!("crate not using stability attributes or caller not stably const");
962                         return;
963                     }
964
965                     // Otherwise, we are something const-stable calling a const-unstable fn.
966
967                     if super::rustc_allow_const_fn_unstable(tcx, caller, gate) {
968                         trace!("rustc_allow_const_fn_unstable gate active");
969                         return;
970                     }
971
972                     self.check_op(ops::FnCallUnstable(callee, Some(gate)));
973                     return;
974                 }
975
976                 // FIXME(ecstaticmorse): For compatibility, we consider `unstable` callees that
977                 // have no `rustc_const_stable` attributes to be const-unstable as well. This
978                 // should be fixed later.
979                 let callee_is_unstable_unmarked = tcx.lookup_const_stability(callee).is_none()
980                     && tcx.lookup_stability(callee).map_or(false, |s| s.level.is_unstable());
981                 if callee_is_unstable_unmarked {
982                     trace!("callee_is_unstable_unmarked");
983                     // We do not use `const` modifiers for intrinsic "functions", as intrinsics are
984                     // `extern` functions, and these have no way to get marked `const`. So instead we
985                     // use `rustc_const_(un)stable` attributes to mean that the intrinsic is `const`.
986                     if self.ccx.is_const_stable_const_fn() || is_intrinsic {
987                         self.check_op(ops::FnCallUnstable(callee, None));
988                         return;
989                     }
990                 }
991                 trace!("permitting call");
992             }
993
994             // Forbid all `Drop` terminators unless the place being dropped is a projection-free
995             // local that cannot be `NeedsNonConstDrop`.
996             TerminatorKind::Drop { place: dropped_place, .. }
997             | TerminatorKind::DropAndReplace { place: dropped_place, .. } => {
998                 // If we are checking live drops after drop-elaboration, don't emit duplicate
999                 // errors here.
1000                 if super::post_drop_elaboration::checking_enabled(self.ccx) {
1001                     return;
1002                 }
1003
1004                 let mut err_span = self.span;
1005                 let ty_of_dropped_place = dropped_place.ty(self.body, self.tcx).ty;
1006
1007                 let ty_needs_non_const_drop =
1008                     qualifs::NeedsNonConstDrop::in_any_value_of_ty(self.ccx, ty_of_dropped_place);
1009
1010                 debug!(?ty_of_dropped_place, ?ty_needs_non_const_drop);
1011
1012                 if !ty_needs_non_const_drop {
1013                     return;
1014                 }
1015
1016                 let needs_non_const_drop = if let Some(local) = dropped_place.as_local() {
1017                     // Use the span where the local was declared as the span of the drop error.
1018                     err_span = self.body.local_decls[local].source_info.span;
1019                     self.qualifs.needs_non_const_drop(self.ccx, local, location)
1020                 } else {
1021                     true
1022                 };
1023
1024                 if needs_non_const_drop {
1025                     self.check_op_spanned(
1026                         ops::LiveDrop { dropped_at: Some(terminator.source_info.span) },
1027                         err_span,
1028                     );
1029                 }
1030             }
1031
1032             TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm),
1033
1034             TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => {
1035                 self.check_op(ops::Generator(hir::GeneratorKind::Gen))
1036             }
1037
1038             TerminatorKind::Abort => {
1039                 // Cleanup blocks are skipped for const checking (see `visit_basic_block_data`).
1040                 span_bug!(self.span, "`Abort` terminator outside of cleanup block")
1041             }
1042
1043             TerminatorKind::Assert { .. }
1044             | TerminatorKind::FalseEdge { .. }
1045             | TerminatorKind::FalseUnwind { .. }
1046             | TerminatorKind::Goto { .. }
1047             | TerminatorKind::Resume
1048             | TerminatorKind::Return
1049             | TerminatorKind::SwitchInt { .. }
1050             | TerminatorKind::Unreachable => {}
1051         }
1052     }
1053 }
1054
1055 fn place_as_reborrow<'tcx>(
1056     tcx: TyCtxt<'tcx>,
1057     body: &Body<'tcx>,
1058     place: Place<'tcx>,
1059 ) -> Option<PlaceRef<'tcx>> {
1060     match place.as_ref().last_projection() {
1061         Some((place_base, ProjectionElem::Deref)) => {
1062             // A borrow of a `static` also looks like `&(*_1)` in the MIR, but `_1` is a `const`
1063             // that points to the allocation for the static. Don't treat these as reborrows.
1064             if body.local_decls[place_base.local].is_ref_to_static() {
1065                 None
1066             } else {
1067                 // Ensure the type being dereferenced is a reference and not a raw pointer.
1068                 // This is sufficient to prevent an access to a `static mut` from being marked as a
1069                 // reborrow, even if the check above were to disappear.
1070                 let inner_ty = place_base.ty(body, tcx).ty;
1071
1072                 if let ty::Ref(..) = inner_ty.kind() {
1073                     return Some(place_base);
1074                 } else {
1075                     return None;
1076                 }
1077             }
1078         }
1079         _ => None,
1080     }
1081 }
1082
1083 fn is_int_bool_or_char(ty: Ty<'_>) -> bool {
1084     ty.is_bool() || ty.is_integral() || ty.is_char()
1085 }
1086
1087 fn is_async_fn(ccx: &ConstCx<'_, '_>) -> bool {
1088     ccx.fn_sig().map_or(false, |sig| sig.header.asyncness == hir::IsAsync::Async)
1089 }
1090
1091 fn emit_unstable_in_stable_error(ccx: &ConstCx<'_, '_>, span: Span, gate: Symbol) {
1092     let attr_span = ccx.fn_sig().map_or(ccx.body.span, |sig| sig.span.shrink_to_lo());
1093
1094     ccx.tcx
1095         .sess
1096         .struct_span_err(
1097             span,
1098             &format!("const-stable function cannot use `#[feature({})]`", gate.as_str()),
1099         )
1100         .span_suggestion(
1101             attr_span,
1102             "if it is not part of the public API, make this function unstably const",
1103             concat!(r#"#[rustc_const_unstable(feature = "...", issue = "...")]"#, '\n').to_owned(),
1104             Applicability::HasPlaceholders,
1105         )
1106         .span_suggestion(
1107             attr_span,
1108             "otherwise `#[rustc_allow_const_fn_unstable]` can be used to bypass stability checks",
1109             format!("#[rustc_allow_const_fn_unstable({})]\n", gate),
1110             Applicability::MaybeIncorrect,
1111         )
1112         .emit();
1113 }