compiler/rustc_const_eval/src/transform/check_consts/check.rs
1 //! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.
2
3 use rustc_errors::{Applicability, Diagnostic, ErrorReported};
4 use rustc_hir as hir;
5 use rustc_hir::def_id::DefId;
6 use rustc_index::bit_set::BitSet;
7 use rustc_infer::infer::TyCtxtInferExt;
8 use rustc_infer::traits::{ImplSource, Obligation, ObligationCause};
9 use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
10 use rustc_middle::mir::*;
11 use rustc_middle::ty::cast::CastTy;
12 use rustc_middle::ty::subst::{GenericArgKind, InternalSubsts};
13 use rustc_middle::ty::{self, adjustment::PointerCast, Instance, InstanceDef, Ty, TyCtxt};
14 use rustc_middle::ty::{Binder, TraitPredicate, TraitRef};
15 use rustc_mir_dataflow::{self, Analysis};
16 use rustc_span::{sym, Span, Symbol};
17 use rustc_trait_selection::traits::SelectionContext;
18
19 use std::mem;
20 use std::ops::Deref;
21
22 use super::ops::{self, NonConstOp, Status};
23 use super::qualifs::{self, CustomEq, HasMutInterior, NeedsDrop, NeedsNonConstDrop};
24 use super::resolver::FlowSensitiveAnalysis;
25 use super::{ConstCx, Qualif};
26 use crate::const_eval::is_unstable_const_fn;
27
28 type QualifResults<'mir, 'tcx, Q> =
29     rustc_mir_dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'mir, 'tcx, Q>>;
30
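/// A lazily-initialized cache of dataflow results for the value qualifs tracked during const
/// checking. Each cursor is built the first time the corresponding query is made and is then
/// reused for all later lookups.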
31 #[derive(Default)]
32 pub struct Qualifs<'mir, 'tcx> {
33     has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
34     needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
35     needs_non_const_drop: Option<QualifResults<'mir, 'tcx, NeedsNonConstDrop>>,
36 }
37
38 impl<'mir, 'tcx> Qualifs<'mir, 'tcx> {
39     /// Returns `true` if `local` is `NeedsDrop` at the given `Location`.
40     ///
41     /// Only updates the cursor if absolutely necessary.
42     pub fn needs_drop(
43         &mut self,
44         ccx: &'mir ConstCx<'mir, 'tcx>,
45         local: Local,
46         location: Location,
47     ) -> bool {
48         let ty = ccx.body.local_decls[local].ty;
49         if !NeedsDrop::in_any_value_of_ty(ccx, ty) {
50             return false;
51         }
52
53         let needs_drop = self.needs_drop.get_or_insert_with(|| {
54             let ConstCx { tcx, body, .. } = *ccx;
55
56             FlowSensitiveAnalysis::new(NeedsDrop, ccx)
57                 .into_engine(tcx, &body)
58                 .iterate_to_fixpoint()
59                 .into_results_cursor(&body)
60         });
61
62         needs_drop.seek_before_primary_effect(location);
63         needs_drop.get().contains(local)
64     }
65
66     /// Returns `true` if `local` is `NeedsNonConstDrop` at the given `Location`.
67     ///
68     /// Only updates the cursor if absolutely necessary.
69     pub fn needs_non_const_drop(
70         &mut self,
71         ccx: &'mir ConstCx<'mir, 'tcx>,
72         local: Local,
73         location: Location,
74     ) -> bool {
75         let ty = ccx.body.local_decls[local].ty;
76         if !NeedsNonConstDrop::in_any_value_of_ty(ccx, ty) {
77             return false;
78         }
79
80         let needs_non_const_drop = self.needs_non_const_drop.get_or_insert_with(|| {
81             let ConstCx { tcx, body, .. } = *ccx;
82
83             FlowSensitiveAnalysis::new(NeedsNonConstDrop, ccx)
84                 .into_engine(tcx, &body)
85                 .iterate_to_fixpoint()
86                 .into_results_cursor(&body)
87         });
88
89         needs_non_const_drop.seek_before_primary_effect(location);
90         needs_non_const_drop.get().contains(local)
91     }
92
93     /// Returns `true` if `local` is `HasMutInterior` at the given `Location`.
94     ///
95     /// Only updates the cursor if absolutely necessary.
96     pub fn has_mut_interior(
97         &mut self,
98         ccx: &'mir ConstCx<'mir, 'tcx>,
99         local: Local,
100         location: Location,
101     ) -> bool {
102         let ty = ccx.body.local_decls[local].ty;
103         if !HasMutInterior::in_any_value_of_ty(ccx, ty) {
104             return false;
105         }
106
107         let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| {
108             let ConstCx { tcx, body, .. } = *ccx;
109
110             FlowSensitiveAnalysis::new(HasMutInterior, ccx)
111                 .into_engine(tcx, &body)
112                 .iterate_to_fixpoint()
113                 .into_results_cursor(&body)
114         });
115
116         has_mut_interior.seek_before_primary_effect(location);
117         has_mut_interior.get().contains(local)
118     }
119
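    /// Returns the qualifs of the value in the return place at the point of the `Return`
    /// terminator, or the conservative type-based qualifs for the return type if the body
    /// never returns.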
120     fn in_return_place(
121         &mut self,
122         ccx: &'mir ConstCx<'mir, 'tcx>,
123         error_occured: Option<ErrorReported>,
124     ) -> ConstQualifs {
125         // Find the `Return` terminator if one exists.
126         //
127         // If no `Return` terminator exists, this MIR is divergent. Just return the conservative
128         // qualifs for the return type.
129         let return_block = ccx
130             .body
131             .basic_blocks()
132             .iter_enumerated()
133             .find(|(_, block)| matches!(block.terminator().kind, TerminatorKind::Return))
134             .map(|(bb, _)| bb);
135
136         let return_block = match return_block {
137             None => return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty(), error_occured),
138             Some(bb) => bb,
139         };
140
141         let return_loc = ccx.body.terminator_loc(return_block);
142
143         let custom_eq = match ccx.const_kind() {
144             // We don't care whether a `const fn` returns a value that is not structurally
145             // matchable. Function calls are opaque and always use type-based qualification, so
146             // this value should never be used.
147             hir::ConstContext::ConstFn => true,
148
149             // If we know that all values of the return type are structurally matchable, there's no
150             // need to run dataflow.
151             _ if !CustomEq::in_any_value_of_ty(ccx, ccx.body.return_ty()) => false,
152
153             hir::ConstContext::Const | hir::ConstContext::Static(_) => {
154                 let mut cursor = FlowSensitiveAnalysis::new(CustomEq, ccx)
155                     .into_engine(ccx.tcx, &ccx.body)
156                     .iterate_to_fixpoint()
157                     .into_results_cursor(&ccx.body);
158
159                 cursor.seek_after_primary_effect(return_loc);
160                 cursor.get().contains(RETURN_PLACE)
161             }
162         };
163
164         ConstQualifs {
165             needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc),
166             needs_non_const_drop: self.needs_non_const_drop(ccx, RETURN_PLACE, return_loc),
167             has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc),
168             custom_eq,
169             error_occured,
170         }
171     }
172 }
173
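/// The MIR visitor that performs the actual const checking: it walks the body and reports every
/// operation that is not allowed in the current const context, delegating the per-operation
/// policy to the `NonConstOp` implementations in `super::ops`.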
174 pub struct Checker<'mir, 'tcx> {
175     ccx: &'mir ConstCx<'mir, 'tcx>,
176     qualifs: Qualifs<'mir, 'tcx>,
177
178     /// The span of the current statement.
179     span: Span,
180
181     /// A set that stores, for each local, whether there is a `StorageDead` statement for it somewhere in the body.
182     local_has_storage_dead: Option<BitSet<Local>>,
183
184     error_emitted: Option<ErrorReported>,
185     secondary_errors: Vec<Diagnostic>,
186 }
187
188 impl<'mir, 'tcx> Deref for Checker<'mir, 'tcx> {
189     type Target = ConstCx<'mir, 'tcx>;
190
191     fn deref(&self) -> &Self::Target {
192         &self.ccx
193     }
194 }
195
196 impl<'mir, 'tcx> Checker<'mir, 'tcx> {
197     pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self {
198         Checker {
199             span: ccx.body.span,
200             ccx,
201             qualifs: Default::default(),
202             local_has_storage_dead: None,
203             error_emitted: None,
204             secondary_errors: Vec::new(),
205         }
206     }
207
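    /// Entry point: checks the signature-level requirements for `const fn`s, visits the body,
    /// and finally flushes any buffered "secondary" diagnostics if no primary error was emitted.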
208     pub fn check_body(&mut self) {
209         let ConstCx { tcx, body, .. } = *self.ccx;
210         let def_id = self.ccx.def_id();
211
212         // `async` functions cannot be `const fn`. This is checked during AST lowering, so there's
213         // no need to emit duplicate errors here.
214         if is_async_fn(self.ccx) || body.generator.is_some() {
215             tcx.sess.delay_span_bug(body.span, "`async` functions cannot be `const fn`");
216             return;
217         }
218
219         // The local type and predicate checks are not free and only relevant for `const fn`s.
220         if self.const_kind() == hir::ConstContext::ConstFn {
221             // Prevent const trait methods from being annotated as `stable`.
222             // FIXME: Do this as part of stability checking.
223             if self.is_const_stable_const_fn() {
224                 if crate::const_eval::is_parent_const_impl_raw(tcx, def_id) {
225                     self.ccx
226                         .tcx
227                         .sess
228                         .struct_span_err(self.span, "trait methods cannot be stable const fn")
229                         .emit();
230                 }
231             }
232
233             self.check_item_predicates();
234
235             for (idx, local) in body.local_decls.iter_enumerated() {
236                 // Handle the return place below.
237                 if idx == RETURN_PLACE || local.internal {
238                     continue;
239                 }
240
241                 self.span = local.source_info.span;
242                 self.check_local_or_return_ty(local.ty, idx);
243             }
244
245             // impl trait is gone in MIR, so check the return type of a const fn by its signature
246             // instead of the type of the return place.
247             self.span = body.local_decls[RETURN_PLACE].source_info.span;
248             let return_ty = tcx.fn_sig(def_id).output();
249             self.check_local_or_return_ty(return_ty.skip_binder(), RETURN_PLACE);
250         }
251
252         if !tcx.has_attr(def_id.to_def_id(), sym::rustc_do_not_const_check) {
253             self.visit_body(&body);
254         }
255
256         // If we got through const-checking without emitting any "primary" errors, emit any
257         // "secondary" errors if they occurred.
258         let secondary_errors = mem::take(&mut self.secondary_errors);
259         if self.error_emitted.is_none() {
260             for error in secondary_errors {
261                 self.tcx.sess.diagnostic().emit_diagnostic(&error);
262             }
263         } else {
264             assert!(self.tcx.sess.has_errors());
265         }
266     }
267
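    /// Returns `true` if a `StorageDead` statement for `local` exists anywhere in the body.
    /// The set of such locals is computed once with a small visitor and then cached.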
268     fn local_has_storage_dead(&mut self, local: Local) -> bool {
269         let ccx = self.ccx;
270         self.local_has_storage_dead
271             .get_or_insert_with(|| {
272                 struct StorageDeads {
273                     locals: BitSet<Local>,
274                 }
275                 impl<'tcx> Visitor<'tcx> for StorageDeads {
276                     fn visit_statement(&mut self, stmt: &Statement<'tcx>, _: Location) {
277                         if let StatementKind::StorageDead(l) = stmt.kind {
278                             self.locals.insert(l);
279                         }
280                     }
281                 }
282                 let mut v = StorageDeads { locals: BitSet::new_empty(ccx.body.local_decls.len()) };
283                 v.visit_body(ccx.body);
284                 v.locals
285             })
286             .contains(local)
287     }
288
289     pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
290         self.qualifs.in_return_place(self.ccx, self.error_emitted)
291     }
292
293     /// Emits an error if an expression cannot be evaluated in the current context.
294     pub fn check_op(&mut self, op: impl NonConstOp) {
295         self.check_op_spanned(op, self.span);
296     }
297
298     /// Emits an error at the given `span` if an expression cannot be evaluated in the current
299     /// context.
300     pub fn check_op_spanned<O: NonConstOp>(&mut self, op: O, span: Span) {
301         let gate = match op.status_in_item(self.ccx) {
302             Status::Allowed => return,
303
304             Status::Unstable(gate) if self.tcx.features().enabled(gate) => {
305                 let unstable_in_stable = self.ccx.is_const_stable_const_fn()
306                     && !super::rustc_allow_const_fn_unstable(
307                         self.tcx,
308                         self.def_id().to_def_id(),
309                         gate,
310                     );
311                 if unstable_in_stable {
312                     emit_unstable_in_stable_error(self.ccx, span, gate);
313                 }
314
315                 return;
316             }
317
318             Status::Unstable(gate) => Some(gate),
319             Status::Forbidden => None,
320         };
321
322         if self.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you {
323             self.tcx.sess.miri_unleashed_feature(span, gate);
324             return;
325         }
326
327         let mut err = op.build_error(self.ccx, span);
328         assert!(err.is_error());
329
330         match op.importance() {
331             ops::DiagnosticImportance::Primary => {
332                 self.error_emitted = Some(ErrorReported);
333                 err.emit();
334             }
335
336             ops::DiagnosticImportance::Secondary => err.buffer(&mut self.secondary_errors),
337         }
338     }
339
340     fn check_static(&mut self, def_id: DefId, span: Span) {
341         if self.tcx.is_thread_local_static(def_id) {
342             self.tcx.sess.delay_span_bug(span, "TLS access is checked in `Rvalue::ThreadLocalRef`");
343         }
344         self.check_op_spanned(ops::StaticAccess, span)
345     }
346
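    /// Walks the type of a local (or of the return place) and reports components whose use is
    /// restricted in the current const context, e.g. mutable references, `impl Trait`,
    /// `fn` pointers, and trait objects.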
347     fn check_local_or_return_ty(&mut self, ty: Ty<'tcx>, local: Local) {
348         let kind = self.body.local_kind(local);
349
350         for ty in ty.walk() {
351             let ty = match ty.unpack() {
352                 GenericArgKind::Type(ty) => ty,
353
354                 // No constraints on lifetimes or constants, except potentially
355                 // constants' types, but `walk` will get to them as well.
356                 GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => continue,
357             };
358
359             match *ty.kind() {
360                 ty::Ref(_, _, hir::Mutability::Mut) => self.check_op(ops::ty::MutRef(kind)),
361                 ty::Opaque(..) => self.check_op(ops::ty::ImplTrait),
362                 ty::FnPtr(..) => self.check_op(ops::ty::FnPtr(kind)),
363
364                 ty::Dynamic(preds, _) => {
365                     for pred in preds.iter() {
366                         match pred.skip_binder() {
367                             ty::ExistentialPredicate::AutoTrait(_)
368                             | ty::ExistentialPredicate::Projection(_) => {
369                                 self.check_op(ops::ty::DynTrait(kind))
370                             }
371                             ty::ExistentialPredicate::Trait(trait_ref) => {
372                                 if Some(trait_ref.def_id) != self.tcx.lang_items().sized_trait() {
373                                     self.check_op(ops::ty::DynTrait(kind))
374                                 }
375                             }
376                         }
377                     }
378                 }
379                 _ => {}
380             }
381         }
382     }
383
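    /// Walks the predicates of the current item and of its parents and reports trait bounds on
    /// type parameters (other than `Sized`) via `ops::ty::TraitBound`; the remaining predicate
    /// kinds are either irrelevant here or impossible on a function.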
384     fn check_item_predicates(&mut self) {
385         let ConstCx { tcx, .. } = *self.ccx;
386
387         let mut current = self.def_id().to_def_id();
388         loop {
389             let predicates = tcx.predicates_of(current);
390             for (predicate, _) in predicates.predicates {
391                 match predicate.kind().skip_binder() {
392                     ty::PredicateKind::RegionOutlives(_)
393                     | ty::PredicateKind::TypeOutlives(_)
394                     | ty::PredicateKind::WellFormed(_)
395                     | ty::PredicateKind::Projection(_)
396                     | ty::PredicateKind::ConstEvaluatable(..)
397                     | ty::PredicateKind::ConstEquate(..)
398                     | ty::PredicateKind::TypeWellFormedFromEnv(..) => continue,
399                     ty::PredicateKind::ObjectSafe(_) => {
400                         bug!("object safe predicate on function: {:#?}", predicate)
401                     }
402                     ty::PredicateKind::ClosureKind(..) => {
403                         bug!("closure kind predicate on function: {:#?}", predicate)
404                     }
405                     ty::PredicateKind::Subtype(_) | ty::PredicateKind::Coerce(_) => {
406                         bug!("subtype/coerce predicate on function: {:#?}", predicate)
407                     }
408                     ty::PredicateKind::Trait(pred) => {
409                         if Some(pred.def_id()) == tcx.lang_items().sized_trait() {
410                             continue;
411                         }
412                         match pred.self_ty().kind() {
413                             ty::Param(p) => {
414                                 let generics = tcx.generics_of(current);
415                                 let def = generics.type_param(p, tcx);
416                                 let span = tcx.def_span(def.def_id);
417
418                                 // These are part of the function signature, so treat them like
419                                 // arguments when determining importance.
420                                 let kind = LocalKind::Arg;
421
422                                 self.check_op_spanned(ops::ty::TraitBound(kind), span);
423                             }
424                             // other kinds of bounds are either tautologies
425                             // or cause errors in other passes
426                             _ => continue,
427                         }
428                     }
429                 }
430             }
431             match predicates.parent {
432                 Some(parent) => current = parent,
433                 None => break,
434             }
435         }
436     }
437
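    /// Classifies a mutable borrow of `local` as either transient (it cannot escape into the
    /// final value) or as a potentially escaping `ops::MutBorrow`.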
438     fn check_mut_borrow(&mut self, local: Local, kind: hir::BorrowKind) {
439         match self.const_kind() {
440             // In a const fn all borrows are transient or point to the places given via
441             // references in the arguments (so we already checked them with
442             // TransientMutBorrow/MutBorrow as appropriate).
443             // The borrow checker guarantees that no new non-transient borrows are created.
444             // NOTE: Once we have heap allocations during CTFE we need to figure out
445             // how to prevent a `const fn` from creating long-lived allocations that point
446             // to mutable memory.
447             hir::ConstContext::ConstFn => self.check_op(ops::TransientMutBorrow(kind)),
448             _ => {
449                 // Locals with a `StorageDead` do not live beyond the evaluation, so they
450                 // can safely be borrowed without the borrow leaking into the final
451                 // value of the constant.
452                 if self.local_has_storage_dead(local) {
453                     self.check_op(ops::TransientMutBorrow(kind));
454                 } else {
455                     self.check_op(ops::MutBorrow(kind));
456                 }
457             }
458         }
459     }
460 }
461
462 impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
463     fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) {
464         trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup);
465
466         // We don't const-check basic blocks on the cleanup path since we never unwind during
467         // const-eval: a panic causes an immediate compile error. In other words, cleanup blocks
468         // are unreachable during const-eval.
469         //
470         // We can't be more conservative (e.g., by const-checking cleanup blocks anyways) because
471         // locals that would never be dropped during normal execution are sometimes dropped during
472         // unwinding, which means backwards-incompatible live-drop errors.
473         if block.is_cleanup {
474             return;
475         }
476
477         self.super_basic_block_data(bb, block);
478     }
479
480     fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
481         trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);
482
483         // Special-case reborrows to be more like a copy of a reference.
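        // (A reborrow is a borrow of a dereference of an existing reference, e.g. `&*r` or
        // `&mut *r`; see `place_as_reborrow` at the bottom of this file.)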
484         match *rvalue {
485             Rvalue::Ref(_, kind, place) => {
486                 if let Some(reborrowed_place_ref) = place_as_reborrow(self.tcx, self.body, place) {
487                     let ctx = match kind {
488                         BorrowKind::Shared => {
489                             PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow)
490                         }
491                         BorrowKind::Shallow => {
492                             PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow)
493                         }
494                         BorrowKind::Unique => {
495                             PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow)
496                         }
497                         BorrowKind::Mut { .. } => {
498                             PlaceContext::MutatingUse(MutatingUseContext::Borrow)
499                         }
500                     };
501                     self.visit_local(&reborrowed_place_ref.local, ctx, location);
502                     self.visit_projection(reborrowed_place_ref, ctx, location);
503                     return;
504                 }
505             }
506             Rvalue::AddressOf(mutbl, place) => {
507                 if let Some(reborrowed_place_ref) = place_as_reborrow(self.tcx, self.body, place) {
508                     let ctx = match mutbl {
509                         Mutability::Not => {
510                             PlaceContext::NonMutatingUse(NonMutatingUseContext::AddressOf)
511                         }
512                         Mutability::Mut => PlaceContext::MutatingUse(MutatingUseContext::AddressOf),
513                     };
514                     self.visit_local(&reborrowed_place_ref.local, ctx, location);
515                     self.visit_projection(reborrowed_place_ref, ctx, location);
516                     return;
517                 }
518             }
519             _ => {}
520         }
521
522         self.super_rvalue(rvalue, location);
523
524         match *rvalue {
525             Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess),
526
527             Rvalue::Use(_)
528             | Rvalue::Repeat(..)
529             | Rvalue::Discriminant(..)
530             | Rvalue::Len(_)
531             | Rvalue::Aggregate(..) => {}
532
533             Rvalue::Ref(_, kind @ BorrowKind::Mut { .. }, ref place)
534             | Rvalue::Ref(_, kind @ BorrowKind::Unique, ref place) => {
535                 let ty = place.ty(self.body, self.tcx).ty;
536                 let is_allowed = match ty.kind() {
537                     // Inside a `static mut`, `&mut [...]` is allowed.
538                     ty::Array(..) | ty::Slice(_)
539                         if self.const_kind() == hir::ConstContext::Static(hir::Mutability::Mut) =>
540                     {
541                         true
542                     }
543
544                     // FIXME(ecstaticmorse): We could allow `&mut []` inside a const context given
545                     // that this is merely a ZST and it is already eligible for promotion.
546                     // This may require an RFC?
547                     /*
548                     ty::Array(_, len) if len.try_eval_usize(cx.tcx, cx.param_env) == Some(0)
549                         => true,
550                     */
551                     _ => false,
552                 };
553
554                 if !is_allowed {
555                     if let BorrowKind::Mut { .. } = kind {
556                         self.check_mut_borrow(place.local, hir::BorrowKind::Ref)
557                     } else {
558                         self.check_op(ops::CellBorrow);
559                     }
560                 }
561             }
562
563             Rvalue::AddressOf(Mutability::Mut, ref place) => {
564                 self.check_mut_borrow(place.local, hir::BorrowKind::Raw)
565             }
566
567             Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Shallow, ref place)
568             | Rvalue::AddressOf(Mutability::Not, ref place) => {
569                 let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
570                     &self.ccx,
571                     &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
572                     place.as_ref(),
573                 );
574
575                 if borrowed_place_has_mut_interior {
576                     match self.const_kind() {
577                         // In a const fn all borrows are transient or point to the places given via
578                         // references in the arguments (so we already checked them with
579                         // TransientCellBorrow/CellBorrow as appropriate).
580                         // The borrow checker guarantees that no new non-transient borrows are created.
581                         // NOTE: Once we have heap allocations during CTFE we need to figure out
582                         // how to prevent a `const fn` from creating long-lived allocations that point
583                         // to (interior) mutable memory.
584                         hir::ConstContext::ConstFn => self.check_op(ops::TransientCellBorrow),
585                         _ => {
586                             // Locals with StorageDead are definitely not part of the final constant value, and
587                             // it is thus inherently safe to permit such locals to have their
588                             // address taken as we can't end up with a reference to them in the
589                             // final value.
590                             // Note: This is only sound if every local that has a `StorageDead` has a
591                             // `StorageDead` in every control flow path leading to a `return` terminator.
592                             if self.local_has_storage_dead(place.local) {
593                                 self.check_op(ops::TransientCellBorrow);
594                             } else {
595                                 self.check_op(ops::CellBorrow);
596                             }
597                         }
598                     }
599                 }
600             }
601
602             Rvalue::Cast(
603                 CastKind::Pointer(PointerCast::MutToConstPointer | PointerCast::ArrayToPointer),
604                 _,
605                 _,
606             ) => {}
607
608             Rvalue::Cast(
609                 CastKind::Pointer(
610                     PointerCast::UnsafeFnPointer
611                     | PointerCast::ClosureFnPointer(_)
612                     | PointerCast::ReifyFnPointer,
613                 ),
614                 _,
615                 _,
616             ) => self.check_op(ops::FnPtrCast),
617
618             Rvalue::Cast(CastKind::Pointer(PointerCast::Unsize), _, _) => {
619                 // Nothing to check here (`check_local_or_return_ty` ensures no trait objects occur
620                 // in the type of any local, which also excludes casts).
621             }
622
623             Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => {
624                 let operand_ty = operand.ty(self.body, self.tcx);
625                 let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast");
626                 let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
627
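                // Pointer-to-integer casts are reported here: the concrete address of an
                // allocation is not known during CTFE, so such a cast cannot produce a
                // meaningful value.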
628                 if let (CastTy::Ptr(_) | CastTy::FnPtr, CastTy::Int(_)) = (cast_in, cast_out) {
629                     self.check_op(ops::RawPtrToIntCast);
630                 }
631             }
632
633             Rvalue::NullaryOp(NullOp::SizeOf | NullOp::AlignOf, _) => {}
634             Rvalue::ShallowInitBox(_, _) => {}
635
636             Rvalue::UnaryOp(_, ref operand) => {
637                 let ty = operand.ty(self.body, self.tcx);
638                 if is_int_bool_or_char(ty) {
639                     // Int, bool, and char operations are fine.
640                 } else if ty.is_floating_point() {
641                     self.check_op(ops::FloatingPointOp);
642                 } else {
643                     span_bug!(self.span, "non-primitive type in `Rvalue::UnaryOp`: {:?}", ty);
644                 }
645             }
646
647             Rvalue::BinaryOp(op, box (ref lhs, ref rhs))
648             | Rvalue::CheckedBinaryOp(op, box (ref lhs, ref rhs)) => {
649                 let lhs_ty = lhs.ty(self.body, self.tcx);
650                 let rhs_ty = rhs.ty(self.body, self.tcx);
651
652                 if is_int_bool_or_char(lhs_ty) && is_int_bool_or_char(rhs_ty) {
653                     // Int, bool, and char operations are fine.
654                 } else if lhs_ty.is_fn_ptr() || lhs_ty.is_unsafe_ptr() {
655                     assert_eq!(lhs_ty, rhs_ty);
656                     assert!(
657                         op == BinOp::Eq
658                             || op == BinOp::Ne
659                             || op == BinOp::Le
660                             || op == BinOp::Lt
661                             || op == BinOp::Ge
662                             || op == BinOp::Gt
663                             || op == BinOp::Offset
664                     );
665
666                     self.check_op(ops::RawPtrComparison);
667                 } else if lhs_ty.is_floating_point() || rhs_ty.is_floating_point() {
668                     self.check_op(ops::FloatingPointOp);
669                 } else {
670                     span_bug!(
671                         self.span,
672                         "non-primitive type in `Rvalue::BinaryOp`: {:?} ⚬ {:?}",
673                         lhs_ty,
674                         rhs_ty
675                     );
676                 }
677             }
678         }
679     }
680
681     fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
682         self.super_operand(op, location);
683         if let Operand::Constant(c) = op {
684             if let Some(def_id) = c.check_static_ptr(self.tcx) {
685                 self.check_static(def_id, self.span);
686             }
687         }
688     }
689     fn visit_projection_elem(
690         &mut self,
691         place_local: Local,
692         proj_base: &[PlaceElem<'tcx>],
693         elem: PlaceElem<'tcx>,
694         context: PlaceContext,
695         location: Location,
696     ) {
697         trace!(
698             "visit_projection_elem: place_local={:?} proj_base={:?} elem={:?} \
699             context={:?} location={:?}",
700             place_local,
701             proj_base,
702             elem,
703             context,
704             location,
705         );
706
707         self.super_projection_elem(place_local, proj_base, elem, context, location);
708
709         match elem {
710             ProjectionElem::Deref => {
711                 let base_ty = Place::ty_from(place_local, proj_base, self.body, self.tcx).ty;
712                 if base_ty.is_unsafe_ptr() {
713                     if proj_base.is_empty() {
714                         let decl = &self.body.local_decls[place_local];
715                         if let Some(box LocalInfo::StaticRef { def_id, .. }) = decl.local_info {
716                             let span = decl.source_info.span;
717                             self.check_static(def_id, span);
718                             return;
719                         }
720                     }
721
722                     // `*const T` is stable, `*mut T` is not
723                     if !base_ty.is_mutable_ptr() {
724                         return;
725                     }
726
727                     self.check_op(ops::RawMutPtrDeref);
728                 }
729
730                 if context.is_mutating_use() {
731                     self.check_op(ops::MutDeref);
732                 }
733             }
734
735             ProjectionElem::ConstantIndex { .. }
736             | ProjectionElem::Downcast(..)
737             | ProjectionElem::Subslice { .. }
738             | ProjectionElem::Field(..)
739             | ProjectionElem::Index(_) => {}
740         }
741     }
742
743     fn visit_source_info(&mut self, source_info: &SourceInfo) {
744         trace!("visit_source_info: source_info={:?}", source_info);
745         self.span = source_info.span;
746     }
747
748     fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
749         trace!("visit_statement: statement={:?} location={:?}", statement, location);
750
751         self.super_statement(statement, location);
752
753         match statement.kind {
754             StatementKind::Assign(..)
755             | StatementKind::SetDiscriminant { .. }
756             | StatementKind::FakeRead(..)
757             | StatementKind::StorageLive(_)
758             | StatementKind::StorageDead(_)
759             | StatementKind::Retag { .. }
760             | StatementKind::AscribeUserType(..)
761             | StatementKind::Coverage(..)
762             | StatementKind::CopyNonOverlapping(..)
763             | StatementKind::Nop => {}
764         }
765     }
766
767     #[instrument(level = "debug", skip(self))]
768     fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
769         use rustc_target::spec::abi::Abi::RustIntrinsic;
770
771         self.super_terminator(terminator, location);
772
773         match &terminator.kind {
774             TerminatorKind::Call { func, args, .. } => {
775                 let ConstCx { tcx, body, param_env, .. } = *self.ccx;
776                 let caller = self.def_id().to_def_id();
777
778                 let fn_ty = func.ty(body, tcx);
779
780                 let (mut callee, mut substs) = match *fn_ty.kind() {
781                     ty::FnDef(def_id, substs) => (def_id, substs),
782
783                     ty::FnPtr(_) => {
784                         self.check_op(ops::FnCallIndirect);
785                         return;
786                     }
787                     _ => {
788                         span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)
789                     }
790                 };
791
792                 let mut nonconst_call_permission = false;
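                // Set to `true` below in the narrow cases where calling a not-yet-const function
                // is still allowed, e.g. from a `#[default_method_body_is_const]` body to another
                // method of the same trait.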
793
794                 // Attempting to call a trait method?
795                 if let Some(trait_id) = tcx.trait_of_item(callee) {
796                     trace!("attempting to call a trait method");
797                     if !self.tcx.features().const_trait_impl {
798                         self.check_op(ops::FnCallNonConst(Some((callee, substs))));
799                         return;
800                     }
801
802                     let trait_ref = TraitRef::from_method(tcx, trait_id, substs);
803                     let obligation = Obligation::new(
804                         ObligationCause::dummy(),
805                         param_env,
806                         Binder::dummy(TraitPredicate {
807                             trait_ref,
808                             constness: ty::BoundConstness::NotConst,
809                             polarity: ty::ImplPolarity::Positive,
810                         }),
811                     );
812
813                     let implsrc = tcx.infer_ctxt().enter(|infcx| {
814                         let mut selcx = SelectionContext::new(&infcx);
815                         selcx.select(&obligation)
816                     });
817
818                     match implsrc {
819                         Ok(Some(ImplSource::Param(_, ty::BoundConstness::ConstIfConst))) => {
820                             debug!(
821                                 "const_trait_impl: provided {:?} via where-clause in {:?}",
822                                 trait_ref, param_env
823                             );
824                             return;
825                         }
826                         Ok(Some(ImplSource::UserDefined(data))) => {
827                             if let hir::Constness::NotConst = tcx.impl_constness(data.impl_def_id) {
828                                 self.check_op(ops::FnCallNonConst(None));
829                                 return;
830                             }
831                             let callee_name = tcx.item_name(callee);
832                             if let Some(&did) = tcx
833                                 .associated_item_def_ids(data.impl_def_id)
834                                 .iter()
835                                 .find(|did| tcx.item_name(**did) == callee_name)
836                             {
837                                 // using internal substs is ok here, since this is only
838                                 // used for the `resolve` call below
839                                 substs = InternalSubsts::identity_for_item(tcx, did);
840                                 callee = did;
841                             }
842                         }
843                         _ if !tcx.is_const_fn_raw(callee) => {
844                             // At this point, it is only legal when the caller is marked with
845                             // #[default_method_body_is_const], and the callee is in the same
846                             // trait.
847                             let callee_trait = tcx.trait_of_item(callee);
848                             if callee_trait.is_some() {
849                                 if tcx.has_attr(caller, sym::default_method_body_is_const) {
850                                     if tcx.trait_of_item(caller) == callee_trait {
851                                         nonconst_call_permission = true;
852                                     }
853                                 }
854                             }
855
856                             if !nonconst_call_permission {
857                                 self.check_op(ops::FnCallNonConst(None));
858                                 return;
859                             }
860                         }
861                         _ => {}
862                     }
863
864                     // Resolve a trait method call to its concrete implementation, which may be in a
865                     // `const` trait impl.
866                     let instance = Instance::resolve(tcx, param_env, callee, substs);
867                     debug!("Resolving ({:?}) -> {:?}", callee, instance);
868                     if let Ok(Some(func)) = instance {
869                         if let InstanceDef::Item(def) = func.def {
870                             callee = def.did;
871                         }
872                     }
873                 }
874
875                 // At this point, we are calling a function, `callee`, whose `DefId` is known...
876
877                 // `begin_panic` and `panic_display` are generic functions that accept
878                 // types other than str. Check to enforce that only str can be used in
879                 // const-eval.
880
881                 // const-eval of the `begin_panic` fn assumes the argument is `&str`
882                 if Some(callee) == tcx.lang_items().begin_panic_fn() {
883                     match args[0].ty(&self.ccx.body.local_decls, tcx).kind() {
884                         ty::Ref(_, ty, _) if ty.is_str() => return,
885                         _ => self.check_op(ops::PanicNonStr),
886                     }
887                 }
888
889                 // const-eval of the `panic_display` fn assumes the argument is `&&str`
890                 if Some(callee) == tcx.lang_items().panic_display() {
891                     match args[0].ty(&self.ccx.body.local_decls, tcx).kind() {
892                         ty::Ref(_, ty, _) if matches!(ty.kind(), ty::Ref(_, ty, _) if ty.is_str()) =>
893                         {
894                             return;
895                         }
896                         _ => self.check_op(ops::PanicNonStr),
897                     }
898                 }
899
900                 if Some(callee) == tcx.lang_items().exchange_malloc_fn() {
901                     self.check_op(ops::HeapAllocation);
902                     return;
903                 }
904
905                 // `async` blocks get lowered to `std::future::from_generator(/* a closure */)`.
906                 let is_async_block = Some(callee) == tcx.lang_items().from_generator_fn();
907                 if is_async_block {
908                     let kind = hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Block);
909                     self.check_op(ops::Generator(kind));
910                     return;
911                 }
912
913                 let is_intrinsic = tcx.fn_sig(callee).abi() == RustIntrinsic;
914
915                 if !tcx.is_const_fn_raw(callee) {
916                     if tcx.trait_of_item(callee).is_some() {
917                         if tcx.has_attr(callee, sym::default_method_body_is_const) {
918                             // To get here we must have already found a const impl for the
919                             // trait, but the callee can still be non-const if the impl relies
920                             // on a default method body.
921                             nonconst_call_permission = true;
922                         }
923                     }
924
925                     if !nonconst_call_permission {
926                         self.check_op(ops::FnCallNonConst(None));
927                         return;
928                     }
929                 }
930
931                 // If the `const fn` we are trying to call is not const-stable, ensure that we have
932                 // the proper feature gate enabled.
933                 if let Some(gate) = is_unstable_const_fn(tcx, callee) {
934                     trace!(?gate, "calling unstable const fn");
935                     if self.span.allows_unstable(gate) {
936                         return;
937                     }
938
939                     // Calling an unstable function *always* requires that the corresponding gate
940                     // be enabled, even if the function has `#[rustc_allow_const_fn_unstable(the_gate)]`.
941                     if !tcx.features().declared_lib_features.iter().any(|&(sym, _)| sym == gate) {
942                         self.check_op(ops::FnCallUnstable(callee, Some(gate)));
943                         return;
944                     }
945
946                     // If this crate is not using stability attributes, or the caller is not claiming to be a
947                     // stable `const fn`, that is all that is required.
948                     if !self.ccx.is_const_stable_const_fn() {
949                         trace!("crate not using stability attributes or caller not stably const");
950                         return;
951                     }
952
953                     // Otherwise, we are something const-stable calling a const-unstable fn.
954
955                     if super::rustc_allow_const_fn_unstable(tcx, caller, gate) {
956                         trace!("rustc_allow_const_fn_unstable gate active");
957                         return;
958                     }
959
960                     self.check_op(ops::FnCallUnstable(callee, Some(gate)));
961                     return;
962                 }
963
964                 // FIXME(ecstaticmorse): For compatibility, we consider `unstable` callees that
965                 // have no `rustc_const_stable` attributes to be const-unstable as well. This
966                 // should be fixed later.
967                 let callee_is_unstable_unmarked = tcx.lookup_const_stability(callee).is_none()
968                     && tcx.lookup_stability(callee).map_or(false, |s| s.level.is_unstable());
969                 if callee_is_unstable_unmarked {
970                     trace!("callee_is_unstable_unmarked");
971                     // We do not use `const` modifiers for intrinsic "functions", as intrinsics are
972                     // `extern` functions, and these have no way to get marked `const`. So instead we
973                     // use `rustc_const_(un)stable` attributes to mean that the intrinsic is `const`
974                     if self.ccx.is_const_stable_const_fn() || is_intrinsic {
975                         self.check_op(ops::FnCallUnstable(callee, None));
976                         return;
977                     }
978                 }
979                 trace!("permitting call");
980             }
981
982             // Forbid all `Drop` terminators unless the place being dropped is a projection-free
983             // local that is known not to be `NeedsNonConstDrop`.
984             TerminatorKind::Drop { place: dropped_place, .. }
985             | TerminatorKind::DropAndReplace { place: dropped_place, .. } => {
986                 // If we are checking live drops after drop-elaboration, don't emit duplicate
987                 // errors here.
988                 if super::post_drop_elaboration::checking_enabled(self.ccx) {
989                     return;
990                 }
991
992                 let mut err_span = self.span;
993                 let ty_of_dropped_place = dropped_place.ty(self.body, self.tcx).ty;
994
995                 let ty_needs_non_const_drop =
996                     qualifs::NeedsNonConstDrop::in_any_value_of_ty(self.ccx, ty_of_dropped_place);
997
998                 debug!(?ty_of_dropped_place, ?ty_needs_non_const_drop);
999
1000                 if !ty_needs_non_const_drop {
1001                     return;
1002                 }
1003
1004                 let needs_non_const_drop = if let Some(local) = dropped_place.as_local() {
1005                     // Use the span where the local was declared as the span of the drop error.
1006                     err_span = self.body.local_decls[local].source_info.span;
1007                     self.qualifs.needs_non_const_drop(self.ccx, local, location)
1008                 } else {
1009                     true
1010                 };
1011
1012                 if needs_non_const_drop {
1013                     self.check_op_spanned(
1014                         ops::LiveDrop { dropped_at: Some(terminator.source_info.span) },
1015                         err_span,
1016                     );
1017                 }
1018             }
1019
1020             TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm),
1021
1022             TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => {
1023                 self.check_op(ops::Generator(hir::GeneratorKind::Gen))
1024             }
1025
1026             TerminatorKind::Abort => {
1027                 // Cleanup blocks are skipped for const checking (see `visit_basic_block_data`).
1028                 span_bug!(self.span, "`Abort` terminator outside of cleanup block")
1029             }
1030
1031             TerminatorKind::Assert { .. }
1032             | TerminatorKind::FalseEdge { .. }
1033             | TerminatorKind::FalseUnwind { .. }
1034             | TerminatorKind::Goto { .. }
1035             | TerminatorKind::Resume
1036             | TerminatorKind::Return
1037             | TerminatorKind::SwitchInt { .. }
1038             | TerminatorKind::Unreachable => {}
1039         }
1040     }
1041 }
1042
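/// Returns the base of `place` if the place is a reborrow, i.e. if its outermost projection is a
/// `Deref` of a place with reference type (and not the synthetic deref of a reference to a
/// `static`); otherwise returns `None`.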
1043 fn place_as_reborrow<'tcx>(
1044     tcx: TyCtxt<'tcx>,
1045     body: &Body<'tcx>,
1046     place: Place<'tcx>,
1047 ) -> Option<PlaceRef<'tcx>> {
1048     match place.as_ref().last_projection() {
1049         Some((place_base, ProjectionElem::Deref)) => {
1050             // A borrow of a `static` also looks like `&(*_1)` in the MIR, but `_1` is a `const`
1051             // that points to the allocation for the static. Don't treat these as reborrows.
1052             if body.local_decls[place_base.local].is_ref_to_static() {
1053                 None
1054             } else {
1055                 // Ensure the type being dereferenced is a reference and not a raw pointer.
1056                 // This is sufficient to prevent an access to a `static mut` from being marked as a
1057                 // reborrow, even if the check above were to disappear.
1058                 let inner_ty = place_base.ty(body, tcx).ty;
1059
1060                 if let ty::Ref(..) = inner_ty.kind() {
1061                     return Some(place_base);
1062                 } else {
1063                     return None;
1064                 }
1065             }
1066         }
1067         _ => None,
1068     }
1069 }
1070
1071 fn is_int_bool_or_char(ty: Ty<'_>) -> bool {
1072     ty.is_bool() || ty.is_integral() || ty.is_char()
1073 }
1074
1075 fn is_async_fn(ccx: &ConstCx<'_, '_>) -> bool {
1076     ccx.fn_sig().map_or(false, |sig| sig.header.asyncness == hir::IsAsync::Async)
1077 }
1078
1079 fn emit_unstable_in_stable_error(ccx: &ConstCx<'_, '_>, span: Span, gate: Symbol) {
1080     let attr_span = ccx.fn_sig().map_or(ccx.body.span, |sig| sig.span.shrink_to_lo());
1081
1082     ccx.tcx
1083         .sess
1084         .struct_span_err(
1085             span,
1086             &format!("const-stable function cannot use `#[feature({})]`", gate.as_str()),
1087         )
1088         .span_suggestion(
1089             attr_span,
1090             "if it is not part of the public API, make this function unstably const",
1091             concat!(r#"#[rustc_const_unstable(feature = "...", issue = "...")]"#, '\n').to_owned(),
1092             Applicability::HasPlaceholders,
1093         )
1094         .span_suggestion(
1095             attr_span,
1096             "otherwise `#[rustc_allow_const_fn_unstable]` can be used to bypass stability checks",
1097             format!("#[rustc_allow_const_fn_unstable({})]\n", gate),
1098             Applicability::MaybeIncorrect,
1099         )
1100         .emit();
1101 }