1 //! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.
2
3 use rustc_errors::{Applicability, Diagnostic, ErrorReported};
4 use rustc_hir::def_id::DefId;
5 use rustc_hir::{self as hir, HirId, LangItem};
6 use rustc_index::bit_set::BitSet;
7 use rustc_infer::infer::TyCtxtInferExt;
8 use rustc_infer::traits::{ImplSource, Obligation, ObligationCause};
9 use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
10 use rustc_middle::mir::*;
11 use rustc_middle::ty::cast::CastTy;
12 use rustc_middle::ty::subst::{GenericArgKind, InternalSubsts};
13 use rustc_middle::ty::{self, adjustment::PointerCast, Instance, InstanceDef, Ty, TyCtxt};
14 use rustc_middle::ty::{Binder, TraitPredicate, TraitRef};
15 use rustc_mir_dataflow::impls::MaybeMutBorrowedLocals;
16 use rustc_mir_dataflow::{self, Analysis};
17 use rustc_span::{sym, Span, Symbol};
18 use rustc_trait_selection::traits::error_reporting::InferCtxtExt;
19 use rustc_trait_selection::traits::{self, SelectionContext, TraitEngine};
20
21 use std::mem;
22 use std::ops::Deref;
23
24 use super::ops::{self, NonConstOp, Status};
25 use super::qualifs::{self, CustomEq, HasMutInterior, NeedsDrop, NeedsNonConstDrop};
26 use super::resolver::FlowSensitiveAnalysis;
27 use super::{is_lang_panic_fn, is_lang_special_const_fn, ConstCx, Qualif};
28 use crate::const_eval::is_unstable_const_fn;
29
30 // We are using `MaybeMutBorrowedLocals` as a proxy for whether a local may have been mutated
31 // through a pointer prior to the given point. This is okay even though `MaybeMutBorrowedLocals`
32 // kills locals upon `StorageDead` because a local will never be used after a `StorageDead`.
33 type IndirectlyMutableResults<'mir, 'tcx> =
34     rustc_mir_dataflow::ResultsCursor<'mir, 'tcx, MaybeMutBorrowedLocals<'mir, 'tcx>>;
35
36 type QualifResults<'mir, 'tcx, Q> =
37     rustc_mir_dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'mir, 'tcx, Q>>;
38
39 #[derive(Default)]
40 pub struct Qualifs<'mir, 'tcx> {
41     has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
42     needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
43     needs_non_const_drop: Option<QualifResults<'mir, 'tcx, NeedsNonConstDrop>>,
44     indirectly_mutable: Option<IndirectlyMutableResults<'mir, 'tcx>>,
45 }
46
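// The fields of `Qualifs` are lazily constructed dataflow cursors: the underlying analysis only
// runs the first time the corresponding query is made, and the cursor is then reused for later
// locations. For instance, `needs_drop` below first checks `NeedsDrop::in_any_value_of_ty` and
// returns early, never building the dataflow results, if the type can never need a drop.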
47 impl Qualifs<'mir, 'tcx> {
48     pub fn indirectly_mutable(
49         &mut self,
50         ccx: &'mir ConstCx<'mir, 'tcx>,
51         local: Local,
52         location: Location,
53     ) -> bool {
54         let indirectly_mutable = self.indirectly_mutable.get_or_insert_with(|| {
55             let ConstCx { tcx, body, param_env, .. } = *ccx;
56
57             // We can use `unsound_ignore_borrow_on_drop` here because custom drop impls are not
58             // allowed in a const.
59             //
60             // FIXME(ecstaticmorse): Someday we want to allow custom drop impls. How do we do this
61             // without breaking stable code?
62             MaybeMutBorrowedLocals::mut_borrows_only(tcx, &body, param_env)
63                 .unsound_ignore_borrow_on_drop()
64                 .into_engine(tcx, &body)
65                 .pass_name("const_qualification")
66                 .iterate_to_fixpoint()
67                 .into_results_cursor(&body)
68         });
69
70         indirectly_mutable.seek_before_primary_effect(location);
71         indirectly_mutable.get().contains(local)
72     }
73
74     /// Returns `true` if `local` is `NeedsDrop` at the given `Location`.
75     ///
76     /// Only updates the cursor if absolutely necessary.
77     pub fn needs_drop(
78         &mut self,
79         ccx: &'mir ConstCx<'mir, 'tcx>,
80         local: Local,
81         location: Location,
82     ) -> bool {
83         let ty = ccx.body.local_decls[local].ty;
84         if !NeedsDrop::in_any_value_of_ty(ccx, ty) {
85             return false;
86         }
87
88         let needs_drop = self.needs_drop.get_or_insert_with(|| {
89             let ConstCx { tcx, body, .. } = *ccx;
90
91             FlowSensitiveAnalysis::new(NeedsDrop, ccx)
92                 .into_engine(tcx, &body)
93                 .iterate_to_fixpoint()
94                 .into_results_cursor(&body)
95         });
96
97         needs_drop.seek_before_primary_effect(location);
98         needs_drop.get().contains(local) || self.indirectly_mutable(ccx, local, location)
99     }
100
101     /// Returns `true` if `local` is `NeedsNonConstDrop` at the given `Location`.
102     ///
103     /// Only updates the cursor if absolutely necessary.
104     pub fn needs_non_const_drop(
105         &mut self,
106         ccx: &'mir ConstCx<'mir, 'tcx>,
107         local: Local,
108         location: Location,
109     ) -> bool {
110         let ty = ccx.body.local_decls[local].ty;
111         if !NeedsNonConstDrop::in_any_value_of_ty(ccx, ty) {
112             return false;
113         }
114
115         let needs_non_const_drop = self.needs_non_const_drop.get_or_insert_with(|| {
116             let ConstCx { tcx, body, .. } = *ccx;
117
118             FlowSensitiveAnalysis::new(NeedsNonConstDrop, ccx)
119                 .into_engine(tcx, &body)
120                 .iterate_to_fixpoint()
121                 .into_results_cursor(&body)
122         });
123
124         needs_non_const_drop.seek_before_primary_effect(location);
125         needs_non_const_drop.get().contains(local) || self.indirectly_mutable(ccx, local, location)
126     }
127
128     /// Returns `true` if `local` is `HasMutInterior` at the given `Location`.
129     ///
130     /// Only updates the cursor if absolutely necessary.
131     pub fn has_mut_interior(
132         &mut self,
133         ccx: &'mir ConstCx<'mir, 'tcx>,
134         local: Local,
135         location: Location,
136     ) -> bool {
137         let ty = ccx.body.local_decls[local].ty;
138         if !HasMutInterior::in_any_value_of_ty(ccx, ty) {
139             return false;
140         }
141
142         let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| {
143             let ConstCx { tcx, body, .. } = *ccx;
144
145             FlowSensitiveAnalysis::new(HasMutInterior, ccx)
146                 .into_engine(tcx, &body)
147                 .iterate_to_fixpoint()
148                 .into_results_cursor(&body)
149         });
150
151         has_mut_interior.seek_before_primary_effect(location);
152         has_mut_interior.get().contains(local) || self.indirectly_mutable(ccx, local, location)
153     }
154
155     fn in_return_place(
156         &mut self,
157         ccx: &'mir ConstCx<'mir, 'tcx>,
158         error_occured: Option<ErrorReported>,
159     ) -> ConstQualifs {
160         // Find the `Return` terminator if one exists.
161         //
162         // If no `Return` terminator exists, this MIR is divergent. Just return the conservative
163         // qualifs for the return type.
164         let return_block = ccx
165             .body
166             .basic_blocks()
167             .iter_enumerated()
168             .find(|(_, block)| match block.terminator().kind {
169                 TerminatorKind::Return => true,
170                 _ => false,
171             })
172             .map(|(bb, _)| bb);
173
174         let return_block = match return_block {
175             None => return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty(), error_occured),
176             Some(bb) => bb,
177         };
178
179         let return_loc = ccx.body.terminator_loc(return_block);
180
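        // `custom_eq` feeds structural-match checking when this `const` is later used as a
        // pattern: roughly, if the returned value may be of a type whose `PartialEq` impl is
        // user-written rather than derived, it is flagged here.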
181         let custom_eq = match ccx.const_kind() {
182             // We don't care whether a `const fn` returns a value that is not structurally
183             // matchable. Function calls are opaque and always use type-based qualification, so
184             // this value should never be used.
185             hir::ConstContext::ConstFn => true,
186
187             // If we know that all values of the return type are structurally matchable, there's no
188             // need to run dataflow.
189             _ if !CustomEq::in_any_value_of_ty(ccx, ccx.body.return_ty()) => false,
190
191             hir::ConstContext::Const | hir::ConstContext::Static(_) => {
192                 let mut cursor = FlowSensitiveAnalysis::new(CustomEq, ccx)
193                     .into_engine(ccx.tcx, &ccx.body)
194                     .iterate_to_fixpoint()
195                     .into_results_cursor(&ccx.body);
196
197                 cursor.seek_after_primary_effect(return_loc);
198                 cursor.contains(RETURN_PLACE)
199             }
200         };
201
202         ConstQualifs {
203             needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc),
204             needs_non_const_drop: self.needs_non_const_drop(ccx, RETURN_PLACE, return_loc),
205             has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc),
206             custom_eq,
207             error_occured,
208         }
209     }
210 }
211
212 pub struct Checker<'mir, 'tcx> {
213     ccx: &'mir ConstCx<'mir, 'tcx>,
214     qualifs: Qualifs<'mir, 'tcx>,
215
216     /// The span of the current statement.
217     span: Span,
218
219     /// The set of locals that have a `StorageDead` statement for them somewhere in the body.
220     local_has_storage_dead: Option<BitSet<Local>>,
221
222     error_emitted: Option<ErrorReported>,
223     secondary_errors: Vec<Diagnostic>,
224 }
225
226 impl Deref for Checker<'mir, 'tcx> {
227     type Target = ConstCx<'mir, 'tcx>;
228
229     fn deref(&self) -> &Self::Target {
230         &self.ccx
231     }
232 }
233
234 impl Checker<'mir, 'tcx> {
235     pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self {
236         Checker {
237             span: ccx.body.span,
238             ccx,
239             qualifs: Default::default(),
240             local_has_storage_dead: None,
241             error_emitted: None,
242             secondary_errors: Vec::new(),
243         }
244     }
245
246     pub fn check_body(&mut self) {
247         let ConstCx { tcx, body, .. } = *self.ccx;
248         let def_id = self.ccx.def_id();
249
250         // `async` functions cannot be `const fn`. This is checked during AST lowering, so there's
251         // no need to emit duplicate errors here.
252         if is_async_fn(self.ccx) || body.generator.is_some() {
253             tcx.sess.delay_span_bug(body.span, "`async` functions cannot be `const fn`");
254             return;
255         }
256
257         // The local type and predicate checks are not free and only relevant for `const fn`s.
258         if self.const_kind() == hir::ConstContext::ConstFn {
259             // Prevent const trait methods from being annotated as `stable`.
260             // FIXME: Do this as part of stability checking.
261             if self.is_const_stable_const_fn() {
262                 let hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
263                 if crate::const_eval::is_parent_const_impl_raw(tcx, hir_id) {
264                     self.ccx
265                         .tcx
266                         .sess
267                         .struct_span_err(self.span, "trait methods cannot be stable const fn")
268                         .emit();
269                 }
270             }
271
272             self.check_item_predicates();
273
274             for (idx, local) in body.local_decls.iter_enumerated() {
275                 // Handle the return place below.
276                 if idx == RETURN_PLACE || local.internal {
277                     continue;
278                 }
279
280                 self.span = local.source_info.span;
281                 self.check_local_or_return_ty(local.ty, idx);
282             }
283
284             // impl trait is gone in MIR, so check the return type of a const fn by its signature
285             // instead of the type of the return place.
286             self.span = body.local_decls[RETURN_PLACE].source_info.span;
287             let return_ty = tcx.fn_sig(def_id).output();
288             self.check_local_or_return_ty(return_ty.skip_binder(), RETURN_PLACE);
289         }
290
291         if !tcx.has_attr(def_id.to_def_id(), sym::rustc_do_not_const_check) {
292             self.visit_body(&body);
293         }
294
295         // Ensure that the end result is `Sync` in a non-thread local `static`.
296         let should_check_for_sync = self.const_kind()
297             == hir::ConstContext::Static(hir::Mutability::Not)
298             && !tcx.is_thread_local_static(def_id.to_def_id());
299
300         if should_check_for_sync {
301             let hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
302             check_return_ty_is_sync(tcx, &body, hir_id);
303         }
304
305         // If we got through const-checking without emitting any "primary" errors, emit any
306         // "secondary" errors if they occurred.
307         let secondary_errors = mem::take(&mut self.secondary_errors);
308         if self.error_emitted.is_none() {
309             for error in secondary_errors {
310                 self.tcx.sess.diagnostic().emit_diagnostic(&error);
311             }
312         } else {
313             assert!(self.tcx.sess.has_errors());
314         }
315     }
316
317     fn local_has_storage_dead(&mut self, local: Local) -> bool {
318         let ccx = self.ccx;
319         self.local_has_storage_dead
320             .get_or_insert_with(|| {
321                 struct StorageDeads {
322                     locals: BitSet<Local>,
323                 }
324                 impl Visitor<'tcx> for StorageDeads {
325                     fn visit_statement(&mut self, stmt: &Statement<'tcx>, _: Location) {
326                         if let StatementKind::StorageDead(l) = stmt.kind {
327                             self.locals.insert(l);
328                         }
329                     }
330                 }
331                 let mut v = StorageDeads { locals: BitSet::new_empty(ccx.body.local_decls.len()) };
332                 v.visit_body(ccx.body);
333                 v.locals
334             })
335             .contains(local)
336     }
337
338     pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
339         self.qualifs.in_return_place(self.ccx, self.error_emitted)
340     }
341
342     /// Emits an error if an expression cannot be evaluated in the current context.
343     pub fn check_op(&mut self, op: impl NonConstOp) {
344         self.check_op_spanned(op, self.span);
345     }
346
347     /// Emits an error at the given `span` if an expression cannot be evaluated in the current
348     /// context.
349     pub fn check_op_spanned<O: NonConstOp>(&mut self, op: O, span: Span) {
350         let gate = match op.status_in_item(self.ccx) {
351             Status::Allowed => return,
352
353             Status::Unstable(gate) if self.tcx.features().enabled(gate) => {
354                 let unstable_in_stable = self.ccx.is_const_stable_const_fn()
355                     && !super::rustc_allow_const_fn_unstable(
356                         self.tcx,
357                         self.def_id().to_def_id(),
358                         gate,
359                     );
360                 if unstable_in_stable {
361                     emit_unstable_in_stable_error(self.ccx, span, gate);
362                 }
363
364                 return;
365             }
366
367             Status::Unstable(gate) => Some(gate),
368             Status::Forbidden => None,
369         };
370
371         if self.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you {
372             self.tcx.sess.miri_unleashed_feature(span, gate);
373             return;
374         }
375
376         let mut err = op.build_error(self.ccx, span);
377         assert!(err.is_error());
378
379         match op.importance() {
380             ops::DiagnosticImportance::Primary => {
381                 self.error_emitted = Some(ErrorReported);
382                 err.emit();
383             }
384
385             ops::DiagnosticImportance::Secondary => err.buffer(&mut self.secondary_errors),
386         }
387     }
388
389     fn check_static(&mut self, def_id: DefId, span: Span) {
390         if self.tcx.is_thread_local_static(def_id) {
391             self.tcx.sess.delay_span_bug(span, "tls access is checked in `Rvalue::ThreadLocalRef`");
392         }
393         self.check_op_spanned(ops::StaticAccess, span)
394     }
395
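    // Rough examples of what the signature check below rejects (or feature-gates) in a
    // `const fn`: `fn f(x: &mut u32)` (mutable reference), `fn f() -> impl Sized`
    // (`impl Trait`), `fn f(g: fn())` (function pointer), and `fn f(d: &dyn Send)`
    // (trait object).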
396     fn check_local_or_return_ty(&mut self, ty: Ty<'tcx>, local: Local) {
397         let kind = self.body.local_kind(local);
398
399         for ty in ty.walk(self.tcx) {
400             let ty = match ty.unpack() {
401                 GenericArgKind::Type(ty) => ty,
402
403                 // No constraints on lifetimes or constants, except potentially
404                 // constants' types, but `walk` will get to them as well.
405                 GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => continue,
406             };
407
408             match *ty.kind() {
409                 ty::Ref(_, _, hir::Mutability::Mut) => self.check_op(ops::ty::MutRef(kind)),
410                 ty::Opaque(..) => self.check_op(ops::ty::ImplTrait),
411                 ty::FnPtr(..) => self.check_op(ops::ty::FnPtr(kind)),
412
413                 ty::Dynamic(preds, _) => {
414                     for pred in preds.iter() {
415                         match pred.skip_binder() {
416                             ty::ExistentialPredicate::AutoTrait(_)
417                             | ty::ExistentialPredicate::Projection(_) => {
418                                 self.check_op(ops::ty::DynTrait(kind))
419                             }
420                             ty::ExistentialPredicate::Trait(trait_ref) => {
421                                 if Some(trait_ref.def_id) != self.tcx.lang_items().sized_trait() {
422                                     self.check_op(ops::ty::DynTrait(kind))
423                                 }
424                             }
425                         }
426                     }
427                 }
428                 _ => {}
429             }
430         }
431     }
432
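    // Walks the predicates of this item and its parents; e.g. a bound like the one in
    // `const fn f<T: PartialEq>(x: T)` would reach the `TraitBound` check below, while a
    // plain `T: Sized` bound is skipped.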
433     fn check_item_predicates(&mut self) {
434         let ConstCx { tcx, .. } = *self.ccx;
435
436         let mut current = self.def_id().to_def_id();
437         loop {
438             let predicates = tcx.predicates_of(current);
439             for (predicate, _) in predicates.predicates {
440                 match predicate.kind().skip_binder() {
441                     ty::PredicateKind::RegionOutlives(_)
442                     | ty::PredicateKind::TypeOutlives(_)
443                     | ty::PredicateKind::WellFormed(_)
444                     | ty::PredicateKind::Projection(_)
445                     | ty::PredicateKind::ConstEvaluatable(..)
446                     | ty::PredicateKind::ConstEquate(..)
447                     | ty::PredicateKind::TypeWellFormedFromEnv(..) => continue,
448                     ty::PredicateKind::ObjectSafe(_) => {
449                         bug!("object safe predicate on function: {:#?}", predicate)
450                     }
451                     ty::PredicateKind::ClosureKind(..) => {
452                         bug!("closure kind predicate on function: {:#?}", predicate)
453                     }
454                     ty::PredicateKind::Subtype(_) | ty::PredicateKind::Coerce(_) => {
455                         bug!("subtype/coerce predicate on function: {:#?}", predicate)
456                     }
457                     ty::PredicateKind::Trait(pred) => {
458                         if Some(pred.def_id()) == tcx.lang_items().sized_trait() {
459                             continue;
460                         }
461                         match pred.self_ty().kind() {
462                             ty::Param(p) => {
463                                 let generics = tcx.generics_of(current);
464                                 let def = generics.type_param(p, tcx);
465                                 let span = tcx.def_span(def.def_id);
466
467                                 // These are part of the function signature, so treat them like
468                                 // arguments when determining importance.
469                                 let kind = LocalKind::Arg;
470
471                                 self.check_op_spanned(ops::ty::TraitBound(kind), span);
472                             }
473                             // other kinds of bounds are either tautologies
474                             // or cause errors in other passes
475                             _ => continue,
476                         }
477                     }
478                 }
479             }
480             match predicates.parent {
481                 Some(parent) => current = parent,
482                 None => break,
483             }
484         }
485     }
486
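    // As a rough illustration of the cases below: in a `const` item, `&mut [0u8; 64]` borrowing
    // a value that flows into the final constant is reported as a full `MutBorrow`, while
    // `let r = &mut x;` on a scoped local inside a `const fn` body only produces the gated
    // `TransientMutBorrow`.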
487     fn check_mut_borrow(&mut self, local: Local, kind: hir::BorrowKind) {
488         match self.const_kind() {
489             // In a const fn all borrows are transient or point to the places given via
490             // references in the arguments (so we already checked them with
491             // TransientMutBorrow/MutBorrow as appropriate).
492             // The borrow checker guarantees that no new non-transient borrows are created.
493             // NOTE: Once we have heap allocations during CTFE we need to figure out
494             // how to prevent `const fn` from creating long-lived allocations that point
495             // to mutable memory.
496             hir::ConstContext::ConstFn => self.check_op(ops::TransientMutBorrow(kind)),
497             _ => {
498                 // Locals with a `StorageDead` do not live beyond the evaluation and can
499                 // thus safely be borrowed, since the borrow cannot leak into the final
500                 // value of the constant.
501                 if self.local_has_storage_dead(local) {
502                     self.check_op(ops::TransientMutBorrow(kind));
503                 } else {
504                     self.check_op(ops::MutBorrow(kind));
505                 }
506             }
507         }
508     }
509 }
510
511 impl Visitor<'tcx> for Checker<'mir, 'tcx> {
512     fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) {
513         trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup);
514
515         // We don't const-check basic blocks on the cleanup path since we never unwind during
516         // const-eval: a panic causes an immediate compile error. In other words, cleanup blocks
517         // are unreachable during const-eval.
518         //
519         // We can't be more conservative (e.g., by const-checking cleanup blocks anyways) because
520         // locals that would never be dropped during normal execution are sometimes dropped during
521         // unwinding, which means backwards-incompatible live-drop errors.
522         if block.is_cleanup {
523             return;
524         }
525
526         self.super_basic_block_data(bb, block);
527     }
528
529     fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
530         trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);
531
532         // Special-case reborrows to be more like a copy of a reference.
533         match *rvalue {
534             Rvalue::Ref(_, kind, place) => {
535                 if let Some(reborrowed_place_ref) = place_as_reborrow(self.tcx, self.body, place) {
536                     let ctx = match kind {
537                         BorrowKind::Shared => {
538                             PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow)
539                         }
540                         BorrowKind::Shallow => {
541                             PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow)
542                         }
543                         BorrowKind::Unique => {
544                             PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow)
545                         }
546                         BorrowKind::Mut { .. } => {
547                             PlaceContext::MutatingUse(MutatingUseContext::Borrow)
548                         }
549                     };
550                     self.visit_local(&reborrowed_place_ref.local, ctx, location);
551                     self.visit_projection(reborrowed_place_ref, ctx, location);
552                     return;
553                 }
554             }
555             Rvalue::AddressOf(mutbl, place) => {
556                 if let Some(reborrowed_place_ref) = place_as_reborrow(self.tcx, self.body, place) {
557                     let ctx = match mutbl {
558                         Mutability::Not => {
559                             PlaceContext::NonMutatingUse(NonMutatingUseContext::AddressOf)
560                         }
561                         Mutability::Mut => PlaceContext::MutatingUse(MutatingUseContext::AddressOf),
562                     };
563                     self.visit_local(&reborrowed_place_ref.local, ctx, location);
564                     self.visit_projection(reborrowed_place_ref, ctx, location);
565                     return;
566                 }
567             }
568             _ => {}
569         }
570
571         self.super_rvalue(rvalue, location);
572
573         match *rvalue {
574             Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess),
575
576             Rvalue::Use(_)
577             | Rvalue::Repeat(..)
578             | Rvalue::Discriminant(..)
579             | Rvalue::Len(_)
580             | Rvalue::Aggregate(..) => {}
581
582             Rvalue::Ref(_, kind @ BorrowKind::Mut { .. }, ref place)
583             | Rvalue::Ref(_, kind @ BorrowKind::Unique, ref place) => {
584                 let ty = place.ty(self.body, self.tcx).ty;
585                 let is_allowed = match ty.kind() {
586                     // Inside a `static mut`, `&mut [...]` is allowed.
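                    // e.g. `static mut BUF: &mut [u8] = &mut [0; 64];` relies on this
                    // exception.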
587                     ty::Array(..) | ty::Slice(_)
588                         if self.const_kind() == hir::ConstContext::Static(hir::Mutability::Mut) =>
589                     {
590                         true
591                     }
592
593                     // FIXME(ecstaticmorse): We could allow `&mut []` inside a const context given
594                     // that this is merely a ZST and it is already eligible for promotion.
595                     // This may require an RFC.
596                     /*
597                     ty::Array(_, len) if len.try_eval_usize(cx.tcx, cx.param_env) == Some(0)
598                         => true,
599                     */
600                     _ => false,
601                 };
602
603                 if !is_allowed {
604                     if let BorrowKind::Mut { .. } = kind {
605                         self.check_mut_borrow(place.local, hir::BorrowKind::Ref)
606                     } else {
607                         self.check_op(ops::CellBorrow);
608                     }
609                 }
610             }
611
612             Rvalue::AddressOf(Mutability::Mut, ref place) => {
613                 self.check_mut_borrow(place.local, hir::BorrowKind::Raw)
614             }
615
616             Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Shallow, ref place)
617             | Rvalue::AddressOf(Mutability::Not, ref place) => {
618                 let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
619                     &self.ccx,
620                     &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
621                     place.as_ref(),
622                 );
623
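                // As a rough example, `const C: &Cell<i32> = &Cell::new(0);` takes a shared
                // borrow of interior-mutable data that would escape into the final value and
                // is therefore rejected via `CellBorrow` below.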
624                 if borrowed_place_has_mut_interior {
625                     match self.const_kind() {
626                         // In a const fn all borrows are transient or point to the places given via
627                         // references in the arguments (so we already checked them with
628                         // TransientCellBorrow/CellBorrow as appropriate).
629                         // The borrow checker guarantees that no new non-transient borrows are created.
630                         // NOTE: Once we have heap allocations during CTFE we need to figure out
631                         // how to prevent `const fn` from creating long-lived allocations that point
632                         // to (interior) mutable memory.
633                         hir::ConstContext::ConstFn => self.check_op(ops::TransientCellBorrow),
634                         _ => {
635                             // Locals with StorageDead are definitely not part of the final constant value, and
636                             // it is thus inherently safe to permit such locals to have their
637                             // address taken as we can't end up with a reference to them in the
638                             // final value.
639                             // Note: This is only sound if every local that has a `StorageDead` has a
640                             // `StorageDead` in every control flow path leading to a `return` terminator.
641                             if self.local_has_storage_dead(place.local) {
642                                 self.check_op(ops::TransientCellBorrow);
643                             } else {
644                                 self.check_op(ops::CellBorrow);
645                             }
646                         }
647                     }
648                 }
649             }
650
651             Rvalue::Cast(
652                 CastKind::Pointer(PointerCast::MutToConstPointer | PointerCast::ArrayToPointer),
653                 _,
654                 _,
655             ) => {}
656
657             Rvalue::Cast(
658                 CastKind::Pointer(
659                     PointerCast::UnsafeFnPointer
660                     | PointerCast::ClosureFnPointer(_)
661                     | PointerCast::ReifyFnPointer,
662                 ),
663                 _,
664                 _,
665             ) => self.check_op(ops::FnPtrCast),
666
667             Rvalue::Cast(CastKind::Pointer(PointerCast::Unsize), _, _) => {
668                 // Nothing to check here (`check_local_or_return_ty` ensures no trait objects occur
669                 // in the type of any local, which also rules out unsizing casts to trait objects).
670             }
671
672             Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => {
673                 let operand_ty = operand.ty(self.body, self.tcx);
674                 let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast");
675                 let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
676
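                // e.g. a pointer-to-integer cast such as `p as usize` (with `p` a raw
                // pointer) inside a const context is what gets flagged here.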
677                 if let (CastTy::Ptr(_) | CastTy::FnPtr, CastTy::Int(_)) = (cast_in, cast_out) {
678                     self.check_op(ops::RawPtrToIntCast);
679                 }
680             }
681
682             Rvalue::NullaryOp(NullOp::SizeOf | NullOp::AlignOf, _) => {}
683             Rvalue::NullaryOp(NullOp::Box, _) => self.check_op(ops::HeapAllocation),
684             Rvalue::ShallowInitBox(_, _) => {}
685
686             Rvalue::UnaryOp(_, ref operand) => {
687                 let ty = operand.ty(self.body, self.tcx);
688                 if is_int_bool_or_char(ty) {
689                     // Int, bool, and char operations are fine.
690                 } else if ty.is_floating_point() {
691                     self.check_op(ops::FloatingPointOp);
692                 } else {
693                     span_bug!(self.span, "non-primitive type in `Rvalue::UnaryOp`: {:?}", ty);
694                 }
695             }
696
697             Rvalue::BinaryOp(op, box (ref lhs, ref rhs))
698             | Rvalue::CheckedBinaryOp(op, box (ref lhs, ref rhs)) => {
699                 let lhs_ty = lhs.ty(self.body, self.tcx);
700                 let rhs_ty = rhs.ty(self.body, self.tcx);
701
702                 if is_int_bool_or_char(lhs_ty) && is_int_bool_or_char(rhs_ty) {
703                     // Int, bool, and char operations are fine.
704                 } else if lhs_ty.is_fn_ptr() || lhs_ty.is_unsafe_ptr() {
705                     assert_eq!(lhs_ty, rhs_ty);
706                     assert!(
707                         op == BinOp::Eq
708                             || op == BinOp::Ne
709                             || op == BinOp::Le
710                             || op == BinOp::Lt
711                             || op == BinOp::Ge
712                             || op == BinOp::Gt
713                             || op == BinOp::Offset
714                     );
715
716                     self.check_op(ops::RawPtrComparison);
717                 } else if lhs_ty.is_floating_point() || rhs_ty.is_floating_point() {
718                     self.check_op(ops::FloatingPointOp);
719                 } else {
720                     span_bug!(
721                         self.span,
722                         "non-primitive type in `Rvalue::BinaryOp`: {:?} ⚬ {:?}",
723                         lhs_ty,
724                         rhs_ty
725                     );
726                 }
727             }
728         }
729     }
730
731     fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
732         self.super_operand(op, location);
733         if let Operand::Constant(c) = op {
734             if let Some(def_id) = c.check_static_ptr(self.tcx) {
735                 self.check_static(def_id, self.span);
736             }
737         }
738     }
739     fn visit_projection_elem(
740         &mut self,
741         place_local: Local,
742         proj_base: &[PlaceElem<'tcx>],
743         elem: PlaceElem<'tcx>,
744         context: PlaceContext,
745         location: Location,
746     ) {
747         trace!(
748             "visit_projection_elem: place_local={:?} proj_base={:?} elem={:?} \
749             context={:?} location={:?}",
750             place_local,
751             proj_base,
752             elem,
753             context,
754             location,
755         );
756
757         self.super_projection_elem(place_local, proj_base, elem, context, location);
758
759         match elem {
760             ProjectionElem::Deref => {
761                 let base_ty = Place::ty_from(place_local, proj_base, self.body, self.tcx).ty;
762                 if let ty::RawPtr(_) = base_ty.kind() {
763                     if proj_base.is_empty() {
764                         let decl = &self.body.local_decls[place_local];
765                         if let Some(box LocalInfo::StaticRef { def_id, .. }) = decl.local_info {
766                             let span = decl.source_info.span;
767                             self.check_static(def_id, span);
768                             return;
769                         }
770                     }
771                     self.check_op(ops::RawPtrDeref);
772                 }
773
774                 if context.is_mutating_use() {
775                     self.check_op(ops::MutDeref);
776                 }
777             }
778
779             ProjectionElem::ConstantIndex { .. }
780             | ProjectionElem::Downcast(..)
781             | ProjectionElem::Subslice { .. }
782             | ProjectionElem::Field(..)
783             | ProjectionElem::Index(_) => {}
784         }
785     }
786
787     fn visit_source_info(&mut self, source_info: &SourceInfo) {
788         trace!("visit_source_info: source_info={:?}", source_info);
789         self.span = source_info.span;
790     }
791
792     fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
793         trace!("visit_statement: statement={:?} location={:?}", statement, location);
794
795         self.super_statement(statement, location);
796
797         match statement.kind {
798             StatementKind::LlvmInlineAsm { .. } => {
799                 self.check_op(ops::InlineAsm);
800             }
801
802             StatementKind::Assign(..)
803             | StatementKind::SetDiscriminant { .. }
804             | StatementKind::FakeRead(..)
805             | StatementKind::StorageLive(_)
806             | StatementKind::StorageDead(_)
807             | StatementKind::Retag { .. }
808             | StatementKind::AscribeUserType(..)
809             | StatementKind::Coverage(..)
810             | StatementKind::CopyNonOverlapping(..)
811             | StatementKind::Nop => {}
812         }
813     }
814
815     #[instrument(level = "debug", skip(self))]
816     fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
817         use rustc_target::spec::abi::Abi::RustIntrinsic;
818
819         self.super_terminator(terminator, location);
820
821         match &terminator.kind {
822             TerminatorKind::Call { func, args, .. } => {
823                 let ConstCx { tcx, body, param_env, .. } = *self.ccx;
824                 let caller = self.def_id().to_def_id();
825
826                 let fn_ty = func.ty(body, tcx);
827
828                 let (mut callee, mut substs) = match *fn_ty.kind() {
829                     ty::FnDef(def_id, substs) => (def_id, substs),
830
831                     ty::FnPtr(_) => {
832                         self.check_op(ops::FnCallIndirect);
833                         return;
834                     }
835                     _ => {
836                         span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)
837                     }
838                 };
839
840                 let mut nonconst_call_permission = false;
841
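                // Sketch of the logic below: a trait-method call such as `T::clone(&x)` in a
                // `const fn` is only accepted when `const_trait_impl` is enabled and trait
                // selection yields either a const-if-const where-clause bound or a
                // user-written const trait impl; anything else ends up as a non-const call
                // error (modulo the `default_method_body_is_const` escape hatch).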
842                 // Attempting to call a trait method?
843                 if let Some(trait_id) = tcx.trait_of_item(callee) {
844                     trace!("attempting to call a trait method");
845                     if !self.tcx.features().const_trait_impl {
846                         self.check_op(ops::FnCallNonConst);
847                         return;
848                     }
849
850                     let trait_ref = TraitRef::from_method(tcx, trait_id, substs);
851                     let obligation = Obligation::new(
852                         ObligationCause::dummy(),
853                         param_env,
854                         Binder::dummy(TraitPredicate {
855                             trait_ref,
856                             constness: ty::BoundConstness::ConstIfConst,
857                         }),
858                     );
859
860                     let implsrc = tcx.infer_ctxt().enter(|infcx| {
861                         let mut selcx =
862                             SelectionContext::with_constness(&infcx, hir::Constness::Const);
863                         selcx.select(&obligation)
864                     });
865
866                     match implsrc {
867                         Ok(Some(ImplSource::Param(_, ty::BoundConstness::ConstIfConst))) => {
868                             debug!(
869                                 "const_trait_impl: provided {:?} via where-clause in {:?}",
870                                 trait_ref, param_env
871                             );
872                             return;
873                         }
874                         Ok(Some(ImplSource::UserDefined(data))) => {
875                             let callee_name = tcx.item_name(callee);
876                             if let Some(&did) = tcx
877                                 .associated_item_def_ids(data.impl_def_id)
878                                 .iter()
879                                 .find(|did| tcx.item_name(**did) == callee_name)
880                             {
881                                 // using internal substs is ok here, since this is only
882                                 // used for the `resolve` call below
883                                 substs = InternalSubsts::identity_for_item(tcx, did);
884                                 callee = did;
885                             }
886                         }
887                         _ if !tcx.is_const_fn_raw(callee) => {
888                             // At this point, it is only legal when the caller is marked with
889                             // #[default_method_body_is_const], and the callee is in the same
890                             // trait.
891                             let callee_trait = tcx.trait_of_item(callee);
892                             if callee_trait.is_some() {
893                                 if tcx.has_attr(caller, sym::default_method_body_is_const) {
894                                     if tcx.trait_of_item(caller) == callee_trait {
895                                         nonconst_call_permission = true;
896                                     }
897                                 }
898                             }
899
900                             if !nonconst_call_permission {
901                                 self.check_op(ops::FnCallNonConst);
902                                 return;
903                             }
904                         }
905                         _ => {}
906                     }
907
908                     // Resolve a trait method call to its concrete implementation, which may be in a
909                     // `const` trait impl.
910                     let instance = Instance::resolve(tcx, param_env, callee, substs);
911                     debug!("Resolving ({:?}) -> {:?}", callee, instance);
912                     if let Ok(Some(func)) = instance {
913                         if let InstanceDef::Item(def) = func.def {
914                             callee = def.did;
915                         }
916                     }
917                 }
918
919                 // At this point, we are calling a function, `callee`, whose `DefId` is known...
920                 if is_lang_special_const_fn(tcx, callee) {
921                     // `begin_panic` and `panic_display` are generic functions that accept
922                     // types other than str. Check to enforce that only str can be used in
923                     // const-eval.
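                    // e.g. on the 2018 edition, `panic!(42)` lowers to a `begin_panic` call
                    // with a non-`&str` payload and would be rejected by the checks below.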
924
925                     // const-eval of the `begin_panic` fn assumes the argument is `&str`
926                     if Some(callee) == tcx.lang_items().begin_panic_fn() {
927                         match args[0].ty(&self.ccx.body.local_decls, tcx).kind() {
928                             ty::Ref(_, ty, _) if ty.is_str() => (),
929                             _ => self.check_op(ops::PanicNonStr),
930                         }
931                     }
932
933                     // const-eval of the `panic_display` fn assumes the argument is `&&str`
934                     if Some(callee) == tcx.lang_items().panic_display() {
935                         match args[0].ty(&self.ccx.body.local_decls, tcx).kind() {
936                             ty::Ref(_, ty, _) if matches!(ty.kind(), ty::Ref(_, ty, _) if ty.is_str()) =>
937                                 {}
938                             _ => self.check_op(ops::PanicNonStr),
939                         }
940                     }
941
942                     if is_lang_panic_fn(tcx, callee) {
943                         // Return early for panic fns; non-panic special const fns fall through to the checks below.
944                         return;
945                     }
946                 }
947
948                 if Some(callee) == tcx.lang_items().exchange_malloc_fn() {
949                     self.check_op(ops::HeapAllocation);
950                     return;
951                 }
952
953                 // `async` blocks get lowered to `std::future::from_generator(/* a closure */)`.
954                 let is_async_block = Some(callee) == tcx.lang_items().from_generator_fn();
955                 if is_async_block {
956                     let kind = hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Block);
957                     self.check_op(ops::Generator(kind));
958                     return;
959                 }
960
961                 let is_intrinsic = tcx.fn_sig(callee).abi() == RustIntrinsic;
962
963                 if !tcx.is_const_fn_raw(callee) {
964                     if tcx.trait_of_item(callee).is_some() {
965                         if tcx.has_attr(callee, sym::default_method_body_is_const) {
966                             // To get here we must have already found a const impl for the
967                             // trait, but the callee can still be non-const if the impl
968                             // relies on the trait's default method bodies.
969                             nonconst_call_permission = true;
970                         }
971                     }
972
973                     if !nonconst_call_permission {
974                         self.check_op(ops::FnCallNonConst);
975                         return;
976                     }
977                 }
978
979                 // If the `const fn` we are trying to call is not const-stable, ensure that we have
980                 // the proper feature gate enabled.
981                 if let Some(gate) = is_unstable_const_fn(tcx, callee) {
982                     trace!(?gate, "calling unstable const fn");
983                     if self.span.allows_unstable(gate) {
984                         return;
985                     }
986
987                     // Calling an unstable function *always* requires that the corresponding gate
988                     // be enabled, even if the function has `#[rustc_allow_const_fn_unstable(the_gate)]`.
989                     if !tcx.features().declared_lib_features.iter().any(|&(sym, _)| sym == gate) {
990                         self.check_op(ops::FnCallUnstable(callee, Some(gate)));
991                         return;
992                     }
993
994                     // If this crate is not using stability attributes, or the caller is not claiming to be a
995                     // stable `const fn`, that is all that is required.
996                     if !self.ccx.is_const_stable_const_fn() {
997                         trace!("crate not using stability attributes or caller not stably const");
998                         return;
999                     }
1000
1001                     // Otherwise, we are something const-stable calling a const-unstable fn.
1002
1003                     if super::rustc_allow_const_fn_unstable(tcx, caller, gate) {
1004                         trace!("rustc_allow_const_fn_unstable gate active");
1005                         return;
1006                     }
1007
1008                     self.check_op(ops::FnCallUnstable(callee, Some(gate)));
1009                     return;
1010                 }
1011
1012                 // FIXME(ecstaticmorse): For compatibility, we consider `unstable` callees that
1013                 // have no `rustc_const_stable` attributes to be const-unstable as well. This
1014                 // should be fixed later.
1015                 let callee_is_unstable_unmarked = tcx.lookup_const_stability(callee).is_none()
1016                     && tcx.lookup_stability(callee).map_or(false, |s| s.level.is_unstable());
1017                 if callee_is_unstable_unmarked {
1018                     trace!("callee_is_unstable_unmarked");
1019                     // We do not use `const` modifiers for intrinsic "functions", as intrinsics are
1020                     // `extern` functions, and these have no way to get marked `const`. So instead we
1021                     // use `rustc_const_(un)stable` attributes to mean that the intrinsic is `const`.
1022                     if self.ccx.is_const_stable_const_fn() || is_intrinsic {
1023                         self.check_op(ops::FnCallUnstable(callee, None));
1024                         return;
1025                     }
1026                 }
1027                 trace!("permitting call");
1028             }
1029
1030             // Forbid all `Drop` terminators unless the place being dropped is a local with no
1031             // projections that is known not to be `NeedsNonConstDrop`.
1032             TerminatorKind::Drop { place: dropped_place, .. }
1033             | TerminatorKind::DropAndReplace { place: dropped_place, .. } => {
1034                 // If we are checking live drops after drop-elaboration, don't emit duplicate
1035                 // errors here.
1036                 if super::post_drop_elaboration::checking_enabled(self.ccx) {
1037                     return;
1038                 }
1039
1040                 let mut err_span = self.span;
1041
1042                 let ty_needs_non_const_drop = qualifs::NeedsNonConstDrop::in_any_value_of_ty(
1043                     self.ccx,
1044                     dropped_place.ty(self.body, self.tcx).ty,
1045                 );
1046
1047                 if !ty_needs_non_const_drop {
1048                     return;
1049                 }
1050
1051                 let needs_non_const_drop = if let Some(local) = dropped_place.as_local() {
1052                     // Use the span where the local was declared as the span of the drop error.
1053                     err_span = self.body.local_decls[local].source_info.span;
1054                     self.qualifs.needs_non_const_drop(self.ccx, local, location)
1055                 } else {
1056                     true
1057                 };
1058
1059                 if needs_non_const_drop {
1060                     self.check_op_spanned(
1061                         ops::LiveDrop { dropped_at: Some(terminator.source_info.span) },
1062                         err_span,
1063                     );
1064                 }
1065             }
1066
1067             TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm),
1068
1069             TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => {
1070                 self.check_op(ops::Generator(hir::GeneratorKind::Gen))
1071             }
1072
1073             TerminatorKind::Abort => {
1074                 // Cleanup blocks are skipped for const checking (see `visit_basic_block_data`).
1075                 span_bug!(self.span, "`Abort` terminator outside of cleanup block")
1076             }
1077
1078             TerminatorKind::Assert { .. }
1079             | TerminatorKind::FalseEdge { .. }
1080             | TerminatorKind::FalseUnwind { .. }
1081             | TerminatorKind::Goto { .. }
1082             | TerminatorKind::Resume
1083             | TerminatorKind::Return
1084             | TerminatorKind::SwitchInt { .. }
1085             | TerminatorKind::Unreachable => {}
1086         }
1087     }
1088 }
1089
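// Roughly: the final value of a non-thread-local `static` must be shareable across threads, so
// e.g. `static S: Cell<u8> = Cell::new(0);` fails this check because `Cell<u8>` is not `Sync`.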
1090 fn check_return_ty_is_sync(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, hir_id: HirId) {
1091     let ty = body.return_ty();
1092     tcx.infer_ctxt().enter(|infcx| {
1093         let cause = traits::ObligationCause::new(body.span, hir_id, traits::SharedStatic);
1094         let mut fulfillment_cx = traits::FulfillmentContext::new();
1095         let sync_def_id = tcx.require_lang_item(LangItem::Sync, Some(body.span));
1096         fulfillment_cx.register_bound(&infcx, ty::ParamEnv::empty(), ty, sync_def_id, cause);
1097         if let Err(err) = fulfillment_cx.select_all_or_error(&infcx) {
1098             infcx.report_fulfillment_errors(&err, None, false);
1099         }
1100     });
1101 }
1102
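// Detects reborrows: with `r: &T`, an expression like `&*r` shows up in MIR as `&(*_1)` and is
// treated as a copy of the original reference rather than a fresh borrow; a deref of a raw
// pointer is deliberately never treated as a reborrow.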
1103 fn place_as_reborrow(
1104     tcx: TyCtxt<'tcx>,
1105     body: &Body<'tcx>,
1106     place: Place<'tcx>,
1107 ) -> Option<PlaceRef<'tcx>> {
1108     match place.as_ref().last_projection() {
1109         Some((place_base, ProjectionElem::Deref)) => {
1110             // A borrow of a `static` also looks like `&(*_1)` in the MIR, but `_1` is a `const`
1111             // that points to the allocation for the static. Don't treat these as reborrows.
1112             if body.local_decls[place_base.local].is_ref_to_static() {
1113                 None
1114             } else {
1115                 // Ensure the type being derefed is a reference and not a raw pointer.
1116                 // This is sufficient to prevent an access to a `static mut` from being marked as a
1117                 // reborrow, even if the check above were to disappear.
1118                 let inner_ty = place_base.ty(body, tcx).ty;
1119
1120                 if let ty::Ref(..) = inner_ty.kind() {
1121                     return Some(place_base);
1122                 } else {
1123                     return None;
1124                 }
1125             }
1126         }
1127         _ => None,
1128     }
1129 }
1130
1131 fn is_int_bool_or_char(ty: Ty<'_>) -> bool {
1132     ty.is_bool() || ty.is_integral() || ty.is_char()
1133 }
1134
1135 fn is_async_fn(ccx: &ConstCx<'_, '_>) -> bool {
1136     ccx.fn_sig().map_or(false, |sig| sig.header.asyncness == hir::IsAsync::Async)
1137 }
1138
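// For illustration: a function marked `#[rustc_const_stable]` whose body uses a gated feature
// such as `const_mut_refs` ends up here unless it is also annotated with
// `#[rustc_allow_const_fn_unstable(const_mut_refs)]`.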
1139 fn emit_unstable_in_stable_error(ccx: &ConstCx<'_, '_>, span: Span, gate: Symbol) {
1140     let attr_span = ccx.fn_sig().map_or(ccx.body.span, |sig| sig.span.shrink_to_lo());
1141
1142     ccx.tcx
1143         .sess
1144         .struct_span_err(
1145             span,
1146             &format!("const-stable function cannot use `#[feature({})]`", gate.as_str()),
1147         )
1148         .span_suggestion(
1149             attr_span,
1150             "if it is not part of the public API, make this function unstably const",
1151             concat!(r#"#[rustc_const_unstable(feature = "...", issue = "...")]"#, '\n').to_owned(),
1152             Applicability::HasPlaceholders,
1153         )
1154         .span_suggestion(
1155             attr_span,
1156             "otherwise `#[rustc_allow_const_fn_unstable]` can be used to bypass stability checks",
1157             format!("#[rustc_allow_const_fn_unstable({})]\n", gate),
1158             Applicability::MaybeIncorrect,
1159         )
1160         .emit();
1161 }