]> git.lizzy.rs Git - rust.git/blob - compiler/rustc_mir/src/transform/check_consts/validation.rs
Rollup merge of #80734 - abonander:ab/issue-66693, r=oli-obk
[rust.git] / compiler / rustc_mir / src / transform / check_consts / validation.rs
1 //! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.
2
3 use rustc_errors::{struct_span_err, Applicability, Diagnostic, ErrorReported};
4 use rustc_hir::def_id::DefId;
5 use rustc_hir::{self as hir, HirId, LangItem};
6 use rustc_index::bit_set::BitSet;
7 use rustc_infer::infer::TyCtxtInferExt;
8 use rustc_infer::traits::{ImplSource, Obligation, ObligationCause};
9 use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
10 use rustc_middle::mir::*;
11 use rustc_middle::ty::cast::CastTy;
12 use rustc_middle::ty::subst::GenericArgKind;
13 use rustc_middle::ty::{
14     self, adjustment::PointerCast, Instance, InstanceDef, Ty, TyCtxt, TypeAndMut,
15 };
16 use rustc_middle::ty::{Binder, TraitPredicate, TraitRef};
17 use rustc_span::{sym, Span, Symbol};
18 use rustc_trait_selection::traits::error_reporting::InferCtxtExt;
19 use rustc_trait_selection::traits::{self, SelectionContext, TraitEngine};
20
21 use std::mem;
22 use std::ops::Deref;
23
24 use super::ops::{self, NonConstOp, Status};
25 use super::qualifs::{self, CustomEq, HasMutInterior, NeedsDrop};
26 use super::resolver::FlowSensitiveAnalysis;
27 use super::{is_lang_panic_fn, ConstCx, Qualif};
28 use crate::const_eval::is_unstable_const_fn;
29 use crate::dataflow::impls::MaybeMutBorrowedLocals;
30 use crate::dataflow::{self, Analysis};
31
// We are using `MaybeMutBorrowedLocals` as a proxy for whether an item may have been mutated
// through a pointer prior to the given point. This is okay even though `MaybeMutBorrowedLocals`
// kills locals upon `StorageDead` because a local will never be used after a `StorageDead`.
type IndirectlyMutableResults<'mir, 'tcx> =
    dataflow::ResultsCursor<'mir, 'tcx, MaybeMutBorrowedLocals<'mir, 'tcx>>;

/// A cursor over the results of running the flow-sensitive analysis for the qualif `Q`.
type QualifResults<'mir, 'tcx, Q> =
    dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'mir, 'tcx, Q>>;
40
/// Lazily-initialized dataflow cursors, one per qualif.
///
/// Each field starts out `None`; the corresponding analysis is only run (and its cursor cached)
/// the first time that qualif is actually queried — see the `get_or_insert_with` calls in the
/// `impl` below. This avoids paying for dataflow analyses an item never needs.
#[derive(Default)]
pub struct Qualifs<'mir, 'tcx> {
    has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
    needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
    indirectly_mutable: Option<IndirectlyMutableResults<'mir, 'tcx>>,
}
47
48 impl Qualifs<'mir, 'tcx> {
49     pub fn indirectly_mutable(
50         &mut self,
51         ccx: &'mir ConstCx<'mir, 'tcx>,
52         local: Local,
53         location: Location,
54     ) -> bool {
55         let indirectly_mutable = self.indirectly_mutable.get_or_insert_with(|| {
56             let ConstCx { tcx, body, param_env, .. } = *ccx;
57
58             // We can use `unsound_ignore_borrow_on_drop` here because custom drop impls are not
59             // allowed in a const.
60             //
61             // FIXME(ecstaticmorse): Someday we want to allow custom drop impls. How do we do this
62             // without breaking stable code?
63             MaybeMutBorrowedLocals::mut_borrows_only(tcx, &body, param_env)
64                 .unsound_ignore_borrow_on_drop()
65                 .into_engine(tcx, &body)
66                 .pass_name("const_qualification")
67                 .iterate_to_fixpoint()
68                 .into_results_cursor(&body)
69         });
70
71         indirectly_mutable.seek_before_primary_effect(location);
72         indirectly_mutable.get().contains(local)
73     }
74
75     /// Returns `true` if `local` is `NeedsDrop` at the given `Location`.
76     ///
77     /// Only updates the cursor if absolutely necessary
78     pub fn needs_drop(
79         &mut self,
80         ccx: &'mir ConstCx<'mir, 'tcx>,
81         local: Local,
82         location: Location,
83     ) -> bool {
84         let ty = ccx.body.local_decls[local].ty;
85         if !NeedsDrop::in_any_value_of_ty(ccx, ty) {
86             return false;
87         }
88
89         let needs_drop = self.needs_drop.get_or_insert_with(|| {
90             let ConstCx { tcx, body, .. } = *ccx;
91
92             FlowSensitiveAnalysis::new(NeedsDrop, ccx)
93                 .into_engine(tcx, &body)
94                 .iterate_to_fixpoint()
95                 .into_results_cursor(&body)
96         });
97
98         needs_drop.seek_before_primary_effect(location);
99         needs_drop.get().contains(local) || self.indirectly_mutable(ccx, local, location)
100     }
101
102     /// Returns `true` if `local` is `HasMutInterior` at the given `Location`.
103     ///
104     /// Only updates the cursor if absolutely necessary.
105     pub fn has_mut_interior(
106         &mut self,
107         ccx: &'mir ConstCx<'mir, 'tcx>,
108         local: Local,
109         location: Location,
110     ) -> bool {
111         let ty = ccx.body.local_decls[local].ty;
112         if !HasMutInterior::in_any_value_of_ty(ccx, ty) {
113             return false;
114         }
115
116         let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| {
117             let ConstCx { tcx, body, .. } = *ccx;
118
119             FlowSensitiveAnalysis::new(HasMutInterior, ccx)
120                 .into_engine(tcx, &body)
121                 .iterate_to_fixpoint()
122                 .into_results_cursor(&body)
123         });
124
125         has_mut_interior.seek_before_primary_effect(location);
126         has_mut_interior.get().contains(local) || self.indirectly_mutable(ccx, local, location)
127     }
128
129     fn in_return_place(
130         &mut self,
131         ccx: &'mir ConstCx<'mir, 'tcx>,
132         error_occured: Option<ErrorReported>,
133     ) -> ConstQualifs {
134         // Find the `Return` terminator if one exists.
135         //
136         // If no `Return` terminator exists, this MIR is divergent. Just return the conservative
137         // qualifs for the return type.
138         let return_block = ccx
139             .body
140             .basic_blocks()
141             .iter_enumerated()
142             .find(|(_, block)| match block.terminator().kind {
143                 TerminatorKind::Return => true,
144                 _ => false,
145             })
146             .map(|(bb, _)| bb);
147
148         let return_block = match return_block {
149             None => return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty(), error_occured),
150             Some(bb) => bb,
151         };
152
153         let return_loc = ccx.body.terminator_loc(return_block);
154
155         let custom_eq = match ccx.const_kind() {
156             // We don't care whether a `const fn` returns a value that is not structurally
157             // matchable. Functions calls are opaque and always use type-based qualification, so
158             // this value should never be used.
159             hir::ConstContext::ConstFn => true,
160
161             // If we know that all values of the return type are structurally matchable, there's no
162             // need to run dataflow.
163             _ if !CustomEq::in_any_value_of_ty(ccx, ccx.body.return_ty()) => false,
164
165             hir::ConstContext::Const | hir::ConstContext::Static(_) => {
166                 let mut cursor = FlowSensitiveAnalysis::new(CustomEq, ccx)
167                     .into_engine(ccx.tcx, &ccx.body)
168                     .iterate_to_fixpoint()
169                     .into_results_cursor(&ccx.body);
170
171                 cursor.seek_after_primary_effect(return_loc);
172                 cursor.contains(RETURN_PLACE)
173             }
174         };
175
176         ConstQualifs {
177             needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc),
178             has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc),
179             custom_eq,
180             error_occured,
181         }
182     }
183 }
184
/// The `Visitor` that checks a `mir::Body` for operations that are invalid in its const context.
pub struct Validator<'mir, 'tcx> {
    ccx: &'mir ConstCx<'mir, 'tcx>,
    qualifs: Qualifs<'mir, 'tcx>,

    /// The span of the current statement.
    span: Span,

    /// A set that stores for each local whether it has a `StorageDead` for it somewhere.
    local_has_storage_dead: Option<BitSet<Local>>,

    /// Set once a "primary" error has been emitted; when set, buffered secondary errors are
    /// suppressed at the end of `check_body`.
    error_emitted: Option<ErrorReported>,
    /// "Secondary" diagnostics, buffered here and only emitted by `check_body` if no primary
    /// error occurred.
    secondary_errors: Vec<Diagnostic>,
}
198
199 impl Deref for Validator<'mir, 'tcx> {
200     type Target = ConstCx<'mir, 'tcx>;
201
202     fn deref(&self) -> &Self::Target {
203         &self.ccx
204     }
205 }
206
impl Validator<'mir, 'tcx> {
    /// Creates a validator for `ccx`. `span` starts as the whole body's span and is narrowed to
    /// the current statement/local as checking proceeds.
    pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self {
        Validator {
            span: ccx.body.span,
            ccx,
            qualifs: Default::default(),
            local_has_storage_dead: None,
            error_emitted: None,
            secondary_errors: Vec::new(),
        }
    }

    /// Const-checks the entire body: signature types and predicates (for `const fn`s), every
    /// statement/rvalue via the `Visitor` impl, and `Sync`-ness of non-thread-local `static`s.
    pub fn check_body(&mut self) {
        let ConstCx { tcx, body, .. } = *self.ccx;
        let def_id = self.ccx.def_id();

        // `async` functions cannot be `const fn`. This is checked during AST lowering, so there's
        // no need to emit duplicate errors here.
        if is_async_fn(self.ccx) || body.generator_kind.is_some() {
            tcx.sess.delay_span_bug(body.span, "`async` functions cannot be `const fn`");
            return;
        }

        // The local type and predicate checks are not free and only relevant for `const fn`s.
        if self.const_kind() == hir::ConstContext::ConstFn {
            // Prevent const trait methods from being annotated as `stable`.
            // FIXME: Do this as part of stability checking.
            if self.is_const_stable_const_fn() {
                let hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
                if crate::const_eval::is_parent_const_impl_raw(tcx, hir_id) {
                    struct_span_err!(
                        self.ccx.tcx.sess,
                        self.span,
                        E0723,
                        "trait methods cannot be stable const fn"
                    )
                    .emit();
                }
            }

            self.check_item_predicates();

            for (idx, local) in body.local_decls.iter_enumerated() {
                // Handle the return place below.
                if idx == RETURN_PLACE || local.internal {
                    continue;
                }

                self.span = local.source_info.span;
                self.check_local_or_return_ty(local.ty, idx);
            }

            // impl trait is gone in MIR, so check the return type of a const fn by its signature
            // instead of the type of the return place.
            self.span = body.local_decls[RETURN_PLACE].source_info.span;
            let return_ty = tcx.fn_sig(def_id).output();
            self.check_local_or_return_ty(return_ty.skip_binder(), RETURN_PLACE);
        }

        self.visit_body(&body);

        // Ensure that the end result is `Sync` in a non-thread local `static`.
        let should_check_for_sync = self.const_kind()
            == hir::ConstContext::Static(hir::Mutability::Not)
            && !tcx.is_thread_local_static(def_id.to_def_id());

        if should_check_for_sync {
            let hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
            check_return_ty_is_sync(tcx, &body, hir_id);
        }

        // If we got through const-checking without emitting any "primary" errors, emit any
        // "secondary" errors if they occurred.
        let secondary_errors = mem::take(&mut self.secondary_errors);
        if self.error_emitted.is_none() {
            for error in secondary_errors {
                self.tcx.sess.diagnostic().emit_diagnostic(&error);
            }
        } else {
            // A primary error was emitted, so the secondary ones may be dropped — but the
            // session must agree that *some* error exists.
            assert!(self.tcx.sess.has_errors());
        }
    }

    /// Returns `true` if a `StorageDead` statement for `local` appears anywhere in the body.
    ///
    /// The full set of such locals is collected by a one-shot MIR visitor on first use and
    /// cached for subsequent queries.
    fn local_has_storage_dead(&mut self, local: Local) -> bool {
        let ccx = self.ccx;
        self.local_has_storage_dead
            .get_or_insert_with(|| {
                // Minimal visitor that records every local mentioned in a `StorageDead`.
                struct StorageDeads {
                    locals: BitSet<Local>,
                }
                impl Visitor<'tcx> for StorageDeads {
                    fn visit_statement(&mut self, stmt: &Statement<'tcx>, _: Location) {
                        if let StatementKind::StorageDead(l) = stmt.kind {
                            self.locals.insert(l);
                        }
                    }
                }
                let mut v = StorageDeads { locals: BitSet::new_empty(ccx.body.local_decls.len()) };
                v.visit_body(ccx.body);
                v.locals
            })
            .contains(local)
    }

    /// Returns the qualifs of the value in the return place, carrying along whether an error
    /// was emitted during checking.
    pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
        self.qualifs.in_return_place(self.ccx, self.error_emitted)
    }

    /// Emits an error if an expression cannot be evaluated in the current context.
    pub fn check_op(&mut self, op: impl NonConstOp) {
        self.check_op_spanned(op, self.span);
    }

    /// Emits an error at the given `span` if an expression cannot be evaluated in the current
    /// context.
    pub fn check_op_spanned<O: NonConstOp>(&mut self, op: O, span: Span) {
        let gate = match op.status_in_item(self.ccx) {
            Status::Allowed => return,

            Status::Unstable(gate) if self.tcx.features().enabled(gate) => {
                // The feature gate is enabled, but a const-stable `const fn` may only use an
                // unstable feature if it opted in via `rustc_allow_const_fn_unstable`.
                let unstable_in_stable = self.ccx.is_const_stable_const_fn()
                    && !super::rustc_allow_const_fn_unstable(
                        self.tcx,
                        self.def_id().to_def_id(),
                        gate,
                    );
                if unstable_in_stable {
                    emit_unstable_in_stable_error(self.ccx, span, gate);
                }

                return;
            }

            Status::Unstable(gate) => Some(gate),
            Status::Forbidden => None,
        };

        // `-Zunleash-the-miri-inside-of-you` bypasses const checks; just record the use.
        if self.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you {
            self.tcx.sess.miri_unleashed_feature(span, gate);
            return;
        }

        let mut err = op.build_error(self.ccx, span);
        assert!(err.is_error());

        match op.importance() {
            // Primary errors are emitted immediately; secondary ones are buffered and only
            // emitted later if no primary error occurred (see `check_body`).
            ops::DiagnosticImportance::Primary => {
                self.error_emitted = Some(ErrorReported);
                err.emit();
            }

            ops::DiagnosticImportance::Secondary => err.buffer(&mut self.secondary_errors),
        }
    }

    /// Emits an error for an access to a non-thread-local `static` at `span`.
    fn check_static(&mut self, def_id: DefId, span: Span) {
        assert!(
            !self.tcx.is_thread_local_static(def_id),
            "tls access is checked in `Rvalue::ThreadLocalRef"
        );
        self.check_op_spanned(ops::StaticAccess, span)
    }

    /// Checks that the type of a `const fn` local/argument/return place contains no type that is
    /// disallowed in a `const fn` signature: `&mut`, `impl Trait`, fn pointers, or trait objects
    /// with bounds other than `Sized`.
    fn check_local_or_return_ty(&mut self, ty: Ty<'tcx>, local: Local) {
        let kind = self.body.local_kind(local);

        // `walk` visits `ty` and all types nested inside it.
        for ty in ty.walk() {
            let ty = match ty.unpack() {
                GenericArgKind::Type(ty) => ty,

                // No constraints on lifetimes or constants, except potentially
                // constants' types, but `walk` will get to them as well.
                GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => continue,
            };

            match *ty.kind() {
                ty::Ref(_, _, hir::Mutability::Mut) => self.check_op(ops::ty::MutRef(kind)),
                ty::Opaque(..) => self.check_op(ops::ty::ImplTrait),
                ty::FnPtr(..) => self.check_op(ops::ty::FnPtr(kind)),

                ty::Dynamic(preds, _) => {
                    for pred in preds.iter() {
                        match pred.skip_binder() {
                            ty::ExistentialPredicate::AutoTrait(_)
                            | ty::ExistentialPredicate::Projection(_) => {
                                self.check_op(ops::ty::TraitBound(kind))
                            }
                            ty::ExistentialPredicate::Trait(trait_ref) => {
                                // `dyn Sized` alone is permitted; any other trait is not.
                                if Some(trait_ref.def_id) != self.tcx.lang_items().sized_trait() {
                                    self.check_op(ops::ty::TraitBound(kind))
                                }
                            }
                        }
                    }
                }
                _ => {}
            }
        }
    }

    /// Walks the predicates of this item and all of its parents (via `predicates.parent`),
    /// emitting errors for trait bounds on type parameters that are not allowed in a `const fn`.
    fn check_item_predicates(&mut self) {
        let ConstCx { tcx, .. } = *self.ccx;

        let mut current = self.def_id().to_def_id();
        loop {
            let predicates = tcx.predicates_of(current);
            for (predicate, _) in predicates.predicates {
                match predicate.kind().skip_binder() {
                    // These predicate kinds place no restriction on const contexts.
                    ty::PredicateKind::RegionOutlives(_)
                    | ty::PredicateKind::TypeOutlives(_)
                    | ty::PredicateKind::WellFormed(_)
                    | ty::PredicateKind::Projection(_)
                    | ty::PredicateKind::ConstEvaluatable(..)
                    | ty::PredicateKind::ConstEquate(..)
                    | ty::PredicateKind::TypeWellFormedFromEnv(..) => continue,
                    // The following predicate kinds should never appear on a function item.
                    ty::PredicateKind::ObjectSafe(_) => {
                        bug!("object safe predicate on function: {:#?}", predicate)
                    }
                    ty::PredicateKind::ClosureKind(..) => {
                        bug!("closure kind predicate on function: {:#?}", predicate)
                    }
                    ty::PredicateKind::Subtype(_) => {
                        bug!("subtype predicate on function: {:#?}", predicate)
                    }
                    ty::PredicateKind::Trait(pred, constness) => {
                        // `Sized` bounds are always permitted.
                        if Some(pred.def_id()) == tcx.lang_items().sized_trait() {
                            continue;
                        }
                        match pred.self_ty().kind() {
                            ty::Param(p) => {
                                let generics = tcx.generics_of(current);
                                let def = generics.type_param(p, tcx);
                                let span = tcx.def_span(def.def_id);

                                // These are part of the function signature, so treat them like
                                // arguments when determining importance.
                                let kind = LocalKind::Arg;

                                if constness == hir::Constness::Const {
                                    self.check_op_spanned(ops::ty::TraitBound(kind), span);
                                } else if !tcx.features().const_fn
                                    || self.ccx.is_const_stable_const_fn()
                                {
                                    // HACK: We shouldn't need the conditional above, but trait
                                    // bounds on containing impl blocks are wrongly being marked as
                                    // "not-const".
                                    self.check_op_spanned(ops::ty::TraitBound(kind), span);
                                }
                            }
                            // other kinds of bounds are either tautologies
                            // or cause errors in other passes
                            _ => continue,
                        }
                    }
                }
            }
            // Continue with the predicates inherited from the parent item, if any.
            match predicates.parent {
                Some(parent) => current = parent,
                None => break,
            }
        }
    }

    /// Checks a mutable borrow (`&mut` or `&raw mut`) of `local`, distinguishing transient
    /// borrows (which cannot escape into the final value) from persistent ones.
    fn check_mut_borrow(&mut self, local: Local, kind: hir::BorrowKind) {
        match self.const_kind() {
            // In a const fn all borrows are transient or point to the places given via
            // references in the arguments (so we already checked them with
            // TransientMutBorrow/MutBorrow as appropriate).
            // The borrow checker guarantees that no new non-transient borrows are created.
            // NOTE: Once we have heap allocations during CTFE we need to figure out
            // how to prevent `const fn` to create long-lived allocations that point
            // to mutable memory.
            hir::ConstContext::ConstFn => self.check_op(ops::TransientMutBorrow(kind)),
            _ => {
                // Locals with StorageDead do not live beyond the evaluation and can
                // thus safely be borrowed without being able to be leaked to the final
                // value of the constant.
                if self.local_has_storage_dead(local) {
                    self.check_op(ops::TransientMutBorrow(kind));
                } else {
                    self.check_op(ops::MutBorrow(kind));
                }
            }
        }
    }
}
493
494 impl Visitor<'tcx> for Validator<'mir, 'tcx> {
495     fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) {
496         trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup);
497
498         // We don't const-check basic blocks on the cleanup path since we never unwind during
499         // const-eval: a panic causes an immediate compile error. In other words, cleanup blocks
500         // are unreachable during const-eval.
501         //
502         // We can't be more conservative (e.g., by const-checking cleanup blocks anyways) because
503         // locals that would never be dropped during normal execution are sometimes dropped during
504         // unwinding, which means backwards-incompatible live-drop errors.
505         if block.is_cleanup {
506             return;
507         }
508
509         self.super_basic_block_data(bb, block);
510     }
511
512     fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
513         trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);
514
515         // Special-case reborrows to be more like a copy of a reference.
516         match *rvalue {
517             Rvalue::Ref(_, kind, place) => {
518                 if let Some(reborrowed_place_ref) = place_as_reborrow(self.tcx, self.body, place) {
519                     let ctx = match kind {
520                         BorrowKind::Shared => {
521                             PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow)
522                         }
523                         BorrowKind::Shallow => {
524                             PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow)
525                         }
526                         BorrowKind::Unique => {
527                             PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow)
528                         }
529                         BorrowKind::Mut { .. } => {
530                             PlaceContext::MutatingUse(MutatingUseContext::Borrow)
531                         }
532                     };
533                     self.visit_local(&reborrowed_place_ref.local, ctx, location);
534                     self.visit_projection(reborrowed_place_ref, ctx, location);
535                     return;
536                 }
537             }
538             Rvalue::AddressOf(mutbl, place) => {
539                 if let Some(reborrowed_place_ref) = place_as_reborrow(self.tcx, self.body, place) {
540                     let ctx = match mutbl {
541                         Mutability::Not => {
542                             PlaceContext::NonMutatingUse(NonMutatingUseContext::AddressOf)
543                         }
544                         Mutability::Mut => PlaceContext::MutatingUse(MutatingUseContext::AddressOf),
545                     };
546                     self.visit_local(&reborrowed_place_ref.local, ctx, location);
547                     self.visit_projection(reborrowed_place_ref, ctx, location);
548                     return;
549                 }
550             }
551             _ => {}
552         }
553
554         self.super_rvalue(rvalue, location);
555
556         match *rvalue {
557             Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess),
558
559             Rvalue::Use(_)
560             | Rvalue::Repeat(..)
561             | Rvalue::Discriminant(..)
562             | Rvalue::Len(_)
563             | Rvalue::Aggregate(..) => {}
564
565             Rvalue::Ref(_, kind @ BorrowKind::Mut { .. }, ref place)
566             | Rvalue::Ref(_, kind @ BorrowKind::Unique, ref place) => {
567                 let ty = place.ty(self.body, self.tcx).ty;
568                 let is_allowed = match ty.kind() {
569                     // Inside a `static mut`, `&mut [...]` is allowed.
570                     ty::Array(..) | ty::Slice(_)
571                         if self.const_kind() == hir::ConstContext::Static(hir::Mutability::Mut) =>
572                     {
573                         true
574                     }
575
576                     // FIXME(ecstaticmorse): We could allow `&mut []` inside a const context given
577                     // that this is merely a ZST and it is already eligible for promotion.
578                     // This may require an RFC?
579                     /*
580                     ty::Array(_, len) if len.try_eval_usize(cx.tcx, cx.param_env) == Some(0)
581                         => true,
582                     */
583                     _ => false,
584                 };
585
586                 if !is_allowed {
587                     if let BorrowKind::Mut { .. } = kind {
588                         self.check_mut_borrow(place.local, hir::BorrowKind::Ref)
589                     } else {
590                         self.check_op(ops::CellBorrow);
591                     }
592                 }
593             }
594
595             Rvalue::AddressOf(Mutability::Mut, ref place) => {
596                 self.check_mut_borrow(place.local, hir::BorrowKind::Raw)
597             }
598
599             Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Shallow, ref place)
600             | Rvalue::AddressOf(Mutability::Not, ref place) => {
601                 let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
602                     &self.ccx,
603                     &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
604                     place.as_ref(),
605                 );
606
607                 if borrowed_place_has_mut_interior {
608                     match self.const_kind() {
609                         // In a const fn all borrows are transient or point to the places given via
610                         // references in the arguments (so we already checked them with
611                         // TransientCellBorrow/CellBorrow as appropriate).
612                         // The borrow checker guarantees that no new non-transient borrows are created.
613                         // NOTE: Once we have heap allocations during CTFE we need to figure out
614                         // how to prevent `const fn` to create long-lived allocations that point
615                         // to (interior) mutable memory.
616                         hir::ConstContext::ConstFn => self.check_op(ops::TransientCellBorrow),
617                         _ => {
618                             // Locals with StorageDead are definitely not part of the final constant value, and
619                             // it is thus inherently safe to permit such locals to have their
620                             // address taken as we can't end up with a reference to them in the
621                             // final value.
622                             // Note: This is only sound if every local that has a `StorageDead` has a
623                             // `StorageDead` in every control flow path leading to a `return` terminator.
624                             if self.local_has_storage_dead(place.local) {
625                                 self.check_op(ops::TransientCellBorrow);
626                             } else {
627                                 self.check_op(ops::CellBorrow);
628                             }
629                         }
630                     }
631                 }
632             }
633
634             Rvalue::Cast(
635                 CastKind::Pointer(PointerCast::MutToConstPointer | PointerCast::ArrayToPointer),
636                 _,
637                 _,
638             ) => {}
639
640             Rvalue::Cast(
641                 CastKind::Pointer(
642                     PointerCast::UnsafeFnPointer
643                     | PointerCast::ClosureFnPointer(_)
644                     | PointerCast::ReifyFnPointer,
645                 ),
646                 _,
647                 _,
648             ) => self.check_op(ops::FnPtrCast),
649
650             Rvalue::Cast(CastKind::Pointer(PointerCast::Unsize), _, cast_ty) => {
651                 if let Some(TypeAndMut { ty, .. }) = cast_ty.builtin_deref(true) {
652                     let unsized_ty = self.tcx.struct_tail_erasing_lifetimes(ty, self.param_env);
653
654                     // Casting/coercing things to slices is fine.
655                     if let ty::Slice(_) | ty::Str = unsized_ty.kind() {
656                         return;
657                     }
658                 }
659
660                 self.check_op(ops::UnsizingCast);
661             }
662
663             Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => {
664                 let operand_ty = operand.ty(self.body, self.tcx);
665                 let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast");
666                 let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
667
668                 if let (CastTy::Ptr(_) | CastTy::FnPtr, CastTy::Int(_)) = (cast_in, cast_out) {
669                     self.check_op(ops::RawPtrToIntCast);
670                 }
671             }
672
673             Rvalue::NullaryOp(NullOp::SizeOf, _) => {}
674             Rvalue::NullaryOp(NullOp::Box, _) => self.check_op(ops::HeapAllocation),
675
676             Rvalue::UnaryOp(_, ref operand) => {
677                 let ty = operand.ty(self.body, self.tcx);
678                 if is_int_bool_or_char(ty) {
679                     // Int, bool, and char operations are fine.
680                 } else if ty.is_floating_point() {
681                     self.check_op(ops::FloatingPointOp);
682                 } else {
683                     span_bug!(self.span, "non-primitive type in `Rvalue::UnaryOp`: {:?}", ty);
684                 }
685             }
686
687             Rvalue::BinaryOp(op, ref lhs, ref rhs)
688             | Rvalue::CheckedBinaryOp(op, ref lhs, ref rhs) => {
689                 let lhs_ty = lhs.ty(self.body, self.tcx);
690                 let rhs_ty = rhs.ty(self.body, self.tcx);
691
692                 if is_int_bool_or_char(lhs_ty) && is_int_bool_or_char(rhs_ty) {
693                     // Int, bool, and char operations are fine.
694                 } else if lhs_ty.is_fn_ptr() || lhs_ty.is_unsafe_ptr() {
695                     assert_eq!(lhs_ty, rhs_ty);
696                     assert!(
697                         op == BinOp::Eq
698                             || op == BinOp::Ne
699                             || op == BinOp::Le
700                             || op == BinOp::Lt
701                             || op == BinOp::Ge
702                             || op == BinOp::Gt
703                             || op == BinOp::Offset
704                     );
705
706                     self.check_op(ops::RawPtrComparison);
707                 } else if lhs_ty.is_floating_point() || rhs_ty.is_floating_point() {
708                     self.check_op(ops::FloatingPointOp);
709                 } else {
710                     span_bug!(
711                         self.span,
712                         "non-primitive type in `Rvalue::BinaryOp`: {:?} âš¬ {:?}",
713                         lhs_ty,
714                         rhs_ty
715                     );
716                 }
717             }
718         }
719     }
720
721     fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
722         self.super_operand(op, location);
723         if let Operand::Constant(c) = op {
724             if let Some(def_id) = c.check_static_ptr(self.tcx) {
725                 self.check_static(def_id, self.span);
726             }
727         }
728     }
729     fn visit_projection_elem(
730         &mut self,
731         place_local: Local,
732         proj_base: &[PlaceElem<'tcx>],
733         elem: PlaceElem<'tcx>,
734         context: PlaceContext,
735         location: Location,
736     ) {
737         trace!(
738             "visit_projection_elem: place_local={:?} proj_base={:?} elem={:?} \
739             context={:?} location={:?}",
740             place_local,
741             proj_base,
742             elem,
743             context,
744             location,
745         );
746
747         self.super_projection_elem(place_local, proj_base, elem, context, location);
748
749         match elem {
750             ProjectionElem::Deref => {
751                 let base_ty = Place::ty_from(place_local, proj_base, self.body, self.tcx).ty;
752                 if let ty::RawPtr(_) = base_ty.kind() {
753                     if proj_base.is_empty() {
754                         if let (local, []) = (place_local, proj_base) {
755                             let decl = &self.body.local_decls[local];
756                             if let Some(box LocalInfo::StaticRef { def_id, .. }) = decl.local_info {
757                                 let span = decl.source_info.span;
758                                 self.check_static(def_id, span);
759                                 return;
760                             }
761                         }
762                     }
763                     self.check_op(ops::RawPtrDeref);
764                 }
765
766                 if context.is_mutating_use() {
767                     self.check_op(ops::MutDeref);
768                 }
769             }
770
771             ProjectionElem::ConstantIndex { .. }
772             | ProjectionElem::Downcast(..)
773             | ProjectionElem::Subslice { .. }
774             | ProjectionElem::Field(..)
775             | ProjectionElem::Index(_) => {
776                 let base_ty = Place::ty_from(place_local, proj_base, self.body, self.tcx).ty;
777                 match base_ty.ty_adt_def() {
778                     Some(def) if def.is_union() => {
779                         self.check_op(ops::UnionAccess);
780                     }
781
782                     _ => {}
783                 }
784             }
785         }
786     }
787
788     fn visit_source_info(&mut self, source_info: &SourceInfo) {
789         trace!("visit_source_info: source_info={:?}", source_info);
790         self.span = source_info.span;
791     }
792
793     fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
794         trace!("visit_statement: statement={:?} location={:?}", statement, location);
795
796         self.super_statement(statement, location);
797
798         match statement.kind {
799             StatementKind::LlvmInlineAsm { .. } => {
800                 self.check_op(ops::InlineAsm);
801             }
802
803             StatementKind::Assign(..)
804             | StatementKind::SetDiscriminant { .. }
805             | StatementKind::FakeRead(..)
806             | StatementKind::StorageLive(_)
807             | StatementKind::StorageDead(_)
808             | StatementKind::Retag { .. }
809             | StatementKind::AscribeUserType(..)
810             | StatementKind::Coverage(..)
811             | StatementKind::Nop => {}
812         }
813     }
814
    // Checks terminators for operations forbidden in const contexts: non-const
    // calls (including trait-method calls and stability gating), live drops of
    // types that may need `Drop`, inline asm, and generator operations.
    #[instrument(level = "debug", skip(self))]
    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        use rustc_target::spec::abi::Abi::RustIntrinsic;

        self.super_terminator(terminator, location);

        match &terminator.kind {
            TerminatorKind::Call { func, args, .. } => {
                let ConstCx { tcx, body, param_env, .. } = *self.ccx;
                let caller = self.def_id().to_def_id();

                let fn_ty = func.ty(body, tcx);

                // Only direct calls (`FnDef`) can be const-checked further; calls
                // through function pointers are rejected outright.
                let (mut callee, substs) = match *fn_ty.kind() {
                    ty::FnDef(def_id, substs) => (def_id, substs),

                    ty::FnPtr(_) => {
                        self.check_op(ops::FnCallIndirect);
                        return;
                    }
                    _ => {
                        span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)
                    }
                };

                // Attempting to call a trait method?
                if let Some(trait_id) = tcx.trait_of_item(callee) {
                    trace!("attempting to call a trait method");
                    // Trait-method calls in const contexts require the
                    // `const_trait_impl` feature gate.
                    if !self.tcx.features().const_trait_impl {
                        self.check_op(ops::FnCallNonConst);
                        return;
                    }

                    // NOTE(review): `TraitRef::from_method` is computed twice here —
                    // `trait_ref` could be reused inside the `TraitPredicate` below.
                    let trait_ref = TraitRef::from_method(tcx, trait_id, substs);
                    let obligation = Obligation::new(
                        ObligationCause::dummy(),
                        param_env,
                        Binder::bind(TraitPredicate {
                            trait_ref: TraitRef::from_method(tcx, trait_id, substs),
                        }),
                    );

                    // Run trait selection to find out where the method implementation
                    // comes from (where-clause, impl, ...).
                    let implsrc = tcx.infer_ctxt().enter(|infcx| {
                        let mut selcx = SelectionContext::new(&infcx);
                        selcx.select(&obligation).unwrap()
                    });

                    // If the method is provided via a where-clause that does not use the `?const`
                    // opt-out, the call is allowed.
                    if let Some(ImplSource::Param(_, hir::Constness::Const)) = implsrc {
                        debug!(
                            "const_trait_impl: provided {:?} via where-clause in {:?}",
                            trait_ref, param_env
                        );
                        return;
                    }

                    // Resolve a trait method call to its concrete implementation, which may be in a
                    // `const` trait impl.
                    let instance = Instance::resolve(tcx, param_env, callee, substs);
                    debug!("Resolving ({:?}) -> {:?}", callee, instance);
                    if let Ok(Some(func)) = instance {
                        if let InstanceDef::Item(def) = func.def {
                            callee = def.did;
                        }
                    }
                }

                // At this point, we are calling a function, `callee`, whose `DefId` is known...

                // Panic lang items are permitted (they become const-eval errors),
                // but `begin_panic` only with a `&str` argument.
                if is_lang_panic_fn(tcx, callee) {
                    self.check_op(ops::Panic);

                    // const-eval of the `begin_panic` fn assumes the argument is `&str`
                    if Some(callee) == tcx.lang_items().begin_panic_fn() {
                        match args[0].ty(&self.ccx.body.local_decls, tcx).kind() {
                            ty::Ref(_, ty, _) if ty.is_str() => (),
                            _ => self.check_op(ops::PanicNonStr),
                        }
                    }

                    return;
                }

                // `async` blocks get lowered to `std::future::from_generator(/* a closure */)`.
                let is_async_block = Some(callee) == tcx.lang_items().from_generator_fn();
                if is_async_block {
                    let kind = hir::GeneratorKind::Async(hir::AsyncGeneratorKind::Block);
                    self.check_op(ops::Generator(kind));
                    return;
                }

                let is_intrinsic = tcx.fn_sig(callee).abi() == RustIntrinsic;

                // HACK: This is to "unstabilize" the `transmute` intrinsic
                // within const fns. `transmute` is allowed in all other const contexts.
                // This won't really scale to more intrinsics or functions. Let's allow const
                // transmutes in const fn before we add more hacks to this.
                if is_intrinsic && tcx.item_name(callee) == sym::transmute {
                    self.check_op(ops::Transmute);
                    return;
                }

                if !tcx.is_const_fn_raw(callee) {
                    self.check_op(ops::FnCallNonConst);
                    return;
                }

                // If the `const fn` we are trying to call is not const-stable, ensure that we have
                // the proper feature gate enabled.
                if let Some(gate) = is_unstable_const_fn(tcx, callee) {
                    trace!(?gate, "calling unstable const fn");
                    if self.span.allows_unstable(gate) {
                        return;
                    }

                    // Calling an unstable function *always* requires that the corresponding gate
                    // be enabled, even if the function has `#[rustc_allow_const_fn_unstable(the_gate)]`.
                    if !tcx.features().declared_lib_features.iter().any(|&(sym, _)| sym == gate) {
                        self.check_op(ops::FnCallUnstable(callee, Some(gate)));
                        return;
                    }

                    // If this crate is not using stability attributes, or the caller is not claiming to be a
                    // stable `const fn`, that is all that is required.
                    if !self.ccx.is_const_stable_const_fn() {
                        trace!("crate not using stability attributes or caller not stably const");
                        return;
                    }

                    // Otherwise, we are something const-stable calling a const-unstable fn.

                    if super::rustc_allow_const_fn_unstable(tcx, caller, gate) {
                        trace!("rustc_allow_const_fn_unstable gate active");
                        return;
                    }

                    self.check_op(ops::FnCallUnstable(callee, Some(gate)));
                    return;
                }

                // FIXME(ecstaticmorse); For compatibility, we consider `unstable` callees that
                // have no `rustc_const_stable` attributes to be const-unstable as well. This
                // should be fixed later.
                let callee_is_unstable_unmarked = tcx.lookup_const_stability(callee).is_none()
                    && tcx.lookup_stability(callee).map_or(false, |s| s.level.is_unstable());
                if callee_is_unstable_unmarked {
                    trace!("callee_is_unstable_unmarked");
                    // We do not use `const` modifiers for intrinsic "functions", as intrinsics are
                    // `extern` functions, and these have no way to get marked `const`. So instead we
                    // use `rustc_const_(un)stable` attributes to mean that the intrinsic is `const`
                    if self.ccx.is_const_stable_const_fn() || is_intrinsic {
                        self.check_op(ops::FnCallUnstable(callee, None));
                        return;
                    }
                }
                trace!("permitting call");
            }

            // Forbid all `Drop` terminators unless the place being dropped is a local with no
            // projections that cannot be `NeedsDrop`.
            TerminatorKind::Drop { place: dropped_place, .. }
            | TerminatorKind::DropAndReplace { place: dropped_place, .. } => {
                // If we are checking live drops after drop-elaboration, don't emit duplicate
                // errors here.
                if super::post_drop_elaboration::checking_enabled(self.ccx) {
                    return;
                }

                let mut err_span = self.span;

                // Check to see if the type of this place can ever have a drop impl. If not, this
                // `Drop` terminator is frivolous.
                let ty_needs_drop =
                    dropped_place.ty(self.body, self.tcx).ty.needs_drop(self.tcx, self.param_env);

                if !ty_needs_drop {
                    return;
                }

                // For a plain local, the flow-sensitive `NeedsDrop` qualif can prove
                // the drop dead; for projected places we conservatively assume a drop.
                let needs_drop = if let Some(local) = dropped_place.as_local() {
                    // Use the span where the local was declared as the span of the drop error.
                    err_span = self.body.local_decls[local].source_info.span;
                    self.qualifs.needs_drop(self.ccx, local, location)
                } else {
                    true
                };

                if needs_drop {
                    self.check_op_spanned(
                        ops::LiveDrop { dropped_at: Some(terminator.source_info.span) },
                        err_span,
                    );
                }
            }

            TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm),

            TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => {
                self.check_op(ops::Generator(hir::GeneratorKind::Gen))
            }

            TerminatorKind::Abort => {
                // Cleanup blocks are skipped for const checking (see `visit_basic_block_data`).
                span_bug!(self.span, "`Abort` terminator outside of cleanup block")
            }

            TerminatorKind::Assert { .. }
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. }
            | TerminatorKind::Goto { .. }
            | TerminatorKind::Resume
            | TerminatorKind::Return
            | TerminatorKind::SwitchInt { .. }
            | TerminatorKind::Unreachable => {}
        }
    }
1031 }
1032
1033 fn check_return_ty_is_sync(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, hir_id: HirId) {
1034     let ty = body.return_ty();
1035     tcx.infer_ctxt().enter(|infcx| {
1036         let cause = traits::ObligationCause::new(body.span, hir_id, traits::SharedStatic);
1037         let mut fulfillment_cx = traits::FulfillmentContext::new();
1038         let sync_def_id = tcx.require_lang_item(LangItem::Sync, Some(body.span));
1039         fulfillment_cx.register_bound(&infcx, ty::ParamEnv::empty(), ty, sync_def_id, cause);
1040         if let Err(err) = fulfillment_cx.select_all_or_error(&infcx) {
1041             infcx.report_fulfillment_errors(&err, None, false);
1042         }
1043     });
1044 }
1045
1046 fn place_as_reborrow(
1047     tcx: TyCtxt<'tcx>,
1048     body: &Body<'tcx>,
1049     place: Place<'tcx>,
1050 ) -> Option<PlaceRef<'tcx>> {
1051     match place.as_ref().last_projection() {
1052         Some((place_base, ProjectionElem::Deref)) => {
1053             // A borrow of a `static` also looks like `&(*_1)` in the MIR, but `_1` is a `const`
1054             // that points to the allocation for the static. Don't treat these as reborrows.
1055             if body.local_decls[place_base.local].is_ref_to_static() {
1056                 None
1057             } else {
1058                 // Ensure the type being derefed is a reference and not a raw pointer.
1059                 // This is sufficient to prevent an access to a `static mut` from being marked as a
1060                 // reborrow, even if the check above were to disappear.
1061                 let inner_ty = place_base.ty(body, tcx).ty;
1062
1063                 if let ty::Ref(..) = inner_ty.kind() {
1064                     return Some(place_base);
1065                 } else {
1066                     return None;
1067                 }
1068             }
1069         }
1070         _ => None,
1071     }
1072 }
1073
1074 fn is_int_bool_or_char(ty: Ty<'_>) -> bool {
1075     ty.is_bool() || ty.is_integral() || ty.is_char()
1076 }
1077
1078 fn is_async_fn(ccx: &ConstCx<'_, '_>) -> bool {
1079     ccx.fn_sig().map_or(false, |sig| sig.header.asyncness == hir::IsAsync::Async)
1080 }
1081
1082 fn emit_unstable_in_stable_error(ccx: &ConstCx<'_, '_>, span: Span, gate: Symbol) {
1083     let attr_span = ccx.fn_sig().map_or(ccx.body.span, |sig| sig.span.shrink_to_lo());
1084
1085     ccx.tcx
1086         .sess
1087         .struct_span_err(
1088             span,
1089             &format!("const-stable function cannot use `#[feature({})]`", gate.as_str()),
1090         )
1091         .span_suggestion(
1092             attr_span,
1093             "if it is not part of the public API, make this function unstably const",
1094             concat!(r#"#[rustc_const_unstable(feature = "...", issue = "...")]"#, '\n').to_owned(),
1095             Applicability::HasPlaceholders,
1096         )
1097         .span_suggestion(
1098             attr_span,
1099             "otherwise `#[rustc_allow_const_fn_unstable]` can be used to bypass stability checks",
1100             format!("#[rustc_allow_const_fn_unstable({})]\n", gate),
1101             Applicability::MaybeIncorrect,
1102         )
1103         .emit();
1104 }