git.lizzy.rs Git - rust.git/commitdiff
Rollup merge of #107746 - Nathan-Fenner:nathanf/clean-up-adjust_fulfillment_errors...
author Matthias Krüger <matthias.krueger@famsik.de>
Tue, 7 Feb 2023 16:57:16 +0000 (17:57 +0100)
committer GitHub <noreply@github.com>
Tue, 7 Feb 2023 16:57:16 +0000 (17:57 +0100)
Split fn_ctxt/adjust_fulfillment_errors from fn_ctxt/checks

This is a follow-up to #106477: it addresses a small number of the `FIXME`s added there by moving some functions into the new(er) `adjust_fulfillment_errors` module.

More cleanup is possible for this file (and I hope to get to some of it soon), but the very first step is simply to move these functions out.

There should be no "real" changes in this PR, besides minor adjustments to imports and the functions being transferred.
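Since the split itself isn't shown in the hunks below, here is a minimal, self-contained Rust sketch of the pattern being applied (splitting inherent methods of one type across sibling modules); all names in it are illustrative assumptions, not code from the PR.

// Minimal sketch of the refactoring pattern (splitting inherent methods of
// one type across sibling modules). All names here are illustrative
// assumptions and do not come from the PR itself.
mod fn_ctxt {
    pub struct FnCtxt;

    // Existing module: some checking-related methods stay here.
    mod checks {
        impl super::FnCtxt {
            pub fn check(&self) -> &'static str {
                "checked"
            }
        }
    }

    // Newly split-out sibling module: moved methods remain inherent methods
    // on `FnCtxt`, so callers elsewhere in the crate are unaffected; only
    // imports inside the moved code need adjusting.
    mod adjust_fulfillment_errors {
        impl super::FnCtxt {
            pub fn adjust_fulfillment_error(&self) -> &'static str {
                "adjusted"
            }
        }
    }
}

fn main() {
    let fcx = fn_ctxt::FnCtxt;
    println!("{} / {}", fcx.check(), fcx.adjust_fulfillment_error());
}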

21 files changed:
compiler/rustc_abi/src/lib.rs
compiler/rustc_driver_impl/src/lib.rs
compiler/rustc_metadata/src/rmeta/decoder.rs
compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs
compiler/rustc_metadata/src/rmeta/encoder.rs
compiler/rustc_metadata/src/rmeta/mod.rs
compiler/rustc_metadata/src/rmeta/table.rs
compiler/rustc_middle/src/query/mod.rs
compiler/rustc_middle/src/ty/layout.rs
compiler/rustc_span/src/def_id.rs
compiler/rustc_ty_utils/src/abi.rs
compiler/rustc_ty_utils/src/assoc.rs
src/bootstrap/test.rs
src/tools/expand-yaml-anchors/src/main.rs
src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs
src/tools/miri/tests/fail/stacked_borrows/deallocate_against_protector2.rs [deleted file]
src/tools/miri/tests/fail/stacked_borrows/deallocate_against_protector2.stderr [deleted file]
src/tools/miri/tests/pass/stacked-borrows/future-self-referential.rs
src/tools/miri/tests/pass/stacked-borrows/stacked-borrows.rs
tests/codegen/function-arguments-noopt.rs
tests/codegen/function-arguments.rs

diff --git a/compiler/rustc_abi/src/lib.rs b/compiler/rustc_abi/src/lib.rs
index 5af6206c0bb80dcabec6169f2ec00aabfd3ca146..0306cb5ce6abd7615d068c093cff045333e2e26b 100644 (file)
@@ -1439,21 +1439,12 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
 #[derive(Copy, Clone, PartialEq, Eq, Debug)]
 pub enum PointerKind {
-    /// Most general case, we know no restrictions to tell LLVM.
-    SharedMutable,
-
-    /// `&T` where `T` contains no `UnsafeCell`, is `dereferenceable`, `noalias` and `readonly`.
-    Frozen,
-
-    /// `&mut T` which is `dereferenceable` and `noalias` but not `readonly`.
-    UniqueBorrowed,
-
-    /// `&mut !Unpin`, which is `dereferenceable` but neither `noalias` nor `readonly`.
-    UniqueBorrowedPinned,
-
-    /// `Box<T>`, which is `noalias` (even on return types, unlike the above) but neither `readonly`
-    /// nor `dereferenceable`.
-    UniqueOwned,
+    /// Shared reference. `frozen` indicates the absence of any `UnsafeCell`.
+    SharedRef { frozen: bool },
+    /// Mutable reference. `unpin` indicates the absence of any pinned data.
+    MutableRef { unpin: bool },
+    /// Box. `unpin` indicates the absence of any pinned data.
+    Box { unpin: bool },
 }
 
 /// Note that this information is advisory only, and backends are free to ignore it.
diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs
index 02e0b042ad2631eadad94771b4a2dfab2c2defd4..1da13afecfa935b9adf8d0568064a0aa675423b3 100644 (file)
@@ -326,14 +326,16 @@ fn run_compiler(
                 }
             }
 
-            let mut gctxt = queries.global_ctxt()?;
+            // Make sure name resolution and macro expansion is run.
+            queries.global_ctxt()?;
+
             if callbacks.after_expansion(compiler, queries) == Compilation::Stop {
                 return early_exit();
             }
 
             // Make sure the `output_filenames` query is run for its side
             // effects of writing the dep-info and reporting errors.
-            gctxt.enter(|tcx| tcx.output_filenames(()));
+            queries.global_ctxt()?.enter(|tcx| tcx.output_filenames(()));
 
             if sess.opts.output_types.contains_key(&OutputType::DepInfo)
                 && sess.opts.output_types.len() == 1
@@ -345,7 +347,7 @@ fn run_compiler(
                 return early_exit();
             }
 
-            gctxt.enter(|tcx| {
+            queries.global_ctxt()?.enter(|tcx| {
                 let result = tcx.analysis(());
                 if sess.opts.unstable_opts.save_analysis {
                     let crate_name = tcx.crate_name(LOCAL_CRATE);
@@ -362,8 +364,6 @@ fn run_compiler(
                 result
             })?;
 
-            drop(gctxt);
-
             if callbacks.after_analysis(compiler, queries) == Compilation::Stop {
                 return early_exit();
             }
diff --git a/compiler/rustc_metadata/src/rmeta/decoder.rs b/compiler/rustc_metadata/src/rmeta/decoder.rs
index bb2dd290c6d5d1edef0ef7f26ae3f0bbb1793d69..e2b07fad6e782ee24ed5e65f8889a15e1bea3516 100644 (file)
@@ -654,7 +654,7 @@ fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> Self {
 impl<'a, 'tcx, T> Decodable<DecodeContext<'a, 'tcx>> for LazyArray<T> {
     fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> Self {
         let len = decoder.read_usize();
-        if len == 0 { LazyArray::empty() } else { decoder.read_lazy_array(len) }
+        if len == 0 { LazyArray::default() } else { decoder.read_lazy_array(len) }
     }
 }
 
@@ -864,7 +864,7 @@ fn get_variant(self, kind: &DefKind, index: DefIndex, parent_did: DefId) -> ty::
                 .tables
                 .children
                 .get(self, index)
-                .unwrap_or_else(LazyArray::empty)
+                .expect("fields are not encoded for a variant")
                 .decode(self)
                 .map(|index| ty::FieldDef {
                     did: self.local_def_id(index),
@@ -896,7 +896,7 @@ fn get_adt_def(self, item_id: DefIndex, tcx: TyCtxt<'tcx>) -> ty::AdtDef<'tcx> {
                 .tables
                 .children
                 .get(self, item_id)
-                .unwrap_or_else(LazyArray::empty)
+                .expect("variants are not encoded for an enum")
                 .decode(self)
                 .filter_map(|index| {
                     let kind = self.def_kind(index);
@@ -1045,7 +1045,7 @@ fn get_fn_has_self_parameter(self, id: DefIndex, sess: &'a Session) -> bool {
             .tables
             .fn_arg_names
             .get(self, id)
-            .unwrap_or_else(LazyArray::empty)
+            .expect("argument names not encoded for a function")
             .decode((self, sess))
             .nth(0)
             .map_or(false, |ident| ident.name == kw::SelfLower)
@@ -1060,7 +1060,7 @@ fn get_associated_item_def_ids(
             .tables
             .children
             .get(self, id)
-            .unwrap_or_else(LazyArray::empty)
+            .expect("associated items not encoded for an item")
             .decode((self, sess))
             .map(move |child_index| self.local_def_id(child_index))
     }
@@ -1068,13 +1068,12 @@ fn get_associated_item_def_ids(
     fn get_associated_item(self, id: DefIndex, sess: &'a Session) -> ty::AssocItem {
         let name = self.item_name(id);
 
-        let kind = match self.def_kind(id) {
-            DefKind::AssocConst => ty::AssocKind::Const,
-            DefKind::AssocFn => ty::AssocKind::Fn,
-            DefKind::AssocTy => ty::AssocKind::Type,
+        let (kind, has_self) = match self.def_kind(id) {
+            DefKind::AssocConst => (ty::AssocKind::Const, false),
+            DefKind::AssocFn => (ty::AssocKind::Fn, self.get_fn_has_self_parameter(id, sess)),
+            DefKind::AssocTy => (ty::AssocKind::Type, false),
             _ => bug!("cannot get associated-item of `{:?}`", self.def_key(id)),
         };
-        let has_self = self.get_fn_has_self_parameter(id, sess);
         let container = self.root.tables.assoc_container.get(self, id).unwrap();
 
         ty::AssocItem {
@@ -1131,7 +1130,7 @@ fn get_struct_field_names(
             .tables
             .children
             .get(self, id)
-            .unwrap_or_else(LazyArray::empty)
+            .expect("fields not encoded for a struct")
             .decode(self)
             .map(move |index| respan(self.get_span(index, sess), self.item_name(index)))
     }
@@ -1144,7 +1143,7 @@ fn get_struct_field_visibilities(
             .tables
             .children
             .get(self, id)
-            .unwrap_or_else(LazyArray::empty)
+            .expect("fields not encoded for a struct")
             .decode(self)
             .map(move |field_index| self.get_visibility(field_index))
     }
@@ -1159,7 +1158,6 @@ fn get_inherent_implementations_for_type(
                 .tables
                 .inherent_impls
                 .get(self, id)
-                .unwrap_or_else(LazyArray::empty)
                 .decode(self)
                 .map(|index| self.local_def_id(index)),
         )
@@ -1174,7 +1172,6 @@ fn get_inherent_impls(self) -> impl Iterator<Item = (DefId, DefId)> + 'a {
                 .tables
                 .inherent_impls
                 .get(self, ty_index)
-                .unwrap_or_else(LazyArray::empty)
                 .decode(self)
                 .map(move |impl_index| (ty_def_id, self.local_def_id(impl_index)))
         })
@@ -1322,7 +1319,7 @@ fn def_path_hash_unlocked(
     ) -> DefPathHash {
         *def_path_hashes
             .entry(index)
-            .or_insert_with(|| self.root.tables.def_path_hashes.get(self, index).unwrap())
+            .or_insert_with(|| self.root.tables.def_path_hashes.get(self, index))
     }
 
     #[inline]
diff --git a/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs b/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs
index 9b1401f4a44dfdf922120faf9bd9f165c902d3c9..07cc84ab95368480a2ec09b80fe0bad29ed63e0c 100644 (file)
@@ -1,6 +1,7 @@
 use crate::creader::{CStore, LoadedMacro};
 use crate::foreign_modules;
 use crate::native_libs;
+use crate::rmeta::table::IsDefault;
 use crate::rmeta::AttrFlags;
 
 use rustc_ast as ast;
@@ -88,6 +89,14 @@ macro_rules! provide_one {
             }
         }
     };
+    ($tcx:ident, $def_id:ident, $other:ident, $cdata:ident, $name:ident => { table_defaulted_array }) => {
+        provide_one! {
+            $tcx, $def_id, $other, $cdata, $name => {
+                let lazy = $cdata.root.tables.$name.get($cdata, $def_id.index);
+                if lazy.is_default() { &[] } else { $tcx.arena.alloc_from_iter(lazy.decode(($cdata, $tcx))) }
+            }
+        }
+    };
     ($tcx:ident, $def_id:ident, $other:ident, $cdata:ident, $name:ident => { table_direct }) => {
         provide_one! {
             $tcx, $def_id, $other, $cdata, $name => {
@@ -187,10 +196,10 @@ fn into_args(self) -> (DefId, SimplifiedType) {
 }
 
 provide! { tcx, def_id, other, cdata,
-    explicit_item_bounds => { table }
+    explicit_item_bounds => { table_defaulted_array }
     explicit_predicates_of => { table }
     generics_of => { table }
-    inferred_outlives_of => { table }
+    inferred_outlives_of => { table_defaulted_array }
     super_predicates_of => { table }
     type_of => { table }
     variances_of => { table }
diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs
index 9d8f14058f681203548cd2a703b3556b5d6c7129..85e9ae9a98302bb780f1866295e0768970998982 100644 (file)
@@ -76,13 +76,13 @@ pub(super) struct EncodeContext<'a, 'tcx> {
     symbol_table: FxHashMap<Symbol, usize>,
 }
 
-/// If the current crate is a proc-macro, returns early with `LazyArray::empty()`.
+/// If the current crate is a proc-macro, returns early with `LazyArray::default()`.
 /// This is useful for skipping the encoding of things that aren't needed
 /// for proc-macro crates.
 macro_rules! empty_proc_macro {
     ($self:ident) => {
         if $self.is_proc_macro {
-            return LazyArray::empty();
+            return LazyArray::default();
         }
     };
 }
@@ -365,21 +365,31 @@ fn encode_alloc_id(&mut self, alloc_id: &rustc_middle::mir::interpret::AllocId)
     }
 }
 
-// Shorthand for `$self.$tables.$table.set($def_id.index, $self.lazy_value($value))`, which would
+// Shorthand for `$self.$tables.$table.set_some($def_id.index, $self.lazy_value($value))`, which would
 // normally need extra variables to avoid errors about multiple mutable borrows.
 macro_rules! record {
     ($self:ident.$tables:ident.$table:ident[$def_id:expr] <- $value:expr) => {{
         {
             let value = $value;
             let lazy = $self.lazy(value);
-            $self.$tables.$table.set($def_id.index, lazy);
+            $self.$tables.$table.set_some($def_id.index, lazy);
         }
     }};
 }
 
-// Shorthand for `$self.$tables.$table.set($def_id.index, $self.lazy_value($value))`, which would
+// Shorthand for `$self.$tables.$table.set_some($def_id.index, $self.lazy_value($value))`, which would
 // normally need extra variables to avoid errors about multiple mutable borrows.
 macro_rules! record_array {
+    ($self:ident.$tables:ident.$table:ident[$def_id:expr] <- $value:expr) => {{
+        {
+            let value = $value;
+            let lazy = $self.lazy_array(value);
+            $self.$tables.$table.set_some($def_id.index, lazy);
+        }
+    }};
+}
+
+macro_rules! record_defaulted_array {
     ($self:ident.$tables:ident.$table:ident[$def_id:expr] <- $value:expr) => {{
         {
             let value = $value;
@@ -467,13 +477,13 @@ fn encode_def_path_table(&mut self) {
             {
                 let def_key = self.lazy(table.def_key(def_index));
                 let def_path_hash = table.def_path_hash(def_index);
-                self.tables.def_keys.set(def_index, def_key);
+                self.tables.def_keys.set_some(def_index, def_key);
                 self.tables.def_path_hashes.set(def_index, def_path_hash);
             }
         } else {
             for (def_index, def_key, def_path_hash) in table.enumerated_keys_and_path_hashes() {
                 let def_key = self.lazy(def_key);
-                self.tables.def_keys.set(def_index, def_key);
+                self.tables.def_keys.set_some(def_index, def_key);
                 self.tables.def_path_hashes.set(def_index, *def_path_hash);
             }
         }
@@ -548,7 +558,7 @@ fn encode_source_map(&mut self) -> LazyTable<u32, Option<LazyValue<rustc_span::S
 
             let on_disk_index: u32 =
                 on_disk_index.try_into().expect("cannot export more than U32_MAX files");
-            adapted.set(on_disk_index, self.lazy(source_file));
+            adapted.set_some(on_disk_index, self.lazy(source_file));
         }
 
         adapted.encode(&mut self.opaque)
@@ -1147,9 +1157,7 @@ fn encode_attrs(&mut self, def_id: LocalDefId) {
         if state.is_doc_hidden {
             attr_flags |= AttrFlags::IS_DOC_HIDDEN;
         }
-        if !attr_flags.is_empty() {
-            self.tables.attr_flags.set_nullable(def_id.local_def_index, attr_flags);
-        }
+        self.tables.attr_flags.set(def_id.local_def_index, attr_flags);
     }
 
     fn encode_def_ids(&mut self) {
@@ -1161,7 +1169,7 @@ fn encode_def_ids(&mut self) {
             let def_id = local_id.to_def_id();
             let def_kind = tcx.opt_def_kind(local_id);
             let Some(def_kind) = def_kind else { continue };
-            self.tables.opt_def_kind.set(def_id.index, def_kind);
+            self.tables.opt_def_kind.set_some(def_id.index, def_kind);
             let def_span = tcx.def_span(local_id);
             record!(self.tables.def_span[def_id] <- def_span);
             self.encode_attrs(local_id);
@@ -1192,9 +1200,7 @@ fn encode_def_ids(&mut self) {
                 record!(self.tables.generics_of[def_id] <- g);
                 record!(self.tables.explicit_predicates_of[def_id] <- self.tcx.explicit_predicates_of(def_id));
                 let inferred_outlives = self.tcx.inferred_outlives_of(def_id);
-                if !inferred_outlives.is_empty() {
-                    record_array!(self.tables.inferred_outlives_of[def_id] <- inferred_outlives);
-                }
+                record_defaulted_array!(self.tables.inferred_outlives_of[def_id] <- inferred_outlives);
             }
             if should_encode_type(tcx, local_id, def_kind) {
                 record!(self.tables.type_of[def_id] <- self.tcx.type_of(def_id));
@@ -1215,15 +1221,12 @@ fn encode_def_ids(&mut self) {
                 record!(self.tables.trait_impl_trait_tys[def_id] <- table);
             }
         }
+
         let inherent_impls = tcx.with_stable_hashing_context(|hcx| {
             tcx.crate_inherent_impls(()).inherent_impls.to_sorted(&hcx, true)
         });
-
-        for (def_id, implementations) in inherent_impls {
-            if implementations.is_empty() {
-                continue;
-            }
-            record_array!(self.tables.inherent_impls[def_id.to_def_id()] <- implementations.iter().map(|&def_id| {
+        for (def_id, impls) in inherent_impls {
+            record_defaulted_array!(self.tables.inherent_impls[def_id.to_def_id()] <- impls.iter().map(|def_id| {
                 assert!(def_id.is_local());
                 def_id.index
             }));
@@ -1264,14 +1267,14 @@ fn encode_info_for_adt(&mut self, def_id: DefId) {
             };
             record!(self.tables.variant_data[variant.def_id] <- data);
 
-            self.tables.constness.set(variant.def_id.index, hir::Constness::Const);
+            self.tables.constness.set_some(variant.def_id.index, hir::Constness::Const);
             record_array!(self.tables.children[variant.def_id] <- variant.fields.iter().map(|f| {
                 assert!(f.did.is_local());
                 f.did.index
             }));
 
             if let Some((CtorKind::Fn, ctor_def_id)) = variant.ctor {
-                self.tables.constness.set(ctor_def_id.index, hir::Constness::Const);
+                self.tables.constness.set_some(ctor_def_id.index, hir::Constness::Const);
                 let fn_sig = tcx.fn_sig(ctor_def_id);
                 record!(self.tables.fn_sig[ctor_def_id] <- fn_sig);
                 // FIXME only encode signature for ctor_def_id
@@ -1332,9 +1335,7 @@ fn encode_info_for_mod(&mut self, local_def_id: LocalDefId, md: &hir::Mod<'_>) {
     fn encode_explicit_item_bounds(&mut self, def_id: DefId) {
         debug!("EncodeContext::encode_explicit_item_bounds({:?})", def_id);
         let bounds = self.tcx.explicit_item_bounds(def_id);
-        if !bounds.is_empty() {
-            record_array!(self.tables.explicit_item_bounds[def_id] <- bounds);
-        }
+        record_defaulted_array!(self.tables.explicit_item_bounds[def_id] <- bounds);
     }
 
     fn encode_info_for_trait_item(&mut self, def_id: DefId) {
@@ -1342,16 +1343,16 @@ fn encode_info_for_trait_item(&mut self, def_id: DefId) {
         let tcx = self.tcx;
 
         let impl_defaultness = tcx.impl_defaultness(def_id.expect_local());
-        self.tables.impl_defaultness.set(def_id.index, impl_defaultness);
+        self.tables.impl_defaultness.set_some(def_id.index, impl_defaultness);
         let trait_item = tcx.associated_item(def_id);
-        self.tables.assoc_container.set(def_id.index, trait_item.container);
+        self.tables.assoc_container.set_some(def_id.index, trait_item.container);
 
         match trait_item.kind {
             ty::AssocKind::Const => {}
             ty::AssocKind::Fn => {
                 record_array!(self.tables.fn_arg_names[def_id] <- tcx.fn_arg_names(def_id));
-                self.tables.asyncness.set(def_id.index, tcx.asyncness(def_id));
-                self.tables.constness.set(def_id.index, hir::Constness::NotConst);
+                self.tables.asyncness.set_some(def_id.index, tcx.asyncness(def_id));
+                self.tables.constness.set_some(def_id.index, hir::Constness::NotConst);
             }
             ty::AssocKind::Type => {
                 self.encode_explicit_item_bounds(def_id);
@@ -1367,14 +1368,14 @@ fn encode_info_for_impl_item(&mut self, def_id: DefId) {
         let tcx = self.tcx;
 
         let ast_item = self.tcx.hir().expect_impl_item(def_id.expect_local());
-        self.tables.impl_defaultness.set(def_id.index, ast_item.defaultness);
+        self.tables.impl_defaultness.set_some(def_id.index, ast_item.defaultness);
         let impl_item = self.tcx.associated_item(def_id);
-        self.tables.assoc_container.set(def_id.index, impl_item.container);
+        self.tables.assoc_container.set_some(def_id.index, impl_item.container);
 
         match impl_item.kind {
             ty::AssocKind::Fn => {
                 let hir::ImplItemKind::Fn(ref sig, body) = ast_item.kind else { bug!() };
-                self.tables.asyncness.set(def_id.index, sig.header.asyncness);
+                self.tables.asyncness.set_some(def_id.index, sig.header.asyncness);
                 record_array!(self.tables.fn_arg_names[def_id] <- self.tcx.hir().body_param_names(body));
                 // Can be inside `impl const Trait`, so using sig.header.constness is not reliable
                 let constness = if self.tcx.is_const_fn_raw(def_id) {
@@ -1382,18 +1383,16 @@ fn encode_info_for_impl_item(&mut self, def_id: DefId) {
                 } else {
                     hir::Constness::NotConst
                 };
-                self.tables.constness.set(def_id.index, constness);
+                self.tables.constness.set_some(def_id.index, constness);
             }
             ty::AssocKind::Const | ty::AssocKind::Type => {}
         }
         if let Some(trait_item_def_id) = impl_item.trait_item_def_id {
-            self.tables.trait_item_def_id.set(def_id.index, trait_item_def_id.into());
+            self.tables.trait_item_def_id.set_some(def_id.index, trait_item_def_id.into());
         }
         if impl_item.kind == ty::AssocKind::Fn {
             record!(self.tables.fn_sig[def_id] <- tcx.fn_sig(def_id));
-            if tcx.is_intrinsic(def_id) {
-                self.tables.is_intrinsic.set_nullable(def_id.index, true);
-            }
+            self.tables.is_intrinsic.set(def_id.index, tcx.is_intrinsic(def_id));
         }
     }
 
@@ -1522,14 +1521,12 @@ fn encode_info_for_item(&mut self, def_id: DefId, item: &'tcx hir::Item<'tcx>) {
 
         match item.kind {
             hir::ItemKind::Fn(ref sig, .., body) => {
-                self.tables.asyncness.set(def_id.index, sig.header.asyncness);
+                self.tables.asyncness.set_some(def_id.index, sig.header.asyncness);
                 record_array!(self.tables.fn_arg_names[def_id] <- self.tcx.hir().body_param_names(body));
-                self.tables.constness.set(def_id.index, sig.header.constness);
+                self.tables.constness.set_some(def_id.index, sig.header.constness);
             }
             hir::ItemKind::Macro(ref macro_def, _) => {
-                if macro_def.macro_rules {
-                    self.tables.is_macro_rules.set_nullable(def_id.index, true);
-                }
+                self.tables.is_macro_rules.set(def_id.index, macro_def.macro_rules);
                 record!(self.tables.macro_definition[def_id] <- &*macro_def.body);
             }
             hir::ItemKind::Mod(ref m) => {
@@ -1537,20 +1534,20 @@ fn encode_info_for_item(&mut self, def_id: DefId, item: &'tcx hir::Item<'tcx>) {
             }
             hir::ItemKind::OpaqueTy(ref opaque) => {
                 self.encode_explicit_item_bounds(def_id);
-                if matches!(opaque.origin, hir::OpaqueTyOrigin::TyAlias) {
-                    self.tables.is_type_alias_impl_trait.set_nullable(def_id.index, true);
-                }
+                self.tables
+                    .is_type_alias_impl_trait
+                    .set(def_id.index, matches!(opaque.origin, hir::OpaqueTyOrigin::TyAlias));
             }
             hir::ItemKind::Impl(hir::Impl { defaultness, constness, .. }) => {
-                self.tables.impl_defaultness.set(def_id.index, *defaultness);
-                self.tables.constness.set(def_id.index, *constness);
+                self.tables.impl_defaultness.set_some(def_id.index, *defaultness);
+                self.tables.constness.set_some(def_id.index, *constness);
 
                 let trait_ref = self.tcx.impl_trait_ref(def_id).map(ty::EarlyBinder::skip_binder);
                 if let Some(trait_ref) = trait_ref {
                     let trait_def = self.tcx.trait_def(trait_ref.def_id);
                     if let Ok(mut an) = trait_def.ancestors(self.tcx, def_id) {
                         if let Some(specialization_graph::Node::Impl(parent)) = an.nth(1) {
-                            self.tables.impl_parent.set(def_id.index, parent.into());
+                            self.tables.impl_parent.set_some(def_id.index, parent.into());
                         }
                     }
 
@@ -1564,7 +1561,7 @@ fn encode_info_for_item(&mut self, def_id: DefId, item: &'tcx hir::Item<'tcx>) {
                 }
 
                 let polarity = self.tcx.impl_polarity(def_id);
-                self.tables.impl_polarity.set(def_id.index, polarity);
+                self.tables.impl_polarity.set_some(def_id.index, polarity);
             }
             hir::ItemKind::Trait(..) => {
                 let trait_def = self.tcx.trait_def(def_id);
@@ -1601,9 +1598,7 @@ fn encode_info_for_item(&mut self, def_id: DefId, item: &'tcx hir::Item<'tcx>) {
         }
         if let hir::ItemKind::Fn(..) = item.kind {
             record!(self.tables.fn_sig[def_id] <- tcx.fn_sig(def_id));
-            if tcx.is_intrinsic(def_id) {
-                self.tables.is_intrinsic.set_nullable(def_id.index, true);
-            }
+            self.tables.is_intrinsic.set(def_id.index, tcx.is_intrinsic(def_id));
         }
         if let hir::ItemKind::Impl { .. } = item.kind {
             if let Some(trait_ref) = self.tcx.impl_trait_ref(def_id) {
@@ -1650,7 +1645,7 @@ fn encode_info_for_closure(&mut self, def_id: LocalDefId) {
 
             ty::Closure(_, substs) => {
                 let constness = self.tcx.constness(def_id.to_def_id());
-                self.tables.constness.set(def_id.to_def_id().index, constness);
+                self.tables.constness.set_some(def_id.to_def_id().index, constness);
                 record!(self.tables.fn_sig[def_id.to_def_id()] <- ty::EarlyBinder(substs.as_closure().sig()));
             }
 
@@ -1678,12 +1673,12 @@ fn encode_hygiene(&mut self) -> (SyntaxContextTable, ExpnDataTable, ExpnHashTabl
         self.hygiene_ctxt.encode(
             &mut (&mut *self, &mut syntax_contexts, &mut expn_data_table, &mut expn_hash_table),
             |(this, syntax_contexts, _, _), index, ctxt_data| {
-                syntax_contexts.set(index, this.lazy(ctxt_data));
+                syntax_contexts.set_some(index, this.lazy(ctxt_data));
             },
             |(this, _, expn_data_table, expn_hash_table), index, expn_data, hash| {
                 if let Some(index) = index.as_local() {
-                    expn_data_table.set(index.as_raw(), this.lazy(expn_data));
-                    expn_hash_table.set(index.as_raw(), this.lazy(hash));
+                    expn_data_table.set_some(index.as_raw(), this.lazy(expn_data));
+                    expn_hash_table.set_some(index.as_raw(), this.lazy(hash));
                 }
             },
         );
@@ -1708,10 +1703,10 @@ fn encode_proc_macros(&mut self) -> Option<ProcMacroData> {
             let spans = self.tcx.sess.parse_sess.proc_macro_quoted_spans();
             for (i, span) in spans.into_iter().enumerate() {
                 let span = self.lazy(span);
-                self.tables.proc_macro_quoted_spans.set(i, span);
+                self.tables.proc_macro_quoted_spans.set_some(i, span);
             }
 
-            self.tables.opt_def_kind.set(LOCAL_CRATE.as_def_id().index, DefKind::Mod);
+            self.tables.opt_def_kind.set_some(LOCAL_CRATE.as_def_id().index, DefKind::Mod);
             record!(self.tables.def_span[LOCAL_CRATE.as_def_id()] <- tcx.def_span(LOCAL_CRATE.as_def_id()));
             self.encode_attrs(LOCAL_CRATE.as_def_id().expect_local());
             let vis = tcx.local_visibility(CRATE_DEF_ID).map_id(|def_id| def_id.local_def_index);
@@ -1753,8 +1748,8 @@ fn encode_proc_macros(&mut self) -> Option<ProcMacroData> {
                 def_key.disambiguated_data.data = DefPathData::MacroNs(name);
 
                 let def_id = id.to_def_id();
-                self.tables.opt_def_kind.set(def_id.index, DefKind::Macro(macro_kind));
-                self.tables.proc_macro.set(def_id.index, macro_kind);
+                self.tables.opt_def_kind.set_some(def_id.index, DefKind::Macro(macro_kind));
+                self.tables.proc_macro.set_some(def_id.index, macro_kind);
                 self.encode_attrs(id);
                 record!(self.tables.def_keys[def_id] <- def_key);
                 record!(self.tables.def_ident_span[def_id] <- span);
@@ -1969,7 +1964,7 @@ fn encode_dylib_dependency_formats(&mut self) -> LazyArray<Option<LinkagePrefere
                 Linkage::Static => Some(LinkagePreference::RequireStatic),
             }));
         }
-        LazyArray::empty()
+        LazyArray::default()
     }
 
     fn encode_info_for_foreign_item(&mut self, def_id: DefId, nitem: &hir::ForeignItem<'_>) {
@@ -1979,22 +1974,20 @@ fn encode_info_for_foreign_item(&mut self, def_id: DefId, nitem: &hir::ForeignIt
 
         match nitem.kind {
             hir::ForeignItemKind::Fn(_, ref names, _) => {
-                self.tables.asyncness.set(def_id.index, hir::IsAsync::NotAsync);
+                self.tables.asyncness.set_some(def_id.index, hir::IsAsync::NotAsync);
                 record_array!(self.tables.fn_arg_names[def_id] <- *names);
                 let constness = if self.tcx.is_const_fn_raw(def_id) {
                     hir::Constness::Const
                 } else {
                     hir::Constness::NotConst
                 };
-                self.tables.constness.set(def_id.index, constness);
+                self.tables.constness.set_some(def_id.index, constness);
                 record!(self.tables.fn_sig[def_id] <- tcx.fn_sig(def_id));
             }
             hir::ForeignItemKind::Static(..) | hir::ForeignItemKind::Type => {}
         }
         if let hir::ForeignItemKind::Fn(..) = nitem.kind {
-            if tcx.is_intrinsic(def_id) {
-                self.tables.is_intrinsic.set_nullable(def_id.index, true);
-            }
+            self.tables.is_intrinsic.set(def_id.index, tcx.is_intrinsic(def_id));
         }
     }
 }
diff --git a/compiler/rustc_metadata/src/rmeta/mod.rs b/compiler/rustc_metadata/src/rmeta/mod.rs
index 37af9e64e9a3da5002ee527d5b6f655d05366506..a74aa381d9eb829097c219163db4f04750a3d098 100644 (file)
@@ -115,14 +115,16 @@ impl<T: ParameterizedOverTcx> ParameterizedOverTcx for LazyArray<T> {
     type Value<'tcx> = LazyArray<T::Value<'tcx>>;
 }
 
+impl<T> Default for LazyArray<T> {
+    fn default() -> LazyArray<T> {
+        LazyArray::from_position_and_num_elems(NonZeroUsize::new(1).unwrap(), 0)
+    }
+}
+
 impl<T> LazyArray<T> {
     fn from_position_and_num_elems(position: NonZeroUsize, num_elems: usize) -> LazyArray<T> {
         LazyArray { position, num_elems, _marker: PhantomData }
     }
-
-    fn empty() -> LazyArray<T> {
-        LazyArray::from_position_and_num_elems(NonZeroUsize::new(1).unwrap(), 0)
-    }
 }
 
 /// A list of lazily-decoded values, with the added capability of random access.
@@ -316,7 +318,7 @@ pub(crate) struct IncoherentImpls {
 /// Define `LazyTables` and `TableBuilders` at the same time.
 macro_rules! define_tables {
     (
-        - nullable: $($name1:ident: Table<$IDX1:ty, $T1:ty>,)+
+        - defaulted: $($name1:ident: Table<$IDX1:ty, $T1:ty>,)+
         - optional: $($name2:ident: Table<$IDX2:ty, $T2:ty>,)+
     ) => {
         #[derive(MetadataEncodable, MetadataDecodable)]
@@ -343,11 +345,15 @@ fn encode(&self, buf: &mut FileEncoder) -> LazyTables {
 }
 
 define_tables! {
-- nullable:
+- defaulted:
     is_intrinsic: Table<DefIndex, bool>,
     is_macro_rules: Table<DefIndex, bool>,
     is_type_alias_impl_trait: Table<DefIndex, bool>,
     attr_flags: Table<DefIndex, AttrFlags>,
+    def_path_hashes: Table<DefIndex, DefPathHash>,
+    explicit_item_bounds: Table<DefIndex, LazyArray<(ty::Predicate<'static>, Span)>>,
+    inferred_outlives_of: Table<DefIndex, LazyArray<(ty::Clause<'static>, Span)>>,
+    inherent_impls: Table<DefIndex, LazyArray<DefIndex>>,
 
 - optional:
     attributes: Table<DefIndex, LazyArray<ast::Attribute>>,
@@ -360,12 +366,8 @@ fn encode(&self, buf: &mut FileEncoder) -> LazyTables {
     lookup_const_stability: Table<DefIndex, LazyValue<attr::ConstStability>>,
     lookup_default_body_stability: Table<DefIndex, LazyValue<attr::DefaultBodyStability>>,
     lookup_deprecation_entry: Table<DefIndex, LazyValue<attr::Deprecation>>,
-    // As an optimization, a missing entry indicates an empty `&[]`.
-    explicit_item_bounds: Table<DefIndex, LazyArray<(ty::Predicate<'static>, Span)>>,
     explicit_predicates_of: Table<DefIndex, LazyValue<ty::GenericPredicates<'static>>>,
     generics_of: Table<DefIndex, LazyValue<ty::Generics>>,
-    // As an optimization, a missing entry indicates an empty `&[]`.
-    inferred_outlives_of: Table<DefIndex, LazyArray<(ty::Clause<'static>, Span)>>,
     super_predicates_of: Table<DefIndex, LazyValue<ty::GenericPredicates<'static>>>,
     type_of: Table<DefIndex, LazyValue<Ty<'static>>>,
     variances_of: Table<DefIndex, LazyArray<ty::Variance>>,
@@ -393,7 +395,6 @@ fn encode(&self, buf: &mut FileEncoder) -> LazyTables {
     generator_kind: Table<DefIndex, LazyValue<hir::GeneratorKind>>,
     trait_def: Table<DefIndex, LazyValue<ty::TraitDef>>,
     trait_item_def_id: Table<DefIndex, RawDefId>,
-    inherent_impls: Table<DefIndex, LazyArray<DefIndex>>,
     expn_that_defined: Table<DefIndex, LazyValue<ExpnId>>,
     unused_generic_params: Table<DefIndex, LazyValue<UnusedGenericParams>>,
     params_in_repr: Table<DefIndex, LazyValue<BitSet<u32>>>,
@@ -403,7 +404,6 @@ fn encode(&self, buf: &mut FileEncoder) -> LazyTables {
     // `DefPathTable` up front, since we may only ever use a few
     // definitions from any given crate.
     def_keys: Table<DefIndex, LazyValue<DefKey>>,
-    def_path_hashes: Table<DefIndex, DefPathHash>,
     proc_macro_quoted_spans: Table<usize, LazyValue<Span>>,
     generator_diagnostic_data: Table<DefIndex, LazyValue<GeneratorDiagnosticData<'static>>>,
     variant_data: Table<DefIndex, LazyValue<VariantData>>,
diff --git a/compiler/rustc_metadata/src/rmeta/table.rs b/compiler/rustc_metadata/src/rmeta/table.rs
index 70dbf6476e2fab40d8c4a89b575528b3bd3f6f6b..99bec570600a0faae76c4afb1479ce834e1702c1 100644 (file)
 use std::marker::PhantomData;
 use std::num::NonZeroUsize;
 
+pub(super) trait IsDefault: Default {
+    fn is_default(&self) -> bool;
+}
+
+impl<T> IsDefault for Option<T> {
+    fn is_default(&self) -> bool {
+        self.is_none()
+    }
+}
+
+impl IsDefault for AttrFlags {
+    fn is_default(&self) -> bool {
+        self.is_empty()
+    }
+}
+
+impl IsDefault for bool {
+    fn is_default(&self) -> bool {
+        !self
+    }
+}
+
+impl IsDefault for u32 {
+    fn is_default(&self) -> bool {
+        *self == 0
+    }
+}
+
+impl<T> IsDefault for LazyArray<T> {
+    fn is_default(&self) -> bool {
+        self.num_elems == 0
+    }
+}
+
+impl IsDefault for DefPathHash {
+    fn is_default(&self) -> bool {
+        self.0 == Fingerprint::ZERO
+    }
+}
+
 /// Helper trait, for encoding to, and decoding from, a fixed number of bytes.
 /// Used mainly for Lazy positions and lengths.
 /// Unchecked invariant: `Self::default()` should encode as `[0; BYTE_LEN]`,
 /// but this has no impact on safety.
-pub(super) trait FixedSizeEncoding: Default {
+pub(super) trait FixedSizeEncoding: IsDefault {
     /// This should be `[u8; BYTE_LEN]`;
     /// Cannot use an associated `const BYTE_LEN: usize` instead due to const eval limitations.
     type ByteArray;
@@ -23,6 +63,8 @@ pub(super) trait FixedSizeEncoding: Default {
     fn write_to_bytes(self, b: &mut Self::ByteArray);
 }
 
+/// This implementation is not used generically, but for reading/writing
+/// concrete `u32` fields in `Lazy*` structures, which may be zero.
 impl FixedSizeEncoding for u32 {
     type ByteArray = [u8; 4];
 
@@ -58,7 +100,7 @@ impl FixedSizeEncoding for Option<$ty> {
             fn write_to_bytes(self, b: &mut [u8;1]) {
                 use $ty::*;
                 b[0] = match self {
-                    None => 0,
+                    None => unreachable!(),
                     $(Some($($pat)*) => 1 + ${index()},)*
                 }
             }
@@ -155,20 +197,18 @@ impl FixedSizeEncoding for Option<$ty> {
 }
 
 // We directly encode `DefPathHash` because a `LazyValue` would incur a 25% cost.
-impl FixedSizeEncoding for Option<DefPathHash> {
+impl FixedSizeEncoding for DefPathHash {
     type ByteArray = [u8; 16];
 
     #[inline]
     fn from_bytes(b: &[u8; 16]) -> Self {
-        Some(DefPathHash(Fingerprint::from_le_bytes(*b)))
+        DefPathHash(Fingerprint::from_le_bytes(*b))
     }
 
     #[inline]
     fn write_to_bytes(self, b: &mut [u8; 16]) {
-        let Some(DefPathHash(fingerprint)) = self else {
-            panic!("Trying to encode absent DefPathHash.")
-        };
-        *b = fingerprint.to_le_bytes();
+        debug_assert!(!self.is_default());
+        *b = self.0.to_le_bytes();
     }
 }
 
@@ -179,17 +219,17 @@ impl FixedSizeEncoding for Option<RawDefId> {
     #[inline]
     fn from_bytes(b: &[u8; 8]) -> Self {
         let krate = u32::from_le_bytes(b[0..4].try_into().unwrap());
-        let index = u32::from_le_bytes(b[4..8].try_into().unwrap());
         if krate == 0 {
             return None;
         }
+        let index = u32::from_le_bytes(b[4..8].try_into().unwrap());
         Some(RawDefId { krate: krate - 1, index })
     }
 
     #[inline]
     fn write_to_bytes(self, b: &mut [u8; 8]) {
         match self {
-            None => *b = [0; 8],
+            None => unreachable!(),
             Some(RawDefId { krate, index }) => {
                 // CrateNum is less than `CrateNum::MAX_AS_U32`.
                 debug_assert!(krate < u32::MAX);
@@ -210,6 +250,7 @@ impl FixedSizeEncoding for AttrFlags {
 
     #[inline]
     fn write_to_bytes(self, b: &mut [u8; 1]) {
+        debug_assert!(!self.is_default());
         b[0] = self.bits();
     }
 }
@@ -224,6 +265,7 @@ impl FixedSizeEncoding for bool {
 
     #[inline]
     fn write_to_bytes(self, b: &mut [u8; 1]) {
+        debug_assert!(!self.is_default());
         b[0] = self as u8
     }
 }
@@ -242,34 +284,72 @@ impl<T> FixedSizeEncoding for Option<LazyValue<T>> {
 
     #[inline]
     fn write_to_bytes(self, b: &mut [u8; 4]) {
-        let position = self.map_or(0, |lazy| lazy.position.get());
+        match self {
+            None => unreachable!(),
+            Some(lazy) => {
+                let position = lazy.position.get();
+                let position: u32 = position.try_into().unwrap();
+                position.write_to_bytes(b)
+            }
+        }
+    }
+}
+
+impl<T> LazyArray<T> {
+    #[inline]
+    fn write_to_bytes_impl(self, b: &mut [u8; 8]) {
+        let ([position_bytes, meta_bytes],[])= b.as_chunks_mut::<4>() else { panic!() };
+
+        let position = self.position.get();
         let position: u32 = position.try_into().unwrap();
-        position.write_to_bytes(b)
+        position.write_to_bytes(position_bytes);
+
+        let len = self.num_elems;
+        let len: u32 = len.try_into().unwrap();
+        len.write_to_bytes(meta_bytes);
+    }
+
+    fn from_bytes_impl(position_bytes: &[u8; 4], meta_bytes: &[u8; 4]) -> Option<LazyArray<T>> {
+        let position = NonZeroUsize::new(u32::from_bytes(position_bytes) as usize)?;
+        let len = u32::from_bytes(meta_bytes) as usize;
+        Some(LazyArray::from_position_and_num_elems(position, len))
     }
 }
 
-impl<T> FixedSizeEncoding for Option<LazyArray<T>> {
+impl<T> FixedSizeEncoding for LazyArray<T> {
     type ByteArray = [u8; 8];
 
     #[inline]
     fn from_bytes(b: &[u8; 8]) -> Self {
-        let ([ref position_bytes, ref meta_bytes],[])= b.as_chunks::<4>() else { panic!() };
-        let position = NonZeroUsize::new(u32::from_bytes(position_bytes) as usize)?;
-        let len = u32::from_bytes(meta_bytes) as usize;
-        Some(LazyArray::from_position_and_num_elems(position, len))
+        let ([position_bytes, meta_bytes],[])= b.as_chunks::<4>() else { panic!() };
+        if *meta_bytes == [0; 4] {
+            return Default::default();
+        }
+        LazyArray::from_bytes_impl(position_bytes, meta_bytes).unwrap()
     }
 
     #[inline]
     fn write_to_bytes(self, b: &mut [u8; 8]) {
-        let ([ref mut position_bytes, ref mut meta_bytes],[])= b.as_chunks_mut::<4>() else { panic!() };
+        assert!(!self.is_default());
+        self.write_to_bytes_impl(b)
+    }
+}
 
-        let position = self.map_or(0, |lazy| lazy.position.get());
-        let position: u32 = position.try_into().unwrap();
-        position.write_to_bytes(position_bytes);
+impl<T> FixedSizeEncoding for Option<LazyArray<T>> {
+    type ByteArray = [u8; 8];
 
-        let len = self.map_or(0, |lazy| lazy.num_elems);
-        let len: u32 = len.try_into().unwrap();
-        len.write_to_bytes(meta_bytes);
+    #[inline]
+    fn from_bytes(b: &[u8; 8]) -> Self {
+        let ([position_bytes, meta_bytes],[])= b.as_chunks::<4>() else { panic!() };
+        LazyArray::from_bytes_impl(position_bytes, meta_bytes)
+    }
+
+    #[inline]
+    fn write_to_bytes(self, b: &mut [u8; 8]) {
+        match self {
+            None => unreachable!(),
+            Some(lazy) => lazy.write_to_bytes_impl(b),
+        }
     }
 }
 
@@ -289,20 +369,27 @@ impl<I: Idx, const N: usize, T> TableBuilder<I, Option<T>>
 where
     Option<T>: FixedSizeEncoding<ByteArray = [u8; N]>,
 {
-    pub(crate) fn set(&mut self, i: I, value: T) {
-        self.set_nullable(i, Some(value))
+    pub(crate) fn set_some(&mut self, i: I, value: T) {
+        self.set(i, Some(value))
     }
 }
 
 impl<I: Idx, const N: usize, T: FixedSizeEncoding<ByteArray = [u8; N]>> TableBuilder<I, T> {
-    pub(crate) fn set_nullable(&mut self, i: I, value: T) {
-        // FIXME(eddyb) investigate more compact encodings for sparse tables.
-        // On the PR @michaelwoerister mentioned:
-        // > Space requirements could perhaps be optimized by using the HAMT `popcnt`
-        // > trick (i.e. divide things into buckets of 32 or 64 items and then
-        // > store bit-masks of which item in each bucket is actually serialized).
-        self.blocks.ensure_contains_elem(i, || [0; N]);
-        value.write_to_bytes(&mut self.blocks[i]);
+    /// Sets the table value if it is not default.
+    /// ATTENTION: For optimization default values are simply ignored by this function, because
+    /// right now metadata tables never need to reset non-default values to default. If such need
+    /// arises in the future then a new method (e.g. `clear` or `reset`) will need to be introduced
+    /// for doing that explicitly.
+    pub(crate) fn set(&mut self, i: I, value: T) {
+        if !value.is_default() {
+            // FIXME(eddyb) investigate more compact encodings for sparse tables.
+            // On the PR @michaelwoerister mentioned:
+            // > Space requirements could perhaps be optimized by using the HAMT `popcnt`
+            // > trick (i.e. divide things into buckets of 32 or 64 items and then
+            // > store bit-masks of which item in each bucket is actually serialized).
+            self.blocks.ensure_contains_elem(i, || [0; N]);
+            value.write_to_bytes(&mut self.blocks[i]);
+        }
     }
 
     pub(crate) fn encode(&self, buf: &mut FileEncoder) -> LazyTable<I, T> {
@@ -331,10 +418,7 @@ pub(super) fn get<'a, 'tcx, M: Metadata<'a, 'tcx>>(&self, metadata: M, i: I) ->
         let start = self.position.get();
         let bytes = &metadata.blob()[start..start + self.encoded_size];
         let (bytes, []) = bytes.as_chunks::<N>() else { panic!() };
-        match bytes.get(i.index()) {
-            Some(bytes) => FixedSizeEncoding::from_bytes(bytes),
-            None => FixedSizeEncoding::from_bytes(&[0; N]),
-        }
+        bytes.get(i.index()).map_or_else(Default::default, FixedSizeEncoding::from_bytes)
     }
 
     /// Size of the table in entries, including possible gaps.
diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs
index 4cebe416354a928fd423f0cd4c56f68af5d3c253..7aa9282b9beceb88301dc76df08af1845b4a101b 100644 (file)
     query upstream_monomorphizations_for(def_id: DefId)
         -> Option<&'tcx FxHashMap<SubstsRef<'tcx>, CrateNum>>
     {
-        arena_cache
         desc { |tcx|
             "collecting available upstream monomorphizations for `{}`",
             tcx.def_path_str(def_id),
diff --git a/compiler/rustc_middle/src/ty/layout.rs b/compiler/rustc_middle/src/ty/layout.rs
index cdcd6281f209bb4d25761ebb9e0a6cd9d1fac8f3..4c2855821384bcf4ed35bda7e3635eaacdcc1e7d 100644 (file)
@@ -818,125 +818,114 @@ fn ty_and_layout_pointee_info_at(
         let tcx = cx.tcx();
         let param_env = cx.param_env();
 
-        let pointee_info =
-            match *this.ty.kind() {
-                ty::RawPtr(mt) if offset.bytes() == 0 => {
-                    tcx.layout_of(param_env.and(mt.ty)).ok().map(|layout| PointeeInfo {
-                        size: layout.size,
-                        align: layout.align.abi,
-                        safe: None,
-                    })
-                }
-                ty::FnPtr(fn_sig) if offset.bytes() == 0 => {
-                    tcx.layout_of(param_env.and(tcx.mk_fn_ptr(fn_sig))).ok().map(|layout| {
-                        PointeeInfo { size: layout.size, align: layout.align.abi, safe: None }
-                    })
-                }
-                ty::Ref(_, ty, mt) if offset.bytes() == 0 => {
-                    let kind = if tcx.sess.opts.optimize == OptLevel::No {
-                        // Use conservative pointer kind if not optimizing. This saves us the
-                        // Freeze/Unpin queries, and can save time in the codegen backend (noalias
-                        // attributes in LLVM have compile-time cost even in unoptimized builds).
-                        PointerKind::SharedMutable
-                    } else {
-                        match mt {
-                            hir::Mutability::Not => {
-                                if ty.is_freeze(tcx, cx.param_env()) {
-                                    PointerKind::Frozen
-                                } else {
-                                    PointerKind::SharedMutable
-                                }
-                            }
-                            hir::Mutability::Mut => {
-                                // References to self-referential structures should not be considered
-                                // noalias, as another pointer to the structure can be obtained, that
-                                // is not based-on the original reference. We consider all !Unpin
-                                // types to be potentially self-referential here.
-                                if ty.is_unpin(tcx, cx.param_env()) {
-                                    PointerKind::UniqueBorrowed
-                                } else {
-                                    PointerKind::UniqueBorrowedPinned
-                                }
-                            }
-                        }
-                    };
+        let pointee_info = match *this.ty.kind() {
+            ty::RawPtr(mt) if offset.bytes() == 0 => {
+                tcx.layout_of(param_env.and(mt.ty)).ok().map(|layout| PointeeInfo {
+                    size: layout.size,
+                    align: layout.align.abi,
+                    safe: None,
+                })
+            }
+            ty::FnPtr(fn_sig) if offset.bytes() == 0 => {
+                tcx.layout_of(param_env.and(tcx.mk_fn_ptr(fn_sig))).ok().map(|layout| PointeeInfo {
+                    size: layout.size,
+                    align: layout.align.abi,
+                    safe: None,
+                })
+            }
+            ty::Ref(_, ty, mt) if offset.bytes() == 0 => {
+                // Use conservative pointer kind if not optimizing. This saves us the
+                // Freeze/Unpin queries, and can save time in the codegen backend (noalias
+                // attributes in LLVM have compile-time cost even in unoptimized builds).
+                let optimize = tcx.sess.opts.optimize != OptLevel::No;
+                let kind = match mt {
+                    hir::Mutability::Not => PointerKind::SharedRef {
+                        frozen: optimize && ty.is_freeze(tcx, cx.param_env()),
+                    },
+                    hir::Mutability::Mut => PointerKind::MutableRef {
+                        unpin: optimize && ty.is_unpin(tcx, cx.param_env()),
+                    },
+                };
 
-                    tcx.layout_of(param_env.and(ty)).ok().map(|layout| PointeeInfo {
-                        size: layout.size,
-                        align: layout.align.abi,
-                        safe: Some(kind),
-                    })
-                }
+                tcx.layout_of(param_env.and(ty)).ok().map(|layout| PointeeInfo {
+                    size: layout.size,
+                    align: layout.align.abi,
+                    safe: Some(kind),
+                })
+            }
 
-                _ => {
-                    let mut data_variant = match this.variants {
-                        // Within the discriminant field, only the niche itself is
-                        // always initialized, so we only check for a pointer at its
-                        // offset.
-                        //
-                        // If the niche is a pointer, it's either valid (according
-                        // to its type), or null (which the niche field's scalar
-                        // validity range encodes). This allows using
-                        // `dereferenceable_or_null` for e.g., `Option<&T>`, and
-                        // this will continue to work as long as we don't start
-                        // using more niches than just null (e.g., the first page of
-                        // the address space, or unaligned pointers).
-                        Variants::Multiple {
-                            tag_encoding: TagEncoding::Niche { untagged_variant, .. },
-                            tag_field,
-                            ..
-                        } if this.fields.offset(tag_field) == offset => {
-                            Some(this.for_variant(cx, untagged_variant))
-                        }
-                        _ => Some(this),
-                    };
+            _ => {
+                let mut data_variant = match this.variants {
+                    // Within the discriminant field, only the niche itself is
+                    // always initialized, so we only check for a pointer at its
+                    // offset.
+                    //
+                    // If the niche is a pointer, it's either valid (according
+                    // to its type), or null (which the niche field's scalar
+                    // validity range encodes). This allows using
+                    // `dereferenceable_or_null` for e.g., `Option<&T>`, and
+                    // this will continue to work as long as we don't start
+                    // using more niches than just null (e.g., the first page of
+                    // the address space, or unaligned pointers).
+                    Variants::Multiple {
+                        tag_encoding: TagEncoding::Niche { untagged_variant, .. },
+                        tag_field,
+                        ..
+                    } if this.fields.offset(tag_field) == offset => {
+                        Some(this.for_variant(cx, untagged_variant))
+                    }
+                    _ => Some(this),
+                };
 
-                    if let Some(variant) = data_variant {
-                        // We're not interested in any unions.
-                        if let FieldsShape::Union(_) = variant.fields {
-                            data_variant = None;
-                        }
+                if let Some(variant) = data_variant {
+                    // We're not interested in any unions.
+                    if let FieldsShape::Union(_) = variant.fields {
+                        data_variant = None;
                     }
+                }
 
-                    let mut result = None;
-
-                    if let Some(variant) = data_variant {
-                        // FIXME(erikdesjardins): handle non-default addrspace ptr sizes
-                        // (requires passing in the expected address space from the caller)
-                        let ptr_end = offset + Pointer(AddressSpace::DATA).size(cx);
-                        for i in 0..variant.fields.count() {
-                            let field_start = variant.fields.offset(i);
-                            if field_start <= offset {
-                                let field = variant.field(cx, i);
-                                result = field.to_result().ok().and_then(|field| {
-                                    if ptr_end <= field_start + field.size {
-                                        // We found the right field, look inside it.
-                                        let field_info =
-                                            field.pointee_info_at(cx, offset - field_start);
-                                        field_info
-                                    } else {
-                                        None
-                                    }
-                                });
-                                if result.is_some() {
-                                    break;
+                let mut result = None;
+
+                if let Some(variant) = data_variant {
+                    // FIXME(erikdesjardins): handle non-default addrspace ptr sizes
+                    // (requires passing in the expected address space from the caller)
+                    let ptr_end = offset + Pointer(AddressSpace::DATA).size(cx);
+                    for i in 0..variant.fields.count() {
+                        let field_start = variant.fields.offset(i);
+                        if field_start <= offset {
+                            let field = variant.field(cx, i);
+                            result = field.to_result().ok().and_then(|field| {
+                                if ptr_end <= field_start + field.size {
+                                    // We found the right field, look inside it.
+                                    let field_info =
+                                        field.pointee_info_at(cx, offset - field_start);
+                                    field_info
+                                } else {
+                                    None
                                 }
+                            });
+                            if result.is_some() {
+                                break;
                             }
                         }
                     }
+                }
 
-                    // FIXME(eddyb) This should be for `ptr::Unique<T>`, not `Box<T>`.
-                    if let Some(ref mut pointee) = result {
-                        if let ty::Adt(def, _) = this.ty.kind() {
-                            if def.is_box() && offset.bytes() == 0 {
-                                pointee.safe = Some(PointerKind::UniqueOwned);
-                            }
+                // FIXME(eddyb) This should be for `ptr::Unique<T>`, not `Box<T>`.
+                if let Some(ref mut pointee) = result {
+                    if let ty::Adt(def, _) = this.ty.kind() {
+                        if def.is_box() && offset.bytes() == 0 {
+                            let optimize = tcx.sess.opts.optimize != OptLevel::No;
+                            pointee.safe = Some(PointerKind::Box {
+                                unpin: optimize && this.ty.boxed_ty().is_unpin(tcx, cx.param_env()),
+                            });
                         }
                     }
-
-                    result
                 }
-            };
+
+                result
+            }
+        };
 
         debug!(
             "pointee_info_at (offset={:?}, type kind: {:?}) => {:?}",
diff --git a/compiler/rustc_span/src/def_id.rs b/compiler/rustc_span/src/def_id.rs
index 7c5e1427d1ed72e9e7a8f45ad2c2e70cb8eef90e..cdda052f529067a27f4c82d49c04bef4a8d7df6e 100644 (file)
@@ -119,6 +119,12 @@ pub fn new(stable_crate_id: StableCrateId, local_hash: u64) -> DefPathHash {
     }
 }
 
+impl Default for DefPathHash {
+    fn default() -> Self {
+        DefPathHash(Fingerprint::ZERO)
+    }
+}
+
 impl Borrow<Fingerprint> for DefPathHash {
     #[inline]
     fn borrow(&self) -> &Fingerprint {
diff --git a/compiler/rustc_ty_utils/src/abi.rs b/compiler/rustc_ty_utils/src/abi.rs
index 1c74aeca5ab1fae38a6e6234c6ad7b40b292078b..ad5527f5a778b89bb11ba49c9f95c23f3b307491 100644 (file)
@@ -254,15 +254,18 @@ fn adjust_for_rust_scalar<'tcx>(
         if let Some(kind) = pointee.safe {
             attrs.pointee_align = Some(pointee.align);
 
-            // `Box` (`UniqueBorrowed`) are not necessarily dereferenceable
-            // for the entire duration of the function as they can be deallocated
-            // at any time. Same for shared mutable references. If LLVM had a
-            // way to say "dereferenceable on entry" we could use it here.
+            // `Box`es are not necessarily dereferenceable for the entire duration of the function as
+            // they can be deallocated at any time. Same for non-frozen shared references (see
+            // <https://github.com/rust-lang/rust/pull/98017>), and for mutable references to
+            // potentially self-referential types (see
+            // <https://github.com/rust-lang/unsafe-code-guidelines/issues/381>). If LLVM had a way
+            // to say "dereferenceable on entry" we could use it here.
             attrs.pointee_size = match kind {
-                PointerKind::UniqueBorrowed
-                | PointerKind::UniqueBorrowedPinned
-                | PointerKind::Frozen => pointee.size,
-                PointerKind::SharedMutable | PointerKind::UniqueOwned => Size::ZERO,
+                PointerKind::Box { .. }
+                | PointerKind::SharedRef { frozen: false }
+                | PointerKind::MutableRef { unpin: false } => Size::ZERO,
+                PointerKind::SharedRef { frozen: true }
+                | PointerKind::MutableRef { unpin: true } => pointee.size,
             };
 
             // The aliasing rules for `Box<T>` are still not decided, but currently we emit
@@ -275,18 +278,16 @@ fn adjust_for_rust_scalar<'tcx>(
             // versions at all anymore. We still support turning it off using -Zmutable-noalias.
             let noalias_mut_ref = cx.tcx.sess.opts.unstable_opts.mutable_noalias;
 
-            // `&mut` pointer parameters never alias other parameters,
-            // or mutable global data
+            // `&T` where `T` contains no `UnsafeCell<U>` is immutable, and can be marked as both
+            // `readonly` and `noalias`, as LLVM's definition of `noalias` is based solely on memory
+            // dependencies rather than pointer equality. However this only applies to arguments,
+            // not return values.
             //
-            // `&T` where `T` contains no `UnsafeCell<U>` is immutable,
-            // and can be marked as both `readonly` and `noalias`, as
-            // LLVM's definition of `noalias` is based solely on memory
-            // dependencies rather than pointer equality
+            // `&mut T` and `Box<T>` where `T: Unpin` are unique and hence `noalias`.
             let no_alias = match kind {
-                PointerKind::SharedMutable | PointerKind::UniqueBorrowedPinned => false,
-                PointerKind::UniqueBorrowed => noalias_mut_ref,
-                PointerKind::UniqueOwned => noalias_for_box,
-                PointerKind::Frozen => true,
+                PointerKind::SharedRef { frozen } => frozen,
+                PointerKind::MutableRef { unpin } => unpin && noalias_mut_ref,
+                PointerKind::Box { unpin } => unpin && noalias_for_box,
             };
             // We can never add `noalias` in return position; that LLVM attribute has some very surprising semantics
             // (see <https://github.com/rust-lang/unsafe-code-guidelines/issues/385#issuecomment-1368055745>).
@@ -294,7 +295,7 @@ fn adjust_for_rust_scalar<'tcx>(
                 attrs.set(ArgAttribute::NoAlias);
             }
 
-            if kind == PointerKind::Frozen && !is_return {
+            if matches!(kind, PointerKind::SharedRef { frozen: true }) && !is_return {
                 attrs.set(ArgAttribute::ReadOnly);
             }
         }
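Taken together, the abi.rs hunks above decide three things from the new `PointerKind`: whether a nonzero pointee size is reported (which later becomes `dereferenceable`), whether the pointer is `noalias`, and whether it is `readonly`. The following is an illustrative condensation only, not rustc's code: `Attrs` and `attrs_for` are invented names, and it assumes `-Zmutable-noalias` and the box `noalias` behaviour are left enabled, as the comments in the hunk describe.

// Sketch of the decision table implied by the hunks above (illustration only).
#[derive(Clone, Copy)]
enum PointerKind {
    SharedRef { frozen: bool },
    MutableRef { unpin: bool },
    Box { unpin: bool },
}

#[derive(Debug, PartialEq)]
struct Attrs {
    noalias: bool,
    readonly: bool,
    dereferenceable: bool,
}

fn attrs_for(kind: PointerKind, is_return: bool) -> Attrs {
    // `dereferenceable` only when the pointee stays valid for the whole call:
    // frozen shared refs and `Unpin` mutable refs, but never `Box`.
    let dereferenceable = matches!(
        kind,
        PointerKind::SharedRef { frozen: true } | PointerKind::MutableRef { unpin: true }
    );
    // `noalias` never in return position; otherwise frozen shared refs,
    // `Unpin` mutable refs and `Unpin` boxes.
    let noalias = !is_return
        && match kind {
            PointerKind::SharedRef { frozen } => frozen,
            PointerKind::MutableRef { unpin } | PointerKind::Box { unpin } => unpin,
        };
    // `readonly` only for frozen shared refs in argument position.
    let readonly = matches!(kind, PointerKind::SharedRef { frozen: true }) && !is_return;
    Attrs { noalias, readonly, dereferenceable }
}

fn main() {
    // `&mut T` with `T: !Unpin` gets none of the three.
    assert_eq!(
        attrs_for(PointerKind::MutableRef { unpin: false }, false),
        Attrs { noalias: false, readonly: false, dereferenceable: false }
    );
    // A frozen `&T` argument gets all three.
    assert_eq!(
        attrs_for(PointerKind::SharedRef { frozen: true }, false),
        Attrs { noalias: true, readonly: true, dereferenceable: true }
    );
    // `Box<T>` in return position gets none of them.
    assert_eq!(
        attrs_for(PointerKind::Box { unpin: true }, true),
        Attrs { noalias: false, readonly: false, dereferenceable: false }
    );
}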
index 424b52309d3af8bcc58bf1a1164d9a4f18c0839c..a6e0f13f69830350d8c55611c635841edc461212 100644 (file)
@@ -22,14 +22,17 @@ fn associated_item_def_ids(tcx: TyCtxt<'_>, def_id: DefId) -> &[DefId] {
         hir::ItemKind::Impl(ref impl_) => tcx.arena.alloc_from_iter(
             impl_.items.iter().map(|impl_item_ref| impl_item_ref.id.owner_id.to_def_id()),
         ),
-        hir::ItemKind::TraitAlias(..) => &[],
         _ => span_bug!(item.span, "associated_item_def_ids: not impl or trait"),
     }
 }
 
 fn associated_items(tcx: TyCtxt<'_>, def_id: DefId) -> ty::AssocItems<'_> {
-    let items = tcx.associated_item_def_ids(def_id).iter().map(|did| tcx.associated_item(*did));
-    ty::AssocItems::new(items)
+    if tcx.is_trait_alias(def_id) {
+        ty::AssocItems::new(Vec::new())
+    } else {
+        let items = tcx.associated_item_def_ids(def_id).iter().map(|did| tcx.associated_item(*did));
+        ty::AssocItems::new(items)
+    }
 }
 
 fn impl_item_implementor_ids(tcx: TyCtxt<'_>, impl_id: DefId) -> FxHashMap<DefId, DefId> {
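The assoc.rs hunk moves the trait-alias special case out of `associated_item_def_ids` (which can now assume it only sees traits and impls) into `associated_items`, which simply reports an empty item list for aliases. For context, here is a minimal nightly-only illustration of a trait alias; the alias and function names are made up for the example, and trait aliases cannot declare associated items of their own, so an empty `AssocItems` is the right answer for them.

// Nightly-only illustration (not part of this commit's tests).
#![feature(trait_alias)]

trait IntIter = Iterator<Item = u32>;

fn sum_all(it: impl IntIter) -> u32 {
    it.sum()
}

fn main() {
    assert_eq!(sum_all([1u32, 2, 3].into_iter()), 6);
}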
index 6078e39ac9d3b8af5a7214921d91edff4c238385..8a0c532cfb02fe5c29132adb7cb6a2757728696a 100644 (file)
@@ -1114,9 +1114,6 @@ fn run(self, builder: &Builder<'_>) {
             cmd.arg("--bless");
         }
 
-        builder.info("tidy check");
-        try_run(builder, &mut cmd);
-
         if builder.config.channel == "dev" || builder.config.channel == "nightly" {
             builder.info("fmt check");
             if builder.initial_rustfmt().is_none() {
@@ -1134,6 +1131,11 @@ fn run(self, builder: &Builder<'_>) {
             }
             crate::format::format(&builder, !builder.config.cmd.bless(), &[]);
         }
+
+        builder.info("tidy check");
+        try_run(builder, &mut cmd);
+
+        builder.ensure(ExpandYamlAnchors {});
     }
 
     fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
index 8992d165d5d504a783e6a5bf3f25f1fd7150598a..3fc72ecbbc484ffd2b9e292b00585fcbedeeb392 100644 (file)
@@ -51,7 +51,7 @@ fn from_args() -> Result<Self, Box<dyn Error>> {
             ["generate", ref base] => (Mode::Generate, PathBuf::from(base)),
             ["check", ref base] => (Mode::Check, PathBuf::from(base)),
             _ => {
-                eprintln!("usage: expand-yaml-anchors <source-dir> <dest-dir>");
+                eprintln!("usage: expand-yaml-anchors <generate|check> <base-dir>");
                 std::process::exit(1);
             }
         };
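For reference, the corrected usage string matches the argument handling shown in the hunk's context lines: the first argument selects the mode and the second is the base directory. A self-contained sketch of that parsing follows; it is written in the style of `from_args` but is an illustration, not the tool's actual code.

// Illustration only: CLI parsing matching the corrected usage message.
use std::path::PathBuf;

enum Mode {
    Generate,
    Check,
}

fn parse_args(args: &[&str]) -> Result<(Mode, PathBuf), String> {
    match args {
        ["generate", base] => Ok((Mode::Generate, PathBuf::from(base))),
        ["check", base] => Ok((Mode::Check, PathBuf::from(base))),
        _ => Err("usage: expand-yaml-anchors <generate|check> <base-dir>".to_string()),
    }
}

fn main() {
    let args: Vec<String> = std::env::args().skip(1).collect();
    let args: Vec<&str> = args.iter().map(|s| s.as_str()).collect();
    match parse_args(&args) {
        Ok((_mode, base)) => println!("operating on {}", base.display()),
        Err(usage) => {
            eprintln!("{usage}");
            std::process::exit(1);
        }
    }
}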
index ec555ba2895c8468b82bcaae0008fcfb34b13ad0..106e93751d21904c363c07bf26413a75505c78a7 100644 (file)
@@ -81,21 +81,18 @@ fn from_ref_ty<'tcx>(
                         protector: None,
                     }
                 } else if pointee.is_unpin(*cx.tcx, cx.param_env()) {
-                    // A regular full mutable reference.
+                    // A regular full mutable reference. On `FnEntry` this is `noalias` and `dereferenceable`.
                     NewPermission::Uniform {
                         perm: Permission::Unique,
                         access: Some(AccessKind::Write),
                         protector,
                     }
                 } else {
+                    // `!Unpin` dereferences do not get `noalias` nor `dereferenceable`.
                     NewPermission::Uniform {
                         perm: Permission::SharedReadWrite,
-                        // FIXME: We emit `dereferenceable` for `!Unpin` mutable references, so we
-                        // should do fake accesses here. But then we run into
-                        // <https://github.com/rust-lang/unsafe-code-guidelines/issues/381>, so for now
-                        // we don't do that.
                         access: None,
-                        protector,
+                        protector: None,
                     }
                 }
             }
@@ -109,6 +106,7 @@ fn from_ref_ty<'tcx>(
                 }
             }
             ty::Ref(_, _pointee, Mutability::Not) => {
+                // Shared references. If frozen, these get `noalias` and `dereferenceable`; otherwise neither.
                 NewPermission::FreezeSensitive {
                     freeze_perm: Permission::SharedReadOnly,
                     freeze_access: Some(AccessKind::Read),
@@ -137,6 +135,32 @@ fn from_ref_ty<'tcx>(
         }
     }
 
+    fn from_box_ty<'tcx>(
+        ty: Ty<'tcx>,
+        kind: RetagKind,
+        cx: &crate::MiriInterpCx<'_, 'tcx>,
+    ) -> Self {
+        // `ty` is not the `Box` but the field of the Box with this pointer (due to allocator handling).
+        let pointee = ty.builtin_deref(true).unwrap().ty;
+        if pointee.is_unpin(*cx.tcx, cx.param_env()) {
+            // A regular box. On `FnEntry` this is `noalias`, but not `dereferenceable` (hence only
+            // a weak protector).
+            NewPermission::Uniform {
+                perm: Permission::Unique,
+                access: Some(AccessKind::Write),
+                protector: (kind == RetagKind::FnEntry)
+                    .then_some(ProtectorKind::WeakProtector),
+            }
+        } else {
+            // `!Unpin` boxes do not get `noalias` nor `dereferenceable`.
+            NewPermission::Uniform {
+                perm: Permission::SharedReadWrite,
+                access: None,
+                protector: None,
+            }
+        }
+    }
+
     fn protector(&self) -> Option<ProtectorKind> {
         match self {
             NewPermission::Uniform { protector, .. } => *protector,
@@ -916,12 +940,7 @@ fn ecx(&mut self) -> &mut MiriInterpCx<'mir, 'tcx> {
 
             fn visit_box(&mut self, place: &PlaceTy<'tcx, Provenance>) -> InterpResult<'tcx> {
                 // Boxes get a weak protectors, since they may be deallocated.
-                let new_perm = NewPermission::Uniform {
-                    perm: Permission::Unique,
-                    access: Some(AccessKind::Write),
-                    protector: (self.kind == RetagKind::FnEntry)
-                        .then_some(ProtectorKind::WeakProtector),
-                };
+                let new_perm = NewPermission::from_box_ty(place.layout.ty, self.kind, self.ecx);
                 self.retag_ptr_inplace(place, new_perm, self.retag_cause)
             }
 
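The new `from_box_ty` above makes the retag permission for boxes depend on whether the pointee is `Unpin`, mirroring what `from_ref_ty` already does for `&mut`. A condensed, illustrative version of that decision follows; `Perm`, `Protector` and `box_retag` are simplified stand-ins for Miri's `Permission`, `ProtectorKind` and `NewPermission`, not the real types.

// Illustration only: the Unpin-based dispatch that `from_box_ty` performs.
#[derive(Debug, PartialEq)]
enum Perm {
    Unique,
    SharedReadWrite,
}

#[derive(Debug, PartialEq)]
enum Protector {
    Weak,
}

fn box_retag(pointee_is_unpin: bool, is_fn_entry: bool) -> (Perm, Option<Protector>) {
    if pointee_is_unpin {
        // Regular `Box`: unique, with only a weak protector on `FnEntry` because
        // the callee is allowed to deallocate it.
        (Perm::Unique, is_fn_entry.then_some(Protector::Weak))
    } else {
        // `!Unpin` boxes get neither `noalias` nor `dereferenceable`, so no
        // protector is added and no write access is asserted on retag.
        (Perm::SharedReadWrite, None)
    }
}

fn main() {
    assert_eq!(box_retag(true, true), (Perm::Unique, Some(Protector::Weak)));
    assert_eq!(box_retag(false, true), (Perm::SharedReadWrite, None));
}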
diff --git a/src/tools/miri/tests/fail/stacked_borrows/deallocate_against_protector2.rs b/src/tools/miri/tests/fail/stacked_borrows/deallocate_against_protector2.rs
deleted file mode 100644 (file)
index fd67dcc..0000000
+++ /dev/null
@@ -1,16 +0,0 @@
-//@error-pattern: /deallocating while item \[SharedReadWrite for .*\] is strongly protected/
-use std::marker::PhantomPinned;
-
-pub struct NotUnpin(i32, PhantomPinned);
-
-fn inner(x: &mut NotUnpin, f: fn(&mut NotUnpin)) {
-    // `f` may mutate, but it may not deallocate!
-    f(x)
-}
-
-fn main() {
-    inner(Box::leak(Box::new(NotUnpin(0, PhantomPinned))), |x| {
-        let raw = x as *mut _;
-        drop(unsafe { Box::from_raw(raw) });
-    });
-}
diff --git a/src/tools/miri/tests/fail/stacked_borrows/deallocate_against_protector2.stderr b/src/tools/miri/tests/fail/stacked_borrows/deallocate_against_protector2.stderr
deleted file mode 100644 (file)
index 47cfa0d..0000000
+++ /dev/null
@@ -1,38 +0,0 @@
-error: Undefined Behavior: deallocating while item [SharedReadWrite for <TAG>] is strongly protected by call ID
-  --> RUSTLIB/alloc/src/alloc.rs:LL:CC
-   |
-LL |     unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
-   |              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ deallocating while item [SharedReadWrite for <TAG>] is strongly protected by call ID
-   |
-   = help: this indicates a potential bug in the program: it performed an invalid operation, but the Stacked Borrows rules it violated are still experimental
-   = help: see https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md for further information
-   = note: BACKTRACE:
-   = note: inside `std::alloc::dealloc` at RUSTLIB/alloc/src/alloc.rs:LL:CC
-   = note: inside `<std::alloc::Global as std::alloc::Allocator>::deallocate` at RUSTLIB/alloc/src/alloc.rs:LL:CC
-   = note: inside `alloc::alloc::box_free::<NotUnpin, std::alloc::Global>` at RUSTLIB/alloc/src/alloc.rs:LL:CC
-   = note: inside `std::ptr::drop_in_place::<std::boxed::Box<NotUnpin>> - shim(Some(std::boxed::Box<NotUnpin>))` at RUSTLIB/core/src/ptr/mod.rs:LL:CC
-   = note: inside `std::mem::drop::<std::boxed::Box<NotUnpin>>` at RUSTLIB/core/src/mem/mod.rs:LL:CC
-note: inside closure
-  --> $DIR/deallocate_against_protector2.rs:LL:CC
-   |
-LL |         drop(unsafe { Box::from_raw(raw) });
-   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-   = note: inside `<[closure@$DIR/deallocate_against_protector2.rs:LL:CC] as std::ops::FnOnce<(&mut NotUnpin,)>>::call_once - shim` at RUSTLIB/core/src/ops/function.rs:LL:CC
-note: inside `inner`
-  --> $DIR/deallocate_against_protector2.rs:LL:CC
-   |
-LL |     f(x)
-   |     ^^^^
-note: inside `main`
-  --> $DIR/deallocate_against_protector2.rs:LL:CC
-   |
-LL | /     inner(Box::leak(Box::new(NotUnpin(0, PhantomPinned))), |x| {
-LL | |         let raw = x as *mut _;
-LL | |         drop(unsafe { Box::from_raw(raw) });
-LL | |     });
-   | |______^
-
-note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace
-
-error: aborting due to previous error
-
index 96fc0be344dbfea2f8065c503b4df6b3057d0a7c..6994def16a1da3f537c9fb78bdc4a8622c2d13d5 100644 (file)
@@ -26,6 +26,19 @@ fn poll(mut self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<()> {
     }
 }
 
+fn mk_waker() -> Waker {
+    use std::sync::Arc;
+
+    struct MyWaker;
+    impl Wake for MyWaker {
+        fn wake(self: Arc<Self>) {
+            unimplemented!()
+        }
+    }
+
+    Waker::from(Arc::new(MyWaker))
+}
+
 async fn do_stuff() {
     (&mut Delay::new(1)).await;
 }
@@ -73,16 +86,7 @@ fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<()> {
 }
 
 fn run_fut<T>(fut: impl Future<Output = T>) -> T {
-    use std::sync::Arc;
-
-    struct MyWaker;
-    impl Wake for MyWaker {
-        fn wake(self: Arc<Self>) {
-            unimplemented!()
-        }
-    }
-
-    let waker = Waker::from(Arc::new(MyWaker));
+    let waker = mk_waker();
     let mut context = Context::from_waker(&waker);
 
     let mut pinned = pin!(fut);
@@ -94,7 +98,37 @@ fn wake(self: Arc<Self>) {
     }
 }
 
+fn self_referential_box() {
+    let waker = mk_waker();
+    let cx = &mut Context::from_waker(&waker);
+
+    async fn my_fut() -> i32 {
+        let val = 10;
+        let val_ref = &val;
+
+        let _ = Delay::new(1).await;
+
+        *val_ref
+    }
+
+    fn box_poll<F: Future>(
+        mut f: Pin<Box<F>>,
+        cx: &mut Context<'_>,
+    ) -> (Pin<Box<F>>, Poll<F::Output>) {
+        let p = f.as_mut().poll(cx);
+        (f, p)
+    }
+
+    let my_fut = Box::pin(my_fut());
+    let (my_fut, p1) = box_poll(my_fut, cx);
+    assert!(p1.is_pending());
+    let (my_fut, p2) = box_poll(my_fut, cx);
+    assert!(p2.is_ready());
+    drop(my_fut);
+}
+
 fn main() {
     run_fut(do_stuff());
     run_fut(DoStuff::new());
+    self_referential_box();
 }
index ef6eb346c17b12804cb9c92dd5caaa424d35dbec..8e78efa73c751578c9c3b7849553bb62fcd32e31 100644 (file)
@@ -19,6 +19,7 @@ fn main() {
     array_casts();
     mut_below_shr();
     wide_raw_ptr_in_tuple();
+    not_unpin_not_protected();
 }
 
 // Make sure that reading from an `&mut` does, like reborrowing to `&`,
@@ -219,3 +220,22 @@ fn wide_raw_ptr_in_tuple() {
     // Make sure the fn ptr part of the vtable is still fine.
     r.type_id();
 }
+
+fn not_unpin_not_protected() {
+    // `&mut !Unpin`, at least for now, does not get `noalias` nor `dereferenceable`, so we also
+    // don't add protectors. (We could, but until we have a better idea for where we want to go with
+    // the self-referential-generator situation, it does not seem worth the potential trouble.)
+    use std::marker::PhantomPinned;
+
+    pub struct NotUnpin(i32, PhantomPinned);
+
+    fn inner(x: &mut NotUnpin, f: fn(&mut NotUnpin)) {
+        // `f` may mutate, but it may not deallocate!
+        f(x)
+    }
+
+    inner(Box::leak(Box::new(NotUnpin(0, PhantomPinned))), |x| {
+        let raw = x as *mut _;
+        drop(unsafe { Box::from_raw(raw) });
+    });
+}
index ff76405a4ea323338c45c98751756216f76f4f30..0c62e0d35e36d91624e2ac519e56f1cc72d9c91e 100644 (file)
@@ -29,6 +29,12 @@ pub fn borrow(x: &i32) -> &i32 {
   x
 }
 
+// CHECK: align 4 {{i32\*|ptr}} @borrow_mut({{i32\*|ptr}} align 4 %x)
+#[no_mangle]
+pub fn borrow_mut(x: &mut i32) -> &mut i32 {
+  x
+}
+
 // CHECK-LABEL: @borrow_call
 #[no_mangle]
 pub fn borrow_call(x: &i32, f: fn(&i32) -> &i32) -> &i32 {
index 1f979d7b90a70b08b7a5edd95028777218481772..96dfde18683e34b2c3ab452a79920edd040a72a1 100644 (file)
@@ -85,6 +85,12 @@ pub fn option_nonzero_int(x: Option<NonZeroU64>) -> Option<NonZeroU64> {
 pub fn readonly_borrow(_: &i32) {
 }
 
+// CHECK: noundef align 4 dereferenceable(4) {{i32\*|ptr}} @readonly_borrow_ret()
+#[no_mangle]
+pub fn readonly_borrow_ret() -> &'static i32 {
+  loop {}
+}
+
 // CHECK: @static_borrow({{i32\*|ptr}} noalias noundef readonly align 4 dereferenceable(4) %_1)
 // static borrow may be captured
 #[no_mangle]
@@ -115,9 +121,17 @@ pub fn mutable_unsafe_borrow(_: &mut UnsafeInner) {
 pub fn mutable_borrow(_: &mut i32) {
 }
 
+// CHECK: noundef align 4 dereferenceable(4) {{i32\*|ptr}} @mutable_borrow_ret()
+#[no_mangle]
+pub fn mutable_borrow_ret() -> &'static mut i32 {
+  loop {}
+}
+
 #[no_mangle]
-// CHECK: @mutable_notunpin_borrow({{i32\*|ptr}} noundef align 4 dereferenceable(4) %_1)
+// CHECK: @mutable_notunpin_borrow({{i32\*|ptr}} noundef nonnull align 4 %_1)
 // This one is *not* `noalias` because it might be self-referential.
+// It is also not `dereferenceable` due to
+// <https://github.com/rust-lang/unsafe-code-guidelines/issues/381>.
 pub fn mutable_notunpin_borrow(_: &mut NotUnpin) {
 }
 
@@ -167,6 +181,12 @@ pub fn _box(x: Box<i32>) -> Box<i32> {
   x
 }
 
+// CHECK: noundef nonnull align 4 {{i32\*|ptr}} @notunpin_box({{i32\*|ptr}} noundef nonnull align 4 %x)
+#[no_mangle]
+pub fn notunpin_box(x: Box<NotUnpin>) -> Box<NotUnpin> {
+  x
+}
+
 // CHECK: @struct_return({{%S\*|ptr}} noalias nocapture noundef sret(%S) dereferenceable(32){{( %0)?}})
 #[no_mangle]
 pub fn struct_return() -> S {
@@ -233,12 +253,12 @@ pub fn trait_raw(_: *const dyn Drop) {
 
 // CHECK: @trait_box({{\{\}\*|ptr}} noalias noundef nonnull align 1{{( %0)?}}, {{.+}} noalias noundef readonly align {{.*}} dereferenceable({{.*}}){{( %1)?}})
 #[no_mangle]
-pub fn trait_box(_: Box<dyn Drop>) {
+pub fn trait_box(_: Box<dyn Drop + Unpin>) {
 }
 
 // CHECK: { {{i8\*|ptr}}, {{i8\*|ptr}} } @trait_option({{i8\*|ptr}} noalias noundef align 1 %x.0, {{i8\*|ptr}} %x.1)
 #[no_mangle]
-pub fn trait_option(x: Option<Box<dyn Drop>>) -> Option<Box<dyn Drop>> {
+pub fn trait_option(x: Option<Box<dyn Drop + Unpin>>) -> Option<Box<dyn Drop + Unpin>> {
   x
 }
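Taken together, the codegen test changes above pin down the user-visible attribute behaviour: return-position references keep `dereferenceable` but are never `noalias`, `&mut` to a `!Unpin` type and `Box` of a `!Unpin` type drop both `noalias` and `dereferenceable`, and `Box<dyn ...>` only stays `noalias` when the pointee is `Unpin`. As one more illustrative data point (not a committed test), a shared reference to a type with interior mutability is the `SharedRef { frozen: false }` case and, per the abi.rs hunk, should keep `noundef nonnull align` while gaining neither `readonly`, `noalias`, nor `dereferenceable`; the snippet below is a sketch in the style of `function-arguments.rs`, with the function name and expected line invented for illustration.

// Illustration only, in the style of tests/codegen/function-arguments.rs.
use std::cell::Cell;

// Expected shape of the signature (illustrative, not a committed CHECK line):
//   @cell_borrow({{i32\*|ptr}} noundef nonnull align 4 %_1)
#[no_mangle]
pub fn cell_borrow(_: &Cell<i32>) {}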