* Don't be afraid to ask! The Rust community is friendly and helpful.
[rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/about-this-guide.html
-[gdfrustc]: http://manishearth.github.io/rust-internals-docs/rustc/
+[gdfrustc]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc/
[gsearchdocs]: https://www.google.com/search?q=site:doc.rust-lang.org+your+query+here
[rif]: http://internals.rust-lang.org
[rr]: https://doc.rust-lang.org/book/README.html
"env_logger 0.5.12 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"filetime 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "flate2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "flate2 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"fwdansi 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"git2 0.7.5 (registry+https://github.com/rust-lang/crates.io-index)",
[[package]]
name = "flate2"
-version = "1.0.2"
+version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
dependencies = [
"clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)",
"error-chain 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "flate2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "flate2 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rayon 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"tar 0.4.16 (registry+https://github.com/rust-lang/crates.io-index)",
[[package]]
name = "racer"
-version = "2.1.6"
+version = "2.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ordslice 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "racer 2.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "racer 2.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rayon 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.75 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.75 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.31 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"walkdir 2.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"byteorder 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"chalk-engine 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "flate2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "flate2 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"fmt_macros 0.0.0",
"graphviz 0.0.0",
"jobserver 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
name = "rustc_codegen_utils"
version = "0.0.0"
dependencies = [
- "flate2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "flate2 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"rustc_data_structures 0.0.0",
dependencies = [
"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"ena 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "graphviz 0.0.0",
"log 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot_core 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
name = "rustc_metadata"
version = "0.0.0"
dependencies = [
- "flate2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "flate2 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
"proc_macro 0.0.0",
"rustc 0.0.0",
"checksum failure_derive 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "946d0e98a50d9831f5d589038d2ca7f8f455b1c21028c0db0e84116a12696426"
"checksum filetime 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "da4b9849e77b13195302c174324b5ba73eec9b236b24c221a61000daefb95c5f"
"checksum fixedbitset 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "86d4de0081402f5e88cdac65c8dcdcc73118c1a7a465e2a05f0da05843a8ea33"
-"checksum flate2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "37847f133aae7acf82bb9577ccd8bda241df836787642654286e79679826a54b"
+"checksum flate2 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4af030962d89d62aa52cd9492083b1cd9b2d1a77764878102a6c0f86b4d5444d"
"checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3"
"checksum foreign-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
"checksum foreign-types-shared 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"
"checksum quote 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9949cfe66888ffe1d53e6ec9d9f3b70714083854be20fd5e271b232a017401e8"
"checksum quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)" = "dd636425967c33af890042c483632d33fa7a18f19ad1d7ea72e8998c6ef8dea5"
-"checksum racer 2.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "99e820b7f7701c834c3f6f8226f388c19c0ea948a3ef79ddc96aa7398b5ba87a"
+"checksum racer 2.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "0beefbfaed799c3554021a48856113ad53862311395f6d75376192884ba5fbe6"
"checksum rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8356f47b32624fef5b3301c1be97e5944ecdd595409cc5da11d05f211db6cfbd"
"checksum rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e464cd887e869cddcae8792a4ee31d23c7edd516700695608f5b98c67ee0131c"
"checksum rand_core 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "edecf0f94da5551fc9b492093e30b041a891657db7940ee221f9d2f66e82eef2"
builder.install(&cargoclippy, &image.join("bin"), 0o755);
let doc = image.join("share/doc/clippy");
builder.install(&src.join("README.md"), &doc, 0o644);
- builder.install(&src.join("LICENSE"), &doc, 0o644);
+ builder.install(&src.join("LICENSE-APACHE"), &doc, 0o644);
+ builder.install(&src.join("LICENSE-MIT"), &doc, 0o644);
// Prepare the overlay
let overlay = tmp.join("clippy-overlay");
drop(fs::remove_dir_all(&overlay));
t!(fs::create_dir_all(&overlay));
builder.install(&src.join("README.md"), &overlay, 0o644);
- builder.install(&src.join("LICENSE"), &doc, 0o644);
+ builder.install(&src.join("LICENSE-APACHE"), &doc, 0o644);
+ builder.install(&src.join("LICENSE-MIT"), &doc, 0o644);
builder.create(&overlay.join("version"), &version);
// Generate the installer tarball
t!(fs::create_dir_all(dstdir));
drop(fs::remove_file(&dst));
{
+ if !src.exists() {
+ panic!("Error: File \"{}\" not found!", src.display());
+ }
let mut s = t!(fs::File::open(&src));
let mut d = t!(fs::File::create(&dst));
io::copy(&mut s, &mut d).expect("failed to copy");
1 | #[no_mangle]
| ------------ help: remove this attribute
2 | / fn foo<T>(t: T) {
-3 | |
+3 | |
4 | | }
| |_^
|
warning: borrow of packed field requires unsafe function or block (error E0133)
--> src/main.rs:11:13
|
-11 | let y = &x.data.0;
+11 | let y = &x.data.0;
| ^^^^^^^^^
|
= note: #[warn(safe_packed_borrows)] on by default
This will produce:
```text
-warning: unused `std::result::Result` which must be used
+warning: unused `std::result::Result` that must be used
--> src/main.rs:6:5
|
6 | returns_result();
//! Additionally, the return value of this function is [`fmt::Result`] which is a
//! type alias of [`Result`]`<(), `[`std::fmt::Error`]`>`. Formatting implementations
//! should ensure that they propagate errors from the [`Formatter`][`Formatter`] (e.g., when
-//! calling [`write!`]) however, they should never return errors spuriously. That
+//! calling [`write!`]). However, they should never return errors spuriously. That
//! is, a formatting implementation must and may only return an error if the
//! passed-in [`Formatter`] returns an error. This is because, contrary to what
//! the function signature might suggest, string formatting is an infallible
///
/// let five = Rc::new(5);
///
- /// Rc::clone(&five);
+ /// let _ = Rc::clone(&five);
/// ```
#[inline]
fn clone(&self) -> Rc<T> {
///
/// let weak_five = Rc::downgrade(&Rc::new(5));
///
- /// Weak::clone(&weak_five);
+ /// let _ = Weak::clone(&weak_five);
/// ```
#[inline]
fn clone(&self) -> Weak<T> {
///
/// let five = Arc::new(5);
///
- /// Arc::clone(&five);
+ /// let _ = Arc::clone(&five);
/// ```
#[inline]
fn clone(&self) -> Arc<T> {
///
/// let weak_five = Arc::downgrade(&Arc::new(5));
///
- /// Weak::clone(&weak_five);
+ /// let _ = Weak::clone(&weak_five);
/// ```
#[inline]
fn clone(&self) -> Weak<T> {
/// use std::fmt::{self, write};
///
/// let mut output = String::new();
-/// match write(&mut output, format_args!("Hello {}!", "world")) {
-/// Err(fmt::Error) => panic!("An error occurred"),
-/// _ => (),
+/// if let Err(fmt::Error) = write(&mut output, format_args!("Hello {}!", "world")) {
+/// panic!("An error occurred");
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
//! using it. The compiler will warn us about this kind of behavior:
//!
//! ```text
-//! warning: unused result which must be used: iterator adaptors are lazy and
+//! warning: unused result that must be used: iterator adaptors are lazy and
//! do nothing unless consumed
//! ```
//!
/// Behavior:
///
/// * Both the starting and resulting pointer must be either in bounds or one
- /// byte past the end of *the same* allocated object.
+ /// byte past the end of the same allocated object.
///
/// * The computed offset, **in bytes**, cannot overflow an `isize`.
///
/// Behavior:
///
/// * Both the starting and resulting pointer must be either in bounds or one
- /// byte past the end of an allocated object.
+ /// byte past the end of the same allocated object.
///
/// * The computed offset, **in bytes**, cannot overflow an `isize`.
///
/// Behavior:
///
/// * Both the starting and resulting pointer must be either in bounds or one
- /// byte past the end of an allocated object.
+ /// byte past the end of the same allocated object.
///
/// * The computed offset cannot exceed `isize::MAX` **bytes**.
///
/// Behavior:
///
/// * Both the starting and resulting pointer must be either in bounds or one
- /// byte past the end of *the same* allocated object.
+ /// byte past the end of the same allocated object.
///
/// * The computed offset, **in bytes**, cannot overflow an `isize`.
///
/// Behavior:
///
/// * Both the starting and resulting pointer must be either in bounds or one
- /// byte past the end of an allocated object.
+ /// byte past the end of the same allocated object.
///
/// * The computed offset, **in bytes**, cannot overflow an `isize`.
///
/// Behavior:
///
/// * Both the starting and resulting pointer must be either in bounds or one
- /// byte past the end of an allocated object.
+ /// byte past the end of the same allocated object.
///
/// * The computed offset cannot exceed `isize::MAX` **bytes**.
///
use std::collections::BTreeMap;
use std::fmt::Debug;
-use std::iter;
use std::mem;
use smallvec::SmallVec;
use syntax::attr;
.collect::<P<[hir::Field]>>();
let is_unit = fields.is_empty();
- let struct_path = iter::once("ops")
- .chain(iter::once(path))
- .collect::<Vec<_>>();
+ let struct_path = ["ops", path];
let struct_path = self.std_path(e.span, &struct_path, None, is_unit);
let struct_path = hir::QPath::Resolved(None, P(struct_path));
}
impl_stable_hash_for!(struct mir::interpret::GlobalId<'tcx> { instance, promoted });
+
+impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for mir::UserTypeAnnotation<'gcx> {
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'a>,
+ hasher: &mut StableHasher<W>) {
+ mem::discriminant(self).hash_stable(hcx, hasher);
+ match *self {
+ mir::UserTypeAnnotation::Ty(ref ty) => {
+ ty.hash_stable(hcx, hasher);
+ }
+ mir::UserTypeAnnotation::FnDef(ref def_id, ref substs) => {
+ def_id.hash_stable(hcx, hasher);
+ substs.hash_stable(hcx, hasher);
+ }
+ mir::UserTypeAnnotation::AdtDef(ref def_id, ref substs) => {
+ def_id.hash_stable(hcx, hasher);
+ substs.hash_stable(hcx, hasher);
+ }
+ }
+ }
+}
});
impl_stable_hash_for!(
- impl<'tcx, R> for struct infer::canonical::QueryResult<'tcx, R> {
+ impl<'tcx, R> for struct infer::canonical::QueryResponse<'tcx, R> {
var_values, region_constraints, certainty, value
}
);
Universal,
Existential
});
+
+impl_stable_hash_for!(struct ty::subst::UserSubsts<'tcx> { substs, user_self_ty });
+
+impl_stable_hash_for!(struct ty::subst::UserSelfTy<'tcx> { impl_def_id, self_ty });
+
use infer::canonical::{
Canonical, CanonicalTyVarKind, CanonicalVarInfo, CanonicalVarKind, Canonicalized,
- SmallCanonicalVarValues,
+ OriginalQueryValues,
};
use infer::InferCtxt;
use std::sync::atomic::Ordering;
pub fn canonicalize_query<V>(
&self,
value: &V,
- var_values: &mut SmallCanonicalVarValues<'tcx>
+ query_state: &mut OriginalQueryValues<'tcx>,
) -> Canonicalized<'gcx, V>
where
V: TypeFoldable<'tcx> + Lift<'gcx>,
value,
Some(self),
self.tcx,
- CanonicalizeRegionMode {
- static_region: true,
- other_free_regions: true,
- },
- var_values,
+ &CanonicalizeAllFreeRegions,
+ query_state,
)
}
/// out the [chapter in the rustc guide][c].
///
/// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html#canonicalizing-the-query-result
- pub fn canonicalize_response<V>(
- &self,
- value: &V,
- ) -> Canonicalized<'gcx, V>
+ pub fn canonicalize_response<V>(&self, value: &V) -> Canonicalized<'gcx, V>
where
V: TypeFoldable<'tcx> + Lift<'gcx>,
{
- let mut var_values = SmallVec::new();
+ let mut query_state = OriginalQueryValues::default();
Canonicalizer::canonicalize(
value,
Some(self),
self.tcx,
- CanonicalizeRegionMode {
- static_region: false,
- other_free_regions: false,
- },
- &mut var_values
+ &CanonicalizeQueryResponse,
+ &mut query_state,
)
}
pub fn canonicalize_hr_query_hack<V>(
&self,
value: &V,
- var_values: &mut SmallCanonicalVarValues<'tcx>
+ query_state: &mut OriginalQueryValues<'tcx>,
) -> Canonicalized<'gcx, V>
where
V: TypeFoldable<'tcx> + Lift<'gcx>,
value,
Some(self),
self.tcx,
- CanonicalizeRegionMode {
- static_region: false,
- other_free_regions: true,
- },
- var_values
+ &CanonicalizeFreeRegionsOtherThanStatic,
+ query_state,
)
}
}
-/// If this flag is true, then all free regions will be replaced with
-/// a canonical var. This is used to make queries as generic as
-/// possible. For example, the query `F: Foo<'static>` would be
-/// canonicalized to `F: Foo<'0>`.
-struct CanonicalizeRegionMode {
- static_region: bool,
- other_free_regions: bool,
+/// Controls how we canonicalize "free regions" that are not inference
+/// variables. This depends on what we are canonicalizing *for* --
+/// e.g., if we are canonicalizing to create a query, we want to
+/// replace those with inference variables, since we want to make a
+/// maximally general query. But if we are canonicalizing a *query
+/// response*, then we don't typically replace free regions, as they
+/// must have been introduced from other parts of the system.
+trait CanonicalizeRegionMode {
+ fn canonicalize_free_region(
+ &self,
+ canonicalizer: &mut Canonicalizer<'_, '_, 'tcx>,
+ r: ty::Region<'tcx>,
+ ) -> ty::Region<'tcx>;
+
+ fn any(&self) -> bool;
+}
+
+struct CanonicalizeQueryResponse;
+
+impl CanonicalizeRegionMode for CanonicalizeQueryResponse {
+ fn canonicalize_free_region(
+ &self,
+ _canonicalizer: &mut Canonicalizer<'_, '_, 'tcx>,
+ r: ty::Region<'tcx>,
+ ) -> ty::Region<'tcx> {
+ match r {
+ ty::ReFree(_) | ty::ReEmpty | ty::ReErased | ty::ReStatic | ty::ReEarlyBound(..) => r,
+ _ => {
+ // Other than `'static` or `'empty`, the query
+ // response should be executing in a fully
+ // canonicalized environment, so there shouldn't be
+                // any other region names it can come up with.
+ bug!("unexpected region in query response: `{:?}`", r)
+ }
+ }
+ }
+
+ fn any(&self) -> bool {
+ false
+ }
}
-impl CanonicalizeRegionMode {
+struct CanonicalizeAllFreeRegions;
+
+impl CanonicalizeRegionMode for CanonicalizeAllFreeRegions {
+ fn canonicalize_free_region(
+ &self,
+ canonicalizer: &mut Canonicalizer<'_, '_, 'tcx>,
+ r: ty::Region<'tcx>,
+ ) -> ty::Region<'tcx> {
+ canonicalizer.canonical_var_for_region(r)
+ }
+
fn any(&self) -> bool {
- self.static_region || self.other_free_regions
+ true
+ }
+}
+
+struct CanonicalizeFreeRegionsOtherThanStatic;
+
+impl CanonicalizeRegionMode for CanonicalizeFreeRegionsOtherThanStatic {
+ fn canonicalize_free_region(
+ &self,
+ canonicalizer: &mut Canonicalizer<'_, '_, 'tcx>,
+ r: ty::Region<'tcx>,
+ ) -> ty::Region<'tcx> {
+ if let ty::ReStatic = r {
+ r
+ } else {
+ canonicalizer.canonical_var_for_region(r)
+ }
+ }
+
+ fn any(&self) -> bool {
+ true
}
}
infcx: Option<&'cx InferCtxt<'cx, 'gcx, 'tcx>>,
tcx: TyCtxt<'cx, 'gcx, 'tcx>,
variables: SmallVec<[CanonicalVarInfo; 8]>,
- var_values: &'cx mut SmallCanonicalVarValues<'tcx>,
+ query_state: &'cx mut OriginalQueryValues<'tcx>,
// Note that indices is only used once `var_values` is big enough to be
// heap-allocated.
indices: FxHashMap<Kind<'tcx>, CanonicalVar>,
- canonicalize_region_mode: CanonicalizeRegionMode,
+ canonicalize_region_mode: &'cx dyn CanonicalizeRegionMode,
needs_canonical_flags: TypeFlags,
}
}
ty::ReVar(vid) => {
- let r = self
- .infcx
+ let r = self.infcx
.unwrap()
.borrow_region_constraints()
.opportunistic_resolve_var(self.tcx, vid);
- let info = CanonicalVarInfo {
- kind: CanonicalVarKind::Region,
- };
debug!(
"canonical: region var found with vid {:?}, \
opportunistically resolved to {:?}",
vid, r
);
- let cvar = self.canonical_var(info, r.into());
- self.tcx().mk_region(ty::ReCanonical(cvar))
- }
-
- ty::ReStatic => {
- if self.canonicalize_region_mode.static_region {
- let info = CanonicalVarInfo {
- kind: CanonicalVarKind::Region,
- };
- let cvar = self.canonical_var(info, r.into());
- self.tcx().mk_region(ty::ReCanonical(cvar))
- } else {
- r
- }
+ self.canonical_var_for_region(r)
}
- ty::ReEarlyBound(..)
+ ty::ReStatic
+ | ty::ReEarlyBound(..)
| ty::ReFree(_)
| ty::ReScope(_)
| ty::RePlaceholder(..)
| ty::ReEmpty
- | ty::ReErased => {
- if self.canonicalize_region_mode.other_free_regions {
- let info = CanonicalVarInfo {
- kind: CanonicalVarKind::Region,
- };
- let cvar = self.canonical_var(info, r.into());
- self.tcx().mk_region(ty::ReCanonical(cvar))
- } else {
- r
- }
- }
+ | ty::ReErased => self.canonicalize_region_mode.canonicalize_free_region(self, r),
ty::ReClosureBound(..) | ty::ReCanonical(_) => {
bug!("canonical region encountered during canonicalization")
/// `canonicalize_query` and `canonicalize_response`.
fn canonicalize<V>(
value: &V,
- infcx: Option<&'cx InferCtxt<'cx, 'gcx, 'tcx>>,
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
- canonicalize_region_mode: CanonicalizeRegionMode,
- var_values: &'cx mut SmallCanonicalVarValues<'tcx>
+ infcx: Option<&InferCtxt<'_, 'gcx, 'tcx>>,
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ canonicalize_region_mode: &dyn CanonicalizeRegionMode,
+ query_state: &mut OriginalQueryValues<'tcx>,
) -> Canonicalized<'gcx, V>
where
V: TypeFoldable<'tcx> + Lift<'gcx>,
canonicalize_region_mode,
needs_canonical_flags,
variables: SmallVec::new(),
- var_values,
+ query_state,
indices: FxHashMap::default(),
};
let out_value = value.fold_with(&mut canonicalizer);
fn canonical_var(&mut self, info: CanonicalVarInfo, kind: Kind<'tcx>) -> CanonicalVar {
let Canonicalizer {
variables,
- var_values,
+ query_state,
indices,
..
} = self;
+ let var_values = &mut query_state.var_values;
+
// This code is hot. `variables` and `var_values` are usually small
// (fewer than 8 elements ~95% of the time). They are SmallVec's to
// avoid allocations in those cases. We also don't use `indices` to
// fill up `indices` to facilitate subsequent lookups.
if var_values.spilled() {
assert!(indices.is_empty());
- *indices =
- var_values.iter()
- .enumerate()
- .map(|(i, &kind)| (kind, CanonicalVar::new(i)))
- .collect();
+ *indices = var_values
+ .iter()
+ .enumerate()
+ .map(|(i, &kind)| (kind, CanonicalVar::new(i)))
+ .collect();
}
// The cv is the index of the appended element.
CanonicalVar::new(var_values.len() - 1)
}
} else {
// `var_values` is large. Do a hashmap search via `indices`.
- *indices
- .entry(kind)
- .or_insert_with(|| {
- variables.push(info);
- var_values.push(kind);
- assert_eq!(variables.len(), var_values.len());
- CanonicalVar::new(variables.len() - 1)
- })
+ *indices.entry(kind).or_insert_with(|| {
+ variables.push(info);
+ var_values.push(kind);
+ assert_eq!(variables.len(), var_values.len());
+ CanonicalVar::new(variables.len() - 1)
+ })
}
}
+ fn canonical_var_for_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
+ let info = CanonicalVarInfo {
+ kind: CanonicalVarKind::Region,
+ };
+ let cvar = self.canonical_var(info, r.into());
+ self.tcx().mk_region(ty::ReCanonical(cvar))
+ }
+
/// Given a type variable `ty_var` of the given kind, first check
/// if `ty_var` is bound to anything; if so, canonicalize
/// *that*. Otherwise, create a new canonical variable for
mod canonicalizer;
-pub mod query_result;
+pub mod query_response;
mod substitute;
pub var_values: IndexVec<CanonicalVar, Kind<'tcx>>,
}
-/// Like CanonicalVarValues, but for use in places where a SmallVec is
-/// appropriate.
-pub type SmallCanonicalVarValues<'tcx> = SmallVec<[Kind<'tcx>; 8]>;
+/// When we canonicalize a value to form a query, we wind up replacing
+/// various parts of it with canonical variables. This struct stores
+/// those replaced bits to remember for when we process the query
+/// result.
+#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
+pub struct OriginalQueryValues<'tcx> {
+ /// This is equivalent to `CanonicalVarValues`, but using a
+ /// `SmallVec` yields a significant performance win.
+ pub var_values: SmallVec<[Kind<'tcx>; 8]>,
+}
/// Information about a canonical variable that is included with the
/// canonical value. This is sufficient information for code to create
}
/// After we execute a query with a canonicalized key, we get back a
-/// `Canonical<QueryResult<..>>`. You can use
+/// `Canonical<QueryResponse<..>>`. You can use
/// `instantiate_query_result` to access the data in this result.
#[derive(Clone, Debug)]
-pub struct QueryResult<'tcx, R> {
+pub struct QueryResponse<'tcx, R> {
pub var_values: CanonicalVarValues<'tcx>,
pub region_constraints: Vec<QueryRegionConstraint<'tcx>>,
pub certainty: Certainty,
pub type Canonicalized<'gcx, V> = Canonical<'gcx, <V as Lift<'gcx>>::Lifted>;
-pub type CanonicalizedQueryResult<'gcx, T> =
- Lrc<Canonical<'gcx, QueryResult<'gcx, <T as Lift<'gcx>>::Lifted>>>;
+pub type CanonicalizedQueryResponse<'gcx, T> =
+ Lrc<Canonical<'gcx, QueryResponse<'gcx, <T as Lift<'gcx>>::Lifted>>>;
/// Indicates whether or not we were able to prove the query to be
/// true.
}
}
-impl<'tcx, R> QueryResult<'tcx, R> {
+impl<'tcx, R> QueryResponse<'tcx, R> {
pub fn is_proven(&self) -> bool {
self.certainty.is_proven()
}
}
}
-impl<'tcx, R> Canonical<'tcx, QueryResult<'tcx, R>> {
+impl<'tcx, R> Canonical<'tcx, QueryResponse<'tcx, R>> {
pub fn is_proven(&self) -> bool {
self.value.is_proven()
}
/// inference variables and applies it to the canonical value.
/// Returns both the instantiated result *and* the substitution S.
///
- /// This is useful at the start of a query: it basically brings
- /// the canonical value "into scope" within your new infcx. At the
- /// end of processing, the substitution S (once canonicalized)
- /// then represents the values that you computed for each of the
- /// canonical inputs to your query.
+ /// This is only meant to be invoked as part of constructing an
+ /// inference context at the start of a query (see
+ /// `InferCtxtBuilder::enter_with_canonical`). It basically
+ /// brings the canonical value "into scope" within your new infcx.
+ ///
+ /// At the end of processing, the substitution S (once
+ /// canonicalized) then represents the values that you computed
+ /// for each of the canonical inputs to your query.
+
pub fn instantiate_canonical_with_fresh_inference_vars<T>(
&self,
span: Span,
}
BraceStructTypeFoldableImpl! {
- impl<'tcx, R> TypeFoldable<'tcx> for QueryResult<'tcx, R> {
+ impl<'tcx, R> TypeFoldable<'tcx> for QueryResponse<'tcx, R> {
var_values, region_constraints, certainty, value
} where R: TypeFoldable<'tcx>,
}
BraceStructLiftImpl! {
- impl<'a, 'tcx, R> Lift<'tcx> for QueryResult<'a, R> {
- type Lifted = QueryResult<'tcx, R::Lifted>;
+ impl<'a, 'tcx, R> Lift<'tcx> for QueryResponse<'a, R> {
+ type Lifted = QueryResponse<'tcx, R::Lifted>;
var_values, region_constraints, certainty, value
} where R: Lift<'tcx>
}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! This module contains the code to instantiate a "query result", and
+//! in particular to extract out the resulting region obligations and
+//! encode them therein.
+//!
+//! For an overview of what canonicalization is and how it fits into
+//! rustc, check out the [chapter in the rustc guide][c].
+//!
+//! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
+
+use infer::canonical::substitute::substitute_value;
+use infer::canonical::{
+ Canonical, CanonicalVarKind, CanonicalVarValues, CanonicalizedQueryResponse, Certainty,
+ OriginalQueryValues, QueryRegionConstraint, QueryResponse,
+};
+use infer::region_constraints::{Constraint, RegionConstraintData};
+use infer::InferCtxtBuilder;
+use infer::{InferCtxt, InferOk, InferResult};
+use rustc_data_structures::indexed_vec::Idx;
+use rustc_data_structures::indexed_vec::IndexVec;
+use rustc_data_structures::sync::Lrc;
+use std::fmt::Debug;
+use syntax_pos::DUMMY_SP;
+use traits::query::{Fallible, NoSolution};
+use traits::{FulfillmentContext, TraitEngine};
+use traits::{Obligation, ObligationCause, PredicateObligation};
+use ty::fold::TypeFoldable;
+use ty::subst::{Kind, UnpackedKind};
+use ty::{self, CanonicalVar, Lift, Ty, TyCtxt};
+
+impl<'cx, 'gcx, 'tcx> InferCtxtBuilder<'cx, 'gcx, 'tcx> {
+ /// The "main method" for a canonicalized trait query. Given the
+ /// canonical key `canonical_key`, this method will create a new
+ /// inference context, instantiate the key, and run your operation
+ /// `op`. The operation should yield up a result (of type `R`) as
+ /// well as a set of trait obligations that must be fully
+ /// satisfied. These obligations will be processed and the
+ /// canonical result created.
+ ///
+ /// Returns `NoSolution` in the event of any error.
+ ///
+ /// (It might be mildly nicer to implement this on `TyCtxt`, and
+ /// not `InferCtxtBuilder`, but that is a bit tricky right now.
+ /// In part because we would need a `for<'gcx: 'tcx>` sort of
+ /// bound for the closure and in part because it is convenient to
+ /// have `'tcx` be free on this function so that we can talk about
+ /// `K: TypeFoldable<'tcx>`.)
+ pub fn enter_canonical_trait_query<K, R>(
+ &'tcx mut self,
+ canonical_key: &Canonical<'tcx, K>,
+ operation: impl FnOnce(&InferCtxt<'_, 'gcx, 'tcx>, &mut FulfillmentContext<'tcx>, K)
+ -> Fallible<R>,
+ ) -> Fallible<CanonicalizedQueryResponse<'gcx, R>>
+ where
+ K: TypeFoldable<'tcx>,
+ R: Debug + Lift<'gcx> + TypeFoldable<'tcx>,
+ {
+ self.enter_with_canonical(
+ DUMMY_SP,
+ canonical_key,
+ |ref infcx, key, canonical_inference_vars| {
+ let fulfill_cx = &mut FulfillmentContext::new();
+ let value = operation(infcx, fulfill_cx, key)?;
+ infcx.make_canonicalized_query_response(canonical_inference_vars, value, fulfill_cx)
+ },
+ )
+ }
+}
+
+impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
+ /// This method is meant to be invoked as the final step of a canonical query
+ /// implementation. It is given:
+ ///
+ /// - the instantiated variables `inference_vars` created from the query key
+ /// - the result `answer` of the query
+ /// - a fulfillment context `fulfill_cx` that may contain various obligations which
+ /// have yet to be proven.
+ ///
+ /// Given this, the function will process the obligations pending
+ /// in `fulfill_cx`:
+ ///
+ /// - If all the obligations can be proven successfully, it will
+ /// package up any resulting region obligations (extracted from
+ /// `infcx`) along with the fully resolved value `answer` into a
+ /// query result (which is then itself canonicalized).
+ /// - If some obligations can be neither proven nor disproven, then
+ /// the same thing happens, but the resulting query is marked as ambiguous.
+ /// - Finally, if any of the obligations result in a hard error,
+ /// then `Err(NoSolution)` is returned.
+ pub fn make_canonicalized_query_response<T>(
+ &self,
+ inference_vars: CanonicalVarValues<'tcx>,
+ answer: T,
+ fulfill_cx: &mut FulfillmentContext<'tcx>,
+ ) -> Fallible<CanonicalizedQueryResponse<'gcx, T>>
+ where
+ T: Debug + Lift<'gcx> + TypeFoldable<'tcx>,
+ {
+ let query_response = self.make_query_response(inference_vars, answer, fulfill_cx)?;
+ let canonical_result = self.canonicalize_response(&query_response);
+
+ debug!(
+ "make_canonicalized_query_response: canonical_result = {:#?}",
+ canonical_result
+ );
+
+ Ok(Lrc::new(canonical_result))
+ }
+
+ /// Helper for `make_canonicalized_query_response` that does
+ /// everything up until the final canonicalization.
+ fn make_query_response<T>(
+ &self,
+ inference_vars: CanonicalVarValues<'tcx>,
+ answer: T,
+ fulfill_cx: &mut FulfillmentContext<'tcx>,
+ ) -> Result<QueryResponse<'tcx, T>, NoSolution>
+ where
+ T: Debug + TypeFoldable<'tcx> + Lift<'gcx>,
+ {
+ let tcx = self.tcx;
+
+ debug!(
+ "make_query_response(\
+ inference_vars={:?}, \
+ answer={:?})",
+ inference_vars, answer,
+ );
+
+ // Select everything, returning errors.
+ let true_errors = fulfill_cx.select_where_possible(self).err().unwrap_or_else(Vec::new);
+ debug!("true_errors = {:#?}", true_errors);
+
+ if !true_errors.is_empty() {
+ // FIXME -- we don't indicate *why* we failed to solve
+ debug!("make_query_response: true_errors={:#?}", true_errors);
+ return Err(NoSolution);
+ }
+
+ // Anything left unselected *now* must be an ambiguity.
+ let ambig_errors = fulfill_cx.select_all_or_error(self).err().unwrap_or_else(Vec::new);
+ debug!("ambig_errors = {:#?}", ambig_errors);
+
+ let region_obligations = self.take_registered_region_obligations();
+ let region_constraints = self.with_region_constraints(|region_constraints| {
+ make_query_outlives(
+ tcx,
+ region_obligations
+ .iter()
+ .map(|(_, r_o)| (r_o.sup_type, r_o.sub_region)),
+ region_constraints,
+ )
+ });
+
+ let certainty = if ambig_errors.is_empty() {
+ Certainty::Proven
+ } else {
+ Certainty::Ambiguous
+ };
+
+ Ok(QueryResponse {
+ var_values: inference_vars,
+ region_constraints,
+ certainty,
+ value: answer,
+ })
+ }
+
+ /// Given the (canonicalized) result to a canonical query,
+ /// instantiates the result so it can be used, plugging in the
+ /// values from the canonical query. (Note that the result may
+ /// have been ambiguous; you should check the certainty level of
+ /// the query before applying this function.)
+ ///
+ /// To get a good understanding of what is happening here, check
+ /// out the [chapter in the rustc guide][c].
+ ///
+ /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html#processing-the-canonicalized-query-result
+ pub fn instantiate_query_response_and_region_obligations<R>(
+ &self,
+ cause: &ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ original_values: &OriginalQueryValues<'tcx>,
+ query_response: &Canonical<'tcx, QueryResponse<'tcx, R>>,
+ ) -> InferResult<'tcx, R>
+ where
+ R: Debug + TypeFoldable<'tcx>,
+ {
+ let InferOk {
+ value: result_subst,
+ mut obligations,
+ } = self.query_response_substitution(cause, param_env, original_values, query_response)?;
+
+ obligations.extend(self.query_region_constraints_into_obligations(
+ cause,
+ param_env,
+ &query_response.value.region_constraints,
+ &result_subst,
+ ));
+
+ let user_result: R =
+ query_response.substitute_projected(self.tcx, &result_subst, |q_r| &q_r.value);
+
+ Ok(InferOk {
+ value: user_result,
+ obligations,
+ })
+ }
+
+ /// An alternative to
+ /// `instantiate_query_response_and_region_obligations` that is more
+ /// efficient for NLL. NLL is a bit more advanced in the
+ /// "transition to chalk" than the rest of the compiler. During
+ /// the NLL type check, all of the "processing" of types and
+ /// things happens in queries -- the NLL checker itself is only
+ /// interested in the region obligations (`'a: 'b` or `T: 'b`)
+ /// that come out of these queries, which it wants to convert into
+ /// MIR-based constraints and solve. Therefore, it is most
+ /// convenient for the NLL Type Checker to **directly consume**
+ /// the `QueryRegionConstraint` values that arise from doing a
+ /// query. This is in contrast to other parts of the compiler, which
+ /// would prefer for those `QueryRegionConstraint` to be converted
+ /// into the older infcx-style constraints (e.g., calls to
+ /// `sub_regions` or `register_region_obligation`).
+ ///
+ /// Therefore, `instantiate_nll_query_response_and_region_obligations` performs the same
+ /// basic operations as `instantiate_query_response_and_region_obligations` but
+ /// it returns its result differently:
+ ///
+ /// - It creates a substitution `S` that maps from the original
+ /// query variables to the values computed in the query
+ /// result. If any errors arise, they are propagated back as an
+ /// `Err` result.
+ /// - In the case of a successful substitution, we will append
+ /// `QueryRegionConstraint` values onto the
+ /// `output_query_region_constraints` vector for the solver to
+ /// use (if an error arises, some values may also be pushed, but
+ /// they should be ignored).
+ /// - It **can happen** (though it rarely does currently) that
+ /// equating types and things will give rise to subobligations
+ /// that must be processed. In this case, those subobligations
+ /// are propagated back in the return value.
+ /// - Finally, the query result (of type `R`) is propagated back,
+ /// after applying the substitution `S`.
+ pub fn instantiate_nll_query_response_and_region_obligations<R>(
+ &self,
+ cause: &ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ original_values: &OriginalQueryValues<'tcx>,
+ query_response: &Canonical<'tcx, QueryResponse<'tcx, R>>,
+ output_query_region_constraints: &mut Vec<QueryRegionConstraint<'tcx>>,
+ ) -> InferResult<'tcx, R>
+ where
+ R: Debug + TypeFoldable<'tcx>,
+ {
+ // In an NLL query, there should be no type variables in the
+ // query, only region variables.
+ debug_assert!(query_response.variables.iter().all(|v| match v.kind {
+ CanonicalVarKind::Ty(_) => false,
+ CanonicalVarKind::Region => true,
+ }));
+
+ let result_subst =
+ self.query_response_substitution_guess(cause, original_values, query_response);
+
+ // Compute `QueryRegionConstraint` values that unify each of
+ // the original values `v_o` that was canonicalized into a
+ // variable...
+ let mut obligations = vec![];
+
+ for (index, original_value) in original_values.var_values.iter().enumerate() {
+ // ...with the value `v_r` of that variable from the query.
+ let result_value = query_response.substitute_projected(self.tcx, &result_subst, |v| {
+ &v.var_values[CanonicalVar::new(index)]
+ });
+ match (original_value.unpack(), result_value.unpack()) {
+ (UnpackedKind::Lifetime(ty::ReErased), UnpackedKind::Lifetime(ty::ReErased)) => {
+ // no action needed
+ }
+
+ (UnpackedKind::Lifetime(v_o), UnpackedKind::Lifetime(v_r)) => {
+ // To make `v_o = v_r`, we emit `v_o: v_r` and `v_r: v_o`.
+ if v_o != v_r {
+ output_query_region_constraints
+ .push(ty::Binder::dummy(ty::OutlivesPredicate(v_o.into(), v_r)));
+ output_query_region_constraints
+ .push(ty::Binder::dummy(ty::OutlivesPredicate(v_r.into(), v_o)));
+ }
+ }
+
+ (UnpackedKind::Type(v1), UnpackedKind::Type(v2)) => {
+ let ok = self.at(cause, param_env).eq(v1, v2)?;
+ obligations.extend(ok.into_obligations());
+ }
+
+ _ => {
+ bug!(
+ "kind mismatch, cannot unify {:?} and {:?}",
+ original_value,
+ result_value
+ );
+ }
+ }
+ }
+
+ // ...also include the other query region constraints from the query.
+ output_query_region_constraints.extend(
+ query_response.value.region_constraints.iter().filter_map(|r_c| {
+ let &ty::OutlivesPredicate(k1, r2) = r_c.skip_binder(); // reconstructed below
+ let k1 = substitute_value(self.tcx, &result_subst, &k1);
+ let r2 = substitute_value(self.tcx, &result_subst, &r2);
+ if k1 != r2.into() {
+ Some(ty::Binder::bind(ty::OutlivesPredicate(k1, r2)))
+ } else {
+ None
+ }
+ })
+ );
+
+ let user_result: R =
+ query_response.substitute_projected(self.tcx, &result_subst, |q_r| &q_r.value);
+
+ Ok(InferOk {
+ value: user_result,
+ obligations,
+ })
+ }
+
+ /// Given the original values and the (canonicalized) result from
+ /// computing a query, returns a substitution that can be applied
+ /// to the query result to convert the result back into the
+ /// original namespace.
+ ///
+ /// The substitution also comes accompanied with subobligations
+ /// that arose from unification; these might occur if (for
+ /// example) we are doing lazy normalization and the value
+ /// assigned to a type variable is unified with an unnormalized
+ /// projection.
+ fn query_response_substitution<R>(
+ &self,
+ cause: &ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ original_values: &OriginalQueryValues<'tcx>,
+ query_response: &Canonical<'tcx, QueryResponse<'tcx, R>>,
+ ) -> InferResult<'tcx, CanonicalVarValues<'tcx>>
+ where
+ R: Debug + TypeFoldable<'tcx>,
+ {
+ debug!(
+ "query_response_substitution(original_values={:#?}, query_response={:#?})",
+ original_values, query_response,
+ );
+
+ let result_subst =
+ self.query_response_substitution_guess(cause, original_values, query_response);
+
+ let obligations = self.unify_query_response_substitution_guess(
+ cause,
+ param_env,
+ original_values,
+ &result_subst,
+ query_response,
+ )?
+ .into_obligations();
+
+ Ok(InferOk {
+ value: result_subst,
+ obligations,
+ })
+ }
+
+ /// Given the original values and the (canonicalized) result from
+ /// computing a query, returns a **guess** at a substitution that
+ /// can be applied to the query result to convert the result back
+ /// into the original namespace. This is called a **guess**
+ /// because it uses a quick heuristic to find the values for each
+ /// canonical variable; if that quick heuristic fails, then we
+ /// will instantiate fresh inference variables for each canonical
+ /// variable instead. Therefore, the result of this method must be
+ /// properly unified.
+ fn query_response_substitution_guess<R>(
+ &self,
+ cause: &ObligationCause<'tcx>,
+ original_values: &OriginalQueryValues<'tcx>,
+ query_response: &Canonical<'tcx, QueryResponse<'tcx, R>>,
+ ) -> CanonicalVarValues<'tcx>
+ where
+ R: Debug + TypeFoldable<'tcx>,
+ {
+ debug!(
+ "query_response_substitution_guess(original_values={:#?}, query_response={:#?})",
+ original_values, query_response,
+ );
+
+ // Every canonical query result includes values for each of
+ // the inputs to the query. Therefore, we begin by unifying
+ // these values with the original inputs that were
+ // canonicalized.
+ let result_values = &query_response.value.var_values;
+ assert_eq!(original_values.var_values.len(), result_values.len());
+
+ // Quickly try to find initial values for the canonical
+ // variables in the result in terms of the query. We do this
+ // by iterating down the values that the query gave to each of
+ // the canonical inputs. If we find that one of those values
+ // is directly equal to one of the canonical variables in the
+ // result, then we can take the corresponding value from the
+ // input. See the example above.
+ let mut opt_values: IndexVec<CanonicalVar, Option<Kind<'tcx>>> =
+ IndexVec::from_elem_n(None, query_response.variables.len());
+
+ // In terms of our example above, we are iterating over pairs like:
+ // [(?A, Vec<?0>), ('static, '?1), (?B, ?0)]
+ for (original_value, result_value) in original_values.var_values.iter().zip(result_values) {
+ match result_value.unpack() {
+ UnpackedKind::Type(result_value) => {
+ // e.g., here `result_value` might be `?0` in the example above...
+ if let ty::Infer(ty::InferTy::CanonicalTy(index)) = result_value.sty {
+ // in which case we would set `canonical_vars[0]` to `Some(?U)`.
+ opt_values[index] = Some(*original_value);
+ }
+ }
+ UnpackedKind::Lifetime(result_value) => {
+ // e.g., here `result_value` might be `'?1` in the example above...
+ if let &ty::RegionKind::ReCanonical(index) = result_value {
+ // in which case we would set `canonical_vars[0]` to `Some('static)`.
+ opt_values[index] = Some(*original_value);
+ }
+ }
+ }
+ }
+
+ // Create a result substitution: if we found a value for a
+ // given variable in the loop above, use that. Otherwise, use
+ // a fresh inference variable.
+ let result_subst = CanonicalVarValues {
+ var_values: query_response
+ .variables
+ .iter()
+ .enumerate()
+ .map(|(index, info)| opt_values[CanonicalVar::new(index)].unwrap_or_else(||
+ self.fresh_inference_var_for_canonical_var(cause.span, *info)
+ ))
+ .collect(),
+ };
+
+ result_subst
+ }
+
+ /// Given a "guess" at the values for the canonical variables in
+ /// the input, try to unify with the *actual* values found in the
+ /// query result. Often, but not always, this is a no-op, because
+ /// we already found the mapping in the "guessing" step.
+ ///
+ /// See also: `query_response_substitution_guess`
+ fn unify_query_response_substitution_guess<R>(
+ &self,
+ cause: &ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ original_values: &OriginalQueryValues<'tcx>,
+ result_subst: &CanonicalVarValues<'tcx>,
+ query_response: &Canonical<'tcx, QueryResponse<'tcx, R>>,
+ ) -> InferResult<'tcx, ()>
+ where
+ R: Debug + TypeFoldable<'tcx>,
+ {
+ // A closure that yields the result value for the given
+ // canonical variable; this is taken from
+ // `query_response.var_values` after applying the substitution
+ // `result_subst`.
+ let substituted_query_response = |index: CanonicalVar| -> Kind<'tcx> {
+ query_response.substitute_projected(self.tcx, &result_subst, |v| &v.var_values[index])
+ };
+
+ // Unify the original value for each variable with the value
+ // taken from `query_response` (after applying `result_subst`).
+ Ok(self.unify_canonical_vars(
+ cause,
+ param_env,
+ original_values,
+ substituted_query_response,
+ )?)
+ }
+
+ /// Converts the region constraints resulting from a query into an
+ /// iterator of obligations.
+ fn query_region_constraints_into_obligations<'a>(
+ &'a self,
+ cause: &'a ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ unsubstituted_region_constraints: &'a [QueryRegionConstraint<'tcx>],
+ result_subst: &'a CanonicalVarValues<'tcx>,
+ ) -> impl Iterator<Item = PredicateObligation<'tcx>> + 'a {
+ Box::new(
+ unsubstituted_region_constraints
+ .iter()
+ .map(move |constraint| {
+ let ty::OutlivesPredicate(k1, r2) = constraint.skip_binder(); // restored below
+ let k1 = substitute_value(self.tcx, result_subst, k1);
+ let r2 = substitute_value(self.tcx, result_subst, r2);
+
+ Obligation::new(
+ cause.clone(),
+ param_env,
+ match k1.unpack() {
+ UnpackedKind::Lifetime(r1) => ty::Predicate::RegionOutlives(
+ ty::Binder::dummy(
+ ty::OutlivesPredicate(r1, r2)
+ )),
+ UnpackedKind::Type(t1) => ty::Predicate::TypeOutlives(
+ ty::Binder::dummy(ty::OutlivesPredicate(
+ t1, r2
+ )))
+ }
+ )
+ })
+ ) as Box<dyn Iterator<Item = _>>
+ }
+
+ /// Given two sets of values for the same set of canonical variables, unify them.
+ /// The second set is produced lazilly by supplying indices from the first set.
+ fn unify_canonical_vars(
+ &self,
+ cause: &ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ variables1: &OriginalQueryValues<'tcx>,
+ variables2: impl Fn(CanonicalVar) -> Kind<'tcx>,
+ ) -> InferResult<'tcx, ()> {
+ self.commit_if_ok(|_| {
+ let mut obligations = vec![];
+ for (index, value1) in variables1.var_values.iter().enumerate() {
+ let value2 = variables2(CanonicalVar::new(index));
+
+ match (value1.unpack(), value2.unpack()) {
+ (UnpackedKind::Type(v1), UnpackedKind::Type(v2)) => {
+ obligations
+ .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
+ }
+ (
+ UnpackedKind::Lifetime(ty::ReErased),
+ UnpackedKind::Lifetime(ty::ReErased),
+ ) => {
+ // no action needed
+ }
+ (UnpackedKind::Lifetime(v1), UnpackedKind::Lifetime(v2)) => {
+ obligations
+ .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
+ }
+ _ => {
+ bug!("kind mismatch, cannot unify {:?} and {:?}", value1, value2,);
+ }
+ }
+ }
+ Ok(InferOk {
+ value: (),
+ obligations,
+ })
+ })
+ }
+}
+
+/// Given the region obligations and constraints scraped from the infcx,
+/// creates query region constraints.
+pub fn make_query_outlives<'tcx>(
+ tcx: TyCtxt<'_, '_, 'tcx>,
+ outlives_obligations: impl Iterator<Item = (Ty<'tcx>, ty::Region<'tcx>)>,
+ region_constraints: &RegionConstraintData<'tcx>,
+) -> Vec<QueryRegionConstraint<'tcx>> {
+ let RegionConstraintData {
+ constraints,
+ verifys,
+ givens,
+ } = region_constraints;
+
+ assert!(verifys.is_empty());
+ assert!(givens.is_empty());
+
+ let outlives: Vec<_> = constraints
+ .into_iter()
+ .map(|(k, _)| match *k {
+ // Swap regions because we are going from sub (<=) to outlives
+ // (>=).
+ Constraint::VarSubVar(v1, v2) => ty::OutlivesPredicate(
+ tcx.mk_region(ty::ReVar(v2)).into(),
+ tcx.mk_region(ty::ReVar(v1)),
+ ),
+ Constraint::VarSubReg(v1, r2) => {
+ ty::OutlivesPredicate(r2.into(), tcx.mk_region(ty::ReVar(v1)))
+ }
+ Constraint::RegSubVar(r1, v2) => {
+ ty::OutlivesPredicate(tcx.mk_region(ty::ReVar(v2)).into(), r1)
+ }
+ Constraint::RegSubReg(r1, r2) => ty::OutlivesPredicate(r2.into(), r1),
+ })
+ .map(ty::Binder::dummy) // no bound regions in the code above
+ .chain(
+ outlives_obligations
+ .map(|(ty, r)| ty::OutlivesPredicate(ty.into(), r))
+ .map(ty::Binder::dummy), // no bound regions in the code above
+ )
+ .collect();
+
+ outlives
+}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! This module contains the code to instantiate a "query result", and
-//! in particular to extract out the resulting region obligations and
-//! encode them therein.
-//!
-//! For an overview of what canonicaliation is and how it fits into
-//! rustc, check out the [chapter in the rustc guide][c].
-//!
-//! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
-
-use infer::canonical::substitute::substitute_value;
-use infer::canonical::{
- Canonical, CanonicalVarKind, CanonicalVarValues, CanonicalizedQueryResult, Certainty,
- QueryRegionConstraint, QueryResult, SmallCanonicalVarValues,
-};
-use infer::region_constraints::{Constraint, RegionConstraintData};
-use infer::InferCtxtBuilder;
-use infer::{InferCtxt, InferOk, InferResult};
-use rustc_data_structures::indexed_vec::Idx;
-use rustc_data_structures::indexed_vec::IndexVec;
-use rustc_data_structures::sync::Lrc;
-use std::fmt::Debug;
-use syntax_pos::DUMMY_SP;
-use traits::query::{Fallible, NoSolution};
-use traits::{FulfillmentContext, TraitEngine};
-use traits::{Obligation, ObligationCause, PredicateObligation};
-use ty::fold::TypeFoldable;
-use ty::subst::{Kind, UnpackedKind};
-use ty::{self, CanonicalVar, Lift, Ty, TyCtxt};
-
-impl<'cx, 'gcx, 'tcx> InferCtxtBuilder<'cx, 'gcx, 'tcx> {
- /// The "main method" for a canonicalized trait query. Given the
- /// canonical key `canonical_key`, this method will create a new
- /// inference context, instantiate the key, and run your operation
- /// `op`. The operation should yield up a result (of type `R`) as
- /// well as a set of trait obligations that must be fully
- /// satisfied. These obligations will be processed and the
- /// canonical result created.
- ///
- /// Returns `NoSolution` in the event of any error.
- ///
- /// (It might be mildly nicer to implement this on `TyCtxt`, and
- /// not `InferCtxtBuilder`, but that is a bit tricky right now.
- /// In part because we would need a `for<'gcx: 'tcx>` sort of
- /// bound for the closure and in part because it is convenient to
- /// have `'tcx` be free on this function so that we can talk about
- /// `K: TypeFoldable<'tcx>`.)
- pub fn enter_canonical_trait_query<K, R>(
- &'tcx mut self,
- canonical_key: &Canonical<'tcx, K>,
- operation: impl FnOnce(&InferCtxt<'_, 'gcx, 'tcx>, &mut FulfillmentContext<'tcx>, K)
- -> Fallible<R>,
- ) -> Fallible<CanonicalizedQueryResult<'gcx, R>>
- where
- K: TypeFoldable<'tcx>,
- R: Debug + Lift<'gcx> + TypeFoldable<'tcx>,
- {
- self.enter(|ref infcx| {
- let (key, canonical_inference_vars) =
- infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &canonical_key);
- let fulfill_cx = &mut FulfillmentContext::new();
- let value = operation(infcx, fulfill_cx, key)?;
- infcx.make_canonicalized_query_result(canonical_inference_vars, value, fulfill_cx)
- })
- }
-}
-
-impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
- /// This method is meant to be invoked as the final step of a canonical query
- /// implementation. It is given:
- ///
- /// - the instantiated variables `inference_vars` created from the query key
- /// - the result `answer` of the query
- /// - a fulfillment context `fulfill_cx` that may contain various obligations which
- /// have yet to be proven.
- ///
- /// Given this, the function will process the obligations pending
- /// in `fulfill_cx`:
- ///
- /// - If all the obligations can be proven successfully, it will
- /// package up any resulting region obligations (extracted from
- /// `infcx`) along with the fully resolved value `answer` into a
- /// query result (which is then itself canonicalized).
- /// - If some obligations can be neither proven nor disproven, then
- /// the same thing happens, but the resulting query is marked as ambiguous.
- /// - Finally, if any of the obligations result in a hard error,
- /// then `Err(NoSolution)` is returned.
- pub fn make_canonicalized_query_result<T>(
- &self,
- inference_vars: CanonicalVarValues<'tcx>,
- answer: T,
- fulfill_cx: &mut FulfillmentContext<'tcx>,
- ) -> Fallible<CanonicalizedQueryResult<'gcx, T>>
- where
- T: Debug + Lift<'gcx> + TypeFoldable<'tcx>,
- {
- let query_result = self.make_query_result(inference_vars, answer, fulfill_cx)?;
- let canonical_result = self.canonicalize_response(&query_result);
-
- debug!(
- "make_canonicalized_query_result: canonical_result = {:#?}",
- canonical_result
- );
-
- Ok(Lrc::new(canonical_result))
- }
-
- /// Helper for `make_canonicalized_query_result` that does
- /// everything up until the final canonicalization.
- fn make_query_result<T>(
- &self,
- inference_vars: CanonicalVarValues<'tcx>,
- answer: T,
- fulfill_cx: &mut FulfillmentContext<'tcx>,
- ) -> Result<QueryResult<'tcx, T>, NoSolution>
- where
- T: Debug + TypeFoldable<'tcx> + Lift<'gcx>,
- {
- let tcx = self.tcx;
-
- debug!(
- "make_query_result(\
- inference_vars={:?}, \
- answer={:?})",
- inference_vars, answer,
- );
-
- // Select everything, returning errors.
- let true_errors = fulfill_cx.select_where_possible(self).err().unwrap_or_else(Vec::new);
- debug!("true_errors = {:#?}", true_errors);
-
- if !true_errors.is_empty() {
- // FIXME -- we don't indicate *why* we failed to solve
- debug!("make_query_result: true_errors={:#?}", true_errors);
- return Err(NoSolution);
- }
-
- // Anything left unselected *now* must be an ambiguity.
- let ambig_errors = fulfill_cx.select_all_or_error(self).err().unwrap_or_else(Vec::new);
- debug!("ambig_errors = {:#?}", ambig_errors);
-
- let region_obligations = self.take_registered_region_obligations();
- let region_constraints = self.with_region_constraints(|region_constraints| {
- make_query_outlives(
- tcx,
- region_obligations
- .iter()
- .map(|(_, r_o)| (r_o.sup_type, r_o.sub_region)),
- region_constraints)
- });
-
- let certainty = if ambig_errors.is_empty() {
- Certainty::Proven
- } else {
- Certainty::Ambiguous
- };
-
- Ok(QueryResult {
- var_values: inference_vars,
- region_constraints,
- certainty,
- value: answer,
- })
- }
-
- /// Given the (canonicalized) result to a canonical query,
- /// instantiates the result so it can be used, plugging in the
- /// values from the canonical query. (Note that the result may
- /// have been ambiguous; you should check the certainty level of
- /// the query before applying this function.)
- ///
- /// To get a good understanding of what is happening here, check
- /// out the [chapter in the rustc guide][c].
- ///
- /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html#processing-the-canonicalized-query-result
- pub fn instantiate_query_result_and_region_obligations<R>(
- &self,
- cause: &ObligationCause<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- original_values: &SmallCanonicalVarValues<'tcx>,
- query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
- ) -> InferResult<'tcx, R>
- where
- R: Debug + TypeFoldable<'tcx>,
- {
- let InferOk {
- value: result_subst,
- mut obligations,
- } = self.query_result_substitution(cause, param_env, original_values, query_result)?;
-
- obligations.extend(self.query_region_constraints_into_obligations(
- cause,
- param_env,
- &query_result.value.region_constraints,
- &result_subst,
- ));
-
- let user_result: R =
- query_result.substitute_projected(self.tcx, &result_subst, |q_r| &q_r.value);
-
- Ok(InferOk {
- value: user_result,
- obligations,
- })
- }
-
- /// An alternative to
- /// `instantiate_query_result_and_region_obligations` that is more
- /// efficient for NLL. NLL is a bit more advanced in the
- /// "transition to chalk" than the rest of the compiler. During
- /// the NLL type check, all of the "processing" of types and
- /// things happens in queries -- the NLL checker itself is only
- /// interested in the region obligations (`'a: 'b` or `T: 'b`)
- /// that come out of these queries, which it wants to convert into
- /// MIR-based constraints and solve. Therefore, it is most
- /// convenient for the NLL Type Checker to **directly consume**
- /// the `QueryRegionConstraint` values that arise from doing a
- /// query. This is contrast to other parts of the compiler, which
- /// would prefer for those `QueryRegionConstraint` to be converted
- /// into the older infcx-style constraints (e.g., calls to
- /// `sub_regions` or `register_region_obligation`).
- ///
- /// Therefore, `instantiate_nll_query_result_and_region_obligations` performs the same
- /// basic operations as `instantiate_query_result_and_region_obligations` but
- /// it returns its result differently:
- ///
- /// - It creates a substitution `S` that maps from the original
- /// query variables to the values computed in the query
- /// result. If any errors arise, they are propagated back as an
- /// `Err` result.
- /// - In the case of a successful substitution, we will append
- /// `QueryRegionConstraint` values onto the
- /// `output_query_region_constraints` vector for the solver to
- /// use (if an error arises, some values may also be pushed, but
- /// they should be ignored).
- /// - It **can happen** (though it rarely does currently) that
- /// equating types and things will give rise to subobligations
- /// that must be processed. In this case, those subobligations
- /// are propagated back in the return value.
- /// - Finally, the query result (of type `R`) is propagated back,
- /// after applying the substitution `S`.
- pub fn instantiate_nll_query_result_and_region_obligations<R>(
- &self,
- cause: &ObligationCause<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- original_values: &SmallCanonicalVarValues<'tcx>,
- query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
- output_query_region_constraints: &mut Vec<QueryRegionConstraint<'tcx>>,
- ) -> InferResult<'tcx, R>
- where
- R: Debug + TypeFoldable<'tcx>,
- {
- // In an NLL query, there should be no type variables in the
- // query, only region variables.
- debug_assert!(query_result.variables.iter().all(|v| match v.kind {
- CanonicalVarKind::Ty(_) => false,
- CanonicalVarKind::Region => true,
- }));
-
- let result_subst =
- self.query_result_substitution_guess(cause, original_values, query_result);
-
- // Compute `QueryRegionConstraint` values that unify each of
- // the original values `v_o` that was canonicalized into a
- // variable...
- let mut obligations = vec![];
-
- for (index, original_value) in original_values.iter().enumerate() {
- // ...with the value `v_r` of that variable from the query.
- let result_value = query_result.substitute_projected(self.tcx, &result_subst, |v| {
- &v.var_values[CanonicalVar::new(index)]
- });
- match (original_value.unpack(), result_value.unpack()) {
- (UnpackedKind::Lifetime(ty::ReErased), UnpackedKind::Lifetime(ty::ReErased)) => {
- // no action needed
- }
-
- (UnpackedKind::Lifetime(v_o), UnpackedKind::Lifetime(v_r)) => {
- // To make `v_o = v_r`, we emit `v_o: v_r` and `v_r: v_o`.
- if v_o != v_r {
- output_query_region_constraints
- .push(ty::Binder::dummy(ty::OutlivesPredicate(v_o.into(), v_r)));
- output_query_region_constraints
- .push(ty::Binder::dummy(ty::OutlivesPredicate(v_r.into(), v_o)));
- }
- }
-
- (UnpackedKind::Type(v1), UnpackedKind::Type(v2)) => {
- let ok = self.at(cause, param_env).eq(v1, v2)?;
- obligations.extend(ok.into_obligations());
- }
-
- _ => {
- bug!(
- "kind mismatch, cannot unify {:?} and {:?}",
- original_value,
- result_value
- );
- }
- }
- }
-
- // ...also include the other query region constraints from the query.
- output_query_region_constraints.extend(
- query_result.value.region_constraints.iter().filter_map(|r_c| {
- let &ty::OutlivesPredicate(k1, r2) = r_c.skip_binder(); // reconstructed below
- let k1 = substitute_value(self.tcx, &result_subst, &k1);
- let r2 = substitute_value(self.tcx, &result_subst, &r2);
- if k1 != r2.into() {
- Some(ty::Binder::bind(ty::OutlivesPredicate(k1, r2)))
- } else {
- None
- }
- })
- );
-
- let user_result: R =
- query_result.substitute_projected(self.tcx, &result_subst, |q_r| &q_r.value);
-
- Ok(InferOk {
- value: user_result,
- obligations,
- })
- }
-
- /// Given the original values and the (canonicalized) result from
- /// computing a query, returns a substitution that can be applied
- /// to the query result to convert the result back into the
- /// original namespace.
- ///
- /// The substitution also comes accompanied with subobligations
- /// that arose from unification; these might occur if (for
- /// example) we are doing lazy normalization and the value
- /// assigned to a type variable is unified with an unnormalized
- /// projection.
- fn query_result_substitution<R>(
- &self,
- cause: &ObligationCause<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- original_values: &SmallCanonicalVarValues<'tcx>,
- query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
- ) -> InferResult<'tcx, CanonicalVarValues<'tcx>>
- where
- R: Debug + TypeFoldable<'tcx>,
- {
- debug!(
- "query_result_substitution(original_values={:#?}, query_result={:#?})",
- original_values, query_result,
- );
-
- let result_subst =
- self.query_result_substitution_guess(cause, original_values, query_result);
-
- let obligations = self.unify_query_result_substitution_guess(
- cause,
- param_env,
- original_values,
- &result_subst,
- query_result,
- )?
- .into_obligations();
-
- Ok(InferOk {
- value: result_subst,
- obligations,
- })
- }
-
- /// Given the original values and the (canonicalized) result from
- /// computing a query, returns a **guess** at a substitution that
- /// can be applied to the query result to convert the result back
- /// into the original namespace. This is called a **guess**
- /// because it uses a quick heuristic to find the values for each
- /// canonical variable; if that quick heuristic fails, then we
- /// will instantiate fresh inference variables for each canonical
- /// variable instead. Therefore, the result of this method must be
- /// properly unified
- fn query_result_substitution_guess<R>(
- &self,
- cause: &ObligationCause<'tcx>,
- original_values: &SmallCanonicalVarValues<'tcx>,
- query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
- ) -> CanonicalVarValues<'tcx>
- where
- R: Debug + TypeFoldable<'tcx>,
- {
- debug!(
- "query_result_substitution_guess(original_values={:#?}, query_result={:#?})",
- original_values, query_result,
- );
-
- // Every canonical query result includes values for each of
- // the inputs to the query. Therefore, we begin by unifying
- // these values with the original inputs that were
- // canonicalized.
- let result_values = &query_result.value.var_values;
- assert_eq!(original_values.len(), result_values.len());
-
- // Quickly try to find initial values for the canonical
- // variables in the result in terms of the query. We do this
- // by iterating down the values that the query gave to each of
- // the canonical inputs. If we find that one of those values
- // is directly equal to one of the canonical variables in the
- // result, then we can type the corresponding value from the
- // input. See the example above.
- let mut opt_values: IndexVec<CanonicalVar, Option<Kind<'tcx>>> =
- IndexVec::from_elem_n(None, query_result.variables.len());
-
- // In terms of our example above, we are iterating over pairs like:
- // [(?A, Vec<?0>), ('static, '?1), (?B, ?0)]
- for (original_value, result_value) in original_values.iter().zip(result_values) {
- match result_value.unpack() {
- UnpackedKind::Type(result_value) => {
- // e.g., here `result_value` might be `?0` in the example above...
- if let ty::Infer(ty::InferTy::CanonicalTy(index)) = result_value.sty {
- // in which case we would set `canonical_vars[0]` to `Some(?U)`.
- opt_values[index] = Some(*original_value);
- }
- }
- UnpackedKind::Lifetime(result_value) => {
- // e.g., here `result_value` might be `'?1` in the example above...
- if let &ty::RegionKind::ReCanonical(index) = result_value {
- // in which case we would set `canonical_vars[0]` to `Some('static)`.
- opt_values[index] = Some(*original_value);
- }
- }
- }
- }
-
- // Create a result substitution: if we found a value for a
- // given variable in the loop above, use that. Otherwise, use
- // a fresh inference variable.
- let result_subst = CanonicalVarValues {
- var_values: query_result
- .variables
- .iter()
- .enumerate()
- .map(|(index, info)| opt_values[CanonicalVar::new(index)].unwrap_or_else(||
- self.fresh_inference_var_for_canonical_var(cause.span, *info)
- ))
- .collect(),
- };
-
- result_subst
- }
-
- /// Given a "guess" at the values for the canonical variables in
- /// the input, try to unify with the *actual* values found in the
- /// query result. Often, but not always, this is a no-op, because
- /// we already found the mapping in the "guessing" step.
- ///
- /// See also: `query_result_substitution_guess`
- fn unify_query_result_substitution_guess<R>(
- &self,
- cause: &ObligationCause<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- original_values: &SmallCanonicalVarValues<'tcx>,
- result_subst: &CanonicalVarValues<'tcx>,
- query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
- ) -> InferResult<'tcx, ()>
- where
- R: Debug + TypeFoldable<'tcx>,
- {
- // A closure that yields the result value for the given
- // canonical variable; this is taken from
- // `query_result.var_values` after applying the substitution
- // `result_subst`.
- let substituted_query_result = |index: CanonicalVar| -> Kind<'tcx> {
- query_result.substitute_projected(self.tcx, &result_subst, |v| &v.var_values[index])
- };
-
- // Unify the original value for each variable with the value
- // taken from `query_result` (after applying `result_subst`).
- Ok(self.unify_canonical_vars(cause, param_env, original_values, substituted_query_result)?)
- }
-
- /// Converts the region constraints resulting from a query into an
- /// iterator of obligations.
- fn query_region_constraints_into_obligations<'a>(
- &'a self,
- cause: &'a ObligationCause<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- unsubstituted_region_constraints: &'a [QueryRegionConstraint<'tcx>],
- result_subst: &'a CanonicalVarValues<'tcx>,
- ) -> impl Iterator<Item = PredicateObligation<'tcx>> + 'a {
- Box::new(
- unsubstituted_region_constraints
- .iter()
- .map(move |constraint| {
- let ty::OutlivesPredicate(k1, r2) = constraint.skip_binder(); // restored below
- let k1 = substitute_value(self.tcx, result_subst, k1);
- let r2 = substitute_value(self.tcx, result_subst, r2);
-
- Obligation::new(
- cause.clone(),
- param_env,
- match k1.unpack() {
- UnpackedKind::Lifetime(r1) => ty::Predicate::RegionOutlives(
- ty::Binder::dummy(
- ty::OutlivesPredicate(r1, r2)
- )),
- UnpackedKind::Type(t1) => ty::Predicate::TypeOutlives(
- ty::Binder::dummy(ty::OutlivesPredicate(
- t1, r2
- )))
- }
- )
- })
- ) as Box<dyn Iterator<Item = _>>
- }
-
- /// Given two sets of values for the same set of canonical variables, unify them.
- /// The second set is produced lazilly by supplying indices from the first set.
- fn unify_canonical_vars(
- &self,
- cause: &ObligationCause<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- variables1: &SmallCanonicalVarValues<'tcx>,
- variables2: impl Fn(CanonicalVar) -> Kind<'tcx>,
- ) -> InferResult<'tcx, ()> {
- self.commit_if_ok(|_| {
- let mut obligations = vec![];
- for (index, value1) in variables1.iter().enumerate() {
- let value2 = variables2(CanonicalVar::new(index));
-
- match (value1.unpack(), value2.unpack()) {
- (UnpackedKind::Type(v1), UnpackedKind::Type(v2)) => {
- obligations
- .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
- }
- (
- UnpackedKind::Lifetime(ty::ReErased),
- UnpackedKind::Lifetime(ty::ReErased),
- ) => {
- // no action needed
- }
- (UnpackedKind::Lifetime(v1), UnpackedKind::Lifetime(v2)) => {
- obligations
- .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
- }
- _ => {
- bug!("kind mismatch, cannot unify {:?} and {:?}", value1, value2,);
- }
- }
- }
- Ok(InferOk {
- value: (),
- obligations,
- })
- })
- }
-}
-
-/// Given the region obligations and constraints scraped from the infcx,
-/// creates query region constraints.
-pub fn make_query_outlives<'tcx>(
- tcx: TyCtxt<'_, '_, 'tcx>,
- outlives_obligations: impl Iterator<Item = (Ty<'tcx>, ty::Region<'tcx>)>,
- region_constraints: &RegionConstraintData<'tcx>,
-) -> Vec<QueryRegionConstraint<'tcx>> {
- let RegionConstraintData {
- constraints,
- verifys,
- givens,
- } = region_constraints;
-
- assert!(verifys.is_empty());
- assert!(givens.is_empty());
-
- let outlives: Vec<_> = constraints
- .into_iter()
- .map(|(k, _)| match *k {
- // Swap regions because we are going from sub (<=) to outlives
- // (>=).
- Constraint::VarSubVar(v1, v2) => ty::OutlivesPredicate(
- tcx.mk_region(ty::ReVar(v2)).into(),
- tcx.mk_region(ty::ReVar(v1)),
- ),
- Constraint::VarSubReg(v1, r2) => {
- ty::OutlivesPredicate(r2.into(), tcx.mk_region(ty::ReVar(v1)))
- }
- Constraint::RegSubVar(r1, v2) => {
- ty::OutlivesPredicate(tcx.mk_region(ty::ReVar(v2)).into(), r1)
- }
- Constraint::RegSubReg(r1, r2) => ty::OutlivesPredicate(r2.into(), r1),
- })
- .map(ty::Binder::dummy) // no bound regions in the code above
- .chain(
- outlives_obligations
- .map(|(ty, r)| ty::OutlivesPredicate(ty.into(), r))
- .map(ty::Binder::dummy), // no bound regions in the code above
- )
- .collect();
-
- outlives
-}
where
T : TypeFoldable<'tcx>,
{
- let new_universe = self.create_subuniverse();
+ let next_universe = self.create_next_universe();
let (result, map) = self.tcx.replace_late_bound_regions(binder, |br| {
self.tcx.mk_region(ty::RePlaceholder(ty::Placeholder {
- universe: new_universe,
+ universe: next_universe,
name: br,
}))
});
use arena::SyncDroplessArena;
use errors::DiagnosticBuilder;
use hir::def_id::DefId;
+use infer::canonical::{Canonical, CanonicalVarValues};
use middle::free_region::RegionRelations;
use middle::lang_items;
use middle::region;
use self::type_variable::TypeVariableOrigin;
use self::unify_key::ToType;
-pub mod opaque_types;
pub mod at;
pub mod canonical;
mod combine;
pub mod lattice;
mod lexical_region_resolve;
mod lub;
+pub mod nll_relate;
+pub mod opaque_types;
pub mod outlives;
pub mod region_constraints;
pub mod resolve;
/// NLL borrow checker will also do -- it might be set to true.
#[derive(Copy, Clone, Default, Debug)]
pub struct SuppressRegionErrors {
- suppressed: bool
+ suppressed: bool,
}
impl SuppressRegionErrors {
pub fn when_nll_is_enabled(tcx: TyCtxt<'_, '_, '_>) -> Self {
match tcx.borrowck_mode() {
// If we're on AST or Migrate mode, report AST region errors
- BorrowckMode::Ast | BorrowckMode::Migrate => SuppressRegionErrors {
- suppressed: false
- },
+ BorrowckMode::Ast | BorrowckMode::Migrate => SuppressRegionErrors { suppressed: false },
// If we're on MIR or Compare mode, don't report AST region errors as they should
// be reported by NLL
- BorrowckMode::Compare | BorrowckMode::Mir => SuppressRegionErrors {
- suppressed: true
- },
+ BorrowckMode::Compare | BorrowckMode::Mir => SuppressRegionErrors { suppressed: true },
}
}
}
self
}
- pub fn enter<F, R>(&'tcx mut self, f: F) -> R
+ /// Given a canonical value `C` as a starting point, create an
+ /// inference context that contains each of the bound values
+ /// within instantiated as a fresh variable. The `f` closure is
+ /// invoked with the new infcx, along with the instantiated value
+ /// `V` and a substitution `S`. This substitution `S` maps from
+ /// the bound values in `C` to their instantiated values in `V`
+ /// (in other words, `S(C) = V`).
+ pub fn enter_with_canonical<T, R>(
+ &'tcx mut self,
+ span: Span,
+ canonical: &Canonical<'tcx, T>,
+ f: impl for<'b> FnOnce(InferCtxt<'b, 'gcx, 'tcx>, T, CanonicalVarValues<'tcx>) -> R,
+ ) -> R
where
- F: for<'b> FnOnce(InferCtxt<'b, 'gcx, 'tcx>) -> R,
+ T: TypeFoldable<'tcx>,
{
+ self.enter(|infcx| {
+ let (value, subst) =
+ infcx.instantiate_canonical_with_fresh_inference_vars(span, canonical);
+ f(infcx, value, subst)
+ })
+ }
+
+ pub fn enter<R>(&'tcx mut self, f: impl for<'b> FnOnce(InferCtxt<'b, 'gcx, 'tcx>) -> R) -> R {
let InferCtxtBuilder {
global_tcx,
ref arena,
self.universe.get()
}
- /// Create and return a new subunivese of the current universe;
- /// update `self.universe` to that new subuniverse. At present,
- /// used only in the NLL subtyping code, which uses the new
- /// universe-based scheme instead of the more limited leak-check
- /// scheme.
- pub fn create_subuniverse(&self) -> ty::UniverseIndex {
- let u = self.universe.get().subuniverse();
+ /// Create and return a fresh universe that extends all previous
+ /// universes. Updates `self.universe` to that new universe.
+ pub fn create_next_universe(&self) -> ty::UniverseIndex {
+ let u = self.universe.get().next_universe();
self.universe.set(u);
u
}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! This code is kind of an alternate way of doing subtyping,
+//! supertyping, and type equating, distinct from the `combine.rs`
+//! code but very similar in its effect and design. Eventually the two
+//! ought to be merged. This code is intended for use in NLL.
+//!
+//! Here are the key differences:
+//!
+//! - This code generally assumes that there are no unbound type
+//! inference variables, because at NLL
+//! time types are fully inferred up-to regions.
+//! - Actually, to support user-given type annotations like
+//! `Vec<_>`, we do have some measure of support for type
+//! inference variables, but we impose some simplifying
+//! assumptions on them that would not be suitable for the infer
+//! code more generally. This could be fixed.
+//! - This code uses "universes" to handle higher-ranked regions and
+//! not the leak-check. This is "more correct" than what rustc does
+//! and we are generally migrating in this direction, but NLL had to
+//! get there first.
+
+use crate::infer::InferCtxt;
+use crate::ty::fold::{TypeFoldable, TypeVisitor};
+use crate::ty::relate::{self, Relate, RelateResult, TypeRelation};
+use crate::ty::subst::Kind;
+use crate::ty::{self, Ty, TyCtxt};
+use rustc_data_structures::fx::FxHashMap;
+
+pub struct TypeRelating<'me, 'gcx: 'tcx, 'tcx: 'me, D>
+where
+ D: TypeRelatingDelegate<'tcx>,
+{
+ infcx: &'me InferCtxt<'me, 'gcx, 'tcx>,
+
+ /// Callback to use when we deduce an outlives relationship
+ delegate: D,
+
+ /// How are we relating `a` and `b`?
+ ///
+ /// - covariant means `a <: b`
+ /// - contravariant means `b <: a`
+ /// - invariant means `a == b`
+ /// - bivariant means that it doesn't matter
+ ambient_variance: ty::Variance,
+
+ /// When we pass through a set of binders (e.g., when looking into
+ /// a `fn` type), we push a new bound region scope onto here. This
+ /// will contain the instantiated region for each region in those
+ /// binders. When we then encounter a `ReLateBound(d, br)`, we can
+ /// use the debruijn index `d` to find the right scope, and then
+ /// bound region name `br` to find the specific instantiation from
+ /// within that scope. See `replace_bound_region`.
+ ///
+ /// This field stores the instantiations for late-bound regions in
+ /// the `a` type.
+ a_scopes: Vec<BoundRegionScope<'tcx>>,
+
+ /// Same as `a_scopes`, but for the `b` type.
+ b_scopes: Vec<BoundRegionScope<'tcx>>,
+}
+
+pub trait TypeRelatingDelegate<'tcx> {
+ /// Push a constraint `sup: sub` -- this constraint must be
+ /// satisfied for the two types to be related. `sub` and `sup` may
+ /// be regions from the type or new variables created through the
+ /// delegate.
+ fn push_outlives(&mut self, sup: ty::Region<'tcx>, sub: ty::Region<'tcx>);
+
+ /// Creates a new universe index. Used when instantiating placeholders.
+ fn create_next_universe(&mut self) -> ty::UniverseIndex;
+
+ /// Creates a new region variable representing a higher-ranked
+ /// region that is instantiated existentially. This creates an
+ /// inference variable, typically.
+ ///
+ /// So e.g. if you have `for<'a> fn(..) <: for<'b> fn(..)`, then
+ /// we will invoke this method to instantiate `'a` with an
+ /// inference variable (though `'b` would be instantiated first,
+ /// as a placeholder).
+ fn next_existential_region_var(&mut self) -> ty::Region<'tcx>;
+
+ /// Creates a new region variable representing a
+ /// higher-ranked region that is instantiated universally.
+ /// This creates a new region placeholder, typically.
+ ///
+ /// So e.g. if you have `for<'a> fn(..) <: for<'b> fn(..)`, then
+ /// we will invoke this method to instantiate `'b` with a
+ /// placeholder region.
+ fn next_placeholder_region(&mut self, placeholder: ty::Placeholder) -> ty::Region<'tcx>;
+
+ /// Creates a new existential region in the given universe. This
+ /// is used when handling subtyping and type variables -- if we
+ /// have that `?X <: Foo<'a>`, for example, we would instantiate
+ /// `?X` with a type like `Foo<'?0>` where `'?0` is a fresh
+ /// existential variable created by this function. We would then
+ /// relate `Foo<'?0>` with `Foo<'a>` (and probably add an outlives
+ /// relation stating that `'?0: 'a`).
+ fn generalize_existential(&mut self, universe: ty::UniverseIndex) -> ty::Region<'tcx>;
+}
+
+#[derive(Clone, Debug)]
+struct ScopesAndKind<'tcx> {
+ scopes: Vec<BoundRegionScope<'tcx>>,
+ kind: Kind<'tcx>,
+}
+
+#[derive(Clone, Debug, Default)]
+struct BoundRegionScope<'tcx> {
+ map: FxHashMap<ty::BoundRegion, ty::Region<'tcx>>,
+}
+
+#[derive(Copy, Clone)]
+struct UniversallyQuantified(bool);
+
+impl<'me, 'gcx, 'tcx, D> TypeRelating<'me, 'gcx, 'tcx, D>
+where
+ D: TypeRelatingDelegate<'tcx>,
+{
+ pub fn new(
+ infcx: &'me InferCtxt<'me, 'gcx, 'tcx>,
+ delegate: D,
+ ambient_variance: ty::Variance,
+ ) -> Self {
+ Self {
+ infcx,
+ delegate,
+ ambient_variance,
+ a_scopes: vec![],
+ b_scopes: vec![],
+ }
+ }
+
+ fn ambient_covariance(&self) -> bool {
+ match self.ambient_variance {
+ ty::Variance::Covariant | ty::Variance::Invariant => true,
+ ty::Variance::Contravariant | ty::Variance::Bivariant => false,
+ }
+ }
+
+ fn ambient_contravariance(&self) -> bool {
+ match self.ambient_variance {
+ ty::Variance::Contravariant | ty::Variance::Invariant => true,
+ ty::Variance::Covariant | ty::Variance::Bivariant => false,
+ }
+ }
+
+ fn create_scope(
+ &mut self,
+ value: &ty::Binder<impl TypeFoldable<'tcx>>,
+ universally_quantified: UniversallyQuantified,
+ ) -> BoundRegionScope<'tcx> {
+ let mut scope = BoundRegionScope::default();
+
+ // Create a callback that creates (via the delegate) either an
+ // existential or placeholder region as needed.
+ let mut next_region = {
+ let delegate = &mut self.delegate;
+ let mut lazy_universe = None;
+ move |br: ty::BoundRegion| {
+ if universally_quantified.0 {
+ // The first time this closure is called, create a
+ // new universe for the placeholders we will make
+ // from here out.
+ let universe = lazy_universe.unwrap_or_else(|| {
+ let universe = delegate.create_next_universe();
+ lazy_universe = Some(universe);
+ universe
+ });
+
+ let placeholder = ty::Placeholder { universe, name: br };
+ delegate.next_placeholder_region(placeholder)
+ } else {
+ delegate.next_existential_region_var()
+ }
+ }
+ };
+
+ value.skip_binder().visit_with(&mut ScopeInstantiator {
+ next_region: &mut next_region,
+ target_index: ty::INNERMOST,
+ bound_region_scope: &mut scope,
+ });
+
+ scope
+ }
+
+ /// When we encounter binders during the type traversal, we record
+ /// the value to substitute for each of the things contained in
+ /// that binder. (This will be either a universal placeholder or
+ /// an existential inference variable.) Given the debruijn index
+ /// `debruijn` (and name `br`) of some binder we have now
+ /// encountered, this routine finds the value that we instantiated
+ /// the region with; to do so, it indexes backwards into the list
+ /// of ambient scopes `scopes`.
+ fn lookup_bound_region(
+ debruijn: ty::DebruijnIndex,
+ br: &ty::BoundRegion,
+ first_free_index: ty::DebruijnIndex,
+ scopes: &[BoundRegionScope<'tcx>],
+ ) -> ty::Region<'tcx> {
+ // The debruijn index is a "reverse index" into the
+ // scopes listing. So when we have INNERMOST (0), we
+ // want the *last* scope pushed, and so forth.
+ let debruijn_index = debruijn.index() - first_free_index.index();
+ let scope = &scopes[scopes.len() - debruijn_index - 1];
+
+ // Find this bound region in that scope to map to a
+ // particular region.
+ scope.map[br]
+ }
+
+ /// If `r` is a bound region, find the scope in which it is bound
+ /// (from `scopes`) and return the value that we instantiated it
+ /// with. Otherwise just return `r`.
+ fn replace_bound_region(
+ &self,
+ r: ty::Region<'tcx>,
+ first_free_index: ty::DebruijnIndex,
+ scopes: &[BoundRegionScope<'tcx>],
+ ) -> ty::Region<'tcx> {
+ if let ty::ReLateBound(debruijn, br) = r {
+ Self::lookup_bound_region(*debruijn, br, first_free_index, scopes)
+ } else {
+ r
+ }
+ }
+
+ /// Push a new outlives requirement into our output set of
+ /// constraints.
+ fn push_outlives(&mut self, sup: ty::Region<'tcx>, sub: ty::Region<'tcx>) {
+ debug!("push_outlives({:?}: {:?})", sup, sub);
+
+ self.delegate.push_outlives(sup, sub);
+ }
+
+ /// When we encounter a canonical variable `var` in the output,
+ /// equate it with `kind`. If the variable has been previously
+ /// equated, then equate it again.
+ fn relate_var(&mut self, var_ty: Ty<'tcx>, value_ty: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> {
+ debug!("equate_var(var_ty={:?}, value_ty={:?})", var_ty, value_ty);
+
+ let generalized_ty = self.generalize_value(value_ty);
+ self.infcx
+ .force_instantiate_unchecked(var_ty, generalized_ty);
+
+ // The generalized values we extract from `canonical_var_values` have
+ // been fully instantiated and hence the set of scopes we have
+ // doesn't matter -- just to be sure, put an empty vector
+ // in there.
+ let old_a_scopes = ::std::mem::replace(&mut self.a_scopes, vec![]);
+
+ // Relate the generalized kind to the original one.
+ let result = self.relate(&generalized_ty, &value_ty);
+
+ // Restore the old scopes now.
+ self.a_scopes = old_a_scopes;
+
+ debug!("equate_var: complete, result = {:?}", result);
+ result
+ }
+
+ fn generalize_value<T: Relate<'tcx>>(&mut self, value: T) -> T {
+ TypeGeneralizer {
+ tcx: self.infcx.tcx,
+ delegate: &mut self.delegate,
+ first_free_index: ty::INNERMOST,
+ ambient_variance: self.ambient_variance,
+
+ // These always correspond to an `_` or `'_` written by
+ // user, and those are always in the root universe.
+ universe: ty::UniverseIndex::ROOT,
+ }.relate(&value, &value)
+ .unwrap()
+ }
+}
+
+impl<D> TypeRelation<'me, 'gcx, 'tcx> for TypeRelating<'me, 'gcx, 'tcx, D>
+where
+ D: TypeRelatingDelegate<'tcx>,
+{
+ fn tcx(&self) -> TyCtxt<'me, 'gcx, 'tcx> {
+ self.infcx.tcx
+ }
+
+ fn tag(&self) -> &'static str {
+ "nll::subtype"
+ }
+
+ fn a_is_expected(&self) -> bool {
+ true
+ }
+
+ fn relate_with_variance<T: Relate<'tcx>>(
+ &mut self,
+ variance: ty::Variance,
+ a: &T,
+ b: &T,
+ ) -> RelateResult<'tcx, T> {
+ debug!(
+ "relate_with_variance(variance={:?}, a={:?}, b={:?})",
+ variance, a, b
+ );
+
+ let old_ambient_variance = self.ambient_variance;
+ self.ambient_variance = self.ambient_variance.xform(variance);
+
+ debug!(
+ "relate_with_variance: ambient_variance = {:?}",
+ self.ambient_variance
+ );
+
+ let r = self.relate(a, b)?;
+
+ self.ambient_variance = old_ambient_variance;
+
+ debug!("relate_with_variance: r={:?}", r);
+
+ Ok(r)
+ }
+
+ fn tys(&mut self, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> {
+ let a = self.infcx.shallow_resolve(a);
+ match a.sty {
+ ty::Infer(ty::TyVar(_)) | ty::Infer(ty::IntVar(_)) | ty::Infer(ty::FloatVar(_)) => {
+ self.relate_var(a.into(), b.into())
+ }
+
+ _ => {
+ debug!(
+ "tys(a={:?}, b={:?}, variance={:?})",
+ a, b, self.ambient_variance
+ );
+
+ relate::super_relate_tys(self, a, b)
+ }
+ }
+ }
+
+ fn regions(
+ &mut self,
+ a: ty::Region<'tcx>,
+ b: ty::Region<'tcx>,
+ ) -> RelateResult<'tcx, ty::Region<'tcx>> {
+ debug!(
+ "regions(a={:?}, b={:?}, variance={:?})",
+ a, b, self.ambient_variance
+ );
+
+ let v_a = self.replace_bound_region(a, ty::INNERMOST, &self.a_scopes);
+ let v_b = self.replace_bound_region(b, ty::INNERMOST, &self.b_scopes);
+
+ debug!("regions: v_a = {:?}", v_a);
+ debug!("regions: v_b = {:?}", v_b);
+
+ if self.ambient_covariance() {
+ // Covariance: a <= b. Hence, `b: a`.
+ self.push_outlives(v_b, v_a);
+ }
+
+ if self.ambient_contravariance() {
+ // Contravariant: b <= a. Hence, `a: b`.
+ self.push_outlives(v_a, v_b);
+ }
+
+ Ok(a)
+ }
+
+ fn binders<T>(
+ &mut self,
+ a: &ty::Binder<T>,
+ b: &ty::Binder<T>,
+ ) -> RelateResult<'tcx, ty::Binder<T>>
+ where
+ T: Relate<'tcx>,
+ {
+ // We want that
+ //
+ // ```
+ // for<'a> fn(&'a u32) -> &'a u32 <:
+ // fn(&'b u32) -> &'b u32
+ // ```
+ //
+ // but not
+ //
+ // ```
+ // fn(&'a u32) -> &'a u32 <:
+ // for<'b> fn(&'b u32) -> &'b u32
+ // ```
+ //
+ // We therefore proceed as follows:
+ //
+ // - Instantiate binders on `b` universally, yielding a universe U1.
+ // - Instantiate binders on `a` existentially in U1.
+
+ debug!(
+ "binders({:?}: {:?}, ambient_variance={:?})",
+ a, b, self.ambient_variance
+ );
+
+ if self.ambient_covariance() {
+ // Covariance, so we want `for<..> A <: for<..> B` --
+ // therefore we compare any instantiation of A (i.e., A
+ // instantiated with existentials) against every
+ // instantiation of B (i.e., B instantiated with
+ // universals).
+
+ let b_scope = self.create_scope(b, UniversallyQuantified(true));
+ let a_scope = self.create_scope(a, UniversallyQuantified(false));
+
+ debug!("binders: a_scope = {:?} (existential)", a_scope);
+ debug!("binders: b_scope = {:?} (universal)", b_scope);
+
+ self.b_scopes.push(b_scope);
+ self.a_scopes.push(a_scope);
+
+ // Reset the ambient variance to covariant. This is needed
+ // to correctly handle cases like
+ //
+ // for<'a> fn(&'a u32, &'a u32) == for<'b, 'c> fn(&'b u32, &'c u32)
+ //
+ // Somewhat surprisingly, these two types are actually
+ // **equal**, even though the one on the right looks more
+ // polymorphic. The reason is due to subtyping. To see it,
+ // consider that each function can call the other:
+ //
+ // - The left function can call the right with `'b` and
+ // `'c` both equal to `'a`
+ //
+ // - The right function can call the left with `'a` set to
+ // `{P}`, where P is the point in the CFG where the call
+ // itself occurs. Note that `'b` and `'c` must both
+ // include P. At that point, the call works because of
+ // subtyping (i.e., `&'b u32 <: &{P} u32`).
+ let variance = ::std::mem::replace(&mut self.ambient_variance, ty::Variance::Covariant);
+
+ self.relate(a.skip_binder(), b.skip_binder())?;
+
+ self.ambient_variance = variance;
+
+ self.b_scopes.pop().unwrap();
+ self.a_scopes.pop().unwrap();
+ }
+
+ if self.ambient_contravariance() {
+ // Contravariance, so we want `for<..> A :> for<..> B`
+ // -- therefore we compare every instantiation of A (i.e.,
+ // A instantiated with universals) against any
+ // instantiation of B (i.e., B instantiated with
+ // existentials). Opposite of above.
+
+ let a_scope = self.create_scope(a, UniversallyQuantified(true));
+ let b_scope = self.create_scope(b, UniversallyQuantified(false));
+
+ debug!("binders: a_scope = {:?} (universal)", a_scope);
+ debug!("binders: b_scope = {:?} (existential)", b_scope);
+
+ self.a_scopes.push(a_scope);
+ self.b_scopes.push(b_scope);
+
+ // Reset ambient variance to contravariance. See the
+ // covariant case above for an explanation.
+ let variance =
+ ::std::mem::replace(&mut self.ambient_variance, ty::Variance::Contravariant);
+
+ self.relate(a.skip_binder(), b.skip_binder())?;
+
+ self.ambient_variance = variance;
+
+ self.b_scopes.pop().unwrap();
+ self.a_scopes.pop().unwrap();
+ }
+
+ Ok(a.clone())
+ }
+}
+
+/// When we encounter a binder like `for<..> fn(..)`, we actually have
+/// to walk the `fn` value to find all the values bound by the `for`
+/// (these are not explicitly present in the ty representation right
+/// now). This visitor handles that: it descends the type, tracking
+/// binder depth, and finds late-bound regions targeting the
+/// `for<..>`. For each of those, it creates an entry in
+/// `bound_region_scope`.
+struct ScopeInstantiator<'me, 'tcx: 'me> {
+ next_region: &'me mut dyn FnMut(ty::BoundRegion) -> ty::Region<'tcx>,
+ // The debruijn index of the scope we are instantiating.
+ target_index: ty::DebruijnIndex,
+ bound_region_scope: &'me mut BoundRegionScope<'tcx>,
+}
+
+impl<'me, 'tcx> TypeVisitor<'tcx> for ScopeInstantiator<'me, 'tcx> {
+ fn visit_binder<T: TypeFoldable<'tcx>>(&mut self, t: &ty::Binder<T>) -> bool {
+ self.target_index.shift_in(1);
+ t.super_visit_with(self);
+ self.target_index.shift_out(1);
+
+ false
+ }
+
+ fn visit_region(&mut self, r: ty::Region<'tcx>) -> bool {
+ let ScopeInstantiator {
+ bound_region_scope,
+ next_region,
+ ..
+ } = self;
+
+ match r {
+ ty::ReLateBound(debruijn, br) if *debruijn == self.target_index => {
+ bound_region_scope
+ .map
+ .entry(*br)
+ .or_insert_with(|| next_region(*br));
+ }
+
+ _ => {}
+ }
+
+ false
+ }
+}
+
+/// The "type generalizer" is used when handling inference variables.
+///
+/// The basic strategy for handling a constraint like `?A <: B` is to
+/// apply a "generalization strategy" to the type `B` -- this replaces
+/// all the lifetimes in the type `B` with fresh inference
+/// variables. (You can read more about the strategy in this [blog
+/// post].)
+///
+/// As an example, if we had `?A <: &'x u32`, we would generalize `&'x
+/// u32` to `&'0 u32` where `'0` is a fresh variable. This becomes the
+/// value of `A`. Finally, we relate `&'0 u32 <: &'x u32`, which
+/// establishes `'0: 'x` as a constraint.
+///
+/// As a side-effect of this generalization procedure, we also replace
+/// all the bound regions that we have traversed with concrete values,
+/// so that the resulting generalized type is independent from the
+/// scopes.
+///
+/// [blog post]: https://is.gd/0hKvIr
+struct TypeGeneralizer<'me, 'gcx: 'tcx, 'tcx: 'me, D>
+where
+ D: TypeRelatingDelegate<'tcx> + 'me,
+{
+ tcx: TyCtxt<'me, 'gcx, 'tcx>,
+
+ delegate: &'me mut D,
+
+    /// After we generalize this type, we are going to relate it to
+    /// some other type. What will be the variance at this point?
+ ambient_variance: ty::Variance,
+
+ first_free_index: ty::DebruijnIndex,
+
+ universe: ty::UniverseIndex,
+}
+
+impl<D> TypeRelation<'me, 'gcx, 'tcx> for TypeGeneralizer<'me, 'gcx, 'tcx, D>
+where
+ D: TypeRelatingDelegate<'tcx>,
+{
+ fn tcx(&self) -> TyCtxt<'me, 'gcx, 'tcx> {
+ self.tcx
+ }
+
+ fn tag(&self) -> &'static str {
+ "nll::generalizer"
+ }
+
+ fn a_is_expected(&self) -> bool {
+ true
+ }
+
+ fn relate_with_variance<T: Relate<'tcx>>(
+ &mut self,
+ variance: ty::Variance,
+ a: &T,
+ b: &T,
+ ) -> RelateResult<'tcx, T> {
+ debug!(
+ "TypeGeneralizer::relate_with_variance(variance={:?}, a={:?}, b={:?})",
+ variance, a, b
+ );
+
+ let old_ambient_variance = self.ambient_variance;
+ self.ambient_variance = self.ambient_variance.xform(variance);
+
+ debug!(
+ "TypeGeneralizer::relate_with_variance: ambient_variance = {:?}",
+ self.ambient_variance
+ );
+
+ let r = self.relate(a, b)?;
+
+ self.ambient_variance = old_ambient_variance;
+
+ debug!("TypeGeneralizer::relate_with_variance: r={:?}", r);
+
+ Ok(r)
+ }
+
+ fn tys(&mut self, a: Ty<'tcx>, _: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> {
+ debug!("TypeGeneralizer::tys(a={:?})", a,);
+
+ match a.sty {
+ ty::Infer(ty::TyVar(_)) | ty::Infer(ty::IntVar(_)) | ty::Infer(ty::FloatVar(_)) => {
+ bug!(
+ "unexpected inference variable encountered in NLL generalization: {:?}",
+ a
+ );
+ }
+
+ _ => relate::super_relate_tys(self, a, a),
+ }
+ }
+
+ fn regions(
+ &mut self,
+ a: ty::Region<'tcx>,
+ _: ty::Region<'tcx>,
+ ) -> RelateResult<'tcx, ty::Region<'tcx>> {
+ debug!("TypeGeneralizer::regions(a={:?})", a,);
+
+ if let ty::ReLateBound(debruijn, _) = a {
+ if *debruijn < self.first_free_index {
+ return Ok(a);
+ }
+ }
+
+ // For now, we just always create a fresh region variable to
+ // replace all the regions in the source type. In the main
+ // type checker, we special case the case where the ambient
+ // variance is `Invariant` and try to avoid creating a fresh
+ // region variable, but since this comes up so much less in
+ // NLL (only when users use `_` etc) it is much less
+ // important.
+ //
+ // As an aside, since these new variables are created in
+ // `self.universe` universe, this also serves to enforce the
+ // universe scoping rules.
+ //
+ // FIXME(#54105) -- if the ambient variance is bivariant,
+ // though, we may however need to check well-formedness or
+ // risk a problem like #41677 again.
+
+ let replacement_region_vid = self.delegate.generalize_existential(self.universe);
+
+ Ok(replacement_region_vid)
+ }
+
+ fn binders<T>(
+ &mut self,
+ a: &ty::Binder<T>,
+ _: &ty::Binder<T>,
+ ) -> RelateResult<'tcx, ty::Binder<T>>
+ where
+ T: Relate<'tcx>,
+ {
+ debug!("TypeGeneralizer::binders(a={:?})", a,);
+
+ self.first_free_index.shift_in(1);
+ let result = self.relate(a.skip_binder(), a.skip_binder())?;
+ self.first_free_index.shift_out(1);
+ Ok(ty::Binder::bind(result))
+ }
+}
+
+impl InferCtxt<'_, '_, 'tcx> {
+ /// A hacky sort of method used by the NLL type-relating code:
+ ///
+ /// - `var` must be some unbound type variable.
+ /// - `value` must be a suitable type to use as its value.
+ ///
+ /// `var` will then be equated with `value`. Note that this
+ /// sidesteps a number of important checks, such as the "occurs
+ /// check" that prevents cyclic types, so it is important not to
+ /// use this method during regular type-check.
+ fn force_instantiate_unchecked(&self, var: Ty<'tcx>, value: Ty<'tcx>) {
+ match (&var.sty, &value.sty) {
+ (&ty::Infer(ty::TyVar(vid)), _) => {
+ let mut type_variables = self.type_variables.borrow_mut();
+
+ // In NLL, we don't have type inference variables
+ // floating around, so we can do this rather imprecise
+ // variant of the occurs-check.
+ assert!(!value.has_infer_types());
+
+ type_variables.instantiate(vid, value);
+ }
+
+ (&ty::Infer(ty::IntVar(vid)), &ty::Int(value)) => {
+ let mut int_unification_table = self.int_unification_table.borrow_mut();
+ int_unification_table
+ .unify_var_value(vid, Some(ty::IntVarValue::IntType(value)))
+ .unwrap_or_else(|_| {
+ bug!("failed to unify int var `{:?}` with `{:?}`", vid, value);
+ });
+ }
+
+ (&ty::Infer(ty::IntVar(vid)), &ty::Uint(value)) => {
+ let mut int_unification_table = self.int_unification_table.borrow_mut();
+ int_unification_table
+ .unify_var_value(vid, Some(ty::IntVarValue::UintType(value)))
+ .unwrap_or_else(|_| {
+ bug!("failed to unify int var `{:?}` with `{:?}`", vid, value);
+ });
+ }
+
+ (&ty::Infer(ty::FloatVar(vid)), &ty::Float(value)) => {
+ let mut float_unification_table = self.float_unification_table.borrow_mut();
+ float_unification_table
+ .unify_var_value(vid, Some(ty::FloatVarValue(value)))
+ .unwrap_or_else(|_| {
+ bug!("failed to unify float var `{:?}` with `{:?}`", vid, value)
+ });
+ }
+
+ _ => {
+ bug!(
+ "force_instantiate_unchecked invoked with bad combination: var={:?} value={:?}",
+ var,
+ value,
+ );
+ }
+ }
+ }
+}
#![feature(in_band_lifetimes)]
#![feature(macro_at_most_once_rep)]
#![feature(crate_visibility_modifier)]
+#![feature(transpose_result)]
#![recursion_limit="512"]
/// Lints indexed by name.
by_name: FxHashMap<String, TargetLint>,
- /// Map of registered lint groups to what lints they expand to. The first
- /// bool is true if the lint group was added by a plugin. The optional string
- /// is used to store the new names of deprecated lint group names.
- lint_groups: FxHashMap<&'static str, (Vec<LintId>, bool, Option<&'static str>)>,
+ /// Map of registered lint groups to what lints they expand to.
+ lint_groups: FxHashMap<&'static str, LintGroup>,
/// Extra info for future incompatibility lints, describing the
/// issue or RFC that caused the incompatibility.
Removed,
}
+struct LintAlias {
+ name: &'static str,
+ /// Whether deprecation warnings should be suppressed for this alias.
+ silent: bool,
+}
+
+struct LintGroup {
+ lint_ids: Vec<LintId>,
+ from_plugin: bool,
+ depr: Option<LintAlias>,
+}
+
pub enum CheckLintNameResult<'a> {
Ok(&'a [LintId]),
/// Lint doesn't exist
}
pub fn get_lint_groups<'t>(&'t self) -> Vec<(&'static str, Vec<LintId>, bool)> {
- self.lint_groups.iter().map(|(k, v)| (*k,
- v.0.clone(),
- v.1)).collect()
+ self.lint_groups.iter()
+ .filter(|(_, LintGroup { depr, .. })| {
+ // Don't display deprecated lint groups.
+ depr.is_none()
+ })
+ .map(|(k, LintGroup { lint_ids, from_plugin, .. })| {
+ (*k, lint_ids.clone(), *from_plugin)
+ })
+ .collect()
}
pub fn register_early_pass(&mut self,
self.future_incompatible.get(&id)
}
+ pub fn register_group_alias(
+ &mut self,
+ lint_name: &'static str,
+ alias: &'static str,
+ ) {
+ self.lint_groups.insert(alias, LintGroup {
+ lint_ids: vec![],
+ from_plugin: false,
+ depr: Some(LintAlias { name: lint_name, silent: true }),
+ });
+ }
+
pub fn register_group(
&mut self,
sess: Option<&Session>,
) {
let new = self
.lint_groups
- .insert(name, (to, from_plugin, None))
+ .insert(name, LintGroup {
+ lint_ids: to,
+ from_plugin,
+ depr: None,
+ })
.is_none();
if let Some(deprecated) = deprecated_name {
- self.lint_groups
- .insert(deprecated, (vec![], from_plugin, Some(name)));
+ self.lint_groups.insert(deprecated, LintGroup {
+ lint_ids: vec![],
+ from_plugin,
+ depr: Some(LintAlias { name, silent: false }),
+ });
}
if !new {
self.by_name.insert(name.into(), Removed(reason.into()));
}
- pub fn find_lints(&self, lint_name: &str) -> Result<Vec<LintId>, FindLintError> {
+ pub fn find_lints(&self, mut lint_name: &str) -> Result<Vec<LintId>, FindLintError> {
match self.by_name.get(lint_name) {
Some(&Id(lint_id)) => Ok(vec![lint_id]),
Some(&Renamed(_, lint_id)) => {
Err(FindLintError::Removed)
},
None => {
- match self.lint_groups.get(lint_name) {
- Some(v) => Ok(v.0.clone()),
- None => Err(FindLintError::Removed)
+ loop {
+ return match self.lint_groups.get(lint_name) {
+ Some(LintGroup {lint_ids, depr, .. }) => {
+ if let Some(LintAlias { name, .. }) = depr {
+ lint_name = name;
+ continue;
+ }
+ Ok(lint_ids.clone())
+ }
+ None => Err(FindLintError::Removed)
+ };
}
}
}
match self.by_name.get(&complete_name) {
None => match self.lint_groups.get(&*complete_name) {
None => return CheckLintNameResult::Tool(Err((None, String::new()))),
- Some(ids) => return CheckLintNameResult::Tool(Ok(&ids.0)),
+ Some(LintGroup { lint_ids, .. }) => {
+ return CheckLintNameResult::Tool(Ok(&lint_ids));
+ }
},
Some(&Id(ref id)) => return CheckLintNameResult::Tool(Ok(slice::from_ref(id))),
// If the lint was registered as removed or renamed by the lint tool, we don't need
// If neither the lint, nor the lint group exists check if there is a `clippy::`
// variant of this lint
None => self.check_tool_name_for_backwards_compat(&complete_name, "clippy"),
- Some(ids) => {
+ Some(LintGroup { lint_ids, depr, .. }) => {
// Check if the lint group name is deprecated
- if let Some(new_name) = ids.2 {
- let lint_ids = self.lint_groups.get(new_name).unwrap();
- return CheckLintNameResult::Tool(Err((
- Some(&lint_ids.0),
- new_name.to_string(),
- )));
+ if let Some(LintAlias { name, silent }) = depr {
+ let LintGroup { lint_ids, .. } = self.lint_groups.get(name).unwrap();
+ return if *silent {
+ CheckLintNameResult::Ok(&lint_ids)
+ } else {
+ CheckLintNameResult::Tool(Err((
+ Some(&lint_ids),
+ name.to_string(),
+ )))
+ };
}
- CheckLintNameResult::Ok(&ids.0)
+ CheckLintNameResult::Ok(&lint_ids)
}
},
Some(&Id(ref id)) => CheckLintNameResult::Ok(slice::from_ref(id)),
None => match self.lint_groups.get(&*complete_name) {
// Now we are sure, that this lint exists nowhere
None => CheckLintNameResult::NoLint,
- Some(ids) => {
- // Reaching this would be weird, but lets cover this case anyway
- if let Some(new_name) = ids.2 {
- let lint_ids = self.lint_groups.get(new_name).unwrap();
- return CheckLintNameResult::Tool(Err((
- Some(&lint_ids.0),
- new_name.to_string(),
- )));
+ Some(LintGroup { lint_ids, depr, .. }) => {
+ // Reaching this would be weird, but let's cover this case anyway
+ if let Some(LintAlias { name, silent }) = depr {
+ let LintGroup { lint_ids, .. } = self.lint_groups.get(name).unwrap();
+ return if *silent {
+ CheckLintNameResult::Tool(Err((Some(&lint_ids), complete_name)))
+ } else {
+ CheckLintNameResult::Tool(Err((
+ Some(&lint_ids),
+ name.to_string(),
+ )))
+ };
}
- CheckLintNameResult::Tool(Err((Some(&ids.0), complete_name)))
+ CheckLintNameResult::Tool(Err((Some(&lint_ids), complete_name)))
}
},
Some(&Id(ref id)) => {
use syntax::symbol::InternedString;
use syntax_pos::{Span, DUMMY_SP};
use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
-use ty::subst::{Subst, Substs};
+use ty::subst::{CanonicalUserSubsts, Subst, Substs};
use ty::{self, AdtDef, CanonicalTy, ClosureSubsts, GeneratorSubsts, Region, Ty, TyCtxt};
use util::ppaux;
/// e.g. via `let x: T`, then we carry that type here. The MIR
/// borrow checker needs this information since it can affect
/// region inference.
- pub user_ty: Option<(CanonicalTy<'tcx>, Span)>,
+ pub user_ty: Option<(UserTypeAnnotation<'tcx>, Span)>,
/// Name of the local, used in debuginfo and pretty-printing.
///
/// - `Contravariant` -- requires that `T_y :> T`
/// - `Invariant` -- requires that `T_y == T`
/// - `Bivariant` -- no effect
- AscribeUserType(Place<'tcx>, ty::Variance, CanonicalTy<'tcx>),
+ AscribeUserType(Place<'tcx>, ty::Variance, UserTypeAnnotation<'tcx>),
/// No-op. Useful for deleting instructions without affecting statement indices.
Nop,
&'tcx AdtDef,
usize,
&'tcx Substs<'tcx>,
- Option<CanonicalTy<'tcx>>,
+ Option<UserTypeAnnotation<'tcx>>,
Option<usize>,
),
/// this does not necessarily mean that they are "==" in Rust -- in
/// particular one must be wary of `NaN`!
-#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
+#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct Constant<'tcx> {
pub span: Span,
pub ty: Ty<'tcx>,
/// indicate that `Vec<_>` was explicitly specified.
///
/// Needed for NLL to impose user-given type constraints.
- pub user_ty: Option<CanonicalTy<'tcx>>,
+ pub user_ty: Option<UserTypeAnnotation<'tcx>>,
pub literal: &'tcx ty::Const<'tcx>,
}
+/// A user-given type annotation attached to a constant. These arise
+/// from constants that are named via paths, like `Foo::<A>::new` and
+/// so forth.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
+pub enum UserTypeAnnotation<'tcx> {
+ Ty(CanonicalTy<'tcx>),
+ FnDef(DefId, CanonicalUserSubsts<'tcx>),
+ AdtDef(&'tcx AdtDef, CanonicalUserSubsts<'tcx>),
+}
+
+EnumTypeFoldableImpl! {
+ impl<'tcx> TypeFoldable<'tcx> for UserTypeAnnotation<'tcx> {
+ (UserTypeAnnotation::Ty)(ty),
+ (UserTypeAnnotation::FnDef)(def, substs),
+ (UserTypeAnnotation::AdtDef)(def, substs),
+ }
+}
+
newtype_index! {
pub struct Promoted {
DEBUG_FORMAT = "promoted[{}]"
use hir::def_id::DefId;
use ty::subst::Substs;
-use ty::{CanonicalTy, ClosureSubsts, GeneratorSubsts, Region, Ty};
+use ty::{ClosureSubsts, GeneratorSubsts, Region, Ty};
use mir::*;
use syntax_pos::Span;
fn visit_ascribe_user_ty(&mut self,
place: & $($mutability)* Place<'tcx>,
variance: & $($mutability)* ty::Variance,
- c_ty: & $($mutability)* CanonicalTy<'tcx>,
+ user_ty: & $($mutability)* UserTypeAnnotation<'tcx>,
location: Location) {
- self.super_ascribe_user_ty(place, variance, c_ty, location);
+ self.super_ascribe_user_ty(place, variance, user_ty, location);
}
fn visit_place(&mut self,
self.super_ty(ty);
}
- fn visit_user_ty(&mut self, ty: & $($mutability)* CanonicalTy<'tcx>) {
- self.super_canonical_ty(ty);
+ fn visit_user_type_annotation(
+ &mut self,
+ ty: & $($mutability)* UserTypeAnnotation<'tcx>,
+ ) {
+ self.super_user_type_annotation(ty);
}
fn visit_region(&mut self,
StatementKind::AscribeUserType(
ref $($mutability)* place,
ref $($mutability)* variance,
- ref $($mutability)* c_ty,
+ ref $($mutability)* user_ty,
) => {
- self.visit_ascribe_user_ty(place, variance, c_ty, location);
+ self.visit_ascribe_user_ty(place, variance, user_ty, location);
}
StatementKind::Nop => {}
}
fn super_ascribe_user_ty(&mut self,
place: & $($mutability)* Place<'tcx>,
_variance: & $($mutability)* ty::Variance,
- c_ty: & $($mutability)* CanonicalTy<'tcx>,
+ user_ty: & $($mutability)* UserTypeAnnotation<'tcx>,
location: Location) {
self.visit_place(place, PlaceContext::Validate, location);
- self.visit_user_ty(c_ty);
+ self.visit_user_type_annotation(user_ty);
}
fn super_place(&mut self,
source_info: *source_info,
});
if let Some((user_ty, _)) = user_ty {
- self.visit_user_ty(user_ty);
+ self.visit_user_type_annotation(user_ty);
}
self.visit_source_info(source_info);
self.visit_source_scope(visibility_scope);
self.visit_source_scope(scope);
}
- fn super_canonical_ty(&mut self, _ty: & $($mutability)* CanonicalTy<'tcx>) {
+ fn super_user_type_annotation(
+ &mut self,
+ _ty: & $($mutability)* UserTypeAnnotation<'tcx>,
+ ) {
}
fn super_ty(&mut self, _ty: & $($mutability)* Ty<'tcx>) {
}
impl Input {
- pub fn filestem(&self) -> String {
+ pub fn filestem(&self) -> &str {
match *self {
- Input::File(ref ifile) => ifile.file_stem().unwrap().to_str().unwrap().to_string(),
- Input::Str { .. } => "rust_out".to_string(),
+ Input::File(ref ifile) => ifile.file_stem().unwrap().to_str().unwrap(),
+ Input::Str { .. } => "rust_out",
}
}
match (value, opt_type_desc) {
(Some(..), None) => {
early_error(error_format, &format!("{} option `{}` takes no \
- value", $outputname, key))
+ value", $outputname, key))
}
(None, Some(type_desc)) => {
early_error(error_format, &format!("{0} option `{1}` requires \
- {2} ({3} {1}=<value>)",
- $outputname, key,
- type_desc, $prefix))
+ {2} ({3} {1}=<value>)",
+ $outputname, key,
+ type_desc, $prefix))
}
(Some(value), Some(type_desc)) => {
early_error(error_format, &format!("incorrect value `{}` for {} \
- option `{}` - {} was expected",
- value, $outputname,
- key, type_desc))
+ option `{}` - {} was expected",
+ value, $outputname,
+ key, type_desc))
}
(None, None) => bug!()
}
}
if !found {
early_error(error_format, &format!("unknown {} option: `{}`",
- $outputname, key));
+ $outputname, key));
}
}
return op;
}
impl<'a> dep_tracking::DepTrackingHash for $struct_name {
-
fn hash(&self, hasher: &mut DefaultHasher, error_format: ErrorOutputType) {
let mut sub_hashes = BTreeMap::new();
$({
pub type $setter_name = fn(&mut $struct_name, v: Option<&str>) -> bool;
pub const $stat: &'static [(&'static str, $setter_name,
- Option<&'static str>, &'static str)] =
+ Option<&'static str>, &'static str)] =
&[ $( (stringify!($opt), $mod_set::$opt, $mod_desc::$parse, $desc) ),* ];
#[allow(non_upper_case_globals, dead_code)]
) }
options! {CodegenOptions, CodegenSetter, basic_codegen_options,
- build_codegen_options, "C", "codegen",
- CG_OPTIONS, cg_type_desc, cgsetters,
+ build_codegen_options, "C", "codegen",
+ CG_OPTIONS, cg_type_desc, cgsetters,
ar: Option<String> = (None, parse_opt_string, [UNTRACKED],
"this option is deprecated and does nothing"),
linker: Option<PathBuf> = (None, parse_opt_pathbuf, [UNTRACKED],
no_redzone: Option<bool> = (None, parse_opt_bool, [TRACKED],
"disable the use of the redzone"),
relocation_model: Option<String> = (None, parse_opt_string, [TRACKED],
- "choose the relocation model to use (rustc --print relocation-models for details)"),
+ "choose the relocation model to use (rustc --print relocation-models for details)"),
code_model: Option<String> = (None, parse_opt_string, [TRACKED],
- "choose the code model to use (rustc --print code-models for details)"),
+ "choose the code model to use (rustc --print code-models for details)"),
metadata: Vec<String> = (Vec::new(), parse_list, [TRACKED],
- "metadata to mangle symbol names with"),
+ "metadata to mangle symbol names with"),
extra_filename: String = (String::new(), parse_string, [UNTRACKED],
- "extra data to put in each output filename"),
+ "extra data to put in each output filename"),
codegen_units: Option<usize> = (None, parse_opt_uint, [UNTRACKED],
"divide crate into N units to optimize in parallel"),
remark: Passes = (Passes::Some(Vec::new()), parse_passes, [UNTRACKED],
panic: Option<PanicStrategy> = (None, parse_panic_strategy,
[TRACKED], "panic strategy to compile crate with"),
incremental: Option<String> = (None, parse_opt_string, [UNTRACKED],
- "enable incremental compilation"),
+ "enable incremental compilation"),
default_linker_libraries: Option<bool> = (None, parse_opt_bool, [UNTRACKED],
- "allow the linker to link its default libraries"),
+ "allow the linker to link its default libraries"),
}
options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
- build_debugging_options, "Z", "debugging",
- DB_OPTIONS, db_type_desc, dbsetters,
+ build_debugging_options, "Z", "debugging",
+ DB_OPTIONS, db_type_desc, dbsetters,
codegen_backend: Option<String> = (None, parse_opt_string, [TRACKED],
"the backend to use"),
verbose: bool = (false, parse_bool, [UNTRACKED],
flowgraph_print_all: bool = (false, parse_bool, [UNTRACKED],
"include all dataflow analysis data in -Z unpretty flowgraph output"),
print_region_graph: bool = (false, parse_bool, [UNTRACKED],
- "prints region inference graph. \
- Use with RUST_REGION_GRAPH=help for more info"),
+ "prints region inference graph. \
+ Use with RUST_REGION_GRAPH=help for more info"),
parse_only: bool = (false, parse_bool, [UNTRACKED],
- "parse only; do not compile, assemble, or link"),
+ "parse only; do not compile, assemble, or link"),
no_codegen: bool = (false, parse_bool, [TRACKED],
- "run all passes except codegen; no output"),
+ "run all passes except codegen; no output"),
treat_err_as_bug: bool = (false, parse_bool, [TRACKED],
- "treat all errors that occur as bugs"),
+ "treat all errors that occur as bugs"),
report_delayed_bugs: bool = (false, parse_bool, [TRACKED],
- "immediately print bugs registered with `delay_span_bug`"),
+ "immediately print bugs registered with `delay_span_bug`"),
external_macro_backtrace: bool = (false, parse_bool, [UNTRACKED],
- "show macro backtraces even for non-local macros"),
+ "show macro backtraces even for non-local macros"),
teach: bool = (false, parse_bool, [TRACKED],
- "show extended diagnostic help"),
+ "show extended diagnostic help"),
continue_parse_after_error: bool = (false, parse_bool, [TRACKED],
- "attempt to recover from parse errors (experimental)"),
+ "attempt to recover from parse errors (experimental)"),
incremental: Option<String> = (None, parse_opt_string, [UNTRACKED],
- "enable incremental compilation (experimental)"),
+ "enable incremental compilation (experimental)"),
incremental_queries: bool = (true, parse_bool, [UNTRACKED],
- "enable incremental compilation support for queries (experimental)"),
+ "enable incremental compilation support for queries (experimental)"),
incremental_info: bool = (false, parse_bool, [UNTRACKED],
"print high-level information about incremental reuse (or the lack thereof)"),
incremental_dump_hash: bool = (false, parse_bool, [UNTRACKED],
incremental_ignore_spans: bool = (false, parse_bool, [UNTRACKED],
"ignore spans during ICH computation -- used for testing"),
dump_dep_graph: bool = (false, parse_bool, [UNTRACKED],
- "dump the dependency graph to $RUST_DEP_GRAPH (default: /tmp/dep_graph.gv)"),
+ "dump the dependency graph to $RUST_DEP_GRAPH (default: /tmp/dep_graph.gv)"),
query_dep_graph: bool = (false, parse_bool, [UNTRACKED],
- "enable queries of the dependency graph for regression testing"),
+ "enable queries of the dependency graph for regression testing"),
profile_queries: bool = (false, parse_bool, [UNTRACKED],
- "trace and profile the queries of the incremental compilation framework"),
+ "trace and profile the queries of the incremental compilation framework"),
profile_queries_and_keys: bool = (false, parse_bool, [UNTRACKED],
- "trace and profile the queries and keys of the incremental compilation framework"),
+ "trace and profile the queries and keys of the incremental compilation framework"),
no_analysis: bool = (false, parse_bool, [UNTRACKED],
- "parse and expand the source, but run no analysis"),
+ "parse and expand the source, but run no analysis"),
extra_plugins: Vec<String> = (Vec::new(), parse_list, [TRACKED],
"load extra plugins"),
unstable_options: bool = (false, parse_bool, [UNTRACKED],
- "adds unstable command line options to rustc interface"),
+ "adds unstable command line options to rustc interface"),
force_overflow_checks: Option<bool> = (None, parse_opt_bool, [TRACKED],
- "force overflow checks on or off"),
+ "force overflow checks on or off"),
trace_macros: bool = (false, parse_bool, [UNTRACKED],
- "for every macro invocation, print its name and arguments"),
+ "for every macro invocation, print its name and arguments"),
debug_macros: bool = (false, parse_bool, [TRACKED],
- "emit line numbers debug info inside macros"),
+ "emit line numbers debug info inside macros"),
keep_hygiene_data: bool = (false, parse_bool, [UNTRACKED],
- "don't clear the hygiene data after analysis"),
+ "don't clear the hygiene data after analysis"),
keep_ast: bool = (false, parse_bool, [UNTRACKED],
- "keep the AST after lowering it to HIR"),
+ "keep the AST after lowering it to HIR"),
show_span: Option<String> = (None, parse_opt_string, [TRACKED],
- "show spans for compiler debugging (expr|pat|ty)"),
+ "show spans for compiler debugging (expr|pat|ty)"),
print_type_sizes: bool = (false, parse_bool, [UNTRACKED],
- "print layout information for each type encountered"),
+ "print layout information for each type encountered"),
print_mono_items: Option<String> = (None, parse_opt_string, [UNTRACKED],
- "print the result of the monomorphization collection pass"),
+ "print the result of the monomorphization collection pass"),
mir_opt_level: usize = (1, parse_uint, [TRACKED],
- "set the MIR optimization level (0-3, default: 1)"),
+ "set the MIR optimization level (0-3, default: 1)"),
mutable_noalias: Option<bool> = (None, parse_opt_bool, [TRACKED],
- "emit noalias metadata for mutable references (default: yes on LLVM >= 6)"),
+ "emit noalias metadata for mutable references (default: yes on LLVM >= 6)"),
arg_align_attributes: bool = (false, parse_bool, [TRACKED],
- "emit align metadata for reference arguments"),
+ "emit align metadata for reference arguments"),
dump_mir: Option<String> = (None, parse_opt_string, [UNTRACKED],
- "dump MIR state at various points in transforms"),
+ "dump MIR state at various points in transforms"),
dump_mir_dir: String = (String::from("mir_dump"), parse_string, [UNTRACKED],
- "the directory the MIR is dumped into"),
+ "the directory the MIR is dumped into"),
dump_mir_graphviz: bool = (false, parse_bool, [UNTRACKED],
- "in addition to `.mir` files, create graphviz `.dot` files"),
+ "in addition to `.mir` files, create graphviz `.dot` files"),
dump_mir_exclude_pass_number: bool = (false, parse_bool, [UNTRACKED],
- "if set, exclude the pass number when dumping MIR (used in tests)"),
+ "if set, exclude the pass number when dumping MIR (used in tests)"),
mir_emit_validate: usize = (0, parse_uint, [TRACKED],
- "emit Validate MIR statements, interpreted e.g. by miri (0: do not emit; 1: if function \
- contains unsafe block, only validate arguments; 2: always emit full validation)"),
+ "emit Validate MIR statements, interpreted e.g. by miri (0: do not emit; 1: if function \
+ contains unsafe block, only validate arguments; 2: always emit full validation)"),
perf_stats: bool = (false, parse_bool, [UNTRACKED],
- "print some performance-related statistics"),
+ "print some performance-related statistics"),
hir_stats: bool = (false, parse_bool, [UNTRACKED],
- "print some statistics about AST and HIR"),
+ "print some statistics about AST and HIR"),
mir_stats: bool = (false, parse_bool, [UNTRACKED],
- "print some statistics about MIR"),
+ "print some statistics about MIR"),
always_encode_mir: bool = (false, parse_bool, [TRACKED],
- "encode MIR of all functions into the crate metadata"),
+ "encode MIR of all functions into the crate metadata"),
osx_rpath_install_name: bool = (false, parse_bool, [TRACKED],
- "pass `-install_name @rpath/...` to the macOS linker"),
+ "pass `-install_name @rpath/...` to the macOS linker"),
sanitizer: Option<Sanitizer> = (None, parse_sanitizer, [TRACKED],
- "Use a sanitizer"),
+ "Use a sanitizer"),
linker_flavor: Option<LinkerFlavor> = (None, parse_linker_flavor, [UNTRACKED],
"Linker flavor"),
fuel: Option<(String, u64)> = (None, parse_optimization_fuel, [TRACKED],
profile: bool = (false, parse_bool, [TRACKED],
"insert profiling code"),
pgo_gen: Option<String> = (None, parse_opt_string, [TRACKED],
- "Generate PGO profile data, to a given file, or to the default \
- location if it's empty."),
+ "Generate PGO profile data, to a given file, or to the default location if it's empty."),
pgo_use: String = (String::new(), parse_string, [TRACKED],
"Use PGO profile data from the given profile file."),
- disable_instrumentation_preinliner: bool =
- (false, parse_bool, [TRACKED], "Disable the instrumentation pre-inliner, \
- useful for profiling / PGO."),
+ disable_instrumentation_preinliner: bool = (false, parse_bool, [TRACKED],
+ "Disable the instrumentation pre-inliner, useful for profiling / PGO."),
relro_level: Option<RelroLevel> = (None, parse_relro_level, [TRACKED],
"choose which RELRO level to use"),
nll_subminimal_causes: bool = (false, parse_bool, [UNTRACKED],
inline_in_all_cgus: Option<bool> = (None, parse_opt_bool, [TRACKED],
"control whether #[inline] functions are in all cgus"),
tls_model: Option<String> = (None, parse_opt_string, [TRACKED],
- "choose the TLS model to use (rustc --print tls-models for details)"),
+ "choose the TLS model to use (rustc --print tls-models for details)"),
saturating_float_casts: bool = (false, parse_bool, [TRACKED],
"make float->int casts UB-free: numbers outside the integer type's range are clipped to \
the max/min integer respectively, and NaN is mapped to 0"),
`hir` (the HIR), `hir,identified`, or
`hir,typed` (HIR with types for each node)."),
run_dsymutil: Option<bool> = (None, parse_opt_bool, [TRACKED],
- "run `dsymutil` and delete intermediate object files"),
+ "run `dsymutil` and delete intermediate object files"),
ui_testing: bool = (false, parse_bool, [UNTRACKED],
- "format compiler diagnostics in a way that's better suitable for UI testing"),
+ "format compiler diagnostics in a way that's better suitable for UI testing"),
embed_bitcode: bool = (false, parse_bool, [TRACKED],
- "embed LLVM bitcode in object files"),
+ "embed LLVM bitcode in object files"),
strip_debuginfo_if_disabled: Option<bool> = (None, parse_opt_bool, [TRACKED],
"tell the linker to strip debuginfo when building without debuginfo enabled."),
share_generics: Option<bool> = (None, parse_opt_bool, [TRACKED],
- "make the current crate share its generic instantiations"),
+ "make the current crate share its generic instantiations"),
chalk: bool = (false, parse_bool, [TRACKED],
- "enable the experimental Chalk-based trait solving engine"),
+ "enable the experimental Chalk-based trait solving engine"),
cross_lang_lto: CrossLangLto = (CrossLangLto::Disabled, parse_cross_lang_lto, [TRACKED],
- "generate build artifacts that are compatible with linker-based LTO."),
+ "generate build artifacts that are compatible with linker-based LTO."),
no_parallel_llvm: bool = (false, parse_bool, [UNTRACKED],
- "don't run LLVM in parallel (while keeping codegen-units and ThinLTO)"),
+ "don't run LLVM in parallel (while keeping codegen-units and ThinLTO)"),
no_leak_check: bool = (false, parse_bool, [UNTRACKED],
"disables the 'leak check' for subtyping; unsound, but useful for tests"),
crate_attr: Vec<String> = (Vec::new(), parse_string_push, [TRACKED],
"inject the given attribute in the crate"),
self_profile: bool = (false, parse_bool, [UNTRACKED],
- "run the self profiler"),
+ "run the self profiler"),
profile_json: bool = (false, parse_bool, [UNTRACKED],
- "output a json file with profiler results"),
+ "output a json file with profiler results"),
emit_stack_sizes: bool = (false, parse_bool, [UNTRACKED],
- "emits a section containing stack size metadata"),
+ "emits a section containing stack size metadata"),
plt: Option<bool> = (None, parse_opt_bool, [TRACKED],
"whether to use the PLT when calling into shared libraries;
only has effect for PIC code on systems with ELF binaries
let atomic_cas = sess.target.target.options.atomic_cas;
let mut ret = FxHashSet::default();
+ ret.reserve(6); // the minimum number of insertions
// Target bindings.
ret.insert((Symbol::intern("target_os"), Some(Symbol::intern(os))));
if let Some(ref fam) = sess.target.target.options.target_family {
if sess.opts.crate_types.contains(&CrateType::ProcMacro) {
ret.insert((Symbol::intern("proc_macro"), None));
}
- return ret;
+ ret
}
pub fn build_configuration(sess: &Session, mut user_cfg: ast::CrateConfig) -> ast::CrateConfig {
}
pub fn build_target_config(opts: &Options, sp: &Handler) -> Config {
- let target = match Target::search(&opts.target_triple) {
- Ok(t) => t,
- Err(e) => {
- sp.struct_fatal(&format!("Error loading target specification: {}", e))
- .help("Use `--print target-list` for a list of built-in targets")
- .emit();
- FatalError.raise();
- }
- };
+ let target = Target::search(&opts.target_triple).unwrap_or_else(|e| {
+ sp.struct_fatal(&format!("Error loading target specification: {}", e))
+ .help("Use `--print target-list` for a list of built-in targets")
+ .emit();
+ FatalError.raise();
+ });
let (isize_ty, usize_ty) = match &target.target_pointer_width[..] {
"16" => (ast::IntTy::I16, ast::UintTy::U16),
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum OptionStability {
Stable,
-
Unstable,
}
};
let edition = match matches.opt_str("edition") {
- Some(arg) => match Edition::from_str(&arg){
- Ok(edition) => edition,
- Err(_) => early_error(
+ Some(arg) => Edition::from_str(&arg).unwrap_or_else(|_|
+ early_error(
ErrorOutputType::default(),
&format!(
"argument for --edition must be one of: \
- {}. (instead was `{}`)",
+ {}. (instead was `{}`)",
EDITION_NAME_LIST,
arg
),
),
- }
+ ),
None => DEFAULT_EDITION,
};
ErrorOutputType::default(),
&format!(
"Edition {} is unstable and only \
- available for nightly builds of rustc.",
+ available for nightly builds of rustc.",
edition,
)
)
for output_type in list.split(',') {
let mut parts = output_type.splitn(2, '=');
let shorthand = parts.next().unwrap();
- let output_type = match OutputType::from_shorthand(shorthand) {
- Some(output_type) => output_type,
- None => early_error(
+ let output_type = OutputType::from_shorthand(shorthand).unwrap_or_else(||
+ early_error(
error_format,
&format!(
"unknown emission type: `{}` - expected one of: {}",
OutputType::shorthands_display(),
),
),
- };
+ );
let path = parts.next().map(PathBuf::from);
output_types.insert(output_type, path);
}
let target_triple = if let Some(target) = matches.opt_str("target") {
if target.ends_with(".json") {
let path = Path::new(&target);
- match TargetTriple::from_path(&path) {
- Ok(triple) => triple,
- Err(_) => {
- early_error(error_format, &format!("target file {:?} does not exist", path))
- }
- }
+ TargetTriple::from_path(&path).unwrap_or_else(|_|
+ early_error(error_format, &format!("target file {:?} does not exist", path)))
} else {
TargetTriple::TargetTriple(target)
}
let mut name_parts = name.splitn(2, ':');
let name = name_parts.next().unwrap();
let new_name = name_parts.next();
- (name.to_string(), new_name.map(|n| n.to_string()), kind)
+ (name.to_owned(), new_name.map(|n| n.to_owned()), kind)
})
.collect();
let mut externs: BTreeMap<_, BTreeSet<_>> = BTreeMap::new();
for arg in &matches.opt_strs("extern") {
let mut parts = arg.splitn(2, '=');
- let name = match parts.next() {
- Some(s) => s,
- None => early_error(error_format, "--extern value must not be empty"),
- };
+ let name = parts.next().unwrap_or_else(||
+ early_error(error_format, "--extern value must not be empty"));
let location = parts.next().map(|s| s.to_string());
if location.is_none() && !is_unstable_enabled {
early_error(
};
externs
- .entry(name.to_string())
+ .entry(name.to_owned())
.or_default()
.insert(location);
}
"cdylib" => CrateType::Cdylib,
"bin" => CrateType::Executable,
"proc-macro" => CrateType::ProcMacro,
- _ => {
- return Err(format!("unknown crate type: `{}`", part));
- }
+ _ => return Err(format!("unknown crate type: `{}`", part))
};
if !crate_types.contains(&new_part) {
crate_types.push(new_part)
F: FnMut(&Path, PathKind)
{
let mut visited_dirs = FxHashSet::default();
-
+ visited_dirs.reserve(self.search_paths.paths.len() + 1);
for (path, kind) in self.search_paths.iter(self.kind) {
f(path, kind);
visited_dirs.insert(path.to_path_buf());
match env::current_exe() {
Ok(exe) => {
match canonicalize(Some(exe)) {
- Some(mut p) => { p.pop(); p.pop(); return p; },
+ Some(mut p) => { p.pop(); p.pop(); p },
None => bug!("can't determine value for sysroot")
}
}
// to lib64/lib32. This would be more foolproof by basing the sysroot off
// of the directory where librustc is located, rather than where the rustc
// binary is.
- //If --libdir is set during configuration to the value other than
+ // If --libdir is set during configuration to a value other than
// "lib" (i.e. non-default), this value is used (see issue #16552).
- match option_env!("CFG_LIBDIR_RELATIVE") {
- Some(libdir) if libdir != "lib" => return libdir.into(),
- _ => if sysroot.join(PRIMARY_LIB_DIR).join(RUST_LIB_DIR).exists() {
- return PRIMARY_LIB_DIR.into();
- } else {
- return SECONDARY_LIB_DIR.into();
- }
- }
-
#[cfg(target_pointer_width = "64")]
const PRIMARY_LIB_DIR: &'static str = "lib64";
const PRIMARY_LIB_DIR: &'static str = "lib32";
const SECONDARY_LIB_DIR: &'static str = "lib";
+
+ match option_env!("CFG_LIBDIR_RELATIVE") {
+ Some(libdir) if libdir != "lib" => libdir.into(),
+ _ => if sysroot.join(PRIMARY_LIB_DIR).join(RUST_LIB_DIR).exists() {
+ PRIMARY_LIB_DIR.into()
+ } else {
+ SECONDARY_LIB_DIR.into()
+ }
+ }
}
// The name of rustc's own place to organize libraries.
match self.opts.maybe_sysroot {
Some(ref sysroot) => sysroot,
None => self.default_sysroot
- .as_ref()
- .expect("missing sysroot and default_sysroot in Session"),
+ .as_ref()
+ .expect("missing sysroot and default_sysroot in Session"),
}
}
pub fn target_filesearch(&self, kind: PathKind) -> filesearch::FileSearch<'_> {
pub fn set_incr_session_load_dep_graph(&self, load: bool) {
let mut incr_comp_session = self.incr_comp_session.borrow_mut();
- match *incr_comp_session {
- IncrCompSession::Active {
- ref mut load_dep_graph,
- ..
- } => {
- *load_dep_graph = load;
- }
- _ => {}
+ if let IncrCompSession::Active { ref mut load_dep_graph, .. } = *incr_comp_session {
+ *load_dep_graph = load;
}
}
/// This expends fuel if applicable, and records fuel if applicable.
pub fn consider_optimizing<T: Fn() -> String>(&self, crate_name: &str, msg: T) -> bool {
let mut ret = true;
- match self.optimization_fuel_crate {
- Some(ref c) if c == crate_name => {
- assert!(self.query_threads() == 1);
+ if let Some(ref c) = self.optimization_fuel_crate {
+ if c == crate_name {
+ assert_eq!(self.query_threads(), 1);
let fuel = self.optimization_fuel_limit.get();
ret = fuel != 0;
if fuel == 0 && !self.out_of_fuel.get() {
self.optimization_fuel_limit.set(fuel - 1);
}
}
- _ => {}
}
- match self.print_fuel_crate {
- Some(ref c) if c == crate_name => {
- assert!(self.query_threads() == 1);
+ if let Some(ref c) = self.print_fuel_crate {
+ if c == crate_name {
+ assert_eq!(self.query_threads(), 1);
self.print_fuel.set(self.print_fuel.get() + 1);
}
- _ => {}
}
ret
}
source_map: Lrc<source_map::SourceMap>,
) -> Session {
let host_triple = TargetTriple::from_triple(config::host_triple());
- let host = match Target::search(&host_triple) {
- Ok(t) => t,
- Err(e) => {
- span_diagnostic
- .fatal(&format!("Error loading host specification: {}", e))
- .raise();
- }
- };
+ let host = Target::search(&host_triple).unwrap_or_else(|e|
+ span_diagnostic
+ .fatal(&format!("Error loading host specification: {}", e))
+ .raise()
+ );
let target_cfg = config::build_target_config(&sopts, &span_diagnostic);
let p_s = parse::ParseSess::with_span_handler(span_diagnostic, source_map);
let print_fuel_crate = sopts.debugging_opts.print_fuel.clone();
let print_fuel = LockCell::new(0);
- let working_dir = match env::current_dir() {
- Ok(dir) => dir,
- Err(e) => p_s.span_diagnostic
+ let working_dir = env::current_dir().unwrap_or_else(|e|
+ p_s.span_diagnostic
.fatal(&format!("Current directory is invalid: {}", e))
- .raise(),
- };
+ .raise()
+ );
let working_dir = file_path_mapping.map_prefix(working_dir);
let cgu_reuse_tracker = if sopts.debugging_opts.query_dep_graph {
#[derive(Clone, Debug)]
pub struct SearchPaths {
- paths: Vec<(PathKind, PathBuf)>,
+ crate paths: Vec<(PathKind, PathBuf)>,
}
pub struct Iter<'a> {
match ty.sty {
ty::Ref(..) => true,
ty::Adt(def, _) => def.is_fundamental(),
- ty::Dynamic(ref data, ..) => {
- data.principal().map_or(false, |p| tcx.has_attr(p.def_id(), "fundamental"))
- }
+ ty::Dynamic(ref data, ..) => tcx.has_attr(data.principal().def_id(), "fundamental"),
_ => false
}
}
ty::Adt(def, _) => def_id_is_local(def.did, in_crate),
ty::Foreign(did) => def_id_is_local(did, in_crate),
- ty::Dynamic(ref tt, ..) => {
- tt.principal().map_or(false, |p|
- def_id_is_local(p.def_id(), in_crate)
- )
- }
+ ty::Dynamic(ref tt, ..) => def_id_is_local(tt.principal().def_id(), in_crate),
ty::Error => true,
use infer::at::At;
use infer::InferOk;
-use smallvec::SmallVec;
+use infer::canonical::OriginalQueryValues;
use std::iter::FromIterator;
use syntax::source_map::Span;
use ty::subst::Kind;
}
let gcx = tcx.global_tcx();
- let mut orig_values = SmallVec::new();
+ let mut orig_values = OriginalQueryValues::default();
let c_ty = self.infcx.canonicalize_query(&self.param_env.and(ty), &mut orig_values);
let span = self.cause.span;
debug!("c_ty = {:?}", c_ty);
match &gcx.dropck_outlives(c_ty) {
Ok(result) if result.is_proven() => {
if let Ok(InferOk { value, obligations }) =
- self.infcx.instantiate_query_result_and_region_obligations(
+ self.infcx.instantiate_query_response_and_region_obligations(
self.cause,
self.param_env,
&orig_values,
// except according to those terms.
use infer::InferCtxt;
-use smallvec::SmallVec;
+use infer::canonical::OriginalQueryValues;
use traits::{EvaluationResult, PredicateObligation, SelectionContext,
TraitQueryMode, OverflowError};
&self,
obligation: &PredicateObligation<'tcx>,
) -> Result<EvaluationResult, OverflowError> {
- let mut _orig_values = SmallVec::new();
+ let mut _orig_values = OriginalQueryValues::default();
let c_pred = self.canonicalize_query(&obligation.param_env.and(obligation.predicate),
&mut _orig_values);
// Run canonical query. If overflow occurs, rerun from scratch but this time
//! `normalize_projection_ty` query when it encounters projections.
use infer::at::At;
+use infer::canonical::OriginalQueryValues;
use infer::{InferCtxt, InferOk};
use mir::interpret::{ConstValue, GlobalId};
-use smallvec::SmallVec;
use traits::project::Normalized;
use traits::{Obligation, ObligationCause, PredicateObligation, Reveal};
use ty::fold::{TypeFoldable, TypeFolder};
let gcx = self.infcx.tcx.global_tcx();
- let mut orig_values = SmallVec::new();
+ let mut orig_values = OriginalQueryValues::default();
let c_data = self.infcx.canonicalize_query(
&self.param_env.and(*data), &mut orig_values);
debug!("QueryNormalizer: c_data = {:#?}", c_data);
return ty;
}
- match self.infcx.instantiate_query_result_and_region_obligations(
+ match self.infcx.instantiate_query_response_and_region_obligations(
self.cause,
self.param_env,
&orig_values,
// except according to those terms.
use infer::InferCtxt;
+use infer::canonical::OriginalQueryValues;
use syntax::ast;
use syntax::source_map::Span;
-use smallvec::SmallVec;
use traits::{FulfillmentContext, ObligationCause, TraitEngine, TraitEngineExt};
use traits::query::NoSolution;
use ty::{self, Ty, TyCtxt};
) -> Vec<OutlivesBound<'tcx>> {
debug!("implied_outlives_bounds(ty = {:?})", ty);
- let mut orig_values = SmallVec::new();
+ let mut orig_values = OriginalQueryValues::default();
let key = self.canonicalize_query(¶m_env.and(ty), &mut orig_values);
let result = match self.tcx.global_tcx().implied_outlives_bounds(key) {
Ok(r) => r,
};
assert!(result.value.is_proven());
- let result = self.instantiate_query_result_and_region_obligations(
+ let result = self.instantiate_query_response_and_region_obligations(
&ObligationCause::misc(span, body_id), param_env, &orig_values, &result);
debug!("implied_outlives_bounds for {:?}: {:#?}", ty, result);
let result = match result {
use std::fmt;
use traits::query::Fallible;
-use infer::canonical::query_result;
+use infer::canonical::query_response;
use infer::canonical::QueryRegionConstraint;
use std::rc::Rc;
use syntax::source_map::DUMMY_SP;
let region_constraint_data = infcx.take_and_reset_region_constraints();
- let outlives = query_result::make_query_outlives(
+ let outlives = query_response::make_query_outlives(
infcx.tcx,
region_obligations
.iter()
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use traits::query::Fallible;
use ty::{ParamEnvAnd, Ty, TyCtxt};
}
impl<'gcx: 'tcx, 'tcx> super::QueryTypeOp<'gcx, 'tcx> for Eq<'tcx> {
- type QueryResult = ();
+ type QueryResponse = ();
fn try_fast_path(
_tcx: TyCtxt<'_, 'gcx, 'tcx>,
key: &ParamEnvAnd<'tcx, Eq<'tcx>>,
- ) -> Option<Self::QueryResult> {
+ ) -> Option<Self::QueryResponse> {
if key.value.a == key.value.b {
Some(())
} else {
fn perform_query(
tcx: TyCtxt<'_, 'gcx, 'tcx>,
canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
- ) -> Fallible<CanonicalizedQueryResult<'gcx, ()>> {
+ ) -> Fallible<CanonicalizedQueryResponse<'gcx, ()>> {
tcx.type_op_eq(canonicalized)
}
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResult<'gcx, ()>,
- ) -> &'a Canonical<'tcx, QueryResult<'tcx, ()>> {
+ v: &'a CanonicalizedQueryResponse<'gcx, ()>,
+ ) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> {
v
}
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use traits::query::outlives_bounds::OutlivesBound;
use traits::query::Fallible;
use ty::{ParamEnvAnd, Ty, TyCtxt};
}
impl<'gcx: 'tcx, 'tcx> super::QueryTypeOp<'gcx, 'tcx> for ImpliedOutlivesBounds<'tcx> {
- type QueryResult = Vec<OutlivesBound<'tcx>>;
+ type QueryResponse = Vec<OutlivesBound<'tcx>>;
fn try_fast_path(
_tcx: TyCtxt<'_, 'gcx, 'tcx>,
_key: &ParamEnvAnd<'tcx, Self>,
- ) -> Option<Self::QueryResult> {
+ ) -> Option<Self::QueryResponse> {
None
}
fn perform_query(
tcx: TyCtxt<'_, 'gcx, 'tcx>,
canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
- ) -> Fallible<CanonicalizedQueryResult<'gcx, Self::QueryResult>> {
+ ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self::QueryResponse>> {
// FIXME the query should take a `ImpliedOutlivesBounds`
let Canonical {
variables,
}
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResult<'gcx, Self::QueryResult>,
- ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self::QueryResult>> {
+ v: &'a CanonicalizedQueryResponse<'gcx, Self::QueryResponse>,
+ ) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self::QueryResponse>> {
v
}
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryRegionConstraint,
- QueryResult};
+use infer::canonical::{
+ Canonical, Canonicalized, CanonicalizedQueryResponse, OriginalQueryValues,
+ QueryRegionConstraint, QueryResponse,
+};
use infer::{InferCtxt, InferOk};
-use smallvec::SmallVec;
use std::fmt;
use std::rc::Rc;
use traits::query::Fallible;
pub trait QueryTypeOp<'gcx: 'tcx, 'tcx>:
fmt::Debug + Sized + TypeFoldable<'tcx> + Lift<'gcx>
{
- type QueryResult: TypeFoldable<'tcx> + Lift<'gcx>;
+ type QueryResponse: TypeFoldable<'tcx> + Lift<'gcx>;
/// Give query the option for a simple fast path that never
/// actually hits the tcx cache lookup etc. Return `Some(r)` with
fn try_fast_path(
tcx: TyCtxt<'_, 'gcx, 'tcx>,
key: &ParamEnvAnd<'tcx, Self>,
- ) -> Option<Self::QueryResult>;
+ ) -> Option<Self::QueryResponse>;
/// Performs the actual query with the canonicalized key -- the
/// real work happens here. This method is not given an `infcx`
fn perform_query(
tcx: TyCtxt<'_, 'gcx, 'tcx>,
canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
- ) -> Fallible<CanonicalizedQueryResult<'gcx, Self::QueryResult>>;
+ ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self::QueryResponse>>;
/// Casts a lifted query result (which is in the gcx lifetime)
/// into the tcx lifetime. This is always just an identity cast,
/// but the generic code doesn't realize it -- put another way, in
- /// the generic code, we have a `Lifted<'gcx, Self::QueryResult>`
- /// and we want to convert that to a `Self::QueryResult`. This is
+ /// the generic code, we have a `Lifted<'gcx, Self::QueryResponse>`
+ /// and we want to convert that to a `Self::QueryResponse`. This is
/// not a priori valid, so we can't do it -- but in practice, it
/// is always a no-op (e.g., the lifted form of a type,
/// `Ty<'gcx>`, is a subtype of `Ty<'tcx>`). So we have to push
/// the operation into the impls that know more specifically what
- /// `QueryResult` is. This operation would (maybe) be nicer with
+ /// `QueryResponse` is. This operation would (maybe) be nicer with
/// something like HKTs or GATs, since then we could make
- /// `QueryResult` parametric and `'gcx` and `'tcx` etc.
+ /// `QueryResponse` parametric and `'gcx` and `'tcx` etc.
fn shrink_to_tcx_lifetime(
- lifted_query_result: &'a CanonicalizedQueryResult<'gcx, Self::QueryResult>,
- ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self::QueryResult>>;
+ lifted_query_result: &'a CanonicalizedQueryResponse<'gcx, Self::QueryResponse>,
+ ) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self::QueryResponse>>;
fn fully_perform_into(
query_key: ParamEnvAnd<'tcx, Self>,
infcx: &InferCtxt<'_, 'gcx, 'tcx>,
output_query_region_constraints: &mut Vec<QueryRegionConstraint<'tcx>>,
- ) -> Fallible<Self::QueryResult> {
+ ) -> Fallible<Self::QueryResponse> {
if let Some(result) = QueryTypeOp::try_fast_path(infcx.tcx, &query_key) {
return Ok(result);
}
// `canonicalize_hr_query_hack` here because of things
// like the subtype query, which go awry around
// `'static` otherwise.
- let mut canonical_var_values = SmallVec::new();
+ let mut canonical_var_values = OriginalQueryValues::default();
let canonical_self =
infcx.canonicalize_hr_query_hack(&query_key, &mut canonical_var_values);
let canonical_result = Self::perform_query(infcx.tcx, canonical_self)?;
let param_env = query_key.param_env;
let InferOk { value, obligations } = infcx
- .instantiate_nll_query_result_and_region_obligations(
+ .instantiate_nll_query_response_and_region_obligations(
&ObligationCause::dummy(),
param_env,
&canonical_var_values,
where
Q: QueryTypeOp<'gcx, 'tcx>,
{
- type Output = Q::QueryResult;
+ type Output = Q::QueryResponse;
fn fully_perform(
self,
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use std::fmt;
use traits::query::Fallible;
use ty::fold::TypeFoldable;
where
T: Normalizable<'gcx, 'tcx>,
{
- type QueryResult = T;
+ type QueryResponse = T;
fn try_fast_path(_tcx: TyCtxt<'_, 'gcx, 'tcx>, key: &ParamEnvAnd<'tcx, Self>) -> Option<T> {
if !key.value.value.has_projections() {
fn perform_query(
tcx: TyCtxt<'_, 'gcx, 'tcx>,
canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
- ) -> Fallible<CanonicalizedQueryResult<'gcx, Self::QueryResult>> {
+ ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self::QueryResponse>> {
T::type_op_method(tcx, canonicalized)
}
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResult<'gcx, T>,
- ) -> &'a Canonical<'tcx, QueryResult<'tcx, T>> {
+ v: &'a CanonicalizedQueryResponse<'gcx, T>,
+ ) -> &'a Canonical<'tcx, QueryResponse<'tcx, T>> {
T::shrink_to_tcx_lifetime(v)
}
}
fn type_op_method(
tcx: TyCtxt<'_, 'gcx, 'tcx>,
canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
- ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>>;
+ ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self>>;
/// Convert from the `'gcx` (lifted) form of `Self` into the `tcx`
/// form of `Self`.
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResult<'gcx, Self>,
- ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>>;
+ v: &'a CanonicalizedQueryResponse<'gcx, Self>,
+ ) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>>;
}
impl Normalizable<'gcx, 'tcx> for Ty<'tcx>
fn type_op_method(
tcx: TyCtxt<'_, 'gcx, 'tcx>,
canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
- ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>> {
+ ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self>> {
tcx.type_op_normalize_ty(canonicalized)
}
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResult<'gcx, Self>,
- ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>> {
+ v: &'a CanonicalizedQueryResponse<'gcx, Self>,
+ ) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> {
v
}
}
fn type_op_method(
tcx: TyCtxt<'_, 'gcx, 'tcx>,
canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
- ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>> {
+ ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self>> {
tcx.type_op_normalize_predicate(canonicalized)
}
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResult<'gcx, Self>,
- ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>> {
+ v: &'a CanonicalizedQueryResponse<'gcx, Self>,
+ ) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> {
v
}
}
fn type_op_method(
tcx: TyCtxt<'_, 'gcx, 'tcx>,
canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
- ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>> {
+ ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self>> {
tcx.type_op_normalize_poly_fn_sig(canonicalized)
}
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResult<'gcx, Self>,
- ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>> {
+ v: &'a CanonicalizedQueryResponse<'gcx, Self>,
+ ) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> {
v
}
}
fn type_op_method(
tcx: TyCtxt<'_, 'gcx, 'tcx>,
canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
- ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>> {
+ ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self>> {
tcx.type_op_normalize_fn_sig(canonicalized)
}
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResult<'gcx, Self>,
- ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>> {
+ v: &'a CanonicalizedQueryResponse<'gcx, Self>,
+ ) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self>> {
v
}
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use traits::query::dropck_outlives::trivial_dropck_outlives;
use traits::query::dropck_outlives::DropckOutlivesResult;
use traits::query::Fallible;
where
'gcx: 'tcx,
{
- type QueryResult = DropckOutlivesResult<'tcx>;
+ type QueryResponse = DropckOutlivesResult<'tcx>;
fn try_fast_path(
tcx: TyCtxt<'_, 'gcx, 'tcx>,
key: &ParamEnvAnd<'tcx, Self>,
- ) -> Option<Self::QueryResult> {
+ ) -> Option<Self::QueryResponse> {
if trivial_dropck_outlives(tcx, key.value.dropped_ty) {
Some(DropckOutlivesResult::default())
} else {
fn perform_query(
tcx: TyCtxt<'_, 'gcx, 'tcx>,
canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
- ) -> Fallible<CanonicalizedQueryResult<'gcx, Self::QueryResult>> {
+ ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self::QueryResponse>> {
// Subtle: note that we are not invoking
// `infcx.at(...).dropck_outlives(...)` here, but rather the
// underlying `dropck_outlives` query. This same underlying
}
fn shrink_to_tcx_lifetime(
- lifted_query_result: &'a CanonicalizedQueryResult<'gcx, Self::QueryResult>,
- ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self::QueryResult>> {
+ lifted_query_result: &'a CanonicalizedQueryResponse<'gcx, Self::QueryResponse>,
+ ) -> &'a Canonical<'tcx, QueryResponse<'tcx, Self::QueryResponse>> {
lifted_query_result
}
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use traits::query::Fallible;
use ty::{ParamEnvAnd, Predicate, TyCtxt};
}
impl<'gcx: 'tcx, 'tcx> super::QueryTypeOp<'gcx, 'tcx> for ProvePredicate<'tcx> {
- type QueryResult = ();
+ type QueryResponse = ();
fn try_fast_path(
tcx: TyCtxt<'_, 'gcx, 'tcx>,
key: &ParamEnvAnd<'tcx, Self>,
- ) -> Option<Self::QueryResult> {
+ ) -> Option<Self::QueryResponse> {
// Proving Sized, very often on "obviously sized" types like
// `&T`, accounts for about 60% percentage of the predicates
// we have to prove. No need to canonicalize and all that for
fn perform_query(
tcx: TyCtxt<'_, 'gcx, 'tcx>,
canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
- ) -> Fallible<CanonicalizedQueryResult<'gcx, ()>> {
+ ) -> Fallible<CanonicalizedQueryResponse<'gcx, ()>> {
tcx.type_op_prove_predicate(canonicalized)
}
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResult<'gcx, ()>,
- ) -> &'a Canonical<'tcx, QueryResult<'tcx, ()>> {
+ v: &'a CanonicalizedQueryResponse<'gcx, ()>,
+ ) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> {
v
}
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResponse, QueryResponse};
use traits::query::Fallible;
use ty::{ParamEnvAnd, Ty, TyCtxt};
}
impl<'gcx: 'tcx, 'tcx> super::QueryTypeOp<'gcx, 'tcx> for Subtype<'tcx> {
- type QueryResult = ();
+ type QueryResponse = ();
fn try_fast_path(_tcx: TyCtxt<'_, 'gcx, 'tcx>, key: &ParamEnvAnd<'tcx, Self>) -> Option<()> {
if key.value.sub == key.value.sup {
fn perform_query(
tcx: TyCtxt<'_, 'gcx, 'tcx>,
canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
- ) -> Fallible<CanonicalizedQueryResult<'gcx, ()>> {
+ ) -> Fallible<CanonicalizedQueryResponse<'gcx, ()>> {
tcx.type_op_subtype(canonicalized)
}
fn shrink_to_tcx_lifetime(
- v: &'a CanonicalizedQueryResult<'gcx, ()>,
- ) -> &'a Canonical<'tcx, QueryResult<'tcx, ()>> {
+ v: &'a CanonicalizedQueryResponse<'gcx, ()>,
+ ) -> &'a Canonical<'tcx, QueryResponse<'tcx, ()>> {
v
}
}
// Winnow, but record the exact outcome of evaluation, which
// is needed for specialization. Propagate overflow if it occurs.
- let candidates: Result<Vec<Option<EvaluatedCandidate<'_>>>, _> = candidates
- .into_iter()
+ let mut candidates = candidates.into_iter()
.map(|c| match self.evaluate_candidate(stack, &c) {
Ok(eval) if eval.may_apply() => Ok(Some(EvaluatedCandidate {
candidate: c,
Ok(_) => Ok(None),
Err(OverflowError) => Err(Overflow),
})
- .collect();
-
- let mut candidates: Vec<EvaluatedCandidate<'_>> =
- candidates?.into_iter().filter_map(|c| c).collect();
+ .flat_map(Result::transpose)
+ .collect::<Result<Vec<_>, _>>()?;
debug!(
"winnowed to {} candidates for {:?}: {:?}",
candidates
);
- // If there are STILL multiple candidate, we can further
+ // If there are STILL multiple candidates, we can further
// reduce the list by dropping duplicates -- including
// resolving specializations.
if candidates.len() > 1 {
return;
}
- match data.principal() {
- Some(p) => p.with_self_ty(this.tcx(), self_ty),
- None => return,
- }
+ data.principal().with_self_ty(this.tcx(), self_ty)
}
ty::Infer(ty::TyVar(_)) => {
debug!("assemble_candidates_from_object_ty: ambiguous");
//
// We always upcast when we can because of reason
// #2 (region bounds).
- match (data_a.principal(), data_b.principal()) {
- (Some(a), Some(b)) => {
- a.def_id() == b.def_id()
- && data_b.auto_traits()
- // All of a's auto traits need to be in b's auto traits.
- .all(|b| data_a.auto_traits().any(|a| a == b))
- }
- _ => false,
- }
+ data_a.principal().def_id() == data_b.principal().def_id()
+ && data_b.auto_traits()
+ // All of a's auto traits need to be in b's auto traits.
+ .all(|b| data_a.auto_traits().any(|a| a == b))
}
// T -> Trait.
.shallow_resolve(*obligation.self_ty().skip_binder());
let poly_trait_ref = match self_ty.sty {
ty::Dynamic(ref data, ..) => {
- data.principal().unwrap().with_self_ty(self.tcx(), self_ty)
+ data.principal().with_self_ty(self.tcx(), self_ty)
}
_ => span_bug!(obligation.cause.span, "object candidate with non-object"),
};
(&ty::Dynamic(ref data_a, r_a), &ty::Dynamic(ref data_b, r_b)) => {
// See assemble_candidates_for_unsizing for more info.
let existential_predicates = data_a.map_bound(|data_a| {
- let principal = data_a.principal();
- let iter = principal
- .into_iter()
- .map(ty::ExistentialPredicate::Trait)
+ let iter = iter::once(ty::ExistentialPredicate::Trait(data_a.principal()))
.chain(
data_a
.projection_bounds()
// T -> Trait.
(_, &ty::Dynamic(ref data, r)) => {
let mut object_dids = data.auto_traits()
- .chain(data.principal().map(|p| p.def_id()));
+ .chain(iter::once(data.principal().def_id()));
if let Some(did) = object_dids.find(|did| !tcx.is_object_safe(*did)) {
return Err(TraitNotObjectSafe(did));
}
use middle::stability;
use mir::{self, Mir, interpret};
use mir::interpret::Allocation;
-use ty::subst::{CanonicalSubsts, Kind, Substs, Subst};
+use ty::subst::{CanonicalUserSubsts, Kind, Substs, Subst};
use ty::ReprOptions;
use traits;
use traits::{Clause, Clauses, GoalKind, Goal, Goals};
/// If the user wrote `foo.collect::<Vec<_>>()`, then the
/// canonical substitutions would include only `for<X> { Vec<X>
/// }`.
- user_substs: ItemLocalMap<CanonicalSubsts<'tcx>>,
+ user_substs: ItemLocalMap<CanonicalUserSubsts<'tcx>>,
adjustments: ItemLocalMap<Vec<ty::adjustment::Adjustment<'tcx>>>,
self.node_substs.get(&id.local_id).cloned()
}
- pub fn user_substs_mut(&mut self) -> LocalTableInContextMut<'_, CanonicalSubsts<'tcx>> {
+ pub fn user_substs_mut(&mut self) -> LocalTableInContextMut<'_, CanonicalUserSubsts<'tcx>> {
LocalTableInContextMut {
local_id_root: self.local_id_root,
data: &mut self.user_substs
}
}
- pub fn user_substs(&self, id: hir::HirId) -> Option<CanonicalSubsts<'tcx>> {
+ pub fn user_substs(&self, id: hir::HirId) -> Option<CanonicalUserSubsts<'tcx>> {
validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
self.user_substs.get(&id.local_id).cloned()
}
ty::FnDef(..) => "fn item".into(),
ty::FnPtr(_) => "fn pointer".into(),
ty::Dynamic(ref inner, ..) => {
- inner.principal().map_or_else(|| "trait".into(),
- |p| format!("trait {}", tcx.item_path_str(p.def_id())).into())
+ format!("trait {}", tcx.item_path_str(inner.principal().def_id())).into()
}
ty::Closure(..) => "closure".into(),
ty::Generator(..) => "generator".into(),
ty::Array(..) | ty::Slice(_) => Some(ArraySimplifiedType),
ty::RawPtr(_) => Some(PtrSimplifiedType),
ty::Dynamic(ref trait_info, ..) => {
- trait_info.principal().map(|p| TraitSimplifiedType(p.def_id()))
+ Some(TraitSimplifiedType(trait_info.principal().def_id()))
}
ty::Ref(_, ty, _) => {
// since we introduce auto-refs during method lookup, we
match ty.sty {
ty::Adt(adt_def, _) => Some(adt_def.did),
- ty::Dynamic(data, ..) => data.principal().map(|p| p.def_id()),
+ ty::Dynamic(data, ..) => Some(data.principal().def_id()),
ty::Array(subty, _) |
ty::Slice(subty) => characteristic_def_id_of_type(subty),
use syntax_pos::{DUMMY_SP, Span};
use smallvec;
+use rustc_data_structures::indexed_vec::Idx;
use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult,
HashStable};
/// "Universes" are used during type- and trait-checking in the
/// presence of `for<..>` binders to control what sets of names are
/// visible. Universes are arranged into a tree: the root universe
-/// contains names that are always visible. But when you enter into
-/// some subuniverse, then it may add names that are only visible
-/// within that subtree (but it can still name the names of its
-/// ancestor universes).
+/// contains names that are always visible. Each child then adds a new
+/// set of names that are visible, in addition to those of its parent.
+/// We say that the child universe "extends" the parent universe with
+/// new names.
///
/// To make this more concrete, consider this program:
///
/// ```
///
/// The struct name `Foo` is in the root universe U0. But the type
-/// parameter `T`, introduced on `bar`, is in a subuniverse U1 --
-/// i.e., within `bar`, we can name both `T` and `Foo`, but outside of
-/// `bar`, we cannot name `T`. Then, within the type of `y`, the
-/// region `'a` is in a subuniverse U2 of U1, because we can name it
-/// inside the fn type but not outside.
+/// parameter `T`, introduced on `bar`, is in an extended universe U1
+/// -- i.e., within `bar`, we can name both `T` and `Foo`, but outside
+/// of `bar`, we cannot name `T`. Then, within the type of `y`, the
+/// region `'a` is in a universe U2 that extends U1, because we can
+/// name it inside the fn type but not outside.
///
/// Universes are used to do type- and trait-checking around these
/// "forall" binders (also called **universal quantification**). The
/// declared, but a type name in a non-zero universe is a placeholder
/// type -- an idealized representative of "types in general" that we
/// use for checking generic functions.
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
-pub struct UniverseIndex(u32);
-
-impl UniverseIndex {
- /// The root universe, where things that the user defined are
- /// visible.
- pub const ROOT: Self = UniverseIndex(0);
+newtype_index! {
+ pub struct UniverseIndex {
+ DEBUG_FORMAT = "U{}",
+ }
+}
- /// The "max universe" -- this isn't really a valid universe, but
- /// it's useful sometimes as a "starting value" when you are
- /// taking the minimum of a (non-empty!) set of universes.
- pub const MAX: Self = UniverseIndex(::std::u32::MAX);
+impl_stable_hash_for!(struct UniverseIndex { private });
- /// Creates a universe index from the given integer. Not to be
- /// used lightly lest you pick a bad value. But sometimes we
- /// convert universe indices into integers and back for various
- /// reasons.
- pub fn from_u32(index: u32) -> Self {
- UniverseIndex(index)
- }
+impl UniverseIndex {
+ pub const ROOT: UniverseIndex = UniverseIndex::from_u32_const(0);
- /// A "subuniverse" corresponds to being inside a `forall` quantifier.
- /// So, for example, suppose we have this type in universe `U`:
+ /// Returns the "next" universe index in order -- this new index
+ /// is considered to extend all previous universes. This
+ /// corresponds to entering a `forall` quantifier. So, for
+ /// example, suppose we have this type in universe `U`:
///
/// ```
/// for<'a> fn(&'a u32)
/// ```
///
/// Once we "enter" into this `for<'a>` quantifier, we are in a
- /// subuniverse of `U` -- in this new universe, we can name the
- /// region `'a`, but that region was not nameable from `U` because
- /// it was not in scope there.
- pub fn subuniverse(self) -> UniverseIndex {
- UniverseIndex(self.0.checked_add(1).unwrap())
- }
-
- /// True if the names in this universe are a subset of the names in `other`.
- pub fn is_subset_of(self, other: UniverseIndex) -> bool {
- self.0 <= other.0
- }
-
- pub fn as_u32(&self) -> u32 {
- self.0
- }
-
- pub fn as_usize(&self) -> usize {
- self.0 as usize
- }
-}
-
-impl fmt::Debug for UniverseIndex {
- fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
- write!(fmt, "U{}", self.as_u32())
- }
-}
-
-impl From<u32> for UniverseIndex {
- fn from(index: u32) -> Self {
- UniverseIndex(index)
+ /// new universe that extends `U` -- in this new universe, we can
+ /// name the region `'a`, but that region was not nameable from
+ /// `U` because it was not in scope there.
+ pub fn next_universe(self) -> UniverseIndex {
+ UniverseIndex::from_u32(self.private.checked_add(1).unwrap())
+ }
+
+ /// True if `self` can name a name from `other` -- in other words,
+ /// if the set of names in `self` is a superset of those in
+ /// `other`.
+ pub fn can_name(self, other: UniverseIndex) -> bool {
+ self.private >= other.private
}
}
[] fn normalize_projection_ty: NormalizeProjectionTy(
CanonicalProjectionGoal<'tcx>
) -> Result<
- Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, NormalizationResult<'tcx>>>>,
+ Lrc<Canonical<'tcx, canonical::QueryResponse<'tcx, NormalizationResult<'tcx>>>>,
NoSolution,
>,
[] fn implied_outlives_bounds: ImpliedOutlivesBounds(
CanonicalTyGoal<'tcx>
) -> Result<
- Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, Vec<OutlivesBound<'tcx>>>>>,
+ Lrc<Canonical<'tcx, canonical::QueryResponse<'tcx, Vec<OutlivesBound<'tcx>>>>>,
NoSolution,
>,
[] fn dropck_outlives: DropckOutlives(
CanonicalTyGoal<'tcx>
) -> Result<
- Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, DropckOutlivesResult<'tcx>>>>,
+ Lrc<Canonical<'tcx, canonical::QueryResponse<'tcx, DropckOutlivesResult<'tcx>>>>,
NoSolution,
>,
[] fn type_op_eq: TypeOpEq(
CanonicalTypeOpEqGoal<'tcx>
) -> Result<
- Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ()>>>,
+ Lrc<Canonical<'tcx, canonical::QueryResponse<'tcx, ()>>>,
NoSolution,
>,
[] fn type_op_subtype: TypeOpSubtype(
CanonicalTypeOpSubtypeGoal<'tcx>
) -> Result<
- Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ()>>>,
+ Lrc<Canonical<'tcx, canonical::QueryResponse<'tcx, ()>>>,
NoSolution,
>,
[] fn type_op_prove_predicate: TypeOpProvePredicate(
CanonicalTypeOpProvePredicateGoal<'tcx>
) -> Result<
- Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ()>>>,
+ Lrc<Canonical<'tcx, canonical::QueryResponse<'tcx, ()>>>,
NoSolution,
>,
[] fn type_op_normalize_ty: TypeOpNormalizeTy(
CanonicalTypeOpNormalizeGoal<'tcx, Ty<'tcx>>
) -> Result<
- Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, Ty<'tcx>>>>,
+ Lrc<Canonical<'tcx, canonical::QueryResponse<'tcx, Ty<'tcx>>>>,
NoSolution,
>,
[] fn type_op_normalize_predicate: TypeOpNormalizePredicate(
CanonicalTypeOpNormalizeGoal<'tcx, ty::Predicate<'tcx>>
) -> Result<
- Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ty::Predicate<'tcx>>>>,
+ Lrc<Canonical<'tcx, canonical::QueryResponse<'tcx, ty::Predicate<'tcx>>>>,
NoSolution,
>,
[] fn type_op_normalize_poly_fn_sig: TypeOpNormalizePolyFnSig(
CanonicalTypeOpNormalizeGoal<'tcx, ty::PolyFnSig<'tcx>>
) -> Result<
- Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ty::PolyFnSig<'tcx>>>>,
+ Lrc<Canonical<'tcx, canonical::QueryResponse<'tcx, ty::PolyFnSig<'tcx>>>>,
NoSolution,
>,
[] fn type_op_normalize_fn_sig: TypeOpNormalizeFnSig(
CanonicalTypeOpNormalizeGoal<'tcx, ty::FnSig<'tcx>>
) -> Result<
- Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ty::FnSig<'tcx>>>>,
+ Lrc<Canonical<'tcx, canonical::QueryResponse<'tcx, ty::FnSig<'tcx>>>>,
NoSolution,
>,
::ty::ClosureKind,
::ty::IntVarValue,
::ty::ParamTy,
+ ::ty::UniverseIndex,
::ty::Variance,
::syntax_pos::Span,
}
impl<'tcx> serialize::UseSpecializedDecodable for &'tcx List<ExistentialPredicate<'tcx>> {}
impl<'tcx> List<ExistentialPredicate<'tcx>> {
- pub fn principal(&self) -> Option<ExistentialTraitRef<'tcx>> {
- match self.get(0) {
- Some(&ExistentialPredicate::Trait(tr)) => Some(tr),
- _ => None,
+ pub fn principal(&self) -> ExistentialTraitRef<'tcx> {
+ match self[0] {
+ ExistentialPredicate::Trait(tr) => tr,
+ other => bug!("first predicate is {:?}", other),
}
}
}
impl<'tcx> Binder<&'tcx List<ExistentialPredicate<'tcx>>> {
- pub fn principal(&self) -> Option<PolyExistentialTraitRef<'tcx>> {
- self.skip_binder().principal().map(Binder::bind)
+ pub fn principal(&self) -> PolyExistentialTraitRef<'tcx> {
+ Binder::bind(self.skip_binder().principal())
}
#[inline]
}
Dynamic(ref obj, region) => {
let mut v = vec![region];
- if let Some(p) = obj.principal() {
- v.extend(p.skip_binder().substs.regions());
- }
+ v.extend(obj.principal().skip_binder().substs.regions());
v
}
Adt(_, substs) | Opaque(_, substs) => {
}
}
-pub type CanonicalSubsts<'gcx> = Canonical<'gcx, &'gcx Substs<'gcx>>;
-
-impl<'gcx> CanonicalSubsts<'gcx> {
- /// True if this represents a substitution like
- ///
- /// ```text
- /// [?0, ?1, ?2]
- /// ```
- ///
- /// i.e., each thing is mapped to a canonical variable with the same index.
- pub fn is_identity(&self) -> bool {
- self.value.iter().zip(CanonicalVar::new(0)..).all(|(kind, cvar)| {
- match kind.unpack() {
- UnpackedKind::Type(ty) => match ty.sty {
- ty::Infer(ty::CanonicalTy(cvar1)) => cvar == cvar1,
- _ => false,
- },
-
- UnpackedKind::Lifetime(r) => match r {
- ty::ReCanonical(cvar1) => cvar == *cvar1,
- _ => false,
- },
- }
- })
- }
-}
-
impl<'tcx> serialize::UseSpecializedDecodable for &'tcx Substs<'tcx> {}
///////////////////////////////////////////////////////////////////////////
self.tcx().mk_region(ty::fold::shift_region(*region, self.region_binders_passed))
}
}
+
+pub type CanonicalUserSubsts<'tcx> = Canonical<'tcx, UserSubsts<'tcx>>;
+
+impl CanonicalUserSubsts<'tcx> {
+ /// True if this represents a substitution like
+ ///
+ /// ```text
+ /// [?0, ?1, ?2]
+ /// ```
+ ///
+ /// i.e., each thing is mapped to a canonical variable with the same index.
+ pub fn is_identity(&self) -> bool {
+ if self.value.user_self_ty.is_some() {
+ return false;
+ }
+
+ self.value.substs.iter().zip(CanonicalVar::new(0)..).all(|(kind, cvar)| {
+ match kind.unpack() {
+ UnpackedKind::Type(ty) => match ty.sty {
+ ty::Infer(ty::CanonicalTy(cvar1)) => cvar == cvar1,
+ _ => false,
+ },
+
+ UnpackedKind::Lifetime(r) => match r {
+ ty::ReCanonical(cvar1) => cvar == *cvar1,
+ _ => false,
+ },
+ }
+ })
+ }
+}
+
+/// Stores the user-given substs to reach some fully qualified path
+/// (e.g., `<T>::Item` or `<T as Trait>::Item`).
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
+pub struct UserSubsts<'tcx> {
+ /// The substitutions for the item as given by the user.
+ pub substs: &'tcx Substs<'tcx>,
+
+ /// The self-type, in the case of a `<T>::Item` path (when applied
+ /// to an inherent impl). See `UserSelfTy` below.
+ pub user_self_ty: Option<UserSelfTy<'tcx>>,
+}
+
+BraceStructTypeFoldableImpl! {
+ impl<'tcx> TypeFoldable<'tcx> for UserSubsts<'tcx> {
+ substs,
+ user_self_ty,
+ }
+}
+
+BraceStructLiftImpl! {
+ impl<'a, 'tcx> Lift<'tcx> for UserSubsts<'a> {
+ type Lifted = UserSubsts<'tcx>;
+ substs,
+ user_self_ty,
+ }
+}
+
+/// Specifies the user-given self-type. In the case of a path that
+/// refers to a member in an inherent impl, this self-type is
+/// sometimes needed to constrain the type parameters on the impl. For
+/// example, in this code:
+///
+/// ```
+/// struct Foo<T> { }
+/// impl<A> Foo<A> { fn method() { } }
+/// ```
+///
+/// when you then have a path like `<Foo<&'static u32>>::method`,
+/// this struct would carry the def-id of the impl along with the
+/// self-type `Foo<&'static u32>`. Then we can instantiate the
+/// parameters of the impl (with the substs from `UserSubsts`) and
+/// apply those to the self-type, giving `Foo<?A>`. Finally, we unify
+/// that with the self-type here, which forces `?A` to be `&'static u32`.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
+pub struct UserSelfTy<'tcx> {
+ pub impl_def_id: DefId,
+ pub self_ty: Ty<'tcx>,
+}
+
+BraceStructTypeFoldableImpl! {
+ impl<'tcx> TypeFoldable<'tcx> for UserSelfTy<'tcx> {
+ impl_def_id,
+ self_ty,
+ }
+}
+
+BraceStructLiftImpl! {
+ impl<'a, 'tcx> Lift<'tcx> for UserSelfTy<'a> {
+ type Lifted = UserSelfTy<'tcx>;
+ impl_def_id,
+ self_ty,
+ }
+}
match (r1, r2) {
(Representability::SelfRecursive(v1),
Representability::SelfRecursive(v2)) => {
- Representability::SelfRecursive(v1.iter().map(|s| *s).chain(v2).collect())
+ Representability::SelfRecursive(v1.into_iter().chain(v2).collect())
}
(r1, r2) => cmp::max(r1, r2)
}
let cause = self.cause(traits::MiscObligation);
let component_traits =
- data.auto_traits().chain(data.principal().map(|p| p.def_id()));
+ data.auto_traits().chain(once(data.principal().def_id()));
self.out.extend(
component_traits.map(|did| traits::Obligation::new(
cause.clone(),
// Use a type that can't appear in defaults of type parameters.
let dummy_self = tcx.mk_infer(ty::FreshTy(0));
- if let Some(p) = self.principal() {
- let principal = tcx.lift(&p).expect("could not lift TraitRef for printing")
- .with_self_ty(tcx, dummy_self);
- let projections = self.projection_bounds().map(|p| {
- tcx.lift(&p)
- .expect("could not lift projection for printing")
- .with_self_ty(tcx, dummy_self)
- }).collect::<Vec<_>>();
- cx.parameterized(f, principal.substs, principal.def_id, &projections)?;
- }
+ let principal = tcx
+ .lift(&self.principal())
+ .expect("could not lift TraitRef for printing")
+ .with_self_ty(tcx, dummy_self);
+ let projections = self.projection_bounds().map(|p| {
+ tcx.lift(&p)
+ .expect("could not lift projection for printing")
+ .with_self_ty(tcx, dummy_self)
+ }).collect::<Vec<_>>();
+ cx.parameterized(f, principal.substs, principal.def_id, &projections)?;
// Builtin bounds.
for did in self.auto_traits() {
// Note that the platform intrinsic ABI is exempt here as
// that's how we connect up to LLVM and it's unstable
// anyway, we control all calls to it in libstd.
- layout::Abi::Vector { .. } if abi != Abi::PlatformIntrinsic => {
+ layout::Abi::Vector { .. }
+ if abi != Abi::PlatformIntrinsic &&
+ cx.sess().target.target.options.simd_types_indirect =>
+ {
arg.make_indirect();
return
}
main_thread_worker_state = MainThreadWorkerState::Idle;
}
Message::Done { result: Err(()), worker_id: _ } => {
- shared_emitter.fatal("aborting due to worker thread failure");
- // Exit the coordinator thread
- return Err(())
+ bug!("worker thread panicked");
}
Message::CodegenItem => {
bug!("the coordinator should not receive codegen requests")
panic!("expected abort due to worker thread errors")
},
Err(_) => {
- sess.fatal("Error during codegen/LLVM phase.");
+ bug!("panic during codegen/LLVM phase");
}
};
/// Cache instances of monomorphic and polymorphic items
pub instances: RefCell<FxHashMap<Instance<'tcx>, &'a Value>>,
/// Cache generated vtables
- pub vtables: RefCell<FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>),
+ pub vtables: RefCell<FxHashMap<(Ty<'tcx>, ty::PolyExistentialTraitRef<'tcx>),
&'a Value>>,
/// Cache of constant strings,
pub const_cstr_cache: RefCell<FxHashMap<LocalInternedString, &'a Value>>,
// But it does not describe the trait's methods.
let containing_scope = match trait_type.sty {
- ty::Dynamic(ref data, ..) => if let Some(principal) = data.principal() {
- let def_id = principal.def_id();
- Some(get_namespace_for_item(cx, def_id))
- } else {
- NO_SCOPE_METADATA
- },
+ ty::Dynamic(ref data, ..) => Some(get_namespace_for_item(cx, data.principal().def_id())),
_ => {
bug!("debuginfo: Unexpected trait-object type in \
trait_pointer_metadata(): {:?}",
}
},
ty::Dynamic(ref trait_data, ..) => {
- if let Some(principal) = trait_data.principal() {
- let principal = cx.tcx.normalize_erasing_late_bound_regions(
- ty::ParamEnv::reveal_all(),
- &principal,
- );
- push_item_name(cx, principal.def_id, false, output);
- push_type_params(cx, principal.substs, output);
- }
+ let principal = cx.tcx.normalize_erasing_late_bound_regions(
+ ty::ParamEnv::reveal_all(),
+ &trait_data.principal(),
+ );
+ push_item_name(cx, principal.def_id, false, output);
+ push_type_params(cx, principal.substs, output);
},
ty::FnDef(..) | ty::FnPtr(_) => {
let sig = t.fn_sig(cx.tcx);
extern { pub type ModuleBuffer; }
extern "C" {
+ pub fn LLVMRustInstallFatalErrorHandler();
+
// Create and destroy contexts.
pub fn LLVMRustContextCreate(shouldDiscardNames: bool) -> &'static mut Context;
pub fn LLVMContextDispose(C: &'static mut Context);
let mut llvm_c_strs = Vec::with_capacity(n_args + 1);
let mut llvm_args = Vec::with_capacity(n_args + 1);
+ llvm::LLVMRustInstallFatalErrorHandler();
+
{
let mut add = |arg: &str| {
let s = CString::new(arg).unwrap();
pub fn get_vtable(
cx: &CodegenCx<'ll, 'tcx>,
ty: Ty<'tcx>,
- trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
+ trait_ref: ty::PolyExistentialTraitRef<'tcx>,
) -> &'ll Value {
let tcx = cx.tcx;
// Not in the cache. Build it.
let nullptr = C_null(Type::i8p(cx));
+ let methods = tcx.vtable_methods(trait_ref.with_self_ty(tcx, ty));
+ let methods = methods.iter().cloned().map(|opt_mth| {
+ opt_mth.map_or(nullptr, |(def_id, substs)| {
+ callee::resolve_and_get_fn(cx, def_id, substs)
+ })
+ });
+
let (size, align) = cx.size_and_align_of(ty);
- let mut components: Vec<_> = [
+ let components: Vec<_> = [
callee::get_fn(cx, monomorphize::resolve_drop_in_place(cx.tcx, ty)),
C_usize(cx, size.bytes()),
C_usize(cx, align.abi())
- ].iter().cloned().collect();
-
- if let Some(trait_ref) = trait_ref {
- let trait_ref = trait_ref.with_self_ty(tcx, ty);
- let methods = tcx.vtable_methods(trait_ref);
- let methods = methods.iter().cloned().map(|opt_mth| {
- opt_mth.map_or(nullptr, |(def_id, substs)| {
- callee::resolve_and_get_fn(cx, def_id, substs)
- })
- });
- components.extend(methods);
- }
+ ].iter().cloned().chain(methods).collect();
let vtable_const = C_struct(cx, &components, false);
let align = cx.data_layout().pointer_align;
log = "0.4"
rustc_cratesio_shim = { path = "../librustc_cratesio_shim" }
serialize = { path = "../libserialize" }
+graphviz = { path = "../libgraphviz" }
cfg-if = "0.1.2"
stable_deref_trait = "1.0.0"
parking_lot_core = "0.2.8"
extern crate rustc_rayon_core as rayon_core;
extern crate rustc_hash;
extern crate serialize;
+extern crate graphviz;
extern crate smallvec;
// See librustc_cratesio_shim/Cargo.toml for a comment explaining this.
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use graphviz as dot;
+use obligation_forest::{ForestObligation, ObligationForest};
+use std::env::var_os;
+use std::fs::File;
+use std::path::Path;
+use std::sync::atomic::AtomicUsize;
+use std::sync::atomic::Ordering;
+
+impl<O: ForestObligation> ObligationForest<O> {
+ /// Create a graphviz representation of the obligation forest. Given a directory this will
+ /// create files with name of the format `<counter>_<description>.gv`. The counter is
+ /// global and is maintained internally.
+ ///
+ /// Calling this will do nothing unless the environment variable
+ /// `DUMP_OBLIGATION_FOREST_GRAPHVIZ` is defined.
+ ///
+    /// A few post-processing steps that make the forest easier to visualize:
+ ///
+ /// * `sed 's,std::[a-z]*::,,g'` — Deletes the `std::<package>::` prefix of paths.
+ /// * `sed 's,"Binder(TraitPredicate(<\(.*\)>)) (\([^)]*\))","\1 (\2)",'` — Transforms
+ /// `Binder(TraitPredicate(<predicate>))` into just `<predicate>`.
+ #[allow(dead_code)]
+ pub fn dump_graphviz<P: AsRef<Path>>(&self, dir: P, description: &str) {
+ static COUNTER: AtomicUsize = AtomicUsize::new(0);
+
+ if var_os("DUMP_OBLIGATION_FOREST_GRAPHVIZ").is_none() {
+ return;
+ }
+
+ let counter = COUNTER.fetch_add(1, Ordering::AcqRel);
+
+ let file_path = dir.as_ref().join(format!("{:010}_{}.gv", counter, description));
+
+ let mut gv_file = File::create(file_path).unwrap();
+
+ dot::render(&self, &mut gv_file).unwrap();
+ }
+}
+
+impl<'a, O: ForestObligation + 'a> dot::Labeller<'a> for &'a ObligationForest<O> {
+ type Node = usize;
+ type Edge = (usize, usize);
+
+ fn graph_id(&self) -> dot::Id {
+ dot::Id::new("trait_obligation_forest").unwrap()
+ }
+
+ fn node_id(&self, index: &Self::Node) -> dot::Id {
+ dot::Id::new(format!("obligation_{}", index)).unwrap()
+ }
+
+ fn node_label(&self, index: &Self::Node) -> dot::LabelText {
+ let node = &self.nodes[*index];
+ let label = format!("{:?} ({:?})", node.obligation.as_predicate(), node.state.get());
+
+ dot::LabelText::LabelStr(label.into())
+ }
+
+ fn edge_label(&self, (_index_source, _index_target): &Self::Edge) -> dot::LabelText {
+ dot::LabelText::LabelStr("".into())
+ }
+}
+
+impl<'a, O: ForestObligation + 'a> dot::GraphWalk<'a> for &'a ObligationForest<O> {
+ type Node = usize;
+ type Edge = (usize, usize);
+
+ fn nodes(&self) -> dot::Nodes<Self::Node> {
+ (0..self.nodes.len()).collect()
+ }
+
+ fn edges(&self) -> dot::Edges<Self::Edge> {
+ (0..self.nodes.len())
+ .flat_map(|i| {
+ let node = &self.nodes[i];
+
+ node.parent.iter().map(|p| p.get())
+ .chain(node.dependents.iter().map(|p| p.get()))
+ .map(move |p| (p, i))
+ })
+ .collect()
+ }
+
+ fn source(&self, (s, _): &Self::Edge) -> Self::Node {
+ *s
+ }
+
+ fn target(&self, (_, t): &Self::Edge) -> Self::Node {
+ *t
+ }
+}
mod node_index;
use self::node_index::NodeIndex;
+mod graphviz;
+
#[cfg(test)]
mod test;
use std::any::Any;
use std::env;
-use std::ffi::{OsStr, OsString};
+use std::ffi::OsString;
use std::fs;
use std::io::{self, Write};
use std::iter;
.cloned()
.collect();
missing_fragment_specifiers.sort();
+
for span in missing_fragment_specifiers {
let lint = lint::builtin::MISSING_FRAGMENT_SPECIFIER;
let msg = "missing fragment specifier";
.collect();
let mut file = fs::File::create(&deps_filename)?;
for path in out_filenames {
- write!(file, "{}: {}\n\n", path.display(), files.join(" "))?;
+ writeln!(file, "{}: {}\n", path.display(), files.join(" "))?;
}
// Emit a fake target for each input file to the compilation. This
Ok(())
})();
- match result {
- Ok(()) => {}
- Err(e) => {
- sess.fatal(&format!(
- "error writing dependencies to `{}`: {}",
- deps_filename.display(),
- e
- ));
- }
+ if let Err(e) = result {
+ sess.fatal(&format!(
+ "error writing dependencies to `{}`: {}",
+ deps_filename.display(),
+ e
+ ));
}
}
Symbol::intern("proc-macro"),
Symbol::intern("bin")
];
+
if let ast::MetaItemKind::NameValue(spanned) = a.meta().unwrap().node {
let span = spanned.span;
let lev_candidate = find_best_match_for_name(
}
None
}
- _ => {
+ None => {
session
.struct_span_err(a.span, "`crate_type` requires a value")
.note("for example: `#![crate_type=\"lib\"]`")
base.push(::rustc_codegen_utils::link::default_output_for_target(
session,
));
+ } else {
+ base.sort();
+ base.dedup();
}
- base.sort();
- base.dedup();
}
- base.into_iter()
- .filter(|crate_type| {
- let res = !::rustc_codegen_utils::link::invalid_output_for_target(session, *crate_type);
+ base.retain(|crate_type| {
+ let res = !::rustc_codegen_utils::link::invalid_output_for_target(session, *crate_type);
- if !res {
- session.warn(&format!(
- "dropping unsupported crate type `{}` for target `{}`",
- *crate_type, session.opts.target_triple
- ));
- }
+ if !res {
+ session.warn(&format!(
+ "dropping unsupported crate type `{}` for target `{}`",
+ *crate_type, session.opts.target_triple
+ ));
+ }
- res
- })
- .collect()
+ res
+ });
+
+ base
}
pub fn compute_crate_disambiguator(session: &Session) -> CrateDisambiguator {
// "-" as input file will cause the parser to read from stdin so we
// have to make up a name
// We want to toss everything after the final '.'
- let dirpath = match *odir {
- Some(ref d) => d.clone(),
- None => PathBuf::new(),
- };
+ let dirpath = (*odir).as_ref().cloned().unwrap_or_default();
// If a crate name is present, we use it as the link name
let stem = sess.opts
.crate_name
.clone()
.or_else(|| attr::find_crate_name(attrs).map(|n| n.to_string()))
- .unwrap_or(input.filestem());
+ .unwrap_or_else(|| input.filestem().to_owned());
OutputFilenames {
out_directory: dirpath,
sess.warn("ignoring -C extra-filename flag due to -o flag");
}
- let cur_dir = Path::new("");
-
OutputFilenames {
- out_directory: out_file.parent().unwrap_or(cur_dir).to_path_buf(),
+ out_directory: out_file.parent().unwrap_or_else(|| Path::new("")).to_path_buf(),
out_filestem: out_file
.file_stem()
- .unwrap_or(OsStr::new(""))
+ .unwrap_or_default()
.to_str()
.unwrap()
.to_string(),
use serialize::json::ToJson;
use std::any::Any;
+use std::borrow::Cow;
use std::cmp::max;
use std::default::Default;
use std::env::consts::{DLL_PREFIX, DLL_SUFFIX};
codegen_backend: &dyn CodegenBackend) {
let tf = Symbol::intern("target_feature");
- for feat in codegen_backend.target_features(sess) {
- cfg.insert((tf, Some(feat)));
- }
+ cfg.extend(codegen_backend.target_features(sess).into_iter().map(|feat| (tf, Some(feat))));
if sess.crt_static_feature() {
cfg.insert((tf, Some(Symbol::intern("crt-static"))));
/// Exit status code used for compilation failures and invalid flags.
pub const EXIT_FAILURE: isize = 1;
-const BUG_REPORT_URL: &'static str = "https://github.com/rust-lang/rust/blob/master/CONTRIBUTING.\
- md#bug-reports";
-
-const ICE_REPORT_COMPILER_FLAGS: &'static [&'static str] = &[
- "Z",
- "C",
- "crate-type",
-];
-const ICE_REPORT_COMPILER_FLAGS_EXCLUDE: &'static [&'static str] = &[
- "metadata",
- "extra-filename",
-];
-const ICE_REPORT_COMPILER_FLAGS_STRIP_VALUE: &'static [&'static str] = &[
- "incremental",
-];
+const BUG_REPORT_URL: &str = "https://github.com/rust-lang/rust/blob/master/CONTRIBUTING.\
+ md#bug-reports";
+
+const ICE_REPORT_COMPILER_FLAGS: &[&str] = &["Z", "C", "crate-type"];
+
+const ICE_REPORT_COMPILER_FLAGS_EXCLUDE: &[&str] = &["metadata", "extra-filename"];
+
+const ICE_REPORT_COMPILER_FLAGS_STRIP_VALUE: &[&str] = &["incremental"];
pub fn abort_on_err<T>(result: Result<T, CompileIncomplete>, sess: &Session) -> T {
match result {
}
None => {
let emitter =
- errors::emitter::EmitterWriter::stderr(errors::ColorConfig::Auto,
- None,
- true,
- false);
+ errors::emitter::EmitterWriter::stderr(
+ errors::ColorConfig::Auto,
+ None,
+ true,
+ false
+ );
let handler = errors::Handler::with_emitter(true, false, Box::new(emitter));
handler.emit(&MultiSpan::new(),
- "aborting due to previous error(s)",
- errors::Level::Fatal);
+ "aborting due to previous error(s)",
+ errors::Level::Fatal);
panic::resume_unwind(Box::new(errors::FatalErrorMarker));
}
}
// available for future dynamic libraries opened. This is currently used by
// loading LLVM and then making its symbols available for other dynamic
// libraries.
- let lib = match DynamicLibrary::open_global_now(path) {
- Ok(lib) => lib,
- Err(err) => {
- let err = format!("couldn't load codegen backend {:?}: {:?}",
- path,
- err);
- early_error(ErrorOutputType::default(), &err);
- }
- };
+ let lib = DynamicLibrary::open_global_now(path).unwrap_or_else(|err| {
+ let err = format!("couldn't load codegen backend {:?}: {:?}", path, err);
+ early_error(ErrorOutputType::default(), &err);
+ });
unsafe {
match lib.symbol("__rustc_codegen_backend") {
Ok(f) => {
let sysroot = sysroot_candidates.iter()
.map(|sysroot| {
let libdir = filesearch::relative_target_lib_path(&sysroot, &target);
- sysroot.join(libdir)
- .with_file_name(option_env!("CFG_CODEGEN_BACKENDS_DIR")
- .unwrap_or("codegen-backends"))
+ sysroot.join(libdir).with_file_name(
+ option_env!("CFG_CODEGEN_BACKENDS_DIR").unwrap_or("codegen-backends"))
})
.filter(|f| {
info!("codegen backend candidate: {}", f.display());
f.exists()
})
.next();
- let sysroot = match sysroot {
- Some(path) => path,
- None => {
- let candidates = sysroot_candidates.iter()
- .map(|p| p.display().to_string())
- .collect::<Vec<_>>()
- .join("\n* ");
- let err = format!("failed to find a `codegen-backends` folder \
- in the sysroot candidates:\n* {}", candidates);
- early_error(ErrorOutputType::default(), &err);
- }
- };
+ let sysroot = sysroot.unwrap_or_else(|| {
+ let candidates = sysroot_candidates.iter()
+ .map(|p| p.display().to_string())
+ .collect::<Vec<_>>()
+ .join("\n* ");
+ let err = format!("failed to find a `codegen-backends` folder \
+ in the sysroot candidates:\n* {}", candidates);
+ early_error(ErrorOutputType::default(), &err);
+ });
info!("probing {} for a codegen backend", sysroot.display());
- let d = match sysroot.read_dir() {
- Ok(d) => d,
- Err(e) => {
- let err = format!("failed to load default codegen backend, couldn't \
- read `{}`: {}", sysroot.display(), e);
- early_error(ErrorOutputType::default(), &err);
- }
- };
+ let d = sysroot.read_dir().unwrap_or_else(|e| {
+ let err = format!("failed to load default codegen backend, couldn't \
+ read `{}`: {}", sysroot.display(), e);
+ early_error(ErrorOutputType::default(), &err);
+ });
let mut file: Option<PathBuf> = None;
}
if let Some(ref prev) = file {
let err = format!("duplicate codegen backends found\n\
- first: {}\n\
- second: {}\n\
+ first: {}\n\
+ second: {}\n\
", prev.display(), path.display());
early_error(ErrorOutputType::default(), &err);
}
None => {
let err = format!("failed to load default codegen backend for `{}`, \
no appropriate codegen dylib found in `{}`",
- backend_name, sysroot.display());
+ backend_name, sysroot.display());
early_error(ErrorOutputType::default(), &err);
}
}
unsafe {
// Set the SIGPIPE signal handler, so that an EPIPE
// will cause rustc to terminate, as expected.
- assert!(libc::signal(libc::SIGPIPE, libc::SIG_DFL) != libc::SIG_ERR);
+ assert_ne!(libc::signal(libc::SIGPIPE, libc::SIG_DFL), libc::SIG_ERR);
}
}
input: &Input)
-> Compilation {
let r = matches.opt_strs("Z");
- if r.contains(&("ls".to_string())) {
+ if r.iter().any(|s| *s == "ls") {
match input {
&Input::File(ref ifile) => {
let path = &(*ifile);
return Compilation::Stop;
}
- return Compilation::Continue;
+ Compilation::Continue
}
use rustc::session::config::PrintRequest::*;
// PrintRequest::NativeStaticLibs is special - printed during linking
// (empty iterator returns true)
- if sess.opts.prints.iter().all(|&p| p==PrintRequest::NativeStaticLibs) {
+ if sess.opts.prints.iter().all(|&p| p == PrintRequest::NativeStaticLibs) {
return Compilation::Continue;
}
Sysroot => println!("{}", sess.sysroot().display()),
TargetSpec => println!("{}", sess.target.target.to_json().pretty()),
FileNames | CrateName => {
- let input = match input {
- Some(input) => input,
- None => early_error(ErrorOutputType::default(), "no input file provided"),
- };
+ let input = input.unwrap_or_else(||
+ early_error(ErrorOutputType::default(), "no input file provided"));
let attrs = attrs.as_ref().unwrap();
let t_outputs = driver::build_output_filenames(input, odir, ofile, attrs, sess);
let id = rustc_codegen_utils::link::find_crate_name(Some(sess), attrs, input);
&id,
&t_outputs
);
- println!("{}",
- fname.file_name()
- .unwrap()
- .to_string_lossy());
+ println!("{}", fname.file_name().unwrap().to_string_lossy());
}
}
Cfg => {
let allow_unstable_cfg = UnstableFeatures::from_environment()
.is_nightly_build();
- let mut cfgs = Vec::new();
- for &(name, ref value) in sess.parse_sess.config.iter() {
+ let mut cfgs = sess.parse_sess.config.iter().filter_map(|&(name, ref value)| {
let gated_cfg = GatedCfg::gate(&ast::MetaItem {
ident: ast::Path::from_ident(ast::Ident::with_empty_ctxt(name)),
node: ast::MetaItemKind::Word,
let value = value.as_ref().map(|s| s.as_ref());
if name != "target_feature" || value != Some("crt-static") {
if !allow_unstable_cfg && gated_cfg.is_some() {
- continue;
+ return None
}
}
- cfgs.push(if let Some(value) = value {
- format!("{}=\"{}\"", name, value)
+ if let Some(value) = value {
+ Some(format!("{}=\"{}\"", name, value))
} else {
- name.to_string()
- });
- }
+ Some(name.to_string())
+ }
+ }).collect::<Vec<String>>();
cfgs.sort();
for cfg in cfgs {
pub fn version(binary: &str, matches: &getopts::Matches) {
let verbose = matches.opt_present("verbose");
- println!("{} {}",
- binary,
- option_env!("CFG_VERSION").unwrap_or("unknown version"));
+ println!("{} {}", binary, option_env!("CFG_VERSION").unwrap_or("unknown version"));
+
if verbose {
fn unw(x: Option<&str>) -> &str {
x.unwrap_or("unknown")
for option in groups.iter().filter(|x| include_unstable_options || x.is_stable()) {
(option.apply)(&mut options);
}
- let message = "Usage: rustc [OPTIONS] INPUT".to_string();
+ let message = "Usage: rustc [OPTIONS] INPUT";
let nightly_help = if nightly_options::is_nightly_build() {
"\n -Z help Print internal options for debugging rustc"
} else {
-C help Print codegen options
-W help \
Print 'lint' options and default settings{}{}\n",
- options.usage(&message),
+ options.usage(message),
nightly_help,
verbose_help);
}
print_lints(builtin);
-
-
let max_name_len = max("warnings".len(),
plugin_groups.iter()
.chain(&builtin_groups)
for option in config::rustc_optgroups() {
(option.apply)(&mut options);
}
- let matches = match options.parse(args) {
- Ok(m) => m,
- Err(f) => early_error(ErrorOutputType::default(), &f.to_string()),
- };
+ let matches = options.parse(args).unwrap_or_else(|f|
+ early_error(ErrorOutputType::default(), &f.to_string()));
// For all options we just parsed, we check a few aspects:
//
}
let cg_flags = matches.opt_strs("C");
+
if cg_flags.iter().any(|x| *x == "help") {
describe_codegen_flags();
return None;
"the --no-stack-check flag is deprecated and does nothing");
}
- if cg_flags.contains(&"passes=list".to_string()) {
+ if cg_flags.iter().any(|x| *x == "passes=list") {
get_codegen_sysroot("llvm")().print_passes();
return None;
}
// Temporarily have stack size set to 16MB to deal with nom-using crates failing
const STACK_SIZE: usize = 16 * 1024 * 1024; // 16MB
- #[cfg(all(unix,not(target_os = "haiku")))]
+ #[cfg(all(unix, not(target_os = "haiku")))]
let spawn_thread = unsafe {
// Fetch the current resource limits
let mut rlim = libc::rlimit {
}
};
- #[cfg(not(any(windows,unix)))]
+ #[cfg(not(any(windows, unix)))]
let spawn_thread = true;
// The or condition is added from backward compatibility.
}
}
- if result.len() > 0 {
+ if !result.is_empty() {
Some((result, excluded_cargo_defaults))
} else {
None
errors::Level::Bug);
}
- let mut xs = vec![
- "the compiler unexpectedly panicked. this is a bug.".to_string(),
- format!("we would appreciate a bug report: {}", BUG_REPORT_URL),
+ let mut xs: Vec<Cow<'static, str>> = vec![
+ "the compiler unexpectedly panicked. this is a bug.".into(),
+ format!("we would appreciate a bug report: {}", BUG_REPORT_URL).into(),
format!("rustc {} running on {}",
option_env!("CFG_VERSION").unwrap_or("unknown_version"),
- config::host_triple()),
+ config::host_triple()).into(),
];
if let Some((flags, excluded_cargo_defaults)) = extra_compiler_flags() {
- xs.push(format!("compiler flags: {}", flags.join(" ")));
+ xs.push(format!("compiler flags: {}", flags.join(" ")).into());
if excluded_cargo_defaults {
- xs.push("some of the compiler flags provided by cargo are hidden".to_string());
+ xs.push("some of the compiler flags provided by cargo are hidden".into());
}
}
for note in &xs {
handler.emit(&MultiSpan::new(),
- ¬e,
+ note,
errors::Level::Note);
}
impl PpSourceMode {
/// Constructs a `PrinterSupport` object and passes it to `f`.
fn call_with_pp_support<'tcx, A, F>(&self,
- sess: &'tcx Session,
- hir_map: Option<&hir_map::Map<'tcx>>,
- f: F)
- -> A
+ sess: &'tcx Session,
+ hir_map: Option<&hir_map::Map<'tcx>>,
+ f: F)
+ -> A
where F: FnOnce(&dyn PrinterSupport) -> A
{
match *self {
_ => panic!("Should use call_with_pp_support_hir"),
}
}
- fn call_with_pp_support_hir<'tcx, A, F>(&self,
- sess: &'tcx Session,
- cstore: &'tcx CStore,
- hir_map: &hir_map::Map<'tcx>,
- analysis: &ty::CrateAnalysis,
- resolutions: &Resolutions,
- arenas: &'tcx AllArenas<'tcx>,
- output_filenames: &OutputFilenames,
- id: &str,
- f: F)
- -> A
+ fn call_with_pp_support_hir<'tcx, A, F>(
+ &self,
+ sess: &'tcx Session,
+ cstore: &'tcx CStore,
+ hir_map: &hir_map::Map<'tcx>,
+ analysis: &ty::CrateAnalysis,
+ resolutions: &Resolutions,
+ arenas: &'tcx AllArenas<'tcx>,
+ output_filenames: &OutputFilenames,
+ id: &str,
+ f: F
+ ) -> A
where F: FnOnce(&dyn HirPrinterSupport, &hir::Crate) -> A
{
match *self {
break n.body();
}
let parent = tcx.hir.get_parent_node(node_id);
- assert!(node_id != parent);
+ assert_ne!(node_id, parent);
node_id = parent;
}
}
// Silently ignores an identified node.
let out: &mut dyn Write = &mut out;
s.call_with_pp_support(sess, None, move |annotation| {
- debug!("pretty printing source code {:?}", s);
- let sess = annotation.sess();
- pprust::print_crate(sess.source_map(),
- &sess.parse_sess,
- krate,
- src_name,
- &mut rdr,
- box out,
- annotation.pp_ann(),
- false)
- })
- .unwrap()
+ debug!("pretty printing source code {:?}", s);
+ let sess = annotation.sess();
+ pprust::print_crate(sess.source_map(),
+ &sess.parse_sess,
+ krate,
+ src_name,
+ &mut rdr,
+ box out,
+ annotation.pp_ann(),
+ false)
+ }).unwrap()
} else {
unreachable!();
};
use std::sync::mpsc::{channel};
let (tx, rx) = channel();
if profq_set_chan(sess, tx) {
- thread::spawn(move||profile_queries_thread(rx));
+ thread::spawn(move || profile_queries_thread(rx));
}
}
pub fn dump(sess: &Session, path: String) {
use std::sync::mpsc::{channel};
let (tx, rx) = channel();
- let params = ProfQDumpParams{
- path, ack:tx,
+ let params = ProfQDumpParams {
+ path,
+ ack: tx,
// FIXME: Add another compiler flag to toggle whether this log
// is written; false for now
- dump_profq_msg_log:true,
+ dump_profq_msg_log: true,
};
profq_msg(sess, ProfileQueriesMsg::Dump(params));
let _ = rx.recv().unwrap();
}
fn total_duration(traces: &[trace::Rec]) -> Duration {
- let mut sum : Duration = Duration::new(0,0);
+ let mut sum : Duration = Duration::new(0, 0);
for t in traces.iter() { sum += t.dur_total; }
return sum
}
// profiling thread; retains state (in local variables) and dump traces, upon request.
-fn profile_queries_thread(r:Receiver<ProfileQueriesMsg>) {
+fn profile_queries_thread(r: Receiver<ProfileQueriesMsg>) {
use self::trace::*;
use std::fs::File;
use std::time::{Instant};
- let mut profq_msgs : Vec<ProfileQueriesMsg> = vec![];
- let mut frame : StackFrame = StackFrame{ parse_st:ParseState::Clear, traces:vec![] };
- let mut stack : Vec<StackFrame> = vec![];
+ let mut profq_msgs: Vec<ProfileQueriesMsg> = vec![];
+ let mut frame: StackFrame = StackFrame { parse_st: ParseState::Clear, traces: vec![] };
+ let mut stack: Vec<StackFrame> = vec![];
loop {
let msg = r.recv();
if let Err(_recv_err) = msg {
match msg {
ProfileQueriesMsg::Halt => return,
ProfileQueriesMsg::Dump(params) => {
- assert!(stack.len() == 0);
+ assert!(stack.is_empty());
assert!(frame.parse_st == ParseState::Clear);
{
// write log of all messages
let counts_path = format!("{}.counts.txt", params.path);
let mut counts_file = File::create(&counts_path).unwrap();
- write!(html_file, "<html>\n").unwrap();
- write!(html_file,
- "<head>\n<link rel=\"stylesheet\" type=\"text/css\" href=\"{}\">\n",
- "profile_queries.css").unwrap();
- write!(html_file, "<style>\n").unwrap();
+ writeln!(html_file,
+ "<html>\n<head>\n<link rel=\"stylesheet\" type=\"text/css\" href=\"{}\">",
+ "profile_queries.css").unwrap();
+ writeln!(html_file, "<style>").unwrap();
trace::write_style(&mut html_file);
- write!(html_file, "</style>\n").unwrap();
- write!(html_file, "</head>\n").unwrap();
- write!(html_file, "<body>\n").unwrap();
+ writeln!(html_file, "</style>\n</head>\n<body>").unwrap();
trace::write_traces(&mut html_file, &mut counts_file, &frame.traces);
- write!(html_file, "</body>\n</html>\n").unwrap();
+ writeln!(html_file, "</body>\n</html>").unwrap();
let ack_path = format!("{}.ack", params.path);
let ack_file = File::create(&ack_path).unwrap();
// Parse State: Clear
(ParseState::Clear,
- ProfileQueriesMsg::QueryBegin(span,querymsg)) => {
+ ProfileQueriesMsg::QueryBegin(span, querymsg)) => {
let start = Instant::now();
frame.parse_st = ParseState::HaveQuery
- (Query{span:span, msg:querymsg}, start)
+ (Query { span, msg: querymsg }, start)
},
(ParseState::Clear,
ProfileQueriesMsg::CacheHit) => {
frame = StackFrame{parse_st:ParseState::Clear, traces:vec![]};
},
- //
- //
// Parse errors:
(ParseState::HaveQuery(q,_),
unreachable!()
},
}
-
}
}
}
pub dur_total: Duration,
}
+fn cons(s: &str) -> String {
+ let first = s.split(|d| d == '(' || d == '{').next();
+ assert!(first.is_some() && first != Some(""));
+ first.unwrap().to_owned()
+}
+
pub fn cons_of_query_msg(q: &trace::Query) -> String {
- let s = format!("{:?}", q.msg);
- let cons: Vec<&str> = s.split(|d| d == '(' || d == '{').collect();
- assert!(cons.len() > 0 && cons[0] != "");
- cons[0].to_string()
+ cons(&format!("{:?}", q.msg))
}
pub fn cons_of_key(k: &DepNode) -> String {
- let s = format!("{:?}", k);
- let cons: Vec<&str> = s.split(|d| d == '(' || d == '{').collect();
- assert!(cons.len() > 0 && cons[0] != "");
- cons[0].to_string()
+ cons(&format!("{:?}", k))
}
// First return value is text; second return value is a CSS class
// First return value is text; second return value is a CSS class
fn html_of_duration(_start: &Instant, dur: &Duration) -> (String, String) {
use rustc::util::common::duration_to_secs_str;
- (duration_to_secs_str(dur.clone()),
- String::new()
- )
+ (duration_to_secs_str(dur.clone()), String::new())
}
-fn html_of_fraction(frac: f64) -> (String, String) {
+fn html_of_fraction(frac: f64) -> (String, &'static str) {
let css = {
- if frac > 0.50 { "frac-50".to_string() }
- else if frac > 0.40 { "frac-40".to_string() }
- else if frac > 0.30 { "frac-30".to_string() }
- else if frac > 0.20 { "frac-20".to_string() }
- else if frac > 0.10 { "frac-10".to_string() }
- else if frac > 0.05 { "frac-05".to_string() }
- else if frac > 0.02 { "frac-02".to_string() }
- else if frac > 0.01 { "frac-01".to_string() }
- else if frac > 0.001 { "frac-001".to_string() }
- else { "frac-0".to_string() }
+ if frac > 0.50 { "frac-50" }
+ else if frac > 0.40 { "frac-40" }
+ else if frac > 0.30 { "frac-30" }
+ else if frac > 0.20 { "frac-20" }
+ else if frac > 0.10 { "frac-10" }
+ else if frac > 0.05 { "frac-05" }
+ else if frac > 0.02 { "frac-02" }
+ else if frac > 0.01 { "frac-01" }
+ else if frac > 0.001 { "frac-001" }
+ else { "frac-0" }
};
let percent = frac * 100.0;
- if percent > 0.1 { (format!("{:.1}%", percent), css) }
- else { ("< 0.1%".to_string(), css) }
+
+ if percent > 0.1 {
+ (format!("{:.1}%", percent), css)
+ } else {
+ ("< 0.1%".to_string(), css)
+ }
}
fn total_duration(traces: &[Rec]) -> Duration {
- let mut sum : Duration = Duration::new(0,0);
- for t in traces.iter() {
- sum += t.dur_total;
- }
- return sum
+ Duration::new(0, 0) + traces.iter().map(|t| t.dur_total).sum()
}
fn duration_div(nom: Duration, den: Duration) -> f64 {
let fraction = duration_div(t.dur_total, total);
let percent = fraction * 100.0;
let (frc_text, frc_css_classes) = html_of_fraction(fraction);
- write!(file, "<div class=\"trace depth-{} extent-{}{} {} {} {}\">\n",
- depth,
- t.extent.len(),
- /* Heuristic for 'important' CSS class: */
- if t.extent.len() > 5 || percent >= 1.0 {
- " important" }
- else { "" },
- eff_css_classes,
- dur_css_classes,
- frc_css_classes,
+ writeln!(file, "<div class=\"trace depth-{} extent-{}{} {} {} {}\">",
+ depth,
+ t.extent.len(),
+ /* Heuristic for 'important' CSS class: */
+ if t.extent.len() > 5 || percent >= 1.0 { " important" } else { "" },
+ eff_css_classes,
+ dur_css_classes,
+ frc_css_classes,
).unwrap();
- write!(file, "<div class=\"eff\">{}</div>\n", eff_text).unwrap();
- write!(file, "<div class=\"dur\">{}</div>\n", dur_text).unwrap();
- write!(file, "<div class=\"frc\">{}</div>\n", frc_text).unwrap();
+ writeln!(file, "<div class=\"eff\">{}</div>", eff_text).unwrap();
+ writeln!(file, "<div class=\"dur\">{}</div>", dur_text).unwrap();
+ writeln!(file, "<div class=\"frc\">{}</div>", frc_text).unwrap();
write_traces_rec(file, &t.extent, total, depth + 1);
- write!(file, "</div>\n").unwrap();
+ writeln!(file, "</div>").unwrap();
}
}
fn compute_counts_rec(counts: &mut FxHashMap<String,QueryMetric>, traces: &[Rec]) {
+ counts.reserve(traces.len());
for t in traces.iter() {
match t.effect {
Effect::TimeBegin(ref msg) => {
let qm = match counts.get(msg) {
- Some(_qm) => { panic!("TimeBegin with non-unique, repeat message") }
- None => QueryMetric{
+ Some(_qm) => panic!("TimeBegin with non-unique, repeat message"),
+ None => QueryMetric {
count: 1,
dur_self: t.dur_self,
dur_total: t.dur_total,
- }};
+ }
+ };
counts.insert(msg.clone(), qm);
},
Effect::TaskBegin(ref key) => {
let cons = cons_of_key(key);
let qm = match counts.get(&cons) {
Some(qm) =>
- QueryMetric{
+ QueryMetric {
count: qm.count + 1,
dur_self: qm.dur_self + t.dur_self,
dur_total: qm.dur_total + t.dur_total,
},
- None => QueryMetric{
+ None => QueryMetric {
count: 1,
dur_self: t.dur_self,
dur_total: t.dur_total,
- }};
+ }
+ };
counts.insert(cons, qm);
},
Effect::QueryBegin(ref qmsg, ref _cc) => {
let qcons = cons_of_query_msg(qmsg);
let qm = match counts.get(&qcons) {
Some(qm) =>
- QueryMetric{
+ QueryMetric {
count: qm.count + 1,
dur_total: qm.dur_total + t.dur_total,
dur_self: qm.dur_self + t.dur_self
},
- None => QueryMetric{
+ None => QueryMetric {
count: 1,
dur_total: t.dur_total,
dur_self: t.dur_self,
}
}
-pub fn write_counts(count_file: &mut File, counts: &mut FxHashMap<String,QueryMetric>) {
+pub fn write_counts(count_file: &mut File, counts: &mut FxHashMap<String, QueryMetric>) {
use rustc::util::common::duration_to_secs_str;
use std::cmp::Reverse;
let mut data = counts.iter().map(|(ref cons, ref qm)|
(cons.clone(), qm.count.clone(), qm.dur_total.clone(), qm.dur_self.clone())
).collect::<Vec<_>>();
+
data.sort_by_key(|k| Reverse(k.3));
for (cons, count, dur_total, dur_self) in data {
- write!(count_file, "{}, {}, {}, {}\n",
- cons, count,
- duration_to_secs_str(dur_total),
- duration_to_secs_str(dur_self)
+ writeln!(count_file, "{}, {}, {}, {}",
+ cons, count,
+ duration_to_secs_str(dur_total),
+ duration_to_secs_str(dur_self)
).unwrap();
}
}
compute_counts_rec(&mut counts, traces);
write_counts(counts_file, &mut counts);
- let total : Duration = total_duration(traces);
+ let total: Duration = total_duration(traces);
write_traces_rec(html_file, traces, total, 0)
}
pub fn write_style(html_file: &mut File) {
- write!(html_file,"{}", "
+ write!(html_file, "{}", "
body {
font-family: sans-serif;
background: black;
($sess:ident, $($name:ident),*,) => (
{$(
store.register_early_pass($sess, false, box $name);
- )*}
- )
+ )*}
+ )
}
macro_rules! add_pre_expansion_builtin {
($sess:ident, $($name:ident),*,) => (
{$(
store.register_pre_expansion_pass($sess, box $name);
- )*}
- )
+ )*}
+ )
}
macro_rules! add_early_builtin_with_new {
($sess:ident, $($name:ident),*,) => (
{$(
store.register_early_pass($sess, false, box $name::new());
- )*}
- )
+ )*}
+ )
}
macro_rules! add_lint_group {
($sess:ident, $name:expr, $($lint:ident),*) => (
store.register_group($sess, false, $name, None, vec![$(LintId::of($lint)),*]);
- )
+ )
}
add_pre_expansion_builtin!(sess,
store.register_late_pass(sess, false, box BuiltinCombinedLateLintPass::new());
- add_lint_group!(sess,
- "bad_style",
- NON_CAMEL_CASE_TYPES,
- NON_SNAKE_CASE,
- NON_UPPER_CASE_GLOBALS);
-
add_lint_group!(sess,
"nonstandard_style",
NON_CAMEL_CASE_TYPES,
store.register_removed("unsigned_negation", "replaced by negate_unsigned feature gate");
store.register_removed("negate_unsigned", "cast a signed value instead");
store.register_removed("raw_pointer_derive", "using derive with raw pointers is ok");
+ // Register lint group aliases
+ store.register_group_alias("nonstandard_style", "bad_style");
// This was renamed to raw_pointer_derive, which was then removed,
// so it is also considered removed
store.register_removed("raw_pointer_deriving", "using derive with raw pointers is ok");
match callee.node {
hir::ExprKind::Path(ref qpath) => {
let def = cx.tables.qpath_def(qpath, callee.hir_id);
- if let Def::Fn(_) = def {
- Some(def)
- } else { // `Def::Local` if it was a closure, for which we
- None // do not currently support must-use linting
+ match def {
+ Def::Fn(_) | Def::Method(_) => Some(def),
+ // `Def::Local` if it was a closure, for which we
+ // do not currently support must-use linting
+ _ => None
}
},
_ => None
if let Some(must_use_op) = must_use_op {
cx.span_lint(UNUSED_MUST_USE, expr.span,
- &format!("unused {} which must be used", must_use_op));
+ &format!("unused {} that must be used", must_use_op));
op_warned = true;
}
fn check_must_use(cx: &LateContext, def_id: DefId, sp: Span, describe_path: &str) -> bool {
for attr in cx.tcx.get_attrs(def_id).iter() {
if attr.check_name("must_use") {
- let msg = format!("unused {}`{}` which must be used",
+ let msg = format!("unused {}`{}` that must be used",
describe_path, cx.tcx.item_path_str(def_id));
let mut err = cx.struct_span_lint(UNUSED_MUST_USE, sp, &msg);
// check for #[must_use = "..."]
pub struct UnusedParens;
impl UnusedParens {
- fn check_unused_parens_core(&self,
+ fn check_unused_parens_expr(&self,
cx: &EarlyContext,
value: &ast::Expr,
msg: &str,
let necessary = struct_lit_needs_parens &&
parser::contains_exterior_struct_lit(&inner);
if !necessary {
- let span_msg = format!("unnecessary parentheses around {}", msg);
- let mut err = cx.struct_span_lint(UNUSED_PARENS,
- value.span,
- &span_msg);
- // Remove exactly one pair of parentheses (rather than naïvely
- // stripping all paren characters)
- let mut ate_left_paren = false;
- let mut ate_right_paren = false;
- let parens_removed = pprust::expr_to_string(value)
- .trim_matches(|c| {
- match c {
- '(' => {
- if ate_left_paren {
- false
- } else {
- ate_left_paren = true;
- true
- }
- },
- ')' => {
- if ate_right_paren {
- false
- } else {
- ate_right_paren = true;
- true
- }
- },
- _ => false,
- }
- }).to_owned();
- err.span_suggestion_short_with_applicability(
- value.span,
- "remove these parentheses",
- parens_removed,
- Applicability::MachineApplicable
- );
- err.emit();
+ let pattern = pprust::expr_to_string(value);
+ Self::remove_outer_parens(cx, value.span, &pattern, msg);
}
}
}
+
+ fn check_unused_parens_pat(&self,
+ cx: &EarlyContext,
+ value: &ast::Pat,
+ msg: &str) {
+ if let ast::PatKind::Paren(_) = value.node {
+ let pattern = pprust::pat_to_string(value);
+ Self::remove_outer_parens(cx, value.span, &pattern, msg);
+ }
+ }
+
+ fn remove_outer_parens(cx: &EarlyContext, span: Span, pattern: &str, msg: &str) {
+ let span_msg = format!("unnecessary parentheses around {}", msg);
+ let mut err = cx.struct_span_lint(UNUSED_PARENS, span, &span_msg);
+ let mut ate_left_paren = false;
+ let mut ate_right_paren = false;
+ let parens_removed = pattern
+ .trim_matches(|c| {
+ match c {
+ '(' => {
+ if ate_left_paren {
+ false
+ } else {
+ ate_left_paren = true;
+ true
+ }
+ },
+ ')' => {
+ if ate_right_paren {
+ false
+ } else {
+ ate_right_paren = true;
+ true
+ }
+ },
+ _ => false,
+ }
+ }).to_owned();
+ err.span_suggestion_short_with_applicability(
+ span,
+ "remove these parentheses",
+ parens_removed,
+ Applicability::MachineApplicable
+ );
+ err.emit();
+ }
}
impl LintPass for UnusedParens {
// first "argument" is self (which sometimes needs parens)
MethodCall(_, ref args) => (&args[1..], "method"),
// actual catch-all arm
- _ => { return; }
+ _ => {
+ return;
+ }
};
// Don't lint if this is a nested macro expansion: otherwise, the lint could
// trigger in situations that macro authors shouldn't have to care about, e.g.,
}
let msg = format!("{} argument", call_kind);
for arg in args_to_check {
- self.check_unused_parens_core(cx, arg, &msg, false);
+ self.check_unused_parens_expr(cx, arg, &msg, false);
}
return;
}
};
- self.check_unused_parens_core(cx, &value, msg, struct_lit_needs_parens);
+ self.check_unused_parens_expr(cx, &value, msg, struct_lit_needs_parens);
+ }
+
+ fn check_pat(&mut self, cx: &EarlyContext, p: &ast::Pat) {
+ use ast::PatKind::{Paren, Range};
+ // The lint visitor will visit each subpattern of `p`. We do not want to lint any range
+ // pattern no matter where it occurs in the pattern. For something like `&(a..=b)`, there
+ // is a recursive `check_pat` on `a` and `b`, but we will assume that if there are
+ // unnecessary parens they serve a purpose of readability.
+ if let Paren(ref pat) = p.node {
+ match pat.node {
+ Range(..) => {}
+ _ => self.check_unused_parens_pat(cx, &p, "pattern")
+ }
+ }
}
fn check_stmt(&mut self, cx: &EarlyContext, s: &ast::Stmt) {
if let ast::StmtKind::Local(ref local) = s.node {
if let Some(ref value) = local.init {
- self.check_unused_parens_core(cx, &value, "assigned value", false);
+ self.check_unused_parens_expr(cx, &value, "assigned value", false);
}
}
}
use rustc::mir::visit::Visitor;
use rustc::mir::{BasicBlock, BasicBlockData, Location, Mir, Place, Rvalue};
use rustc::mir::{Statement, Terminator};
+use rustc::mir::UserTypeAnnotation;
use rustc::ty::fold::TypeFoldable;
use rustc::ty::subst::Substs;
-use rustc::ty::{self, CanonicalTy, ClosureSubsts, GeneratorSubsts, RegionVid};
+use rustc::ty::{self, ClosureSubsts, GeneratorSubsts, RegionVid};
pub(super) fn generate_constraints<'cx, 'gcx, 'tcx>(
infcx: &InferCtxt<'cx, 'gcx, 'tcx>,
&mut self,
_place: &Place<'tcx>,
_variance: &ty::Variance,
- _c_ty: &CanonicalTy<'tcx>,
+ _user_ty: &UserTypeAnnotation<'tcx>,
_location: Location,
) {
}
/// Which universe is this region variable defined in? This is
/// most often `ty::UniverseIndex::ROOT`, but when we encounter
/// forall-quantifiers like `for<'a> { 'a = 'b }`, we would create
- /// the variable for `'a` in a subuniverse.
+ /// the variable for `'a` in a fresh universe that extends ROOT.
universe: ty::UniverseIndex,
/// If this is 'static or an early-bound region, then this is
NLLRegionVariableOrigin::Placeholder(placeholder) => {
// Each placeholder region is only visible from
- // its universe `ui` and its superuniverses. So we
+ // its universe `ui` and its extensions. So we
// can't just add it into `scc` unless the
// universe of the scc can name this region.
let scc_universe = self.scc_universes[scc];
- if placeholder.universe.is_subset_of(scc_universe) {
+ if scc_universe.can_name(placeholder.universe) {
self.scc_values.add_element(scc, placeholder);
} else {
self.add_incompatible_universe(scc);
// Quick check: if scc_b's declared universe is a subset of
// scc_a's declared univese (typically, both are ROOT), then
// it cannot contain any problematic universe elements.
- if self.scc_universes[scc_b].is_subset_of(universe_a) {
+ if universe_a.can_name(self.scc_universes[scc_b]) {
return true;
}
// from universe_a
self.scc_values
.placeholders_contained_in(scc_b)
- .all(|p| p.universe.is_subset_of(universe_a))
+ .all(|p| universe_a.can_name(p.universe))
}
/// Extend `scc` so that it can outlive some placeholder region
/// a lifetime parameter).
RootUniversalRegion(RegionVid),
- /// A subuniverse from a subuniverse (e.g., instantiated from a
- /// `for<'a> fn(&'a u32)` type).
+ /// A placeholder (e.g., instantiated from a `for<'a> fn(&'a u32)`
+ /// type).
PlaceholderRegion(ty::Placeholder),
}
}
}
-impl ::std::iter::FromIterator<ty::Placeholder> for PlaceholderIndices {
- fn from_iter<I>(iter: I) -> Self
- where
- I: IntoIterator<Item = ty::Placeholder>,
- {
- let mut result = Self::default();
- iter.into_iter().for_each(|p| {
- result.insert(p);
- });
- result
- }
-}
-
/// Stores the full values for a set of regions (in contrast to
/// `LivenessValues`, which only stores those points in the where a
/// region is live). The full value for a region may contain points in
// except according to those terms.
use rustc::ty::subst::Substs;
-use rustc::ty::{self, CanonicalTy, ClosureSubsts, GeneratorSubsts, Ty, TypeFoldable};
-use rustc::mir::{BasicBlock, Location, Mir, Statement, StatementKind};
+use rustc::ty::{self, ClosureSubsts, GeneratorSubsts, Ty, TypeFoldable};
+use rustc::mir::{BasicBlock, Location, Mir, Statement, StatementKind, UserTypeAnnotation};
use rustc::mir::visit::{MutVisitor, TyContext};
use rustc::infer::{InferCtxt, NLLRegionVariableOrigin};
debug!("visit_ty: ty={:?}", ty);
}
- fn visit_user_ty(&mut self, _ty: &mut CanonicalTy<'tcx>) {
- // `user_ty` annotations represent the types that the user
+ fn visit_user_type_annotation(&mut self, _ty: &mut UserTypeAnnotation<'tcx>) {
+ // User type annotations represent the types that the user
// wrote in the progarm. We don't want to erase the regions
// from these types: rather, we want to add them as
// constraints at type-check time.
- debug!("visit_user_ty: skipping renumber");
+ debug!("visit_user_type_annotation: skipping renumber");
}
fn visit_substs(&mut self, substs: &mut &'tcx Substs<'tcx>, location: Location) {
use rustc::traits::{ObligationCause, PredicateObligations};
use rustc::ty::fold::TypeFoldable;
use rustc::ty::subst::{Subst, UnpackedKind};
-use rustc::ty::{self, CanonicalTy, RegionVid, ToPolyTraitRef, Ty, TyCtxt, TyKind};
+use rustc::ty::{self, RegionVid, ToPolyTraitRef, Ty, TyCtxt, TyKind};
use std::rc::Rc;
use std::{fmt, iter};
use syntax_pos::{Span, DUMMY_SP};
&mut self,
a: Ty<'tcx>,
v: ty::Variance,
- b: CanonicalTy<'tcx>,
+ b: UserTypeAnnotation<'tcx>,
locations: Locations,
category: ConstraintCategory,
) -> Fallible<()> {
/// If this rvalue supports a user-given type annotation, then
/// extract and return it. This represents the final type of the
/// rvalue and will be unified with the inferred type.
- fn rvalue_user_ty(&self, rvalue: &Rvalue<'tcx>) -> Option<CanonicalTy<'tcx>> {
+ fn rvalue_user_ty(&self, rvalue: &Rvalue<'tcx>) -> Option<UserTypeAnnotation<'tcx>> {
match rvalue {
Rvalue::Use(_)
| Rvalue::Repeat(..)
use borrow_check::nll::constraints::OutlivesConstraint;
use borrow_check::nll::type_check::{BorrowCheckContext, Locations};
-use rustc::infer::canonical::{Canonical, CanonicalVarInfos, CanonicalVarValues};
+use rustc::infer::nll_relate::{TypeRelating, TypeRelatingDelegate};
use rustc::infer::{InferCtxt, NLLRegionVariableOrigin};
-use rustc::mir::ConstraintCategory;
+use rustc::mir::{ConstraintCategory, UserTypeAnnotation};
use rustc::traits::query::Fallible;
-use rustc::ty::fold::{TypeFoldable, TypeVisitor};
-use rustc::ty::relate::{self, Relate, RelateResult, TypeRelation};
-use rustc::ty::subst::Kind;
-use rustc::ty::{self, CanonicalTy, CanonicalVar, Ty, TyCtxt};
-use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::indexed_vec::IndexVec;
+use rustc::ty::relate::TypeRelation;
+use rustc::ty::subst::{Subst, UserSelfTy, UserSubsts};
+use rustc::ty::{self, Ty, TypeFoldable};
+use syntax_pos::DUMMY_SP;
/// Adds sufficient constraints to ensure that `a <: b`.
pub(super) fn sub_types<'tcx>(
) -> Fallible<()> {
debug!("sub_types(a={:?}, b={:?}, locations={:?})", a, b, locations);
TypeRelating::new(
- infcx.tcx,
+ infcx,
NllTypeRelatingDelegate::new(infcx, borrowck_context, locations, category),
ty::Variance::Covariant,
- ty::List::empty(),
).relate(&a, &b)?;
Ok(())
}
) -> Fallible<()> {
debug!("eq_types(a={:?}, b={:?}, locations={:?})", a, b, locations);
TypeRelating::new(
- infcx.tcx,
+ infcx,
NllTypeRelatingDelegate::new(infcx, borrowck_context, locations, category),
ty::Variance::Invariant,
- ty::List::empty(),
).relate(&a, &b)?;
Ok(())
}
infcx: &InferCtxt<'_, '_, 'tcx>,
a: Ty<'tcx>,
v: ty::Variance,
- b: CanonicalTy<'tcx>,
+ user_ty: UserTypeAnnotation<'tcx>,
locations: Locations,
category: ConstraintCategory,
borrowck_context: Option<&mut BorrowCheckContext<'_, 'tcx>>,
) -> Fallible<Ty<'tcx>> {
debug!(
- "sub_type_and_user_type(a={:?}, b={:?}, locations={:?})",
- a, b, locations
+ "relate_type_and_user_type(a={:?}, v={:?}, b={:?}, locations={:?})",
+ a, v, user_ty, locations
);
- let Canonical {
- variables: b_variables,
- value: b_value,
- } = b;
// The `TypeRelating` code assumes that the "canonical variables"
// appear in the "a" side, so flip `Contravariant` ambient
let v1 = ty::Contravariant.xform(v);
let mut type_relating = TypeRelating::new(
- infcx.tcx,
+ infcx,
NllTypeRelatingDelegate::new(infcx, borrowck_context, locations, category),
v1,
- b_variables,
);
- type_relating.relate(&b_value, &a)?;
- Ok(b.substitute(
- infcx.tcx,
- &CanonicalVarValues {
- var_values: type_relating
- .canonical_var_values
- .into_iter()
- .map(|x| x.expect("unsubstituted canonical variable"))
- .collect(),
- },
- ))
-}
-
-struct TypeRelating<'me, 'gcx: 'tcx, 'tcx: 'me, D>
-where
- D: TypeRelatingDelegate<'tcx>,
-{
- tcx: TyCtxt<'me, 'gcx, 'tcx>,
-
- /// Callback to use when we deduce an outlives relationship
- delegate: D,
-
- /// How are we relating `a` and `b`?
- ///
- /// - covariant means `a <: b`
- /// - contravariant means `b <: a`
- /// - invariant means `a == b
- /// - bivariant means that it doesn't matter
- ambient_variance: ty::Variance,
-
- /// When we pass through a set of binders (e.g., when looking into
- /// a `fn` type), we push a new bound region scope onto here. This
- /// will contain the instantiated region for each region in those
- /// binders. When we then encounter a `ReLateBound(d, br)`, we can
- /// use the debruijn index `d` to find the right scope, and then
- /// bound region name `br` to find the specific instantiation from
- /// within that scope. See `replace_bound_region`.
- ///
- /// This field stores the instantiations for late-bound regions in
- /// the `a` type.
- a_scopes: Vec<BoundRegionScope<'tcx>>,
-
- /// Same as `a_scopes`, but for the `b` type.
- b_scopes: Vec<BoundRegionScope<'tcx>>,
-
- /// As we execute, the type on the LHS *may* come from a canonical
- /// source. In that case, we will sometimes find a constraint like
- /// `?0 = B`, where `B` is a type from the RHS. The first time we
- /// find that, we simply record `B` (and the list of scopes that
- /// tells us how to *interpret* `B`). The next time we encounter
- /// `?0`, then, we can read this value out and use it.
- ///
- /// One problem: these variables may be in some other universe,
- /// how can we enforce that? I guess I could add some kind of
- /// "minimum universe constraint" that we can feed to the NLL checker.
- /// --> also, we know this doesn't happen
- canonical_var_values: IndexVec<CanonicalVar, Option<Kind<'tcx>>>,
-}
-
-trait TypeRelatingDelegate<'tcx> {
- /// Push a constraint `sup: sub` -- this constraint must be
- /// satisfied for the two types to be related. `sub` and `sup` may
- /// be regions from the type or new variables created through the
- /// delegate.
- fn push_outlives(&mut self, sup: ty::Region<'tcx>, sub: ty::Region<'tcx>);
-
- /// Creates a new universe index. Used when instantiating placeholders.
- fn next_subuniverse(&mut self) -> ty::UniverseIndex;
+ match user_ty {
+ UserTypeAnnotation::Ty(canonical_ty) => {
+ let (ty, _) =
+ infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &canonical_ty);
+ type_relating.relate(&ty, &a)?;
+ Ok(ty)
+ }
+ UserTypeAnnotation::FnDef(def_id, canonical_substs) => {
+ let (
+ UserSubsts {
+ substs,
+ user_self_ty,
+ },
+ _,
+ ) = infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &canonical_substs);
+ let ty = infcx.tcx.mk_fn_def(def_id, substs);
+
+ type_relating.relate(&ty, &a)?;
+
+ if let Some(UserSelfTy {
+ impl_def_id,
+ self_ty,
+ }) = user_self_ty
+ {
+ let impl_self_ty = infcx.tcx.type_of(impl_def_id);
+ let impl_self_ty = impl_self_ty.subst(infcx.tcx, &substs);
+
+ // There may be type variables in `substs` and hence
+ // in `impl_self_ty`, but they should all have been
+ // resolved to some fixed value during the first call
+ // to `relate`, above. Therefore, if we use
+ // `resolve_type_vars_if_possible` we should get to
+ // something without type variables. This is important
+ // because the `b` type in `relate_with_variance`
+ // below is not permitted to have inference variables.
+ let impl_self_ty = infcx.resolve_type_vars_if_possible(&impl_self_ty);
+ assert!(!impl_self_ty.has_infer_types());
+
+ type_relating.relate_with_variance(
+ ty::Variance::Invariant,
+ &self_ty,
+ &impl_self_ty,
+ )?;
+ }
- /// Creates a new region variable representing a higher-ranked
- /// region that is instantiated existentially. This creates an
- /// inference variable, typically.
- ///
- /// So e.g. if you have `for<'a> fn(..) <: for<'b> fn(..)`, then
- /// we will invoke this method to instantiate `'a` with an
- /// inference variable (though `'b` would be instantiated first,
- /// as a placeholder).
- fn next_existential_region_var(&mut self) -> ty::Region<'tcx>;
+ Ok(ty)
+ }
+ UserTypeAnnotation::AdtDef(adt_def, canonical_substs) => {
+ let (
+ UserSubsts {
+ substs,
+ user_self_ty,
+ },
+ _,
+ ) = infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &canonical_substs);
- /// Creates a new region variable representing a
- /// higher-ranked region that is instantiated universally.
- /// This creates a new region placeholder, typically.
- ///
- /// So e.g. if you have `for<'a> fn(..) <: for<'b> fn(..)`, then
- /// we will invoke this method to instantiate `'b` with a
- /// placeholder region.
- fn next_placeholder_region(&mut self, placeholder: ty::Placeholder) -> ty::Region<'tcx>;
+ // We don't extract adt-defs with a self-type.
+ assert!(user_self_ty.is_none());
- /// Creates a new existential region in the given universe. This
- /// is used when handling subtyping and type variables -- if we
- /// have that `?X <: Foo<'a>`, for example, we would instantiate
- /// `?X` with a type like `Foo<'?0>` where `'?0` is a fresh
- /// existential variable created by this function. We would then
- /// relate `Foo<'?0>` with `Foo<'a>` (and probably add an outlives
- /// relation stating that `'?0: 'a`).
- fn generalize_existential(&mut self, universe: ty::UniverseIndex) -> ty::Region<'tcx>;
+ let ty = infcx.tcx.mk_adt(adt_def, substs);
+ type_relating.relate(&ty, &a)?;
+ Ok(ty)
+ }
+ }
}
struct NllTypeRelatingDelegate<'me, 'bccx: 'me, 'gcx: 'tcx, 'tcx: 'bccx> {
}
impl TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, '_, 'tcx> {
- fn next_subuniverse(&mut self) -> ty::UniverseIndex {
- self.infcx.create_subuniverse()
+ fn create_next_universe(&mut self) -> ty::UniverseIndex {
+ self.infcx.create_next_universe()
}
fn next_existential_region_var(&mut self) -> ty::Region<'tcx> {
}
}
}
-
-#[derive(Clone, Debug)]
-struct ScopesAndKind<'tcx> {
- scopes: Vec<BoundRegionScope<'tcx>>,
- kind: Kind<'tcx>,
-}
-
-#[derive(Clone, Debug, Default)]
-struct BoundRegionScope<'tcx> {
- map: FxHashMap<ty::BoundRegion, ty::Region<'tcx>>,
-}
-
-#[derive(Copy, Clone)]
-struct UniversallyQuantified(bool);
-
-impl<'me, 'gcx, 'tcx, D> TypeRelating<'me, 'gcx, 'tcx, D>
-where
- D: TypeRelatingDelegate<'tcx>,
-{
- fn new(
- tcx: TyCtxt<'me, 'gcx, 'tcx>,
- delegate: D,
- ambient_variance: ty::Variance,
- canonical_var_infos: CanonicalVarInfos<'tcx>,
- ) -> Self {
- let canonical_var_values = IndexVec::from_elem_n(None, canonical_var_infos.len());
- Self {
- tcx,
- delegate,
- ambient_variance,
- canonical_var_values,
- a_scopes: vec![],
- b_scopes: vec![],
- }
- }
-
- fn ambient_covariance(&self) -> bool {
- match self.ambient_variance {
- ty::Variance::Covariant | ty::Variance::Invariant => true,
- ty::Variance::Contravariant | ty::Variance::Bivariant => false,
- }
- }
-
- fn ambient_contravariance(&self) -> bool {
- match self.ambient_variance {
- ty::Variance::Contravariant | ty::Variance::Invariant => true,
- ty::Variance::Covariant | ty::Variance::Bivariant => false,
- }
- }
-
- fn create_scope(
- &mut self,
- value: &ty::Binder<impl TypeFoldable<'tcx>>,
- universally_quantified: UniversallyQuantified,
- ) -> BoundRegionScope<'tcx> {
- let mut scope = BoundRegionScope::default();
-
- // Create a callback that creates (via the delegate) either an
- // existential or placeholder region as needed.
- let mut next_region = {
- let delegate = &mut self.delegate;
- let mut lazy_universe = None;
- move |br: ty::BoundRegion| {
- if universally_quantified.0 {
- // The first time this closure is called, create a
- // new universe for the placeholders we will make
- // from here out.
- let universe = lazy_universe.unwrap_or_else(|| {
- let universe = delegate.next_subuniverse();
- lazy_universe = Some(universe);
- universe
- });
-
- let placeholder = ty::Placeholder { universe, name: br };
- delegate.next_placeholder_region(placeholder)
- } else {
- delegate.next_existential_region_var()
- }
- }
- };
-
- value.skip_binder().visit_with(&mut ScopeInstantiator {
- next_region: &mut next_region,
- target_index: ty::INNERMOST,
- bound_region_scope: &mut scope,
- });
-
- scope
- }
-
- /// When we encounter binders during the type traversal, we record
- /// the value to substitute for each of the things contained in
- /// that binder. (This will be either a universal placeholder or
- /// an existential inference variable.) Given the debruijn index
- /// `debruijn` (and name `br`) of some binder we have now
- /// encountered, this routine finds the value that we instantiated
- /// the region with; to do so, it indexes backwards into the list
- /// of ambient scopes `scopes`.
- fn lookup_bound_region(
- debruijn: ty::DebruijnIndex,
- br: &ty::BoundRegion,
- first_free_index: ty::DebruijnIndex,
- scopes: &[BoundRegionScope<'tcx>],
- ) -> ty::Region<'tcx> {
- // The debruijn index is a "reverse index" into the
- // scopes listing. So when we have INNERMOST (0), we
- // want the *last* scope pushed, and so forth.
- let debruijn_index = debruijn.index() - first_free_index.index();
- let scope = &scopes[scopes.len() - debruijn_index - 1];
-
- // Find this bound region in that scope to map to a
- // particular region.
- scope.map[br]
- }
-
- /// If `r` is a bound region, find the scope in which it is bound
- /// (from `scopes`) and return the value that we instantiated it
- /// with. Otherwise just return `r`.
- fn replace_bound_region(
- &self,
- r: ty::Region<'tcx>,
- first_free_index: ty::DebruijnIndex,
- scopes: &[BoundRegionScope<'tcx>],
- ) -> ty::Region<'tcx> {
- if let ty::ReLateBound(debruijn, br) = r {
- Self::lookup_bound_region(*debruijn, br, first_free_index, scopes)
- } else {
- r
- }
- }
-
- /// Push a new outlives requirement into our output set of
- /// constraints.
- fn push_outlives(&mut self, sup: ty::Region<'tcx>, sub: ty::Region<'tcx>) {
- debug!("push_outlives({:?}: {:?})", sup, sub);
-
- self.delegate.push_outlives(sup, sub);
- }
-
- /// When we encounter a canonical variable `var` in the output,
- /// equate it with `kind`. If the variable has been previously
- /// equated, then equate it again.
- fn relate_var(
- &mut self,
- var: CanonicalVar,
- b_kind: Kind<'tcx>,
- ) -> RelateResult<'tcx, Kind<'tcx>> {
- debug!("equate_var(var={:?}, b_kind={:?})", var, b_kind);
-
- let generalized_kind = match self.canonical_var_values[var] {
- Some(v) => v,
- None => {
- let generalized_kind = self.generalize_value(b_kind);
- self.canonical_var_values[var] = Some(generalized_kind);
- generalized_kind
- }
- };
-
- // The generalized values we extract from `canonical_var_values` have
- // been fully instantiated and hence the set of scopes we have
- // doesn't matter -- just to be sure, put an empty vector
- // in there.
- let old_a_scopes = ::std::mem::replace(&mut self.a_scopes, vec![]);
-
- // Relate the generalized kind to the original one.
- let result = self.relate(&generalized_kind, &b_kind);
-
- // Restore the old scopes now.
- self.a_scopes = old_a_scopes;
-
- debug!("equate_var: complete, result = {:?}", result);
- return result;
- }
-
- fn generalize_value(&mut self, kind: Kind<'tcx>) -> Kind<'tcx> {
- TypeGeneralizer {
- tcx: self.tcx,
- delegate: &mut self.delegate,
- first_free_index: ty::INNERMOST,
- ambient_variance: self.ambient_variance,
-
- // These always correspond to an `_` or `'_` written by
- // user, and those are always in the root universe.
- universe: ty::UniverseIndex::ROOT,
- }.relate(&kind, &kind)
- .unwrap()
- }
-}
-
-impl<D> TypeRelation<'me, 'gcx, 'tcx> for TypeRelating<'me, 'gcx, 'tcx, D>
-where
- D: TypeRelatingDelegate<'tcx>,
-{
- fn tcx(&self) -> TyCtxt<'me, 'gcx, 'tcx> {
- self.tcx
- }
-
- fn tag(&self) -> &'static str {
- "nll::subtype"
- }
-
- fn a_is_expected(&self) -> bool {
- true
- }
-
- fn relate_with_variance<T: Relate<'tcx>>(
- &mut self,
- variance: ty::Variance,
- a: &T,
- b: &T,
- ) -> RelateResult<'tcx, T> {
- debug!(
- "relate_with_variance(variance={:?}, a={:?}, b={:?})",
- variance, a, b
- );
-
- let old_ambient_variance = self.ambient_variance;
- self.ambient_variance = self.ambient_variance.xform(variance);
-
- debug!(
- "relate_with_variance: ambient_variance = {:?}",
- self.ambient_variance
- );
-
- let r = self.relate(a, b)?;
-
- self.ambient_variance = old_ambient_variance;
-
- debug!("relate_with_variance: r={:?}", r);
-
- Ok(r)
- }
-
- fn tys(&mut self, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> {
- // Watch out for the case that we are matching a `?T` against the
- // right-hand side.
- if let ty::Infer(ty::CanonicalTy(var)) = a.sty {
- self.relate_var(var, b.into())?;
- Ok(a)
- } else {
- debug!(
- "tys(a={:?}, b={:?}, variance={:?})",
- a, b, self.ambient_variance
- );
-
- relate::super_relate_tys(self, a, b)
- }
- }
-
- fn regions(
- &mut self,
- a: ty::Region<'tcx>,
- b: ty::Region<'tcx>,
- ) -> RelateResult<'tcx, ty::Region<'tcx>> {
- if let ty::ReCanonical(var) = a {
- self.relate_var(*var, b.into())?;
- return Ok(a);
- }
-
- debug!(
- "regions(a={:?}, b={:?}, variance={:?})",
- a, b, self.ambient_variance
- );
-
- let v_a = self.replace_bound_region(a, ty::INNERMOST, &self.a_scopes);
- let v_b = self.replace_bound_region(b, ty::INNERMOST, &self.b_scopes);
-
- debug!("regions: v_a = {:?}", v_a);
- debug!("regions: v_b = {:?}", v_b);
-
- if self.ambient_covariance() {
- // Covariance: a <= b. Hence, `b: a`.
- self.push_outlives(v_b, v_a);
- }
-
- if self.ambient_contravariance() {
- // Contravariant: b <= a. Hence, `a: b`.
- self.push_outlives(v_a, v_b);
- }
-
- Ok(a)
- }
-
- fn binders<T>(
- &mut self,
- a: &ty::Binder<T>,
- b: &ty::Binder<T>,
- ) -> RelateResult<'tcx, ty::Binder<T>>
- where
- T: Relate<'tcx>,
- {
- // We want that
- //
- // ```
- // for<'a> fn(&'a u32) -> &'a u32 <:
- // fn(&'b u32) -> &'b u32
- // ```
- //
- // but not
- //
- // ```
- // fn(&'a u32) -> &'a u32 <:
- // for<'b> fn(&'b u32) -> &'b u32
- // ```
- //
- // We therefore proceed as follows:
- //
- // - Instantiate binders on `b` universally, yielding a universe U1.
- // - Instantiate binders on `a` existentially in U1.
-
- debug!(
- "binders({:?}: {:?}, ambient_variance={:?})",
- a, b, self.ambient_variance
- );
-
- if self.ambient_covariance() {
- // Covariance, so we want `for<..> A <: for<..> B` --
- // therefore we compare any instantiation of A (i.e., A
- // instantiated with existentials) against every
- // instantiation of B (i.e., B instantiated with
- // universals).
-
- let b_scope = self.create_scope(b, UniversallyQuantified(true));
- let a_scope = self.create_scope(a, UniversallyQuantified(false));
-
- debug!("binders: a_scope = {:?} (existential)", a_scope);
- debug!("binders: b_scope = {:?} (universal)", b_scope);
-
- self.b_scopes.push(b_scope);
- self.a_scopes.push(a_scope);
-
- // Reset the ambient variance to covariant. This is needed
- // to correctly handle cases like
- //
- // for<'a> fn(&'a u32, &'a u3) == for<'b, 'c> fn(&'b u32, &'c u32)
- //
- // Somewhat surprisingly, these two types are actually
- // **equal**, even though the one on the right looks more
- // polymorphic. The reason is due to subtyping. To see it,
- // consider that each function can call the other:
- //
- // - The left function can call the right with `'b` and
- // `'c` both equal to `'a`
- //
- // - The right function can call the left with `'a` set to
- // `{P}`, where P is the point in the CFG where the call
- // itself occurs. Note that `'b` and `'c` must both
- // include P. At the point, the call works because of
- // subtyping (i.e., `&'b u32 <: &{P} u32`).
- let variance = ::std::mem::replace(&mut self.ambient_variance, ty::Variance::Covariant);
-
- self.relate(a.skip_binder(), b.skip_binder())?;
-
- self.ambient_variance = variance;
-
- self.b_scopes.pop().unwrap();
- self.a_scopes.pop().unwrap();
- }
-
- if self.ambient_contravariance() {
- // Contravariance, so we want `for<..> A :> for<..> B`
- // -- therefore we compare every instantiation of A (i.e.,
- // A instantiated with universals) against any
- // instantiation of B (i.e., B instantiated with
- // existentials). Opposite of above.
-
- let a_scope = self.create_scope(a, UniversallyQuantified(true));
- let b_scope = self.create_scope(b, UniversallyQuantified(false));
-
- debug!("binders: a_scope = {:?} (universal)", a_scope);
- debug!("binders: b_scope = {:?} (existential)", b_scope);
-
- self.a_scopes.push(a_scope);
- self.b_scopes.push(b_scope);
-
- // Reset ambient variance to contravariance. See the
- // covariant case above for an explanation.
- let variance =
- ::std::mem::replace(&mut self.ambient_variance, ty::Variance::Contravariant);
-
- self.relate(a.skip_binder(), b.skip_binder())?;
-
- self.ambient_variance = variance;
-
- self.b_scopes.pop().unwrap();
- self.a_scopes.pop().unwrap();
- }
-
- Ok(a.clone())
- }
-}
-
-/// When we encounter a binder like `for<..> fn(..)`, we actually have
-/// to walk the `fn` value to find all the values bound by the `for`
-/// (these are not explicitly present in the ty representation right
-/// now). This visitor handles that: it descends the type, tracking
-/// binder depth, and finds late-bound regions targeting the
-/// `for<..`>. For each of those, it creates an entry in
-/// `bound_region_scope`.
-struct ScopeInstantiator<'me, 'tcx: 'me> {
- next_region: &'me mut dyn FnMut(ty::BoundRegion) -> ty::Region<'tcx>,
- // The debruijn index of the scope we are instantiating.
- target_index: ty::DebruijnIndex,
- bound_region_scope: &'me mut BoundRegionScope<'tcx>,
-}
-
-impl<'me, 'tcx> TypeVisitor<'tcx> for ScopeInstantiator<'me, 'tcx> {
- fn visit_binder<T: TypeFoldable<'tcx>>(&mut self, t: &ty::Binder<T>) -> bool {
- self.target_index.shift_in(1);
- t.super_visit_with(self);
- self.target_index.shift_out(1);
-
- false
- }
-
- fn visit_region(&mut self, r: ty::Region<'tcx>) -> bool {
- let ScopeInstantiator {
- bound_region_scope,
- next_region,
- ..
- } = self;
-
- match r {
- ty::ReLateBound(debruijn, br) if *debruijn == self.target_index => {
- bound_region_scope
- .map
- .entry(*br)
- .or_insert_with(|| next_region(*br));
- }
-
- _ => {}
- }
-
- false
- }
-}
-
-/// The "type generalize" is used when handling inference variables.
-///
-/// The basic strategy for handling a constraint like `?A <: B` is to
-/// apply a "generalization strategy" to the type `B` -- this replaces
-/// all the lifetimes in the type `B` with fresh inference
-/// variables. (You can read more about the strategy in this [blog
-/// post].)
-///
-/// As an example, if we had `?A <: &'x u32`, we would generalize `&'x
-/// u32` to `&'0 u32` where `'0` is a fresh variable. This becomes the
-/// value of `A`. Finally, we relate `&'0 u32 <: &'x u32`, which
-/// establishes `'0: 'x` as a constraint.
-///
-/// As a side-effect of this generalization procedure, we also replace
-/// all the bound regions that we have traversed with concrete values,
-/// so that the resulting generalized type is independent from the
-/// scopes.
-///
-/// [blog post]: https://is.gd/0hKvIr
-struct TypeGeneralizer<'me, 'gcx: 'tcx, 'tcx: 'me, D>
-where
- D: TypeRelatingDelegate<'tcx> + 'me,
-{
- tcx: TyCtxt<'me, 'gcx, 'tcx>,
-
- delegate: &'me mut D,
-
- /// After we generalize this type, we are going to relative it to
- /// some other type. What will be the variance at this point?
- ambient_variance: ty::Variance,
-
- first_free_index: ty::DebruijnIndex,
-
- universe: ty::UniverseIndex,
-}
-
-impl<D> TypeRelation<'me, 'gcx, 'tcx> for TypeGeneralizer<'me, 'gcx, 'tcx, D>
-where
- D: TypeRelatingDelegate<'tcx>,
-{
- fn tcx(&self) -> TyCtxt<'me, 'gcx, 'tcx> {
- self.tcx
- }
-
- fn tag(&self) -> &'static str {
- "nll::generalizer"
- }
-
- fn a_is_expected(&self) -> bool {
- true
- }
-
- fn relate_with_variance<T: Relate<'tcx>>(
- &mut self,
- variance: ty::Variance,
- a: &T,
- b: &T,
- ) -> RelateResult<'tcx, T> {
- debug!(
- "TypeGeneralizer::relate_with_variance(variance={:?}, a={:?}, b={:?})",
- variance, a, b
- );
-
- let old_ambient_variance = self.ambient_variance;
- self.ambient_variance = self.ambient_variance.xform(variance);
-
- debug!(
- "TypeGeneralizer::relate_with_variance: ambient_variance = {:?}",
- self.ambient_variance
- );
-
- let r = self.relate(a, b)?;
-
- self.ambient_variance = old_ambient_variance;
-
- debug!("TypeGeneralizer::relate_with_variance: r={:?}", r);
-
- Ok(r)
- }
-
- fn tys(&mut self, a: Ty<'tcx>, _: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> {
- debug!("TypeGeneralizer::tys(a={:?})", a,);
-
- match a.sty {
- ty::Infer(ty::TyVar(_)) | ty::Infer(ty::IntVar(_)) | ty::Infer(ty::FloatVar(_)) => {
- bug!(
- "unexpected inference variable encountered in NLL generalization: {:?}",
- a
- );
- }
-
- _ => relate::super_relate_tys(self, a, a),
- }
- }
-
- fn regions(
- &mut self,
- a: ty::Region<'tcx>,
- _: ty::Region<'tcx>,
- ) -> RelateResult<'tcx, ty::Region<'tcx>> {
- debug!("TypeGeneralizer::regions(a={:?})", a,);
-
- if let ty::ReLateBound(debruijn, _) = a {
- if *debruijn < self.first_free_index {
- return Ok(a);
- }
- }
-
- // For now, we just always create a fresh region variable to
- // replace all the regions in the source type. In the main
- // type checker, we special case the case where the ambient
- // variance is `Invariant` and try to avoid creating a fresh
- // region variable, but since this comes up so much less in
- // NLL (only when users use `_` etc) it is much less
- // important.
- //
- // As an aside, since these new variables are created in
- // `self.universe` universe, this also serves to enforce the
- // universe scoping rules.
- //
- // FIXME(#54105) -- if the ambient variance is bivariant,
- // though, we may however need to check well-formedness or
- // risk a problem like #41677 again.
-
- let replacement_region_vid = self.delegate.generalize_existential(self.universe);
-
- Ok(replacement_region_vid)
- }
-
- fn binders<T>(
- &mut self,
- a: &ty::Binder<T>,
- _: &ty::Binder<T>,
- ) -> RelateResult<'tcx, ty::Binder<T>>
- where
- T: Relate<'tcx>,
- {
- debug!("TypeGeneralizer::binders(a={:?})", a,);
-
- self.first_free_index.shift_in(1);
- let result = self.relate(a.skip_binder(), a.skip_binder())?;
- self.first_free_index.shift_out(1);
- Ok(ty::Binder::bind(result))
- }
-}
use hair::*;
use rustc::hir;
use rustc::mir::*;
-use rustc::ty::{self, CanonicalTy, Ty};
+use rustc::ty::{self, Ty};
use rustc_data_structures::bit_set::BitSet;
use rustc_data_structures::fx::FxHashMap;
use syntax::ast::{Name, NodeId};
pub fn visit_bindings(
&mut self,
pattern: &Pattern<'tcx>,
- mut pattern_user_ty: Option<(CanonicalTy<'tcx>, Span)>,
+ mut pattern_user_ty: Option<(UserTypeAnnotation<'tcx>, Span)>,
f: &mut impl FnMut(
&mut Self,
Mutability,
NodeId,
Span,
Ty<'tcx>,
- Option<(CanonicalTy<'tcx>, Span)>,
+ Option<(UserTypeAnnotation<'tcx>, Span)>,
),
) {
match *pattern.kind {
struct Ascription<'tcx> {
span: Span,
source: Place<'tcx>,
- user_ty: CanonicalTy<'tcx>,
+ user_ty: UserTypeAnnotation<'tcx>,
}
#[derive(Clone, Debug)]
num_patterns: usize,
var_id: NodeId,
var_ty: Ty<'tcx>,
- user_var_ty: Option<(CanonicalTy<'tcx>, Span)>,
+ user_var_ty: Option<(UserTypeAnnotation<'tcx>, Span)>,
has_guard: ArmHasGuard,
opt_match_place: Option<(Option<Place<'tcx>>, Span)>,
pat_span: Span,
let mut pattern = cx.pattern_from_hir(&local.pat);
if let Some(ty) = &local.ty {
- if let Some(user_ty) = cx.tables.user_provided_tys().get(ty.hir_id) {
+ if let Some(&user_ty) = cx.tables.user_provided_tys().get(ty.hir_id) {
pattern = Pattern {
ty: pattern.ty,
span: pattern.span,
kind: Box::new(PatternKind::AscribeUserType {
- user_ty: *user_ty,
+ user_ty: UserTypeAnnotation::Ty(user_ty),
user_ty_span: ty.span,
subpattern: pattern
})
let substs = cx.tables().node_substs(fun.hir_id);
let user_ty = cx.tables().user_substs(fun.hir_id)
- .map(|user_substs| {
- user_substs.unchecked_map(|user_substs| {
- // Here, we just pair an `AdtDef` with the
- // `user_substs`, so no new types etc are introduced.
- cx.tcx().mk_adt(adt_def, user_substs)
- })
- });
+ .map(|user_substs| UserTypeAnnotation::AdtDef(adt_def, user_substs));
let field_refs = args.iter()
.enumerate()
}
hir::ExprKind::Type(ref source, ref ty) => {
let user_provided_tys = cx.tables.user_provided_tys();
- let user_ty = *user_provided_tys
- .get(ty.hir_id)
- .expect(&format!("{:?} not found in user_provided_tys, source: {:?}", ty, source));
+ let user_ty = UserTypeAnnotation::Ty(
+ *user_provided_tys
+ .get(ty.hir_id)
+ .expect(&format!(
+ "{:?} not found in user_provided_tys, source: {:?}",
+ ty,
+ source,
+ ))
+ );
if source.is_place_expr() {
ExprKind::PlaceTypeAscription {
source: source.to_ref(),
}
}
-fn user_annotated_ty_for_def(
+fn user_substs_applied_to_def(
cx: &mut Cx<'a, 'gcx, 'tcx>,
hir_id: hir::HirId,
def: &Def,
-) -> Option<CanonicalTy<'tcx>> {
+) -> Option<UserTypeAnnotation<'tcx>> {
match def {
// A reference to something callable -- e.g., a fn, method, or
// a tuple-struct or tuple-variant. This has the type of a
Def::Method(_) |
Def::StructCtor(_, CtorKind::Fn) |
Def::VariantCtor(_, CtorKind::Fn) =>
- Some(cx.tables().user_substs(hir_id)?.unchecked_map(|user_substs| {
- // Here, we just pair a `DefId` with the
- // `user_substs`, so no new types etc are introduced.
- cx.tcx().mk_fn_def(def.def_id(), user_substs)
- })),
+ Some(UserTypeAnnotation::FnDef(def.def_id(), cx.tables().user_substs(hir_id)?)),
Def::Const(_def_id) |
Def::AssociatedConst(_def_id) =>
cx.user_substs_applied_to_ty_of_hir_id(hir_id),
_ =>
- bug!("user_annotated_ty_for_def: unexpected def {:?} at {:?}", def, hir_id)
+ bug!("user_substs_applied_to_def: unexpected def {:?} at {:?}", def, hir_id)
}
}
.unwrap_or_else(|| {
span_bug!(expr.span, "no type-dependent def for method callee")
});
- let user_ty = user_annotated_ty_for_def(cx, expr.hir_id, def);
+ let user_ty = user_substs_applied_to_def(cx, expr.hir_id, def);
(def.def_id(), cx.tables().node_substs(expr.hir_id), user_ty)
}
};
Def::StructCtor(_, CtorKind::Fn) |
Def::VariantCtor(_, CtorKind::Fn) |
Def::SelfCtor(..) => {
- let user_ty = user_annotated_ty_for_def(cx, expr.hir_id, &def);
+ let user_ty = user_substs_applied_to_def(cx, expr.hir_id, &def);
ExprKind::Literal {
literal: ty::Const::zero_sized(
cx.tcx,
//! unit-tested and separated from the Rust source and compiler data
//! structures.
-use rustc::mir::{BinOp, BorrowKind, Field, UnOp};
+use rustc::mir::{BinOp, BorrowKind, UserTypeAnnotation, Field, UnOp};
use rustc::hir::def_id::DefId;
use rustc::middle::region;
use rustc::ty::subst::Substs;
-use rustc::ty::{AdtDef, CanonicalTy, UpvarSubsts, Region, Ty, Const};
+use rustc::ty::{AdtDef, UpvarSubsts, Region, Ty, Const};
use rustc::hir;
use syntax::ast;
use syntax_pos::Span;
/// Optional user-given substs: for something like `let x =
/// Bar::<T> { ... }`.
- user_ty: Option<CanonicalTy<'tcx>>,
+ user_ty: Option<UserTypeAnnotation<'tcx>>,
fields: Vec<FieldExprRef<'tcx>>,
base: Option<FruInfo<'tcx>>
PlaceTypeAscription {
source: ExprRef<'tcx>,
/// Type that the user gave to this expression
- user_ty: CanonicalTy<'tcx>,
+ user_ty: UserTypeAnnotation<'tcx>,
},
ValueTypeAscription {
source: ExprRef<'tcx>,
/// Type that the user gave to this expression
- user_ty: CanonicalTy<'tcx>,
+ user_ty: UserTypeAnnotation<'tcx>,
},
Closure {
closure_id: DefId,
},
Literal {
literal: &'tcx Const<'tcx>,
-
- /// Optional user-given type: for something like
- /// `collect::<Vec<_>>`, this would be present and would
- /// indicate that `Vec<_>` was explicitly specified.
- ///
- /// Needed for NLL to impose user-given type constraints.
- user_ty: Option<CanonicalTy<'tcx>>,
+ user_ty: Option<UserTypeAnnotation<'tcx>>,
},
InlineAsm {
asm: &'tcx hir::InlineAsm,
use hair::util::UserAnnotatedTyHelpers;
-use rustc::mir::{fmt_const_val, Field, BorrowKind, Mutability};
+use rustc::mir::{fmt_const_val, Field, BorrowKind, Mutability, UserTypeAnnotation};
use rustc::mir::interpret::{Scalar, GlobalId, ConstValue, sign_extend};
-use rustc::ty::{self, CanonicalTy, TyCtxt, AdtDef, Ty, Region};
+use rustc::ty::{self, Region, TyCtxt, AdtDef, Ty};
use rustc::ty::subst::{Substs, Kind};
use rustc::hir::{self, PatKind, RangeEnd};
use rustc::hir::def::{Def, CtorKind};
Wild,
AscribeUserType {
- user_ty: CanonicalTy<'tcx>,
+ user_ty: UserTypeAnnotation<'tcx>,
subpattern: Pattern<'tcx>,
user_ty_span: Span,
},
CloneImpls!{ <'tcx>
Span, Field, Mutability, ast::Name, ast::NodeId, usize, &'tcx ty::Const<'tcx>,
Region<'tcx>, Ty<'tcx>, BindingMode<'tcx>, &'tcx AdtDef,
- &'tcx Substs<'tcx>, &'tcx Kind<'tcx>, CanonicalTy<'tcx>
+ &'tcx Substs<'tcx>, &'tcx Kind<'tcx>, UserTypeAnnotation<'tcx>
}
impl<'tcx> PatternFoldable<'tcx> for FieldPattern<'tcx> {
// except according to those terms.
use rustc::hir;
-use rustc::ty::{self, AdtDef, CanonicalTy, TyCtxt};
+use rustc::mir::UserTypeAnnotation;
+use rustc::ty::{self, AdtDef, TyCtxt};
crate trait UserAnnotatedTyHelpers<'gcx: 'tcx, 'tcx> {
fn tcx(&self) -> TyCtxt<'_, 'gcx, 'tcx>;
&self,
hir_id: hir::HirId,
adt_def: &'tcx AdtDef,
- ) -> Option<CanonicalTy<'tcx>> {
+ ) -> Option<UserTypeAnnotation<'tcx>> {
let user_substs = self.tables().user_substs(hir_id)?;
- Some(user_substs.unchecked_map(|user_substs| {
- // Here, we just pair an `AdtDef` with the
- // `user_substs`, so no new types etc are introduced.
- self.tcx().mk_adt(adt_def, user_substs)
- }))
+ Some(UserTypeAnnotation::AdtDef(adt_def, user_substs))
}
/// Looks up the type associated with this hir-id and applies the
/// user-given substitutions; the hir-id must map to a suitable
/// type.
- fn user_substs_applied_to_ty_of_hir_id(&self, hir_id: hir::HirId) -> Option<CanonicalTy<'tcx>> {
+ fn user_substs_applied_to_ty_of_hir_id(
+ &self,
+ hir_id: hir::HirId,
+ ) -> Option<UserTypeAnnotation<'tcx>> {
let user_substs = self.tables().user_substs(hir_id)?;
match &self.tables().node_id_to_type(hir_id).sty {
- ty::Adt(adt_def, _) => Some(user_substs.unchecked_map(|user_substs| {
- // Ok to call `unchecked_map` because we just pair an
- // `AdtDef` with the `user_substs`, so no new types
- // etc are introduced.
- self.tcx().mk_adt(adt_def, user_substs)
- })),
- ty::FnDef(def_id, _) => Some(user_substs.unchecked_map(|user_substs| {
- // Here, we just pair a `DefId` with the
- // `user_substs`, so no new types etc are introduced.
- self.tcx().mk_fn_def(*def_id, user_substs)
- })),
+ ty::Adt(adt_def, _) => Some(UserTypeAnnotation::AdtDef(adt_def, user_substs)),
+ ty::FnDef(def_id, _) => Some(UserTypeAnnotation::FnDef(*def_id, user_substs)),
sty => bug!(
"sty: {:?} should not have user-substs {:?} recorded ",
sty,
}
(_, &ty::Dynamic(ref data, _)) => {
// Initial cast from sized to dyn trait
- let trait_ref = data.principal().unwrap().with_self_ty(
+ let trait_ref = data.principal().with_self_ty(
*self.tcx,
src_pointee_ty,
);
}
/// Returns true as long as there are more things to do.
- fn step(&mut self) -> EvalResult<'tcx, bool> {
+ ///
+ /// This is used by [priroda](https://github.com/oli-obk/priroda)
+ pub fn step(&mut self) -> EvalResult<'tcx, bool> {
if self.stack.is_empty() {
return Ok(false);
}
!impl_ty.needs_subst() && !impl_ty.has_escaping_regions());
if let ty::Dynamic(ref trait_ty, ..) = trait_ty.sty {
- if let Some(principal) = trait_ty.principal() {
- let poly_trait_ref = principal.with_self_ty(tcx, impl_ty);
- assert!(!poly_trait_ref.has_escaping_regions());
-
- // Walk all methods of the trait, including those of its supertraits
- let methods = tcx.vtable_methods(poly_trait_ref);
- let methods = methods.iter().cloned().filter_map(|method| method)
- .map(|(def_id, substs)| ty::Instance::resolve(
- tcx,
- ty::ParamEnv::reveal_all(),
- def_id,
- substs).unwrap())
- .filter(|&instance| should_monomorphize_locally(tcx, &instance))
- .map(|instance| create_fn_mono_item(instance));
- output.extend(methods);
- }
+ let poly_trait_ref = trait_ty.principal().with_self_ty(tcx, impl_ty);
+ assert!(!poly_trait_ref.has_escaping_regions());
+
+ // Walk all methods of the trait, including those of its supertraits
+ let methods = tcx.vtable_methods(poly_trait_ref);
+ let methods = methods.iter().cloned().filter_map(|method| method)
+ .map(|(def_id, substs)| ty::Instance::resolve(
+ tcx,
+ ty::ParamEnv::reveal_all(),
+ def_id,
+ substs).unwrap())
+ .filter(|&instance| should_monomorphize_locally(tcx, &instance))
+ .map(|instance| create_fn_mono_item(instance));
+ output.extend(methods);
// Also add the destructor
visit_drop_use(tcx, impl_ty, false, output);
}
output.push(']');
},
ty::Dynamic(ref trait_data, ..) => {
- if let Some(principal) = trait_data.principal() {
- self.push_def_path(principal.def_id(), output);
- self.push_type_params(principal.skip_binder().substs,
- trait_data.projection_bounds(),
- output);
- }
+ let principal = trait_data.principal();
+ self.push_def_path(principal.def_id(), output);
+ self.push_type_params(
+ principal.skip_binder().substs,
+ trait_data.projection_bounds(),
+ output,
+ );
},
ty::Foreign(did) => self.push_def_path(did, output),
ty::FnDef(..) |
use rustc::mir::*;
use rustc::mir::visit::*;
-use rustc::ty::{self, Instance, Ty, TyCtxt};
+use rustc::ty::{self, Instance, InstanceDef, Ty, TyCtxt};
use rustc::ty::subst::{Subst,Substs};
use std::collections::VecDeque;
param_env,
callee_def_id,
substs) {
- callsites.push_back(CallSite {
- callee: instance.def_id(),
- substs: instance.substs,
- bb,
- location: terminator.source_info
- });
+ let is_virtual =
+ if let InstanceDef::Virtual(..) = instance.def {
+ true
+ } else {
+ false
+ };
+
+ if !is_virtual {
+ callsites.push_back(CallSite {
+ callee: instance.def_id(),
+ substs: instance.substs,
+ bb,
+ location: terminator.source_info
+ });
+ }
}
}
}
let ty_def_id = match self.tcx.type_of(item_def_id).sty {
ty::Adt(adt, _) => adt.did,
ty::Foreign(did) => did,
- ty::Dynamic(ref obj, ..) if obj.principal().is_some() =>
- obj.principal().unwrap().def_id(),
+ ty::Dynamic(ref obj, ..) => obj.principal().def_id(),
ty::Projection(ref proj) => proj.trait_ref(self.tcx).def_id,
_ => return Some(AccessLevel::Public)
};
let ty_def_id = match ty.sty {
ty::Adt(adt, _) => Some(adt.did),
ty::Foreign(did) => Some(did),
- ty::Dynamic(ref obj, ..) => obj.principal().map(|p| p.def_id()),
+ ty::Dynamic(ref obj, ..) => Some(obj.principal().def_id()),
ty::Projection(ref proj) => Some(proj.item_def_id),
ty::FnDef(def_id, ..) |
ty::Closure(def_id, ..) |
let ty_def_id = match ty.sty {
ty::Adt(adt, _) => Some(adt.did),
ty::Foreign(did) => Some(did),
- ty::Dynamic(ref obj, ..) => obj.principal().map(|p| p.def_id()),
+ ty::Dynamic(ref obj, ..) => Some(obj.principal().def_id()),
ty::Projection(ref proj) => {
if self.required_visibility == ty::Visibility::Invisible {
// Conservatively approximate the whole type alias as public without
/// typically because the platform needs to unwind for things like stack
/// unwinders.
pub requires_uwtable: bool,
+
+ /// Whether or not SIMD types are passed by reference in the Rust ABI,
+ /// typically required if a target can be compiled with a mixed set of
+ /// target features. This is `true` by default, and `false` for targets like
+ /// wasm32 where the whole program either has simd or not.
+ pub simd_types_indirect: bool,
}
impl Default for TargetOptions {
embed_bitcode: false,
emit_debug_gdb_scripts: true,
requires_uwtable: false,
+ simd_types_indirect: true,
}
}
}
key!(embed_bitcode, bool);
key!(emit_debug_gdb_scripts, bool);
key!(requires_uwtable, bool);
+ key!(simd_types_indirect, bool);
if let Some(array) = obj.find("abi-blacklist").and_then(Json::as_array) {
for name in array.iter().filter_map(|abi| abi.as_string()) {
target_option_val!(embed_bitcode);
target_option_val!(emit_debug_gdb_scripts);
target_option_val!(requires_uwtable);
+ target_option_val!(simd_types_indirect);
if default.abi_blacklist != self.options.abi_blacklist {
d.insert("abi-blacklist".to_string(), self.options.abi_blacklist.iter()
linker: Some("rust-lld".to_owned()),
lld_flavor: LldFlavor::Wasm,
+ // No need for indirection here, simd types can always be passed by
+ // value as the whole module either has simd or not, which is different
+ // from x86 (for example) where programs can have functions that don't
+ // enable simd features.
+ simd_types_indirect: false,
+
.. Default::default()
};
Ok(Target {
use chalk_engine::fallible::Fallible as ChalkEngineFallible;
use chalk_engine::{context, hh::HhGoal, DelayedLiteral, ExClause};
-use rustc::infer::canonical::{Canonical, CanonicalVarValues, QueryRegionConstraint, QueryResult};
+use rustc::infer::canonical::{
+ Canonical, CanonicalVarValues, OriginalQueryValues, QueryRegionConstraint, QueryResponse,
+};
use rustc::infer::{InferCtxt, InferOk, LateBoundRegionConversionTime};
use rustc::traits::{
WellFormed,
use rustc::ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
use rustc::ty::subst::Kind;
use rustc::ty::{self, TyCtxt};
-use smallvec::SmallVec;
use std::fmt::{self, Debug};
use std::marker::PhantomData;
// u-canonicalization not yet implemented
type UniverseMap = UniverseMap;
- type Solution = Canonical<'tcx, QueryResult<'tcx, ()>>;
+ type Solution = Canonical<'tcx, QueryResponse<'tcx, ()>>;
type InferenceNormalizedSubst = CanonicalVarValues<'tcx>;
&self,
_root_goal: &Canonical<'gcx, ty::ParamEnvAnd<'gcx, Goal<'gcx>>>,
_simplified_answers: impl context::AnswerStream<ChalkArenas<'gcx>>,
- ) -> Option<Canonical<'gcx, QueryResult<'gcx, ()>>> {
+ ) -> Option<Canonical<'gcx, QueryResponse<'gcx, ()>>> {
unimplemented!()
}
}
&mut self,
value: &ty::ParamEnvAnd<'tcx, Goal<'tcx>>,
) -> Canonical<'gcx, ty::ParamEnvAnd<'gcx, Goal<'gcx>>> {
- let mut _orig_values = SmallVec::new();
+ let mut _orig_values = OriginalQueryValues::default();
self.infcx.canonicalize_query(value, &mut _orig_values)
}
// except according to those terms.
use rustc::hir::def_id::DefId;
-use rustc::infer::canonical::{Canonical, QueryResult};
+use rustc::infer::canonical::{Canonical, QueryResponse};
use rustc::traits::query::dropck_outlives::{DropckOutlivesResult, DtorckConstraint};
use rustc::traits::query::{CanonicalTyGoal, NoSolution};
use rustc::traits::{FulfillmentContext, Normalized, ObligationCause, TraitEngineExt};
fn dropck_outlives<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
- goal: CanonicalTyGoal<'tcx>,
-) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, DropckOutlivesResult<'tcx>>>>, NoSolution> {
- debug!("dropck_outlives(goal={:#?})", goal);
-
- tcx.infer_ctxt().enter(|ref infcx| {
- let tcx = infcx.tcx;
- let (
- ParamEnvAnd {
+ canonical_goal: CanonicalTyGoal<'tcx>,
+) -> Result<Lrc<Canonical<'tcx, QueryResponse<'tcx, DropckOutlivesResult<'tcx>>>>, NoSolution> {
+ debug!("dropck_outlives(goal={:#?})", canonical_goal);
+
+ tcx.infer_ctxt().enter_with_canonical(
+ DUMMY_SP,
+ &canonical_goal,
+ |ref infcx, goal, canonical_inference_vars| {
+ let tcx = infcx.tcx;
+ let ParamEnvAnd {
param_env,
value: for_ty,
- },
- canonical_inference_vars,
- ) = infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &goal);
+ } = goal;
- let mut result = DropckOutlivesResult {
- kinds: vec![],
- overflows: vec![],
- };
+ let mut result = DropckOutlivesResult {
+ kinds: vec![],
+ overflows: vec![],
+ };
- // A stack of types left to process. Each round, we pop
- // something from the stack and invoke
- // `dtorck_constraint_for_ty`. This may produce new types that
- // have to be pushed on the stack. This continues until we have explored
- // all the reachable types from the type `for_ty`.
- //
- // Example: Imagine that we have the following code:
- //
- // ```rust
- // struct A {
- // value: B,
- // children: Vec<A>,
- // }
- //
- // struct B {
- // value: u32
- // }
- //
- // fn f() {
- // let a: A = ...;
- // ..
- // } // here, `a` is dropped
- // ```
- //
- // at the point where `a` is dropped, we need to figure out
- // which types inside of `a` contain region data that may be
- // accessed by any destructors in `a`. We begin by pushing `A`
- // onto the stack, as that is the type of `a`. We will then
- // invoke `dtorck_constraint_for_ty` which will expand `A`
- // into the types of its fields `(B, Vec<A>)`. These will get
- // pushed onto the stack. Eventually, expanding `Vec<A>` will
- // lead to us trying to push `A` a second time -- to prevent
- // infinite recursion, we notice that `A` was already pushed
- // once and stop.
- let mut ty_stack = vec![(for_ty, 0)];
-
- // Set used to detect infinite recursion.
- let mut ty_set = FxHashSet();
-
- let fulfill_cx = &mut FulfillmentContext::new();
-
- let cause = ObligationCause::dummy();
- while let Some((ty, depth)) = ty_stack.pop() {
- let DtorckConstraint {
- dtorck_types,
- outlives,
- overflows,
- } = dtorck_constraint_for_ty(tcx, DUMMY_SP, for_ty, depth, ty)?;
-
- // "outlives" represent types/regions that may be touched
- // by a destructor.
- result.kinds.extend(outlives);
- result.overflows.extend(overflows);
-
- // dtorck types are "types that will get dropped but which
- // do not themselves define a destructor", more or less. We have
- // to push them onto the stack to be expanded.
- for ty in dtorck_types {
- match infcx.at(&cause, param_env).normalize(&ty) {
- Ok(Normalized {
- value: ty,
- obligations,
- }) => {
- fulfill_cx.register_predicate_obligations(infcx, obligations);
-
- debug!("dropck_outlives: ty from dtorck_types = {:?}", ty);
-
- match ty.sty {
- // All parameters live for the duration of the
- // function.
- ty::Param(..) => {}
-
- // A projection that we couldn't resolve - it
- // might have a destructor.
- ty::Projection(..) | ty::Opaque(..) => {
- result.kinds.push(ty.into());
- }
+ // A stack of types left to process. Each round, we pop
+ // something from the stack and invoke
+ // `dtorck_constraint_for_ty`. This may produce new types that
+ // have to be pushed on the stack. This continues until we have explored
+ // all the reachable types from the type `for_ty`.
+ //
+ // Example: Imagine that we have the following code:
+ //
+ // ```rust
+ // struct A {
+ // value: B,
+ // children: Vec<A>,
+ // }
+ //
+ // struct B {
+ // value: u32
+ // }
+ //
+ // fn f() {
+ // let a: A = ...;
+ // ..
+ // } // here, `a` is dropped
+ // ```
+ //
+ // at the point where `a` is dropped, we need to figure out
+ // which types inside of `a` contain region data that may be
+ // accessed by any destructors in `a`. We begin by pushing `A`
+ // onto the stack, as that is the type of `a`. We will then
+ // invoke `dtorck_constraint_for_ty` which will expand `A`
+ // into the types of its fields `(B, Vec<A>)`. These will get
+ // pushed onto the stack. Eventually, expanding `Vec<A>` will
+ // lead to us trying to push `A` a second time -- to prevent
+ // infinite recursion, we notice that `A` was already pushed
+ // once and stop.
+ let mut ty_stack = vec![(for_ty, 0)];
+
+ // Set used to detect infinite recursion.
+ let mut ty_set = FxHashSet();
+
+ let fulfill_cx = &mut FulfillmentContext::new();
+
+ let cause = ObligationCause::dummy();
+ while let Some((ty, depth)) = ty_stack.pop() {
+ let DtorckConstraint {
+ dtorck_types,
+ outlives,
+ overflows,
+ } = dtorck_constraint_for_ty(tcx, DUMMY_SP, for_ty, depth, ty)?;
+
+ // "outlives" represent types/regions that may be touched
+ // by a destructor.
+ result.kinds.extend(outlives);
+ result.overflows.extend(overflows);
+
+ // dtorck types are "types that will get dropped but which
+ // do not themselves define a destructor", more or less. We have
+ // to push them onto the stack to be expanded.
+ for ty in dtorck_types {
+ match infcx.at(&cause, param_env).normalize(&ty) {
+ Ok(Normalized {
+ value: ty,
+ obligations,
+ }) => {
+ fulfill_cx.register_predicate_obligations(infcx, obligations);
+
+ debug!("dropck_outlives: ty from dtorck_types = {:?}", ty);
+
+ match ty.sty {
+ // All parameters live for the duration of the
+ // function.
+ ty::Param(..) => {}
+
+ // A projection that we couldn't resolve - it
+ // might have a destructor.
+ ty::Projection(..) | ty::Opaque(..) => {
+ result.kinds.push(ty.into());
+ }
- _ => {
- if ty_set.insert(ty) {
- ty_stack.push((ty, depth + 1));
+ _ => {
+ if ty_set.insert(ty) {
+ ty_stack.push((ty, depth + 1));
+ }
}
}
}
- }
- // We don't actually expect to fail to normalize.
- // That implies a WF error somewhere else.
- Err(NoSolution) => {
- return Err(NoSolution);
+ // We don't actually expect to fail to normalize.
+ // That implies a WF error somewhere else.
+ Err(NoSolution) => {
+ return Err(NoSolution);
+ }
}
}
}
- }
- debug!("dropck_outlives: result = {:#?}", result);
+ debug!("dropck_outlives: result = {:#?}", result);
- infcx.make_canonicalized_query_result(canonical_inference_vars, result, fulfill_cx)
- })
+ infcx.make_canonicalized_query_response(canonical_inference_vars, result, fulfill_cx)
+ },
+ )
}
/// Return a set of constraints that needs to be satisfied in
dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ety)
}
- ty::Tuple(tys) => tys
- .iter()
+ ty::Tuple(tys) => tys.iter()
.map(|ty| dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ty))
.collect(),
return Ok(result);
}
- let mut result = def
- .all_fields()
+ let mut result = def.all_fields()
.map(|field| tcx.type_of(field.did))
.map(|fty| dtorck_constraint_for_ty(tcx, span, fty, 0, fty))
.collect::<Result<DtorckConstraint, NoSolution>>()?;
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use rustc::traits::{EvaluationResult, Obligation, ObligationCause,
- OverflowError, SelectionContext, TraitQueryMode};
use rustc::traits::query::CanonicalPredicateGoal;
+use rustc::traits::{
+ EvaluationResult, Obligation, ObligationCause, OverflowError, SelectionContext, TraitQueryMode,
+};
use rustc::ty::query::Providers;
use rustc::ty::{ParamEnvAnd, TyCtxt};
use syntax::source_map::DUMMY_SP;
fn evaluate_obligation<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
- goal: CanonicalPredicateGoal<'tcx>,
+ canonical_goal: CanonicalPredicateGoal<'tcx>,
) -> Result<EvaluationResult, OverflowError> {
- tcx.infer_ctxt().enter(|ref infcx| {
- let (
- ParamEnvAnd {
+ tcx.infer_ctxt().enter_with_canonical(
+ DUMMY_SP,
+ &canonical_goal,
+ |ref infcx, goal, _canonical_inference_vars| {
+ let ParamEnvAnd {
param_env,
value: predicate,
- },
- _canonical_inference_vars,
- ) = infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &goal);
+ } = goal;
- let mut selcx = SelectionContext::with_query_mode(&infcx, TraitQueryMode::Canonical);
- let obligation = Obligation::new(ObligationCause::dummy(), param_env, predicate);
+ let mut selcx = SelectionContext::with_query_mode(&infcx, TraitQueryMode::Canonical);
+ let obligation = Obligation::new(ObligationCause::dummy(), param_env, predicate);
- selcx.evaluate_obligation_recursively(&obligation)
- })
+ selcx.evaluate_obligation_recursively(&obligation)
+ },
+ )
}
tcx: TyCtxt<'_, 'tcx, 'tcx>,
goal: CanonicalTyGoal<'tcx>,
) -> Result<
- Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, Vec<OutlivesBound<'tcx>>>>>,
+ Lrc<Canonical<'tcx, canonical::QueryResponse<'tcx, Vec<OutlivesBound<'tcx>>>>>,
NoSolution,
> {
tcx.infer_ctxt()
let wf_conditions = iter::once(ty::Binder::dummy(trait_pred.lower()))
.chain(
where_clauses
- .iter()
- .cloned()
+ .into_iter()
.map(|wc| wc.map_bound(|goal| goal.into_well_formed_goal()))
);
// `WC`
let where_clauses = tcx.predicates_of(def_id).predicates
.into_iter()
- .map(|(wc, _)| wc.lower())
- .collect::<Vec<_>>();
+ .map(|(wc, _)| wc.lower());
// `Implemented(A0: Trait<A1..An>) :- WC`
let clause = ProgramClause {
goal: trait_pred,
hypotheses: tcx.mk_goals(
where_clauses
- .into_iter()
.map(|wc| tcx.mk_goal(GoalKind::from_poly_domain_goal(wc, tcx))),
),
};
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use rustc::infer::canonical::{Canonical, QueryResult};
+use rustc::infer::canonical::{Canonical, QueryResponse};
use rustc::traits::query::{normalize::NormalizationResult, CanonicalProjectionGoal, NoSolution};
use rustc::traits::{self, ObligationCause, SelectionContext, TraitEngineExt};
use rustc::ty::query::Providers;
fn normalize_projection_ty<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
goal: CanonicalProjectionGoal<'tcx>,
-) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, NormalizationResult<'tcx>>>>, NoSolution> {
+) -> Result<Lrc<Canonical<'tcx, QueryResponse<'tcx, NormalizationResult<'tcx>>>>, NoSolution> {
debug!("normalize_provider(goal={:#?})", goal);
tcx.sess
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use rustc::infer::canonical::{Canonical, QueryResult};
+use rustc::infer::canonical::{Canonical, QueryResponse};
use rustc::infer::InferCtxt;
use rustc::traits::query::type_op::eq::Eq;
use rustc::traits::query::type_op::normalize::Normalize;
fn type_op_eq<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Eq<'tcx>>>,
-) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, ()>>>, NoSolution> {
+) -> Result<Lrc<Canonical<'tcx, QueryResponse<'tcx, ()>>>, NoSolution> {
tcx.infer_ctxt()
.enter_canonical_trait_query(&canonicalized, |infcx, fulfill_cx, key| {
let (param_env, Eq { a, b }) = key.into_parts();
fn type_op_normalize_ty(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<Ty<'tcx>>>>,
-) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, Ty<'tcx>>>>, NoSolution> {
+) -> Result<Lrc<Canonical<'tcx, QueryResponse<'tcx, Ty<'tcx>>>>, NoSolution> {
tcx.infer_ctxt()
.enter_canonical_trait_query(&canonicalized, type_op_normalize)
}
fn type_op_normalize_predicate(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<Predicate<'tcx>>>>,
-) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, Predicate<'tcx>>>>, NoSolution> {
+) -> Result<Lrc<Canonical<'tcx, QueryResponse<'tcx, Predicate<'tcx>>>>, NoSolution> {
tcx.infer_ctxt()
.enter_canonical_trait_query(&canonicalized, type_op_normalize)
}
fn type_op_normalize_fn_sig(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<FnSig<'tcx>>>>,
-) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, FnSig<'tcx>>>>, NoSolution> {
+) -> Result<Lrc<Canonical<'tcx, QueryResponse<'tcx, FnSig<'tcx>>>>, NoSolution> {
tcx.infer_ctxt()
.enter_canonical_trait_query(&canonicalized, type_op_normalize)
}
fn type_op_normalize_poly_fn_sig(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<PolyFnSig<'tcx>>>>,
-) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, PolyFnSig<'tcx>>>>, NoSolution> {
+) -> Result<Lrc<Canonical<'tcx, QueryResponse<'tcx, PolyFnSig<'tcx>>>>, NoSolution> {
tcx.infer_ctxt()
.enter_canonical_trait_query(&canonicalized, type_op_normalize)
}
fn type_op_subtype<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Subtype<'tcx>>>,
-) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, ()>>>, NoSolution> {
+) -> Result<Lrc<Canonical<'tcx, QueryResponse<'tcx, ()>>>, NoSolution> {
tcx.infer_ctxt()
.enter_canonical_trait_query(&canonicalized, |infcx, fulfill_cx, key| {
let (param_env, Subtype { sub, sup }) = key.into_parts();
fn type_op_prove_predicate<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, ProvePredicate<'tcx>>>,
-) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, ()>>>, NoSolution> {
+) -> Result<Lrc<Canonical<'tcx, QueryResponse<'tcx, ()>>>, NoSolution> {
tcx.infer_ctxt()
.enter_canonical_trait_query(&canonicalized, |infcx, fulfill_cx, key| {
let (param_env, ProvePredicate { predicate }) = key.into_parts();
debug!("ty_of_fn");
let tcx = self.tcx();
- let input_tys: Vec<Ty> =
- decl.inputs.iter().map(|a| self.ty_of_arg(a, None)).collect();
+ let input_tys =
+ decl.inputs.iter().map(|a| self.ty_of_arg(a, None));
let output_ty = match decl.output {
hir::Return(ref output) => self.ast_ty_to_ty(output),
debug!("ty_of_fn: output_ty={:?}", output_ty);
let bare_fn_ty = ty::Binder::bind(tcx.mk_fn_sig(
- input_tys.into_iter(),
+ input_tys,
output_ty,
decl.variadic,
unsafety,
// Typecheck the patterns first, so that we get types for all the
// bindings.
- let all_arm_pats_diverge: Vec<_> = arms.iter().map(|arm| {
+ let all_arm_pats_diverge = arms.iter().map(|arm| {
let mut all_pats_diverge = Diverges::WarnedAlways;
for p in &arm.pats {
self.diverges.set(Diverges::Maybe);
Diverges::Maybe => Diverges::Maybe,
Diverges::Always | Diverges::WarnedAlways => Diverges::WarnedAlways,
}
- }).collect();
+ });
// Now typecheck the blocks.
//
/// No metadata attached, ie pointer to sized type or foreign type
Thin,
/// A trait object
- Vtable(Option<DefId>),
+ Vtable(DefId),
/// Slice
Length,
/// The unsize info of this projection
Ok(match t.sty {
ty::Slice(_) | ty::Str => Some(PointerKind::Length),
ty::Dynamic(ref tty, ..) =>
- Some(PointerKind::Vtable(tty.principal().map(|p| p.def_id()))),
+ Some(PointerKind::Vtable(tty.principal().def_id())),
ty::Adt(def, substs) if def.is_struct() => {
match def.non_enum_variant().fields.last() {
None => Some(PointerKind::Thin),
self.deduce_sig_from_projection(None, &pb)
})
.next();
- let kind = object_type
- .principal()
- .and_then(|p| self.tcx.lang_items().fn_trait_kind(p.def_id()));
+ let kind = self.tcx.lang_items().fn_trait_kind(object_type.principal().def_id());
(sig, kind)
}
ty::Infer(ty::TyVar(vid)) => self.deduce_expectations_from_obligations(vid),
// field is of the found type, suggest such variants. See Issue
// #42764.
if let ty::Adt(expected_adt, substs) = expected.sty {
- let compatible_variants = expected_adt.variants
+ let mut compatible_variants = expected_adt.variants
.iter()
.filter(|variant| variant.fields.len() == 1)
.filter_map(|variant| {
} else {
None
}
- }).collect::<Vec<_>>();
+ }).peekable();
- if !compatible_variants.is_empty() {
+ if compatible_variants.peek().is_some() {
let expr_text = print::to_string(print::NO_ANN, |s| s.print_expr(expr));
- let suggestions = compatible_variants.iter()
- .map(|v| format!("{}({})", v, expr_text)).collect::<Vec<_>>();
+ let suggestions = compatible_variants.map(|v|
+ format!("{}({})", v, expr_text)).collect::<Vec<_>>();
err.span_suggestions_with_applicability(
expr.span,
"try using a variant of the expected type",
.include_raw_pointers()
.filter_map(|(ty, _)|
match ty.sty {
- ty::Dynamic(ref data, ..) => data.principal().map(|p| closure(self, ty, p)),
+ ty::Dynamic(ref data, ..) => Some(closure(self, ty, data.principal())),
_ => None,
}
)
match self_ty.sty {
ty::Dynamic(ref data, ..) => {
- if let Some(p) = data.principal() {
- self.assemble_inherent_candidates_from_object(self_ty, p);
- self.assemble_inherent_impl_candidates_for_type(p.def_id());
- }
+ let p = data.principal();
+ self.assemble_inherent_candidates_from_object(self_ty, p);
+ self.assemble_inherent_impl_candidates_for_type(p.def_id());
}
ty::Adt(def, _) => {
self.assemble_inherent_impl_candidates_for_type(def.did);
ty::Adt(def, _) => def.did.is_local(),
ty::Foreign(did) => did.is_local(),
- ty::Dynamic(ref tr, ..) => tr.principal()
- .map_or(false, |p| p.def_id().is_local()),
+ ty::Dynamic(ref tr, ..) => tr.principal().def_id().is_local(),
ty::Param(_) => true,
use rustc::infer::type_variable::{TypeVariableOrigin};
use rustc::middle::region;
use rustc::mir::interpret::{ConstValue, GlobalId};
-use rustc::ty::subst::{CanonicalSubsts, UnpackedKind, Subst, Substs};
+use rustc::ty::subst::{CanonicalUserSubsts, UnpackedKind, Subst, Substs,
+ UserSelfTy, UserSubsts};
use rustc::traits::{self, ObligationCause, ObligationCauseCode, TraitEngine};
use rustc::ty::{self, Ty, TyCtxt, GenericParamDefKind, Visibility, ToPredicate, RegionKind};
use rustc::ty::adjustment::{Adjust, Adjustment, AllowTwoPhase, AutoBorrow, AutoBorrowMutability};
}
// For each field, figure out if it's known to be a ZST and align(1)
- let field_infos: Vec<_> = adt.non_enum_variant().fields.iter().map(|field| {
+ let field_infos = adt.non_enum_variant().fields.iter().map(|field| {
let ty = field.ty(tcx, Substs::identity_for_item(tcx, field.did));
let param_env = tcx.param_env(field.did);
let layout = tcx.layout_of(param_env.and(ty));
let zst = layout.map(|layout| layout.is_zst()).unwrap_or(false);
let align1 = layout.map(|layout| layout.align.abi() == 1).unwrap_or(false);
(span, zst, align1)
- }).collect();
+ });
- let non_zst_fields = field_infos.iter().filter(|(_span, zst, _align1)| !*zst);
+ let non_zst_fields = field_infos.clone().filter(|(_span, zst, _align1)| !*zst);
let non_zst_count = non_zst_fields.clone().count();
if non_zst_count != 1 {
- let field_spans: Vec<_> = non_zst_fields.map(|(span, _zst, _align1)| *span).collect();
+ let field_spans: Vec<_> = non_zst_fields.map(|(span, _zst, _align1)| span).collect();
struct_span_err!(tcx.sess, sp, E0690,
"transparent struct needs exactly one non-zero-sized field, but has {}",
non_zst_count)
.span_note(field_spans, "non-zero-sized field")
.emit();
}
- for &(span, zst, align1) in &field_infos {
+ for (span, zst, align1) in field_infos {
if zst && !align1 {
span_err!(tcx.sess, span, E0691,
"zero-sized field in transparent struct has alignment larger than 1");
method.substs[i]
}
});
- self.infcx.canonicalize_response(&just_method_substs)
+ self.infcx.canonicalize_response(&UserSubsts {
+ substs: just_method_substs,
+ user_self_ty: None, // not relevant here
+ })
});
debug!("write_method_call: user_substs = {:?}", user_substs);
/// This should be invoked **before any unifications have
/// occurred**, so that annotations like `Vec<_>` are preserved
/// properly.
- pub fn write_user_substs_from_substs(&self, hir_id: hir::HirId, substs: &'tcx Substs<'tcx>) {
+ pub fn write_user_substs_from_substs(
+ &self,
+ hir_id: hir::HirId,
+ substs: &'tcx Substs<'tcx>,
+ user_self_ty: Option<UserSelfTy<'tcx>>,
+ ) {
debug!(
"write_user_substs_from_substs({:?}, {:?}) in fcx {}",
hir_id,
);
if !substs.is_noop() {
- let user_substs = self.infcx.canonicalize_response(&substs);
+ let user_substs = self.infcx.canonicalize_response(&UserSubsts {
+ substs,
+ user_self_ty,
+ });
debug!("instantiate_value_path: user_substs = {:?}", user_substs);
self.write_user_substs(hir_id, user_substs);
}
}
- pub fn write_user_substs(&self, hir_id: hir::HirId, substs: CanonicalSubsts<'tcx>) {
+ pub fn write_user_substs(&self, hir_id: hir::HirId, substs: CanonicalUserSubsts<'tcx>) {
debug!(
"write_user_substs({:?}, {:?}) in fcx {}",
hir_id,
if let Some((variant, did, substs)) = variant {
debug!("check_struct_path: did={:?} substs={:?}", did, substs);
let hir_id = self.tcx.hir.node_to_hir_id(node_id);
- self.write_user_substs_from_substs(hir_id, substs);
+ self.write_user_substs_from_substs(hir_id, substs, None);
// Check bounds on type arguments used in the path.
let bounds = self.instantiate_bounds(path_span, did, substs);
let path_segs = self.def_ids_for_path_segments(segments, def);
- let mut ufcs_associated = None;
+ let mut user_self_ty = None;
match def {
Def::Method(def_id) |
Def::AssociatedConst(def_id) => {
ty::TraitContainer(trait_did) => {
callee::check_legal_trait_for_method_call(self.tcx, span, trait_did)
}
- ty::ImplContainer(_) => {}
- }
- if segments.len() == 1 {
- // `<T>::assoc` will end up here, and so can `T::assoc`.
- let self_ty = self_ty.expect("UFCS sugared assoc missing Self");
- ufcs_associated = Some((container, self_ty));
+ ty::ImplContainer(impl_def_id) => {
+ if segments.len() == 1 {
+ // `<T>::assoc` will end up here, and so
+                        // can `T::assoc`. If this came from an
+ // inherent impl, we need to record the
+ // `T` for posterity (see `UserSelfTy` for
+ // details).
+ let self_ty = self_ty.expect("UFCS sugared assoc missing Self");
+ user_self_ty = Some(UserSelfTy {
+ impl_def_id,
+ self_ty,
+ });
+ }
+ }
}
}
_ => {}
assert!(!substs.has_escaping_regions());
assert!(!ty.has_escaping_regions());
+ // Write the "user substs" down first thing for later.
+ let hir_id = self.tcx.hir.node_to_hir_id(node_id);
+ self.write_user_substs_from_substs(hir_id, substs, user_self_ty);
+
// Add all the obligations that are required, substituting and
// normalized appropriately.
let bounds = self.instantiate_bounds(span, def_id, &substs);
// the referenced item.
let ty_substituted = self.instantiate_type_scheme(span, &substs, &ty);
- if let Some((ty::ImplContainer(impl_def_id), self_ty)) = ufcs_associated {
+ if let Some(UserSelfTy { impl_def_id, self_ty }) = user_self_ty {
// In the case of `Foo<T>::method` and `<Foo<T>>::method`, if `method`
// is inherent, there is no `Self` parameter, instead, the impl needs
// type parameters, which we can infer by unifying the provided `Self`
debug!("instantiate_value_path: type of {:?} is {:?}",
node_id,
ty_substituted);
- let hir_id = self.tcx.hir.node_to_hir_id(node_id);
self.write_substs(hir_id, substs);
- debug!(
- "instantiate_value_path: id={:?} substs={:?}",
- node_id,
- substs,
- );
- self.write_user_substs_from_substs(hir_id, substs);
-
(ty_substituted, new_def)
}
ty::Foreign(did) => {
self.check_def_id(item, did);
}
- ty::Dynamic(ref data, ..) if data.principal().is_some() => {
- self.check_def_id(item, data.principal().unwrap().def_id());
+ ty::Dynamic(ref data, ..) => {
+ self.check_def_id(item, data.principal().def_id());
}
ty::Char => {
self.check_primitive_impl(def_id,
// This is something like impl Trait1 for Trait2. Illegal
// if Trait1 is a supertrait of Trait2 or Trait2 is not object safe.
- if data.principal().map_or(true, |p| !tcx.is_object_safe(p.def_id())) {
+ if !tcx.is_object_safe(data.principal().def_id()) {
// This is an error, but it will be reported by wfcheck. Ignore it here.
// This is tested by `coherence-impl-trait-for-trait-object-safe.rs`.
} else {
let mut supertrait_def_ids =
- traits::supertrait_def_ids(tcx,
- data.principal().unwrap().def_id());
+ traits::supertrait_def_ids(tcx, data.principal().def_id());
if supertrait_def_ids.any(|d| d == trait_def_id) {
let sp = tcx.sess.source_map().def_span(tcx.span_of_impl(impl_def_id).unwrap());
struct_span_err!(tcx.sess,
debug!("Dynamic");
debug!("field_ty = {}", &field_ty);
debug!("ty in field = {}", &ty);
- if let Some(ex_trait_ref) = obj.principal() {
- // Here, we are passing the type `usize` as a
- // placeholder value with the function
- // `with_self_ty`, since there is no concrete type
- // `Self` for a `dyn Trait` at this
- // stage. Therefore when checking explicit
- // predicates in `check_explicit_predicates` we
- // need to ignore checking the explicit_map for
- // Self type.
- let substs = ex_trait_ref
- .with_self_ty(tcx, tcx.types.usize)
- .skip_binder()
- .substs;
- check_explicit_predicates(
- tcx,
- &ex_trait_ref.skip_binder().def_id,
- substs,
- required_predicates,
- explicit_map,
- IgnoreSelfTy(true),
- );
- }
+ let ex_trait_ref = obj.principal();
+ // Here, we are passing the type `usize` as a
+ // placeholder value with the function
+ // `with_self_ty`, since there is no concrete type
+ // `Self` for a `dyn Trait` at this
+ // stage. Therefore when checking explicit
+ // predicates in `check_explicit_predicates` we
+ // need to ignore checking the explicit_map for
+ // Self type.
+ let substs = ex_trait_ref
+ .with_self_ty(tcx, tcx.types.usize)
+ .skip_binder()
+ .substs;
+ check_explicit_predicates(
+ tcx,
+ &ex_trait_ref.skip_binder().def_id,
+ substs,
+ required_predicates,
+ explicit_map,
+ IgnoreSelfTy(true),
+ );
}
ty::Projection(obj) => {
let contra = self.contravariant(variance);
self.add_constraints_from_region(current, r, contra);
- if let Some(p) = data.principal() {
- let poly_trait_ref = p.with_self_ty(self.tcx(), self.tcx().types.err);
- self.add_constraints_from_trait_ref(
- current, *poly_trait_ref.skip_binder(), variance);
- }
+ let poly_trait_ref = data
+ .principal()
+ .with_self_ty(self.tcx(), self.tcx().types.err);
+ self.add_constraints_from_trait_ref(
+ current, *poly_trait_ref.skip_binder(), variance);
for projection in data.projection_bounds() {
self.add_constraints_from_ty(
}
}
ty::Dynamic(ref obj, ref reg) => {
- if let Some(principal) = obj.principal() {
- let did = principal.def_id();
+ let principal = obj.principal();
+ let did = principal.def_id();
+ inline::record_extern_fqn(cx, did, TypeKind::Trait);
+
+ let mut typarams = vec![];
+ reg.clean(cx).map(|b| typarams.push(GenericBound::Outlives(b)));
+ for did in obj.auto_traits() {
+ let empty = cx.tcx.intern_substs(&[]);
+ let path = external_path(cx, &cx.tcx.item_name(did).as_str(),
+ Some(did), false, vec![], empty);
inline::record_extern_fqn(cx, did, TypeKind::Trait);
+ let bound = GenericBound::TraitBound(PolyTrait {
+ trait_: ResolvedPath {
+ path,
+ typarams: None,
+ did,
+ is_generic: false,
+ },
+ generic_params: Vec::new(),
+ }, hir::TraitBoundModifier::None);
+ typarams.push(bound);
+ }
- let mut typarams = vec![];
- reg.clean(cx).map(|b| typarams.push(GenericBound::Outlives(b)));
- for did in obj.auto_traits() {
- let empty = cx.tcx.intern_substs(&[]);
- let path = external_path(cx, &cx.tcx.item_name(did).as_str(),
- Some(did), false, vec![], empty);
- inline::record_extern_fqn(cx, did, TypeKind::Trait);
- let bound = GenericBound::TraitBound(PolyTrait {
- trait_: ResolvedPath {
- path,
- typarams: None,
- did,
- is_generic: false,
- },
- generic_params: Vec::new(),
- }, hir::TraitBoundModifier::None);
- typarams.push(bound);
- }
-
- let mut bindings = vec![];
- for pb in obj.projection_bounds() {
- bindings.push(TypeBinding {
- name: cx.tcx.associated_item(pb.item_def_id()).ident.name.clean(cx),
- ty: pb.skip_binder().ty.clean(cx)
- });
- }
+ let mut bindings = vec![];
+ for pb in obj.projection_bounds() {
+ bindings.push(TypeBinding {
+ name: cx.tcx.associated_item(pb.item_def_id()).ident.name.clean(cx),
+ ty: pb.skip_binder().ty.clean(cx)
+ });
+ }
- let path = external_path(cx, &cx.tcx.item_name(did).as_str(), Some(did),
- false, bindings, principal.skip_binder().substs);
- ResolvedPath {
- path,
- typarams: Some(typarams),
- did,
- is_generic: false,
- }
- } else {
- Never
+ let path = external_path(cx, &cx.tcx.item_name(did).as_str(), Some(did),
+ false, bindings, principal.skip_binder().substs);
+ ResolvedPath {
+ path,
+ typarams: Some(typarams),
+ did,
+ is_generic: false,
}
}
ty::Tuple(ref t) => Tuple(t.clean(cx)),
///
/// [`format!`]: ../std/macro.format.html
/// [`std::fmt`]: ../std/fmt/index.html
-/// [`eprintln!`]: ../std/macro.eprint.html
+/// [`eprintln!`]: ../std/macro.eprintln.html
/// # Panics
///
/// Panics if writing to `io::stdout` fails.
use rustc_data_structures::sync::Lrc;
use errors::Applicability;
+const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
+ `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, `literal`, \
+ `path`, `meta`, `tt`, `item` and `vis`";
+
pub struct ParserAnyMacro<'a> {
parser: Parser<'a>,
if let Err(bad_frag) = has_legal_fragment_specifier(sess, features, attrs, token) {
let msg = format!("invalid fragment specifier `{}`", bad_frag);
sess.span_diagnostic.struct_span_err(token.span(), &msg)
- .help("valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, \
- `pat`, `ty`, `literal`, `path`, `meta`, `tt`, `item` and `vis`")
+ .help(VALID_FRAGMENT_NAMES_MSG)
.emit();
// (This eliminates false positives and duplicates
// from error messages.)
},
"" => Ok(true), // keywords::Invalid
_ => Err((format!("invalid fragment specifier `{}`", frag),
- "valid fragment specifiers are `ident`, `block`, \
- `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt`, \
- `literal`, `item` and `vis`"))
+ VALID_FRAGMENT_NAMES_MSG))
}
}
}
// #[cfg_attr(predicate, multiple, attributes, here)]
(active, cfg_attr_multi, "1.31.0", Some(54881), None),
+
+ // Allows `const _: TYPE = VALUE`
+ (active, underscore_const_names, "1.31.0", Some(54912), None),
);
declare_features! (
}
}
+ ast::ItemKind::Const(_,_) => {
+ if i.ident.name == "_" {
+ gate_feature_post!(&self, underscore_const_names, i.span,
+ "naming constants with `_` is unstable");
+ }
+ }
+
ast::ItemKind::ForeignMod(ref foreign_module) => {
self.check_abi(foreign_module.abi, i.span);
}
}
fn parse_item_const(&mut self, m: Option<Mutability>) -> PResult<'a, ItemInfo> {
- let id = self.parse_ident()?;
+ let id = match self.token {
+ token::Ident(ident, false) if ident.name == keywords::Underscore.name() => {
+ self.bump(); // `_`
+ ident.gensym()
+ },
+ _ => self.parse_ident()?,
+ };
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
self.expect(&token::Eq)?;
#include "llvm/Object/Archive.h"
#include "llvm/Object/ObjectFile.h"
#include "llvm/Bitcode/BitcodeWriterPass.h"
+#include "llvm/Support/Signals.h"
#include "llvm/IR/CallSite.h"
#include <cstdlib>
#endif
+#include <iostream>
+
//===----------------------------------------------------------------------===
//
// This file defines alternate interfaces to core functions that are more
static LLVM_THREAD_LOCAL char *LastError;
+// Custom error handler for fatal LLVM errors.
+//
+// Notably it exits the process with code 101, unlike LLVM's default of 1.
+static void FatalErrorHandler(void *UserData,
+ const std::string& Reason,
+ bool GenCrashDiag) {
+ // Do the same thing that the default error handler does.
+ std::cerr << "LLVM ERROR: " << Reason << std::endl;
+
+ // Since this error handler exits the process, we have to run any cleanup that
+ // LLVM would run after handling the error. This might change with an LLVM
+ // upgrade.
+ sys::RunInterruptHandlers();
+
+ exit(101);
+}
+
+extern "C" void LLVMRustInstallFatalErrorHandler() {
+ install_fatal_error_handler(FatalErrorHandler);
+}
+
extern "C" LLVMMemoryBufferRef
LLVMRustCreateMemoryBufferWithContentsOfFile(const char *Path) {
ErrorOr<std::unique_ptr<MemoryBuffer>> BufOr =
# source tarball for a stable release you'll likely see `1.x.0` for rustc and
# `0.x.0` for Cargo where they were released on `date`.
-date: 2018-09-23
+date: 2018-10-13
rustc: beta
cargo: beta
-Subproject commit fe825c93788c841ac1872e8351a62c37a5f78427
+Subproject commit 307650500de5b44dc1047dc9d15e449e09d92b57
// StorageLive(_4);
// _4 = std::option::Option<std::boxed::Box<u32>>::None;
// FakeRead(ForLet, _4);
-// AscribeUserType(_4, o, Canonical { variables: [], value: std::option::Option<std::boxed::Box<u32>> });
+// AscribeUserType(_4, o, Ty(Canonical { variables: [], value: std::option::Option<std::boxed::Box<u32>> }));
// StorageLive(_5);
// StorageLive(_6);
// _6 = move _4;
--- /dev/null
+// compile-flags: -Z span_free_formats
+
+fn main() {
+ println!("{}", test(&()));
+}
+
+fn test(x: &dyn X) -> u32 {
+ x.y()
+}
+
+trait X {
+ fn y(&self) -> u32 {
+ 1
+ }
+}
+
+impl X for () {
+ fn y(&self) -> u32 {
+ 2
+ }
+}
+
+// END RUST SOURCE
+// START rustc.test.Inline.after.mir
+// ...
+// bb0: {
+// ...
+// _0 = const X::y(move _2) -> bb1;
+// }
+// ...
+// END rustc.test.Inline.after.mir
// run-pass
fn main() {
+ #[allow(unused_parens)]
match 0 {
(pat) => assert_eq!(pat, 0)
}
fn check<Clone>(_c: Clone) {
fn check2() {
- <() as std::clone::Clone>::clone(&());
+ let _ = <() as std::clone::Clone>::clone(&());
}
check2();
}
+++ /dev/null
-See `src/test/run-pass/closure-expected-type`.
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-pass
+
+fn test<F: Fn(&u64, &u64)>(f: F) {}
+
+fn main() {
+ test(|x, y | {});
+ test(|x:&u64, y:&u64| {});
+ test(|x:&u64, y | {});
+ test(|x, y:&u64| {});
+}
fn main() {
MustUseDeprecated::new(); //~ warning: use of deprecated item
- //| warning: unused `MustUseDeprecated` which must be used
+ //| warning: unused `MustUseDeprecated` that must be used
}
LL | MustUseDeprecated {} //~ warning: use of deprecated item
| ^^^^^^^^^^^^^^^^^
-warning: unused `MustUseDeprecated` which must be used
+warning: unused `MustUseDeprecated` that must be used
--> $DIR/cfg-attr-multi-true.rs:20:5
|
LL | MustUseDeprecated::new(); //~ warning: use of deprecated item
--- /dev/null
+// Copyright 2012-2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+#![feature(const_let)]
+
+trait Trt {}
+struct Str {}
+
+impl Trt for Str {}
+
+const _ : () = {
+ use std::marker::PhantomData;
+ struct ImplementsTrait<T: Trt>(PhantomData<T>);
+ let _ = ImplementsTrait::<Str>(PhantomData);
+ ()
+};
+
+fn main() {}
--- /dev/null
+error[E0658]: naming constants with `_` is unstable (see issue #54912)
+ --> $DIR/feature-gate-underscore_const_names.rs:17:1
+ |
+LL | / const _ : () = {
+LL | | use std::marker::PhantomData;
+LL | | struct ImplementsTrait<T: Trt>(PhantomData<T>);
+LL | | let _ = ImplementsTrait::<Str>(PhantomData);
+LL | | ()
+LL | | };
+ | |__^
+ |
+ = help: add #![feature(underscore_const_names)] to the crate attributes to enable
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0658`.
fn need_to_use_this_method_value(&self) -> usize {
self.n
}
+
+ #[must_use]
+ fn need_to_use_this_associated_function_value() -> isize {
+ -1
+ }
}
trait EvenNature {
m.is_even(); // trait method!
//~^ WARN unused return value
+ MyStruct::need_to_use_this_associated_function_value();
+ //~^ WARN unused return value
+
m.replace(3); // won't warn (annotation needs to be in trait definition)
// comparison methods are `must_use`
-warning: unused return value of `need_to_use_this_value` which must be used
- --> $DIR/fn_must_use.rs:60:5
+warning: unused return value of `need_to_use_this_value` that must be used
+ --> $DIR/fn_must_use.rs:65:5
|
LL | need_to_use_this_value(); //~ WARN unused return value
| ^^^^^^^^^^^^^^^^^^^^^^^^^
| ^^^^^^^^^^^^^^^
= note: it's important
-warning: unused return value of `MyStruct::need_to_use_this_method_value` which must be used
- --> $DIR/fn_must_use.rs:65:5
+warning: unused return value of `MyStruct::need_to_use_this_method_value` that must be used
+ --> $DIR/fn_must_use.rs:70:5
|
LL | m.need_to_use_this_method_value(); //~ WARN unused return value
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-warning: unused return value of `EvenNature::is_even` which must be used
- --> $DIR/fn_must_use.rs:66:5
+warning: unused return value of `EvenNature::is_even` that must be used
+ --> $DIR/fn_must_use.rs:71:5
|
LL | m.is_even(); // trait method!
| ^^^^^^^^^^^^
|
= note: no side effects
-warning: unused return value of `std::cmp::PartialEq::eq` which must be used
- --> $DIR/fn_must_use.rs:72:5
+warning: unused return value of `MyStruct::need_to_use_this_associated_function_value` that must be used
+ --> $DIR/fn_must_use.rs:74:5
+ |
+LL | MyStruct::need_to_use_this_associated_function_value();
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+warning: unused return value of `std::cmp::PartialEq::eq` that must be used
+ --> $DIR/fn_must_use.rs:80:5
|
LL | 2.eq(&3); //~ WARN unused return value
| ^^^^^^^^^
-warning: unused return value of `std::cmp::PartialEq::eq` which must be used
- --> $DIR/fn_must_use.rs:73:5
+warning: unused return value of `std::cmp::PartialEq::eq` that must be used
+ --> $DIR/fn_must_use.rs:81:5
|
LL | m.eq(&n); //~ WARN unused return value
| ^^^^^^^^^
-warning: unused comparison which must be used
- --> $DIR/fn_must_use.rs:76:5
+warning: unused comparison that must be used
+ --> $DIR/fn_must_use.rs:84:5
|
LL | 2 == 3; //~ WARN unused comparison
| ^^^^^^
-warning: unused comparison which must be used
- --> $DIR/fn_must_use.rs:77:5
+warning: unused comparison that must be used
+ --> $DIR/fn_must_use.rs:85:5
|
LL | m == n; //~ WARN unused comparison
| ^^^^^^
LL | macro_rules! test { ($wrong:t_ty ..) => () }
| ^^^^^^^^^^^
|
- = help: valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `literal`, `path`, `meta`, `tt`, `item` and `vis`
+ = help: valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, `literal`, `path`, `meta`, `tt`, `item` and `vis`
error: aborting due to previous error
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+mod module {}
+
+fn main() {
+ let _ = module { x: 0 }; //~ERROR expected struct
+}
--- /dev/null
+error[E0574]: expected struct, variant or union type, found module `module`
+ --> $DIR/issue-23189.rs:14:13
+ |
+LL | let _ = module { x: 0 }; //~ERROR expected struct
+ | ^^^^^^ not a struct, variant or union type
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0574`.
--- /dev/null
+error[E0596]: cannot borrow data in a `&` reference as mutable
+ --> $DIR/issue-52240.rs:9:27
+ |
+LL | if let (Some(Foo::Bar(ref mut val)), _) = (&arr.get(0), 0) {
+ | ^^^^^^^^^^^ cannot borrow as mutable
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0596`.
--- /dev/null
+// issue-52240: Can turn immutable into mut with `ref mut`
+
+enum Foo {
+ Bar(i32),
+}
+
+fn main() {
+ let arr = vec!(Foo::Bar(0));
+ if let (Some(Foo::Bar(ref mut val)), _) = (&arr.get(0), 0) {
+ //~^ ERROR cannot borrow field of immutable binding as mutable
+ *val = 9001;
+ }
+ match arr[0] {
+ Foo::Bar(ref s) => println!("{}", s)
+ }
+}
--- /dev/null
+error[E0596]: cannot borrow field of immutable binding as mutable
+ --> $DIR/issue-52240.rs:9:27
+ |
+LL | if let (Some(Foo::Bar(ref mut val)), _) = (&arr.get(0), 0) {
+ | ^^^^^^^^^^^ cannot mutably borrow field of immutable binding
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0596`.
--- /dev/null
+// issue-54966: ICE returning an unknown type with impl FnMut
+
+fn generate_duration() -> Oper<impl FnMut()> {}
+//~^ ERROR cannot find type `Oper` in this scope
+
+fn main() {}
--- /dev/null
+error[E0412]: cannot find type `Oper` in this scope
+ --> $DIR/issue-54966.rs:3:27
+ |
+LL | fn generate_duration() -> Oper<impl FnMut()> {}
+ | ^^^^ not found in this scope
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0412`.
+++ /dev/null
-error: unsatisfied lifetime constraints
- --> $DIR/ex3-both-anon-regions-both-are-structs-4.rs:16:5
- |
-LL | fn foo(mut x: Ref) {
- | -----
- | |
- | has type `Ref<'_, '1>`
- | has type `Ref<'2, '_>`
-LL | x.a = x.b; //~ ERROR lifetime mismatch
- | ^^^^^^^^^ assignment requires that `'1` must outlive `'2`
-
-error: aborting due to previous error
-
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-struct Ref<'a, 'b> {
- a: &'a u32,
- b: &'b u32,
-}
-
-fn foo(mut x: Ref) {
- x.a = x.b; //~ ERROR lifetime mismatch
-}
-
-fn main() {}
+++ /dev/null
-error[E0623]: lifetime mismatch
- --> $DIR/ex3-both-anon-regions-both-are-structs-4.rs:16:11
- |
-LL | fn foo(mut x: Ref) {
- | ---
- | |
- | this type is declared with multiple lifetimes...
-LL | x.a = x.b; //~ ERROR lifetime mismatch
- | ^^^ ...but data with one lifetime flows into the other here
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0623`.
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-pass
+
+#![allow(unreachable_patterns)]
+#![allow(unused_variables)]
+#![warn(unused_parens)]
+
+fn main() {
+ match 1 {
+ (_) => {} //~ WARNING: unnecessary parentheses around pattern
+ (y) => {} //~ WARNING: unnecessary parentheses around pattern
+ (ref r) => {} //~ WARNING: unnecessary parentheses around pattern
+ (e @ 1..=2) => {} //~ WARNING: unnecessary parentheses around outer pattern
+ (1..=2) => {} // Non ambiguous range pattern should not warn
+ e @ (3..=4) => {} // Non ambiguous range pattern should not warn
+ }
+
+ match &1 {
+ (e @ &(1...2)) => {} //~ WARNING: unnecessary parentheses around outer pattern
+ &(_) => {} //~ WARNING: unnecessary parentheses around pattern
+ e @ &(1...2) => {} // Ambiguous range pattern should not warn
+ &(1..=2) => {} // Ambiguous range pattern should not warn
+ }
+
+ match &1 {
+ e @ &(1...2) | e @ &(3..=4) => {} // Complex ambiguous pattern should not warn
+ &_ => {}
+ }
+}
--- /dev/null
+warning: unnecessary parentheses around pattern
+ --> $DIR/issue-54538-unused-parens-lint.rs:19:9
+ |
+LL | (_) => {} //~ WARNING: unnecessary parentheses around pattern
+ | ^^^ help: remove these parentheses
+ |
+note: lint level defined here
+ --> $DIR/issue-54538-unused-parens-lint.rs:15:9
+ |
+LL | #![warn(unused_parens)]
+ | ^^^^^^^^^^^^^
+
+warning: unnecessary parentheses around pattern
+ --> $DIR/issue-54538-unused-parens-lint.rs:20:9
+ |
+LL | (y) => {} //~ WARNING: unnecessary parentheses around pattern
+ | ^^^ help: remove these parentheses
+
+warning: unnecessary parentheses around pattern
+ --> $DIR/issue-54538-unused-parens-lint.rs:21:9
+ |
+LL | (ref r) => {} //~ WARNING: unnecessary parentheses around pattern
+ | ^^^^^^^ help: remove these parentheses
+
+warning: unnecessary parentheses around pattern
+ --> $DIR/issue-54538-unused-parens-lint.rs:22:9
+ |
+LL | (e @ 1..=2) => {} //~ WARNING: unnecessary parentheses around outer pattern
+ | ^^^^^^^^^^^ help: remove these parentheses
+
+warning: unnecessary parentheses around pattern
+ --> $DIR/issue-54538-unused-parens-lint.rs:28:9
+ |
+LL | (e @ &(1...2)) => {} //~ WARNING: unnecessary parentheses around outer pattern
+ | ^^^^^^^^^^^^^^ help: remove these parentheses
+
+warning: unnecessary parentheses around pattern
+ --> $DIR/issue-54538-unused-parens-lint.rs:29:10
+ |
+LL | &(_) => {} //~ WARNING: unnecessary parentheses around pattern
+ | ^^^ help: remove these parentheses
+
+++ /dev/null
-// Copyright 2014–2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![deny(nonstandard_style)]
-#![allow(dead_code)]
-
-fn CamelCase() {} //~ ERROR should have a snake
-
-#[allow(nonstandard_style)]
-mod test {
- fn CamelCase() {}
-
- #[forbid(nonstandard_style)]
- mod bad {
- fn CamelCase() {} //~ ERROR should have a snake
-
- static bad: isize = 1; //~ ERROR should have an upper
- }
-
- mod warn {
- #![warn(nonstandard_style)]
-
- fn CamelCase() {} //~ WARN should have a snake
-
- struct snake_case; //~ WARN should have a camel
- }
-}
-
-fn main() {}
+++ /dev/null
-error: function `CamelCase` should have a snake case name such as `camel_case`
- --> $DIR/lint-group-style.rs:14:1
- |
-LL | fn CamelCase() {} //~ ERROR should have a snake
- | ^^^^^^^^^^^^^^^^^
- |
-note: lint level defined here
- --> $DIR/lint-group-style.rs:11:9
- |
-LL | #![deny(nonstandard_style)]
- | ^^^^^^^^^^^^^^^^^
- = note: #[deny(non_snake_case)] implied by #[deny(nonstandard_style)]
-
-error: function `CamelCase` should have a snake case name such as `camel_case`
- --> $DIR/lint-group-style.rs:22:9
- |
-LL | fn CamelCase() {} //~ ERROR should have a snake
- | ^^^^^^^^^^^^^^^^^
- |
-note: lint level defined here
- --> $DIR/lint-group-style.rs:20:14
- |
-LL | #[forbid(nonstandard_style)]
- | ^^^^^^^^^^^^^^^^^
- = note: #[forbid(non_snake_case)] implied by #[forbid(nonstandard_style)]
-
-error: static variable `bad` should have an upper case name such as `BAD`
- --> $DIR/lint-group-style.rs:24:9
- |
-LL | static bad: isize = 1; //~ ERROR should have an upper
- | ^^^^^^^^^^^^^^^^^^^^^^
- |
-note: lint level defined here
- --> $DIR/lint-group-style.rs:20:14
- |
-LL | #[forbid(nonstandard_style)]
- | ^^^^^^^^^^^^^^^^^
- = note: #[forbid(non_upper_case_globals)] implied by #[forbid(nonstandard_style)]
-
-warning: function `CamelCase` should have a snake case name such as `camel_case`
- --> $DIR/lint-group-style.rs:30:9
- |
-LL | fn CamelCase() {} //~ WARN should have a snake
- | ^^^^^^^^^^^^^^^^^
- |
-note: lint level defined here
- --> $DIR/lint-group-style.rs:28:17
- |
-LL | #![warn(nonstandard_style)]
- | ^^^^^^^^^^^^^^^^^
- = note: #[warn(non_snake_case)] implied by #[warn(nonstandard_style)]
-
-warning: type `snake_case` should have a camel case name such as `SnakeCase`
- --> $DIR/lint-group-style.rs:32:9
- |
-LL | struct snake_case; //~ WARN should have a camel
- | ^^^^^^^^^^^^^^^^^^
- |
-note: lint level defined here
- --> $DIR/lint-group-style.rs:28:17
- |
-LL | #![warn(nonstandard_style)]
- | ^^^^^^^^^^^^^^^^^
- = note: #[warn(non_camel_case_types)] implied by #[warn(nonstandard_style)]
-
-error: aborting due to 3 previous errors
-
-warning: unused comparison which must be used
+warning: unused comparison that must be used
--> $DIR/must-use-ops.rs:22:5
|
LL | val == 1;
LL | #![warn(unused_must_use)]
| ^^^^^^^^^^^^^^^
-warning: unused comparison which must be used
+warning: unused comparison that must be used
--> $DIR/must-use-ops.rs:23:5
|
LL | val < 1;
| ^^^^^^^
-warning: unused comparison which must be used
+warning: unused comparison that must be used
--> $DIR/must-use-ops.rs:24:5
|
LL | val <= 1;
| ^^^^^^^^
-warning: unused comparison which must be used
+warning: unused comparison that must be used
--> $DIR/must-use-ops.rs:25:5
|
LL | val != 1;
| ^^^^^^^^
-warning: unused comparison which must be used
+warning: unused comparison that must be used
--> $DIR/must-use-ops.rs:26:5
|
LL | val >= 1;
| ^^^^^^^^
-warning: unused comparison which must be used
+warning: unused comparison that must be used
--> $DIR/must-use-ops.rs:27:5
|
LL | val > 1;
| ^^^^^^^
-warning: unused arithmetic operation which must be used
+warning: unused arithmetic operation that must be used
--> $DIR/must-use-ops.rs:30:5
|
LL | val + 2;
| ^^^^^^^
-warning: unused arithmetic operation which must be used
+warning: unused arithmetic operation that must be used
--> $DIR/must-use-ops.rs:31:5
|
LL | val - 2;
| ^^^^^^^
-warning: unused arithmetic operation which must be used
+warning: unused arithmetic operation that must be used
--> $DIR/must-use-ops.rs:32:5
|
LL | val / 2;
| ^^^^^^^
-warning: unused arithmetic operation which must be used
+warning: unused arithmetic operation that must be used
--> $DIR/must-use-ops.rs:33:5
|
LL | val * 2;
| ^^^^^^^
-warning: unused arithmetic operation which must be used
+warning: unused arithmetic operation that must be used
--> $DIR/must-use-ops.rs:34:5
|
LL | val % 2;
| ^^^^^^^
-warning: unused logical operation which must be used
+warning: unused logical operation that must be used
--> $DIR/must-use-ops.rs:37:5
|
LL | true && true;
| ^^^^^^^^^^^^
-warning: unused logical operation which must be used
+warning: unused logical operation that must be used
--> $DIR/must-use-ops.rs:38:5
|
LL | false || true;
| ^^^^^^^^^^^^^
-warning: unused bitwise operation which must be used
+warning: unused bitwise operation that must be used
--> $DIR/must-use-ops.rs:41:5
|
LL | 5 ^ val;
| ^^^^^^^
-warning: unused bitwise operation which must be used
+warning: unused bitwise operation that must be used
--> $DIR/must-use-ops.rs:42:5
|
LL | 5 & val;
| ^^^^^^^
-warning: unused bitwise operation which must be used
+warning: unused bitwise operation that must be used
--> $DIR/must-use-ops.rs:43:5
|
LL | 5 | val;
| ^^^^^^^
-warning: unused bitwise operation which must be used
+warning: unused bitwise operation that must be used
--> $DIR/must-use-ops.rs:44:5
|
LL | 5 << val;
| ^^^^^^^^
-warning: unused bitwise operation which must be used
+warning: unused bitwise operation that must be used
--> $DIR/must-use-ops.rs:45:5
|
LL | 5 >> val;
| ^^^^^^^^
-warning: unused unary operation which must be used
+warning: unused unary operation that must be used
--> $DIR/must-use-ops.rs:48:5
|
LL | !val;
| ^^^^
-warning: unused unary operation which must be used
+warning: unused unary operation that must be used
--> $DIR/must-use-ops.rs:49:5
|
LL | -val;
| ^^^^
-warning: unused unary operation which must be used
+warning: unused unary operation that must be used
--> $DIR/must-use-ops.rs:50:5
|
LL | *val_pointer;
-error: unused return value of `foo` which must be used
+error: unused return value of `foo` that must be used
--> $DIR/must_use-unit.rs:14:5
|
LL | foo(); //~ unused return value of `foo`
LL | #![deny(unused_must_use)]
| ^^^^^^^^^^^^^^^
-error: unused return value of `bar` which must be used
+error: unused return value of `bar` that must be used
--> $DIR/must_use-unit.rs:16:5
|
LL | bar(); //~ unused return value of `bar`
LL | ($x:foo) => ()
| ^^^^^^
|
- = help: valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `literal`, `path`, `meta`, `tt`, `item` and `vis`
+ = help: valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, `literal`, `path`, `meta`, `tt`, `item` and `vis`
error: aborting due to previous error
-error: user substs: Canonical { variables: [], value: [u32] }
+error: user substs: Canonical { variables: [], value: UserSubsts { substs: [u32], user_self_ty: None } }
--> $DIR/dump-adt-brace-struct.rs:28:5
|
LL | SomeStruct::<u32> { t: 22 }; //~ ERROR [u32]
-error: user substs: Canonical { variables: [], value: [u32] }
+error: user substs: Canonical { variables: [], value: UserSubsts { substs: [u32], user_self_ty: None } }
--> $DIR/dump-fn-method.rs:36:13
|
LL | let x = foo::<u32>; //~ ERROR [u32]
| ^^^^^^^^^^
-error: user substs: Canonical { variables: [CanonicalVarInfo { kind: Ty(General) }, CanonicalVarInfo { kind: Ty(General) }], value: [?0, u32, ?1] }
+error: user substs: Canonical { variables: [CanonicalVarInfo { kind: Ty(General) }, CanonicalVarInfo { kind: Ty(General) }], value: UserSubsts { substs: [?0, u32, ?1], user_self_ty: None } }
--> $DIR/dump-fn-method.rs:42:13
|
LL | let x = <_ as Bazoom<u32>>::method::<_>; //~ ERROR [?0, u32, ?1]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: user substs: Canonical { variables: [], value: [u8, u16, u32] }
+error: user substs: Canonical { variables: [], value: UserSubsts { substs: [u8, u16, u32], user_self_ty: None } }
--> $DIR/dump-fn-method.rs:46:13
|
LL | let x = <u8 as Bazoom<u16>>::method::<u32>; //~ ERROR [u8, u16, u32]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: user substs: Canonical { variables: [CanonicalVarInfo { kind: Ty(General) }, CanonicalVarInfo { kind: Ty(General) }], value: [?0, ?1, u32] }
+error: user substs: Canonical { variables: [CanonicalVarInfo { kind: Ty(General) }, CanonicalVarInfo { kind: Ty(General) }], value: UserSubsts { substs: [?0, ?1, u32], user_self_ty: None } }
--> $DIR/dump-fn-method.rs:54:5
|
LL | y.method::<u32>(44, 66); //~ ERROR [?0, ?1, u32]
--- /dev/null
+#![feature(nll)]
+
+// Check that substitutions given on the self type (here, `A`) carry
+// through to NLL.
+
+struct A<'a> { x: &'a u32 }
+
+impl<'a> A<'a> {
+ fn new<'b, T>(x: &'a u32, y: T) -> Self {
+ Self { x }
+ }
+}
+
+fn foo<'a>() {
+ let v = 22;
+ let x = A::<'a>::new(&v, 22);
+ //~^ ERROR
+}
+
+fn main() {}
--- /dev/null
+error[E0597]: `v` does not live long enough
+ --> $DIR/method-ufcs-inherent-1.rs:16:26
+ |
+LL | let x = A::<'a>::new(&v, 22);
+ | ^^ borrowed value does not live long enough
+LL | //~^ ERROR
+LL | }
+ | - `v` dropped here while still borrowed
+ |
+note: borrowed value must be valid for the lifetime 'a as defined on the function body at 14:8...
+ --> $DIR/method-ufcs-inherent-1.rs:14:8
+ |
+LL | fn foo<'a>() {
+ | ^^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0597`.
--- /dev/null
+#![feature(nll)]
+
+// Check that substitutions given on the self type (here, `A`) can be
+// used in combination with annotations given for method arguments.
+
+struct A<'a> { x: &'a u32 }
+
+impl<'a> A<'a> {
+ fn new<'b, T>(x: &'a u32, y: T) -> Self {
+ Self { x }
+ }
+}
+
+fn foo<'a>() {
+ let v = 22;
+ let x = A::<'a>::new::<&'a u32>(&v, &v);
+ //~^ ERROR
+ //~| ERROR
+}
+
+fn main() {}
--- /dev/null
+error[E0597]: `v` does not live long enough
+ --> $DIR/method-ufcs-inherent-2.rs:16:37
+ |
+LL | let x = A::<'a>::new::<&'a u32>(&v, &v);
+ | ^^ borrowed value does not live long enough
+...
+LL | }
+ | - `v` dropped here while still borrowed
+ |
+note: borrowed value must be valid for the lifetime 'a as defined on the function body at 14:8...
+ --> $DIR/method-ufcs-inherent-2.rs:14:8
+ |
+LL | fn foo<'a>() {
+ | ^^
+
+error[E0597]: `v` does not live long enough
+ --> $DIR/method-ufcs-inherent-2.rs:16:41
+ |
+LL | let x = A::<'a>::new::<&'a u32>(&v, &v);
+ | ^^ borrowed value does not live long enough
+...
+LL | }
+ | - `v` dropped here while still borrowed
+ |
+note: borrowed value must be valid for the lifetime 'a as defined on the function body at 14:8...
+ --> $DIR/method-ufcs-inherent-2.rs:14:8
+ |
+LL | fn foo<'a>() {
+ | ^^
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0597`.
--- /dev/null
+#![feature(nll)]
+
+// Check that inherent methods invoked with `<T>::new` style
+// carry their annotations through to NLL.
+
+struct A<'a> { x: &'a u32 }
+
+impl<'a> A<'a> {
+ fn new<'b, T>(x: &'a u32, y: T) -> Self {
+ Self { x }
+ }
+}
+
+fn foo<'a>() {
+ let v = 22;
+ let x = <A<'a>>::new(&v, 22);
+ //~^ ERROR
+}
+
+fn main() {}
--- /dev/null
+error[E0597]: `v` does not live long enough
+ --> $DIR/method-ufcs-inherent-3.rs:16:26
+ |
+LL | let x = <A<'a>>::new(&v, 22);
+ | ^^ borrowed value does not live long enough
+LL | //~^ ERROR
+LL | }
+ | - `v` dropped here while still borrowed
+ |
+note: borrowed value must be valid for the lifetime 'a as defined on the function body at 14:8...
+ --> $DIR/method-ufcs-inherent-3.rs:14:8
+ |
+LL | fn foo<'a>() {
+ | ^^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0597`.
--- /dev/null
+#![feature(nll)]
+
+// Check that inherent methods invoked with `<T>::new` style
+// carry their annotations through to NLL in connection with
+// method type parameters.
+
+struct A<'a> { x: &'a u32 }
+
+impl<'a> A<'a> {
+ fn new<'b, T>(x: &'a u32, y: T) -> Self {
+ Self { x }
+ }
+}
+
+fn foo<'a>() {
+ let v = 22;
+ let x = <A<'a>>::new::<&'a u32>(&v, &v);
+ //~^ ERROR
+ //~| ERROR
+}
+
+fn main() {}
--- /dev/null
+error[E0597]: `v` does not live long enough
+ --> $DIR/method-ufcs-inherent-4.rs:17:37
+ |
+LL | let x = <A<'a>>::new::<&'a u32>(&v, &v);
+ | ^^ borrowed value does not live long enough
+...
+LL | }
+ | - `v` dropped here while still borrowed
+ |
+note: borrowed value must be valid for the lifetime 'a as defined on the function body at 15:8...
+ --> $DIR/method-ufcs-inherent-4.rs:15:8
+ |
+LL | fn foo<'a>() {
+ | ^^
+
+error[E0597]: `v` does not live long enough
+ --> $DIR/method-ufcs-inherent-4.rs:17:41
+ |
+LL | let x = <A<'a>>::new::<&'a u32>(&v, &v);
+ | ^^ borrowed value does not live long enough
+...
+LL | }
+ | - `v` dropped here while still borrowed
+ |
+note: borrowed value must be valid for the lifetime 'a as defined on the function body at 15:8...
+ --> $DIR/method-ufcs-inherent-4.rs:15:8
+ |
+LL | fn foo<'a>() {
+ | ^^
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0597`.
--- /dev/null
+// Copyright 2012-2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-pass
+
+#![feature(const_let)]
+#![feature(underscore_const_names)]
+
+trait Trt {}
+struct Str {}
+impl Trt for Str {}
+
+macro_rules! check_impl {
+ ($struct:ident,$trait:ident) => {
+ const _ : () = {
+ use std::marker::PhantomData;
+ struct ImplementsTrait<T: $trait>(PhantomData<T>);
+ let _ = ImplementsTrait::<$struct>(PhantomData);
+ ()
+ };
+ }
+}
+
+#[deny(unused)]
+const _ : () = ();
+
+const _ : i32 = 42;
+const _ : Str = Str{};
+
+check_impl!(Str, Trt);
+check_impl!(Str, Trt);
+
+fn main() {
+ check_impl!(Str, Trt);
+ check_impl!(Str, Trt);
+}
LL | ($wrong:t_ty) => () //~ ERROR invalid fragment specifier `t_ty`
| ^^^^^^^^^^^
|
- = help: valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `literal`, `path`, `meta`, `tt`, `item` and `vis`
+ = help: valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, `literal`, `path`, `meta`, `tt`, `item` and `vis`
error: aborting due to previous error
#[allow(unused_results)]
fn test() {
foo::<isize>();
- foo::<MustUse>(); //~ ERROR: unused `MustUse` which must be used
- foo::<MustUseMsg>(); //~ ERROR: unused `MustUseMsg` which must be used
+ foo::<MustUse>(); //~ ERROR: unused `MustUse` that must be used
+ foo::<MustUseMsg>(); //~ ERROR: unused `MustUseMsg` that must be used
//~^ NOTE: some message
}
fn main() {
foo::<isize>(); //~ ERROR: unused result
- foo::<MustUse>(); //~ ERROR: unused `MustUse` which must be used
- foo::<MustUseMsg>(); //~ ERROR: unused `MustUseMsg` which must be used
+ foo::<MustUse>(); //~ ERROR: unused `MustUse` that must be used
+ foo::<MustUseMsg>(); //~ ERROR: unused `MustUseMsg` that must be used
//~^ NOTE: some message
let _ = foo::<isize>();
-error: unused `MustUse` which must be used
+error: unused `MustUse` that must be used
--> $DIR/unused-result.rs:31:5
|
-LL | foo::<MustUse>(); //~ ERROR: unused `MustUse` which must be used
+LL | foo::<MustUse>(); //~ ERROR: unused `MustUse` that must be used
| ^^^^^^^^^^^^^^^^^
|
note: lint level defined here
LL | #![deny(unused_results, unused_must_use)]
| ^^^^^^^^^^^^^^^
-error: unused `MustUseMsg` which must be used
+error: unused `MustUseMsg` that must be used
--> $DIR/unused-result.rs:32:5
|
-LL | foo::<MustUseMsg>(); //~ ERROR: unused `MustUseMsg` which must be used
+LL | foo::<MustUseMsg>(); //~ ERROR: unused `MustUseMsg` that must be used
| ^^^^^^^^^^^^^^^^^^^^
|
= note: some message
LL | #![deny(unused_results, unused_must_use)]
| ^^^^^^^^^^^^^^
-error: unused `MustUse` which must be used
+error: unused `MustUse` that must be used
--> $DIR/unused-result.rs:45:5
|
-LL | foo::<MustUse>(); //~ ERROR: unused `MustUse` which must be used
+LL | foo::<MustUse>(); //~ ERROR: unused `MustUse` that must be used
| ^^^^^^^^^^^^^^^^^
-error: unused `MustUseMsg` which must be used
+error: unused `MustUseMsg` that must be used
--> $DIR/unused-result.rs:46:5
|
-LL | foo::<MustUseMsg>(); //~ ERROR: unused `MustUseMsg` which must be used
+LL | foo::<MustUseMsg>(); //~ ERROR: unused `MustUseMsg` that must be used
| ^^^^^^^^^^^^^^^^^^^^
|
= note: some message
-Subproject commit ad6e5c0037d88602a1c95051e42b392ed5ffcbe8
+Subproject commit 5dbac98885199bbd7c0f189d7405b5523434d1e3
-Subproject commit 32b1d1fc157f71ed2f10b60fe28abe087a743618
+Subproject commit 5afdf8b78507ddf015d192858aef56e72c17de16
-Subproject commit 26f9d617c347185433b77c481a5c50c55d9b72ce
+Subproject commit 8b14b03368429e6ee2a8ac0e0c876505606ab1f1
-Subproject commit 15d4d4a5b0cf3c0155195f3322cc7a61148e5567
+Subproject commit 440a9855b73b6bf9b5345cf3a79565566f6ef345