"tt",
]
+[[package]]
+name = "hir_ty"
+version = "0.0.0"
+dependencies = [
+ "arena",
+ "arrayvec",
+ "base_db",
+ "chalk-ir",
+ "chalk-recursive",
+ "chalk-solve",
+ "ena",
+ "expect",
+ "hir_def",
+ "hir_expand",
+ "itertools",
+ "log",
+ "profile",
+ "rustc-hash",
+ "scoped-tls",
+ "smallvec",
+ "stdx",
+ "syntax",
+ "test_utils",
+ "tracing",
+ "tracing-subscriber",
+ "tracing-tree",
+]
+
[[package]]
name = "home"
version = "0.5.3"
"either",
"hir_def",
"hir_expand",
+ "hir_ty",
"itertools",
"log",
"profile",
- "ra_hir_ty",
"rustc-hash",
"stdx",
"syntax",
]
-[[package]]
-name = "ra_hir_ty"
-version = "0.1.0"
-dependencies = [
- "arena",
- "arrayvec",
- "base_db",
- "chalk-ir",
- "chalk-recursive",
- "chalk-solve",
- "ena",
- "expect",
- "hir_def",
- "hir_expand",
- "itertools",
- "log",
- "profile",
- "rustc-hash",
- "scoped-tls",
- "smallvec",
- "stdx",
- "syntax",
- "test_utils",
- "tracing",
- "tracing-subscriber",
- "tracing-tree",
-]
-
[[package]]
name = "ra_ide"
version = "0.1.0"
"expect",
"flycheck",
"hir_def",
+ "hir_ty",
"itertools",
"jod-thread",
"log",
"profile",
"project_model",
"ra_hir",
- "ra_hir_ty",
"ra_ide",
"ra_ide_db",
"ra_ssr",
--- /dev/null
+[package]
+name = "hir_ty"
+version = "0.0.0"
+license = "MIT OR Apache-2.0"
+authors = ["rust-analyzer developers"]
+edition = "2018"
+
+[lib]
+doctest = false
+
+[dependencies]
+itertools = "0.9.0"
+arrayvec = "0.5.1"
+smallvec = "1.2.0"
+ena = "0.14.0"
+log = "0.4.8"
+rustc-hash = "1.1.0"
+scoped-tls = "1"
+chalk-solve = { version = "0.21.0" }
+chalk-ir = { version = "0.21.0" }
+chalk-recursive = { version = "0.21.0" }
+
+stdx = { path = "../stdx" }
+hir_def = { path = "../hir_def" }
+hir_expand = { path = "../hir_expand" }
+arena = { path = "../arena" }
+base_db = { path = "../base_db" }
+profile = { path = "../profile" }
+syntax = { path = "../syntax" }
+test_utils = { path = "../test_utils" }
+
+[dev-dependencies]
+tracing = "0.1"
+tracing-subscriber = { version = "0.2", default-features = false, features = ["env-filter", "registry"] }
+tracing-tree = { version = "0.1.4" }
+
+expect = { path = "../expect" }
--- /dev/null
+//! In certain situations, rust automatically inserts derefs as necessary: for
+//! example, field accesses `foo.bar` still work when `foo` is actually a
+//! reference to a type with the field `bar`. This is an approximation of the
+//! logic in rustc (which lives in librustc_typeck/check/autoderef.rs).
+
+use std::iter::successors;
+
+use base_db::CrateId;
+use hir_def::lang_item::LangItemTarget;
+use hir_expand::name::name;
+use log::{info, warn};
+
+use crate::{
+ db::HirDatabase,
+ traits::{InEnvironment, Solution},
+ utils::generics,
+ BoundVar, Canonical, DebruijnIndex, Obligation, Substs, TraitRef, Ty,
+};
+
+const AUTODEREF_RECURSION_LIMIT: usize = 10;
+
+pub fn autoderef<'a>(
+ db: &'a dyn HirDatabase,
+ krate: Option<CrateId>,
+ ty: InEnvironment<Canonical<Ty>>,
+) -> impl Iterator<Item = Canonical<Ty>> + 'a {
+ let InEnvironment { value: ty, environment } = ty;
+ successors(Some(ty), move |ty| {
+ deref(db, krate?, InEnvironment { value: ty, environment: environment.clone() })
+ })
+ .take(AUTODEREF_RECURSION_LIMIT)
+}
+
+pub(crate) fn deref(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ ty: InEnvironment<&Canonical<Ty>>,
+) -> Option<Canonical<Ty>> {
+ if let Some(derefed) = ty.value.value.builtin_deref() {
+ Some(Canonical { value: derefed, kinds: ty.value.kinds.clone() })
+ } else {
+ deref_by_trait(db, krate, ty)
+ }
+}
+
+fn deref_by_trait(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ ty: InEnvironment<&Canonical<Ty>>,
+) -> Option<Canonical<Ty>> {
+ let deref_trait = match db.lang_item(krate, "deref".into())? {
+ LangItemTarget::TraitId(it) => it,
+ _ => return None,
+ };
+ let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?;
+
+ let generic_params = generics(db.upcast(), target.into());
+ if generic_params.len() != 1 {
+ // the Target type + Deref trait should only have one generic parameter,
+ // namely Deref's Self type
+ return None;
+ }
+
+ // FIXME make the Canonical / bound var handling nicer
+
+ let parameters =
+ Substs::build_for_generics(&generic_params).push(ty.value.value.clone()).build();
+
+ // Check that the type implements Deref at all
+ let trait_ref = TraitRef { trait_: deref_trait, substs: parameters.clone() };
+ let implements_goal = Canonical {
+ kinds: ty.value.kinds.clone(),
+ value: InEnvironment {
+ value: Obligation::Trait(trait_ref),
+ environment: ty.environment.clone(),
+ },
+ };
+ if db.trait_solve(krate, implements_goal).is_none() {
+ return None;
+ }
+
+ // Now do the assoc type projection
+ let projection = super::traits::ProjectionPredicate {
+ ty: Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, ty.value.kinds.len())),
+ projection_ty: super::ProjectionTy { associated_ty: target, parameters },
+ };
+
+ let obligation = super::Obligation::Projection(projection);
+
+ let in_env = InEnvironment { value: obligation, environment: ty.environment };
+
+ let canonical =
+ Canonical::new(in_env, ty.value.kinds.iter().copied().chain(Some(super::TyKind::General)));
+
+ let solution = db.trait_solve(krate, canonical)?;
+
+ match &solution {
+ Solution::Unique(vars) => {
+ // FIXME: vars may contain solutions for any inference variables
+ // that happened to be inside ty. To correctly handle these, we
+ // would have to pass the solution up to the inference context, but
+ // that requires a larger refactoring (especially if the deref
+ // happens during method resolution). So for the moment, we just
+            // check that we're not in the situation where we would actually
+ // need to handle the values of the additional variables, i.e.
+ // they're just being 'passed through'. In the 'standard' case where
+ // we have `impl<T> Deref for Foo<T> { Target = T }`, that should be
+ // the case.
+
+ // FIXME: if the trait solver decides to truncate the type, these
+ // assumptions will be broken. We would need to properly introduce
+ // new variables in that case
+
+ for i in 1..vars.0.kinds.len() {
+ if vars.0.value[i - 1] != Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i - 1))
+ {
+ warn!("complex solution for derefing {:?}: {:?}, ignoring", ty.value, solution);
+ return None;
+ }
+ }
+ Some(Canonical {
+ value: vars.0.value[vars.0.value.len() - 1].clone(),
+ kinds: vars.0.kinds.clone(),
+ })
+ }
+ Solution::Ambig(_) => {
+ info!("Ambiguous solution for derefing {:?}: {:?}", ty.value, solution);
+ None
+ }
+ }
+}
--- /dev/null
+//! FIXME: write short doc here
+
+use std::sync::Arc;
+
+use arena::map::ArenaMap;
+use base_db::{impl_intern_key, salsa, CrateId, Upcast};
+use hir_def::{
+ db::DefDatabase, expr::ExprId, DefWithBodyId, FunctionId, GenericDefId, ImplId, LocalFieldId,
+ TypeParamId, VariantId,
+};
+
+use crate::{
+ method_resolution::{InherentImpls, TraitImpls},
+ traits::chalk,
+ Binders, CallableDefId, GenericPredicate, InferenceResult, OpaqueTyId, PolyFnSig,
+ ReturnTypeImplTraits, TraitRef, Ty, TyDefId, ValueTyDefId,
+};
+use hir_expand::name::Name;
+
+#[salsa::query_group(HirDatabaseStorage)]
+pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
+ #[salsa::invoke(infer_wait)]
+ #[salsa::transparent]
+ fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
+
+ #[salsa::invoke(crate::infer::infer_query)]
+ fn infer_query(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
+
+ #[salsa::invoke(crate::lower::ty_query)]
+ #[salsa::cycle(crate::lower::ty_recover)]
+ fn ty(&self, def: TyDefId) -> Binders<Ty>;
+
+ #[salsa::invoke(crate::lower::value_ty_query)]
+ fn value_ty(&self, def: ValueTyDefId) -> Binders<Ty>;
+
+ #[salsa::invoke(crate::lower::impl_self_ty_query)]
+ #[salsa::cycle(crate::lower::impl_self_ty_recover)]
+ fn impl_self_ty(&self, def: ImplId) -> Binders<Ty>;
+
+ #[salsa::invoke(crate::lower::impl_trait_query)]
+ fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>;
+
+ #[salsa::invoke(crate::lower::field_types_query)]
+ fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>>;
+
+ #[salsa::invoke(crate::callable_item_sig)]
+ fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;
+
+ #[salsa::invoke(crate::lower::return_type_impl_traits)]
+ fn return_type_impl_traits(
+ &self,
+ def: FunctionId,
+ ) -> Option<Arc<Binders<ReturnTypeImplTraits>>>;
+
+ #[salsa::invoke(crate::lower::generic_predicates_for_param_query)]
+ #[salsa::cycle(crate::lower::generic_predicates_for_param_recover)]
+ fn generic_predicates_for_param(
+ &self,
+ param_id: TypeParamId,
+ ) -> Arc<[Binders<GenericPredicate>]>;
+
+ #[salsa::invoke(crate::lower::generic_predicates_query)]
+ fn generic_predicates(&self, def: GenericDefId) -> Arc<[Binders<GenericPredicate>]>;
+
+ #[salsa::invoke(crate::lower::generic_defaults_query)]
+ fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders<Ty>]>;
+
+ #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
+ fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc<InherentImpls>;
+
+ #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)]
+ fn trait_impls_in_crate(&self, krate: CrateId) -> Arc<TraitImpls>;
+
+ #[salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
+ fn trait_impls_in_deps(&self, krate: CrateId) -> Arc<TraitImpls>;
+
+ // Interned IDs for Chalk integration
+ #[salsa::interned]
+ fn intern_callable_def(&self, callable_def: CallableDefId) -> InternedCallableDefId;
+ #[salsa::interned]
+ fn intern_type_param_id(&self, param_id: TypeParamId) -> GlobalTypeParamId;
+ #[salsa::interned]
+ fn intern_impl_trait_id(&self, id: OpaqueTyId) -> InternedOpaqueTyId;
+ #[salsa::interned]
+ fn intern_closure(&self, id: (DefWithBodyId, ExprId)) -> ClosureId;
+
+ #[salsa::invoke(chalk::associated_ty_data_query)]
+ fn associated_ty_data(&self, id: chalk::AssocTypeId) -> Arc<chalk::AssociatedTyDatum>;
+
+ #[salsa::invoke(chalk::trait_datum_query)]
+ fn trait_datum(&self, krate: CrateId, trait_id: chalk::TraitId) -> Arc<chalk::TraitDatum>;
+
+ #[salsa::invoke(chalk::struct_datum_query)]
+ fn struct_datum(&self, krate: CrateId, struct_id: chalk::AdtId) -> Arc<chalk::StructDatum>;
+
+ #[salsa::invoke(crate::traits::chalk::impl_datum_query)]
+ fn impl_datum(&self, krate: CrateId, impl_id: chalk::ImplId) -> Arc<chalk::ImplDatum>;
+
+ #[salsa::invoke(crate::traits::chalk::fn_def_datum_query)]
+ fn fn_def_datum(&self, krate: CrateId, fn_def_id: chalk::FnDefId) -> Arc<chalk::FnDefDatum>;
+
+ #[salsa::invoke(crate::traits::chalk::associated_ty_value_query)]
+ fn associated_ty_value(
+ &self,
+ krate: CrateId,
+ id: chalk::AssociatedTyValueId,
+ ) -> Arc<chalk::AssociatedTyValue>;
+
+ #[salsa::invoke(crate::traits::trait_solve_query)]
+ fn trait_solve(
+ &self,
+ krate: CrateId,
+ goal: crate::Canonical<crate::InEnvironment<crate::Obligation>>,
+ ) -> Option<crate::traits::Solution>;
+
+ #[salsa::invoke(crate::traits::chalk::program_clauses_for_chalk_env_query)]
+ fn program_clauses_for_chalk_env(
+ &self,
+ krate: CrateId,
+ env: chalk_ir::Environment<chalk::Interner>,
+ ) -> chalk_ir::ProgramClauses<chalk::Interner>;
+}
+
+fn infer_wait(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
+ let _p = profile::span("infer:wait").detail(|| match def {
+ DefWithBodyId::FunctionId(it) => db.function_data(it).name.to_string(),
+ DefWithBodyId::StaticId(it) => {
+ db.static_data(it).name.clone().unwrap_or_else(Name::missing).to_string()
+ }
+ DefWithBodyId::ConstId(it) => {
+ db.const_data(it).name.clone().unwrap_or_else(Name::missing).to_string()
+ }
+ });
+ db.infer_query(def)
+}
+
+#[test]
+fn hir_database_is_object_safe() {
+ fn _assert_object_safe(_: &dyn HirDatabase) {}
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct GlobalTypeParamId(salsa::InternId);
+impl_intern_key!(GlobalTypeParamId);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct InternedOpaqueTyId(salsa::InternId);
+impl_intern_key!(InternedOpaqueTyId);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct ClosureId(salsa::InternId);
+impl_intern_key!(ClosureId);
+
+/// This exists just for Chalk, because Chalk just has a single `FnDefId` where
+/// we have different IDs for struct and enum variant constructors.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
+pub struct InternedCallableDefId(salsa::InternId);
+impl_intern_key!(InternedCallableDefId);
--- /dev/null
+//! FIXME: write short doc here
+mod expr;
+mod match_check;
+mod unsafe_check;
+
+use std::any::Any;
+
+use hir_def::DefWithBodyId;
+use hir_expand::diagnostics::{Diagnostic, DiagnosticSink};
+use hir_expand::{name::Name, HirFileId, InFile};
+use stdx::format_to;
+use syntax::{ast, AstPtr, SyntaxNodePtr};
+
+use crate::db::HirDatabase;
+
+pub use crate::diagnostics::expr::{record_literal_missing_fields, record_pattern_missing_fields};
+
+pub fn validate_body(db: &dyn HirDatabase, owner: DefWithBodyId, sink: &mut DiagnosticSink<'_>) {
+ let _p = profile::span("validate_body");
+ let infer = db.infer(owner);
+ infer.add_diagnostics(db, owner, sink);
+ let mut validator = expr::ExprValidator::new(owner, infer.clone(), sink);
+ validator.validate_body(db);
+ let mut validator = unsafe_check::UnsafeValidator::new(owner, infer, sink);
+ validator.validate_body(db);
+}
+
+#[derive(Debug)]
+pub struct NoSuchField {
+ pub file: HirFileId,
+ pub field: AstPtr<ast::RecordExprField>,
+}
+
+impl Diagnostic for NoSuchField {
+ fn message(&self) -> String {
+ "no such field".to_string()
+ }
+
+ fn display_source(&self) -> InFile<SyntaxNodePtr> {
+ InFile::new(self.file, self.field.clone().into())
+ }
+
+ fn as_any(&self) -> &(dyn Any + Send + 'static) {
+ self
+ }
+}
+
+#[derive(Debug)]
+pub struct MissingFields {
+ pub file: HirFileId,
+ pub field_list_parent: AstPtr<ast::RecordExpr>,
+ pub field_list_parent_path: Option<AstPtr<ast::Path>>,
+ pub missed_fields: Vec<Name>,
+}
+
+impl Diagnostic for MissingFields {
+ fn message(&self) -> String {
+ let mut buf = String::from("Missing structure fields:\n");
+ for field in &self.missed_fields {
+ format_to!(buf, "- {}\n", field);
+ }
+ buf
+ }
+
+ fn display_source(&self) -> InFile<SyntaxNodePtr> {
+ InFile {
+ file_id: self.file,
+ value: self
+ .field_list_parent_path
+ .clone()
+ .map(SyntaxNodePtr::from)
+ .unwrap_or_else(|| self.field_list_parent.clone().into()),
+ }
+ }
+
+ fn as_any(&self) -> &(dyn Any + Send + 'static) {
+ self
+ }
+}
+
+#[derive(Debug)]
+pub struct MissingPatFields {
+ pub file: HirFileId,
+ pub field_list_parent: AstPtr<ast::RecordPat>,
+ pub field_list_parent_path: Option<AstPtr<ast::Path>>,
+ pub missed_fields: Vec<Name>,
+}
+
+impl Diagnostic for MissingPatFields {
+ fn message(&self) -> String {
+ let mut buf = String::from("Missing structure fields:\n");
+ for field in &self.missed_fields {
+ format_to!(buf, "- {}\n", field);
+ }
+ buf
+ }
+ fn display_source(&self) -> InFile<SyntaxNodePtr> {
+ InFile {
+ file_id: self.file,
+ value: self
+ .field_list_parent_path
+ .clone()
+ .map(SyntaxNodePtr::from)
+ .unwrap_or_else(|| self.field_list_parent.clone().into()),
+ }
+ }
+ fn as_any(&self) -> &(dyn Any + Send + 'static) {
+ self
+ }
+}
+
+#[derive(Debug)]
+pub struct MissingMatchArms {
+ pub file: HirFileId,
+ pub match_expr: AstPtr<ast::Expr>,
+ pub arms: AstPtr<ast::MatchArmList>,
+}
+
+impl Diagnostic for MissingMatchArms {
+ fn message(&self) -> String {
+ String::from("Missing match arm")
+ }
+ fn display_source(&self) -> InFile<SyntaxNodePtr> {
+ InFile { file_id: self.file, value: self.match_expr.clone().into() }
+ }
+ fn as_any(&self) -> &(dyn Any + Send + 'static) {
+ self
+ }
+}
+
+#[derive(Debug)]
+pub struct MissingOkInTailExpr {
+ pub file: HirFileId,
+ pub expr: AstPtr<ast::Expr>,
+}
+
+impl Diagnostic for MissingOkInTailExpr {
+ fn message(&self) -> String {
+ "wrap return expression in Ok".to_string()
+ }
+ fn display_source(&self) -> InFile<SyntaxNodePtr> {
+ InFile { file_id: self.file, value: self.expr.clone().into() }
+ }
+ fn as_any(&self) -> &(dyn Any + Send + 'static) {
+ self
+ }
+}
+
+#[derive(Debug)]
+pub struct BreakOutsideOfLoop {
+ pub file: HirFileId,
+ pub expr: AstPtr<ast::Expr>,
+}
+
+impl Diagnostic for BreakOutsideOfLoop {
+ fn message(&self) -> String {
+ "break outside of loop".to_string()
+ }
+ fn display_source(&self) -> InFile<SyntaxNodePtr> {
+ InFile { file_id: self.file, value: self.expr.clone().into() }
+ }
+ fn as_any(&self) -> &(dyn Any + Send + 'static) {
+ self
+ }
+}
+
+#[derive(Debug)]
+pub struct MissingUnsafe {
+ pub file: HirFileId,
+ pub expr: AstPtr<ast::Expr>,
+}
+
+impl Diagnostic for MissingUnsafe {
+ fn message(&self) -> String {
+ format!("This operation is unsafe and requires an unsafe function or block")
+ }
+ fn display_source(&self) -> InFile<SyntaxNodePtr> {
+ InFile { file_id: self.file, value: self.expr.clone().into() }
+ }
+ fn as_any(&self) -> &(dyn Any + Send + 'static) {
+ self
+ }
+}
+
+#[derive(Debug)]
+pub struct MismatchedArgCount {
+ pub file: HirFileId,
+ pub call_expr: AstPtr<ast::Expr>,
+ pub expected: usize,
+ pub found: usize,
+}
+
+impl Diagnostic for MismatchedArgCount {
+ fn message(&self) -> String {
+ let s = if self.expected == 1 { "" } else { "s" };
+ format!("Expected {} argument{}, found {}", self.expected, s, self.found)
+ }
+ fn display_source(&self) -> InFile<SyntaxNodePtr> {
+ InFile { file_id: self.file, value: self.call_expr.clone().into() }
+ }
+ fn as_any(&self) -> &(dyn Any + Send + 'static) {
+ self
+ }
+ fn is_experimental(&self) -> bool {
+ true
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use base_db::{fixture::WithFixture, FileId, SourceDatabase, SourceDatabaseExt};
+ use hir_def::{db::DefDatabase, AssocItemId, ModuleDefId};
+ use hir_expand::{
+ db::AstDatabase,
+ diagnostics::{Diagnostic, DiagnosticSinkBuilder},
+ };
+ use rustc_hash::FxHashMap;
+ use syntax::{TextRange, TextSize};
+
+ use crate::{diagnostics::validate_body, test_db::TestDB};
+
+ impl TestDB {
+ fn diagnostics<F: FnMut(&dyn Diagnostic)>(&self, mut cb: F) {
+ let crate_graph = self.crate_graph();
+ for krate in crate_graph.iter() {
+ let crate_def_map = self.crate_def_map(krate);
+
+ let mut fns = Vec::new();
+ for (module_id, _) in crate_def_map.modules.iter() {
+ for decl in crate_def_map[module_id].scope.declarations() {
+ if let ModuleDefId::FunctionId(f) = decl {
+ fns.push(f)
+ }
+ }
+
+ for impl_id in crate_def_map[module_id].scope.impls() {
+ let impl_data = self.impl_data(impl_id);
+ for item in impl_data.items.iter() {
+ if let AssocItemId::FunctionId(f) = item {
+ fns.push(*f)
+ }
+ }
+ }
+ }
+
+ for f in fns {
+ let mut sink = DiagnosticSinkBuilder::new().build(&mut cb);
+ validate_body(self, f.into(), &mut sink);
+ }
+ }
+ }
+ }
+
+ pub(crate) fn check_diagnostics(ra_fixture: &str) {
+ let db = TestDB::with_files(ra_fixture);
+ let annotations = db.extract_annotations();
+
+ let mut actual: FxHashMap<FileId, Vec<(TextRange, String)>> = FxHashMap::default();
+ db.diagnostics(|d| {
+ let src = d.display_source();
+ let root = db.parse_or_expand(src.file_id).unwrap();
+ // FIXME: macros...
+ let file_id = src.file_id.original_file(&db);
+ let range = src.value.to_node(&root).text_range();
+ let message = d.message().to_owned();
+ actual.entry(file_id).or_default().push((range, message));
+ });
+
+ for (file_id, diags) in actual.iter_mut() {
+ diags.sort_by_key(|it| it.0.start());
+ let text = db.file_text(*file_id);
+ // For multiline spans, place them on line start
+ for (range, content) in diags {
+ if text[*range].contains('\n') {
+ *range = TextRange::new(range.start(), range.start() + TextSize::from(1));
+ *content = format!("... {}", content);
+ }
+ }
+ }
+
+ assert_eq!(annotations, actual);
+ }
+
+ #[test]
+ fn no_such_field_diagnostics() {
+ check_diagnostics(
+ r#"
+struct S { foo: i32, bar: () }
+impl S {
+ fn new() -> S {
+ S {
+ //^ Missing structure fields:
+ //| - bar
+ foo: 92,
+ baz: 62,
+ //^^^^^^^ no such field
+ }
+ }
+}
+"#,
+ );
+ }
+ #[test]
+ fn no_such_field_with_feature_flag_diagnostics() {
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo
+struct MyStruct {
+ my_val: usize,
+ #[cfg(feature = "foo")]
+ bar: bool,
+}
+
+impl MyStruct {
+ #[cfg(feature = "foo")]
+ pub(crate) fn new(my_val: usize, bar: bool) -> Self {
+ Self { my_val, bar }
+ }
+ #[cfg(not(feature = "foo"))]
+ pub(crate) fn new(my_val: usize, _bar: bool) -> Self {
+ Self { my_val }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_such_field_enum_with_feature_flag_diagnostics() {
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo
+enum Foo {
+ #[cfg(not(feature = "foo"))]
+ Buz,
+ #[cfg(feature = "foo")]
+ Bar,
+ Baz
+}
+
+fn test_fn(f: Foo) {
+ match f {
+ Foo::Bar => {},
+ Foo::Baz => {},
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_such_field_with_feature_flag_diagnostics_on_struct_lit() {
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo
+struct S {
+ #[cfg(feature = "foo")]
+ foo: u32,
+ #[cfg(not(feature = "foo"))]
+ bar: u32,
+}
+
+impl S {
+ #[cfg(feature = "foo")]
+ fn new(foo: u32) -> Self {
+ Self { foo }
+ }
+ #[cfg(not(feature = "foo"))]
+ fn new(bar: u32) -> Self {
+ Self { bar }
+ }
+ fn new2(bar: u32) -> Self {
+ #[cfg(feature = "foo")]
+ { Self { foo: bar } }
+ #[cfg(not(feature = "foo"))]
+ { Self { bar } }
+ }
+ fn new2(val: u32) -> Self {
+ Self {
+ #[cfg(feature = "foo")]
+ foo: val,
+ #[cfg(not(feature = "foo"))]
+ bar: val,
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_such_field_with_type_macro() {
+ check_diagnostics(
+ r#"
+macro_rules! Type { () => { u32 }; }
+struct Foo { bar: Type![] }
+
+impl Foo {
+ fn new() -> Self {
+ Foo { bar: 0 }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn missing_record_pat_field_diagnostic() {
+ check_diagnostics(
+ r#"
+struct S { foo: i32, bar: () }
+fn baz(s: S) {
+ let S { foo: _ } = s;
+ //^ Missing structure fields:
+ //| - bar
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn missing_record_pat_field_no_diagnostic_if_not_exhaustive() {
+ check_diagnostics(
+ r"
+struct S { foo: i32, bar: () }
+fn baz(s: S) -> i32 {
+ match s {
+ S { foo, .. } => foo,
+ }
+}
+",
+ )
+ }
+
+ #[test]
+ fn break_outside_of_loop() {
+ check_diagnostics(
+ r#"
+fn foo() { break; }
+ //^^^^^ break outside of loop
+"#,
+ );
+ }
+}
--- /dev/null
+//! FIXME: write short doc here
+
+use std::sync::Arc;
+
+use hir_def::{path::path, resolver::HasResolver, AdtId, DefWithBodyId};
+use hir_expand::diagnostics::DiagnosticSink;
+use rustc_hash::FxHashSet;
+use syntax::{ast, AstPtr};
+
+use crate::{
+ db::HirDatabase,
+ diagnostics::{
+ match_check::{is_useful, MatchCheckCtx, Matrix, PatStack, Usefulness},
+ MismatchedArgCount, MissingFields, MissingMatchArms, MissingOkInTailExpr, MissingPatFields,
+ },
+ utils::variant_data,
+ ApplicationTy, InferenceResult, Ty, TypeCtor,
+};
+
+pub use hir_def::{
+ body::{
+ scope::{ExprScopes, ScopeEntry, ScopeId},
+ Body, BodySourceMap, ExprPtr, ExprSource, PatPtr, PatSource,
+ },
+ expr::{
+ ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp,
+ MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp,
+ },
+ src::HasSource,
+ LocalFieldId, Lookup, VariantId,
+};
+
+pub(super) struct ExprValidator<'a, 'b: 'a> {
+ owner: DefWithBodyId,
+ infer: Arc<InferenceResult>,
+ sink: &'a mut DiagnosticSink<'b>,
+}
+
+impl<'a, 'b> ExprValidator<'a, 'b> {
+ pub(super) fn new(
+ owner: DefWithBodyId,
+ infer: Arc<InferenceResult>,
+ sink: &'a mut DiagnosticSink<'b>,
+ ) -> ExprValidator<'a, 'b> {
+ ExprValidator { owner, infer, sink }
+ }
+
+ pub(super) fn validate_body(&mut self, db: &dyn HirDatabase) {
+ let body = db.body(self.owner.into());
+
+ for (id, expr) in body.exprs.iter() {
+ if let Some((variant_def, missed_fields, true)) =
+ record_literal_missing_fields(db, &self.infer, id, expr)
+ {
+ self.create_record_literal_missing_fields_diagnostic(
+ id,
+ db,
+ variant_def,
+ missed_fields,
+ );
+ }
+
+ match expr {
+ Expr::Match { expr, arms } => {
+ self.validate_match(id, *expr, arms, db, self.infer.clone());
+ }
+ Expr::Call { .. } | Expr::MethodCall { .. } => {
+ self.validate_call(db, id, expr);
+ }
+ _ => {}
+ }
+ }
+ for (id, pat) in body.pats.iter() {
+ if let Some((variant_def, missed_fields, true)) =
+ record_pattern_missing_fields(db, &self.infer, id, pat)
+ {
+ self.create_record_pattern_missing_fields_diagnostic(
+ id,
+ db,
+ variant_def,
+ missed_fields,
+ );
+ }
+ }
+ let body_expr = &body[body.body_expr];
+ if let Expr::Block { tail: Some(t), .. } = body_expr {
+ self.validate_results_in_tail_expr(body.body_expr, *t, db);
+ }
+ }
+
+ fn create_record_literal_missing_fields_diagnostic(
+ &mut self,
+ id: ExprId,
+ db: &dyn HirDatabase,
+ variant_def: VariantId,
+ missed_fields: Vec<LocalFieldId>,
+ ) {
+ // XXX: only look at source_map if we do have missing fields
+ let (_, source_map) = db.body_with_source_map(self.owner.into());
+
+ if let Ok(source_ptr) = source_map.expr_syntax(id) {
+ let root = source_ptr.file_syntax(db.upcast());
+ if let ast::Expr::RecordExpr(record_expr) = &source_ptr.value.to_node(&root) {
+ if let Some(_) = record_expr.record_expr_field_list() {
+ let variant_data = variant_data(db.upcast(), variant_def);
+ let missed_fields = missed_fields
+ .into_iter()
+ .map(|idx| variant_data.fields()[idx].name.clone())
+ .collect();
+ self.sink.push(MissingFields {
+ file: source_ptr.file_id,
+ field_list_parent: AstPtr::new(&record_expr),
+ field_list_parent_path: record_expr.path().map(|path| AstPtr::new(&path)),
+ missed_fields,
+ })
+ }
+ }
+ }
+ }
+
+ fn create_record_pattern_missing_fields_diagnostic(
+ &mut self,
+ id: PatId,
+ db: &dyn HirDatabase,
+ variant_def: VariantId,
+ missed_fields: Vec<LocalFieldId>,
+ ) {
+ // XXX: only look at source_map if we do have missing fields
+ let (_, source_map) = db.body_with_source_map(self.owner.into());
+
+ if let Ok(source_ptr) = source_map.pat_syntax(id) {
+ if let Some(expr) = source_ptr.value.as_ref().left() {
+ let root = source_ptr.file_syntax(db.upcast());
+ if let ast::Pat::RecordPat(record_pat) = expr.to_node(&root) {
+ if let Some(_) = record_pat.record_pat_field_list() {
+ let variant_data = variant_data(db.upcast(), variant_def);
+ let missed_fields = missed_fields
+ .into_iter()
+ .map(|idx| variant_data.fields()[idx].name.clone())
+ .collect();
+ self.sink.push(MissingPatFields {
+ file: source_ptr.file_id,
+ field_list_parent: AstPtr::new(&record_pat),
+ field_list_parent_path: record_pat
+ .path()
+ .map(|path| AstPtr::new(&path)),
+ missed_fields,
+ })
+ }
+ }
+ }
+ }
+ }
+
+ fn validate_call(&mut self, db: &dyn HirDatabase, call_id: ExprId, expr: &Expr) -> Option<()> {
+ // Check that the number of arguments matches the number of parameters.
+
+ // FIXME: Due to shortcomings in the current type system implementation, only emit this
+ // diagnostic if there are no type mismatches in the containing function.
+ if self.infer.type_mismatches.iter().next().is_some() {
+ return Some(());
+ }
+
+ let is_method_call = matches!(expr, Expr::MethodCall { .. });
+ let (sig, args) = match expr {
+ Expr::Call { callee, args } => {
+ let callee = &self.infer.type_of_expr[*callee];
+ let sig = callee.callable_sig(db)?;
+ (sig, args.clone())
+ }
+ Expr::MethodCall { receiver, args, .. } => {
+ let mut args = args.clone();
+ args.insert(0, *receiver);
+
+ // FIXME: note that we erase information about substs here. This
+ // is not right, but, luckily, doesn't matter as we care only
+ // about the number of params
+ let callee = self.infer.method_resolution(call_id)?;
+ let sig = db.callable_item_signature(callee.into()).value;
+
+ (sig, args)
+ }
+ _ => return None,
+ };
+
+ if sig.is_varargs {
+ return None;
+ }
+
+ let params = sig.params();
+
+ let mut param_count = params.len();
+ let mut arg_count = args.len();
+
+ if arg_count != param_count {
+ let (_, source_map) = db.body_with_source_map(self.owner.into());
+ if let Ok(source_ptr) = source_map.expr_syntax(call_id) {
+ if is_method_call {
+ param_count -= 1;
+ arg_count -= 1;
+ }
+ self.sink.push(MismatchedArgCount {
+ file: source_ptr.file_id,
+ call_expr: source_ptr.value,
+ expected: param_count,
+ found: arg_count,
+ });
+ }
+ }
+
+ None
+ }
+
+ fn validate_match(
+ &mut self,
+ id: ExprId,
+ match_expr: ExprId,
+ arms: &[MatchArm],
+ db: &dyn HirDatabase,
+ infer: Arc<InferenceResult>,
+ ) {
+ let (body, source_map): (Arc<Body>, Arc<BodySourceMap>) =
+ db.body_with_source_map(self.owner.into());
+
+ let match_expr_ty = match infer.type_of_expr.get(match_expr) {
+ Some(ty) => ty,
+ // If we can't resolve the type of the match expression
+ // we cannot perform exhaustiveness checks.
+ None => return,
+ };
+
+ let cx = MatchCheckCtx { match_expr, body, infer: infer.clone(), db };
+ let pats = arms.iter().map(|arm| arm.pat);
+
+ let mut seen = Matrix::empty();
+ for pat in pats {
+ if let Some(pat_ty) = infer.type_of_pat.get(pat) {
+ // We only include patterns whose type matches the type
+                // of the match expression. If we had an InvalidMatchArmPattern
+ // diagnostic or similar we could raise that in an else
+ // block here.
+ //
+ // When comparing the types, we also have to consider that rustc
+ // will automatically de-reference the match expression type if
+ // necessary.
+ //
+ // FIXME we should use the type checker for this.
+ if pat_ty == match_expr_ty
+ || match_expr_ty
+ .as_reference()
+ .map(|(match_expr_ty, _)| match_expr_ty == pat_ty)
+ .unwrap_or(false)
+ {
+ // If we had a NotUsefulMatchArm diagnostic, we could
+ // check the usefulness of each pattern as we added it
+ // to the matrix here.
+ let v = PatStack::from_pattern(pat);
+ seen.push(&cx, v);
+ continue;
+ }
+ }
+
+ // If we can't resolve the type of a pattern, or the pattern type doesn't
+ // fit the match expression, we skip this diagnostic. Skipping the entire
+ // diagnostic rather than just not including this match arm is preferred
+ // to avoid the chance of false positives.
+ return;
+ }
+
+ match is_useful(&cx, &seen, &PatStack::from_wild()) {
+ Ok(Usefulness::Useful) => (),
+ // if a wildcard pattern is not useful, then all patterns are covered
+ Ok(Usefulness::NotUseful) => return,
+ // this path is for unimplemented checks, so we err on the side of not
+ // reporting any errors
+ _ => return,
+ }
+
+ if let Ok(source_ptr) = source_map.expr_syntax(id) {
+ let root = source_ptr.file_syntax(db.upcast());
+ if let ast::Expr::MatchExpr(match_expr) = &source_ptr.value.to_node(&root) {
+ if let (Some(match_expr), Some(arms)) =
+ (match_expr.expr(), match_expr.match_arm_list())
+ {
+ self.sink.push(MissingMatchArms {
+ file: source_ptr.file_id,
+ match_expr: AstPtr::new(&match_expr),
+ arms: AstPtr::new(&arms),
+ })
+ }
+ }
+ }
+ }
+
+ fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dyn HirDatabase) {
+ // the mismatch will be on the whole block currently
+ let mismatch = match self.infer.type_mismatch_for_expr(body_id) {
+ Some(m) => m,
+ None => return,
+ };
+
+ let core_result_path = path![core::result::Result];
+
+ let resolver = self.owner.resolver(db.upcast());
+ let core_result_enum = match resolver.resolve_known_enum(db.upcast(), &core_result_path) {
+ Some(it) => it,
+ _ => return,
+ };
+
+ let core_result_ctor = TypeCtor::Adt(AdtId::EnumId(core_result_enum));
+ let params = match &mismatch.expected {
+ Ty::Apply(ApplicationTy { ctor, parameters }) if ctor == &core_result_ctor => {
+ parameters
+ }
+ _ => return,
+ };
+
+ if params.len() == 2 && params[0] == mismatch.actual {
+ let (_, source_map) = db.body_with_source_map(self.owner.into());
+
+ if let Ok(source_ptr) = source_map.expr_syntax(id) {
+ self.sink
+ .push(MissingOkInTailExpr { file: source_ptr.file_id, expr: source_ptr.value });
+ }
+ }
+ }
+}
+
+/// Given a record literal expression, returns the resolved variant together
+/// with the fields the literal fails to mention.
+///
+/// Returns `None` when the expression is not a record literal, when the
+/// variant cannot be resolved or is a union, or when no fields are missing.
+/// The final `bool` is `true` when the literal is exhaustive, i.e. has no
+/// `..spread` tail.
+pub fn record_literal_missing_fields(
+    db: &dyn HirDatabase,
+    infer: &InferenceResult,
+    id: ExprId,
+    expr: &Expr,
+) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
+    // Fix: local was previously misspelled `exhausitve`; renamed to match
+    // the spelling used by `record_pattern_missing_fields`.
+    let (fields, exhaustive) = match expr {
+        Expr::RecordLit { path: _, fields, spread } => (fields, spread.is_none()),
+        _ => return None,
+    };
+
+    let variant_def = infer.variant_resolution_for_expr(id)?;
+    // Union "literals" initialize exactly one field; missing-field diagnostics
+    // do not apply to them.
+    if let VariantId::UnionId(_) = variant_def {
+        return None;
+    }
+
+    let variant_data = variant_data(db.upcast(), variant_def);
+
+    let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+    let missed_fields: Vec<LocalFieldId> = variant_data
+        .fields()
+        .iter()
+        .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
+        .collect();
+    if missed_fields.is_empty() {
+        return None;
+    }
+    Some((variant_def, missed_fields, exhaustive))
+}
+
+/// Given a record pattern, returns the resolved variant together with the
+/// fields the pattern fails to mention.
+///
+/// Returns `None` when the pattern is not a record pattern, when the variant
+/// cannot be resolved or is a union, or when no fields are missing. The final
+/// `bool` is `true` when the pattern is exhaustive, i.e. has no `..` ellipsis.
+pub fn record_pattern_missing_fields(
+    db: &dyn HirDatabase,
+    infer: &InferenceResult,
+    id: PatId,
+    pat: &Pat,
+) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
+    let (fields, exhaustive) = match pat {
+        Pat::Record { path: _, args, ellipsis } => (args, !ellipsis),
+        _ => return None,
+    };
+
+    let variant_def = infer.variant_resolution_for_pat(id)?;
+    // Union patterns bind exactly one field; missing-field diagnostics
+    // do not apply to them.
+    if let VariantId::UnionId(_) = variant_def {
+        return None;
+    }
+
+    let variant_data = variant_data(db.upcast(), variant_def);
+
+    let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+    let missed_fields: Vec<LocalFieldId> = variant_data
+        .fields()
+        .iter()
+        .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
+        .collect();
+    if missed_fields.is_empty() {
+        return None;
+    }
+    Some((variant_def, missed_fields, exhaustive))
+}
+
+// Tests for the `Expected N arguments, found M` diagnostics produced above.
+// The `//^^^` markers in each fixture pin the expected diagnostic span.
+#[cfg(test)]
+mod tests {
+    use crate::diagnostics::tests::check_diagnostics;
+
+    #[test]
+    fn simple_free_fn_zero() {
+        check_diagnostics(
+            r#"
+fn zero() {}
+fn f() { zero(1); }
+       //^^^^^^^ Expected 0 arguments, found 1
+"#,
+        );
+
+        check_diagnostics(
+            r#"
+fn zero() {}
+fn f() { zero(); }
+"#,
+        );
+    }
+
+    #[test]
+    fn simple_free_fn_one() {
+        check_diagnostics(
+            r#"
+fn one(arg: u8) {}
+fn f() { one(); }
+       //^^^^^ Expected 1 argument, found 0
+"#,
+        );
+
+        check_diagnostics(
+            r#"
+fn one(arg: u8) {}
+fn f() { one(1); }
+"#,
+        );
+    }
+
+    #[test]
+    fn method_as_fn() {
+        check_diagnostics(
+            r#"
+struct S;
+impl S { fn method(&self) {} }
+
+fn f() {
+    S::method();
+} //^^^^^^^^^^^ Expected 1 argument, found 0
+"#,
+        );
+
+        check_diagnostics(
+            r#"
+struct S;
+impl S { fn method(&self) {} }
+
+fn f() {
+    S::method(&S);
+    S.method();
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn method_with_arg() {
+        check_diagnostics(
+            r#"
+struct S;
+impl S { fn method(&self, arg: u8) {} }
+
+    fn f() {
+        S.method();
+    } //^^^^^^^^^^ Expected 1 argument, found 0
+    "#,
+        );
+
+        check_diagnostics(
+            r#"
+struct S;
+impl S { fn method(&self, arg: u8) {} }
+
+fn f() {
+    S::method(&S, 0);
+    S.method(1);
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn tuple_struct() {
+        check_diagnostics(
+            r#"
+struct Tup(u8, u16);
+fn f() {
+    Tup(0);
+} //^^^^^^ Expected 2 arguments, found 1
+"#,
+        )
+    }
+
+    #[test]
+    fn enum_variant() {
+        check_diagnostics(
+            r#"
+enum En { Variant(u8, u16), }
+fn f() {
+    En::Variant(0);
+} //^^^^^^^^^^^^^^ Expected 2 arguments, found 1
+"#,
+        )
+    }
+
+    #[test]
+    fn enum_variant_type_macro() {
+        check_diagnostics(
+            r#"
+macro_rules! Type {
+    () => { u32 };
+}
+enum Foo {
+    Bar(Type![])
+}
+impl Foo {
+    fn new() {
+        Foo::Bar(0);
+        Foo::Bar(0, 1);
+      //^^^^^^^^^^^^^^ Expected 1 argument, found 2
+        Foo::Bar();
+      //^^^^^^^^^^ Expected 1 argument, found 0
+    }
+}
+        "#,
+        );
+    }
+
+    #[test]
+    fn varargs() {
+        check_diagnostics(
+            r#"
+extern "C" {
+    fn fixed(fixed: u8);
+    fn varargs(fixed: u8, ...);
+    fn varargs2(...);
+}
+
+fn f() {
+    unsafe {
+        fixed(0);
+        fixed(0, 1);
+      //^^^^^^^^^^^ Expected 1 argument, found 2
+        varargs(0);
+        varargs(0, 1);
+        varargs2();
+        varargs2(0);
+        varargs2(0, 1);
+    }
+}
+        "#,
+        )
+    }
+
+    #[test]
+    fn arg_count_lambda() {
+        check_diagnostics(
+            r#"
+fn main() {
+    let f = |()| ();
+    f();
+  //^^^ Expected 1 argument, found 0
+    f(());
+    f((), ());
+  //^^^^^^^^^ Expected 1 argument, found 2
+}
+"#,
+        )
+    }
+}
--- /dev/null
+//! This module implements match statement exhaustiveness checking and usefulness checking
+//! for match arms.
+//!
+//! It is modeled on the rustc module `librustc_mir_build::hair::pattern::_match`, which
+//! contains very detailed documentation about the algorithms used here. I've duplicated
+//! most of that documentation below.
+//!
+//! This file includes the logic for exhaustiveness and usefulness checking for
+//! pattern-matching. Specifically, given a list of patterns for a type, we can
+//! tell whether:
+//! - (a) the patterns cover every possible constructor for the type (exhaustiveness).
+//! - (b) each pattern is necessary (usefulness).
+//!
+//! The algorithm implemented here is a modified version of the one described in
+//! <http://moscova.inria.fr/~maranget/papers/warn/index.html>.
+//! However, to save future implementors from reading the original paper, we
+//! summarise the algorithm here to hopefully save time and be a little clearer
+//! (without being so rigorous).
+//!
+//! The core of the algorithm revolves about a "usefulness" check. In particular, we
+//! are trying to compute a predicate `U(P, p)` where `P` is a list of patterns (we refer to this as
+//! a matrix). `U(P, p)` represents whether, given an existing list of patterns
+//! `P_1 ..= P_m`, adding a new pattern `p` will be "useful" (that is, cover previously-
+//! uncovered values of the type).
+//!
+//! If we have this predicate, then we can easily compute both exhaustiveness of an
+//! entire set of patterns and the individual usefulness of each one.
+//! (a) the set of patterns is exhaustive iff `U(P, _)` is false (i.e., adding a wildcard
+//! match doesn't increase the number of values we're matching)
+//! (b) a pattern `P_i` is not useful if `U(P[0..=(i-1)], P_i)` is false (i.e., adding a
+//! pattern to those that have come before it doesn't increase the number of values
+//! we're matching).
+//!
+//! During the course of the algorithm, the rows of the matrix won't just be individual patterns,
+//! but rather partially-deconstructed patterns in the form of a list of patterns. The paper
+//! calls those pattern-vectors, and we will call them pattern-stacks. The same holds for the
+//! new pattern `p`.
+//!
+//! For example, say we have the following:
+//!
+//! ```ignore
+//! // x: (Option<bool>, Result<()>)
+//! match x {
+//! (Some(true), _) => (),
+//! (None, Err(())) => (),
+//! (None, Err(_)) => (),
+//! }
+//! ```
+//!
+//! Here, the matrix `P` starts as:
+//!
+//! ```text
+//! [
+//! [(Some(true), _)],
+//! [(None, Err(()))],
+//! [(None, Err(_))],
+//! ]
+//! ```
+//!
+//! We can tell it's not exhaustive, because `U(P, _)` is true (we're not covering
+//! `[(Some(false), _)]`, for instance). In addition, row 3 is not useful, because
+//! all the values it covers are already covered by row 2.
+//!
+//! A list of patterns can be thought of as a stack, because we are mainly interested in the top of
+//! the stack at any given point, and we can pop or apply constructors to get new pattern-stacks.
+//! To match the paper, the top of the stack is at the beginning / on the left.
+//!
+//! There are two important operations on pattern-stacks necessary to understand the algorithm:
+//!
+//! 1. We can pop a given constructor off the top of a stack. This operation is called
+//! `specialize`, and is denoted `S(c, p)` where `c` is a constructor (like `Some` or
+//! `None`) and `p` a pattern-stack.
+//! If the pattern on top of the stack can cover `c`, this removes the constructor and
+//! pushes its arguments onto the stack. It also expands OR-patterns into distinct patterns.
+//! Otherwise the pattern-stack is discarded.
+//! This essentially filters those pattern-stacks whose top covers the constructor `c` and
+//! discards the others.
+//!
+//! For example, the first pattern above initially gives a stack `[(Some(true), _)]`. If we
+//! pop the tuple constructor, we are left with `[Some(true), _]`, and if we then pop the
+//! `Some` constructor we get `[true, _]`. If we had popped `None` instead, we would get
+//! nothing back.
+//!
+//! This returns zero or more new pattern-stacks, as follows. We look at the pattern `p_1`
+//! on top of the stack, and we have four cases:
+//!
+//! * 1.1. `p_1 = c(r_1, .., r_a)`, i.e. the top of the stack has constructor `c`. We push onto
+//! the stack the arguments of this constructor, and return the result:
+//!
+//! r_1, .., r_a, p_2, .., p_n
+//!
+//! * 1.2. `p_1 = c'(r_1, .., r_a')` where `c ≠ c'`. We discard the current stack and return
+//! nothing.
+//! * 1.3. `p_1 = _`. We push onto the stack as many wildcards as the constructor `c` has
+//! arguments (its arity), and return the resulting stack:
+//!
+//! _, .., _, p_2, .., p_n
+//!
+//! * 1.4. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting stack:
+//!
+//! S(c, (r_1, p_2, .., p_n))
+//! S(c, (r_2, p_2, .., p_n))
+//!
+//! 2. We can pop a wildcard off the top of the stack. This is called `D(p)`, where `p` is
+//! a pattern-stack.
+//! This is used when we know there are missing constructor cases, but there might be
+//! existing wildcard patterns, so to check the usefulness of the matrix, we have to check
+//! all its *other* components.
+//!
+//! It is computed as follows. We look at the pattern `p_1` on top of the stack,
+//! and we have three cases:
+//! * 2.1. `p_1 = c(r_1, .., r_a)`. We discard the current stack and return nothing.
+//! * 2.2. `p_1 = _`. We return the rest of the stack:
+//!
+//! p_2, .., p_n
+//!
+//! * 2.3. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting stack:
+//!
+//! D((r_1, p_2, .., p_n))
+//! D((r_2, p_2, .., p_n))
+//!
+//! Note that the OR-patterns are not always used directly in Rust, but are used to derive the
+//! exhaustive integer matching rules, so they're written here for posterity.
+//!
+//! Both those operations extend straightforwardly to a list or pattern-stacks, i.e. a matrix, by
+//! working row-by-row. Popping a constructor ends up keeping only the matrix rows that start with
+//! the given constructor, and popping a wildcard keeps those rows that start with a wildcard.
+//!
+//!
+//! The algorithm for computing `U`
+//! -------------------------------
+//! The algorithm is inductive (on the number of columns: i.e., components of tuple patterns).
+//! That means we're going to check the components from left-to-right, so the algorithm
+//! operates principally on the first component of the matrix and new pattern-stack `p`.
+//! This algorithm is realised in the `is_useful` function.
+//!
+//! Base case (`n = 0`, i.e., an empty tuple pattern):
+//! - If `P` already contains an empty pattern (i.e., if the number of patterns `m > 0`), then
+//! `U(P, p)` is false.
+//! - Otherwise, `P` must be empty, so `U(P, p)` is true.
+//!
+//! Inductive step (`n > 0`, i.e., whether there's at least one column [which may then be expanded
+//! into further columns later]). We're going to match on the top of the new pattern-stack, `p_1`:
+//!
+//! - If `p_1 == c(r_1, .., r_a)`, i.e. we have a constructor pattern.
+//! Then, the usefulness of `p_1` can be reduced to whether it is useful when
+//! we ignore all the patterns in the first column of `P` that involve other constructors.
+//! This is where `S(c, P)` comes in:
+//!
+//! ```text
+//! U(P, p) := U(S(c, P), S(c, p))
+//! ```
+//!
+//! This special case is handled in `is_useful_specialized`.
+//!
+//! For example, if `P` is:
+//!
+//! ```text
+//! [
+//! [Some(true), _],
+//! [None, 0],
+//! ]
+//! ```
+//!
+//! and `p` is `[Some(false), 0]`, then we don't care about row 2 since we know `p` only
+//! matches values that row 2 doesn't. For row 1 however, we need to dig into the
+//! arguments of `Some` to know whether some new value is covered. So we compute
+//! `U([[true, _]], [false, 0])`.
+//!
+//! - If `p_1 == _`, then we look at the list of constructors that appear in the first component of
+//! the rows of `P`:
+//! - If there are some constructors that aren't present, then we might think that the
+//! wildcard `_` is useful, since it covers those constructors that weren't covered
+//! before.
+//! That's almost correct, but only works if there were no wildcards in those first
+//! components. So we need to check that `p` is useful with respect to the rows that
+//! start with a wildcard, if there are any. This is where `D` comes in:
+//! `U(P, p) := U(D(P), D(p))`
+//!
+//! For example, if `P` is:
+//! ```text
+//! [
+//! [_, true, _],
+//! [None, false, 1],
+//! ]
+//! ```
+//! and `p` is `[_, false, _]`, the `Some` constructor doesn't appear in `P`. So if we
+//! only had row 2, we'd know that `p` is useful. However row 1 starts with a
+//! wildcard, so we need to check whether `U([[true, _]], [false, 1])`.
+//!
+//! - Otherwise, all possible constructors (for the relevant type) are present. In this
+//! case we must check whether the wildcard pattern covers any unmatched value. For
+//! that, we can think of the `_` pattern as a big OR-pattern that covers all
+//! possible constructors. For `Option`, that would mean `_ = None | Some(_)` for
+//! example. The wildcard pattern is useful in this case if it is useful when
+//! specialized to one of the possible constructors. So we compute:
+//! `U(P, p) := ∃(k ϵ constructors) U(S(k, P), S(k, p))`
+//!
+//! For example, if `P` is:
+//! ```text
+//! [
+//! [Some(true), _],
+//! [None, false],
+//! ]
+//! ```
+//! and `p` is `[_, false]`, both `None` and `Some` constructors appear in the first
+//! components of `P`. We will therefore try popping both constructors in turn: we
+//! compute `U([[true, _]], [_, false])` for the `Some` constructor, and `U([[false]],
+//! [false])` for the `None` constructor. The first case returns true, so we know that
+//! `p` is useful for `P`. Indeed, it matches `[Some(false), _]` that wasn't matched
+//! before.
+//!
+//! - If `p_1 == r_1 | r_2`, then the usefulness depends on each `r_i` separately:
+//!
+//! ```text
+//! U(P, p) := U(P, (r_1, p_2, .., p_n))
+//! || U(P, (r_2, p_2, .., p_n))
+//! ```
+use std::sync::Arc;
+
+use arena::Idx;
+use hir_def::{
+ adt::VariantData,
+ body::Body,
+ expr::{Expr, Literal, Pat, PatId},
+ AdtId, EnumVariantId, VariantId,
+};
+use smallvec::{smallvec, SmallVec};
+
+use crate::{db::HirDatabase, ApplicationTy, InferenceResult, Ty, TypeCtor};
+
+#[derive(Debug, Clone, Copy)]
+/// Either a pattern from the source code being analyzed, represented as
+/// a `PatId`, or a `Wild` pattern which is created as an intermediate
+/// step in the match checking algorithm and thus is not backed by a
+/// real `PatId`.
+///
+/// Note that it is totally valid for the `PatId` variant to contain
+/// a `PatId` which resolves to a `Wild` pattern, if that wild pattern
+/// exists in the source code being analyzed.
+enum PatIdOrWild {
+    PatId(PatId),
+    Wild,
+}
+
+impl PatIdOrWild {
+    /// Resolves to the underlying `Pat` (cloned out of the body); the
+    /// synthetic `Wild` variant becomes `Pat::Wild`.
+    fn as_pat(self, cx: &MatchCheckCtx) -> Pat {
+        match self {
+            PatIdOrWild::PatId(id) => cx.body.pats[id].clone(),
+            PatIdOrWild::Wild => Pat::Wild,
+        }
+    }
+
+    /// Returns the backing `PatId`, or `None` for a synthetic wildcard.
+    fn as_id(self) -> Option<PatId> {
+        match self {
+            PatIdOrWild::PatId(id) => Some(id),
+            PatIdOrWild::Wild => None,
+        }
+    }
+}
+
+impl From<PatId> for PatIdOrWild {
+    fn from(pat_id: PatId) -> Self {
+        Self::PatId(pat_id)
+    }
+}
+
+impl From<&PatId> for PatIdOrWild {
+    fn from(pat_id: &PatId) -> Self {
+        Self::PatId(*pat_id)
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub(super) enum MatchCheckErr {
+    /// The pattern or type is of a kind the checker cannot analyze yet.
+    NotImplemented,
+    /// The arm's sub-pattern count cannot be reconciled with the
+    /// constructor's arity (see `PatStack::specialize_constructor`).
+    MalformedMatchArm,
+    /// Used when type inference cannot resolve the type of
+    /// a pattern or expression.
+    Unknown,
+}
+
+/// The return type of `is_useful` is either an indication of usefulness
+/// of the match arm, or an error in the case the match statement
+/// is made up of types for which exhaustiveness checking is currently
+/// not completely implemented.
+///
+/// The `std::result::Result` type is used here rather than a custom enum
+/// to allow the use of `?`.
+pub(super) type MatchCheckResult<T> = Result<T, MatchCheckErr>;
+
+#[derive(Debug)]
+/// A row in a Matrix.
+///
+/// This type is modeled from the struct of the same name in `rustc`.
+pub(super) struct PatStack(PatStackInner);
+// Rows are usually short, so up to two entries are stored inline.
+type PatStackInner = SmallVec<[PatIdOrWild; 2]>;
+
+impl PatStack {
+    /// Creates a one-element stack from a source pattern.
+    pub(super) fn from_pattern(pat_id: PatId) -> PatStack {
+        Self(smallvec!(pat_id.into()))
+    }
+
+    /// Creates a one-element stack containing only a synthetic wildcard.
+    pub(super) fn from_wild() -> PatStack {
+        Self(smallvec!(PatIdOrWild::Wild))
+    }
+
+    fn from_slice(slice: &[PatIdOrWild]) -> PatStack {
+        Self(SmallVec::from_slice(slice))
+    }
+
+    fn from_vec(v: PatStackInner) -> PatStack {
+        Self(v)
+    }
+
+    /// The top of the stack (its first element), if any.
+    fn get_head(&self) -> Option<PatIdOrWild> {
+        self.0.first().copied()
+    }
+
+    /// Everything below the head; empty when the stack has at most one element.
+    fn tail(&self) -> &[PatIdOrWild] {
+        self.0.get(1..).unwrap_or(&[])
+    }
+
+    fn to_tail(&self) -> PatStack {
+        Self::from_slice(self.tail())
+    }
+
+    /// Returns a new stack with the head replaced by `pats` (used when popping
+    /// a constructor pushes that constructor's arguments onto the stack).
+    fn replace_head_with<I, T>(&self, pats: I) -> PatStack
+    where
+        I: Iterator<Item = T>,
+        T: Into<PatIdOrWild>,
+    {
+        let mut patterns: PatStackInner = smallvec![];
+        for pat in pats {
+            patterns.push(pat.into());
+        }
+        for pat in &self.0[1..] {
+            patterns.push(*pat);
+        }
+        PatStack::from_vec(patterns)
+    }
+
+    /// Computes `D(self)`.
+    ///
+    /// See the module docs and the associated documentation in rustc for details.
+    fn specialize_wildcard(&self, cx: &MatchCheckCtx) -> Option<PatStack> {
+        if matches!(self.get_head()?.as_pat(cx), Pat::Wild) {
+            Some(self.to_tail())
+        } else {
+            None
+        }
+    }
+
+    /// Computes `S(constructor, self)`.
+    ///
+    /// See the module docs and the associated documentation in rustc for details.
+    ///
+    /// Returns `Ok(None)` when the head does not cover `constructor` (the row
+    /// is discarded), `Ok(Some(..))` with the specialized stack otherwise.
+    fn specialize_constructor(
+        &self,
+        cx: &MatchCheckCtx,
+        constructor: &Constructor,
+    ) -> MatchCheckResult<Option<PatStack>> {
+        let head = match self.get_head() {
+            Some(head) => head,
+            None => return Ok(None),
+        };
+
+        let head_pat = head.as_pat(cx);
+        let result = match (head_pat, constructor) {
+            (Pat::Tuple { args: ref pat_ids, ellipsis }, Constructor::Tuple { arity: _ }) => {
+                if ellipsis.is_some() {
+                    // If there are ellipsis here, we should add the correct number of
+                    // Pat::Wild patterns to `pat_ids`. We should be able to use the
+                    // constructors arity for this, but at the time of writing we aren't
+                    // correctly calculating this arity when ellipsis are present.
+                    return Err(MatchCheckErr::NotImplemented);
+                }
+
+                Some(self.replace_head_with(pat_ids.iter()))
+            }
+            (Pat::Lit(lit_expr), Constructor::Bool(constructor_val)) => {
+                match cx.body.exprs[lit_expr] {
+                    Expr::Literal(Literal::Bool(pat_val)) if *constructor_val == pat_val => {
+                        Some(self.to_tail())
+                    }
+                    // it was a bool but the value doesn't match
+                    Expr::Literal(Literal::Bool(_)) => None,
+                    // perhaps this is actually unreachable given we have
+                    // already checked that these match arms have the appropriate type?
+                    _ => return Err(MatchCheckErr::NotImplemented),
+                }
+            }
+            (Pat::Wild, constructor) => Some(self.expand_wildcard(cx, constructor)?),
+            (Pat::Path(_), Constructor::Enum(constructor)) => {
+                // unit enum variants become `Pat::Path`
+                let pat_id = head.as_id().expect("we know this isn't a wild");
+                if !enum_variant_matches(cx, pat_id, *constructor) {
+                    None
+                } else {
+                    Some(self.to_tail())
+                }
+            }
+            (
+                Pat::TupleStruct { args: ref pat_ids, ellipsis, .. },
+                Constructor::Enum(enum_constructor),
+            ) => {
+                let pat_id = head.as_id().expect("we know this isn't a wild");
+                if !enum_variant_matches(cx, pat_id, *enum_constructor) {
+                    None
+                } else {
+                    let constructor_arity = constructor.arity(cx)?;
+                    if let Some(ellipsis_position) = ellipsis {
+                        // If there are ellipsis in the pattern, the ellipsis must take the place
+                        // of at least one sub-pattern, so `pat_ids` should be smaller than the
+                        // constructor arity.
+                        if pat_ids.len() < constructor_arity {
+                            let mut new_patterns: Vec<PatIdOrWild> = vec![];
+
+                            // Sub-patterns before the ellipsis bind from the front...
+                            for pat_id in &pat_ids[0..ellipsis_position] {
+                                new_patterns.push((*pat_id).into());
+                            }
+
+                            // ...the ellipsis itself stands for the skipped positions...
+                            for _ in 0..(constructor_arity - pat_ids.len()) {
+                                new_patterns.push(PatIdOrWild::Wild);
+                            }
+
+                            // ...and the remaining sub-patterns bind from the back.
+                            for pat_id in &pat_ids[ellipsis_position..pat_ids.len()] {
+                                new_patterns.push((*pat_id).into());
+                            }
+
+                            Some(self.replace_head_with(new_patterns.into_iter()))
+                        } else {
+                            return Err(MatchCheckErr::MalformedMatchArm);
+                        }
+                    } else {
+                        // If there is no ellipsis in the tuple pattern, the number
+                        // of patterns must equal the constructor arity.
+                        if pat_ids.len() == constructor_arity {
+                            Some(self.replace_head_with(pat_ids.into_iter()))
+                        } else {
+                            return Err(MatchCheckErr::MalformedMatchArm);
+                        }
+                    }
+                }
+            }
+            (Pat::Record { args: ref arg_patterns, .. }, Constructor::Enum(e)) => {
+                let pat_id = head.as_id().expect("we know this isn't a wild");
+                if !enum_variant_matches(cx, pat_id, *e) {
+                    None
+                } else {
+                    match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() {
+                        VariantData::Record(struct_field_arena) => {
+                            // Here we treat any missing fields in the record as the wild pattern, as
+                            // if the record has ellipsis. We want to do this here even if the
+                            // record does not contain ellipsis, because it allows us to continue
+                            // enforcing exhaustiveness for the rest of the match statement.
+                            //
+                            // Creating the diagnostic for the missing field in the pattern
+                            // should be done in a different diagnostic.
+                            let patterns = struct_field_arena.iter().map(|(_, struct_field)| {
+                                arg_patterns
+                                    .iter()
+                                    .find(|pat| pat.name == struct_field.name)
+                                    .map(|pat| PatIdOrWild::from(pat.pat))
+                                    .unwrap_or(PatIdOrWild::Wild)
+                            });
+
+                            Some(self.replace_head_with(patterns))
+                        }
+                        _ => return Err(MatchCheckErr::Unknown),
+                    }
+                }
+            }
+            (Pat::Or(_), _) => return Err(MatchCheckErr::NotImplemented),
+            (_, _) => return Err(MatchCheckErr::NotImplemented),
+        };
+
+        Ok(result)
+    }
+
+    /// A special case of `specialize_constructor` where the head of the pattern stack
+    /// is a Wild pattern.
+    ///
+    /// Replaces the Wild pattern at the head of the pattern stack with N Wild patterns
+    /// (N >= 0), where N is the arity of the given constructor.
+    fn expand_wildcard(
+        &self,
+        cx: &MatchCheckCtx,
+        constructor: &Constructor,
+    ) -> MatchCheckResult<PatStack> {
+        assert_eq!(
+            Pat::Wild,
+            self.get_head().expect("expand_wildcard called on empty PatStack").as_pat(cx),
+            "expand_wildcard must only be called on PatStack with wild at head",
+        );
+
+        let mut patterns: PatStackInner = smallvec![];
+
+        for _ in 0..constructor.arity(cx)? {
+            patterns.push(PatIdOrWild::Wild);
+        }
+
+        for pat in &self.0[1..] {
+            patterns.push(*pat);
+        }
+
+        Ok(PatStack::from_vec(patterns))
+    }
+}
+
+/// A collection of PatStack.
+///
+/// This type is modeled from the struct of the same name in `rustc`.
+pub(super) struct Matrix(Vec<PatStack>);
+
+impl Matrix {
+    pub(super) fn empty() -> Self {
+        Self(vec![])
+    }
+
+    /// Appends a row, expanding a top-level or-pattern into one row per
+    /// alternative so later steps never see `Pat::Or` at the head of a row.
+    pub(super) fn push(&mut self, cx: &MatchCheckCtx, row: PatStack) {
+        if let Some(Pat::Or(pat_ids)) = row.get_head().map(|pat_id| pat_id.as_pat(cx)) {
+            // Or patterns are expanded here
+            for pat_id in pat_ids {
+                self.0.push(PatStack::from_pattern(pat_id));
+            }
+        } else {
+            self.0.push(row);
+        }
+    }
+
+    fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+
+    /// The first pattern of each row, skipping rows that are empty.
+    fn heads(&self) -> Vec<PatIdOrWild> {
+        self.0.iter().flat_map(|p| p.get_head()).collect()
+    }
+
+    /// Computes `D(self)` for each contained PatStack.
+    ///
+    /// See the module docs and the associated documentation in rustc for details.
+    fn specialize_wildcard(&self, cx: &MatchCheckCtx) -> Self {
+        Self::collect(cx, self.0.iter().filter_map(|r| r.specialize_wildcard(cx)))
+    }
+
+    /// Computes `S(constructor, self)` for each contained PatStack.
+    ///
+    /// See the module docs and the associated documentation in rustc for details.
+    fn specialize_constructor(
+        &self,
+        cx: &MatchCheckCtx,
+        constructor: &Constructor,
+    ) -> MatchCheckResult<Self> {
+        let mut new_matrix = Matrix::empty();
+        for pat in &self.0 {
+            if let Some(pat) = pat.specialize_constructor(cx, constructor)? {
+                new_matrix.push(cx, pat);
+            }
+        }
+
+        Ok(new_matrix)
+    }
+
+    fn collect<T: IntoIterator<Item = PatStack>>(cx: &MatchCheckCtx, iter: T) -> Self {
+        let mut matrix = Matrix::empty();
+
+        for pat in iter {
+            // using push ensures we expand or-patterns
+            matrix.push(cx, pat);
+        }
+
+        matrix
+    }
+}
+
+#[derive(Clone, Debug, PartialEq)]
+/// An indication of the usefulness of a given match arm, where
+/// usefulness is defined as matching some patterns which were
+/// not matched by any prior match arms.
+///
+/// We may eventually need an `Unknown` variant here.
+pub(super) enum Usefulness {
+    Useful,
+    NotUseful,
+}
+
+/// Everything the checker needs about one match expression: the expression
+/// itself, the body that owns it, and the inference result for that body.
+pub(super) struct MatchCheckCtx<'a> {
+    pub(super) match_expr: Idx<Expr>,
+    pub(super) body: Arc<Body>,
+    pub(super) infer: Arc<InferenceResult>,
+    pub(super) db: &'a dyn HirDatabase,
+}
+
+/// Given a set of patterns `matrix`, and pattern to consider `v`, determines
+/// whether `v` is useful. A pattern is useful if it covers cases which were
+/// not previously covered.
+///
+/// When calling this function externally (that is, not the recursive calls) it
+/// expected that you have already type checked the match arms. All patterns in
+/// matrix should be the same type as v, as well as they should all be the same
+/// type as the match expression.
+pub(super) fn is_useful(
+    cx: &MatchCheckCtx,
+    matrix: &Matrix,
+    v: &PatStack,
+) -> MatchCheckResult<Usefulness> {
+    // Handle two special cases:
+    // - enum with no variants
+    // - `!` type
+    // In those cases, no match arm is useful.
+    match cx.infer[cx.match_expr].strip_references() {
+        Ty::Apply(ApplicationTy { ctor: TypeCtor::Adt(AdtId::EnumId(enum_id)), .. }) => {
+            if cx.db.enum_data(*enum_id).variants.is_empty() {
+                return Ok(Usefulness::NotUseful);
+            }
+        }
+        Ty::Apply(ApplicationTy { ctor: TypeCtor::Never, .. }) => {
+            return Ok(Usefulness::NotUseful);
+        }
+        _ => (),
+    }
+
+    let head = match v.get_head() {
+        Some(head) => head,
+        None => {
+            // Base case: `v` has no columns left. It is useful iff the matrix
+            // is empty, i.e. no previous row already matched everything.
+            let result = if matrix.is_empty() { Usefulness::Useful } else { Usefulness::NotUseful };
+
+            return Ok(result);
+        }
+    };
+
+    if let Pat::Or(pat_ids) = head.as_pat(cx) {
+        // An or-pattern is useful if any of its alternatives is useful.
+        let mut found_unimplemented = false;
+        let any_useful = pat_ids.iter().any(|&pat_id| {
+            let v = PatStack::from_pattern(pat_id);
+
+            match is_useful(cx, matrix, &v) {
+                Ok(Usefulness::Useful) => true,
+                Ok(Usefulness::NotUseful) => false,
+                _ => {
+                    found_unimplemented = true;
+                    false
+                }
+            }
+        });
+
+        return if any_useful {
+            Ok(Usefulness::Useful)
+        } else if found_unimplemented {
+            Err(MatchCheckErr::NotImplemented)
+        } else {
+            Ok(Usefulness::NotUseful)
+        };
+    }
+
+    if let Some(constructor) = pat_constructor(cx, head)? {
+        // `U(P, p) := U(S(c, P), S(c, p))` — see the module docs.
+        let matrix = matrix.specialize_constructor(&cx, &constructor)?;
+        let v = v
+            .specialize_constructor(&cx, &constructor)?
+            .expect("we know this can't fail because we get the constructor from `v.head()` above");
+
+        is_useful(&cx, &matrix, &v)
+    } else {
+        // expanding wildcard
+        let mut used_constructors: Vec<Constructor> = vec![];
+        for pat in matrix.heads() {
+            if let Some(constructor) = pat_constructor(cx, pat)? {
+                used_constructors.push(constructor);
+            }
+        }
+
+        // We assume here that the first constructor is the "correct" type. Since we
+        // only care about the "type" of the constructor (i.e. if it is a bool we
+        // don't care about the value), this assumption should be valid as long as
+        // the match statement is well formed. We currently uphold this invariant by
+        // filtering match arms before calling `is_useful`, only passing in match arms
+        // whose type matches the type of the match expression.
+        match &used_constructors.first() {
+            Some(constructor) if all_constructors_covered(&cx, constructor, &used_constructors) => {
+                // If all constructors are covered, then we need to consider whether
+                // any values are covered by this wildcard.
+                //
+                // For example, with matrix '[[Some(true)], [None]]', all
+                // constructors are covered (`Some`/`None`), so we need
+                // to perform specialization to see that our wildcard will cover
+                // the `Some(false)` case.
+                //
+                // Here we create a constructor for each variant and then check
+                // usefulness after specializing for that constructor.
+                let mut found_unimplemented = false;
+                for constructor in constructor.all_constructors(cx) {
+                    let matrix = matrix.specialize_constructor(&cx, &constructor)?;
+                    let v = v.expand_wildcard(&cx, &constructor)?;
+
+                    match is_useful(&cx, &matrix, &v) {
+                        Ok(Usefulness::Useful) => return Ok(Usefulness::Useful),
+                        Ok(Usefulness::NotUseful) => continue,
+                        _ => found_unimplemented = true,
+                    };
+                }
+
+                if found_unimplemented {
+                    Err(MatchCheckErr::NotImplemented)
+                } else {
+                    Ok(Usefulness::NotUseful)
+                }
+            }
+            _ => {
+                // Either not all constructors are covered, or the only other arms
+                // are wildcards. Either way, this pattern is useful if it is useful
+                // when compared to those arms with wildcards.
+                let matrix = matrix.specialize_wildcard(&cx);
+                let v = v.to_tail();
+
+                is_useful(&cx, &matrix, &v)
+            }
+        }
+    }
+}
+
+#[derive(Debug, Clone, Copy)]
+/// Similar to TypeCtor, but includes additional information about the specific
+/// value being instantiated. For example, TypeCtor::Bool doesn't contain the
+/// boolean value.
+enum Constructor {
+    Bool(bool),
+    Tuple { arity: usize },
+    Enum(EnumVariantId),
+}
+
+impl Constructor {
+    /// The number of sub-patterns this constructor takes (e.g. the field count
+    /// of the enum variant). Always `Ok` today; the `MatchCheckResult` return
+    /// type leaves room for fallible cases.
+    fn arity(&self, cx: &MatchCheckCtx) -> MatchCheckResult<usize> {
+        let arity = match self {
+            Constructor::Bool(_) => 0,
+            Constructor::Tuple { arity } => *arity,
+            Constructor::Enum(e) => {
+                match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() {
+                    VariantData::Tuple(struct_field_data) => struct_field_data.len(),
+                    VariantData::Record(struct_field_data) => struct_field_data.len(),
+                    VariantData::Unit => 0,
+                }
+            }
+        };
+
+        Ok(arity)
+    }
+
+    /// All constructors of `self`'s type: both boolean values, the single
+    /// tuple constructor, or every variant of the enum.
+    fn all_constructors(&self, cx: &MatchCheckCtx) -> Vec<Constructor> {
+        match self {
+            Constructor::Bool(_) => vec![Constructor::Bool(true), Constructor::Bool(false)],
+            Constructor::Tuple { .. } => vec![*self],
+            Constructor::Enum(e) => cx
+                .db
+                .enum_data(e.parent)
+                .variants
+                .iter()
+                .map(|(local_id, _)| {
+                    Constructor::Enum(EnumVariantId { parent: e.parent, local_id })
+                })
+                .collect(),
+        }
+    }
+}
+
+/// Returns the constructor for the given pattern. Should only return None
+/// in the case of a Wild pattern. Returns `Err` for pattern kinds the
+/// checker does not handle yet (e.g. tuples with ellipsis, non-bool literals).
+fn pat_constructor(cx: &MatchCheckCtx, pat: PatIdOrWild) -> MatchCheckResult<Option<Constructor>> {
+    let res = match pat.as_pat(cx) {
+        Pat::Wild => None,
+        // FIXME somehow create the Tuple constructor with the proper arity. If there are
+        // ellipsis, the arity is not equal to the number of patterns.
+        Pat::Tuple { args: pats, ellipsis } if ellipsis.is_none() => {
+            Some(Constructor::Tuple { arity: pats.len() })
+        }
+        Pat::Lit(lit_expr) => match cx.body.exprs[lit_expr] {
+            Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)),
+            _ => return Err(MatchCheckErr::NotImplemented),
+        },
+        Pat::TupleStruct { .. } | Pat::Path(_) | Pat::Record { .. } => {
+            let pat_id = pat.as_id().expect("we already know this pattern is not a wild");
+            let variant_id =
+                cx.infer.variant_resolution_for_pat(pat_id).ok_or(MatchCheckErr::Unknown)?;
+            match variant_id {
+                VariantId::EnumVariantId(enum_variant_id) => {
+                    Some(Constructor::Enum(enum_variant_id))
+                }
+                _ => return Err(MatchCheckErr::NotImplemented),
+            }
+        }
+        _ => return Err(MatchCheckErr::NotImplemented),
+    };
+
+    Ok(res)
+}
+
+/// Whether `used_constructors` covers every constructor of the type of
+/// `constructor`: a tuple of matching arity, both `true` and `false` for
+/// bools, or every variant of the enum.
+fn all_constructors_covered(
+    cx: &MatchCheckCtx,
+    constructor: &Constructor,
+    used_constructors: &[Constructor],
+) -> bool {
+    match constructor {
+        Constructor::Tuple { arity } => {
+            // A tuple type has a single constructor, so any use of the same
+            // arity covers it.
+            used_constructors.iter().any(|constructor| match constructor {
+                Constructor::Tuple { arity: used_arity } => arity == used_arity,
+                _ => false,
+            })
+        }
+        Constructor::Bool(_) => {
+            if used_constructors.is_empty() {
+                return false;
+            }
+
+            let covers_true =
+                used_constructors.iter().any(|c| matches!(c, Constructor::Bool(true)));
+            let covers_false =
+                used_constructors.iter().any(|c| matches!(c, Constructor::Bool(false)));
+
+            covers_true && covers_false
+        }
+        // Every variant of the enum must appear among the used constructors.
+        Constructor::Enum(e) => cx.db.enum_data(e.parent).variants.iter().all(|(id, _)| {
+            for constructor in used_constructors {
+                if let Constructor::Enum(e) = constructor {
+                    if id == e.local_id {
+                        return true;
+                    }
+                }
+            }
+
+            false
+        }),
+    }
+}
+
+fn enum_variant_matches(cx: &MatchCheckCtx, pat_id: PatId, enum_variant_id: EnumVariantId) -> bool {
+ Some(enum_variant_id.into()) == cx.infer.variant_resolution_for_pat(pat_id)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::diagnostics::tests::check_diagnostics;
+
+ #[test]
+ fn empty_tuple() {
+ check_diagnostics(
+ r#"
+fn main() {
+ match () { }
+ //^^ Missing match arm
+ match (()) { }
+ //^^^^ Missing match arm
+
+ match () { _ => (), }
+ match () { () => (), }
+ match (()) { (()) => (), }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn tuple_of_two_empty_tuple() {
+ check_diagnostics(
+ r#"
+fn main() {
+ match ((), ()) { }
+ //^^^^^^^^ Missing match arm
+
+ match ((), ()) { ((), ()) => (), }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn boolean() {
+ check_diagnostics(
+ r#"
+fn test_main() {
+ match false { }
+ //^^^^^ Missing match arm
+ match false { true => (), }
+ //^^^^^ Missing match arm
+ match (false, true) {}
+ //^^^^^^^^^^^^^ Missing match arm
+ match (false, true) { (true, true) => (), }
+ //^^^^^^^^^^^^^ Missing match arm
+ match (false, true) {
+ //^^^^^^^^^^^^^ Missing match arm
+ (false, true) => (),
+ (false, false) => (),
+ (true, false) => (),
+ }
+ match (false, true) { (true, _x) => (), }
+ //^^^^^^^^^^^^^ Missing match arm
+
+ match false { true => (), false => (), }
+ match (false, true) {
+ (false, _) => (),
+ (true, false) => (),
+ (_, true) => (),
+ }
+ match (false, true) {
+ (true, true) => (),
+ (true, false) => (),
+ (false, true) => (),
+ (false, false) => (),
+ }
+ match (false, true) {
+ (true, _x) => (),
+ (false, true) => (),
+ (false, false) => (),
+ }
+ match (false, true, false) {
+ (false, ..) => (),
+ (true, ..) => (),
+ }
+ match (false, true, false) {
+ (.., false) => (),
+ (.., true) => (),
+ }
+ match (false, true, false) { (..) => (), }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn tuple_of_tuple_and_bools() {
+ check_diagnostics(
+ r#"
+fn main() {
+ match (false, ((), false)) {}
+ //^^^^^^^^^^^^^^^^^^^^ Missing match arm
+ match (false, ((), false)) { (true, ((), true)) => (), }
+ //^^^^^^^^^^^^^^^^^^^^ Missing match arm
+ match (false, ((), false)) { (true, _) => (), }
+ //^^^^^^^^^^^^^^^^^^^^ Missing match arm
+
+ match (false, ((), false)) {
+ (true, ((), true)) => (),
+ (true, ((), false)) => (),
+ (false, ((), true)) => (),
+ (false, ((), false)) => (),
+ }
+ match (false, ((), false)) {
+ (true, ((), true)) => (),
+ (true, ((), false)) => (),
+ (false, _) => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enums() {
+ check_diagnostics(
+ r#"
+enum Either { A, B, }
+
+fn main() {
+ match Either::A { }
+ //^^^^^^^^^ Missing match arm
+ match Either::B { Either::A => (), }
+ //^^^^^^^^^ Missing match arm
+
+ match &Either::B {
+ //^^^^^^^^^^ Missing match arm
+ Either::A => (),
+ }
+
+ match Either::B {
+ Either::A => (), Either::B => (),
+ }
+ match &Either::B {
+ Either::A => (), Either::B => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enum_containing_bool() {
+ check_diagnostics(
+ r#"
+enum Either { A(bool), B }
+
+fn main() {
+ match Either::B { }
+ //^^^^^^^^^ Missing match arm
+ match Either::B {
+ //^^^^^^^^^ Missing match arm
+ Either::A(true) => (), Either::B => ()
+ }
+
+ match Either::B {
+ Either::A(true) => (),
+ Either::A(false) => (),
+ Either::B => (),
+ }
+ match Either::B {
+ Either::B => (),
+ _ => (),
+ }
+ match Either::B {
+ Either::A(_) => (),
+ Either::B => (),
+ }
+
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn enum_different_sizes() {
+ check_diagnostics(
+ r#"
+enum Either { A(bool), B(bool, bool) }
+
+fn main() {
+ match Either::A(false) {
+ //^^^^^^^^^^^^^^^^ Missing match arm
+ Either::A(_) => (),
+ Either::B(false, _) => (),
+ }
+
+ match Either::A(false) {
+ Either::A(_) => (),
+ Either::B(true, _) => (),
+ Either::B(false, _) => (),
+ }
+ match Either::A(false) {
+ Either::A(true) | Either::A(false) => (),
+ Either::B(true, _) => (),
+ Either::B(false, _) => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn tuple_of_enum_no_diagnostic() {
+ check_diagnostics(
+ r#"
+enum Either { A(bool), B(bool, bool) }
+enum Either2 { C, D }
+
+fn main() {
+ match (Either::A(false), Either2::C) {
+ (Either::A(true), _) | (Either::A(false), _) => (),
+ (Either::B(true, _), Either2::C) => (),
+ (Either::B(false, _), Either2::C) => (),
+ (Either::B(_, _), Either2::D) => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mismatched_types() {
+ // Match statements with arms that don't match the
+ // expression pattern do not fire this diagnostic.
+ check_diagnostics(
+ r#"
+enum Either { A, B }
+enum Either2 { C, D }
+
+fn main() {
+ match Either::A {
+ Either2::C => (),
+ Either2::D => (),
+ }
+ match (true, false) {
+ (true, false, true) => (),
+ (true) => (),
+ }
+ match (0) { () => () }
+ match Unresolved::Bar { Unresolved::Baz => () }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn malformed_match_arm_tuple_enum_missing_pattern() {
+ // We are testing to be sure we don't panic here when the match
+ // arm `Either::B` is missing its pattern.
+ check_diagnostics(
+ r#"
+enum Either { A, B(u32) }
+
+fn main() {
+ match Either::A {
+ Either::A => (),
+ Either::B() => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn expr_diverges() {
+ check_diagnostics(
+ r#"
+enum Either { A, B }
+
+fn main() {
+ match loop {} {
+ Either::A => (),
+ Either::B => (),
+ }
+ match loop {} {
+ Either::A => (),
+ }
+ match loop { break Foo::A } {
+ //^^^^^^^^^^^^^^^^^^^^^ Missing match arm
+ Either::A => (),
+ }
+ match loop { break Foo::A } {
+ Either::A => (),
+ Either::B => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn expr_partially_diverges() {
+ check_diagnostics(
+ r#"
+enum Either<T> { A(T), B }
+
+fn foo() -> Either<!> { Either::B }
+fn main() -> u32 {
+ match foo() {
+ Either::A(val) => val,
+ Either::B => 0,
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enum_record() {
+ check_diagnostics(
+ r#"
+enum Either { A { foo: bool }, B }
+
+fn main() {
+ let a = Either::A { foo: true };
+ match a { }
+ //^ Missing match arm
+ match a { Either::A { foo: true } => () }
+ //^ Missing match arm
+ match a {
+ Either::A { } => (),
+ //^^^^^^^^^ Missing structure fields:
+ // | - foo
+ Either::B => (),
+ }
+ match a {
+ //^ Missing match arm
+ Either::A { } => (),
+ } //^^^^^^^^^ Missing structure fields:
+ // | - foo
+
+ match a {
+ Either::A { foo: true } => (),
+ Either::A { foo: false } => (),
+ Either::B => (),
+ }
+ match a {
+ Either::A { foo: _ } => (),
+ Either::B => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enum_record_fields_out_of_order() {
+ check_diagnostics(
+ r#"
+enum Either {
+ A { foo: bool, bar: () },
+ B,
+}
+
+fn main() {
+ let a = Either::A { foo: true, bar: () };
+ match a {
+ //^ Missing match arm
+ Either::A { bar: (), foo: false } => (),
+ Either::A { foo: true, bar: () } => (),
+ }
+
+ match a {
+ Either::A { bar: (), foo: false } => (),
+ Either::A { foo: true, bar: () } => (),
+ Either::B => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enum_record_ellipsis() {
+ check_diagnostics(
+ r#"
+enum Either {
+ A { foo: bool, bar: bool },
+ B,
+}
+
+fn main() {
+ let a = Either::B;
+ match a {
+ //^ Missing match arm
+ Either::A { foo: true, .. } => (),
+ Either::B => (),
+ }
+ match a {
+ //^ Missing match arm
+ Either::A { .. } => (),
+ }
+
+ match a {
+ Either::A { foo: true, .. } => (),
+ Either::A { foo: false, .. } => (),
+ Either::B => (),
+ }
+
+ match a {
+ Either::A { .. } => (),
+ Either::B => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn enum_tuple_partial_ellipsis() {
+ check_diagnostics(
+ r#"
+enum Either {
+ A(bool, bool, bool, bool),
+ B,
+}
+
+fn main() {
+ match Either::B {
+ //^^^^^^^^^ Missing match arm
+ Either::A(true, .., true) => (),
+ Either::A(true, .., false) => (),
+ Either::A(false, .., false) => (),
+ Either::B => (),
+ }
+ match Either::B {
+ //^^^^^^^^^ Missing match arm
+ Either::A(true, .., true) => (),
+ Either::A(true, .., false) => (),
+ Either::A(.., true) => (),
+ Either::B => (),
+ }
+
+ match Either::B {
+ Either::A(true, .., true) => (),
+ Either::A(true, .., false) => (),
+ Either::A(false, .., true) => (),
+ Either::A(false, .., false) => (),
+ Either::B => (),
+ }
+ match Either::B {
+ Either::A(true, .., true) => (),
+ Either::A(true, .., false) => (),
+ Either::A(.., true) => (),
+ Either::A(.., false) => (),
+ Either::B => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn never() {
+ check_diagnostics(
+ r#"
+enum Never {}
+
+fn enum_(never: Never) {
+ match never {}
+}
+fn enum_ref(never: &Never) {
+ match never {}
+}
+fn bang(never: !) {
+ match never {}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn or_pattern_panic() {
+ check_diagnostics(
+ r#"
+pub enum Category { Infinity, Zero }
+
+fn panic(a: Category, b: Category) {
+ match (a, b) {
+ (Category::Zero | Category::Infinity, _) => (),
+ (_, Category::Zero | Category::Infinity) => (),
+ }
+
+ // FIXME: This is a false positive, but the code used to cause a panic in the match checker,
+ // so this acts as a regression test for that.
+ match (a, b) {
+ //^^^^^^ Missing match arm
+ (Category::Infinity, Category::Infinity) | (Category::Zero, Category::Zero) => (),
+ (Category::Infinity | Category::Zero, _) => (),
+ }
+}
+"#,
+ );
+ }
+
+ mod false_negatives {
+ //! The implementation of match checking here is a work in progress. As we roll this out, we
+ //! prefer false negatives to false positives (ideally there would be no false positives). This
+ //! test module should document known false negatives. Eventually we will have a complete
+ //! implementation of match checking and this module will be empty.
+ //!
+ //! The reasons for documenting known false negatives:
+ //!
+ //! 1. It acts as a backlog of work that can be done to improve the behavior of the system.
+ //! 2. It ensures the code doesn't panic when handling these cases.
+ use super::*;
+
+ #[test]
+ fn integers() {
+ // We don't currently check integer exhaustiveness.
+ check_diagnostics(
+ r#"
+fn main() {
+ match 5 {
+ 10 => (),
+ 11..20 => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn internal_or() {
+ // We do not currently handle patterns with internal `or`s.
+ check_diagnostics(
+ r#"
+fn main() {
+ enum Either { A(bool), B }
+ match Either::B {
+ Either::A(true | false) => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn tuple_of_bools_with_ellipsis_at_end_missing_arm() {
+ // We don't currently handle tuple patterns with ellipsis.
+ check_diagnostics(
+ r#"
+fn main() {
+ match (false, true, false) {
+ (false, ..) => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn tuple_of_bools_with_ellipsis_at_beginning_missing_arm() {
+ // We don't currently handle tuple patterns with ellipsis.
+ check_diagnostics(
+ r#"
+fn main() {
+ match (false, true, false) {
+ (.., false) => (),
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn struct_missing_arm() {
+ // We don't currently handle structs.
+ check_diagnostics(
+ r#"
+struct Foo { a: bool }
+fn main(f: Foo) {
+ match f { Foo { a: true } => () }
+}
+"#,
+ );
+ }
+ }
+}
--- /dev/null
+//! Provides validations for unsafe code. Currently checks that unsafe operations
+//! (calls to unsafe functions/methods, raw-pointer dereferences, access to mutable
+//! statics) only occur inside unsafe blocks or unsafe functions.
+
+use std::sync::Arc;
+
+use hir_def::{
+ body::Body,
+ expr::{Expr, ExprId, UnaryOp},
+ resolver::{resolver_for_expr, ResolveValueResult, ValueNs},
+ DefWithBodyId,
+};
+use hir_expand::diagnostics::DiagnosticSink;
+
+use crate::{
+ db::HirDatabase, diagnostics::MissingUnsafe, lower::CallableDefId, ApplicationTy,
+ InferenceResult, Ty, TypeCtor,
+};
+
+pub(super) struct UnsafeValidator<'a, 'b: 'a> {
+ owner: DefWithBodyId,
+ infer: Arc<InferenceResult>,
+ sink: &'a mut DiagnosticSink<'b>,
+}
+
+impl<'a, 'b> UnsafeValidator<'a, 'b> {
+ pub(super) fn new(
+ owner: DefWithBodyId,
+ infer: Arc<InferenceResult>,
+ sink: &'a mut DiagnosticSink<'b>,
+ ) -> UnsafeValidator<'a, 'b> {
+ UnsafeValidator { owner, infer, sink }
+ }
+
+ pub(super) fn validate_body(&mut self, db: &dyn HirDatabase) {
+ let def = self.owner.into();
+ let unsafe_expressions = unsafe_expressions(db, self.infer.as_ref(), def);
+ let is_unsafe = match self.owner {
+ DefWithBodyId::FunctionId(it) => db.function_data(it).is_unsafe,
+ DefWithBodyId::StaticId(_) | DefWithBodyId::ConstId(_) => false,
+ };
+ if is_unsafe
+ || unsafe_expressions
+ .iter()
+ .filter(|unsafe_expr| !unsafe_expr.inside_unsafe_block)
+ .count()
+ == 0
+ {
+ return;
+ }
+
+ let (_, body_source) = db.body_with_source_map(def);
+ for unsafe_expr in unsafe_expressions {
+ if !unsafe_expr.inside_unsafe_block {
+ if let Ok(in_file) = body_source.as_ref().expr_syntax(unsafe_expr.expr) {
+ self.sink.push(MissingUnsafe { file: in_file.file_id, expr: in_file.value })
+ }
+ }
+ }
+ }
+}
+
+pub struct UnsafeExpr {
+ pub expr: ExprId,
+ pub inside_unsafe_block: bool,
+}
+
+pub fn unsafe_expressions(
+ db: &dyn HirDatabase,
+ infer: &InferenceResult,
+ def: DefWithBodyId,
+) -> Vec<UnsafeExpr> {
+ let mut unsafe_exprs = vec![];
+ let body = db.body(def);
+ walk_unsafe(&mut unsafe_exprs, db, infer, def, &body, body.body_expr, false);
+
+ unsafe_exprs
+}
+
+fn walk_unsafe(
+ unsafe_exprs: &mut Vec<UnsafeExpr>,
+ db: &dyn HirDatabase,
+ infer: &InferenceResult,
+ def: DefWithBodyId,
+ body: &Body,
+ current: ExprId,
+ inside_unsafe_block: bool,
+) {
+ let expr = &body.exprs[current];
+ match expr {
+ Expr::Call { callee, .. } => {
+ let ty = &infer[*callee];
+ if let &Ty::Apply(ApplicationTy {
+ ctor: TypeCtor::FnDef(CallableDefId::FunctionId(func)),
+ ..
+ }) = ty
+ {
+ if db.function_data(func).is_unsafe {
+ unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block });
+ }
+ }
+ }
+ Expr::Path(path) => {
+ let resolver = resolver_for_expr(db.upcast(), def, current);
+ let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path.mod_path());
+ if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id))) = value_or_partial {
+ if db.static_data(id).mutable {
+ unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block });
+ }
+ }
+ }
+ Expr::MethodCall { .. } => {
+ if infer
+ .method_resolution(current)
+ .map(|func| db.function_data(func).is_unsafe)
+ .unwrap_or(false)
+ {
+ unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block });
+ }
+ }
+ Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
+ if let Ty::Apply(ApplicationTy { ctor: TypeCtor::RawPtr(..), .. }) = &infer[*expr] {
+ unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block });
+ }
+ }
+ Expr::Unsafe { body: child } => {
+ return walk_unsafe(unsafe_exprs, db, infer, def, body, *child, true);
+ }
+ _ => {}
+ }
+
+ expr.walk_child_exprs(|child| {
+ walk_unsafe(unsafe_exprs, db, infer, def, body, child, inside_unsafe_block);
+ });
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::diagnostics::tests::check_diagnostics;
+
+ #[test]
+ fn missing_unsafe_diagnostic_with_raw_ptr() {
+ check_diagnostics(
+ r#"
+fn main() {
+ let x = &5 as *const usize;
+ unsafe { let y = *x; }
+ let z = *x;
+} //^^ This operation is unsafe and requires an unsafe function or block
+"#,
+ )
+ }
+
+ #[test]
+ fn missing_unsafe_diagnostic_with_unsafe_call() {
+ check_diagnostics(
+ r#"
+struct HasUnsafe;
+
+impl HasUnsafe {
+ unsafe fn unsafe_fn(&self) {
+ let x = &5 as *const usize;
+ let y = *x;
+ }
+}
+
+unsafe fn unsafe_fn() {
+ let x = &5 as *const usize;
+ let y = *x;
+}
+
+fn main() {
+ unsafe_fn();
+ //^^^^^^^^^^^ This operation is unsafe and requires an unsafe function or block
+ HasUnsafe.unsafe_fn();
+ //^^^^^^^^^^^^^^^^^^^^^ This operation is unsafe and requires an unsafe function or block
+ unsafe {
+ unsafe_fn();
+ HasUnsafe.unsafe_fn();
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn missing_unsafe_diagnostic_with_static_mut() {
+ check_diagnostics(
+ r#"
+struct Ty {
+ a: u8,
+}
+
+static mut static_mut: Ty = Ty { a: 0 };
+
+fn main() {
+ let x = static_mut.a;
+ //^^^^^^^^^^ This operation is unsafe and requires an unsafe function or block
+ unsafe {
+ let x = static_mut.a;
+ }
+}
+"#,
+ );
+ }
+}
--- /dev/null
+//! The `HirDisplay` trait and `HirFormatter`, used to render HIR types, trait
+//! references and predicates — either for user-facing output such as diagnostics
+//! and hover/doc popups, or as source code insertable at a given module
+//! (see `DisplayTarget`).
+
+use std::fmt;
+
+use crate::{
+ db::HirDatabase, utils::generics, ApplicationTy, CallableDefId, FnSig, GenericPredicate,
+ Obligation, OpaqueTyId, ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
+};
+use hir_def::{
+ find_path, generics::TypeParamProvenance, item_scope::ItemInNs, AdtId, AssocContainerId,
+ Lookup, ModuleId,
+};
+use hir_expand::name::Name;
+
+pub struct HirFormatter<'a> {
+ pub db: &'a dyn HirDatabase,
+ fmt: &'a mut dyn fmt::Write,
+ buf: String,
+ curr_size: usize,
+ pub(crate) max_size: Option<usize>,
+ omit_verbose_types: bool,
+ display_target: DisplayTarget,
+}
+
+pub trait HirDisplay {
+ fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError>;
+
+ /// Returns a `Display`able type that is human-readable.
+ /// Use this for showing types to the user (e.g. diagnostics)
+ fn display<'a>(&'a self, db: &'a dyn HirDatabase) -> HirDisplayWrapper<'a, Self>
+ where
+ Self: Sized,
+ {
+ HirDisplayWrapper {
+ db,
+ t: self,
+ max_size: None,
+ omit_verbose_types: false,
+ display_target: DisplayTarget::Diagnostics,
+ }
+ }
+
+ /// Returns a `Display`able type that is human-readable and tries to be succinct.
+ /// Use this for showing types to the user where space is constrained (e.g. doc popups)
+ fn display_truncated<'a>(
+ &'a self,
+ db: &'a dyn HirDatabase,
+ max_size: Option<usize>,
+ ) -> HirDisplayWrapper<'a, Self>
+ where
+ Self: Sized,
+ {
+ HirDisplayWrapper {
+ db,
+ t: self,
+ max_size,
+ omit_verbose_types: true,
+ display_target: DisplayTarget::Diagnostics,
+ }
+ }
+
+ /// Returns a String representation of `self` that can be inserted into the given module.
+ /// Use this when generating code (e.g. assists)
+ fn display_source_code<'a>(
+ &'a self,
+ db: &'a dyn HirDatabase,
+ module_id: ModuleId,
+ ) -> Result<String, DisplaySourceCodeError> {
+ let mut result = String::new();
+ match self.hir_fmt(&mut HirFormatter {
+ db,
+ fmt: &mut result,
+ buf: String::with_capacity(20),
+ curr_size: 0,
+ max_size: None,
+ omit_verbose_types: false,
+ display_target: DisplayTarget::SourceCode { module_id },
+ }) {
+ Ok(()) => {}
+ Err(HirDisplayError::FmtError) => panic!("Writing to String can't fail!"),
+ Err(HirDisplayError::DisplaySourceCodeError(e)) => return Err(e),
+ };
+ Ok(result)
+ }
+}
+
+impl<'a> HirFormatter<'a> {
+ pub fn write_joined<T: HirDisplay>(
+ &mut self,
+ iter: impl IntoIterator<Item = T>,
+ sep: &str,
+ ) -> Result<(), HirDisplayError> {
+ let mut first = true;
+ for e in iter {
+ if !first {
+ write!(self, "{}", sep)?;
+ }
+ first = false;
+ e.hir_fmt(self)?;
+ }
+ Ok(())
+ }
+
+ /// This allows using the `write!` macro directly with a `HirFormatter`.
+ pub fn write_fmt(&mut self, args: fmt::Arguments) -> Result<(), HirDisplayError> {
+ // We write to a buffer first to track output size
+ self.buf.clear();
+ fmt::write(&mut self.buf, args)?;
+ self.curr_size += self.buf.len();
+
+ // Then we write to the internal formatter from the buffer
+ self.fmt.write_str(&self.buf).map_err(HirDisplayError::from)
+ }
+
+ pub fn should_truncate(&self) -> bool {
+ if let Some(max_size) = self.max_size {
+ self.curr_size >= max_size
+ } else {
+ false
+ }
+ }
+
+ pub fn omit_verbose_types(&self) -> bool {
+ self.omit_verbose_types
+ }
+}
+
+#[derive(Clone, Copy)]
+enum DisplayTarget {
+ /// Display types for inlays, doc popups, autocompletion, etc...
+ /// Showing `{unknown}` or not qualifying paths is fine here.
+ /// There's no reason for this to fail.
+ Diagnostics,
+ /// Display types for inserting them in source files.
+ /// The generated code should compile, so paths need to be qualified.
+ SourceCode { module_id: ModuleId },
+}
+
+impl DisplayTarget {
+ fn is_source_code(&self) -> bool {
+ matches!(self, Self::SourceCode {..})
+ }
+}
+
+#[derive(Debug)]
+pub enum DisplaySourceCodeError {
+ PathNotFound,
+}
+
+pub enum HirDisplayError {
+ /// Errors that can occur when generating source code
+ DisplaySourceCodeError(DisplaySourceCodeError),
+ /// `FmtError` is required to be compatible with std::fmt::Display
+ FmtError,
+}
+impl From<fmt::Error> for HirDisplayError {
+ fn from(_: fmt::Error) -> Self {
+ Self::FmtError
+ }
+}
+
+pub struct HirDisplayWrapper<'a, T> {
+ db: &'a dyn HirDatabase,
+ t: &'a T,
+ max_size: Option<usize>,
+ omit_verbose_types: bool,
+ display_target: DisplayTarget,
+}
+
+impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T>
+where
+ T: HirDisplay,
+{
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self.t.hir_fmt(&mut HirFormatter {
+ db: self.db,
+ fmt: f,
+ buf: String::with_capacity(20),
+ curr_size: 0,
+ max_size: self.max_size,
+ omit_verbose_types: self.omit_verbose_types,
+ display_target: self.display_target,
+ }) {
+ Ok(()) => Ok(()),
+ Err(HirDisplayError::FmtError) => Err(fmt::Error),
+ Err(HirDisplayError::DisplaySourceCodeError(_)) => {
+ // This should never happen
+ panic!("HirDisplay failed when calling Display::fmt!")
+ }
+ }
+ }
+}
+
+const TYPE_HINT_TRUNCATION: &str = "…";
+
+impl HirDisplay for &Ty {
+ fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
+ HirDisplay::hir_fmt(*self, f)
+ }
+}
+
+impl HirDisplay for ApplicationTy {
+ fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
+ if f.should_truncate() {
+ return write!(f, "{}", TYPE_HINT_TRUNCATION);
+ }
+
+ match self.ctor {
+ TypeCtor::Bool => write!(f, "bool")?,
+ TypeCtor::Char => write!(f, "char")?,
+ TypeCtor::Int(t) => write!(f, "{}", t)?,
+ TypeCtor::Float(t) => write!(f, "{}", t)?,
+ TypeCtor::Str => write!(f, "str")?,
+ TypeCtor::Slice => {
+ let t = self.parameters.as_single();
+ write!(f, "[{}]", t.display(f.db))?;
+ }
+ TypeCtor::Array => {
+ let t = self.parameters.as_single();
+ write!(f, "[{}; _]", t.display(f.db))?;
+ }
+ TypeCtor::RawPtr(m) => {
+ let t = self.parameters.as_single();
+ write!(f, "*{}{}", m.as_keyword_for_ptr(), t.display(f.db))?;
+ }
+ TypeCtor::Ref(m) => {
+ let t = self.parameters.as_single();
+ let ty_display = if f.omit_verbose_types() {
+ t.display_truncated(f.db, f.max_size)
+ } else {
+ t.display(f.db)
+ };
+ write!(f, "&{}{}", m.as_keyword_for_ref(), ty_display)?;
+ }
+ TypeCtor::Never => write!(f, "!")?,
+ TypeCtor::Tuple { .. } => {
+ let ts = &self.parameters;
+ if ts.len() == 1 {
+ write!(f, "({},)", ts[0].display(f.db))?;
+ } else {
+ write!(f, "(")?;
+ f.write_joined(&*ts.0, ", ")?;
+ write!(f, ")")?;
+ }
+ }
+ TypeCtor::FnPtr { is_varargs, .. } => {
+ let sig = FnSig::from_fn_ptr_substs(&self.parameters, is_varargs);
+ write!(f, "fn(")?;
+ f.write_joined(sig.params(), ", ")?;
+ if is_varargs {
+ if sig.params().is_empty() {
+ write!(f, "...")?;
+ } else {
+ write!(f, ", ...")?;
+ }
+ }
+ write!(f, ")")?;
+ let ret = sig.ret();
+ if *ret != Ty::unit() {
+ let ret_display = if f.omit_verbose_types() {
+ ret.display_truncated(f.db, f.max_size)
+ } else {
+ ret.display(f.db)
+ };
+ write!(f, " -> {}", ret_display)?;
+ }
+ }
+ TypeCtor::FnDef(def) => {
+ let sig = f.db.callable_item_signature(def).subst(&self.parameters);
+ match def {
+ CallableDefId::FunctionId(ff) => {
+ write!(f, "fn {}", f.db.function_data(ff).name)?
+ }
+ CallableDefId::StructId(s) => write!(f, "{}", f.db.struct_data(s).name)?,
+ CallableDefId::EnumVariantId(e) => {
+ write!(f, "{}", f.db.enum_data(e.parent).variants[e.local_id].name)?
+ }
+ };
+ if self.parameters.len() > 0 {
+ let generics = generics(f.db.upcast(), def.into());
+ let (parent_params, self_param, type_params, _impl_trait_params) =
+ generics.provenance_split();
+ let total_len = parent_params + self_param + type_params;
+ // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
+ if total_len > 0 {
+ write!(f, "<")?;
+ f.write_joined(&self.parameters.0[..total_len], ", ")?;
+ write!(f, ">")?;
+ }
+ }
+ write!(f, "(")?;
+ f.write_joined(sig.params(), ", ")?;
+ write!(f, ")")?;
+ let ret = sig.ret();
+ if *ret != Ty::unit() {
+ let ret_display = if f.omit_verbose_types() {
+ ret.display_truncated(f.db, f.max_size)
+ } else {
+ ret.display(f.db)
+ };
+ write!(f, " -> {}", ret_display)?;
+ }
+ }
+ TypeCtor::Adt(def_id) => {
+ match f.display_target {
+ DisplayTarget::Diagnostics => {
+ let name = match def_id {
+ AdtId::StructId(it) => f.db.struct_data(it).name.clone(),
+ AdtId::UnionId(it) => f.db.union_data(it).name.clone(),
+ AdtId::EnumId(it) => f.db.enum_data(it).name.clone(),
+ };
+ write!(f, "{}", name)?;
+ }
+ DisplayTarget::SourceCode { module_id } => {
+ if let Some(path) = find_path::find_path(
+ f.db.upcast(),
+ ItemInNs::Types(def_id.into()),
+ module_id,
+ ) {
+ write!(f, "{}", path)?;
+ } else {
+ return Err(HirDisplayError::DisplaySourceCodeError(
+ DisplaySourceCodeError::PathNotFound,
+ ));
+ }
+ }
+ }
+
+ if self.parameters.len() > 0 {
+ let parameters_to_write =
+ if f.display_target.is_source_code() || f.omit_verbose_types() {
+ match self
+ .ctor
+ .as_generic_def()
+ .map(|generic_def_id| f.db.generic_defaults(generic_def_id))
+ .filter(|defaults| !defaults.is_empty())
+ {
+ None => self.parameters.0.as_ref(),
+ Some(default_parameters) => {
+ let mut default_from = 0;
+ for (i, parameter) in self.parameters.iter().enumerate() {
+ match (parameter, default_parameters.get(i)) {
+ (&Ty::Unknown, _) | (_, None) => {
+ default_from = i + 1;
+ }
+ (_, Some(default_parameter)) => {
+ let actual_default = default_parameter
+ .clone()
+ .subst(&self.parameters.prefix(i));
+ if parameter != &actual_default {
+ default_from = i + 1;
+ }
+ }
+ }
+ }
+ &self.parameters.0[0..default_from]
+ }
+ }
+ } else {
+ self.parameters.0.as_ref()
+ };
+ if !parameters_to_write.is_empty() {
+ write!(f, "<")?;
+ f.write_joined(parameters_to_write, ", ")?;
+ write!(f, ">")?;
+ }
+ }
+ }
+ TypeCtor::AssociatedType(type_alias) => {
+ let trait_ = match type_alias.lookup(f.db.upcast()).container {
+ AssocContainerId::TraitId(it) => it,
+ _ => panic!("not an associated type"),
+ };
+ let trait_ = f.db.trait_data(trait_);
+ let type_alias = f.db.type_alias_data(type_alias);
+ write!(f, "{}::{}", trait_.name, type_alias.name)?;
+ if self.parameters.len() > 0 {
+ write!(f, "<")?;
+ f.write_joined(&*self.parameters.0, ", ")?;
+ write!(f, ">")?;
+ }
+ }
+ TypeCtor::OpaqueType(opaque_ty_id) => {
+ let bounds = match opaque_ty_id {
+ OpaqueTyId::ReturnTypeImplTrait(func, idx) => {
+ let datas =
+ f.db.return_type_impl_traits(func).expect("impl trait id without data");
+ let data = (*datas)
+ .as_ref()
+ .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+ data.subst(&self.parameters)
+ }
+ };
+ write!(f, "impl ")?;
+ write_bounds_like_dyn_trait(&bounds.value, f)?;
+ // FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution
+ }
+ TypeCtor::Closure { .. } => {
+ let sig = self.parameters[0].callable_sig(f.db);
+ if let Some(sig) = sig {
+ if sig.params().is_empty() {
+ write!(f, "||")?;
+ } else if f.omit_verbose_types() {
+ write!(f, "|{}|", TYPE_HINT_TRUNCATION)?;
+ } else {
+ write!(f, "|")?;
+ f.write_joined(sig.params(), ", ")?;
+ write!(f, "|")?;
+ };
+
+ let ret_display = if f.omit_verbose_types() {
+ sig.ret().display_truncated(f.db, f.max_size)
+ } else {
+ sig.ret().display(f.db)
+ };
+ write!(f, " -> {}", ret_display)?;
+ } else {
+ write!(f, "{{closure}}")?;
+ }
+ }
+ }
+ Ok(())
+ }
+}
+
+impl HirDisplay for ProjectionTy {
+ fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
+ if f.should_truncate() {
+ return write!(f, "{}", TYPE_HINT_TRUNCATION);
+ }
+
+ let trait_ = f.db.trait_data(self.trait_(f.db));
+ write!(f, "<{} as {}", self.parameters[0].display(f.db), trait_.name)?;
+ if self.parameters.len() > 1 {
+ write!(f, "<")?;
+ f.write_joined(&self.parameters[1..], ", ")?;
+ write!(f, ">")?;
+ }
+ write!(f, ">::{}", f.db.type_alias_data(self.associated_ty).name)?;
+ Ok(())
+ }
+}
+
+impl HirDisplay for Ty {
+ fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
+ if f.should_truncate() {
+ return write!(f, "{}", TYPE_HINT_TRUNCATION);
+ }
+
+ match self {
+ Ty::Apply(a_ty) => a_ty.hir_fmt(f)?,
+ Ty::Projection(p_ty) => p_ty.hir_fmt(f)?,
+ Ty::Placeholder(id) => {
+ let generics = generics(f.db.upcast(), id.parent);
+ let param_data = &generics.params.types[id.local_id];
+ match param_data.provenance {
+ TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
+ write!(f, "{}", param_data.name.clone().unwrap_or_else(Name::missing))?
+ }
+ TypeParamProvenance::ArgumentImplTrait => {
+ write!(f, "impl ")?;
+ let bounds = f.db.generic_predicates_for_param(*id);
+ let substs = Substs::type_params_for_generics(&generics);
+ write_bounds_like_dyn_trait(
+ &bounds.iter().map(|b| b.clone().subst(&substs)).collect::<Vec<_>>(),
+ f,
+ )?;
+ }
+ }
+ }
+ Ty::Bound(idx) => write!(f, "?{}.{}", idx.debruijn.depth(), idx.index)?,
+ Ty::Dyn(predicates) => {
+ write!(f, "dyn ")?;
+ write_bounds_like_dyn_trait(predicates, f)?;
+ }
+ Ty::Opaque(opaque_ty) => {
+ let bounds = match opaque_ty.opaque_ty_id {
+ OpaqueTyId::ReturnTypeImplTrait(func, idx) => {
+ let datas =
+ f.db.return_type_impl_traits(func).expect("impl trait id without data");
+ let data = (*datas)
+ .as_ref()
+ .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+ data.subst(&opaque_ty.parameters)
+ }
+ };
+ write!(f, "impl ")?;
+ write_bounds_like_dyn_trait(&bounds.value, f)?;
+ }
+ Ty::Unknown => write!(f, "{{unknown}}")?,
+ Ty::Infer(..) => write!(f, "_")?,
+ }
+ Ok(())
+ }
+}
+
+fn write_bounds_like_dyn_trait(
+ predicates: &[GenericPredicate],
+ f: &mut HirFormatter,
+) -> Result<(), HirDisplayError> {
+ // Note: This code is written to produce nice results (i.e.
+ // corresponding to surface Rust) for types that can occur in
+ // actual Rust. It will have weird results if the predicates
+ // aren't as expected (i.e. self types = $0, projection
+ // predicates for a certain trait come after the Implemented
+ // predicate for that trait).
+ let mut first = true;
+ let mut angle_open = false;
+ for p in predicates.iter() {
+ match p {
+ GenericPredicate::Implemented(trait_ref) => {
+ if angle_open {
+ write!(f, ">")?;
+ }
+ if !first {
+ write!(f, " + ")?;
+ }
+ // We assume that the self type is $0 (i.e. the
+ // existential) here, which is the only thing that's
+ // possible in actual Rust, and hence don't print it
+ write!(f, "{}", f.db.trait_data(trait_ref.trait_).name)?;
+ if trait_ref.substs.len() > 1 {
+ write!(f, "<")?;
+ f.write_joined(&trait_ref.substs[1..], ", ")?;
+ // there might be assoc type bindings, so we leave the angle brackets open
+ angle_open = true;
+ }
+ }
+ GenericPredicate::Projection(projection_pred) => {
+ // in types in actual Rust, these will always come
+ // after the corresponding Implemented predicate
+ if angle_open {
+ write!(f, ", ")?;
+ } else {
+ write!(f, "<")?;
+ angle_open = true;
+ }
+ let type_alias = f.db.type_alias_data(projection_pred.projection_ty.associated_ty);
+ write!(f, "{} = ", type_alias.name)?;
+ projection_pred.ty.hir_fmt(f)?;
+ }
+ GenericPredicate::Error => {
+ if angle_open {
+ // impl Trait<X, {error}>
+ write!(f, ", ")?;
+ } else if !first {
+ // impl Trait + {error}
+ write!(f, " + ")?;
+ }
+ p.hir_fmt(f)?;
+ }
+ }
+ first = false;
+ }
+ if angle_open {
+ write!(f, ">")?;
+ }
+ Ok(())
+}
+
+impl TraitRef {
+ fn hir_fmt_ext(&self, f: &mut HirFormatter, use_as: bool) -> Result<(), HirDisplayError> {
+ if f.should_truncate() {
+ return write!(f, "{}", TYPE_HINT_TRUNCATION);
+ }
+
+ self.substs[0].hir_fmt(f)?;
+ if use_as {
+ write!(f, " as ")?;
+ } else {
+ write!(f, ": ")?;
+ }
+ write!(f, "{}", f.db.trait_data(self.trait_).name)?;
+ if self.substs.len() > 1 {
+ write!(f, "<")?;
+ f.write_joined(&self.substs[1..], ", ")?;
+ write!(f, ">")?;
+ }
+ Ok(())
+ }
+}
+
+impl HirDisplay for TraitRef {
+ fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
+ self.hir_fmt_ext(f, false)
+ }
+}
+
+impl HirDisplay for &GenericPredicate {
+ fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
+ HirDisplay::hir_fmt(*self, f)
+ }
+}
+
+impl HirDisplay for GenericPredicate {
+    fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
+        if f.should_truncate() {
+            return write!(f, "{}", TYPE_HINT_TRUNCATION);
+        }
+
+        match self {
+            GenericPredicate::Implemented(trait_ref) => trait_ref.hir_fmt(f)?,
+            // Projection predicates render in fully qualified form:
+            // `<Self as Trait<...>>::Assoc = Ty`.
+            GenericPredicate::Projection(projection_pred) => {
+                write!(f, "<")?;
+                projection_pred.projection_ty.trait_ref(f.db).hir_fmt_ext(f, true)?;
+                write!(
+                    f,
+                    ">::{} = {}",
+                    f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name,
+                    projection_pred.ty.display(f.db)
+                )?;
+            }
+            GenericPredicate::Error => write!(f, "{{error}}")?,
+        }
+        Ok(())
+    }
+}
+
+impl HirDisplay for Obligation {
+    /// Debug-style rendering of a pending trait obligation:
+    /// `Implements(...)` for trait obligations, `Normalize(proj => ty)` for
+    /// projection obligations.
+    fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
+        Ok(match self {
+            Obligation::Trait(tr) => write!(f, "Implements({})", tr.display(f.db))?,
+            Obligation::Projection(proj) => write!(
+                f,
+                "Normalize({} => {})",
+                proj.projection_ty.display(f.db),
+                proj.ty.display(f.db)
+            )?,
+        })
+    }
+}
--- /dev/null
+//! Type inference, i.e. the process of walking through the code and determining
+//! the type of each expression and pattern.
+//!
+//! For type inference, compare the implementations in rustc (the various
+//! check_* methods in librustc_typeck/check/mod.rs are a good entry point) and
+//! IntelliJ-Rust (org.rust.lang.core.types.infer). Our entry point for
+//! inference here is the `infer` function, which infers the types of all
+//! expressions in a given function.
+//!
+//! During inference, types (i.e. the `Ty` struct) can contain type 'variables'
+//! which represent currently unknown types; as we walk through the expressions,
+//! we might determine that certain variables need to be equal to each other, or
+//! to certain types. To record this, we use the union-find implementation from
+//! the `ena` crate, which is extracted from rustc.
+
+use std::borrow::Cow;
+use std::mem;
+use std::ops::Index;
+use std::sync::Arc;
+
+use arena::map::ArenaMap;
+use hir_def::{
+ body::Body,
+ data::{ConstData, FunctionData, StaticData},
+ expr::{BindingAnnotation, ExprId, PatId},
+ lang_item::LangItemTarget,
+ path::{path, Path},
+ resolver::{HasResolver, Resolver, TypeNs},
+ type_ref::{Mutability, TypeRef},
+ AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, Lookup, TraitId,
+ TypeAliasId, VariantId,
+};
+use hir_expand::{diagnostics::DiagnosticSink, name::name};
+use rustc_hash::FxHashMap;
+use stdx::impl_from;
+use syntax::SmolStr;
+
+use super::{
+ primitive::{FloatTy, IntTy},
+ traits::{Guidance, Obligation, ProjectionPredicate, Solution},
+ InEnvironment, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk,
+};
+use crate::{
+ db::HirDatabase, infer::diagnostics::InferenceDiagnostic, lower::ImplTraitLoweringMode,
+};
+
+pub(crate) use unify::unify;
+
+/// Matches (or constructs, depending on position) an application type
+/// `Ty::Apply`, optionally binding its constructor and its parameters.
+/// The one-argument form ignores the parameters.
+macro_rules! ty_app {
+    ($ctor:pat, $param:pat) => {
+        crate::Ty::Apply(crate::ApplicationTy { ctor: $ctor, parameters: $param })
+    };
+    ($ctor:pat) => {
+        ty_app!($ctor, _)
+    };
+}
+
+mod unify;
+mod path;
+mod expr;
+mod pat;
+mod coerce;
+
+/// The entry point of type inference.
+///
+/// Collects the signature of the body owner (const/fn/static), infers the
+/// body's expressions, and resolves all inference variables before returning.
+pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
+    let _p = profile::span("infer_query");
+    let resolver = def.resolver(db.upcast());
+    let mut ctx = InferenceContext::new(db, def, resolver);
+
+    // Seed the context with signature information (e.g. parameter and return
+    // types) before walking the body.
+    match def {
+        DefWithBodyId::ConstId(c) => ctx.collect_const(&db.const_data(c)),
+        DefWithBodyId::FunctionId(f) => ctx.collect_fn(&db.function_data(f)),
+        DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_data(s)),
+    }
+
+    ctx.infer_body();
+
+    Arc::new(ctx.resolve_all())
+}
+
+/// Either an expression or a pattern; used as a key for results (variant and
+/// associated-item resolutions) that can be recorded for both node kinds.
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+enum ExprOrPatId {
+    ExprId(ExprId),
+    PatId(PatId),
+}
+impl_from!(ExprId, PatId for ExprOrPatId);
+
+/// Binding modes inferred for patterns.
+/// https://doc.rust-lang.org/reference/patterns.html#binding-modes
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+enum BindingMode {
+    Move,
+    Ref(Mutability),
+}
+
+impl BindingMode {
+    pub fn convert(annotation: BindingAnnotation) -> BindingMode {
+        match annotation {
+            // Note: `mut x` binds by move just like a plain `x`; only
+            // `ref`/`ref mut` introduce reference bindings.
+            BindingAnnotation::Unannotated | BindingAnnotation::Mutable => BindingMode::Move,
+            BindingAnnotation::Ref => BindingMode::Ref(Mutability::Shared),
+            BindingAnnotation::RefMut => BindingMode::Ref(Mutability::Mut),
+        }
+    }
+}
+
+impl Default for BindingMode {
+    fn default() -> Self {
+        BindingMode::Move
+    }
+}
+
+/// A mismatch between an expected and an inferred type.
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct TypeMismatch {
+    /// The type the context required.
+    pub expected: Ty,
+    /// The type that was actually inferred.
+    pub actual: Ty,
+}
+
+/// The result of type inference: A mapping from expressions and patterns to types.
+#[derive(Clone, PartialEq, Eq, Debug, Default)]
+pub struct InferenceResult {
+    /// For each method call expr, records the function it resolves to.
+    method_resolutions: FxHashMap<ExprId, FunctionId>,
+    /// For each field access expr, records the field it resolves to.
+    field_resolutions: FxHashMap<ExprId, FieldId>,
+    /// For each field in record literal, records the field it resolves to.
+    record_field_resolutions: FxHashMap<ExprId, FieldId>,
+    /// For each field in a record pattern, records the field it resolves to.
+    record_field_pat_resolutions: FxHashMap<PatId, FieldId>,
+    /// For each struct literal, records the variant it resolves to.
+    variant_resolutions: FxHashMap<ExprOrPatId, VariantId>,
+    /// For each associated item record what it resolves to
+    assoc_resolutions: FxHashMap<ExprOrPatId, AssocItemId>,
+    /// Diagnostics collected during inference; surfaced via `add_diagnostics`.
+    diagnostics: Vec<InferenceDiagnostic>,
+    /// The inferred type of each expression in the body.
+    pub type_of_expr: ArenaMap<ExprId, Ty>,
+    /// The inferred type of each pattern in the body.
+    pub type_of_pat: ArenaMap<PatId, Ty>,
+    /// Expressions whose inferred type did not match the expected one.
+    pub(super) type_mismatches: ArenaMap<ExprId, TypeMismatch>,
+}
+
+impl InferenceResult {
+    // Read-only accessors over the recorded resolution maps. Each returns
+    // `None` when nothing was recorded for the given node (e.g. due to
+    // unresolved code).
+    pub fn method_resolution(&self, expr: ExprId) -> Option<FunctionId> {
+        self.method_resolutions.get(&expr).copied()
+    }
+    pub fn field_resolution(&self, expr: ExprId) -> Option<FieldId> {
+        self.field_resolutions.get(&expr).copied()
+    }
+    pub fn record_field_resolution(&self, expr: ExprId) -> Option<FieldId> {
+        self.record_field_resolutions.get(&expr).copied()
+    }
+    pub fn record_field_pat_resolution(&self, pat: PatId) -> Option<FieldId> {
+        self.record_field_pat_resolutions.get(&pat).copied()
+    }
+    pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantId> {
+        self.variant_resolutions.get(&id.into()).copied()
+    }
+    pub fn variant_resolution_for_pat(&self, id: PatId) -> Option<VariantId> {
+        self.variant_resolutions.get(&id.into()).copied()
+    }
+    pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option<AssocItemId> {
+        self.assoc_resolutions.get(&id.into()).copied()
+    }
+    pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<AssocItemId> {
+        self.assoc_resolutions.get(&id.into()).copied()
+    }
+    pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> {
+        self.type_mismatches.get(expr)
+    }
+    /// Pushes all diagnostics recorded during inference into `sink`.
+    pub fn add_diagnostics(
+        &self,
+        db: &dyn HirDatabase,
+        owner: DefWithBodyId,
+        sink: &mut DiagnosticSink,
+    ) {
+        self.diagnostics.iter().for_each(|it| it.add_to(db, owner, sink))
+    }
+}
+
+impl Index<ExprId> for InferenceResult {
+    type Output = Ty;
+
+    fn index(&self, expr: ExprId) -> &Ty {
+        // Expressions without a recorded type index as `Unknown` instead of
+        // panicking.
+        self.type_of_expr.get(expr).unwrap_or(&Ty::Unknown)
+    }
+}
+
+impl Index<PatId> for InferenceResult {
+    type Output = Ty;
+
+    fn index(&self, pat: PatId) -> &Ty {
+        // Patterns without a recorded type index as `Unknown` instead of
+        // panicking.
+        self.type_of_pat.get(pat).unwrap_or(&Ty::Unknown)
+    }
+}
+
+/// The inference context contains all information needed during type inference.
+#[derive(Clone, Debug)]
+struct InferenceContext<'a> {
+    db: &'a dyn HirDatabase,
+    /// The const/function/static whose body is being inferred.
+    owner: DefWithBodyId,
+    body: Arc<Body>,
+    resolver: Resolver,
+    /// Unification table tracking what each type variable is known to be.
+    table: unify::InferenceTable,
+    trait_env: Arc<TraitEnvironment>,
+    /// Trait/projection obligations not yet handed to the trait solver.
+    obligations: Vec<Obligation>,
+    /// The result being accumulated; taken out by `resolve_all` at the end.
+    result: InferenceResult,
+    /// The return type of the function being inferred, or the closure if we're
+    /// currently within one.
+    ///
+    /// We might consider using a nested inference context for checking
+    /// closures, but currently this is the only field that will change there,
+    /// so it doesn't make sense.
+    return_ty: Ty,
+    /// Whether the code currently being inferred diverges (e.g. after `return`).
+    diverges: Diverges,
+    /// Stack of enclosing breakable scopes (loops), innermost last.
+    breakables: Vec<BreakableContext>,
+}
+
+/// Per-loop state for inferring `break` expressions.
+#[derive(Clone, Debug)]
+struct BreakableContext {
+    /// Whether any `break` targeting this scope was seen.
+    pub may_break: bool,
+    /// The type carried by `break`s out of this scope.
+    pub break_ty: Ty,
+    /// The loop label, if any.
+    pub label: Option<name::Name>,
+}
+
+/// Finds the breakable scope a `break`/`continue` targets: the innermost
+/// scope with a matching label when one is given, otherwise the innermost
+/// breakable scope.
+fn find_breakable<'c>(
+    ctxs: &'c mut [BreakableContext],
+    label: Option<&name::Name>,
+) -> Option<&'c mut BreakableContext> {
+    match label {
+        Some(_) => ctxs.iter_mut().rev().find(|ctx| ctx.label.as_ref() == label),
+        None => ctxs.last_mut(),
+    }
+}
+
+impl<'a> InferenceContext<'a> {
+    fn new(db: &'a dyn HirDatabase, owner: DefWithBodyId, resolver: Resolver) -> Self {
+        InferenceContext {
+            result: InferenceResult::default(),
+            table: unify::InferenceTable::new(),
+            obligations: Vec::default(),
+            return_ty: Ty::Unknown, // set in collect_fn_signature
+            trait_env: TraitEnvironment::lower(db, &resolver),
+            db,
+            owner,
+            body: db.body(owner),
+            resolver,
+            diverges: Diverges::Maybe,
+            breakables: Vec::new(),
+        }
+    }
+
+    /// Finalizes inference: takes the accumulated result and resolves all
+    /// recorded expression and pattern types through the unification table.
+    fn resolve_all(mut self) -> InferenceResult {
+        // FIXME resolve obligations as well (use Guidance if necessary)
+        let mut result = std::mem::take(&mut self.result);
+        for ty in result.type_of_expr.values_mut() {
+            let resolved = self.table.resolve_ty_completely(mem::replace(ty, Ty::Unknown));
+            *ty = resolved;
+        }
+        for ty in result.type_of_pat.values_mut() {
+            let resolved = self.table.resolve_ty_completely(mem::replace(ty, Ty::Unknown));
+            *ty = resolved;
+        }
+        result
+    }
+
+    // Recording helpers: store per-node inference facts in `self.result`.
+
+    fn write_expr_ty(&mut self, expr: ExprId, ty: Ty) {
+        self.result.type_of_expr.insert(expr, ty);
+    }
+
+    fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId) {
+        self.result.method_resolutions.insert(expr, func);
+    }
+
+    fn write_field_resolution(&mut self, expr: ExprId, field: FieldId) {
+        self.result.field_resolutions.insert(expr, field);
+    }
+
+    fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantId) {
+        self.result.variant_resolutions.insert(id, variant);
+    }
+
+    fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: AssocItemId) {
+        self.result.assoc_resolutions.insert(id, item);
+    }
+
+    fn write_pat_ty(&mut self, pat: PatId, ty: Ty) {
+        self.result.type_of_pat.insert(pat, ty);
+    }
+
+    fn push_diagnostic(&mut self, diagnostic: InferenceDiagnostic) {
+        self.result.diagnostics.push(diagnostic);
+    }
+
+    /// Lowers a `TypeRef` to a `Ty` with the given impl-trait mode, replaces
+    /// any `Unknown`s by fresh inference variables, and normalizes associated
+    /// types mentioned in it.
+    fn make_ty_with_mode(
+        &mut self,
+        type_ref: &TypeRef,
+        impl_trait_mode: ImplTraitLoweringMode,
+    ) -> Ty {
+        // FIXME use right resolver for block
+        let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
+            .with_impl_trait_mode(impl_trait_mode);
+        let ty = Ty::from_hir(&ctx, type_ref);
+        let ty = self.insert_type_vars(ty);
+        self.normalize_associated_types_in(ty)
+    }
+
+    fn make_ty(&mut self, type_ref: &TypeRef) -> Ty {
+        self.make_ty_with_mode(type_ref, ImplTraitLoweringMode::Disallowed)
+    }
+
+    /// Replaces Ty::Unknown by a new type var, so we can maybe still infer it.
+    fn insert_type_vars_shallow(&mut self, ty: Ty) -> Ty {
+        match ty {
+            Ty::Unknown => self.table.new_type_var(),
+            _ => ty,
+        }
+    }
+
+    /// Like `insert_type_vars_shallow`, but applied recursively to all
+    /// `Unknown`s nested in the type.
+    fn insert_type_vars(&mut self, ty: Ty) -> Ty {
+        ty.fold(&mut |ty| self.insert_type_vars_shallow(ty))
+    }
+
+    /// Drains the pending obligations and hands each to the trait solver.
+    /// Unique solutions are applied to the unification table; ambiguous or
+    /// guidance-only solutions are applied (where possible) and re-queued.
+    fn resolve_obligations_as_possible(&mut self) {
+        let obligations = mem::replace(&mut self.obligations, Vec::new());
+        for obligation in obligations {
+            let in_env = InEnvironment::new(self.trait_env.clone(), obligation.clone());
+            let canonicalized = self.canonicalizer().canonicalize_obligation(in_env);
+            let solution =
+                self.db.trait_solve(self.resolver.krate().unwrap(), canonicalized.value.clone());
+
+            match solution {
+                Some(Solution::Unique(substs)) => {
+                    canonicalized.apply_solution(self, substs.0);
+                }
+                Some(Solution::Ambig(Guidance::Definite(substs))) => {
+                    canonicalized.apply_solution(self, substs.0);
+                    self.obligations.push(obligation);
+                }
+                Some(_) => {
+                    // FIXME use this when trying to resolve everything at the end
+                    self.obligations.push(obligation);
+                }
+                None => {
+                    // FIXME obligation cannot be fulfilled => diagnostic
+                }
+            };
+        }
+    }
+
+    fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
+        self.table.unify(ty1, ty2)
+    }
+
+    /// Resolves the type as far as currently possible, replacing type variables
+    /// by their known types. All types returned by the infer_* functions should
+    /// be resolved as far as possible, i.e. contain no type variables with
+    /// known type.
+    fn resolve_ty_as_possible(&mut self, ty: Ty) -> Ty {
+        self.resolve_obligations_as_possible();
+
+        self.table.resolve_ty_as_possible(ty)
+    }
+
+    fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> {
+        self.table.resolve_ty_shallow(ty)
+    }
+
+    fn resolve_associated_type(&mut self, inner_ty: Ty, assoc_ty: Option<TypeAliasId>) -> Ty {
+        self.resolve_associated_type_with_params(inner_ty, assoc_ty, &[])
+    }
+
+    /// Resolves `<inner_ty as Trait<params...>>::AssocTy` by introducing a
+    /// fresh type variable for the projection and registering both the trait
+    /// obligation and the projection obligation, then resolving the variable
+    /// as far as currently possible.
+    fn resolve_associated_type_with_params(
+        &mut self,
+        inner_ty: Ty,
+        assoc_ty: Option<TypeAliasId>,
+        params: &[Ty],
+    ) -> Ty {
+        match assoc_ty {
+            Some(res_assoc_ty) => {
+                let trait_ = match res_assoc_ty.lookup(self.db.upcast()).container {
+                    hir_def::AssocContainerId::TraitId(trait_) => trait_,
+                    _ => panic!("resolve_associated_type called with non-associated type"),
+                };
+                let ty = self.table.new_type_var();
+                let substs = Substs::build_for_def(self.db, res_assoc_ty)
+                    .push(inner_ty)
+                    .fill(params.iter().cloned())
+                    .build();
+                let trait_ref = TraitRef { trait_, substs: substs.clone() };
+                let projection = ProjectionPredicate {
+                    ty: ty.clone(),
+                    projection_ty: ProjectionTy { associated_ty: res_assoc_ty, parameters: substs },
+                };
+                self.obligations.push(Obligation::Trait(trait_ref));
+                self.obligations.push(Obligation::Projection(projection));
+                self.resolve_ty_as_possible(ty)
+            }
+            None => Ty::Unknown,
+        }
+    }
+
+    /// Recurses through the given type, normalizing associated types mentioned
+    /// in it by replacing them by type variables and registering obligations to
+    /// resolve later. This should be done once for every type we get from some
+    /// type annotation (e.g. from a let type annotation, field type or function
+    /// call). `make_ty` handles this already, but e.g. for field types we need
+    /// to do it as well.
+    fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty {
+        let ty = self.resolve_ty_as_possible(ty);
+        ty.fold(&mut |ty| match ty {
+            Ty::Projection(proj_ty) => self.normalize_projection_ty(proj_ty),
+            _ => ty,
+        })
+    }
+
+    /// Replaces a single projection type by a fresh variable plus a
+    /// `Normalize` obligation tying the variable to the projection.
+    fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty {
+        let var = self.table.new_type_var();
+        let predicate = ProjectionPredicate { projection_ty: proj_ty, ty: var.clone() };
+        let obligation = Obligation::Projection(predicate);
+        self.obligations.push(obligation);
+        var
+    }
+
+    /// Resolves the path of a record literal/pattern to its type and, where
+    /// applicable, the concrete variant (struct, union, or enum variant).
+    fn resolve_variant(&mut self, path: Option<&Path>) -> (Ty, Option<VariantId>) {
+        let path = match path {
+            Some(path) => path,
+            None => return (Ty::Unknown, None),
+        };
+        let resolver = &self.resolver;
+        let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+        // FIXME: this should resolve assoc items as well, see this example:
+        // https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521
+        let (resolution, unresolved) =
+            match resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
+                Some(it) => it,
+                None => return (Ty::Unknown, None),
+            };
+        return match resolution {
+            TypeNs::AdtId(AdtId::StructId(strukt)) => {
+                let substs = Ty::substs_from_path(&ctx, path, strukt.into(), true);
+                let ty = self.db.ty(strukt.into());
+                let ty = self.insert_type_vars(ty.subst(&substs));
+                forbid_unresolved_segments((ty, Some(strukt.into())), unresolved)
+            }
+            TypeNs::AdtId(AdtId::UnionId(u)) => {
+                let substs = Ty::substs_from_path(&ctx, path, u.into(), true);
+                let ty = self.db.ty(u.into());
+                let ty = self.insert_type_vars(ty.subst(&substs));
+                forbid_unresolved_segments((ty, Some(u.into())), unresolved)
+            }
+            TypeNs::EnumVariantId(var) => {
+                let substs = Ty::substs_from_path(&ctx, path, var.into(), true);
+                // The type is that of the enclosing enum, not the variant.
+                let ty = self.db.ty(var.parent.into());
+                let ty = self.insert_type_vars(ty.subst(&substs));
+                forbid_unresolved_segments((ty, Some(var.into())), unresolved)
+            }
+            TypeNs::SelfType(impl_id) => {
+                let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
+                let substs = Substs::type_params_for_generics(&generics);
+                let ty = self.db.impl_self_ty(impl_id).subst(&substs);
+                match unresolved {
+                    None => {
+                        let variant = ty_variant(&ty);
+                        (ty, variant)
+                    }
+                    Some(1) => {
+                        let segment = path.mod_path().segments.last().unwrap();
+                        // this could be an enum variant or associated type
+                        if let Some((AdtId::EnumId(enum_id), _)) = ty.as_adt() {
+                            let enum_data = self.db.enum_data(enum_id);
+                            if let Some(local_id) = enum_data.variant(segment) {
+                                let variant = EnumVariantId { parent: enum_id, local_id };
+                                return (ty, Some(variant.into()));
+                            }
+                        }
+                        // FIXME potentially resolve assoc type
+                        (Ty::Unknown, None)
+                    }
+                    Some(_) => {
+                        // FIXME diagnostic
+                        (Ty::Unknown, None)
+                    }
+                }
+            }
+            TypeNs::TypeAliasId(it) => {
+                let substs = Substs::build_for_def(self.db, it)
+                    .fill(std::iter::repeat_with(|| self.table.new_type_var()))
+                    .build();
+                let ty = self.db.ty(it.into()).subst(&substs);
+                let variant = ty_variant(&ty);
+                forbid_unresolved_segments((ty, variant), unresolved)
+            }
+            TypeNs::AdtSelfType(_) => {
+                // FIXME this could happen in array size expressions, once we're checking them
+                (Ty::Unknown, None)
+            }
+            TypeNs::GenericParam(_) => {
+                // FIXME potentially resolve assoc type
+                (Ty::Unknown, None)
+            }
+            TypeNs::AdtId(AdtId::EnumId(_)) | TypeNs::BuiltinType(_) | TypeNs::TraitId(_) => {
+                // FIXME diagnostic
+                (Ty::Unknown, None)
+            }
+        };
+
+        // Rejects results whose path still has unresolved trailing segments.
+        fn forbid_unresolved_segments(
+            result: (Ty, Option<VariantId>),
+            unresolved: Option<usize>,
+        ) -> (Ty, Option<VariantId>) {
+            if unresolved.is_none() {
+                result
+            } else {
+                // FIXME diagnostic
+                (Ty::Unknown, None)
+            }
+        }
+
+        // Extracts the variant behind an ADT type, if it has exactly one
+        // (structs and unions do; bare enums do not).
+        fn ty_variant(ty: &Ty) -> Option<VariantId> {
+            ty.as_adt().and_then(|(adt_id, _)| match adt_id {
+                AdtId::StructId(s) => Some(VariantId::StructId(s)),
+                AdtId::UnionId(u) => Some(VariantId::UnionId(u)),
+                AdtId::EnumId(_) => {
+                    // FIXME Error E0071, expected struct, variant or union type, found enum `Foo`
+                    None
+                }
+            })
+        }
+    }
+
+    fn collect_const(&mut self, data: &ConstData) {
+        self.return_ty = self.make_ty(&data.type_ref);
+    }
+
+    fn collect_static(&mut self, data: &StaticData) {
+        self.return_ty = self.make_ty(&data.type_ref);
+    }
+
+    /// Lowers the function signature: infers each parameter pattern against
+    /// its declared type (with `impl Trait` in argument position lowered to
+    /// implicit parameters) and records the declared return type.
+    fn collect_fn(&mut self, data: &FunctionData) {
+        let body = Arc::clone(&self.body); // avoid borrow checker problem
+        let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
+            .with_impl_trait_mode(ImplTraitLoweringMode::Param);
+        let param_tys =
+            data.params.iter().map(|type_ref| Ty::from_hir(&ctx, type_ref)).collect::<Vec<_>>();
+        for (ty, pat) in param_tys.into_iter().zip(body.params.iter()) {
+            let ty = self.insert_type_vars(ty);
+            let ty = self.normalize_associated_types_in(ty);
+
+            self.infer_pat(*pat, &ty, BindingMode::default());
+        }
+        let return_ty = self.make_ty_with_mode(&data.ret_type, ImplTraitLoweringMode::Disallowed); // FIXME implement RPIT
+        self.return_ty = return_ty;
+    }
+
+    fn infer_body(&mut self) {
+        self.infer_expr_coerce(self.body.body_expr, &Expectation::has_type(self.return_ty.clone()));
+    }
+
+    // Lookups of well-known items (lang items and known paths) used by
+    // operator, loop, and builtin-syntax inference. Each returns `None` when
+    // the item is not available in the current crate graph.
+
+    fn resolve_lang_item(&self, name: &str) -> Option<LangItemTarget> {
+        let krate = self.resolver.krate()?;
+        let name = SmolStr::new_inline_from_ascii(name.len(), name.as_bytes());
+        self.db.lang_item(krate, name)
+    }
+
+    fn resolve_into_iter_item(&self) -> Option<TypeAliasId> {
+        let path = path![core::iter::IntoIterator];
+        let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
+        self.db.trait_data(trait_).associated_type_by_name(&name![Item])
+    }
+
+    fn resolve_ops_try_ok(&self) -> Option<TypeAliasId> {
+        let path = path![core::ops::Try];
+        let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
+        self.db.trait_data(trait_).associated_type_by_name(&name![Ok])
+    }
+
+    fn resolve_ops_neg_output(&self) -> Option<TypeAliasId> {
+        let trait_ = self.resolve_lang_item("neg")?.as_trait()?;
+        self.db.trait_data(trait_).associated_type_by_name(&name![Output])
+    }
+
+    fn resolve_ops_not_output(&self) -> Option<TypeAliasId> {
+        let trait_ = self.resolve_lang_item("not")?.as_trait()?;
+        self.db.trait_data(trait_).associated_type_by_name(&name![Output])
+    }
+
+    fn resolve_future_future_output(&self) -> Option<TypeAliasId> {
+        let trait_ = self.resolve_lang_item("future_trait")?.as_trait()?;
+        self.db.trait_data(trait_).associated_type_by_name(&name![Output])
+    }
+
+    fn resolve_boxed_box(&self) -> Option<AdtId> {
+        let struct_ = self.resolve_lang_item("owned_box")?.as_struct()?;
+        Some(struct_.into())
+    }
+
+    fn resolve_range_full(&self) -> Option<AdtId> {
+        let path = path![core::ops::RangeFull];
+        let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+        Some(struct_.into())
+    }
+
+    fn resolve_range(&self) -> Option<AdtId> {
+        let path = path![core::ops::Range];
+        let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+        Some(struct_.into())
+    }
+
+    fn resolve_range_inclusive(&self) -> Option<AdtId> {
+        let path = path![core::ops::RangeInclusive];
+        let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+        Some(struct_.into())
+    }
+
+    fn resolve_range_from(&self) -> Option<AdtId> {
+        let path = path![core::ops::RangeFrom];
+        let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+        Some(struct_.into())
+    }
+
+    fn resolve_range_to(&self) -> Option<AdtId> {
+        let path = path![core::ops::RangeTo];
+        let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+        Some(struct_.into())
+    }
+
+    fn resolve_range_to_inclusive(&self) -> Option<AdtId> {
+        let path = path![core::ops::RangeToInclusive];
+        let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+        Some(struct_.into())
+    }
+
+    fn resolve_ops_index(&self) -> Option<TraitId> {
+        self.resolve_lang_item("index")?.as_trait()
+    }
+
+    fn resolve_ops_index_output(&self) -> Option<TypeAliasId> {
+        let trait_ = self.resolve_ops_index()?;
+        self.db.trait_data(trait_).associated_type_by_name(&name![Output])
+    }
+}
+
+/// The kinds of placeholders we need during type inference. There's separate
+/// values for general types, and for integer and float variables. The latter
+/// two are used for inference of literal values (e.g. `100` could be one of
+/// several integer types).
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+pub enum InferTy {
+    TypeVar(unify::TypeVarId),
+    IntVar(unify::TypeVarId),
+    FloatVar(unify::TypeVarId),
+    /// A variable that falls back to `!` instead of `{unknown}` if never
+    /// otherwise constrained (see the never-type coercion in `coerce_inner`).
+    MaybeNeverTypeVar(unify::TypeVarId),
+}
+
+impl InferTy {
+    /// Returns the underlying variable id, regardless of variable kind.
+    fn to_inner(self) -> unify::TypeVarId {
+        match self {
+            InferTy::TypeVar(ty)
+            | InferTy::IntVar(ty)
+            | InferTy::FloatVar(ty)
+            | InferTy::MaybeNeverTypeVar(ty) => ty,
+        }
+    }
+
+    /// The type an unconstrained variable of this kind resolves to at the end
+    /// of inference (mirroring rustc's `i32`/`f64` literal defaulting).
+    fn fallback_value(self) -> Ty {
+        match self {
+            InferTy::TypeVar(..) => Ty::Unknown,
+            InferTy::IntVar(..) => Ty::simple(TypeCtor::Int(IntTy::i32())),
+            InferTy::FloatVar(..) => Ty::simple(TypeCtor::Float(FloatTy::f64())),
+            InferTy::MaybeNeverTypeVar(..) => Ty::simple(TypeCtor::Never),
+        }
+    }
+}
+
+/// When inferring an expression, we propagate downward whatever type hint we
+/// are able in the form of an `Expectation`.
+#[derive(Clone, PartialEq, Eq, Debug)]
+struct Expectation {
+    /// The expected type (`Ty::Unknown` when there is no expectation).
+    ty: Ty,
+    /// See the `rvalue_hint` method.
+    rvalue_hint: bool,
+}
+
+impl Expectation {
+    /// The expectation that the type of the expression needs to equal the given
+    /// type.
+    fn has_type(ty: Ty) -> Self {
+        Expectation { ty, rvalue_hint: false }
+    }
+
+    /// The following explanation is copied straight from rustc:
+    /// Provides an expectation for an rvalue expression given an *optional*
+    /// hint, which is not required for type safety (the resulting type might
+    /// be checked higher up, as is the case with `&expr` and `box expr`), but
+    /// is useful in determining the concrete type.
+    ///
+    /// The primary use case is where the expected type is a fat pointer,
+    /// like `&[isize]`. For example, consider the following statement:
+    ///
+    ///    let x: &[isize] = &[1, 2, 3];
+    ///
+    /// In this case, the expected type for the `&[1, 2, 3]` expression is
+    /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
+    /// expectation `ExpectHasType([isize])`, that would be too strong --
+    /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`.
+    /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced
+    /// to the type `&[isize]`. Therefore, we propagate this more limited hint,
+    /// which still is useful, because it informs integer literals and the like.
+    /// See the test case `test/ui/coerce-expect-unsized.rs` and #20169
+    /// for examples of where this comes up,.
+    fn rvalue_hint(ty: Ty) -> Self {
+        Expectation { ty, rvalue_hint: true }
+    }
+
+    /// This expresses no expectation on the type.
+    fn none() -> Self {
+        Expectation { ty: Ty::Unknown, rvalue_hint: false }
+    }
+
+    /// The type to coerce toward: the expected type when it's a hard
+    /// expectation, `Unknown` (i.e. no coercion target) for rvalue hints.
+    fn coercion_target(&self) -> &Ty {
+        if self.rvalue_hint {
+            &Ty::Unknown
+        } else {
+            &self.ty
+        }
+    }
+}
+
+/// Whether the currently inferred code path diverges (never returns
+/// normally). The `Ord` derive is relied upon by the `BitAnd`/`BitOr` impls
+/// below: `Maybe < Always`.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
+enum Diverges {
+    Maybe,
+    Always,
+}
+
+impl Diverges {
+    fn is_always(self) -> bool {
+        self == Diverges::Always
+    }
+}
+
+// `&` = both branches diverge (min), `|` = either diverges (max); both rely
+// on the `Ord` derive where `Maybe < Always`.
+impl std::ops::BitAnd for Diverges {
+    type Output = Self;
+    fn bitand(self, other: Self) -> Self {
+        std::cmp::min(self, other)
+    }
+}
+
+impl std::ops::BitOr for Diverges {
+    type Output = Self;
+    fn bitor(self, other: Self) -> Self {
+        std::cmp::max(self, other)
+    }
+}
+
+impl std::ops::BitAndAssign for Diverges {
+    fn bitand_assign(&mut self, other: Self) {
+        *self = *self & other;
+    }
+}
+
+impl std::ops::BitOrAssign for Diverges {
+    fn bitor_assign(&mut self, other: Self) {
+        *self = *self | other;
+    }
+}
+
+/// Internal diagnostics recorded during inference, converted to user-facing
+/// diagnostics (with source-map lookups) only when sunk via `add_to`.
+mod diagnostics {
+    use hir_def::{expr::ExprId, DefWithBodyId};
+    use hir_expand::diagnostics::DiagnosticSink;
+
+    use crate::{
+        db::HirDatabase,
+        diagnostics::{BreakOutsideOfLoop, NoSuchField},
+    };
+
+    #[derive(Debug, PartialEq, Eq, Clone)]
+    pub(super) enum InferenceDiagnostic {
+        NoSuchField { expr: ExprId, field: usize },
+        BreakOutsideOfLoop { expr: ExprId },
+    }
+
+    impl InferenceDiagnostic {
+        /// Converts this diagnostic to its user-facing form and pushes it into
+        /// `sink`. Source-map access is deferred to here so that inference
+        /// itself doesn't depend on syntax trees.
+        pub(super) fn add_to(
+            &self,
+            db: &dyn HirDatabase,
+            owner: DefWithBodyId,
+            sink: &mut DiagnosticSink,
+        ) {
+            match self {
+                InferenceDiagnostic::NoSuchField { expr, field } => {
+                    let (_, source_map) = db.body_with_source_map(owner);
+                    let field = source_map.field_syntax(*expr, *field);
+                    sink.push(NoSuchField { file: field.file_id, field: field.value })
+                }
+                InferenceDiagnostic::BreakOutsideOfLoop { expr } => {
+                    let (_, source_map) = db.body_with_source_map(owner);
+                    let ptr = source_map
+                        .expr_syntax(*expr)
+                        .expect("break outside of loop in synthetic syntax");
+                    sink.push(BreakOutsideOfLoop { file: ptr.file_id, expr: ptr.value })
+                }
+            }
+        }
+    }
+}
--- /dev/null
+//! Coercion logic. Coercions are certain type conversions that can implicitly
+//! happen in certain places, e.g. weakening `&mut` to `&` or deref coercions
+//! like going from `&Vec<T>` to `&[T]`.
+//!
+//! See: https://doc.rust-lang.org/nomicon/coercions.html
+
+use hir_def::{lang_item::LangItemTarget, type_ref::Mutability};
+use test_utils::mark;
+
+use crate::{autoderef, traits::Solution, Obligation, Substs, TraitRef, Ty, TypeCtor};
+
+use super::{unify::TypeVarValue, InEnvironment, InferTy, InferenceContext};
+
+impl<'a> InferenceContext<'a> {
+ /// Unify two types, but may coerce the first one to the second one
+ /// using "implicit coercion rules" if needed.
+ pub(super) fn coerce(&mut self, from_ty: &Ty, to_ty: &Ty) -> bool {
+ let from_ty = self.resolve_ty_shallow(from_ty).into_owned();
+ let to_ty = self.resolve_ty_shallow(to_ty);
+ self.coerce_inner(from_ty, &to_ty)
+ }
+
+ /// Merge two types from different branches, with possible coercion.
+ ///
+ /// Mostly this means trying to coerce one to the other, but
+ /// - if we have two function types for different functions, we need to
+ /// coerce both to function pointers;
+ /// - if we were concerned with lifetime subtyping, we'd need to look for a
+ /// least upper bound.
+ pub(super) fn coerce_merge_branch(&mut self, ty1: &Ty, ty2: &Ty) -> Ty {
+ if self.coerce(ty1, ty2) {
+ ty2.clone()
+ } else if self.coerce(ty2, ty1) {
+ ty1.clone()
+ } else {
+ if let (ty_app!(TypeCtor::FnDef(_)), ty_app!(TypeCtor::FnDef(_))) = (ty1, ty2) {
+ mark::hit!(coerce_fn_reification);
+ // Special case: two function types. Try to coerce both to
+ // pointers to have a chance at getting a match. See
+ // https://github.com/rust-lang/rust/blob/7b805396bf46dce972692a6846ce2ad8481c5f85/src/librustc_typeck/check/coercion.rs#L877-L916
+ let sig1 = ty1.callable_sig(self.db).expect("FnDef without callable sig");
+ let sig2 = ty2.callable_sig(self.db).expect("FnDef without callable sig");
+ let ptr_ty1 = Ty::fn_ptr(sig1);
+ let ptr_ty2 = Ty::fn_ptr(sig2);
+ self.coerce_merge_branch(&ptr_ty1, &ptr_ty2)
+ } else {
+ mark::hit!(coerce_merge_fail_fallback);
+ ty1.clone()
+ }
+ }
+ }
+
+ fn coerce_inner(&mut self, mut from_ty: Ty, to_ty: &Ty) -> bool {
+ match (&from_ty, to_ty) {
+ // Never type will make type variable to fallback to Never Type instead of Unknown.
+ (ty_app!(TypeCtor::Never), Ty::Infer(InferTy::TypeVar(tv))) => {
+ let var = self.table.new_maybe_never_type_var();
+ self.table.var_unification_table.union_value(*tv, TypeVarValue::Known(var));
+ return true;
+ }
+ (ty_app!(TypeCtor::Never), _) => return true,
+
+ // Trivial cases, this should go after `never` check to
+ // avoid infer result type to be never
+ _ => {
+ if self.table.unify_inner_trivial(&from_ty, &to_ty, 0) {
+ return true;
+ }
+ }
+ }
+
+ // Pointer weakening and function to pointer
+ match (&mut from_ty, to_ty) {
+ // `*mut T`, `&mut T, `&T`` -> `*const T`
+ // `&mut T` -> `&T`
+ // `&mut T` -> `*mut T`
+ (ty_app!(c1@TypeCtor::RawPtr(_)), ty_app!(c2@TypeCtor::RawPtr(Mutability::Shared)))
+ | (ty_app!(c1@TypeCtor::Ref(_)), ty_app!(c2@TypeCtor::RawPtr(Mutability::Shared)))
+ | (ty_app!(c1@TypeCtor::Ref(_)), ty_app!(c2@TypeCtor::Ref(Mutability::Shared)))
+ | (ty_app!(c1@TypeCtor::Ref(Mutability::Mut)), ty_app!(c2@TypeCtor::RawPtr(_))) => {
+ *c1 = *c2;
+ }
+
+ // Illegal mutablity conversion
+ (
+ ty_app!(TypeCtor::RawPtr(Mutability::Shared)),
+ ty_app!(TypeCtor::RawPtr(Mutability::Mut)),
+ )
+ | (
+ ty_app!(TypeCtor::Ref(Mutability::Shared)),
+ ty_app!(TypeCtor::Ref(Mutability::Mut)),
+ ) => return false,
+
+ // `{function_type}` -> `fn()`
+ (ty_app!(TypeCtor::FnDef(_)), ty_app!(TypeCtor::FnPtr { .. })) => {
+ match from_ty.callable_sig(self.db) {
+ None => return false,
+ Some(sig) => {
+ from_ty = Ty::fn_ptr(sig);
+ }
+ }
+ }
+
+ (ty_app!(TypeCtor::Closure { .. }, params), ty_app!(TypeCtor::FnPtr { .. })) => {
+ from_ty = params[0].clone();
+ }
+
+ _ => {}
+ }
+
+ if let Some(ret) = self.try_coerce_unsized(&from_ty, &to_ty) {
+ return ret;
+ }
+
+ // Auto Deref if cannot coerce
+ match (&from_ty, to_ty) {
+ // FIXME: DerefMut
+ (ty_app!(TypeCtor::Ref(_), st1), ty_app!(TypeCtor::Ref(_), st2)) => {
+ self.unify_autoderef_behind_ref(&st1[0], &st2[0])
+ }
+
+ // Otherwise, normal unify
+ _ => self.unify(&from_ty, to_ty),
+ }
+ }
+
+    /// Coerce a type using `from_ty: CoerceUnsized<to_ty>`
+    ///
+    /// Returns `None` when the coercion machinery does not apply (lang item
+    /// missing or malformed, or the solver gives no unique solution) and
+    /// `Some(true)` when the trait goal was solved and applied.
+    ///
+    /// See: https://doc.rust-lang.org/nightly/std/marker/trait.CoerceUnsized.html
+    fn try_coerce_unsized(&mut self, from_ty: &Ty, to_ty: &Ty) -> Option<bool> {
+        let krate = self.resolver.krate().unwrap();
+        // `CoerceUnsized` is found via its lang item; without it (e.g. a
+        // no-std sysroot that lacks it) unsized coercion cannot be checked.
+        let coerce_unsized_trait = match self.db.lang_item(krate, "coerce_unsized".into()) {
+            Some(LangItemTarget::TraitId(trait_)) => trait_,
+            _ => return None,
+        };
+
+        let generic_params = crate::utils::generics(self.db.upcast(), coerce_unsized_trait.into());
+        if generic_params.len() != 2 {
+            // The CoerceUnsized trait should have two generic params: Self and T.
+            return None;
+        }
+
+        // Build the goal `from_ty: CoerceUnsized<to_ty>` in the current trait
+        // environment and hand it to the trait solver.
+        let substs = Substs::build_for_generics(&generic_params)
+            .push(from_ty.clone())
+            .push(to_ty.clone())
+            .build();
+        let trait_ref = TraitRef { trait_: coerce_unsized_trait, substs };
+        let goal = InEnvironment::new(self.trait_env.clone(), Obligation::Trait(trait_ref));
+
+        let canonicalizer = self.canonicalizer();
+        let canonicalized = canonicalizer.canonicalize_obligation(goal);
+
+        let solution = self.db.trait_solve(krate, canonicalized.value.clone())?;
+
+        match solution {
+            Solution::Unique(v) => {
+                // Apply the unique solution back onto our inference variables.
+                canonicalized.apply_solution(self, v.0);
+            }
+            // Ambiguous or no solution: don't treat this as a coercion.
+            _ => return None,
+        };
+
+        Some(true)
+    }
+
+    /// Unify `from_ty` to `to_ty` with optional auto Deref
+    ///
+    /// Note that the parameters have already been stripped of the outer
+    /// reference (the caller passes the referents of two `&`-types).
+    fn unify_autoderef_behind_ref(&mut self, from_ty: &Ty, to_ty: &Ty) -> bool {
+        let canonicalized = self.canonicalizer().canonicalize_ty(from_ty.clone());
+        let to_ty = self.resolve_ty_shallow(&to_ty);
+        // FIXME: Auto DerefMut
+        // Walk the deref chain of `from_ty`, trying to reach something that
+        // unifies with `to_ty`.
+        for derefed_ty in autoderef::autoderef(
+            self.db,
+            self.resolver.krate(),
+            InEnvironment {
+                value: canonicalized.value.clone(),
+                environment: self.trait_env.clone(),
+            },
+        ) {
+            let derefed_ty = canonicalized.decanonicalize_ty(derefed_ty.value);
+            match (&*self.resolve_ty_shallow(&derefed_ty), &*to_ty) {
+                // Stop when constructor matches.
+                (ty_app!(from_ctor, st1), ty_app!(to_ctor, st2)) if from_ctor == to_ctor => {
+                    // It will not recurse to `coerce`.
+                    return self.table.unify_substs(st1, st2, 0);
+                }
+                _ => {
+                    if self.table.unify_inner_trivial(&derefed_ty, &to_ty, 0) {
+                        return true;
+                    }
+                }
+            }
+        }
+
+        // Deref chain exhausted without a unifiable step.
+        false
+    }
+}
--- /dev/null
+//! Type inference for expressions.
+
+use std::iter::{repeat, repeat_with};
+use std::{mem, sync::Arc};
+
+use hir_def::{
+ builtin_type::Signedness,
+ expr::{Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp},
+ path::{GenericArg, GenericArgs},
+ resolver::resolver_for_expr,
+ AdtId, AssocContainerId, FieldId, Lookup,
+};
+use hir_expand::name::{name, Name};
+use syntax::ast::RangeOp;
+
+use crate::{
+ autoderef, method_resolution, op,
+ traits::{FnTrait, InEnvironment},
+ utils::{generics, variant_data, Generics},
+ ApplicationTy, Binders, CallableDefId, InferTy, IntTy, Mutability, Obligation, Rawness, Substs,
+ TraitRef, Ty, TypeCtor,
+};
+
+use super::{
+ find_breakable, BindingMode, BreakableContext, Diverges, Expectation, InferenceContext,
+ InferenceDiagnostic, TypeMismatch,
+};
+
+impl<'a> InferenceContext<'a> {
+    /// Infer the type of `tgt_expr` and unify it with `expected`, recording a
+    /// `TypeMismatch` for the expression when unification fails. No coercion
+    /// is attempted here; see `infer_expr_coerce` for the coercing variant.
+    pub(super) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
+        let ty = self.infer_expr_inner(tgt_expr, expected);
+        if ty.is_never() {
+            // Any expression that produces a value of type `!` must have diverged
+            self.diverges = Diverges::Always;
+        }
+        let could_unify = self.unify(&ty, &expected.ty);
+        if !could_unify {
+            // Keep the actual type but remember the mismatch for diagnostics.
+            self.result.type_mismatches.insert(
+                tgt_expr,
+                TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() },
+            );
+        }
+        self.resolve_ty_as_possible(ty)
+    }
+
+    /// Infer type of expression with possibly implicit coerce to the expected type.
+    /// Return the type after possible coercion.
+    pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
+        let ty = self.infer_expr_inner(expr, &expected);
+        let ty = if !self.coerce(&ty, &expected.coercion_target()) {
+            self.result
+                .type_mismatches
+                .insert(expr, TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() });
+            // Return actual type when type mismatch.
+            // This is needed for diagnostic when return type mismatch.
+            ty
+        } else if expected.coercion_target() == &Ty::Unknown {
+            // Nothing useful to take from an unknown expectation.
+            ty
+        } else {
+            // Coercion succeeded: the expression now has the expected type.
+            expected.ty.clone()
+        };
+
+        self.resolve_ty_as_possible(ty)
+    }
+
+    /// Try to obtain a callable signature for `ty` by proving
+    /// `ty: FnOnce<(T0, .., Tn)>` with `num_args` fresh type variables as the
+    /// argument tuple, then projecting the `Output` associated type.
+    ///
+    /// Returns `None` if the `FnOnce` lang trait is missing or malformed, or
+    /// the trait goal cannot be solved.
+    fn callable_sig_from_fn_trait(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
+        let krate = self.resolver.krate()?;
+        let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?;
+        let output_assoc_type =
+            self.db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?;
+        // `FnOnce` is expected to have exactly Self + the argument-tuple param.
+        let generic_params = generics(self.db.upcast(), fn_once_trait.into());
+        if generic_params.len() != 2 {
+            return None;
+        }
+
+        // One fresh inference variable per argument, collected into a tuple
+        // type to instantiate `FnOnce`'s argument parameter.
+        let mut param_builder = Substs::builder(num_args);
+        let mut arg_tys = vec![];
+        for _ in 0..num_args {
+            let arg = self.table.new_type_var();
+            param_builder = param_builder.push(arg.clone());
+            arg_tys.push(arg);
+        }
+        let parameters = param_builder.build();
+        let arg_ty = Ty::Apply(ApplicationTy {
+            ctor: TypeCtor::Tuple { cardinality: num_args as u16 },
+            parameters,
+        });
+        let substs =
+            Substs::build_for_generics(&generic_params).push(ty.clone()).push(arg_ty).build();
+
+        let trait_env = Arc::clone(&self.trait_env);
+        let implements_fn_trait =
+            Obligation::Trait(TraitRef { trait_: fn_once_trait, substs: substs.clone() });
+        let goal = self.canonicalizer().canonicalize_obligation(InEnvironment {
+            value: implements_fn_trait.clone(),
+            environment: trait_env,
+        });
+        if self.db.trait_solve(krate, goal.value).is_some() {
+            // Register the obligation so the solution can constrain our
+            // argument variables later; then project `Output` as the return type.
+            self.obligations.push(implements_fn_trait);
+            let output_proj_ty =
+                crate::ProjectionTy { associated_ty: output_assoc_type, parameters: substs };
+            let return_ty = self.normalize_projection_ty(output_proj_ty);
+            Some((arg_tys, return_ty))
+        } else {
+            None
+        }
+    }
+
+    /// Return the `(parameter types, return type)` of a callable `ty`: first
+    /// from the type's intrinsic signature, otherwise by solving a
+    /// `FnOnce` trait goal (see `callable_sig_from_fn_trait`).
+    pub fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
+        match ty.callable_sig(self.db) {
+            Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())),
+            None => self.callable_sig_from_fn_trait(ty, num_args),
+        }
+    }
+
+    /// Core per-expression inference. Computes the type of `tgt_expr`, writes
+    /// it into the inference result and returns it. Unification/coercion
+    /// against `expected` is the caller's job (`infer_expr` /
+    /// `infer_expr_coerce`); here `expected` only serves as a hint (e.g. for
+    /// closures, tuples and array element types).
+    fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
+        let body = Arc::clone(&self.body); // avoid borrow checker problem
+        let ty = match &body[tgt_expr] {
+            Expr::Missing => Ty::Unknown,
+            Expr::If { condition, then_branch, else_branch } => {
+                // if let is desugared to match, so this is always simple if
+                self.infer_expr(*condition, &Expectation::has_type(Ty::simple(TypeCtor::Bool)));
+
+                // Divergence of the condition applies unconditionally; the
+                // branches only diverge if *both* arms do.
+                let condition_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
+                let mut both_arms_diverge = Diverges::Always;
+
+                let then_ty = self.infer_expr_inner(*then_branch, &expected);
+                both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe);
+                let else_ty = match else_branch {
+                    Some(else_branch) => self.infer_expr_inner(*else_branch, &expected),
+                    None => Ty::unit(),
+                };
+                both_arms_diverge &= self.diverges;
+
+                self.diverges = condition_diverges | both_arms_diverge;
+
+                self.coerce_merge_branch(&then_ty, &else_ty)
+            }
+            Expr::Block { statements, tail, .. } => {
+                // FIXME: Breakable block inference
+                self.infer_block(statements, *tail, expected)
+            }
+            Expr::Unsafe { body } => self.infer_expr(*body, expected),
+            Expr::TryBlock { body } => {
+                let _inner = self.infer_expr(*body, expected);
+                // FIXME should be std::result::Result<{inner}, _>
+                Ty::Unknown
+            }
+            Expr::Loop { body, label } => {
+                self.breakables.push(BreakableContext {
+                    may_break: false,
+                    break_ty: self.table.new_type_var(),
+                    label: label.clone(),
+                });
+                self.infer_expr(*body, &Expectation::has_type(Ty::unit()));
+
+                let ctxt = self.breakables.pop().expect("breakable stack broken");
+                if ctxt.may_break {
+                    self.diverges = Diverges::Maybe;
+                }
+
+                // A loop that can `break` has the merged break type; one that
+                // never breaks diverges, i.e. has type `!`.
+                if ctxt.may_break {
+                    ctxt.break_ty
+                } else {
+                    Ty::simple(TypeCtor::Never)
+                }
+            }
+            Expr::While { condition, body, label } => {
+                self.breakables.push(BreakableContext {
+                    may_break: false,
+                    break_ty: Ty::Unknown,
+                    label: label.clone(),
+                });
+                // while let is desugared to a match loop, so this is always simple while
+                self.infer_expr(*condition, &Expectation::has_type(Ty::simple(TypeCtor::Bool)));
+                self.infer_expr(*body, &Expectation::has_type(Ty::unit()));
+                let _ctxt = self.breakables.pop().expect("breakable stack broken");
+                // the body may not run, so it diverging doesn't mean we diverge
+                self.diverges = Diverges::Maybe;
+                Ty::unit()
+            }
+            Expr::For { iterable, body, pat, label } => {
+                let iterable_ty = self.infer_expr(*iterable, &Expectation::none());
+
+                self.breakables.push(BreakableContext {
+                    may_break: false,
+                    break_ty: Ty::Unknown,
+                    label: label.clone(),
+                });
+                // The pattern binds `<iterable as IntoIterator>::Item`.
+                let pat_ty =
+                    self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item());
+
+                self.infer_pat(*pat, &pat_ty, BindingMode::default());
+
+                self.infer_expr(*body, &Expectation::has_type(Ty::unit()));
+                let _ctxt = self.breakables.pop().expect("breakable stack broken");
+                // the body may not run, so it diverging doesn't mean we diverge
+                self.diverges = Diverges::Maybe;
+                Ty::unit()
+            }
+            Expr::Lambda { body, args, ret_type, arg_types } => {
+                assert_eq!(args.len(), arg_types.len());
+
+                let mut sig_tys = Vec::new();
+
+                // collect explicitly written argument types
+                for arg_type in arg_types.iter() {
+                    let arg_ty = if let Some(type_ref) = arg_type {
+                        self.make_ty(type_ref)
+                    } else {
+                        self.table.new_type_var()
+                    };
+                    sig_tys.push(arg_ty);
+                }
+
+                // add return type
+                let ret_ty = match ret_type {
+                    Some(type_ref) => self.make_ty(type_ref),
+                    None => self.table.new_type_var(),
+                };
+                // The return type is the last entry of the FnPtr substitution.
+                sig_tys.push(ret_ty.clone());
+                let sig_ty = Ty::apply(
+                    TypeCtor::FnPtr { num_args: sig_tys.len() as u16 - 1, is_varargs: false },
+                    Substs(sig_tys.clone().into()),
+                );
+                let closure_ty =
+                    Ty::apply_one(TypeCtor::Closure { def: self.owner, expr: tgt_expr }, sig_ty);
+
+                // Eagerly try to relate the closure type with the expected
+                // type, otherwise we often won't have enough information to
+                // infer the body.
+                self.coerce(&closure_ty, &expected.ty);
+
+                // Now go through the argument patterns
+                for (arg_pat, arg_ty) in args.iter().zip(sig_tys) {
+                    let resolved = self.resolve_ty_as_possible(arg_ty);
+                    self.infer_pat(*arg_pat, &resolved, BindingMode::default());
+                }
+
+                // Infer the body with the closure's return type as the current
+                // return type; restore the enclosing function's state afterwards.
+                let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
+                let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
+
+                self.infer_expr_coerce(*body, &Expectation::has_type(ret_ty));
+
+                self.diverges = prev_diverges;
+                self.return_ty = prev_ret_ty;
+
+                closure_ty
+            }
+            Expr::Call { callee, args } => {
+                let callee_ty = self.infer_expr(*callee, &Expectation::none());
+                let canonicalized = self.canonicalizer().canonicalize_ty(callee_ty.clone());
+                // Autoderef the callee until something callable is found
+                // (e.g. calling through `&dyn Fn` or a smart pointer).
+                let mut derefs = autoderef(
+                    self.db,
+                    self.resolver.krate(),
+                    InEnvironment {
+                        value: canonicalized.value.clone(),
+                        environment: self.trait_env.clone(),
+                    },
+                );
+                let (param_tys, ret_ty): (Vec<Ty>, Ty) = derefs
+                    .find_map(|callee_deref_ty| {
+                        self.callable_sig(
+                            &canonicalized.decanonicalize_ty(callee_deref_ty.value),
+                            args.len(),
+                        )
+                    })
+                    .unwrap_or((Vec::new(), Ty::Unknown));
+                self.register_obligations_for_call(&callee_ty);
+                // NOTE(review): `¶m_tys` below looks like mojibake for
+                // `&param_tys` — verify against the upstream source.
+                self.check_call_arguments(args, ¶m_tys);
+                self.normalize_associated_types_in(ret_ty)
+            }
+            Expr::MethodCall { receiver, args, method_name, generic_args } => self
+                .infer_method_call(tgt_expr, *receiver, &args, &method_name, generic_args.as_ref()),
+            Expr::Match { expr, arms } => {
+                let input_ty = self.infer_expr(*expr, &Expectation::none());
+
+                // An empty match (`match x {}`) has type `!`.
+                let mut result_ty = if arms.is_empty() {
+                    Ty::simple(TypeCtor::Never)
+                } else {
+                    self.table.new_type_var()
+                };
+
+                let matchee_diverges = self.diverges;
+                let mut all_arms_diverge = Diverges::Always;
+
+                for arm in arms {
+                    self.diverges = Diverges::Maybe;
+                    let _pat_ty = self.infer_pat(arm.pat, &input_ty, BindingMode::default());
+                    if let Some(guard_expr) = arm.guard {
+                        self.infer_expr(
+                            guard_expr,
+                            &Expectation::has_type(Ty::simple(TypeCtor::Bool)),
+                        );
+                    }
+
+                    let arm_ty = self.infer_expr_inner(arm.expr, &expected);
+                    all_arms_diverge &= self.diverges;
+                    result_ty = self.coerce_merge_branch(&result_ty, &arm_ty);
+                }
+
+                self.diverges = matchee_diverges | all_arms_diverge;
+
+                result_ty
+            }
+            Expr::Path(p) => {
+                // FIXME this could be more efficient...
+                let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
+                self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or(Ty::Unknown)
+            }
+            Expr::Continue { .. } => Ty::simple(TypeCtor::Never),
+            Expr::Break { expr, label } => {
+                let val_ty = if let Some(expr) = expr {
+                    self.infer_expr(*expr, &Expectation::none())
+                } else {
+                    Ty::unit()
+                };
+
+                // The breakable context is looked up twice because
+                // `coerce_merge_branch` needs `&mut self`, so the first borrow
+                // of `self.breakables` cannot be held across it.
+                let last_ty =
+                    if let Some(ctxt) = find_breakable(&mut self.breakables, label.as_ref()) {
+                        ctxt.break_ty.clone()
+                    } else {
+                        Ty::Unknown
+                    };
+
+                let merged_type = self.coerce_merge_branch(&last_ty, &val_ty);
+
+                if let Some(ctxt) = find_breakable(&mut self.breakables, label.as_ref()) {
+                    ctxt.break_ty = merged_type;
+                    ctxt.may_break = true;
+                } else {
+                    self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
+                        expr: tgt_expr,
+                    });
+                }
+
+                Ty::simple(TypeCtor::Never)
+            }
+            Expr::Return { expr } => {
+                if let Some(expr) = expr {
+                    self.infer_expr_coerce(*expr, &Expectation::has_type(self.return_ty.clone()));
+                } else {
+                    let unit = Ty::unit();
+                    self.coerce(&unit, &self.return_ty.clone());
+                }
+                Ty::simple(TypeCtor::Never)
+            }
+            Expr::RecordLit { path, fields, spread } => {
+                let (ty, def_id) = self.resolve_variant(path.as_ref());
+                if let Some(variant) = def_id {
+                    self.write_variant_resolution(tgt_expr.into(), variant);
+                }
+
+                self.unify(&ty, &expected.ty);
+
+                let substs = ty.substs().unwrap_or_else(Substs::empty);
+                let field_types = def_id.map(|it| self.db.field_types(it)).unwrap_or_default();
+                let variant_data = def_id.map(|it| variant_data(self.db.upcast(), it));
+                for (field_idx, field) in fields.iter().enumerate() {
+                    let field_def =
+                        variant_data.as_ref().and_then(|it| match it.field(&field.name) {
+                            Some(local_id) => Some(FieldId { parent: def_id.unwrap(), local_id }),
+                            None => {
+                                self.push_diagnostic(InferenceDiagnostic::NoSuchField {
+                                    expr: tgt_expr,
+                                    field: field_idx,
+                                });
+                                None
+                            }
+                        });
+                    if let Some(field_def) = field_def {
+                        self.result.record_field_resolutions.insert(field.expr, field_def);
+                    }
+                    let field_ty = field_def
+                        .map_or(Ty::Unknown, |it| field_types[it.local_id].clone().subst(&substs));
+                    self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty));
+                }
+                if let Some(expr) = spread {
+                    self.infer_expr(*expr, &Expectation::has_type(ty.clone()));
+                }
+                ty
+            }
+            Expr::Field { expr, name } => {
+                let receiver_ty = self.infer_expr_inner(*expr, &Expectation::none());
+                let canonicalized = self.canonicalizer().canonicalize_ty(receiver_ty);
+                // Autoderef the receiver until a type with a matching field
+                // (tuple index, struct field or union field) is found.
+                let ty = autoderef::autoderef(
+                    self.db,
+                    self.resolver.krate(),
+                    InEnvironment {
+                        value: canonicalized.value.clone(),
+                        environment: self.trait_env.clone(),
+                    },
+                )
+                .find_map(|derefed_ty| match canonicalized.decanonicalize_ty(derefed_ty.value) {
+                    Ty::Apply(a_ty) => match a_ty.ctor {
+                        TypeCtor::Tuple { .. } => name
+                            .as_tuple_index()
+                            .and_then(|idx| a_ty.parameters.0.get(idx).cloned()),
+                        TypeCtor::Adt(AdtId::StructId(s)) => {
+                            self.db.struct_data(s).variant_data.field(name).map(|local_id| {
+                                let field = FieldId { parent: s.into(), local_id };
+                                self.write_field_resolution(tgt_expr, field);
+                                self.db.field_types(s.into())[field.local_id]
+                                    .clone()
+                                    .subst(&a_ty.parameters)
+                            })
+                        }
+                        TypeCtor::Adt(AdtId::UnionId(u)) => {
+                            self.db.union_data(u).variant_data.field(name).map(|local_id| {
+                                let field = FieldId { parent: u.into(), local_id };
+                                self.write_field_resolution(tgt_expr, field);
+                                self.db.field_types(u.into())[field.local_id]
+                                    .clone()
+                                    .subst(&a_ty.parameters)
+                            })
+                        }
+                        _ => None,
+                    },
+                    _ => None,
+                })
+                .unwrap_or(Ty::Unknown);
+                let ty = self.insert_type_vars(ty);
+                self.normalize_associated_types_in(ty)
+            }
+            Expr::Await { expr } => {
+                let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+                self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
+            }
+            Expr::Try { expr } => {
+                let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+                self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok())
+            }
+            Expr::Cast { expr, type_ref } => {
+                let _inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+                let cast_ty = self.make_ty(type_ref);
+                // FIXME check the cast...
+                cast_ty
+            }
+            Expr::Ref { expr, rawness, mutability } => {
+                // If the expectation is itself a reference/pointer, expect the
+                // pointee type for the inner expression.
+                let expectation = if let Some((exp_inner, exp_rawness, exp_mutability)) =
+                    &expected.ty.as_reference_or_ptr()
+                {
+                    if *exp_mutability == Mutability::Mut && *mutability == Mutability::Shared {
+                        // FIXME: throw type error - expected mut reference but found shared ref,
+                        // which cannot be coerced
+                    }
+                    if *exp_rawness == Rawness::Ref && *rawness == Rawness::RawPtr {
+                        // FIXME: throw type error - expected reference but found ptr,
+                        // which cannot be coerced
+                    }
+                    Expectation::rvalue_hint(Ty::clone(exp_inner))
+                } else {
+                    Expectation::none()
+                };
+                let inner_ty = self.infer_expr_inner(*expr, &expectation);
+                let ty = match rawness {
+                    Rawness::RawPtr => TypeCtor::RawPtr(*mutability),
+                    Rawness::Ref => TypeCtor::Ref(*mutability),
+                };
+                Ty::apply_one(ty, inner_ty)
+            }
+            Expr::Box { expr } => {
+                let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+                if let Some(box_) = self.resolve_boxed_box() {
+                    Ty::apply_one(TypeCtor::Adt(box_), inner_ty)
+                } else {
+                    Ty::Unknown
+                }
+            }
+            Expr::UnaryOp { expr, op } => {
+                let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+                match op {
+                    UnaryOp::Deref => match self.resolver.krate() {
+                        Some(krate) => {
+                            let canonicalized = self.canonicalizer().canonicalize_ty(inner_ty);
+                            match autoderef::deref(
+                                self.db,
+                                krate,
+                                InEnvironment {
+                                    value: &canonicalized.value,
+                                    environment: self.trait_env.clone(),
+                                },
+                            ) {
+                                Some(derefed_ty) => {
+                                    canonicalized.decanonicalize_ty(derefed_ty.value)
+                                }
+                                None => Ty::Unknown,
+                            }
+                        }
+                        None => Ty::Unknown,
+                    },
+                    UnaryOp::Neg => {
+                        match &inner_ty {
+                            // Fast path for builtins
+                            Ty::Apply(ApplicationTy {
+                                ctor: TypeCtor::Int(IntTy { signedness: Signedness::Signed, .. }),
+                                ..
+                            })
+                            | Ty::Apply(ApplicationTy { ctor: TypeCtor::Float(_), .. })
+                            | Ty::Infer(InferTy::IntVar(..))
+                            | Ty::Infer(InferTy::FloatVar(..)) => inner_ty,
+                            // Otherwise we resolve via the std::ops::Neg trait
+                            _ => self
+                                .resolve_associated_type(inner_ty, self.resolve_ops_neg_output()),
+                        }
+                    }
+                    UnaryOp::Not => {
+                        match &inner_ty {
+                            // Fast path for builtins
+                            Ty::Apply(ApplicationTy { ctor: TypeCtor::Bool, .. })
+                            | Ty::Apply(ApplicationTy { ctor: TypeCtor::Int(_), .. })
+                            | Ty::Infer(InferTy::IntVar(..)) => inner_ty,
+                            // Otherwise we resolve via the std::ops::Not trait
+                            _ => self
+                                .resolve_associated_type(inner_ty, self.resolve_ops_not_output()),
+                        }
+                    }
+                }
+            }
+            Expr::BinaryOp { lhs, rhs, op } => match op {
+                Some(op) => {
+                    let lhs_expectation = match op {
+                        BinaryOp::LogicOp(..) => Expectation::has_type(Ty::simple(TypeCtor::Bool)),
+                        _ => Expectation::none(),
+                    };
+                    let lhs_ty = self.infer_expr(*lhs, &lhs_expectation);
+                    // FIXME: find implementation of trait corresponding to operation
+                    // symbol and resolve associated `Output` type
+                    let rhs_expectation = op::binary_op_rhs_expectation(*op, lhs_ty.clone());
+                    let rhs_ty = self.infer_expr(*rhs, &Expectation::has_type(rhs_expectation));
+
+                    // FIXME: similar as above, return ty is often associated trait type
+                    op::binary_op_return_ty(*op, lhs_ty, rhs_ty)
+                }
+                _ => Ty::Unknown,
+            },
+            Expr::Range { lhs, rhs, range_type } => {
+                let lhs_ty = lhs.map(|e| self.infer_expr_inner(e, &Expectation::none()));
+                let rhs_expect = lhs_ty
+                    .as_ref()
+                    .map_or_else(Expectation::none, |ty| Expectation::has_type(ty.clone()));
+                let rhs_ty = rhs.map(|e| self.infer_expr(e, &rhs_expect));
+                // Pick the std range ADT matching the present endpoints:
+                // RangeFull / RangeTo(Inclusive) / Range(Inclusive) / RangeFrom.
+                match (range_type, lhs_ty, rhs_ty) {
+                    (RangeOp::Exclusive, None, None) => match self.resolve_range_full() {
+                        Some(adt) => Ty::simple(TypeCtor::Adt(adt)),
+                        None => Ty::Unknown,
+                    },
+                    (RangeOp::Exclusive, None, Some(ty)) => match self.resolve_range_to() {
+                        Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
+                        None => Ty::Unknown,
+                    },
+                    (RangeOp::Inclusive, None, Some(ty)) => {
+                        match self.resolve_range_to_inclusive() {
+                            Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
+                            None => Ty::Unknown,
+                        }
+                    }
+                    (RangeOp::Exclusive, Some(_), Some(ty)) => match self.resolve_range() {
+                        Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
+                        None => Ty::Unknown,
+                    },
+                    (RangeOp::Inclusive, Some(_), Some(ty)) => {
+                        match self.resolve_range_inclusive() {
+                            Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
+                            None => Ty::Unknown,
+                        }
+                    }
+                    (RangeOp::Exclusive, Some(ty), None) => match self.resolve_range_from() {
+                        Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
+                        None => Ty::Unknown,
+                    },
+                    (RangeOp::Inclusive, _, None) => Ty::Unknown,
+                }
+            }
+            Expr::Index { base, index } => {
+                let base_ty = self.infer_expr_inner(*base, &Expectation::none());
+                let index_ty = self.infer_expr(*index, &Expectation::none());
+
+                if let (Some(index_trait), Some(krate)) =
+                    (self.resolve_ops_index(), self.resolver.krate())
+                {
+                    let canonicalized = self.canonicalizer().canonicalize_ty(base_ty);
+                    let self_ty = method_resolution::resolve_indexing_op(
+                        self.db,
+                        &canonicalized.value,
+                        self.trait_env.clone(),
+                        krate,
+                        index_trait,
+                    );
+                    let self_ty =
+                        self_ty.map_or(Ty::Unknown, |t| canonicalized.decanonicalize_ty(t.value));
+                    self.resolve_associated_type_with_params(
+                        self_ty,
+                        self.resolve_ops_index_output(),
+                        &[index_ty],
+                    )
+                } else {
+                    Ty::Unknown
+                }
+            }
+            Expr::Tuple { exprs } => {
+                // Seed element expectations from the expected tuple type,
+                // padding with fresh variables if arities differ.
+                let mut tys = match &expected.ty {
+                    ty_app!(TypeCtor::Tuple { .. }, st) => st
+                        .iter()
+                        .cloned()
+                        .chain(repeat_with(|| self.table.new_type_var()))
+                        .take(exprs.len())
+                        .collect::<Vec<_>>(),
+                    _ => (0..exprs.len()).map(|_| self.table.new_type_var()).collect(),
+                };
+
+                for (expr, ty) in exprs.iter().zip(tys.iter_mut()) {
+                    self.infer_expr_coerce(*expr, &Expectation::has_type(ty.clone()));
+                }
+
+                Ty::apply(TypeCtor::Tuple { cardinality: tys.len() as u16 }, Substs(tys.into()))
+            }
+            Expr::Array(array) => {
+                let elem_ty = match &expected.ty {
+                    ty_app!(TypeCtor::Array, st) | ty_app!(TypeCtor::Slice, st) => {
+                        st.as_single().clone()
+                    }
+                    _ => self.table.new_type_var(),
+                };
+
+                match array {
+                    Array::ElementList(items) => {
+                        for expr in items.iter() {
+                            self.infer_expr_coerce(*expr, &Expectation::has_type(elem_ty.clone()));
+                        }
+                    }
+                    Array::Repeat { initializer, repeat } => {
+                        self.infer_expr_coerce(
+                            *initializer,
+                            &Expectation::has_type(elem_ty.clone()),
+                        );
+                        // The repeat count must be a usize.
+                        self.infer_expr(
+                            *repeat,
+                            &Expectation::has_type(Ty::simple(TypeCtor::Int(IntTy::usize()))),
+                        );
+                    }
+                }
+
+                Ty::apply_one(TypeCtor::Array, elem_ty)
+            }
+            Expr::Literal(lit) => match lit {
+                Literal::Bool(..) => Ty::simple(TypeCtor::Bool),
+                Literal::String(..) => {
+                    Ty::apply_one(TypeCtor::Ref(Mutability::Shared), Ty::simple(TypeCtor::Str))
+                }
+                Literal::ByteString(..) => {
+                    let byte_type = Ty::simple(TypeCtor::Int(IntTy::u8()));
+                    let array_type = Ty::apply_one(TypeCtor::Array, byte_type);
+                    Ty::apply_one(TypeCtor::Ref(Mutability::Shared), array_type)
+                }
+                Literal::Char(..) => Ty::simple(TypeCtor::Char),
+                Literal::Int(_v, ty) => match ty {
+                    Some(int_ty) => Ty::simple(TypeCtor::Int((*int_ty).into())),
+                    None => self.table.new_integer_var(),
+                },
+                Literal::Float(_v, ty) => match ty {
+                    Some(float_ty) => Ty::simple(TypeCtor::Float((*float_ty).into())),
+                    None => self.table.new_float_var(),
+                },
+            },
+        };
+        // use a new type variable if we got Ty::Unknown here
+        let ty = self.insert_type_vars_shallow(ty);
+        let ty = self.resolve_ty_as_possible(ty);
+        self.write_expr_ty(tgt_expr, ty.clone());
+        ty
+    }
+
+    /// Infer a block: each statement in order, then the tail expression. A
+    /// block without a tail evaluates to `()` — unless it diverges, in which
+    /// case a maybe-never variable is used so it can unify with anything.
+    fn infer_block(
+        &mut self,
+        statements: &[Statement],
+        tail: Option<ExprId>,
+        expected: &Expectation,
+    ) -> Ty {
+        for stmt in statements {
+            match stmt {
+                Statement::Let { pat, type_ref, initializer } => {
+                    let decl_ty =
+                        type_ref.as_ref().map(|tr| self.make_ty(tr)).unwrap_or(Ty::Unknown);
+
+                    // Always use the declared type when specified
+                    let mut ty = decl_ty.clone();
+
+                    if let Some(expr) = initializer {
+                        let actual_ty =
+                            self.infer_expr_coerce(*expr, &Expectation::has_type(decl_ty.clone()));
+                        if decl_ty == Ty::Unknown {
+                            ty = actual_ty;
+                        }
+                    }
+
+                    let ty = self.resolve_ty_as_possible(ty);
+                    self.infer_pat(*pat, &ty, BindingMode::default());
+                }
+                Statement::Expr(expr) => {
+                    self.infer_expr(*expr, &Expectation::none());
+                }
+            }
+        }
+
+        let ty = if let Some(expr) = tail {
+            self.infer_expr_coerce(expr, expected)
+        } else {
+            // Citing rustc: if there is no explicit tail expression,
+            // that is typically equivalent to a tail expression
+            // of `()` -- except if the block diverges. In that
+            // case, there is no value supplied from the tail
+            // expression (assuming there are no other breaks,
+            // this implies that the type of the block will be
+            // `!`).
+            if self.diverges.is_always() {
+                // we don't even make an attempt at coercion
+                self.table.new_maybe_never_type_var()
+            } else {
+                self.coerce(&Ty::unit(), expected.coercion_target());
+                Ty::unit()
+            }
+        };
+        ty
+    }
+
+    /// Infer `receiver.method(args)`: resolve the method via method
+    /// resolution, build its substitution, autoref the receiver so it unifies
+    /// with the method's `self` parameter, check the arguments, and return
+    /// the normalized return type.
+    fn infer_method_call(
+        &mut self,
+        tgt_expr: ExprId,
+        receiver: ExprId,
+        args: &[ExprId],
+        method_name: &Name,
+        generic_args: Option<&GenericArgs>,
+    ) -> Ty {
+        let receiver_ty = self.infer_expr(receiver, &Expectation::none());
+        let canonicalized_receiver = self.canonicalizer().canonicalize_ty(receiver_ty.clone());
+
+        let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
+
+        let resolved = self.resolver.krate().and_then(|krate| {
+            method_resolution::lookup_method(
+                &canonicalized_receiver.value,
+                self.db,
+                self.trait_env.clone(),
+                krate,
+                &traits_in_scope,
+                method_name,
+            )
+        });
+        // On resolution failure fall back to the unadjusted receiver type and
+        // an unknown method type so inference can still proceed.
+        let (derefed_receiver_ty, method_ty, def_generics) = match resolved {
+            Some((ty, func)) => {
+                let ty = canonicalized_receiver.decanonicalize_ty(ty);
+                self.write_method_resolution(tgt_expr, func);
+                (ty, self.db.value_ty(func.into()), Some(generics(self.db.upcast(), func.into())))
+            }
+            None => (receiver_ty, Binders::new(0, Ty::Unknown), None),
+        };
+        let substs = self.substs_for_method_call(def_generics, generic_args, &derefed_receiver_ty);
+        let method_ty = method_ty.subst(&substs);
+        let method_ty = self.insert_type_vars(method_ty);
+        self.register_obligations_for_call(&method_ty);
+        // First signature parameter is the receiver; the rest are the arguments.
+        let (expected_receiver_ty, param_tys, ret_ty) = match method_ty.callable_sig(self.db) {
+            Some(sig) => {
+                if !sig.params().is_empty() {
+                    (sig.params()[0].clone(), sig.params()[1..].to_vec(), sig.ret().clone())
+                } else {
+                    (Ty::Unknown, Vec::new(), sig.ret().clone())
+                }
+            }
+            None => (Ty::Unknown, Vec::new(), Ty::Unknown),
+        };
+        // Apply autoref so the below unification works correctly
+        // FIXME: return correct autorefs from lookup_method
+        let actual_receiver_ty = match expected_receiver_ty.as_reference() {
+            Some((_, mutability)) => Ty::apply_one(TypeCtor::Ref(mutability), derefed_receiver_ty),
+            _ => derefed_receiver_ty,
+        };
+        self.unify(&expected_receiver_ty, &actual_receiver_ty);
+
+        // NOTE(review): `¶m_tys` below looks like mojibake for
+        // `&param_tys` — verify against the upstream source.
+        self.check_call_arguments(args, ¶m_tys);
+        self.normalize_associated_types_in(ret_ty)
+    }
+
+    /// Check call arguments against parameter types, visiting non-closure
+    /// arguments before closures (see the rustc quote below); missing
+    /// parameters are padded with `Ty::Unknown`.
+    fn check_call_arguments(&mut self, args: &[ExprId], param_tys: &[Ty]) {
+        // Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 --
+        // We do this in a pretty awful way: first we type-check any arguments
+        // that are not closures, then we type-check the closures. This is so
+        // that we have more information about the types of arguments when we
+        // type-check the functions. This isn't really the right way to do this.
+        for &check_closures in &[false, true] {
+            let param_iter = param_tys.iter().cloned().chain(repeat(Ty::Unknown));
+            for (&arg, param_ty) in args.iter().zip(param_iter) {
+                let is_closure = matches!(&self.body[arg], Expr::Lambda { .. });
+                if is_closure != check_closures {
+                    continue;
+                }
+
+                let param_ty = self.normalize_associated_types_in(param_ty);
+                self.infer_expr_coerce(arg, &Expectation::has_type(param_ty.clone()));
+            }
+        }
+    }
+
+    /// Build the substitution for a method call: the receiver type for the
+    /// parent trait's `Self` parameter, explicitly written generic arguments
+    /// (truncated to the declared count), and `Ty::Unknown` for everything
+    /// else — to be filled in by unification later.
+    fn substs_for_method_call(
+        &mut self,
+        def_generics: Option<Generics>,
+        generic_args: Option<&GenericArgs>,
+        receiver_ty: &Ty,
+    ) -> Substs {
+        let (parent_params, self_params, type_params, impl_trait_params) =
+            def_generics.as_ref().map_or((0, 0, 0, 0), |g| g.provenance_split());
+        assert_eq!(self_params, 0); // method shouldn't have another Self param
+        let total_len = parent_params + type_params + impl_trait_params;
+        let mut substs = Vec::with_capacity(total_len);
+        // Parent arguments are unknown, except for the receiver type
+        if let Some(parent_generics) = def_generics.as_ref().map(|p| p.iter_parent()) {
+            for (_id, param) in parent_generics {
+                if param.provenance == hir_def::generics::TypeParamProvenance::TraitSelf {
+                    substs.push(receiver_ty.clone());
+                } else {
+                    substs.push(Ty::Unknown);
+                }
+            }
+        }
+        // handle provided type arguments
+        if let Some(generic_args) = generic_args {
+            // if args are provided, it should be all of them, but we can't rely on that
+            for arg in generic_args.args.iter().take(type_params) {
+                match arg {
+                    GenericArg::Type(type_ref) => {
+                        let ty = self.make_ty(type_ref);
+                        substs.push(ty);
+                    }
+                }
+            }
+        };
+        // Pad the remaining parameters with Unknown.
+        let supplied_params = substs.len();
+        for _ in supplied_params..total_len {
+            substs.push(Ty::Unknown);
+        }
+        assert_eq!(substs.len(), total_len);
+        Substs(substs.into())
+    }
+
+    /// For a call to a `FnDef`, push the callee's generic predicates
+    /// (instantiated with the call's substitution) as obligations; for trait
+    /// methods, additionally push the implied trait obligation so the
+    /// receiver's impl is resolved.
+    fn register_obligations_for_call(&mut self, callable_ty: &Ty) {
+        if let Ty::Apply(a_ty) = callable_ty {
+            if let TypeCtor::FnDef(def) = a_ty.ctor {
+                let generic_predicates = self.db.generic_predicates(def.into());
+                for predicate in generic_predicates.iter() {
+                    let predicate = predicate.clone().subst(&a_ty.parameters);
+                    if let Some(obligation) = Obligation::from_predicate(predicate) {
+                        self.obligations.push(obligation);
+                    }
+                }
+                // add obligation for trait implementation, if this is a trait method
+                match def {
+                    CallableDefId::FunctionId(f) => {
+                        if let AssocContainerId::TraitId(trait_) =
+                            f.lookup(self.db.upcast()).container
+                        {
+                            // construct a TraitDef
+                            let substs = a_ty
+                                .parameters
+                                .prefix(generics(self.db.upcast(), trait_.into()).len());
+                            self.obligations.push(Obligation::Trait(TraitRef { trait_, substs }));
+                        }
+                    }
+                    CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {}
+                }
+            }
+        }
+    }
+}
--- /dev/null
+//! Type inference for patterns.
+
+use std::iter::repeat;
+use std::sync::Arc;
+
+use hir_def::{
+ expr::{BindingAnnotation, Expr, Literal, Pat, PatId, RecordFieldPat},
+ path::Path,
+ type_ref::Mutability,
+ FieldId,
+};
+use hir_expand::name::Name;
+use test_utils::mark;
+
+use super::{BindingMode, Expectation, InferenceContext};
+use crate::{utils::variant_data, Substs, Ty, TypeCtor};
+
+impl<'a> InferenceContext<'a> {
+    /// Infer a tuple-struct pattern `Path(sub0, sub1, ..)`: resolve the
+    /// variant, unify its type with `expected`, then infer each sub-pattern
+    /// against the corresponding (substituted) field type.
+    fn infer_tuple_struct_pat(
+        &mut self,
+        path: Option<&Path>,
+        subpats: &[PatId],
+        expected: &Ty,
+        default_bm: BindingMode,
+        id: PatId,
+    ) -> Ty {
+        let (ty, def) = self.resolve_variant(path);
+        let var_data = def.map(|it| variant_data(self.db.upcast(), it));
+        if let Some(variant) = def {
+            self.write_variant_resolution(id.into(), variant);
+        }
+        self.unify(&ty, expected);
+
+        let substs = ty.substs().unwrap_or_else(Substs::empty);
+
+        let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
+
+        for (i, &subpat) in subpats.iter().enumerate() {
+            // Tuple-struct fields are named `0`, `1`, ... — look them up by
+            // positional name; unknown positions fall back to Ty::Unknown.
+            let expected_ty = var_data
+                .as_ref()
+                .and_then(|d| d.field(&Name::new_tuple_field(i)))
+                .map_or(Ty::Unknown, |field| field_tys[field].clone().subst(&substs));
+            let expected_ty = self.normalize_associated_types_in(expected_ty);
+            self.infer_pat(subpat, &expected_ty, default_bm);
+        }
+
+        ty
+    }
+
+    /// Infer a record pattern `Path { field: sub, .. }`: resolve the variant,
+    /// unify its type with `expected`, record per-field resolutions, and
+    /// infer each sub-pattern against the matching field's type.
+    fn infer_record_pat(
+        &mut self,
+        path: Option<&Path>,
+        subpats: &[RecordFieldPat],
+        expected: &Ty,
+        default_bm: BindingMode,
+        id: PatId,
+    ) -> Ty {
+        let (ty, def) = self.resolve_variant(path);
+        let var_data = def.map(|it| variant_data(self.db.upcast(), it));
+        if let Some(variant) = def {
+            self.write_variant_resolution(id.into(), variant);
+        }
+
+        self.unify(&ty, expected);
+
+        let substs = ty.substs().unwrap_or_else(Substs::empty);
+
+        let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
+        for subpat in subpats {
+            let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name));
+            if let Some(local_id) = matching_field {
+                let field_def = FieldId { parent: def.unwrap(), local_id };
+                self.result.record_field_pat_resolutions.insert(subpat.pat, field_def);
+            }
+
+            // Fields without a match in the variant get Ty::Unknown.
+            let expected_ty =
+                matching_field.map_or(Ty::Unknown, |field| field_tys[field].clone().subst(&substs));
+            let expected_ty = self.normalize_associated_types_in(expected_ty);
+            self.infer_pat(subpat.pat, &expected_ty, default_bm);
+        }
+
+        ty
+    }
+
+ pub(super) fn infer_pat(
+ &mut self,
+ pat: PatId,
+ mut expected: &Ty,
+ mut default_bm: BindingMode,
+ ) -> Ty {
+ let body = Arc::clone(&self.body); // avoid borrow checker problem
+
+ if is_non_ref_pat(&body, pat) {
+ while let Some((inner, mutability)) = expected.as_reference() {
+ expected = inner;
+ default_bm = match default_bm {
+ BindingMode::Move => BindingMode::Ref(mutability),
+ BindingMode::Ref(Mutability::Shared) => BindingMode::Ref(Mutability::Shared),
+ BindingMode::Ref(Mutability::Mut) => BindingMode::Ref(mutability),
+ }
+ }
+ } else if let Pat::Ref { .. } = &body[pat] {
+ mark::hit!(match_ergonomics_ref);
+ // When you encounter a `&pat` pattern, reset to Move.
+ // This is so that `w` is by value: `let (_, &w) = &(1, &2);`
+ default_bm = BindingMode::Move;
+ }
+
+ // Lose mutability.
+ let default_bm = default_bm;
+ let expected = expected;
+
+ let ty = match &body[pat] {
+ Pat::Tuple { ref args, .. } => {
+ let expectations = match expected.as_tuple() {
+ Some(parameters) => &*parameters.0,
+ _ => &[],
+ };
+ let expectations_iter = expectations.iter().chain(repeat(&Ty::Unknown));
+
+ let inner_tys = args
+ .iter()
+ .zip(expectations_iter)
+ .map(|(&pat, ty)| self.infer_pat(pat, ty, default_bm))
+ .collect();
+
+ Ty::apply(TypeCtor::Tuple { cardinality: args.len() as u16 }, Substs(inner_tys))
+ }
+ Pat::Or(ref pats) => {
+ if let Some((first_pat, rest)) = pats.split_first() {
+ let ty = self.infer_pat(*first_pat, expected, default_bm);
+ for pat in rest {
+ self.infer_pat(*pat, expected, default_bm);
+ }
+ ty
+ } else {
+ Ty::Unknown
+ }
+ }
+ Pat::Ref { pat, mutability } => {
+ let expectation = match expected.as_reference() {
+ Some((inner_ty, exp_mut)) => {
+ if *mutability != exp_mut {
+ // FIXME: emit type error?
+ }
+ inner_ty
+ }
+ _ => &Ty::Unknown,
+ };
+ let subty = self.infer_pat(*pat, expectation, default_bm);
+ Ty::apply_one(TypeCtor::Ref(*mutability), subty)
+ }
+ Pat::TupleStruct { path: p, args: subpats, .. } => {
+ self.infer_tuple_struct_pat(p.as_ref(), subpats, expected, default_bm, pat)
+ }
+ Pat::Record { path: p, args: fields, ellipsis: _ } => {
+ self.infer_record_pat(p.as_ref(), fields, expected, default_bm, pat)
+ }
+ Pat::Path(path) => {
+ // FIXME use correct resolver for the surrounding expression
+ let resolver = self.resolver.clone();
+ self.infer_path(&resolver, &path, pat.into()).unwrap_or(Ty::Unknown)
+ }
+ Pat::Bind { mode, name: _, subpat } => {
+ let mode = if mode == &BindingAnnotation::Unannotated {
+ default_bm
+ } else {
+ BindingMode::convert(*mode)
+ };
+ let inner_ty = if let Some(subpat) = subpat {
+ self.infer_pat(*subpat, expected, default_bm)
+ } else {
+ expected.clone()
+ };
+ let inner_ty = self.insert_type_vars_shallow(inner_ty);
+
+ let bound_ty = match mode {
+ BindingMode::Ref(mutability) => {
+ Ty::apply_one(TypeCtor::Ref(mutability), inner_ty.clone())
+ }
+ BindingMode::Move => inner_ty.clone(),
+ };
+ let bound_ty = self.resolve_ty_as_possible(bound_ty);
+ self.write_pat_ty(pat, bound_ty);
+ return inner_ty;
+ }
+ Pat::Slice { prefix, slice, suffix } => {
+ let (container_ty, elem_ty) = match &expected {
+ ty_app!(TypeCtor::Array, st) => (TypeCtor::Array, st.as_single().clone()),
+ ty_app!(TypeCtor::Slice, st) => (TypeCtor::Slice, st.as_single().clone()),
+ _ => (TypeCtor::Slice, Ty::Unknown),
+ };
+
+ for pat_id in prefix.iter().chain(suffix) {
+ self.infer_pat(*pat_id, &elem_ty, default_bm);
+ }
+
+ let pat_ty = Ty::apply_one(container_ty, elem_ty);
+ if let Some(slice_pat_id) = slice {
+ self.infer_pat(*slice_pat_id, &pat_ty, default_bm);
+ }
+
+ pat_ty
+ }
+ Pat::Wild => expected.clone(),
+ Pat::Range { start, end } => {
+ let start_ty = self.infer_expr(*start, &Expectation::has_type(expected.clone()));
+ let end_ty = self.infer_expr(*end, &Expectation::has_type(start_ty));
+ end_ty
+ }
+ Pat::Lit(expr) => self.infer_expr(*expr, &Expectation::has_type(expected.clone())),
+ Pat::Missing => Ty::Unknown,
+ };
+ // use a new type variable if we got Ty::Unknown here
+ let ty = self.insert_type_vars_shallow(ty);
+ if !self.unify(&ty, expected) {
+ // FIXME record mismatch, we need to change the type of self.type_mismatches for that
+ }
+ let ty = self.resolve_ty_as_possible(ty);
+ self.write_pat_ty(pat, ty.clone());
+ ty
+ }
+}
+
+// Returns true for patterns that cannot themselves match a reference value.
+// Such patterns trigger the match-ergonomics auto-deref loop in `infer_pat`.
+fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
+ match &body[pat] {
+ Pat::Tuple { .. }
+ | Pat::TupleStruct { .. }
+ | Pat::Record { .. }
+ | Pat::Range { .. }
+ | Pat::Slice { .. } => true,
+ // An or-pattern is non-ref only if every alternative is.
+ Pat::Or(pats) => pats.iter().all(|p| is_non_ref_pat(body, *p)),
+ // FIXME: Path/Lit might actually evaluate to ref, but inference is unimplemented.
+ Pat::Path(..) => true,
+ Pat::Lit(expr) => match body[*expr] {
+ // String literals match `&str`, so they are ref-like.
+ Expr::Literal(Literal::String(..)) => false,
+ _ => true,
+ },
+ Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Missing => false,
+ }
+}
--- /dev/null
+//! Path expression resolution.
+
+use std::iter;
+
+use hir_def::{
+ path::{Path, PathSegment},
+ resolver::{ResolveValueResult, Resolver, TypeNs, ValueNs},
+ AdtId, AssocContainerId, AssocItemId, EnumVariantId, Lookup,
+};
+use hir_expand::name::Name;
+
+use crate::{method_resolution, Substs, Ty, ValueTyDefId};
+
+use super::{ExprOrPatId, InferenceContext, TraitRef};
+
+impl<'a> InferenceContext<'a> {
+ // Infer the type of a path used in expression/pattern position, e.g.
+ // `Vec::new` or `Foo::BAR`. Returns None when the path doesn't resolve.
+ pub(super) fn infer_path(
+ &mut self,
+ resolver: &Resolver,
+ path: &Path,
+ id: ExprOrPatId,
+ ) -> Option<Ty> {
+ let ty = self.resolve_value_path(resolver, path, id)?;
+ let ty = self.insert_type_vars(ty);
+ let ty = self.normalize_associated_types_in(ty);
+ Some(ty)
+ }
+
+ // Resolve a path in the value namespace and compute its type, applying
+ // any substitutions from the path's generic arguments. Handles both
+ // `<T>::item` type anchors and plain paths.
+ fn resolve_value_path(
+ &mut self,
+ resolver: &Resolver,
+ path: &Path,
+ id: ExprOrPatId,
+ ) -> Option<Ty> {
+ let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
+ if path.segments().is_empty() {
+ // This can't actually happen syntax-wise
+ return None;
+ }
+ // `<T>::a::b::last` — lower the anchor type, apply all but the last
+ // segment to it, then resolve the last segment as an assoc item.
+ let ty = self.make_ty(type_ref);
+ let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &resolver);
+ let (ty, _) = Ty::from_type_relative_path(&ctx, ty, None, remaining_segments_for_ty);
+ self.resolve_ty_assoc_item(
+ ty,
+ &path.segments().last().expect("path had at least one segment").name,
+ id,
+ )?
+ } else {
+ let value_or_partial =
+ resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path())?;
+
+ match value_or_partial {
+ ResolveValueResult::ValueNs(it) => (it, None),
+ // Partially resolved: the remainder must be an associated item.
+ ResolveValueResult::Partial(def, remaining_index) => {
+ self.resolve_assoc_item(def, path, remaining_index, id)?
+ }
+ }
+ };
+
+ let typable: ValueTyDefId = match value {
+ ValueNs::LocalBinding(pat) => {
+ // Local bindings have no generic params; just return the
+ // already-inferred type of the binding's pattern.
+ let ty = self.result.type_of_pat.get(pat)?.clone();
+ let ty = self.resolve_ty_as_possible(ty);
+ return Some(ty);
+ }
+ ValueNs::FunctionId(it) => it.into(),
+ ValueNs::ConstId(it) => it.into(),
+ ValueNs::StaticId(it) => it.into(),
+ ValueNs::StructId(it) => {
+ self.write_variant_resolution(id, it.into());
+
+ it.into()
+ }
+ ValueNs::EnumVariantId(it) => {
+ self.write_variant_resolution(id, it.into());
+
+ it.into()
+ }
+ ValueNs::ImplSelf(impl_id) => {
+ // `Self` used as a value (tuple-struct constructor) inside an impl.
+ let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
+ let substs = Substs::type_params_for_generics(&generics);
+ let ty = self.db.impl_self_ty(impl_id).subst(&substs);
+ if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() {
+ let ty = self.db.value_ty(struct_id.into()).subst(&substs);
+ return Some(ty);
+ } else {
+ // FIXME: diagnostic, invalid Self reference
+ return None;
+ }
+ }
+ };
+
+ let ty = self.db.value_ty(typable);
+ // self_subst is just for the parent
+ let parent_substs = self_subst.unwrap_or_else(Substs::empty);
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ let substs = Ty::substs_from_path(&ctx, path, typable, true);
+ // Combine the parent (trait/impl) substitution with the item's own
+ // generic arguments taken from the path.
+ let full_substs = Substs::builder(substs.len())
+ .use_parent_substs(&parent_substs)
+ .fill(substs.0[parent_substs.len()..].iter().cloned())
+ .build();
+ let ty = ty.subst(&full_substs);
+ Some(ty)
+ }
+
+ // Resolve the unresolved tail of a partially-resolved path (the part after
+ // `remaining_index`) to an associated value item of a type or trait.
+ fn resolve_assoc_item(
+ &mut self,
+ def: TypeNs,
+ path: &Path,
+ remaining_index: usize,
+ id: ExprOrPatId,
+ ) -> Option<(ValueNs, Option<Substs>)> {
+ assert!(remaining_index < path.segments().len());
+ // there may be more intermediate segments between the resolved one and
+ // the end. Only the last segment needs to be resolved to a value; from
+ // the segments before that, we need to get either a type or a trait ref.
+
+ let resolved_segment = path.segments().get(remaining_index - 1).unwrap();
+ let remaining_segments = path.segments().skip(remaining_index);
+ let is_before_last = remaining_segments.len() == 1;
+
+ match (def, is_before_last) {
+ (TypeNs::TraitId(trait_), true) => {
+ // `Trait::item` — resolve directly on the trait.
+ let segment =
+ remaining_segments.last().expect("there should be at least one segment here");
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ let trait_ref = TraitRef::from_resolved_path(&ctx, trait_, resolved_segment, None);
+ self.resolve_trait_assoc_item(trait_ref, segment, id)
+ }
+ (def, _) => {
+ // Either we already have a type (e.g. `Vec::new`), or we have a
+ // trait but it's not the last segment, so the next segment
+ // should resolve to an associated type of that trait (e.g. `<T
+ // as Iterator>::Item::default`)
+ let remaining_segments_for_ty =
+ remaining_segments.take(remaining_segments.len() - 1);
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
+ let (ty, _) = Ty::from_partly_resolved_hir_path(
+ &ctx,
+ def,
+ resolved_segment,
+ remaining_segments_for_ty,
+ true,
+ );
+ if let Ty::Unknown = ty {
+ return None;
+ }
+
+ let ty = self.insert_type_vars(ty);
+ let ty = self.normalize_associated_types_in(ty);
+
+ let segment =
+ remaining_segments.last().expect("there should be at least one segment here");
+
+ self.resolve_ty_assoc_item(ty, &segment.name, id)
+ }
+ }
+ }
+
+ // Look up an associated function or const by name directly on a trait
+ // (`Trait::item` paths). Associated types are skipped — they are not
+ // values.
+ fn resolve_trait_assoc_item(
+ &mut self,
+ trait_ref: TraitRef,
+ segment: PathSegment<'_>,
+ id: ExprOrPatId,
+ ) -> Option<(ValueNs, Option<Substs>)> {
+ let trait_ = trait_ref.trait_;
+ let item =
+ self.db.trait_data(trait_).items.iter().map(|(_name, id)| (*id)).find_map(|item| {
+ match item {
+ AssocItemId::FunctionId(func) => {
+ if segment.name == &self.db.function_data(func).name {
+ Some(AssocItemId::FunctionId(func))
+ } else {
+ None
+ }
+ }
+
+ AssocItemId::ConstId(konst) => {
+ if self
+ .db
+ .const_data(konst)
+ .name
+ .as_ref()
+ .map_or(false, |n| n == segment.name)
+ {
+ Some(AssocItemId::ConstId(konst))
+ } else {
+ None
+ }
+ }
+ AssocItemId::TypeAliasId(_) => None,
+ }
+ })?;
+ let def = match item {
+ AssocItemId::FunctionId(f) => ValueNs::FunctionId(f),
+ AssocItemId::ConstId(c) => ValueNs::ConstId(c),
+ // Type aliases were filtered out above, so this can't be reached.
+ AssocItemId::TypeAliasId(_) => unreachable!(),
+ };
+
+ self.write_assoc_resolution(id, item);
+ Some((def, Some(trait_ref.substs)))
+ }
+
+ // Resolve `SomeType::item`: first try enum variants, then fall back to
+ // method-resolution-style lookup across inherent impls and traits in scope.
+ fn resolve_ty_assoc_item(
+ &mut self,
+ ty: Ty,
+ name: &Name,
+ id: ExprOrPatId,
+ ) -> Option<(ValueNs, Option<Substs>)> {
+ if let Ty::Unknown = ty {
+ return None;
+ }
+
+ if let Some(result) = self.resolve_enum_variant_on_ty(&ty, name, id) {
+ return Some(result);
+ }
+
+ let canonical_ty = self.canonicalizer().canonicalize_ty(ty.clone());
+ let krate = self.resolver.krate()?;
+ let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
+
+ method_resolution::iterate_method_candidates(
+ &canonical_ty.value,
+ self.db,
+ self.trait_env.clone(),
+ krate,
+ &traits_in_scope,
+ Some(name),
+ method_resolution::LookupMode::Path,
+ move |_ty, item| {
+ let (def, container) = match item {
+ AssocItemId::FunctionId(f) => {
+ (ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container)
+ }
+ AssocItemId::ConstId(c) => {
+ (ValueNs::ConstId(c), c.lookup(self.db.upcast()).container)
+ }
+ AssocItemId::TypeAliasId(_) => unreachable!(),
+ };
+ let substs = match container {
+ AssocContainerId::ImplId(impl_id) => {
+ // Fill the impl's generics with fresh inference vars and
+ // unify the impl's self type with `ty` to learn them.
+ let impl_substs = Substs::build_for_def(self.db, impl_id)
+ .fill(iter::repeat_with(|| self.table.new_type_var()))
+ .build();
+ let impl_self_ty = self.db.impl_self_ty(impl_id).subst(&impl_substs);
+ self.unify(&impl_self_ty, &ty);
+ Some(impl_substs)
+ }
+ AssocContainerId::TraitId(trait_) => {
+ // we're picking this method
+ let trait_substs = Substs::build_for_def(self.db, trait_)
+ .push(ty.clone())
+ .fill(std::iter::repeat_with(|| self.table.new_type_var()))
+ .build();
+ // Register the obligation that `ty` implements the trait.
+ self.obligations.push(super::Obligation::Trait(TraitRef {
+ trait_,
+ substs: trait_substs.clone(),
+ }));
+ Some(trait_substs)
+ }
+ AssocContainerId::ContainerId(_) => None,
+ };
+
+ self.write_assoc_resolution(id, item);
+ Some((def, substs))
+ },
+ )
+ }
+
+ // If `ty` is an enum, try to resolve `name` as one of its variants
+ // (e.g. `Option::None`), recording the resolution on success.
+ fn resolve_enum_variant_on_ty(
+ &mut self,
+ ty: &Ty,
+ name: &Name,
+ id: ExprOrPatId,
+ ) -> Option<(ValueNs, Option<Substs>)> {
+ let (enum_id, subst) = match ty.as_adt() {
+ Some((AdtId::EnumId(e), subst)) => (e, subst),
+ _ => return None,
+ };
+ let enum_data = self.db.enum_data(enum_id);
+ let local_id = enum_data.variant(name)?;
+ let variant = EnumVariantId { parent: enum_id, local_id };
+ self.write_variant_resolution(id, variant.into());
+ Some((ValueNs::EnumVariantId(variant), Some(subst.clone())))
+ }
+}
--- /dev/null
+//! Unification and canonicalization logic.
+
+use std::borrow::Cow;
+
+use ena::unify::{InPlaceUnificationTable, NoError, UnifyKey, UnifyValue};
+
+use test_utils::mark;
+
+use super::{InferenceContext, Obligation};
+use crate::{
+ BoundVar, Canonical, DebruijnIndex, GenericPredicate, InEnvironment, InferTy, Substs, Ty,
+ TyKind, TypeCtor, TypeWalk,
+};
+
+impl<'a> InferenceContext<'a> {
+ // Creates a fresh Canonicalizer borrowing this context; each use starts
+ // with empty free-variable and recursion-guard stacks.
+ pub(super) fn canonicalizer<'b>(&'b mut self) -> Canonicalizer<'a, 'b>
+ where
+ 'a: 'b,
+ {
+ Canonicalizer { ctx: self, free_vars: Vec::new(), var_stack: Vec::new() }
+ }
+}
+
+/// Replaces unresolved inference variables in a type with canonical bound
+/// variables, collecting the encountered variables in `free_vars` so the
+/// mapping can later be reversed by `Canonicalized`.
+pub(super) struct Canonicalizer<'a, 'b>
+where
+ 'a: 'b,
+{
+ ctx: &'b mut InferenceContext<'a>,
+ // Inference variables seen so far, in order of first occurrence; the
+ // position in this Vec is the variable's canonical index.
+ free_vars: Vec<InferTy>,
+ /// A stack of type variables that is used to detect recursive types (which
+ /// are an error, but we need to protect against them to avoid stack
+ /// overflows).
+ var_stack: Vec<TypeVarId>,
+}
+
+/// The result of canonicalization: the canonical value plus the list of
+/// original inference variables, needed to map solutions back.
+#[derive(Debug)]
+pub(super) struct Canonicalized<T> {
+ pub value: Canonical<T>,
+ free_vars: Vec<InferTy>,
+}
+
+impl<'a, 'b> Canonicalizer<'a, 'b>
+where
+ 'a: 'b,
+{
+ // Returns the canonical index for `free_var`, registering it on first
+ // sight. Note: `position(..).unwrap_or_else(..)` means an already-seen
+ // variable reuses its existing index.
+ fn add(&mut self, free_var: InferTy) -> usize {
+ self.free_vars.iter().position(|&v| v == free_var).unwrap_or_else(|| {
+ let next_index = self.free_vars.len();
+ self.free_vars.push(free_var);
+ next_index
+ })
+ }
+
+ // Core canonicalization walk: resolved inference variables are replaced by
+ // their known type (recursively), unresolved ones by `Ty::Bound` indices.
+ fn do_canonicalize<T: TypeWalk>(&mut self, t: T, binders: DebruijnIndex) -> T {
+ t.fold_binders(
+ &mut |ty, binders| match ty {
+ Ty::Infer(tv) => {
+ let inner = tv.to_inner();
+ if self.var_stack.contains(&inner) {
+ // recursive type
+ return tv.fallback_value();
+ }
+ if let Some(known_ty) =
+ self.ctx.table.var_unification_table.inlined_probe_value(inner).known()
+ {
+ // Push/pop around the recursive call guards against cycles.
+ self.var_stack.push(inner);
+ let result = self.do_canonicalize(known_ty.clone(), binders);
+ self.var_stack.pop();
+ result
+ } else {
+ // Use the union-find root so equal variables canonicalize
+ // to the same bound index.
+ let root = self.ctx.table.var_unification_table.find(inner);
+ let free_var = match tv {
+ InferTy::TypeVar(_) => InferTy::TypeVar(root),
+ InferTy::IntVar(_) => InferTy::IntVar(root),
+ InferTy::FloatVar(_) => InferTy::FloatVar(root),
+ InferTy::MaybeNeverTypeVar(_) => InferTy::MaybeNeverTypeVar(root),
+ };
+ let position = self.add(free_var);
+ Ty::Bound(BoundVar::new(binders, position))
+ }
+ }
+ _ => ty,
+ },
+ binders,
+ )
+ }
+
+ // Packages the canonicalized value together with the kinds of the collected
+ // free variables, consuming the canonicalizer.
+ fn into_canonicalized<T>(self, result: T) -> Canonicalized<T> {
+ let kinds = self
+ .free_vars
+ .iter()
+ .map(|v| match v {
+ // mapping MaybeNeverTypeVar to the same kind as general ones
+ // should be fine, because as opposed to int or float type vars,
+ // they don't restrict what kind of type can go into them, they
+ // just affect fallback.
+ InferTy::TypeVar(_) | InferTy::MaybeNeverTypeVar(_) => TyKind::General,
+ InferTy::IntVar(_) => TyKind::Integer,
+ InferTy::FloatVar(_) => TyKind::Float,
+ })
+ .collect();
+ Canonicalized { value: Canonical { value: result, kinds }, free_vars: self.free_vars }
+ }
+
+ pub(crate) fn canonicalize_ty(mut self, ty: Ty) -> Canonicalized<Ty> {
+ let result = self.do_canonicalize(ty, DebruijnIndex::INNERMOST);
+ self.into_canonicalized(result)
+ }
+
+ // Canonicalizes an obligation's trait ref or projection; the environment
+ // is carried through unchanged.
+ pub(crate) fn canonicalize_obligation(
+ mut self,
+ obligation: InEnvironment<Obligation>,
+ ) -> Canonicalized<InEnvironment<Obligation>> {
+ let result = match obligation.value {
+ Obligation::Trait(tr) => {
+ Obligation::Trait(self.do_canonicalize(tr, DebruijnIndex::INNERMOST))
+ }
+ Obligation::Projection(pr) => {
+ Obligation::Projection(self.do_canonicalize(pr, DebruijnIndex::INNERMOST))
+ }
+ };
+ self.into_canonicalized(InEnvironment {
+ value: result,
+ environment: obligation.environment,
+ })
+ }
+}
+
+impl<T> Canonicalized<T> {
+ // Maps bound variables in `ty` back to the original inference variables
+ // recorded in `free_vars` (the inverse of canonicalization). Only bound
+ // vars at or above `binders` are substituted, i.e. the truly free ones.
+ pub fn decanonicalize_ty(&self, mut ty: Ty) -> Ty {
+ ty.walk_mut_binders(
+ &mut |ty, binders| {
+ if let &mut Ty::Bound(bound) = ty {
+ if bound.debruijn >= binders {
+ *ty = Ty::Infer(self.free_vars[bound.index]);
+ }
+ }
+ },
+ DebruijnIndex::INNERMOST,
+ );
+ ty
+ }
+
+ // Applies a trait-solver solution back onto the inference context by
+ // unifying each original free variable with its solved type.
+ pub fn apply_solution(&self, ctx: &mut InferenceContext<'_>, solution: Canonical<Substs>) {
+ // the solution may contain new variables, which we need to convert to new inference vars
+ let new_vars = Substs(
+ solution
+ .kinds
+ .iter()
+ .map(|k| match k {
+ TyKind::General => ctx.table.new_type_var(),
+ TyKind::Integer => ctx.table.new_integer_var(),
+ TyKind::Float => ctx.table.new_float_var(),
+ })
+ .collect(),
+ );
+ for (i, ty) in solution.value.into_iter().enumerate() {
+ let var = self.free_vars[i];
+ // eagerly replace projections in the type; we may be getting types
+ // e.g. from where clauses where this hasn't happened yet
+ let ty = ctx.normalize_associated_types_in(ty.clone().subst_bound_vars(&new_vars));
+ ctx.table.unify(&Ty::Infer(var), &ty);
+ }
+ }
+}
+
+/// Attempts to unify the two canonicalized types using a throwaway inference
+/// table, returning the substitution for the canonical variables on success.
+pub fn unify(tys: &Canonical<(Ty, Ty)>) -> Option<Substs> {
+ let mut table = InferenceTable::new();
+ let vars = Substs(
+ tys.kinds
+ .iter()
+ // we always use type vars here because we want everything to
+ // fallback to Unknown in the end (kind of hacky, as below)
+ .map(|_| table.new_type_var())
+ .collect(),
+ );
+ let ty1_with_vars = tys.value.0.clone().subst_bound_vars(&vars);
+ let ty2_with_vars = tys.value.1.clone().subst_bound_vars(&vars);
+ if !table.unify(&ty1_with_vars, &ty2_with_vars) {
+ return None;
+ }
+ // default any type vars that weren't unified back to their original bound vars
+ // (kind of hacky)
+ for (i, var) in vars.iter().enumerate() {
+ if &*table.resolve_ty_shallow(var) == var {
+ table.unify(var, &Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i)));
+ }
+ }
+ Some(
+ Substs::builder(tys.kinds.len())
+ .fill(vars.iter().map(|v| table.resolve_ty_completely(v.clone())))
+ .build(),
+ )
+}
+
+/// The mutable unification state of type inference: a union-find table
+/// mapping type variables to their known types (or to each other).
+#[derive(Clone, Debug)]
+pub(crate) struct InferenceTable {
+ pub(super) var_unification_table: InPlaceUnificationTable<TypeVarId>,
+}
+
+impl InferenceTable {
+ pub fn new() -> Self {
+ InferenceTable { var_unification_table: InPlaceUnificationTable::new() }
+ }
+
+ // Creates a fresh general type variable with no known type.
+ pub fn new_type_var(&mut self) -> Ty {
+ Ty::Infer(InferTy::TypeVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
+ }
+
+ // Creates a fresh variable restricted to integer types.
+ pub fn new_integer_var(&mut self) -> Ty {
+ Ty::Infer(InferTy::IntVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
+ }
+
+ // Creates a fresh variable restricted to floating-point types.
+ pub fn new_float_var(&mut self) -> Ty {
+ Ty::Infer(InferTy::FloatVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
+ }
+
+ // Creates a fresh variable that may fall back to `!` (never).
+ pub fn new_maybe_never_type_var(&mut self) -> Ty {
+ Ty::Infer(InferTy::MaybeNeverTypeVar(
+ self.var_unification_table.new_key(TypeVarValue::Unknown),
+ ))
+ }
+
+ pub fn resolve_ty_completely(&mut self, ty: Ty) -> Ty {
+ self.resolve_ty_completely_inner(&mut Vec::new(), ty)
+ }
+
+ pub fn resolve_ty_as_possible(&mut self, ty: Ty) -> Ty {
+ self.resolve_ty_as_possible_inner(&mut Vec::new(), ty)
+ }
+
+ // Public entry point for unification; starts the recursion at depth 0.
+ pub fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
+ self.unify_inner(ty1, ty2, 0)
+ }
+
+ // Unifies two substitution lists pairwise; all pairs must unify.
+ pub fn unify_substs(&mut self, substs1: &Substs, substs2: &Substs, depth: usize) -> bool {
+ substs1.0.iter().zip(substs2.0.iter()).all(|(t1, t2)| self.unify_inner(t1, t2, depth))
+ }
+
+ fn unify_inner(&mut self, ty1: &Ty, ty2: &Ty, depth: usize) -> bool {
+ if depth > 1000 {
+ // prevent stackoverflows
+ panic!("infinite recursion in unification");
+ }
+ if ty1 == ty2 {
+ return true;
+ }
+ // try to resolve type vars first
+ let ty1 = self.resolve_ty_shallow(ty1);
+ let ty2 = self.resolve_ty_shallow(ty2);
+ match (&*ty1, &*ty2) {
+ // Same constructor: structurally unify the parameters.
+ (Ty::Apply(a_ty1), Ty::Apply(a_ty2)) if a_ty1.ctor == a_ty2.ctor => {
+ self.unify_substs(&a_ty1.parameters, &a_ty2.parameters, depth + 1)
+ }
+
+ _ => self.unify_inner_trivial(&ty1, &ty2, depth),
+ }
+ }
+
+ // Handles the non-structural unification cases: Unknown, placeholders,
+ // dyn types, and the various inference-variable combinations.
+ pub(super) fn unify_inner_trivial(&mut self, ty1: &Ty, ty2: &Ty, depth: usize) -> bool {
+ match (ty1, ty2) {
+ // Unknown unifies with anything without learning anything.
+ (Ty::Unknown, _) | (_, Ty::Unknown) => true,
+
+ (Ty::Placeholder(p1), Ty::Placeholder(p2)) if *p1 == *p2 => true,
+
+ (Ty::Dyn(dyn1), Ty::Dyn(dyn2)) if dyn1.len() == dyn2.len() => {
+ for (pred1, pred2) in dyn1.iter().zip(dyn2.iter()) {
+ if !self.unify_preds(pred1, pred2, depth + 1) {
+ return false;
+ }
+ }
+ true
+ }
+
+ (Ty::Infer(InferTy::TypeVar(tv1)), Ty::Infer(InferTy::TypeVar(tv2)))
+ | (Ty::Infer(InferTy::IntVar(tv1)), Ty::Infer(InferTy::IntVar(tv2)))
+ | (Ty::Infer(InferTy::FloatVar(tv1)), Ty::Infer(InferTy::FloatVar(tv2)))
+ | (
+ Ty::Infer(InferTy::MaybeNeverTypeVar(tv1)),
+ Ty::Infer(InferTy::MaybeNeverTypeVar(tv2)),
+ ) => {
+ // both type vars are unknown since we tried to resolve them
+ self.var_unification_table.union(*tv1, *tv2);
+ true
+ }
+
+ // The order of MaybeNeverTypeVar matters here.
+ // Unifying MaybeNeverTypeVar and TypeVar will let the latter become MaybeNeverTypeVar.
+ // Unifying MaybeNeverTypeVar and other concrete type will let the former become it.
+ (Ty::Infer(InferTy::TypeVar(tv)), other)
+ | (other, Ty::Infer(InferTy::TypeVar(tv)))
+ | (Ty::Infer(InferTy::MaybeNeverTypeVar(tv)), other)
+ | (other, Ty::Infer(InferTy::MaybeNeverTypeVar(tv)))
+ | (Ty::Infer(InferTy::IntVar(tv)), other @ ty_app!(TypeCtor::Int(_)))
+ | (other @ ty_app!(TypeCtor::Int(_)), Ty::Infer(InferTy::IntVar(tv)))
+ | (Ty::Infer(InferTy::FloatVar(tv)), other @ ty_app!(TypeCtor::Float(_)))
+ | (other @ ty_app!(TypeCtor::Float(_)), Ty::Infer(InferTy::FloatVar(tv))) => {
+ // the type var is unknown since we tried to resolve it
+ self.var_unification_table.union_value(*tv, TypeVarValue::Known(other.clone()));
+ true
+ }
+
+ _ => false,
+ }
+ }
+
+ // Unifies two generic predicates (trait bounds / projections) by unifying
+ // their substitutions when they refer to the same trait / assoc type.
+ fn unify_preds(
+ &mut self,
+ pred1: &GenericPredicate,
+ pred2: &GenericPredicate,
+ depth: usize,
+ ) -> bool {
+ match (pred1, pred2) {
+ (GenericPredicate::Implemented(tr1), GenericPredicate::Implemented(tr2))
+ if tr1.trait_ == tr2.trait_ =>
+ {
+ self.unify_substs(&tr1.substs, &tr2.substs, depth + 1)
+ }
+ (GenericPredicate::Projection(proj1), GenericPredicate::Projection(proj2))
+ if proj1.projection_ty.associated_ty == proj2.projection_ty.associated_ty =>
+ {
+ self.unify_substs(
+ &proj1.projection_ty.parameters,
+ &proj2.projection_ty.parameters,
+ depth + 1,
+ ) && self.unify_inner(&proj1.ty, &proj2.ty, depth + 1)
+ }
+ _ => false,
+ }
+ }
+
+ /// If `ty` is a type variable with known type, returns that type;
+ /// otherwise, return ty.
+ pub fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> {
+ let mut ty = Cow::Borrowed(ty);
+ // The type variable could resolve to a int/float variable. Hence try
+ // resolving up to three times; each type of variable shouldn't occur
+ // more than once
+ for i in 0..3 {
+ if i > 0 {
+ mark::hit!(type_var_resolves_to_int_var);
+ }
+ match &*ty {
+ Ty::Infer(tv) => {
+ let inner = tv.to_inner();
+ match self.var_unification_table.inlined_probe_value(inner).known() {
+ Some(known_ty) => {
+ // The known_ty can't be a type var itself
+ ty = Cow::Owned(known_ty.clone());
+ }
+ _ => return ty,
+ }
+ }
+ _ => return ty,
+ }
+ }
+ log::error!("Inference variable still not resolved: {:?}", ty);
+ ty
+ }
+
+ /// Resolves the type as far as currently possible, replacing type variables
+ /// by their known types. All types returned by the infer_* functions should
+ /// be resolved as far as possible, i.e. contain no type variables with
+ /// known type.
+ fn resolve_ty_as_possible_inner(&mut self, tv_stack: &mut Vec<TypeVarId>, ty: Ty) -> Ty {
+ ty.fold(&mut |ty| match ty {
+ Ty::Infer(tv) => {
+ let inner = tv.to_inner();
+ if tv_stack.contains(&inner) {
+ mark::hit!(type_var_cycles_resolve_as_possible);
+ // recursive type
+ return tv.fallback_value();
+ }
+ if let Some(known_ty) =
+ self.var_unification_table.inlined_probe_value(inner).known()
+ {
+ // known_ty may contain other variables that are known by now
+ tv_stack.push(inner);
+ let result = self.resolve_ty_as_possible_inner(tv_stack, known_ty.clone());
+ tv_stack.pop();
+ result
+ } else {
+ // Unresolved variables are left in place (unlike
+ // `resolve_ty_completely_inner`, which falls back).
+ ty
+ }
+ }
+ _ => ty,
+ })
+ }
+
+ /// Resolves the type completely; type variables without known type are
+ /// replaced by Ty::Unknown.
+ fn resolve_ty_completely_inner(&mut self, tv_stack: &mut Vec<TypeVarId>, ty: Ty) -> Ty {
+ ty.fold(&mut |ty| match ty {
+ Ty::Infer(tv) => {
+ let inner = tv.to_inner();
+ if tv_stack.contains(&inner) {
+ mark::hit!(type_var_cycles_resolve_completely);
+ // recursive type
+ return tv.fallback_value();
+ }
+ if let Some(known_ty) =
+ self.var_unification_table.inlined_probe_value(inner).known()
+ {
+ // known_ty may contain other variables that are known by now
+ tv_stack.push(inner);
+ let result = self.resolve_ty_completely_inner(tv_stack, known_ty.clone());
+ tv_stack.pop();
+ result
+ } else {
+ tv.fallback_value()
+ }
+ }
+ _ => ty,
+ })
+ }
+}
+
+/// The ID of a type variable.
+// Newtype over the raw index used as the key of the ena unification table.
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
+pub struct TypeVarId(pub(super) u32);
+
+// Lets `TypeVarId` act as a key in ena's `InPlaceUnificationTable`:
+// a trivial index <-> id conversion plus a debug tag.
+impl UnifyKey for TypeVarId {
+ type Value = TypeVarValue;
+
+ fn index(&self) -> u32 {
+ self.0
+ }
+
+ fn from_index(i: u32) -> Self {
+ TypeVarId(i)
+ }
+
+ fn tag() -> &'static str {
+ "TypeVarId"
+ }
+}
+
+/// The value of a type variable: either we already know the type, or we don't
+/// know it yet.
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub enum TypeVarValue {
+ Known(Ty),
+ Unknown,
+}
+
+impl TypeVarValue {
+ // Returns the known type, if any (i.e. `Known(ty)` -> `Some(&ty)`).
+ fn known(&self) -> Option<&Ty> {
+ match self {
+ TypeVarValue::Known(ty) => Some(ty),
+ TypeVarValue::Unknown => None,
+ }
+ }
+}
+
+// Tells ena how to merge values when two keys are unioned: a known type
+// always wins over Unknown; two known types are a caller bug (they must be
+// unified structurally before the keys are unioned).
+impl UnifyValue for TypeVarValue {
+ type Error = NoError;
+
+ fn unify_values(value1: &Self, value2: &Self) -> Result<Self, NoError> {
+ match (value1, value2) {
+ // We should never equate two type variables, both of which have
+ // known types. Instead, we recursively equate those types.
+ (TypeVarValue::Known(t1), TypeVarValue::Known(t2)) => panic!(
+ "equating two type variables, both of which have known types: {:?} and {:?}",
+ t1, t2
+ ),
+
+ // If one side is known, prefer that one.
+ (TypeVarValue::Known(..), TypeVarValue::Unknown) => Ok(value1.clone()),
+ (TypeVarValue::Unknown, TypeVarValue::Known(..)) => Ok(value2.clone()),
+
+ (TypeVarValue::Unknown, TypeVarValue::Unknown) => Ok(TypeVarValue::Unknown),
+ }
+ }
+}
--- /dev/null
+//! The type system. We currently use this to infer types for completion, hover
+//! information and various assists.
+
+// Crate-local `eprintln!` that forwards to `stdx::eprintln!` so debug
+// printing goes through the workspace's wrapper.
+#[allow(unused)]
+macro_rules! eprintln {
+ ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
+mod autoderef;
+pub mod primitive;
+pub mod traits;
+pub mod method_resolution;
+mod op;
+mod lower;
+pub(crate) mod infer;
+pub(crate) mod utils;
+
+pub mod display;
+pub mod db;
+pub mod diagnostics;
+
+#[cfg(test)]
+mod tests;
+#[cfg(test)]
+mod test_db;
+
+use std::{iter, mem, ops::Deref, sync::Arc};
+
+use base_db::{salsa, CrateId};
+use hir_def::{
+ expr::ExprId,
+ type_ref::{Mutability, Rawness},
+ AdtId, AssocContainerId, DefWithBodyId, GenericDefId, HasModule, Lookup, TraitId, TypeAliasId,
+ TypeParamId,
+};
+use itertools::Itertools;
+
+use crate::{
+ db::HirDatabase,
+ display::HirDisplay,
+ primitive::{FloatTy, IntTy},
+ utils::{generics, make_mut_slice, Generics},
+};
+
+pub use autoderef::autoderef;
+pub use infer::{InferTy, InferenceResult};
+pub use lower::CallableDefId;
+pub use lower::{
+ associated_type_shorthand_candidates, callable_item_sig, ImplTraitLoweringMode, TyDefId,
+ TyLoweringContext, ValueTyDefId,
+};
+pub use traits::{InEnvironment, Obligation, ProjectionPredicate, TraitEnvironment};
+
+pub use chalk_ir::{BoundVar, DebruijnIndex};
+
+/// A type constructor or type name: this might be something like the primitive
+/// type `bool`, a struct like `Vec`, or things like function pointers or
+/// tuples.
+#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
+pub enum TypeCtor {
+ /// The primitive boolean type. Written as `bool`.
+ Bool,
+
+ /// The primitive character type; holds a Unicode scalar value
+ /// (a non-surrogate code point). Written as `char`.
+ Char,
+
+ /// A primitive integer type. For example, `i32`.
+ Int(IntTy),
+
+ /// A primitive floating-point type. For example, `f64`.
+ Float(FloatTy),
+
+ /// Structures, enumerations and unions.
+ Adt(AdtId),
+
+ /// The pointee of a string slice. Written as `str`.
+ Str,
+
+ /// The pointee of an array slice. Written as `[T]`.
+ Slice,
+
+ /// An array with the given length. Written as `[T; n]`.
+ Array,
+
+ /// A raw pointer. Written as `*mut T` or `*const T`
+ RawPtr(Mutability),
+
+ /// A reference; a pointer with an associated lifetime. Written as
+ /// `&'a mut T` or `&'a T`.
+ Ref(Mutability),
+
+ /// The anonymous type of a function declaration/definition. Each
+ /// function has a unique type, which is output (for a function
+ /// named `foo` returning an `i32`) as `fn() -> i32 {foo}`.
+ ///
+ /// This includes tuple struct / enum variant constructors as well.
+ ///
+ /// For example the type of `bar` here:
+ ///
+ /// ```
+ /// fn foo() -> i32 { 1 }
+ /// let bar = foo; // bar: fn() -> i32 {foo}
+ /// ```
+ FnDef(CallableDefId),
+
+ /// A pointer to a function. Written as `fn() -> i32`.
+ ///
+ /// For example the type of `bar` here:
+ ///
+ /// ```
+ /// fn foo() -> i32 { 1 }
+ /// let bar: fn() -> i32 = foo;
+ /// ```
+ // FIXME make this a Ty variant like in Chalk
+ FnPtr { num_args: u16, is_varargs: bool },
+
+ /// The never type `!`.
+ Never,
+
+ /// A tuple type. For example, `(i32, bool)`.
+ /// `cardinality` is the number of elements; the element types are the
+ /// constructor's type parameters.
+ Tuple { cardinality: u16 },
+
+ /// Represents an associated item like `Iterator::Item`. This is used
+ /// when we have tried to normalize a projection like `T::Item` but
+ /// couldn't find a better representation. In that case, we generate
+ /// an **application type** like `(Iterator::Item)<T>`.
+ AssociatedType(TypeAliasId),
+
+ /// This represents a placeholder for an opaque type in situations where we
+ /// don't know the hidden type (i.e. currently almost always). This is
+ /// analogous to the `AssociatedType` type constructor. As with that one,
+ /// these are only produced by Chalk.
+ OpaqueType(OpaqueTyId),
+
+ /// The type of a specific closure.
+ ///
+ /// The closure signature is stored in a `FnPtr` type in the first type
+ /// parameter.
+ Closure { def: DefWithBodyId, expr: ExprId },
+}
+
+impl TypeCtor {
+ pub fn num_ty_params(self, db: &dyn HirDatabase) -> usize {
+ match self {
+ TypeCtor::Bool
+ | TypeCtor::Char
+ | TypeCtor::Int(_)
+ | TypeCtor::Float(_)
+ | TypeCtor::Str
+ | TypeCtor::Never => 0,
+ TypeCtor::Slice
+ | TypeCtor::Array
+ | TypeCtor::RawPtr(_)
+ | TypeCtor::Ref(_)
+ | TypeCtor::Closure { .. } // 1 param representing the signature of the closure
+ => 1,
+ TypeCtor::Adt(adt) => {
+ let generic_params = generics(db.upcast(), adt.into());
+ generic_params.len()
+ }
+ TypeCtor::FnDef(callable) => {
+ let generic_params = generics(db.upcast(), callable.into());
+ generic_params.len()
+ }
+ TypeCtor::AssociatedType(type_alias) => {
+ let generic_params = generics(db.upcast(), type_alias.into());
+ generic_params.len()
+ }
+ TypeCtor::OpaqueType(opaque_ty_id) => {
+ match opaque_ty_id {
+ OpaqueTyId::ReturnTypeImplTrait(func, _) => {
+ let generic_params = generics(db.upcast(), func.into());
+ generic_params.len()
+ }
+ }
+ }
+ TypeCtor::FnPtr { num_args, is_varargs: _ } => num_args as usize + 1,
+ TypeCtor::Tuple { cardinality } => cardinality as usize,
+ }
+ }
+
+ pub fn krate(self, db: &dyn HirDatabase) -> Option<CrateId> {
+ match self {
+ TypeCtor::Bool
+ | TypeCtor::Char
+ | TypeCtor::Int(_)
+ | TypeCtor::Float(_)
+ | TypeCtor::Str
+ | TypeCtor::Never
+ | TypeCtor::Slice
+ | TypeCtor::Array
+ | TypeCtor::RawPtr(_)
+ | TypeCtor::Ref(_)
+ | TypeCtor::FnPtr { .. }
+ | TypeCtor::Tuple { .. } => None,
+ // A closure's krate should be irrelevant for coherence, since closures cannot appear in impl headers.
+ TypeCtor::Closure { .. } => None,
+ TypeCtor::Adt(adt) => Some(adt.module(db.upcast()).krate),
+ TypeCtor::FnDef(callable) => Some(callable.krate(db)),
+ TypeCtor::AssociatedType(type_alias) => {
+ Some(type_alias.lookup(db.upcast()).module(db.upcast()).krate)
+ }
+ TypeCtor::OpaqueType(opaque_ty_id) => match opaque_ty_id {
+ OpaqueTyId::ReturnTypeImplTrait(func, _) => {
+ Some(func.lookup(db.upcast()).module(db.upcast()).krate)
+ }
+ },
+ }
+ }
+
+ pub fn as_generic_def(self) -> Option<GenericDefId> {
+ match self {
+ TypeCtor::Bool
+ | TypeCtor::Char
+ | TypeCtor::Int(_)
+ | TypeCtor::Float(_)
+ | TypeCtor::Str
+ | TypeCtor::Never
+ | TypeCtor::Slice
+ | TypeCtor::Array
+ | TypeCtor::RawPtr(_)
+ | TypeCtor::Ref(_)
+ | TypeCtor::FnPtr { .. }
+ | TypeCtor::Tuple { .. }
+ | TypeCtor::Closure { .. } => None,
+ TypeCtor::Adt(adt) => Some(adt.into()),
+ TypeCtor::FnDef(callable) => Some(callable.into()),
+ TypeCtor::AssociatedType(type_alias) => Some(type_alias.into()),
+ TypeCtor::OpaqueType(_impl_trait_id) => None,
+ }
+ }
+}
+
+/// A nominal type with (maybe 0) type parameters. This might be a primitive
+/// type like `bool`, a struct, tuple, function pointer, reference or
+/// several other things.
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct ApplicationTy {
+ pub ctor: TypeCtor,
+ pub parameters: Substs,
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct OpaqueTy {
+ pub opaque_ty_id: OpaqueTyId,
+ pub parameters: Substs,
+}
+
+/// A "projection" type corresponds to an (unnormalized)
+/// projection like `<P0 as Trait<P1..Pn>>::Foo`. Note that the
+/// trait and all its parameters are fully known.
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct ProjectionTy {
+ pub associated_ty: TypeAliasId,
+ pub parameters: Substs,
+}
+
+impl ProjectionTy {
+ pub fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef {
+ TraitRef { trait_: self.trait_(db), substs: self.parameters.clone() }
+ }
+
+ fn trait_(&self, db: &dyn HirDatabase) -> TraitId {
+ match self.associated_ty.lookup(db.upcast()).container {
+ AssocContainerId::TraitId(it) => it,
+ _ => panic!("projection ty without parent trait"),
+ }
+ }
+}
+
+impl TypeWalk for ProjectionTy {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ self.parameters.walk(f);
+ }
+
+ fn walk_mut_binders(
+ &mut self,
+ f: &mut impl FnMut(&mut Ty, DebruijnIndex),
+ binders: DebruijnIndex,
+ ) {
+ self.parameters.walk_mut_binders(f, binders);
+ }
+}
+
+/// A type.
+///
+/// See also the `TyKind` enum in rustc (librustc/ty/sty.rs), which represents
+/// the same thing (but in a different way).
+///
+/// This should be cheap to clone.
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub enum Ty {
+ /// A nominal type with (maybe 0) type parameters. This might be a primitive
+ /// type like `bool`, a struct, tuple, function pointer, reference or
+ /// several other things.
+ Apply(ApplicationTy),
+
+ /// A "projection" type corresponds to an (unnormalized)
+ /// projection like `<P0 as Trait<P1..Pn>>::Foo`. Note that the
+ /// trait and all its parameters are fully known.
+ Projection(ProjectionTy),
+
+ /// An opaque type (`impl Trait`).
+ ///
+ /// This is currently only used for return type impl trait; each instance of
+ /// `impl Trait` in a return type gets its own ID.
+ Opaque(OpaqueTy),
+
+ /// A placeholder for a type parameter; for example, `T` in `fn f<T>(x: T)
+ /// {}` when we're type-checking the body of that function. In this
+ /// situation, we know this stands for *some* type, but don't know the exact
+ /// type.
+ Placeholder(TypeParamId),
+
+ /// A bound type variable. This is used in various places: when representing
+ /// some polymorphic type like the type of function `fn f<T>`, the type
+ /// parameters get turned into variables; during trait resolution, inference
+ /// variables get turned into bound variables and back; and in `Dyn` the
+ /// `Self` type is represented with a bound variable as well.
+ Bound(BoundVar),
+
+ /// A type variable used during type checking.
+ Infer(InferTy),
+
+ /// A trait object (`dyn Trait` or bare `Trait` in pre-2018 Rust).
+ ///
+ /// The predicates are quantified over the `Self` type, i.e. `Ty::Bound(0)`
+ /// represents the `Self` type inside the bounds. This is currently
+ /// implicit; Chalk has the `Binders` struct to make it explicit, but it
+ /// didn't seem worth the overhead yet.
+ Dyn(Arc<[GenericPredicate]>),
+
+ /// A placeholder for a type which could not be computed; this is propagated
+ /// to avoid useless error messages. Doubles as a placeholder where type
+ /// variables are inserted before type checking, since we want to try to
+ /// infer a better type here anyway -- for the IDE use case, we want to try
+ /// to infer as much as possible even in the presence of type errors.
+ Unknown,
+}
+
+/// A list of substitutions for generic parameters.
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct Substs(Arc<[Ty]>);
+
+impl TypeWalk for Substs {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ for t in self.0.iter() {
+ t.walk(f);
+ }
+ }
+
+ fn walk_mut_binders(
+ &mut self,
+ f: &mut impl FnMut(&mut Ty, DebruijnIndex),
+ binders: DebruijnIndex,
+ ) {
+ for t in make_mut_slice(&mut self.0) {
+ t.walk_mut_binders(f, binders);
+ }
+ }
+}
+
+impl Substs {
+ pub fn empty() -> Substs {
+ Substs(Arc::new([]))
+ }
+
+ pub fn single(ty: Ty) -> Substs {
+ Substs(Arc::new([ty]))
+ }
+
+ pub fn prefix(&self, n: usize) -> Substs {
+ Substs(self.0[..std::cmp::min(self.0.len(), n)].into())
+ }
+
+ pub fn suffix(&self, n: usize) -> Substs {
+ Substs(self.0[self.0.len() - std::cmp::min(self.0.len(), n)..].into())
+ }
+
+ pub fn as_single(&self) -> &Ty {
+ if self.0.len() != 1 {
+ panic!("expected substs of len 1, got {:?}", self);
+ }
+ &self.0[0]
+ }
+
+ /// Return Substs that replace each parameter by itself (i.e. `Ty::Placeholder`).
+ pub(crate) fn type_params_for_generics(generic_params: &Generics) -> Substs {
+ Substs(generic_params.iter().map(|(id, _)| Ty::Placeholder(id)).collect())
+ }
+
+ /// Return Substs that replace each parameter by itself (i.e. `Ty::Placeholder`).
+ pub fn type_params(db: &dyn HirDatabase, def: impl Into<GenericDefId>) -> Substs {
+ let params = generics(db.upcast(), def.into());
+ Substs::type_params_for_generics(¶ms)
+ }
+
+ /// Return Substs that replace each parameter by a bound variable.
+ pub(crate) fn bound_vars(generic_params: &Generics, debruijn: DebruijnIndex) -> Substs {
+ Substs(
+ generic_params
+ .iter()
+ .enumerate()
+ .map(|(idx, _)| Ty::Bound(BoundVar::new(debruijn, idx)))
+ .collect(),
+ )
+ }
+
+ pub fn build_for_def(db: &dyn HirDatabase, def: impl Into<GenericDefId>) -> SubstsBuilder {
+ let def = def.into();
+ let params = generics(db.upcast(), def);
+ let param_count = params.len();
+ Substs::builder(param_count)
+ }
+
+ pub(crate) fn build_for_generics(generic_params: &Generics) -> SubstsBuilder {
+ Substs::builder(generic_params.len())
+ }
+
+ pub fn build_for_type_ctor(db: &dyn HirDatabase, type_ctor: TypeCtor) -> SubstsBuilder {
+ Substs::builder(type_ctor.num_ty_params(db))
+ }
+
+ fn builder(param_count: usize) -> SubstsBuilder {
+ SubstsBuilder { vec: Vec::with_capacity(param_count), param_count }
+ }
+}
+
+/// Returns the index of a parameter in the generic type parameter list by its id.
+pub fn param_idx(db: &dyn HirDatabase, id: TypeParamId) -> Option<usize> {
+ generics(db.upcast(), id.parent).param_idx(id)
+}
+
+#[derive(Debug, Clone)]
+pub struct SubstsBuilder {
+ vec: Vec<Ty>,
+ param_count: usize,
+}
+
+impl SubstsBuilder {
+ pub fn build(self) -> Substs {
+ assert_eq!(self.vec.len(), self.param_count);
+ Substs(self.vec.into())
+ }
+
+ pub fn push(mut self, ty: Ty) -> Self {
+ self.vec.push(ty);
+ self
+ }
+
+ fn remaining(&self) -> usize {
+ self.param_count - self.vec.len()
+ }
+
+ pub fn fill_with_bound_vars(self, debruijn: DebruijnIndex, starting_from: usize) -> Self {
+ self.fill((starting_from..).map(|idx| Ty::Bound(BoundVar::new(debruijn, idx))))
+ }
+
+ pub fn fill_with_unknown(self) -> Self {
+ self.fill(iter::repeat(Ty::Unknown))
+ }
+
+ pub fn fill(mut self, filler: impl Iterator<Item = Ty>) -> Self {
+ self.vec.extend(filler.take(self.remaining()));
+ assert_eq!(self.remaining(), 0);
+ self
+ }
+
+ pub fn use_parent_substs(mut self, parent_substs: &Substs) -> Self {
+ assert!(self.vec.is_empty());
+ assert!(parent_substs.len() <= self.param_count);
+ self.vec.extend(parent_substs.iter().cloned());
+ self
+ }
+}
+
+impl Deref for Substs {
+ type Target = [Ty];
+
+ fn deref(&self) -> &[Ty] {
+ &self.0
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
+pub struct Binders<T> {
+ pub num_binders: usize,
+ pub value: T,
+}
+
+impl<T> Binders<T> {
+ pub fn new(num_binders: usize, value: T) -> Self {
+ Self { num_binders, value }
+ }
+
+ pub fn as_ref(&self) -> Binders<&T> {
+ Binders { num_binders: self.num_binders, value: &self.value }
+ }
+
+ pub fn map<U>(self, f: impl FnOnce(T) -> U) -> Binders<U> {
+ Binders { num_binders: self.num_binders, value: f(self.value) }
+ }
+
+ pub fn filter_map<U>(self, f: impl FnOnce(T) -> Option<U>) -> Option<Binders<U>> {
+ Some(Binders { num_binders: self.num_binders, value: f(self.value)? })
+ }
+}
+
+impl<T: Clone> Binders<&T> {
+ pub fn cloned(&self) -> Binders<T> {
+ Binders { num_binders: self.num_binders, value: self.value.clone() }
+ }
+}
+
+impl<T: TypeWalk> Binders<T> {
+ /// Substitutes all variables.
+ pub fn subst(self, subst: &Substs) -> T {
+ assert_eq!(subst.len(), self.num_binders);
+ self.value.subst_bound_vars(subst)
+ }
+
+ /// Substitutes just a prefix of the variables (shifting the rest).
+ pub fn subst_prefix(self, subst: &Substs) -> Binders<T> {
+ assert!(subst.len() < self.num_binders);
+ Binders::new(self.num_binders - subst.len(), self.value.subst_bound_vars(subst))
+ }
+}
+
+impl<T: TypeWalk> TypeWalk for Binders<T> {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ self.value.walk(f);
+ }
+
+ fn walk_mut_binders(
+ &mut self,
+ f: &mut impl FnMut(&mut Ty, DebruijnIndex),
+ binders: DebruijnIndex,
+ ) {
+ self.value.walk_mut_binders(f, binders.shifted_in())
+ }
+}
+
+/// A trait with type parameters. This includes the `Self`, so this represents a concrete type implementing the trait.
+/// Name to be bikeshedded: TraitBound? TraitImplements?
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct TraitRef {
+ /// FIXME name?
+ pub trait_: TraitId,
+ pub substs: Substs,
+}
+
+impl TraitRef {
+ pub fn self_ty(&self) -> &Ty {
+ &self.substs[0]
+ }
+}
+
+impl TypeWalk for TraitRef {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ self.substs.walk(f);
+ }
+
+ fn walk_mut_binders(
+ &mut self,
+ f: &mut impl FnMut(&mut Ty, DebruijnIndex),
+ binders: DebruijnIndex,
+ ) {
+ self.substs.walk_mut_binders(f, binders);
+ }
+}
+
+/// Like `generics::WherePredicate`, but with resolved types: A condition on the
+/// parameters of a generic item.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum GenericPredicate {
+ /// The given trait needs to be implemented for its type parameters.
+ Implemented(TraitRef),
+ /// An associated type binding like in `Iterator<Item = T>`.
+ Projection(ProjectionPredicate),
+ /// We couldn't resolve the trait reference. (If some type parameters can't
+ /// be resolved, they will just be Unknown).
+ Error,
+}
+
+impl GenericPredicate {
+ pub fn is_error(&self) -> bool {
+ matches!(self, GenericPredicate::Error)
+ }
+
+ pub fn is_implemented(&self) -> bool {
+ matches!(self, GenericPredicate::Implemented(_))
+ }
+
+ pub fn trait_ref(&self, db: &dyn HirDatabase) -> Option<TraitRef> {
+ match self {
+ GenericPredicate::Implemented(tr) => Some(tr.clone()),
+ GenericPredicate::Projection(proj) => Some(proj.projection_ty.trait_ref(db)),
+ GenericPredicate::Error => None,
+ }
+ }
+}
+
+impl TypeWalk for GenericPredicate {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ match self {
+ GenericPredicate::Implemented(trait_ref) => trait_ref.walk(f),
+ GenericPredicate::Projection(projection_pred) => projection_pred.walk(f),
+ GenericPredicate::Error => {}
+ }
+ }
+
+ fn walk_mut_binders(
+ &mut self,
+ f: &mut impl FnMut(&mut Ty, DebruijnIndex),
+ binders: DebruijnIndex,
+ ) {
+ match self {
+ GenericPredicate::Implemented(trait_ref) => trait_ref.walk_mut_binders(f, binders),
+ GenericPredicate::Projection(projection_pred) => {
+ projection_pred.walk_mut_binders(f, binders)
+ }
+ GenericPredicate::Error => {}
+ }
+ }
+}
+
+/// Basically a claim (currently not validated / checked) that the contained
+/// type / trait ref contains no inference variables; any inference variables it
+/// contained have been replaced by bound variables, and `kinds` tells us how
+/// many there are and whether they were normal or float/int variables. This is
+/// used to erase irrelevant differences between types before using them in
+/// queries.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Canonical<T> {
+ pub value: T,
+ pub kinds: Arc<[TyKind]>,
+}
+
+impl<T> Canonical<T> {
+ pub fn new(value: T, kinds: impl IntoIterator<Item = TyKind>) -> Self {
+ Self { value, kinds: kinds.into_iter().collect() }
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum TyKind {
+ General,
+ Integer,
+ Float,
+}
+
+/// A function signature as seen by type inference: Several parameter types and
+/// one return type.
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct FnSig {
+ params_and_return: Arc<[Ty]>,
+ is_varargs: bool,
+}
+
+/// A polymorphic function signature.
+pub type PolyFnSig = Binders<FnSig>;
+
+impl FnSig {
+ pub fn from_params_and_return(mut params: Vec<Ty>, ret: Ty, is_varargs: bool) -> FnSig {
+ params.push(ret);
+ FnSig { params_and_return: params.into(), is_varargs }
+ }
+
+ pub fn from_fn_ptr_substs(substs: &Substs, is_varargs: bool) -> FnSig {
+ FnSig { params_and_return: Arc::clone(&substs.0), is_varargs }
+ }
+
+ pub fn params(&self) -> &[Ty] {
+ &self.params_and_return[0..self.params_and_return.len() - 1]
+ }
+
+ pub fn ret(&self) -> &Ty {
+ &self.params_and_return[self.params_and_return.len() - 1]
+ }
+}
+
+impl TypeWalk for FnSig {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ for t in self.params_and_return.iter() {
+ t.walk(f);
+ }
+ }
+
+ fn walk_mut_binders(
+ &mut self,
+ f: &mut impl FnMut(&mut Ty, DebruijnIndex),
+ binders: DebruijnIndex,
+ ) {
+ for t in make_mut_slice(&mut self.params_and_return) {
+ t.walk_mut_binders(f, binders);
+ }
+ }
+}
+
+impl Ty {
+ pub fn simple(ctor: TypeCtor) -> Ty {
+ Ty::Apply(ApplicationTy { ctor, parameters: Substs::empty() })
+ }
+ pub fn apply_one(ctor: TypeCtor, param: Ty) -> Ty {
+ Ty::Apply(ApplicationTy { ctor, parameters: Substs::single(param) })
+ }
+ pub fn apply(ctor: TypeCtor, parameters: Substs) -> Ty {
+ Ty::Apply(ApplicationTy { ctor, parameters })
+ }
+ pub fn unit() -> Self {
+ Ty::apply(TypeCtor::Tuple { cardinality: 0 }, Substs::empty())
+ }
+ pub fn fn_ptr(sig: FnSig) -> Self {
+ Ty::apply(
+ TypeCtor::FnPtr { num_args: sig.params().len() as u16, is_varargs: sig.is_varargs },
+ Substs(sig.params_and_return),
+ )
+ }
+
+ pub fn as_reference(&self) -> Option<(&Ty, Mutability)> {
+ match self {
+ Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(mutability), parameters }) => {
+ Some((parameters.as_single(), *mutability))
+ }
+ _ => None,
+ }
+ }
+
+ pub fn as_reference_or_ptr(&self) -> Option<(&Ty, Rawness, Mutability)> {
+ match self {
+ Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(mutability), parameters }) => {
+ Some((parameters.as_single(), Rawness::Ref, *mutability))
+ }
+ Ty::Apply(ApplicationTy { ctor: TypeCtor::RawPtr(mutability), parameters }) => {
+ Some((parameters.as_single(), Rawness::RawPtr, *mutability))
+ }
+ _ => None,
+ }
+ }
+
+ pub fn strip_references(&self) -> &Ty {
+ let mut t: &Ty = self;
+
+ while let Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(_mutability), parameters }) = t {
+ t = parameters.as_single();
+ }
+
+ t
+ }
+
+ pub fn as_adt(&self) -> Option<(AdtId, &Substs)> {
+ match self {
+ Ty::Apply(ApplicationTy { ctor: TypeCtor::Adt(adt_def), parameters }) => {
+ Some((*adt_def, parameters))
+ }
+ _ => None,
+ }
+ }
+
+ pub fn as_tuple(&self) -> Option<&Substs> {
+ match self {
+ Ty::Apply(ApplicationTy { ctor: TypeCtor::Tuple { .. }, parameters }) => {
+ Some(parameters)
+ }
+ _ => None,
+ }
+ }
+
+ pub fn is_never(&self) -> bool {
+ matches!(self, Ty::Apply(ApplicationTy { ctor: TypeCtor::Never, .. }))
+ }
+
+ /// If this is a `dyn Trait` type, this returns the `Trait` part.
+ pub fn dyn_trait_ref(&self) -> Option<&TraitRef> {
+ match self {
+ Ty::Dyn(bounds) => bounds.get(0).and_then(|b| match b {
+ GenericPredicate::Implemented(trait_ref) => Some(trait_ref),
+ _ => None,
+ }),
+ _ => None,
+ }
+ }
+
+ /// If this is a `dyn Trait`, returns that trait.
+ pub fn dyn_trait(&self) -> Option<TraitId> {
+ self.dyn_trait_ref().map(|it| it.trait_)
+ }
+
+ fn builtin_deref(&self) -> Option<Ty> {
+ match self {
+ Ty::Apply(a_ty) => match a_ty.ctor {
+ TypeCtor::Ref(..) => Some(Ty::clone(a_ty.parameters.as_single())),
+ TypeCtor::RawPtr(..) => Some(Ty::clone(a_ty.parameters.as_single())),
+ _ => None,
+ },
+ _ => None,
+ }
+ }
+
+ pub fn callable_sig(&self, db: &dyn HirDatabase) -> Option<FnSig> {
+ match self {
+ Ty::Apply(a_ty) => match a_ty.ctor {
+ TypeCtor::FnPtr { is_varargs, .. } => {
+ Some(FnSig::from_fn_ptr_substs(&a_ty.parameters, is_varargs))
+ }
+ TypeCtor::FnDef(def) => {
+ let sig = db.callable_item_signature(def);
+ Some(sig.subst(&a_ty.parameters))
+ }
+ TypeCtor::Closure { .. } => {
+ let sig_param = &a_ty.parameters[0];
+ sig_param.callable_sig(db)
+ }
+ _ => None,
+ },
+ _ => None,
+ }
+ }
+
+ /// If this is a type with type parameters (an ADT or function), replaces
+ /// the `Substs` for these type parameters with the given ones. (So e.g. if
+ /// `self` is `Option<_>` and the substs contain `u32`, we'll have
+ /// `Option<u32>` afterwards.)
+ pub fn apply_substs(self, substs: Substs) -> Ty {
+ match self {
+ Ty::Apply(ApplicationTy { ctor, parameters: previous_substs }) => {
+ assert_eq!(previous_substs.len(), substs.len());
+ Ty::Apply(ApplicationTy { ctor, parameters: substs })
+ }
+ _ => self,
+ }
+ }
+
+ /// Returns the type parameters of this type if it has some (i.e. is an ADT
+ /// or function); so if `self` is `Option<u32>`, this returns the `u32`.
+ pub fn substs(&self) -> Option<Substs> {
+ match self {
+ Ty::Apply(ApplicationTy { parameters, .. }) => Some(parameters.clone()),
+ _ => None,
+ }
+ }
+
+ pub fn impl_trait_bounds(&self, db: &dyn HirDatabase) -> Option<Vec<GenericPredicate>> {
+ match self {
+ Ty::Opaque(opaque_ty) => {
+ let predicates = match opaque_ty.opaque_ty_id {
+ OpaqueTyId::ReturnTypeImplTrait(func, idx) => {
+ db.return_type_impl_traits(func).map(|it| {
+ let data = (*it)
+ .as_ref()
+ .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+ data.subst(&opaque_ty.parameters)
+ })
+ }
+ };
+
+ predicates.map(|it| it.value)
+ }
+ Ty::Placeholder(id) => {
+ let generic_params = db.generic_params(id.parent);
+ let param_data = &generic_params.types[id.local_id];
+ match param_data.provenance {
+ hir_def::generics::TypeParamProvenance::ArgumentImplTrait => {
+ let predicates = db
+ .generic_predicates_for_param(*id)
+ .into_iter()
+ .map(|pred| pred.value.clone())
+ .collect_vec();
+
+ Some(predicates)
+ }
+ _ => None,
+ }
+ }
+ _ => None,
+ }
+ }
+
+ pub fn associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<TraitId> {
+ match self {
+ Ty::Apply(ApplicationTy { ctor: TypeCtor::AssociatedType(type_alias_id), .. }) => {
+ match type_alias_id.lookup(db.upcast()).container {
+ AssocContainerId::TraitId(trait_id) => Some(trait_id),
+ _ => None,
+ }
+ }
+ Ty::Projection(projection_ty) => {
+ match projection_ty.associated_ty.lookup(db.upcast()).container {
+ AssocContainerId::TraitId(trait_id) => Some(trait_id),
+ _ => None,
+ }
+ }
+ _ => None,
+ }
+ }
+}
+
+/// This allows walking structures that contain types to do something with those
+/// types, similar to Chalk's `Fold` trait.
+pub trait TypeWalk {
+ fn walk(&self, f: &mut impl FnMut(&Ty));
+ fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) {
+ self.walk_mut_binders(&mut |ty, _binders| f(ty), DebruijnIndex::INNERMOST);
+ }
+ /// Walk the type, counting entered binders.
+ ///
+ /// `Ty::Bound` variables use DeBruijn indexing, which means that 0 refers
+ /// to the innermost binder, 1 to the next, etc.. So when we want to
+ /// substitute a certain bound variable, we can't just walk the whole type
+ /// and blindly replace each instance of a certain index; when we 'enter'
+ /// things that introduce new bound variables, we have to keep track of
+ /// that. Currently, the only thing that introduces bound variables on our
+ /// side is `Ty::Dyn`, which introduces a bound variable for the self type
+ /// (note: `Ty::Opaque` parameters are walked here without shifting binders).
+ fn walk_mut_binders(
+ &mut self,
+ f: &mut impl FnMut(&mut Ty, DebruijnIndex),
+ binders: DebruijnIndex,
+ );
+
+ fn fold_binders(
+ mut self,
+ f: &mut impl FnMut(Ty, DebruijnIndex) -> Ty,
+ binders: DebruijnIndex,
+ ) -> Self
+ where
+ Self: Sized,
+ {
+ self.walk_mut_binders(
+ &mut |ty_mut, binders| {
+ let ty = mem::replace(ty_mut, Ty::Unknown);
+ *ty_mut = f(ty, binders);
+ },
+ binders,
+ );
+ self
+ }
+
+ fn fold(mut self, f: &mut impl FnMut(Ty) -> Ty) -> Self
+ where
+ Self: Sized,
+ {
+ self.walk_mut(&mut |ty_mut| {
+ let ty = mem::replace(ty_mut, Ty::Unknown);
+ *ty_mut = f(ty);
+ });
+ self
+ }
+
+ /// Substitutes `Ty::Bound` vars with the given substitution.
+ fn subst_bound_vars(self, substs: &Substs) -> Self
+ where
+ Self: Sized,
+ {
+ self.subst_bound_vars_at_depth(substs, DebruijnIndex::INNERMOST)
+ }
+
+ /// Substitutes `Ty::Bound` vars with the given substitution.
+ fn subst_bound_vars_at_depth(mut self, substs: &Substs, depth: DebruijnIndex) -> Self
+ where
+ Self: Sized,
+ {
+ self.walk_mut_binders(
+ &mut |ty, binders| {
+ if let &mut Ty::Bound(bound) = ty {
+ if bound.debruijn >= binders {
+ *ty = substs.0[bound.index].clone().shift_bound_vars(binders);
+ }
+ }
+ },
+ depth,
+ );
+ self
+ }
+
+ /// Shifts up debruijn indices of `Ty::Bound` vars by `n`.
+ fn shift_bound_vars(self, n: DebruijnIndex) -> Self
+ where
+ Self: Sized,
+ {
+ self.fold_binders(
+ &mut |ty, binders| match ty {
+ Ty::Bound(bound) if bound.debruijn >= binders => {
+ Ty::Bound(bound.shifted_in_from(n))
+ }
+ ty => ty,
+ },
+ DebruijnIndex::INNERMOST,
+ )
+ }
+}
+
+impl TypeWalk for Ty {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ match self {
+ Ty::Apply(a_ty) => {
+ for t in a_ty.parameters.iter() {
+ t.walk(f);
+ }
+ }
+ Ty::Projection(p_ty) => {
+ for t in p_ty.parameters.iter() {
+ t.walk(f);
+ }
+ }
+ Ty::Dyn(predicates) => {
+ for p in predicates.iter() {
+ p.walk(f);
+ }
+ }
+ Ty::Opaque(o_ty) => {
+ for t in o_ty.parameters.iter() {
+ t.walk(f);
+ }
+ }
+ Ty::Placeholder { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {}
+ }
+ f(self);
+ }
+
+ fn walk_mut_binders(
+ &mut self,
+ f: &mut impl FnMut(&mut Ty, DebruijnIndex),
+ binders: DebruijnIndex,
+ ) {
+ match self {
+ Ty::Apply(a_ty) => {
+ a_ty.parameters.walk_mut_binders(f, binders);
+ }
+ Ty::Projection(p_ty) => {
+ p_ty.parameters.walk_mut_binders(f, binders);
+ }
+ Ty::Dyn(predicates) => {
+ for p in make_mut_slice(predicates) {
+ p.walk_mut_binders(f, binders.shifted_in());
+ }
+ }
+ Ty::Opaque(o_ty) => {
+ o_ty.parameters.walk_mut_binders(f, binders);
+ }
+ Ty::Placeholder { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {}
+ }
+ f(self, binders);
+ }
+}
+
+impl<T: TypeWalk> TypeWalk for Vec<T> {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ for t in self {
+ t.walk(f);
+ }
+ }
+ fn walk_mut_binders(
+ &mut self,
+ f: &mut impl FnMut(&mut Ty, DebruijnIndex),
+ binders: DebruijnIndex,
+ ) {
+ for t in self {
+ t.walk_mut_binders(f, binders);
+ }
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
+pub enum OpaqueTyId {
+ ReturnTypeImplTrait(hir_def::FunctionId, u16),
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct ReturnTypeImplTraits {
+ pub(crate) impl_traits: Vec<ReturnTypeImplTrait>,
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) struct ReturnTypeImplTrait {
+ pub bounds: Binders<Vec<GenericPredicate>>,
+}
--- /dev/null
+//! Methods for lowering the HIR to types. There are two main cases here:
+//!
+//! - Lowering a type reference like `&usize` or `Option<foo::bar::Baz>` to a
+//! type: The entry point for this is `Ty::from_hir`.
+//! - Building the type for an item: This happens through the `type_for_def` query.
+//!
+//! This usually involves resolving names, collecting generic arguments etc.
+use std::{iter, sync::Arc};
+
+use arena::map::ArenaMap;
+use base_db::CrateId;
+use hir_def::{
+ adt::StructKind,
+ builtin_type::BuiltinType,
+ generics::{TypeParamProvenance, WherePredicate, WherePredicateTarget},
+ path::{GenericArg, Path, PathSegment, PathSegments},
+ resolver::{HasResolver, Resolver, TypeNs},
+ type_ref::{TypeBound, TypeRef},
+ AdtId, AssocContainerId, AssocItemId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId,
+ HasModule, ImplId, LocalFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeParamId,
+ UnionId, VariantId,
+};
+use hir_expand::name::Name;
+use smallvec::SmallVec;
+use stdx::impl_from;
+use test_utils::mark;
+
+use crate::{
+ db::HirDatabase,
+ primitive::{FloatTy, IntTy},
+ utils::{
+ all_super_trait_refs, associated_type_by_name_including_super_traits, generics,
+ make_mut_slice, variant_data,
+ },
+ Binders, BoundVar, DebruijnIndex, FnSig, GenericPredicate, OpaqueTy, OpaqueTyId, PolyFnSig,
+ ProjectionPredicate, ProjectionTy, ReturnTypeImplTrait, ReturnTypeImplTraits, Substs,
+ TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk,
+};
+
+#[derive(Debug)]
+pub struct TyLoweringContext<'a> {
+ pub db: &'a dyn HirDatabase,
+ pub resolver: &'a Resolver,
+ in_binders: DebruijnIndex,
+ /// Note: Conceptually, it's thinkable that we could be in a location where
+ /// some type params should be represented as placeholders, and others
+ /// should be converted to variables. I think in practice, this isn't
+ /// possible currently, so this should be fine for now.
+ pub type_param_mode: TypeParamLoweringMode,
+ pub impl_trait_mode: ImplTraitLoweringMode,
+ impl_trait_counter: std::cell::Cell<u16>,
+ /// When turning `impl Trait` into opaque types, we have to collect the
+ /// bounds at the same time to get the IDs correct (without becoming too
+ /// complicated). I don't like using interior mutability (as for the
+ /// counter), but I've tried and failed to make the lifetimes work for
+ /// passing around a `&mut TyLoweringContext`. The core problem is that
+ /// we're grouping the mutable data (the counter and this field) together
+ /// with the immutable context (the references to the DB and resolver).
+ /// Splitting this up would be a possible fix.
+ opaque_type_data: std::cell::RefCell<Vec<ReturnTypeImplTrait>>,
+}
+
+impl<'a> TyLoweringContext<'a> {
+ pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver) -> Self {
+ let impl_trait_counter = std::cell::Cell::new(0);
+ let impl_trait_mode = ImplTraitLoweringMode::Disallowed;
+ let type_param_mode = TypeParamLoweringMode::Placeholder;
+ let in_binders = DebruijnIndex::INNERMOST;
+ let opaque_type_data = std::cell::RefCell::new(Vec::new());
+ Self {
+ db,
+ resolver,
+ in_binders,
+ impl_trait_mode,
+ impl_trait_counter,
+ type_param_mode,
+ opaque_type_data,
+ }
+ }
+
+ pub fn with_debruijn<T>(
+ &self,
+ debruijn: DebruijnIndex,
+ f: impl FnOnce(&TyLoweringContext) -> T,
+ ) -> T {
+ let opaque_ty_data_vec = self.opaque_type_data.replace(Vec::new());
+ let new_ctx = Self {
+ in_binders: debruijn,
+ impl_trait_counter: std::cell::Cell::new(self.impl_trait_counter.get()),
+ opaque_type_data: std::cell::RefCell::new(opaque_ty_data_vec),
+ ..*self
+ };
+ let result = f(&new_ctx);
+ self.impl_trait_counter.set(new_ctx.impl_trait_counter.get());
+ self.opaque_type_data.replace(new_ctx.opaque_type_data.into_inner());
+ result
+ }
+
+ pub fn with_shifted_in<T>(
+ &self,
+ debruijn: DebruijnIndex,
+ f: impl FnOnce(&TyLoweringContext) -> T,
+ ) -> T {
+ self.with_debruijn(self.in_binders.shifted_in_from(debruijn), f)
+ }
+
+ pub fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self {
+ Self { impl_trait_mode, ..self }
+ }
+
+ pub fn with_type_param_mode(self, type_param_mode: TypeParamLoweringMode) -> Self {
+ Self { type_param_mode, ..self }
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum ImplTraitLoweringMode {
+ /// `impl Trait` gets lowered into an opaque type that doesn't unify with
+ /// anything except itself. This is used in places where values flow 'out',
+ /// i.e. for arguments of the function we're currently checking, and return
+ /// types of functions we're calling.
+ Opaque,
+ /// `impl Trait` gets lowered into a type variable. Used for argument
+ /// position impl Trait when inside the respective function, since it allows
+ /// us to support that without Chalk.
+ Param,
+ /// `impl Trait` gets lowered into a variable that can unify with some
+ /// type. This is used in places where values flow 'in', i.e. for arguments
+ /// of functions we're calling, and the return type of the function we're
+ /// currently checking.
+ Variable,
+ /// `impl Trait` is disallowed and will be an error.
+ Disallowed,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum TypeParamLoweringMode {
+ Placeholder,
+ Variable,
+}
+
+impl Ty {
+ pub fn from_hir(ctx: &TyLoweringContext<'_>, type_ref: &TypeRef) -> Self {
+ Ty::from_hir_ext(ctx, type_ref).0
+ }
+ pub fn from_hir_ext(ctx: &TyLoweringContext<'_>, type_ref: &TypeRef) -> (Self, Option<TypeNs>) {
+ let mut res = None;
+ let ty = match type_ref {
+ TypeRef::Never => Ty::simple(TypeCtor::Never),
+ TypeRef::Tuple(inner) => {
+ let inner_tys: Arc<[Ty]> = inner.iter().map(|tr| Ty::from_hir(ctx, tr)).collect();
+ Ty::apply(
+ TypeCtor::Tuple { cardinality: inner_tys.len() as u16 },
+ Substs(inner_tys),
+ )
+ }
+ TypeRef::Path(path) => {
+ let (ty, res_) = Ty::from_hir_path(ctx, path);
+ res = res_;
+ ty
+ }
+ TypeRef::RawPtr(inner, mutability) => {
+ let inner_ty = Ty::from_hir(ctx, inner);
+ Ty::apply_one(TypeCtor::RawPtr(*mutability), inner_ty)
+ }
+ TypeRef::Array(inner) => {
+ let inner_ty = Ty::from_hir(ctx, inner);
+ Ty::apply_one(TypeCtor::Array, inner_ty)
+ }
+ TypeRef::Slice(inner) => {
+ let inner_ty = Ty::from_hir(ctx, inner);
+ Ty::apply_one(TypeCtor::Slice, inner_ty)
+ }
+ TypeRef::Reference(inner, mutability) => {
+ let inner_ty = Ty::from_hir(ctx, inner);
+ Ty::apply_one(TypeCtor::Ref(*mutability), inner_ty)
+ }
+ TypeRef::Placeholder => Ty::Unknown,
+ TypeRef::Fn(params, is_varargs) => {
+ let sig = Substs(params.iter().map(|tr| Ty::from_hir(ctx, tr)).collect());
+ Ty::apply(
+ TypeCtor::FnPtr { num_args: sig.len() as u16 - 1, is_varargs: *is_varargs },
+ sig,
+ )
+ }
+ TypeRef::DynTrait(bounds) => {
+ let self_ty = Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, 0));
+ let predicates = ctx.with_shifted_in(DebruijnIndex::ONE, |ctx| {
+ bounds
+ .iter()
+ .flat_map(|b| GenericPredicate::from_type_bound(ctx, b, self_ty.clone()))
+ .collect()
+ });
+ Ty::Dyn(predicates)
+ }
+ TypeRef::ImplTrait(bounds) => {
+ match ctx.impl_trait_mode {
+ ImplTraitLoweringMode::Opaque => {
+ let idx = ctx.impl_trait_counter.get();
+ ctx.impl_trait_counter.set(idx + 1);
+
+ assert!(idx as usize == ctx.opaque_type_data.borrow().len());
+ // this dance is to make sure the data is in the right
+ // place even if we encounter more opaque types while
+ // lowering the bounds
+ ctx.opaque_type_data
+ .borrow_mut()
+ .push(ReturnTypeImplTrait { bounds: Binders::new(1, Vec::new()) });
+ // We don't want to lower the bounds inside the binders
+ // we're currently in, because they don't end up inside
+ // those binders. E.g. when we have `impl Trait<impl
+ // OtherTrait<T>>`, the `impl OtherTrait<T>` can't refer
+ // to the self parameter from `impl Trait`, and the
+ // bounds aren't actually stored nested within each
+ // other, but separately. So if the `T` refers to a type
+ // parameter of the outer function, it's just one binder
+ // away instead of two.
+ let actual_opaque_type_data = ctx
+ .with_debruijn(DebruijnIndex::INNERMOST, |ctx| {
+ ReturnTypeImplTrait::from_hir(ctx, &bounds)
+ });
+ ctx.opaque_type_data.borrow_mut()[idx as usize] = actual_opaque_type_data;
+
+ let func = match ctx.resolver.generic_def() {
+ Some(GenericDefId::FunctionId(f)) => f,
+ _ => panic!("opaque impl trait lowering in non-function"),
+ };
+ let impl_trait_id = OpaqueTyId::ReturnTypeImplTrait(func, idx);
+ let generics = generics(ctx.db.upcast(), func.into());
+ let parameters = Substs::bound_vars(&generics, ctx.in_binders);
+ Ty::Opaque(OpaqueTy { opaque_ty_id: impl_trait_id, parameters })
+ }
+ ImplTraitLoweringMode::Param => {
+ let idx = ctx.impl_trait_counter.get();
+ // FIXME we're probably doing something wrong here
+ ctx.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16);
+ if let Some(def) = ctx.resolver.generic_def() {
+ let generics = generics(ctx.db.upcast(), def);
+ let param = generics
+ .iter()
+ .filter(|(_, data)| {
+ data.provenance == TypeParamProvenance::ArgumentImplTrait
+ })
+ .nth(idx as usize)
+ .map_or(Ty::Unknown, |(id, _)| Ty::Placeholder(id));
+ param
+ } else {
+ Ty::Unknown
+ }
+ }
+ ImplTraitLoweringMode::Variable => {
+ let idx = ctx.impl_trait_counter.get();
+ // FIXME we're probably doing something wrong here
+ ctx.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16);
+ let (parent_params, self_params, list_params, _impl_trait_params) =
+ if let Some(def) = ctx.resolver.generic_def() {
+ let generics = generics(ctx.db.upcast(), def);
+ generics.provenance_split()
+ } else {
+ (0, 0, 0, 0)
+ };
+ Ty::Bound(BoundVar::new(
+ ctx.in_binders,
+ idx as usize + parent_params + self_params + list_params,
+ ))
+ }
+ ImplTraitLoweringMode::Disallowed => {
+ // FIXME: report error
+ Ty::Unknown
+ }
+ }
+ }
+ TypeRef::Error => Ty::Unknown,
+ };
+ (ty, res)
+ }
+
+ /// This is only for `generic_predicates_for_param`, where we can't just
+ /// lower the self types of the predicates since that could lead to cycles.
+ /// So we just check here if the `type_ref` resolves to a generic param, and which.
+ fn from_hir_only_param(ctx: &TyLoweringContext<'_>, type_ref: &TypeRef) -> Option<TypeParamId> {
+ let path = match type_ref {
+ TypeRef::Path(path) => path,
+ _ => return None,
+ };
+ if path.type_anchor().is_some() {
+ return None;
+ }
+ if path.segments().len() > 1 {
+ return None;
+ }
+ let resolution =
+ match ctx.resolver.resolve_path_in_type_ns(ctx.db.upcast(), path.mod_path()) {
+ Some((it, None)) => it,
+ _ => return None,
+ };
+ if let TypeNs::GenericParam(param_id) = resolution {
+ Some(param_id)
+ } else {
+ None
+ }
+ }
+
+ pub(crate) fn from_type_relative_path(
+ ctx: &TyLoweringContext<'_>,
+ ty: Ty,
+ // We need the original resolution to lower `Self::AssocTy` correctly
+ res: Option<TypeNs>,
+ remaining_segments: PathSegments<'_>,
+ ) -> (Ty, Option<TypeNs>) {
+ if remaining_segments.len() == 1 {
+ // resolve unselected assoc types
+ let segment = remaining_segments.first().unwrap();
+ (Ty::select_associated_type(ctx, res, segment), None)
+ } else if remaining_segments.len() > 1 {
+ // FIXME report error (ambiguous associated type)
+ (Ty::Unknown, None)
+ } else {
+ (ty, res)
+ }
+ }
+
+ pub(crate) fn from_partly_resolved_hir_path(
+ ctx: &TyLoweringContext<'_>,
+ resolution: TypeNs,
+ resolved_segment: PathSegment<'_>,
+ remaining_segments: PathSegments<'_>,
+ infer_args: bool,
+ ) -> (Ty, Option<TypeNs>) {
+ let ty = match resolution {
+ TypeNs::TraitId(trait_) => {
+ // if this is a bare dyn Trait, we'll directly put the required ^0 for the self type in there
+ let self_ty = if remaining_segments.len() == 0 {
+ Some(Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, 0)))
+ } else {
+ None
+ };
+ let trait_ref =
+ TraitRef::from_resolved_path(ctx, trait_, resolved_segment, self_ty);
+ let ty = if remaining_segments.len() == 1 {
+ let segment = remaining_segments.first().unwrap();
+ let found = associated_type_by_name_including_super_traits(
+ ctx.db,
+ trait_ref,
+ &segment.name,
+ );
+ match found {
+ Some((super_trait_ref, associated_ty)) => {
+ // FIXME handle type parameters on the segment
+ Ty::Projection(ProjectionTy {
+ associated_ty,
+ parameters: super_trait_ref.substs,
+ })
+ }
+ None => {
+ // FIXME: report error (associated type not found)
+ Ty::Unknown
+ }
+ }
+ } else if remaining_segments.len() > 1 {
+ // FIXME report error (ambiguous associated type)
+ Ty::Unknown
+ } else {
+ Ty::Dyn(Arc::new([GenericPredicate::Implemented(trait_ref)]))
+ };
+ return (ty, None);
+ }
+ TypeNs::GenericParam(param_id) => {
+ let generics = generics(
+ ctx.db.upcast(),
+ ctx.resolver.generic_def().expect("generics in scope"),
+ );
+ match ctx.type_param_mode {
+ TypeParamLoweringMode::Placeholder => Ty::Placeholder(param_id),
+ TypeParamLoweringMode::Variable => {
+ let idx = generics.param_idx(param_id).expect("matching generics");
+ Ty::Bound(BoundVar::new(ctx.in_binders, idx))
+ }
+ }
+ }
+ TypeNs::SelfType(impl_id) => {
+ let generics = generics(ctx.db.upcast(), impl_id.into());
+ let substs = match ctx.type_param_mode {
+ TypeParamLoweringMode::Placeholder => {
+ Substs::type_params_for_generics(&generics)
+ }
+ TypeParamLoweringMode::Variable => {
+ Substs::bound_vars(&generics, ctx.in_binders)
+ }
+ };
+ ctx.db.impl_self_ty(impl_id).subst(&substs)
+ }
+ TypeNs::AdtSelfType(adt) => {
+ let generics = generics(ctx.db.upcast(), adt.into());
+ let substs = match ctx.type_param_mode {
+ TypeParamLoweringMode::Placeholder => {
+ Substs::type_params_for_generics(&generics)
+ }
+ TypeParamLoweringMode::Variable => {
+ Substs::bound_vars(&generics, ctx.in_binders)
+ }
+ };
+ ctx.db.ty(adt.into()).subst(&substs)
+ }
+
+ TypeNs::AdtId(it) => {
+ Ty::from_hir_path_inner(ctx, resolved_segment, it.into(), infer_args)
+ }
+ TypeNs::BuiltinType(it) => {
+ Ty::from_hir_path_inner(ctx, resolved_segment, it.into(), infer_args)
+ }
+ TypeNs::TypeAliasId(it) => {
+ Ty::from_hir_path_inner(ctx, resolved_segment, it.into(), infer_args)
+ }
+ // FIXME: report error
+ TypeNs::EnumVariantId(_) => return (Ty::Unknown, None),
+ };
+
+ Ty::from_type_relative_path(ctx, ty, Some(resolution), remaining_segments)
+ }
+
+ pub(crate) fn from_hir_path(ctx: &TyLoweringContext<'_>, path: &Path) -> (Ty, Option<TypeNs>) {
+ // Resolve the path (in type namespace)
+ if let Some(type_ref) = path.type_anchor() {
+ let (ty, res) = Ty::from_hir_ext(ctx, &type_ref);
+ return Ty::from_type_relative_path(ctx, ty, res, path.segments());
+ }
+ let (resolution, remaining_index) =
+ match ctx.resolver.resolve_path_in_type_ns(ctx.db.upcast(), path.mod_path()) {
+ Some(it) => it,
+ None => return (Ty::Unknown, None),
+ };
+ let (resolved_segment, remaining_segments) = match remaining_index {
+ None => (
+ path.segments().last().expect("resolved path has at least one element"),
+ PathSegments::EMPTY,
+ ),
+ Some(i) => (path.segments().get(i - 1).unwrap(), path.segments().skip(i)),
+ };
+ Ty::from_partly_resolved_hir_path(
+ ctx,
+ resolution,
+ resolved_segment,
+ remaining_segments,
+ false,
+ )
+ }
+
+ fn select_associated_type(
+ ctx: &TyLoweringContext<'_>,
+ res: Option<TypeNs>,
+ segment: PathSegment<'_>,
+ ) -> Ty {
+ if let Some(res) = res {
+ let ty =
+ associated_type_shorthand_candidates(ctx.db, res, move |name, t, associated_ty| {
+ if name == segment.name {
+ let substs = match ctx.type_param_mode {
+ TypeParamLoweringMode::Placeholder => {
+ // if we're lowering to placeholders, we have to put
+ // them in now
+ let s = Substs::type_params(
+ ctx.db,
+ ctx.resolver.generic_def().expect(
+ "there should be generics if there's a generic param",
+ ),
+ );
+ t.substs.clone().subst_bound_vars(&s)
+ }
+ TypeParamLoweringMode::Variable => t.substs.clone(),
+ };
+ // We need to shift in the bound vars, since
+ // associated_type_shorthand_candidates does not do that
+ let substs = substs.shift_bound_vars(ctx.in_binders);
+ // FIXME handle type parameters on the segment
+ return Some(Ty::Projection(ProjectionTy {
+ associated_ty,
+ parameters: substs,
+ }));
+ }
+
+ None
+ });
+
+ ty.unwrap_or(Ty::Unknown)
+ } else {
+ Ty::Unknown
+ }
+ }
+
+ fn from_hir_path_inner(
+ ctx: &TyLoweringContext<'_>,
+ segment: PathSegment<'_>,
+ typable: TyDefId,
+ infer_args: bool,
+ ) -> Ty {
+ let generic_def = match typable {
+ TyDefId::BuiltinType(_) => None,
+ TyDefId::AdtId(it) => Some(it.into()),
+ TyDefId::TypeAliasId(it) => Some(it.into()),
+ };
+ let substs = substs_from_path_segment(ctx, segment, generic_def, infer_args);
+ ctx.db.ty(typable).subst(&substs)
+ }
+
+ /// Collect generic arguments from a path into a `Substs`. See also
+ /// `create_substs_for_ast_path` and `def_to_ty` in rustc.
+ pub(super) fn substs_from_path(
+ ctx: &TyLoweringContext<'_>,
+ path: &Path,
+ // Note that we don't call `db.value_type(resolved)` here,
+ // `ValueTyDefId` is just a convenient way to pass generics and
+ // special-case enum variants
+ resolved: ValueTyDefId,
+ infer_args: bool,
+ ) -> Substs {
+ let last = path.segments().last().expect("path should have at least one segment");
+ let (segment, generic_def) = match resolved {
+ ValueTyDefId::FunctionId(it) => (last, Some(it.into())),
+ ValueTyDefId::StructId(it) => (last, Some(it.into())),
+ ValueTyDefId::UnionId(it) => (last, Some(it.into())),
+ ValueTyDefId::ConstId(it) => (last, Some(it.into())),
+ ValueTyDefId::StaticId(_) => (last, None),
+ ValueTyDefId::EnumVariantId(var) => {
+ // the generic args for an enum variant may be either specified
+ // on the segment referring to the enum, or on the segment
+ // referring to the variant. So `Option::<T>::None` and
+ // `Option::None::<T>` are both allowed (though the former is
+ // preferred). See also `def_ids_for_path_segments` in rustc.
+ let len = path.segments().len();
+ let penultimate = if len >= 2 { path.segments().get(len - 2) } else { None };
+ let segment = match penultimate {
+ Some(segment) if segment.args_and_bindings.is_some() => segment,
+ _ => last,
+ };
+ (segment, Some(var.parent.into()))
+ }
+ };
+ substs_from_path_segment(ctx, segment, generic_def, infer_args)
+ }
+}
+
+fn substs_from_path_segment(
+ ctx: &TyLoweringContext<'_>,
+ segment: PathSegment<'_>,
+ def_generic: Option<GenericDefId>,
+ infer_args: bool,
+) -> Substs {
+ let mut substs = Vec::new();
+ let def_generics = def_generic.map(|def| generics(ctx.db.upcast(), def));
+
+ let (parent_params, self_params, type_params, impl_trait_params) =
+ def_generics.map_or((0, 0, 0, 0), |g| g.provenance_split());
+ let total_len = parent_params + self_params + type_params + impl_trait_params;
+
+ substs.extend(iter::repeat(Ty::Unknown).take(parent_params));
+
+ let mut had_explicit_args = false;
+
+ if let Some(generic_args) = &segment.args_and_bindings {
+ if !generic_args.has_self_type {
+ substs.extend(iter::repeat(Ty::Unknown).take(self_params));
+ }
+ let expected_num =
+ if generic_args.has_self_type { self_params + type_params } else { type_params };
+ let skip = if generic_args.has_self_type && self_params == 0 { 1 } else { 0 };
+ // if args are provided, it should be all of them, but we can't rely on that
+ for arg in generic_args.args.iter().skip(skip).take(expected_num) {
+ match arg {
+ GenericArg::Type(type_ref) => {
+ had_explicit_args = true;
+ let ty = Ty::from_hir(ctx, type_ref);
+ substs.push(ty);
+ }
+ }
+ }
+ }
+
+ // handle defaults. In expression or pattern path segments without
+ // explicitly specified type arguments, missing type arguments are inferred
+ // (i.e. defaults aren't used).
+ if !infer_args || had_explicit_args {
+ if let Some(def_generic) = def_generic {
+ let defaults = ctx.db.generic_defaults(def_generic);
+ assert_eq!(total_len, defaults.len());
+
+ for default_ty in defaults.iter().skip(substs.len()) {
+ // each default can depend on the previous parameters
+ let substs_so_far = Substs(substs.clone().into());
+ substs.push(default_ty.clone().subst(&substs_so_far));
+ }
+ }
+ }
+
+ // add placeholders for args that were not provided
+ // FIXME: emit diagnostics in contexts where this is not allowed
+ for _ in substs.len()..total_len {
+ substs.push(Ty::Unknown);
+ }
+ assert_eq!(substs.len(), total_len);
+
+ Substs(substs.into())
+}
+
+impl TraitRef {
+ fn from_path(
+ ctx: &TyLoweringContext<'_>,
+ path: &Path,
+ explicit_self_ty: Option<Ty>,
+ ) -> Option<Self> {
+ let resolved =
+ match ctx.resolver.resolve_path_in_type_ns_fully(ctx.db.upcast(), path.mod_path())? {
+ TypeNs::TraitId(tr) => tr,
+ _ => return None,
+ };
+ let segment = path.segments().last().expect("path should have at least one segment");
+ Some(TraitRef::from_resolved_path(ctx, resolved, segment, explicit_self_ty))
+ }
+
+ pub(crate) fn from_resolved_path(
+ ctx: &TyLoweringContext<'_>,
+ resolved: TraitId,
+ segment: PathSegment<'_>,
+ explicit_self_ty: Option<Ty>,
+ ) -> Self {
+ let mut substs = TraitRef::substs_from_path(ctx, segment, resolved);
+ if let Some(self_ty) = explicit_self_ty {
+ make_mut_slice(&mut substs.0)[0] = self_ty;
+ }
+ TraitRef { trait_: resolved, substs }
+ }
+
+ fn from_hir(
+ ctx: &TyLoweringContext<'_>,
+ type_ref: &TypeRef,
+ explicit_self_ty: Option<Ty>,
+ ) -> Option<Self> {
+ let path = match type_ref {
+ TypeRef::Path(path) => path,
+ _ => return None,
+ };
+ TraitRef::from_path(ctx, path, explicit_self_ty)
+ }
+
+ fn substs_from_path(
+ ctx: &TyLoweringContext<'_>,
+ segment: PathSegment<'_>,
+ resolved: TraitId,
+ ) -> Substs {
+ substs_from_path_segment(ctx, segment, Some(resolved.into()), false)
+ }
+
+ pub(crate) fn from_type_bound(
+ ctx: &TyLoweringContext<'_>,
+ bound: &TypeBound,
+ self_ty: Ty,
+ ) -> Option<TraitRef> {
+ match bound {
+ TypeBound::Path(path) => TraitRef::from_path(ctx, path, Some(self_ty)),
+ TypeBound::Error => None,
+ }
+ }
+}
+
+impl GenericPredicate {
+ pub(crate) fn from_where_predicate<'a>(
+ ctx: &'a TyLoweringContext<'a>,
+ where_predicate: &'a WherePredicate,
+ ) -> impl Iterator<Item = GenericPredicate> + 'a {
+ let self_ty = match &where_predicate.target {
+ WherePredicateTarget::TypeRef(type_ref) => Ty::from_hir(ctx, type_ref),
+ WherePredicateTarget::TypeParam(param_id) => {
+ let generic_def = ctx.resolver.generic_def().expect("generics in scope");
+ let generics = generics(ctx.db.upcast(), generic_def);
+ let param_id = hir_def::TypeParamId { parent: generic_def, local_id: *param_id };
+ match ctx.type_param_mode {
+ TypeParamLoweringMode::Placeholder => Ty::Placeholder(param_id),
+ TypeParamLoweringMode::Variable => {
+ let idx = generics.param_idx(param_id).expect("matching generics");
+ Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, idx))
+ }
+ }
+ }
+ };
+ GenericPredicate::from_type_bound(ctx, &where_predicate.bound, self_ty)
+ }
+
+ pub(crate) fn from_type_bound<'a>(
+ ctx: &'a TyLoweringContext<'a>,
+ bound: &'a TypeBound,
+ self_ty: Ty,
+ ) -> impl Iterator<Item = GenericPredicate> + 'a {
+ let trait_ref = TraitRef::from_type_bound(ctx, bound, self_ty);
+ iter::once(trait_ref.clone().map_or(GenericPredicate::Error, GenericPredicate::Implemented))
+ .chain(
+ trait_ref
+ .into_iter()
+ .flat_map(move |tr| assoc_type_bindings_from_type_bound(ctx, bound, tr)),
+ )
+ }
+}
+
+fn assoc_type_bindings_from_type_bound<'a>(
+ ctx: &'a TyLoweringContext<'a>,
+ bound: &'a TypeBound,
+ trait_ref: TraitRef,
+) -> impl Iterator<Item = GenericPredicate> + 'a {
+ let last_segment = match bound {
+ TypeBound::Path(path) => path.segments().last(),
+ TypeBound::Error => None,
+ };
+ last_segment
+ .into_iter()
+ .flat_map(|segment| segment.args_and_bindings.into_iter())
+ .flat_map(|args_and_bindings| args_and_bindings.bindings.iter())
+ .flat_map(move |binding| {
+ let found = associated_type_by_name_including_super_traits(
+ ctx.db,
+ trait_ref.clone(),
+ &binding.name,
+ );
+ let (super_trait_ref, associated_ty) = match found {
+ None => return SmallVec::<[GenericPredicate; 1]>::new(),
+ Some(t) => t,
+ };
+ let projection_ty = ProjectionTy { associated_ty, parameters: super_trait_ref.substs };
+ let mut preds = SmallVec::with_capacity(
+ binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),
+ );
+ if let Some(type_ref) = &binding.type_ref {
+ let ty = Ty::from_hir(ctx, type_ref);
+ let projection_predicate =
+ ProjectionPredicate { projection_ty: projection_ty.clone(), ty };
+ preds.push(GenericPredicate::Projection(projection_predicate));
+ }
+ for bound in &binding.bounds {
+ preds.extend(GenericPredicate::from_type_bound(
+ ctx,
+ bound,
+ Ty::Projection(projection_ty.clone()),
+ ));
+ }
+ preds
+ })
+}
+
+impl ReturnTypeImplTrait {
+ fn from_hir(ctx: &TyLoweringContext, bounds: &[TypeBound]) -> Self {
+ mark::hit!(lower_rpit);
+ let self_ty = Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, 0));
+ let predicates = ctx.with_shifted_in(DebruijnIndex::ONE, |ctx| {
+ bounds
+ .iter()
+ .flat_map(|b| GenericPredicate::from_type_bound(ctx, b, self_ty.clone()))
+ .collect()
+ });
+ ReturnTypeImplTrait { bounds: Binders::new(1, predicates) }
+ }
+}
+
+fn count_impl_traits(type_ref: &TypeRef) -> usize {
+ let mut count = 0;
+ type_ref.walk(&mut |type_ref| {
+ if matches!(type_ref, TypeRef::ImplTrait(_)) {
+ count += 1;
+ }
+ });
+ count
+}
+
+/// Build the signature of a callable item (function, struct or enum variant).
+pub fn callable_item_sig(db: &dyn HirDatabase, def: CallableDefId) -> PolyFnSig {
+ match def {
+ CallableDefId::FunctionId(f) => fn_sig_for_fn(db, f),
+ CallableDefId::StructId(s) => fn_sig_for_struct_constructor(db, s),
+ CallableDefId::EnumVariantId(e) => fn_sig_for_enum_variant_constructor(db, e),
+ }
+}
+
+pub fn associated_type_shorthand_candidates<R>(
+ db: &dyn HirDatabase,
+ res: TypeNs,
+ mut cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option<R>,
+) -> Option<R> {
+ let traits_from_env: Vec<_> = match res {
+ TypeNs::SelfType(impl_id) => match db.impl_trait(impl_id) {
+ None => vec![],
+ Some(trait_ref) => vec![trait_ref.value],
+ },
+ TypeNs::GenericParam(param_id) => {
+ let predicates = db.generic_predicates_for_param(param_id);
+ let mut traits_: Vec<_> = predicates
+ .iter()
+ .filter_map(|pred| match &pred.value {
+ GenericPredicate::Implemented(tr) => Some(tr.clone()),
+ _ => None,
+ })
+ .collect();
+ // Handle `Self::Type` referring to own associated type in trait definitions
+ if let GenericDefId::TraitId(trait_id) = param_id.parent {
+ let generics = generics(db.upcast(), trait_id.into());
+ if generics.params.types[param_id.local_id].provenance
+ == TypeParamProvenance::TraitSelf
+ {
+ let trait_ref = TraitRef {
+ trait_: trait_id,
+ substs: Substs::bound_vars(&generics, DebruijnIndex::INNERMOST),
+ };
+ traits_.push(trait_ref);
+ }
+ }
+ traits_
+ }
+ _ => vec![],
+ };
+
+ for t in traits_from_env.into_iter().flat_map(move |t| all_super_trait_refs(db, t)) {
+ let data = db.trait_data(t.trait_);
+
+ for (name, assoc_id) in &data.items {
+ match assoc_id {
+ AssocItemId::TypeAliasId(alias) => {
+ if let Some(result) = cb(name, &t, *alias) {
+ return Some(result);
+ }
+ }
+ AssocItemId::FunctionId(_) | AssocItemId::ConstId(_) => {}
+ }
+ }
+ }
+
+ None
+}
+
+/// Build the type of all specific fields of a struct or enum variant.
+pub(crate) fn field_types_query(
+ db: &dyn HirDatabase,
+ variant_id: VariantId,
+) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>> {
+ let var_data = variant_data(db.upcast(), variant_id);
+ let (resolver, def): (_, GenericDefId) = match variant_id {
+ VariantId::StructId(it) => (it.resolver(db.upcast()), it.into()),
+ VariantId::UnionId(it) => (it.resolver(db.upcast()), it.into()),
+ VariantId::EnumVariantId(it) => (it.parent.resolver(db.upcast()), it.parent.into()),
+ };
+ let generics = generics(db.upcast(), def);
+ let mut res = ArenaMap::default();
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
+ for (field_id, field_data) in var_data.fields().iter() {
+ res.insert(field_id, Binders::new(generics.len(), Ty::from_hir(&ctx, &field_data.type_ref)))
+ }
+ Arc::new(res)
+}
+
+/// This query exists only to be used when resolving short-hand associated types
+/// like `T::Item`.
+///
+/// See the analogous query in rustc and its comment:
+/// https://github.com/rust-lang/rust/blob/9150f844e2624eb013ec78ca08c1d416e6644026/src/librustc_typeck/astconv.rs#L46
+/// This is a query mostly to handle cycles somewhat gracefully; e.g. the
+/// following bounds are disallowed: `T: Foo<U::Item>, U: Foo<T::Item>`, but
+/// these are fine: `T: Foo<U::Item>, U: Foo<()>`.
+pub(crate) fn generic_predicates_for_param_query(
+ db: &dyn HirDatabase,
+ param_id: TypeParamId,
+) -> Arc<[Binders<GenericPredicate>]> {
+ let resolver = param_id.parent.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
+ let generics = generics(db.upcast(), param_id.parent);
+ resolver
+ .where_predicates_in_scope()
+ // we have to filter out all other predicates *first*, before attempting to lower them
+ .filter(|pred| match &pred.target {
+ WherePredicateTarget::TypeRef(type_ref) => {
+ Ty::from_hir_only_param(&ctx, type_ref) == Some(param_id)
+ }
+ WherePredicateTarget::TypeParam(local_id) => *local_id == param_id.local_id,
+ })
+ .flat_map(|pred| {
+ GenericPredicate::from_where_predicate(&ctx, pred)
+ .map(|p| Binders::new(generics.len(), p))
+ })
+ .collect()
+}
+
+pub(crate) fn generic_predicates_for_param_recover(
+ _db: &dyn HirDatabase,
+ _cycle: &[String],
+ _param_id: &TypeParamId,
+) -> Arc<[Binders<GenericPredicate>]> {
+ Arc::new([])
+}
+
+impl TraitEnvironment {
+ pub fn lower(db: &dyn HirDatabase, resolver: &Resolver) -> Arc<TraitEnvironment> {
+ let ctx = TyLoweringContext::new(db, &resolver)
+ .with_type_param_mode(TypeParamLoweringMode::Placeholder);
+ let mut predicates = resolver
+ .where_predicates_in_scope()
+ .flat_map(|pred| GenericPredicate::from_where_predicate(&ctx, pred))
+ .collect::<Vec<_>>();
+
+ if let Some(def) = resolver.generic_def() {
+ let container: Option<AssocContainerId> = match def {
+ // FIXME: is there a function for this?
+ GenericDefId::FunctionId(f) => Some(f.lookup(db.upcast()).container),
+ GenericDefId::AdtId(_) => None,
+ GenericDefId::TraitId(_) => None,
+ GenericDefId::TypeAliasId(t) => Some(t.lookup(db.upcast()).container),
+ GenericDefId::ImplId(_) => None,
+ GenericDefId::EnumVariantId(_) => None,
+ GenericDefId::ConstId(c) => Some(c.lookup(db.upcast()).container),
+ };
+ if let Some(AssocContainerId::TraitId(trait_id)) = container {
+ // add `Self: Trait<T1, T2, ...>` to the environment in trait
+ // function default implementations (and hypothetical code
+ // inside consts or type aliases)
+ test_utils::mark::hit!(trait_self_implements_self);
+ let substs = Substs::type_params(db, trait_id);
+ let trait_ref = TraitRef { trait_: trait_id, substs };
+ let pred = GenericPredicate::Implemented(trait_ref);
+
+ predicates.push(pred);
+ }
+ }
+
+ Arc::new(TraitEnvironment { predicates })
+ }
+}
+
+/// Resolve the where clause(s) of an item with generics.
+pub(crate) fn generic_predicates_query(
+ db: &dyn HirDatabase,
+ def: GenericDefId,
+) -> Arc<[Binders<GenericPredicate>]> {
+ let resolver = def.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
+ let generics = generics(db.upcast(), def);
+ resolver
+ .where_predicates_in_scope()
+ .flat_map(|pred| {
+ GenericPredicate::from_where_predicate(&ctx, pred)
+ .map(|p| Binders::new(generics.len(), p))
+ })
+ .collect()
+}
+
+/// Resolve the default type params from generics
+pub(crate) fn generic_defaults_query(
+ db: &dyn HirDatabase,
+ def: GenericDefId,
+) -> Arc<[Binders<Ty>]> {
+ let resolver = def.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
+ let generic_params = generics(db.upcast(), def);
+
+ let defaults = generic_params
+ .iter()
+ .enumerate()
+ .map(|(idx, (_, p))| {
+ let mut ty = p.default.as_ref().map_or(Ty::Unknown, |t| Ty::from_hir(&ctx, t));
+
+ // Each default can only refer to previous parameters.
+ ty.walk_mut_binders(
+ &mut |ty, binders| match ty {
+ Ty::Bound(BoundVar { debruijn, index }) if *debruijn == binders => {
+ if *index >= idx {
+ // type variable default referring to parameter coming
+ // after it. This is forbidden (FIXME: report
+ // diagnostic)
+ *ty = Ty::Unknown;
+ }
+ }
+ _ => {}
+ },
+ DebruijnIndex::INNERMOST,
+ );
+
+ Binders::new(idx, ty)
+ })
+ .collect();
+
+ defaults
+}
+
+fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
+ let data = db.function_data(def);
+ let resolver = def.resolver(db.upcast());
+ let ctx_params = TyLoweringContext::new(db, &resolver)
+ .with_impl_trait_mode(ImplTraitLoweringMode::Variable)
+ .with_type_param_mode(TypeParamLoweringMode::Variable);
+ let params = data.params.iter().map(|tr| Ty::from_hir(&ctx_params, tr)).collect::<Vec<_>>();
+ let ctx_ret = TyLoweringContext::new(db, &resolver)
+ .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
+ .with_type_param_mode(TypeParamLoweringMode::Variable);
+ let ret = Ty::from_hir(&ctx_ret, &data.ret_type);
+ let generics = generics(db.upcast(), def.into());
+ let num_binders = generics.len();
+ Binders::new(num_binders, FnSig::from_params_and_return(params, ret, data.is_varargs))
+}
+
+/// Build the declared type of a function. This should not need to look at the
+/// function body.
+fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> Binders<Ty> {
+ let generics = generics(db.upcast(), def.into());
+ let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST);
+ Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs))
+}
+
+/// Build the declared type of a const.
+fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders<Ty> {
+ let data = db.const_data(def);
+ let generics = generics(db.upcast(), def.into());
+ let resolver = def.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
+
+ Binders::new(generics.len(), Ty::from_hir(&ctx, &data.type_ref))
+}
+
+/// Build the declared type of a static.
+fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> Binders<Ty> {
+ let data = db.static_data(def);
+ let resolver = def.resolver(db.upcast());
+ let ctx = TyLoweringContext::new(db, &resolver);
+
+ Binders::new(0, Ty::from_hir(&ctx, &data.type_ref))
+}
+
+/// Build the declared type of a static.
+fn type_for_builtin(def: BuiltinType) -> Ty {
+ Ty::simple(match def {
+ BuiltinType::Char => TypeCtor::Char,
+ BuiltinType::Bool => TypeCtor::Bool,
+ BuiltinType::Str => TypeCtor::Str,
+ BuiltinType::Int(t) => TypeCtor::Int(IntTy::from(t).into()),
+ BuiltinType::Float(t) => TypeCtor::Float(FloatTy::from(t).into()),
+ })
+}
+
+fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnSig {
+ let struct_data = db.struct_data(def);
+ let fields = struct_data.variant_data.fields();
+ let resolver = def.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
+ let params =
+ fields.iter().map(|(_, field)| Ty::from_hir(&ctx, &field.type_ref)).collect::<Vec<_>>();
+ let ret = type_for_adt(db, def.into());
+ Binders::new(ret.num_binders, FnSig::from_params_and_return(params, ret.value, false))
+}
+
+/// Build the type of a tuple struct constructor.
+fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Binders<Ty> {
+ let struct_data = db.struct_data(def);
+ if let StructKind::Unit = struct_data.variant_data.kind() {
+ return type_for_adt(db, def.into());
+ }
+ let generics = generics(db.upcast(), def.into());
+ let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST);
+ Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs))
+}
+
+fn fn_sig_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> PolyFnSig {
+ let enum_data = db.enum_data(def.parent);
+ let var_data = &enum_data.variants[def.local_id];
+ let fields = var_data.variant_data.fields();
+ let resolver = def.parent.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
+ let params =
+ fields.iter().map(|(_, field)| Ty::from_hir(&ctx, &field.type_ref)).collect::<Vec<_>>();
+ let ret = type_for_adt(db, def.parent.into());
+ Binders::new(ret.num_binders, FnSig::from_params_and_return(params, ret.value, false))
+}
+
+/// Build the type of a tuple enum variant constructor.
+fn type_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> Binders<Ty> {
+ let enum_data = db.enum_data(def.parent);
+ let var_data = &enum_data.variants[def.local_id].variant_data;
+ if let StructKind::Unit = var_data.kind() {
+ return type_for_adt(db, def.parent.into());
+ }
+ let generics = generics(db.upcast(), def.parent.into());
+ let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST);
+ Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs))
+}
+
+fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
+ let generics = generics(db.upcast(), adt.into());
+ let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST);
+ Binders::new(substs.len(), Ty::apply(TypeCtor::Adt(adt), substs))
+}
+
+fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
+ let generics = generics(db.upcast(), t.into());
+ let resolver = t.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
+ let type_ref = &db.type_alias_data(t).type_ref;
+ let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST);
+ let inner = Ty::from_hir(&ctx, type_ref.as_ref().unwrap_or(&TypeRef::Error));
+ Binders::new(substs.len(), inner)
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum CallableDefId {
+ FunctionId(FunctionId),
+ StructId(StructId),
+ EnumVariantId(EnumVariantId),
+}
+impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId);
+
+impl CallableDefId {
+ pub fn krate(self, db: &dyn HirDatabase) -> CrateId {
+ let db = db.upcast();
+ match self {
+ CallableDefId::FunctionId(f) => f.lookup(db).module(db),
+ CallableDefId::StructId(s) => s.lookup(db).container.module(db),
+ CallableDefId::EnumVariantId(e) => e.parent.lookup(db).container.module(db),
+ }
+ .krate
+ }
+}
+
+impl From<CallableDefId> for GenericDefId {
+ fn from(def: CallableDefId) -> GenericDefId {
+ match def {
+ CallableDefId::FunctionId(f) => f.into(),
+ CallableDefId::StructId(s) => s.into(),
+ CallableDefId::EnumVariantId(e) => e.into(),
+ }
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum TyDefId {
+ BuiltinType(BuiltinType),
+ AdtId(AdtId),
+ TypeAliasId(TypeAliasId),
+}
+impl_from!(BuiltinType, AdtId(StructId, EnumId, UnionId), TypeAliasId for TyDefId);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ValueTyDefId {
+ FunctionId(FunctionId),
+ StructId(StructId),
+ UnionId(UnionId),
+ EnumVariantId(EnumVariantId),
+ ConstId(ConstId),
+ StaticId(StaticId),
+}
+impl_from!(FunctionId, StructId, UnionId, EnumVariantId, ConstId, StaticId for ValueTyDefId);
+
+/// Build the declared type of an item. This depends on the namespace; e.g. for
+/// `struct Foo(usize)`, we have two types: The type of the struct itself, and
+/// the constructor function `(usize) -> Foo` which lives in the values
+/// namespace.
+pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders<Ty> {
+ match def {
+ // Builtins have no generic parameters, hence zero binders.
+ TyDefId::BuiltinType(it) => Binders::new(0, type_for_builtin(it)),
+ TyDefId::AdtId(it) => type_for_adt(db, it),
+ TyDefId::TypeAliasId(it) => type_for_type_alias(db, it),
+ }
+}
+
+/// Cycle recovery for the `ty` query: yields `Ty::Unknown` wrapped in the
+/// same number of binders the definition's generics would have produced.
+pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &[String], def: &TyDefId) -> Binders<Ty> {
+ let num_binders = match *def {
+ TyDefId::BuiltinType(_) => 0,
+ TyDefId::AdtId(it) => generics(db.upcast(), it.into()).len(),
+ TyDefId::TypeAliasId(it) => generics(db.upcast(), it.into()).len(),
+ };
+ Binders::new(num_binders, Ty::Unknown)
+}
+
+/// Build the declared type of an item in the *value* namespace (see
+/// `ty_query` for the type namespace counterpart).
+pub(crate) fn value_ty_query(db: &dyn HirDatabase, def: ValueTyDefId) -> Binders<Ty> {
+ match def {
+ ValueTyDefId::FunctionId(it) => type_for_fn(db, it),
+ ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it),
+ ValueTyDefId::UnionId(it) => type_for_adt(db, it.into()),
+ ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it),
+ ValueTyDefId::ConstId(it) => type_for_const(db, it),
+ ValueTyDefId::StaticId(it) => type_for_static(db, it),
+ }
+}
+
+/// Lowers the self type of an impl block (the `Bar` in `impl Foo for Bar`),
+/// with the impl's generic parameters bound.
+pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binders<Ty> {
+ let impl_data = db.impl_data(impl_id);
+ let resolver = impl_id.resolver(db.upcast());
+ let generics = generics(db.upcast(), impl_id.into());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
+ Binders::new(generics.len(), Ty::from_hir(&ctx, &impl_data.target_type))
+}
+
+/// Cycle recovery for `impl_self_ty`: `Ty::Unknown` under the impl's binders.
+pub(crate) fn impl_self_ty_recover(
+ db: &dyn HirDatabase,
+ _cycle: &[String],
+ impl_id: &ImplId,
+) -> Binders<Ty> {
+ let generics = generics(db.upcast(), (*impl_id).into());
+ Binders::new(generics.len(), Ty::Unknown)
+}
+
+/// Lowers the trait reference of a trait impl. Returns `None` for inherent
+/// impls (no target trait) or when the trait ref fails to lower.
+pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option<Binders<TraitRef>> {
+ let impl_data = db.impl_data(impl_id);
+ let resolver = impl_id.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
+ let self_ty = db.impl_self_ty(impl_id);
+ let target_trait = impl_data.target_trait.as_ref()?;
+ Some(Binders::new(
+ self_ty.num_binders,
+ TraitRef::from_hir(&ctx, target_trait, Some(self_ty.value))?,
+ ))
+}
+
+/// Collects the `impl Trait` types appearing in a function's return type,
+/// lowered in opaque mode. Returns `None` if the return type contains no
+/// `impl Trait`.
+pub(crate) fn return_type_impl_traits(
+ db: &dyn HirDatabase,
+ def: hir_def::FunctionId,
+) -> Option<Arc<Binders<ReturnTypeImplTraits>>> {
+ // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe
+ let data = db.function_data(def);
+ let resolver = def.resolver(db.upcast());
+ let ctx_ret = TyLoweringContext::new(db, &resolver)
+ .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
+ .with_type_param_mode(TypeParamLoweringMode::Variable);
+ // Lowering is performed only for its side effect of populating the
+ // context's `opaque_type_data`; the resulting type itself is unused.
+ let _ret = Ty::from_hir(&ctx_ret, &data.ret_type);
+ let generics = generics(db.upcast(), def.into());
+ let num_binders = generics.len();
+ let return_type_impl_traits =
+ ReturnTypeImplTraits { impl_traits: ctx_ret.opaque_type_data.into_inner() };
+ if return_type_impl_traits.impl_traits.is_empty() {
+ None
+ } else {
+ Some(Arc::new(Binders::new(num_binders, return_type_impl_traits)))
+ }
+}
--- /dev/null
+//! This module is concerned with finding methods that a given type provides.
+//! For details about how this works in rustc, see the method lookup page in the
+//! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html)
+//! and the corresponding code mostly in librustc_typeck/check/method/probe.rs.
+use std::{iter, sync::Arc};
+
+use arrayvec::ArrayVec;
+use base_db::CrateId;
+use hir_def::{
+ builtin_type::{IntBitness, Signedness},
+ lang_item::LangItemTarget,
+ type_ref::Mutability,
+ AssocContainerId, AssocItemId, FunctionId, HasModule, ImplId, Lookup, TraitId,
+};
+use hir_expand::name::Name;
+use rustc_hash::{FxHashMap, FxHashSet};
+
+use super::Substs;
+use crate::{
+ autoderef,
+ db::HirDatabase,
+ primitive::{FloatBitness, FloatTy, IntTy},
+ utils::all_super_traits,
+ ApplicationTy, Canonical, DebruijnIndex, InEnvironment, TraitEnvironment, TraitRef, Ty, TyKind,
+ TypeCtor, TypeWalk,
+};
+
+/// This is used as a key for indexing impls.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum TyFingerprint {
+ // Currently the only fingerprint is the type constructor of an
+ // application type; other `Ty` variants produce no fingerprint (see
+ // `for_impl` below).
+ Apply(TypeCtor),
+}
+
+impl TyFingerprint {
+ /// Creates a TyFingerprint for looking up an impl. Only certain types can
+ /// have impls: if we have some `struct S`, we can have an `impl S`, but not
+ /// `impl &S`. Hence, this will return `None` for reference types and such.
+ pub(crate) fn for_impl(ty: &Ty) -> Option<TyFingerprint> {
+ match ty {
+ Ty::Apply(a_ty) => Some(TyFingerprint::Apply(a_ty.ctor)),
+ _ => None,
+ }
+ }
+}
+
+/// Fingerprints of every primitive integer type: all six unsigned widths
+/// followed by all six signed widths (8..128 bit plus the pointer-sized one).
+pub(crate) const ALL_INT_FPS: [TyFingerprint; 12] = [
+ TyFingerprint::Apply(TypeCtor::Int(IntTy {
+ signedness: Signedness::Unsigned,
+ bitness: IntBitness::X8,
+ })),
+ TyFingerprint::Apply(TypeCtor::Int(IntTy {
+ signedness: Signedness::Unsigned,
+ bitness: IntBitness::X16,
+ })),
+ TyFingerprint::Apply(TypeCtor::Int(IntTy {
+ signedness: Signedness::Unsigned,
+ bitness: IntBitness::X32,
+ })),
+ TyFingerprint::Apply(TypeCtor::Int(IntTy {
+ signedness: Signedness::Unsigned,
+ bitness: IntBitness::X64,
+ })),
+ TyFingerprint::Apply(TypeCtor::Int(IntTy {
+ signedness: Signedness::Unsigned,
+ bitness: IntBitness::X128,
+ })),
+ TyFingerprint::Apply(TypeCtor::Int(IntTy {
+ signedness: Signedness::Unsigned,
+ bitness: IntBitness::Xsize,
+ })),
+ TyFingerprint::Apply(TypeCtor::Int(IntTy {
+ signedness: Signedness::Signed,
+ bitness: IntBitness::X8,
+ })),
+ TyFingerprint::Apply(TypeCtor::Int(IntTy {
+ signedness: Signedness::Signed,
+ bitness: IntBitness::X16,
+ })),
+ TyFingerprint::Apply(TypeCtor::Int(IntTy {
+ signedness: Signedness::Signed,
+ bitness: IntBitness::X32,
+ })),
+ TyFingerprint::Apply(TypeCtor::Int(IntTy {
+ signedness: Signedness::Signed,
+ bitness: IntBitness::X64,
+ })),
+ TyFingerprint::Apply(TypeCtor::Int(IntTy {
+ signedness: Signedness::Signed,
+ bitness: IntBitness::X128,
+ })),
+ TyFingerprint::Apply(TypeCtor::Int(IntTy {
+ signedness: Signedness::Signed,
+ bitness: IntBitness::Xsize,
+ })),
+];
+
+/// Fingerprints of the two primitive float types (`f32` and `f64`).
+pub(crate) const ALL_FLOAT_FPS: [TyFingerprint; 2] = [
+ TyFingerprint::Apply(TypeCtor::Float(FloatTy { bitness: FloatBitness::X32 })),
+ TyFingerprint::Apply(TypeCtor::Float(FloatTy { bitness: FloatBitness::X64 })),
+];
+
+/// Trait impls defined or available in some crate, indexed first by trait
+/// and then by self-type fingerprint for fast lookup.
+#[derive(Debug, Eq, PartialEq)]
+pub struct TraitImpls {
+ // If the `Option<TyFingerprint>` is `None`, the impl may apply to any self type.
+ map: FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Vec<ImplId>>>,
+}
+
+impl TraitImpls {
+ /// Collects every trait impl declared in `krate`. Inherent impls (those
+ /// without a target trait) are skipped.
+ pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
+ let _p = profile::span("trait_impls_in_crate_query");
+ let mut impls = Self { map: FxHashMap::default() };
+
+ let crate_def_map = db.crate_def_map(krate);
+ for (_module_id, module_data) in crate_def_map.modules.iter() {
+ for impl_id in module_data.scope.impls() {
+ let target_trait = match db.impl_trait(impl_id) {
+ Some(tr) => tr.value.trait_,
+ None => continue,
+ };
+ let self_ty = db.impl_self_ty(impl_id);
+ let self_ty_fp = TyFingerprint::for_impl(&self_ty.value);
+ impls
+ .map
+ .entry(target_trait)
+ .or_default()
+ .entry(self_ty_fp)
+ .or_default()
+ .push(impl_id);
+ }
+ }
+
+ Arc::new(impls)
+ }
+
+ /// Collects the trait impls of `krate` and all of its transitive
+ /// dependencies, merged into a single index.
+ pub(crate) fn trait_impls_in_deps_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
+ let _p = profile::span("trait_impls_in_deps_query");
+ let crate_graph = db.crate_graph();
+ let mut res = Self { map: FxHashMap::default() };
+
+ for krate in crate_graph.transitive_deps(krate) {
+ res.merge(&db.trait_impls_in_crate(krate));
+ }
+
+ Arc::new(res)
+ }
+
+ /// Unions `other`'s impls into `self`, keeping the same index structure.
+ fn merge(&mut self, other: &Self) {
+ for (trait_, other_map) in &other.map {
+ let map = self.map.entry(*trait_).or_default();
+ for (fp, impls) in other_map {
+ let vec = map.entry(*fp).or_default();
+ vec.extend(impls);
+ }
+ }
+ }
+
+ /// Queries all impls of the given trait.
+ pub fn for_trait(&self, trait_: TraitId) -> impl Iterator<Item = ImplId> + '_ {
+ self.map
+ .get(&trait_)
+ .into_iter()
+ .flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
+ }
+
+ /// Queries all impls of `trait_` that may apply to `self_ty`.
+ pub fn for_trait_and_self_ty(
+ &self,
+ trait_: TraitId,
+ self_ty: TyFingerprint,
+ ) -> impl Iterator<Item = ImplId> + '_ {
+ self.map
+ .get(&trait_)
+ .into_iter()
+ // Impls recorded with `None` fingerprint may apply to any self type.
+ .flat_map(move |map| map.get(&None).into_iter().chain(map.get(&Some(self_ty))))
+ .flat_map(|v| v.iter().copied())
+ }
+
+ /// Iterates every impl in the index, in no particular order.
+ pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
+ self.map.values().flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
+ }
+}
+
+/// Inherent impls defined in some crate.
+///
+/// Inherent impls can only be defined in the crate that also defines the self type of the impl
+/// (note that some primitives are considered to be defined by both libcore and liballoc).
+///
+/// This makes inherent impl lookup easier than trait impl lookup since we only have to consider a
+/// single crate.
+#[derive(Debug, Eq, PartialEq)]
+pub struct InherentImpls {
+ // Impls indexed by the fingerprint of their self type.
+ map: FxHashMap<TyFingerprint, Vec<ImplId>>,
+}
+
+impl InherentImpls {
+ /// Collects every inherent impl declared in `krate`; trait impls (those
+ /// with a target trait) and impls with unfingerprintable self types are
+ /// skipped.
+ pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
+ let mut map: FxHashMap<_, Vec<_>> = FxHashMap::default();
+
+ let crate_def_map = db.crate_def_map(krate);
+ for (_module_id, module_data) in crate_def_map.modules.iter() {
+ for impl_id in module_data.scope.impls() {
+ let data = db.impl_data(impl_id);
+ if data.target_trait.is_some() {
+ continue;
+ }
+
+ let self_ty = db.impl_self_ty(impl_id);
+ if let Some(fp) = TyFingerprint::for_impl(&self_ty.value) {
+ map.entry(fp).or_default().push(impl_id);
+ }
+ }
+ }
+
+ Arc::new(Self { map })
+ }
+
+ /// Returns the impls whose self type has the same fingerprint as
+ /// `self_ty`; empty if the type has no fingerprint.
+ pub fn for_self_ty(&self, self_ty: &Ty) -> &[ImplId] {
+ match TyFingerprint::for_impl(self_ty) {
+ Some(fp) => self.map.get(&fp).map(|vec| vec.as_ref()).unwrap_or(&[]),
+ None => &[],
+ }
+ }
+
+ /// Iterates every impl in the index, in no particular order.
+ pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
+ self.map.values().flat_map(|v| v.iter().copied())
+ }
+}
+
+impl Ty {
+ /// Returns the crates in which inherent impls for this type may be
+ /// defined: the defining crate for ADTs, or the crates marked by the
+ /// relevant lang items for primitives. `None` means the type cannot have
+ /// inherent impls at all.
+ pub fn def_crates(
+ &self,
+ db: &dyn HirDatabase,
+ cur_crate: CrateId,
+ ) -> Option<ArrayVec<[CrateId; 2]>> {
+ // Types like slice can have inherent impls in several crates, (core and alloc).
+ // The corresponding impls are marked with lang items, so we can use them to find the required crates.
+ macro_rules! lang_item_crate {
+ ($($name:expr),+ $(,)?) => {{
+ let mut v = ArrayVec::<[LangItemTarget; 2]>::new();
+ $(
+ v.extend(db.lang_item(cur_crate, $name.into()));
+ )+
+ v
+ }};
+ }
+
+ let lang_item_targets = match self {
+ Ty::Apply(a_ty) => match a_ty.ctor {
+ // ADTs have inherent impls only in their defining crate.
+ TypeCtor::Adt(def_id) => {
+ return Some(std::iter::once(def_id.module(db.upcast()).krate).collect())
+ }
+ TypeCtor::Bool => lang_item_crate!("bool"),
+ TypeCtor::Char => lang_item_crate!("char"),
+ TypeCtor::Float(f) => match f.bitness {
+ // There are two lang items: one in libcore (fXX) and one in libstd (fXX_runtime)
+ FloatBitness::X32 => lang_item_crate!("f32", "f32_runtime"),
+ FloatBitness::X64 => lang_item_crate!("f64", "f64_runtime"),
+ },
+ TypeCtor::Int(i) => lang_item_crate!(i.ty_to_string()),
+ TypeCtor::Str => lang_item_crate!("str_alloc", "str"),
+ TypeCtor::Slice => lang_item_crate!("slice_alloc", "slice"),
+ TypeCtor::RawPtr(Mutability::Shared) => lang_item_crate!("const_ptr"),
+ TypeCtor::RawPtr(Mutability::Mut) => lang_item_crate!("mut_ptr"),
+ _ => return None,
+ },
+ _ => return None,
+ };
+ // Map each lang-item impl back to the crate its module lives in.
+ let res = lang_item_targets
+ .into_iter()
+ .filter_map(|it| match it {
+ LangItemTarget::ImplDefId(it) => Some(it),
+ _ => None,
+ })
+ .map(|it| it.lookup(db.upcast()).container.module(db.upcast()).krate)
+ .collect();
+ Some(res)
+ }
+}
+/// Look up the method with the given name, returning the actual autoderefed
+/// receiver type (but without autoref applied yet).
+pub(crate) fn lookup_method(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ krate: CrateId,
+ traits_in_scope: &FxHashSet<TraitId>,
+ name: &Name,
+) -> Option<(Ty, FunctionId)> {
+ iterate_method_candidates(
+ ty,
+ db,
+ env,
+ krate,
+ &traits_in_scope,
+ Some(name),
+ LookupMode::MethodCall,
+ // Only function items count as methods; the first match wins.
+ |ty, f| match f {
+ AssocItemId::FunctionId(f) => Some((ty.clone(), f)),
+ _ => None,
+ },
+ )
+}
+
+/// Whether we're looking up a dotted method call (like `v.len()`) or a path
+/// (like `Vec::new`).
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum LookupMode {
+ /// Looking up a method call like `v.len()`: We only consider candidates
+ /// that have a `self` parameter, and do autoderef.
+ MethodCall,
+ /// Looking up a path like `Vec::new` or `Vec::default`: We consider all
+ /// candidates including associated constants, but don't do autoderef.
+ Path,
+}
+
+// This would be nicer if it just returned an iterator, but that runs into
+// lifetime problems, because we need to borrow temp `CrateImplDefs`.
+// FIXME add a context type here?
+/// Public entry point for method-candidate iteration: adapts the caller's
+/// `Option`-returning callback onto the `bool`-returning internal walk, and
+/// stops at the first candidate for which the callback returns `Some`.
+pub fn iterate_method_candidates<T>(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ krate: CrateId,
+ traits_in_scope: &FxHashSet<TraitId>,
+ name: Option<&Name>,
+ mode: LookupMode,
+ mut callback: impl FnMut(&Ty, AssocItemId) -> Option<T>,
+) -> Option<T> {
+ let mut slot = None;
+ iterate_method_candidates_impl(
+ ty,
+ db,
+ env,
+ krate,
+ traits_in_scope,
+ name,
+ mode,
+ &mut |ty, item| {
+ // Returning `true` aborts the walk, so `slot` is written at most once.
+ assert!(slot.is_none());
+ slot = callback(ty, item);
+ slot.is_some()
+ },
+ );
+ slot
+}
+
+/// Internal driver: dispatches on the lookup mode. Returns `true` as soon as
+/// the callback accepts a candidate (i.e. also returns `true`).
+fn iterate_method_candidates_impl(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ krate: CrateId,
+ traits_in_scope: &FxHashSet<TraitId>,
+ name: Option<&Name>,
+ mode: LookupMode,
+ callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool,
+) -> bool {
+ match mode {
+ LookupMode::MethodCall => {
+ // For method calls, rust first does any number of autoderef, and then one
+ // autoref (i.e. when the method takes &self or &mut self). We just ignore
+ // the autoref currently -- when we find a method matching the given name,
+ // we assume it fits.
+
+ // Also note that when we've got a receiver like &S, even if the method we
+ // find in the end takes &self, we still do the autoderef step (just as
+ // rustc does an autoderef and then autoref again).
+ let ty = InEnvironment { value: ty.clone(), environment: env.clone() };
+
+ // We have to be careful about the order we're looking at candidates
+ // in here. Consider the case where we're resolving `x.clone()`
+ // where `x: &Vec<_>`. This resolves to the clone method with self
+ // type `Vec<_>`, *not* `&_`. I.e. we need to consider methods where
+ // the receiver type exactly matches before cases where we have to
+ // do autoref. But in the autoderef steps, the `&_` self type comes
+ // up *before* the `Vec<_>` self type.
+ //
+ // On the other hand, we don't want to just pick any by-value method
+ // before any by-autoref method; it's just that we need to consider
+ // the methods by autoderef order of *receiver types*, not *self
+ // types*.
+
+ // Each iteration restarts candidate search from one step further
+ // down the deref chain, passing the remaining suffix along.
+ let deref_chain = autoderef_method_receiver(db, krate, ty);
+ for i in 0..deref_chain.len() {
+ if iterate_method_candidates_with_autoref(
+ &deref_chain[i..],
+ db,
+ env.clone(),
+ krate,
+ traits_in_scope,
+ name,
+ callback,
+ ) {
+ return true;
+ }
+ }
+ false
+ }
+ LookupMode::Path => {
+ // No autoderef for path lookups
+ iterate_method_candidates_for_self_ty(
+ &ty,
+ db,
+ env,
+ krate,
+ traits_in_scope,
+ name,
+ callback,
+ )
+ }
+ }
+}
+
+/// Tries candidates for the receiver type as-is, then behind a shared `&`,
+/// then behind `&mut` — mirroring rustc's single autoref step. Expects a
+/// non-empty `deref_chain` (callers pass non-empty suffixes of the chain).
+fn iterate_method_candidates_with_autoref(
+ deref_chain: &[Canonical<Ty>],
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ krate: CrateId,
+ traits_in_scope: &FxHashSet<TraitId>,
+ name: Option<&Name>,
+ mut callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool,
+) -> bool {
+ // 1) Receiver type exactly as given.
+ if iterate_method_candidates_by_receiver(
+ &deref_chain[0],
+ &deref_chain[1..],
+ db,
+ env.clone(),
+ krate,
+ &traits_in_scope,
+ name,
+ &mut callback,
+ ) {
+ return true;
+ }
+ // 2) Receiver behind a shared reference.
+ let refed = Canonical {
+ kinds: deref_chain[0].kinds.clone(),
+ value: Ty::apply_one(TypeCtor::Ref(Mutability::Shared), deref_chain[0].value.clone()),
+ };
+ if iterate_method_candidates_by_receiver(
+ &refed,
+ deref_chain,
+ db,
+ env.clone(),
+ krate,
+ &traits_in_scope,
+ name,
+ &mut callback,
+ ) {
+ return true;
+ }
+ // 3) Receiver behind a mutable reference.
+ let ref_muted = Canonical {
+ kinds: deref_chain[0].kinds.clone(),
+ value: Ty::apply_one(TypeCtor::Ref(Mutability::Mut), deref_chain[0].value.clone()),
+ };
+ if iterate_method_candidates_by_receiver(
+ &ref_muted,
+ deref_chain,
+ db,
+ env,
+ krate,
+ &traits_in_scope,
+ name,
+ &mut callback,
+ ) {
+ return true;
+ }
+ false
+}
+
+/// Iterates candidates whose *receiver* type is `receiver_ty`: first all
+/// inherent methods along the deref chain, then all trait methods — so
+/// inherent methods win over trait methods for the same receiver.
+fn iterate_method_candidates_by_receiver(
+ receiver_ty: &Canonical<Ty>,
+ rest_of_deref_chain: &[Canonical<Ty>],
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ krate: CrateId,
+ traits_in_scope: &FxHashSet<TraitId>,
+ name: Option<&Name>,
+ mut callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool,
+) -> bool {
+ // We're looking for methods with *receiver* type receiver_ty. These could
+ // be found in any of the derefs of receiver_ty, so we have to go through
+ // that.
+ for self_ty in std::iter::once(receiver_ty).chain(rest_of_deref_chain) {
+ if iterate_inherent_methods(self_ty, db, name, Some(receiver_ty), krate, &mut callback) {
+ return true;
+ }
+ }
+ for self_ty in std::iter::once(receiver_ty).chain(rest_of_deref_chain) {
+ if iterate_trait_method_candidates(
+ self_ty,
+ db,
+ env.clone(),
+ krate,
+ &traits_in_scope,
+ name,
+ Some(receiver_ty),
+ &mut callback,
+ ) {
+ return true;
+ }
+ }
+ false
+}
+
+/// Path-mode lookup (`Vec::new` style): no receiver, no autoderef; inherent
+/// items are tried before trait items.
+fn iterate_method_candidates_for_self_ty(
+ self_ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ krate: CrateId,
+ traits_in_scope: &FxHashSet<TraitId>,
+ name: Option<&Name>,
+ mut callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool,
+) -> bool {
+ if iterate_inherent_methods(self_ty, db, name, None, krate, &mut callback) {
+ return true;
+ }
+ iterate_trait_method_candidates(self_ty, db, env, krate, traits_in_scope, name, None, callback)
+}
+
+/// Iterates trait-method candidates for `self_ty`. Considered traits are:
+/// the traits of a `dyn Trait` self type, traits from `T: Trait` bounds in
+/// the environment for placeholder self types, and the traits in scope.
+fn iterate_trait_method_candidates(
+ self_ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ krate: CrateId,
+ traits_in_scope: &FxHashSet<TraitId>,
+ name: Option<&Name>,
+ receiver_ty: Option<&Canonical<Ty>>,
+ callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool,
+) -> bool {
+ // if ty is `dyn Trait`, the trait doesn't need to be in scope
+ let inherent_trait =
+ self_ty.value.dyn_trait().into_iter().flat_map(|t| all_super_traits(db.upcast(), t));
+ let env_traits = if let Ty::Placeholder(_) = self_ty.value {
+ // if we have `T: Trait` in the param env, the trait doesn't need to be in scope
+ env.trait_predicates_for_self_ty(&self_ty.value)
+ .map(|tr| tr.trait_)
+ .flat_map(|t| all_super_traits(db.upcast(), t))
+ .collect()
+ } else {
+ Vec::new()
+ };
+ let traits =
+ inherent_trait.chain(env_traits.into_iter()).chain(traits_in_scope.iter().copied());
+ 'traits: for t in traits {
+ let data = db.trait_data(t);
+
+ // we'll be lazy about checking whether the type implements the
+ // trait, but if we find out it doesn't, we'll skip the rest of the
+ // iteration
+ let mut known_implemented = false;
+ for (_name, item) in data.items.iter() {
+ if !is_valid_candidate(db, name, receiver_ty, *item, self_ty) {
+ continue;
+ }
+ if !known_implemented {
+ // Solve `self_ty: t` only once we have a plausible candidate.
+ let goal = generic_implements_goal(db, env.clone(), t, self_ty.clone());
+ if db.trait_solve(krate, goal).is_none() {
+ continue 'traits;
+ }
+ }
+ known_implemented = true;
+ if callback(&self_ty.value, *item) {
+ return true;
+ }
+ }
+ }
+ false
+}
+
+/// Iterates inherent-impl candidates for `self_ty`, searching the crates in
+/// which such impls may be defined (per `Ty::def_crates`).
+fn iterate_inherent_methods(
+ self_ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ name: Option<&Name>,
+ receiver_ty: Option<&Canonical<Ty>>,
+ krate: CrateId,
+ callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool,
+) -> bool {
+ let def_crates = match self_ty.value.def_crates(db, krate) {
+ Some(k) => k,
+ None => return false,
+ };
+ for krate in def_crates {
+ let impls = db.inherent_impls_in_crate(krate);
+
+ for &impl_def in impls.for_self_ty(&self_ty.value) {
+ for &item in db.impl_data(impl_def).items.iter() {
+ if !is_valid_candidate(db, name, receiver_ty, item, self_ty) {
+ continue;
+ }
+ // we have to check whether the self type unifies with the type
+ // that the impl is for. If we have a receiver type, this
+ // already happens in `is_valid_candidate` above; if not, we
+ // check it here
+ if receiver_ty.is_none() && inherent_impl_substs(db, impl_def, self_ty).is_none() {
+ test_utils::mark::hit!(impl_self_type_match_without_receiver);
+ continue;
+ }
+ if callback(&self_ty.value, item) {
+ return true;
+ }
+ }
+ }
+ }
+ false
+}
+
+/// Returns the self type for the index trait call: walks the autoderef chain
+/// of `ty` and returns the first type that implements `index_trait`.
+pub fn resolve_indexing_op(
+ db: &dyn HirDatabase,
+ ty: &Canonical<Ty>,
+ env: Arc<TraitEnvironment>,
+ krate: CrateId,
+ index_trait: TraitId,
+) -> Option<Canonical<Ty>> {
+ let ty = InEnvironment { value: ty.clone(), environment: env.clone() };
+ let deref_chain = autoderef_method_receiver(db, krate, ty);
+ for ty in deref_chain {
+ let goal = generic_implements_goal(db, env.clone(), index_trait, ty.clone());
+ if db.trait_solve(krate, goal).is_some() {
+ return Some(ty);
+ }
+ }
+ None
+}
+
+/// Checks whether an associated item is a plausible candidate: the name must
+/// match (if given), and when a receiver type is present the item must be a
+/// method whose (substituted) `self` parameter type equals the receiver.
+fn is_valid_candidate(
+ db: &dyn HirDatabase,
+ name: Option<&Name>,
+ receiver_ty: Option<&Canonical<Ty>>,
+ item: AssocItemId,
+ self_ty: &Canonical<Ty>,
+) -> bool {
+ match item {
+ AssocItemId::FunctionId(m) => {
+ let data = db.function_data(m);
+ if let Some(name) = name {
+ if &data.name != name {
+ return false;
+ }
+ }
+ if let Some(receiver_ty) = receiver_ty {
+ // Method-call mode: only functions taking `self` qualify.
+ if !data.has_self_param {
+ return false;
+ }
+ let transformed_receiver_ty = match transform_receiver_ty(db, m, self_ty) {
+ Some(ty) => ty,
+ None => return false,
+ };
+ if transformed_receiver_ty != receiver_ty.value {
+ return false;
+ }
+ }
+ true
+ }
+ AssocItemId::ConstId(c) => {
+ // Associated consts match by name only, and never in method-call
+ // mode (they have no receiver).
+ let data = db.const_data(c);
+ name.map_or(true, |name| data.name.as_ref() == Some(name)) && receiver_ty.is_none()
+ }
+ _ => false,
+ }
+}
+
+/// Unifies `self_ty` with the (variable-substituted) self type of the given
+/// inherent impl, returning the substitution for the impl's own type
+/// parameters on success, or `None` if unification fails.
+pub(crate) fn inherent_impl_substs(
+ db: &dyn HirDatabase,
+ impl_id: ImplId,
+ self_ty: &Canonical<Ty>,
+) -> Option<Substs> {
+ // we create a var for each type parameter of the impl; we need to keep in
+ // mind here that `self_ty` might have vars of its own
+ let vars = Substs::build_for_def(db, impl_id)
+ .fill_with_bound_vars(DebruijnIndex::INNERMOST, self_ty.kinds.len())
+ .build();
+ let self_ty_with_vars = db.impl_self_ty(impl_id).subst(&vars);
+ let mut kinds = self_ty.kinds.to_vec();
+ kinds.extend(iter::repeat(TyKind::General).take(vars.len()));
+ let tys = Canonical { kinds: kinds.into(), value: (self_ty_with_vars, self_ty.value.clone()) };
+ let substs = super::infer::unify(&tys);
+ // We only want the substs for the vars we added, not the ones from self_ty.
+ // Also, if any of the vars we added are still in there, we replace them by
+ // Unknown. I think this can only really happen if self_ty contained
+ // Unknown, and in that case we want the result to contain Unknown in those
+ // places again.
+ substs.map(|s| fallback_bound_vars(s.suffix(vars.len()), self_ty.kinds.len()))
+}
+
+/// This replaces any 'free' Bound vars in `s` (i.e. those with indices past
+/// num_vars_to_keep) by `Ty::Unknown`.
+fn fallback_bound_vars(s: Substs, num_vars_to_keep: usize) -> Substs {
+ s.fold_binders(
+ &mut |ty, binders| {
+ if let Ty::Bound(bound) = &ty {
+ // Only replace vars that are free at this binder depth.
+ if bound.index >= num_vars_to_keep && bound.debruijn >= binders {
+ Ty::Unknown
+ } else {
+ ty
+ }
+ } else {
+ ty
+ }
+ },
+ DebruijnIndex::INNERMOST,
+ )
+}
+
+/// Computes the type of a method's first (`self`) parameter with the actual
+/// self type substituted in; `None` if the impl's substs cannot be derived.
+fn transform_receiver_ty(
+ db: &dyn HirDatabase,
+ function_id: FunctionId,
+ self_ty: &Canonical<Ty>,
+) -> Option<Ty> {
+ let substs = match function_id.lookup(db.upcast()).container {
+ // Trait method: substitute `Self` directly, leave the rest unknown.
+ AssocContainerId::TraitId(_) => Substs::build_for_def(db, function_id)
+ .push(self_ty.value.clone())
+ .fill_with_unknown()
+ .build(),
+ // Impl method: derive the impl's substs by unifying self types.
+ AssocContainerId::ImplId(impl_id) => inherent_impl_substs(db, impl_id, &self_ty)?,
+ AssocContainerId::ContainerId(_) => unreachable!(),
+ };
+ let sig = db.callable_item_signature(function_id.into());
+ Some(sig.value.params()[0].clone().subst_bound_vars(&substs))
+}
+
+/// Checks whether `ty` implements `trait_` by asking the trait solver.
+pub fn implements_trait(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ krate: CrateId,
+ trait_: TraitId,
+) -> bool {
+ let goal = generic_implements_goal(db, env, trait_, ty.clone());
+ let solution = db.trait_solve(krate, goal);
+
+ solution.is_some()
+}
+
+/// This creates Substs for a trait with the given Self type and type variables
+/// for all other parameters, to query Chalk with it.
+fn generic_implements_goal(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ trait_: TraitId,
+ self_ty: Canonical<Ty>,
+) -> Canonical<InEnvironment<super::Obligation>> {
+ let mut kinds = self_ty.kinds.to_vec();
+ let substs = super::Substs::build_for_def(db, trait_)
+ .push(self_ty.value)
+ .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len())
+ .build();
+ // One new variable kind per trait parameter besides `Self`.
+ kinds.extend(iter::repeat(TyKind::General).take(substs.len() - 1));
+ let trait_ref = TraitRef { trait_, substs };
+ let obligation = super::Obligation::Trait(trait_ref);
+ Canonical { kinds: kinds.into(), value: InEnvironment::new(env, obligation) }
+}
+
+/// Builds the chain of receiver types obtained by repeated autoderef,
+/// followed by one array-to-slice unsizing step at the end if applicable.
+fn autoderef_method_receiver(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ ty: InEnvironment<Canonical<Ty>>,
+) -> Vec<Canonical<Ty>> {
+ let mut deref_chain: Vec<_> = autoderef::autoderef(db, Some(krate), ty).collect();
+ // As a last step, we can do array unsizing (that's the only unsizing that rustc does for method receivers!)
+ if let Some(Ty::Apply(ApplicationTy { ctor: TypeCtor::Array, parameters })) =
+ deref_chain.last().map(|ty| &ty.value)
+ {
+ let kinds = deref_chain.last().unwrap().kinds.clone();
+ let unsized_ty = Ty::apply(TypeCtor::Slice, parameters.clone());
+ deref_chain.push(Canonical { value: unsized_ty, kinds })
+ }
+ deref_chain
+}
--- /dev/null
+//! Helper functions for binary operator type inference.
+use hir_def::expr::{ArithOp, BinaryOp, CmpOp};
+
+use super::{InferTy, Ty, TypeCtor};
+use crate::ApplicationTy;
+
+/// Computes the result type of a binary expression from the operator and the
+/// operand types: comparisons/logic yield `bool`, assignments yield `()`,
+/// shifts take the left-hand type, other arithmetic takes the right-hand
+/// type; non-numeric operands yield `Ty::Unknown`.
+pub(super) fn binary_op_return_ty(op: BinaryOp, lhs_ty: Ty, rhs_ty: Ty) -> Ty {
+ match op {
+ BinaryOp::LogicOp(_) | BinaryOp::CmpOp(_) => Ty::simple(TypeCtor::Bool),
+ BinaryOp::Assignment { .. } => Ty::unit(),
+ // Shifts don't require matching operand types: the result follows lhs.
+ BinaryOp::ArithOp(ArithOp::Shl) | BinaryOp::ArithOp(ArithOp::Shr) => match lhs_ty {
+ Ty::Apply(ApplicationTy { ctor, .. }) => match ctor {
+ TypeCtor::Int(..) | TypeCtor::Float(..) => lhs_ty,
+ _ => Ty::Unknown,
+ },
+ Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => lhs_ty,
+ _ => Ty::Unknown,
+ },
+ BinaryOp::ArithOp(_) => match rhs_ty {
+ Ty::Apply(ApplicationTy { ctor, .. }) => match ctor {
+ TypeCtor::Int(..) | TypeCtor::Float(..) => rhs_ty,
+ _ => Ty::Unknown,
+ },
+ Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => rhs_ty,
+ _ => Ty::Unknown,
+ },
+ }
+}
+
+/// Computes the expected type of the right-hand operand given the operator
+/// and the left-hand type; `Ty::Unknown` means "no expectation".
+pub(super) fn binary_op_rhs_expectation(op: BinaryOp, lhs_ty: Ty) -> Ty {
+ match op {
+ BinaryOp::LogicOp(..) => Ty::simple(TypeCtor::Bool),
+ // Plain `=` expects the rhs to have exactly the lhs type.
+ BinaryOp::Assignment { op: None } => lhs_ty,
+ // `==`/`!=` expect matching operands for the listed primitive ctors.
+ BinaryOp::CmpOp(CmpOp::Eq { .. }) => match lhs_ty {
+ Ty::Apply(ApplicationTy { ctor, .. }) => match ctor {
+ TypeCtor::Int(..)
+ | TypeCtor::Float(..)
+ | TypeCtor::Str
+ | TypeCtor::Char
+ | TypeCtor::Bool => lhs_ty,
+ _ => Ty::Unknown,
+ },
+ Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => lhs_ty,
+ _ => Ty::Unknown,
+ },
+ // Shift amounts need not share the lhs type, so no expectation.
+ BinaryOp::ArithOp(ArithOp::Shl) | BinaryOp::ArithOp(ArithOp::Shr) => Ty::Unknown,
+ BinaryOp::CmpOp(CmpOp::Ord { .. })
+ | BinaryOp::Assignment { op: Some(_) }
+ | BinaryOp::ArithOp(_) => match lhs_ty {
+ Ty::Apply(ApplicationTy { ctor, .. }) => match ctor {
+ TypeCtor::Int(..) | TypeCtor::Float(..) => lhs_ty,
+ _ => Ty::Unknown,
+ },
+ Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => lhs_ty,
+ _ => Ty::Unknown,
+ },
+ }
+}
--- /dev/null
+//! Defines primitive types, which have a couple of peculiarities:
+//!
+//! * during type inference, they can be uncertain (ie, `let x = 92;`)
+//! * they don't belong to any particular crate.
+
+use std::fmt;
+
+pub use hir_def::builtin_type::{BuiltinFloat, BuiltinInt, FloatBitness, IntBitness, Signedness};
+
+/// A primitive integer type, identified by signedness and bit width.
+#[derive(Copy, Clone, Eq, PartialEq, Hash)]
+pub struct IntTy {
+ pub signedness: Signedness,
+ pub bitness: IntBitness,
+}
+
+impl fmt::Debug for IntTy {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // Debug output is deliberately the same as Display (`i32`, `u8`, ...).
+ fmt::Display::fmt(self, f)
+ }
+}
+
+impl fmt::Display for IntTy {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "{}", self.ty_to_string())
+ }
+}
+
+/// Shorthand constructors — one per primitive integer type — plus the
+/// canonical source-level name of the type.
+impl IntTy {
+ pub fn isize() -> IntTy {
+ IntTy { signedness: Signedness::Signed, bitness: IntBitness::Xsize }
+ }
+
+ pub fn i8() -> IntTy {
+ IntTy { signedness: Signedness::Signed, bitness: IntBitness::X8 }
+ }
+
+ pub fn i16() -> IntTy {
+ IntTy { signedness: Signedness::Signed, bitness: IntBitness::X16 }
+ }
+
+ pub fn i32() -> IntTy {
+ IntTy { signedness: Signedness::Signed, bitness: IntBitness::X32 }
+ }
+
+ pub fn i64() -> IntTy {
+ IntTy { signedness: Signedness::Signed, bitness: IntBitness::X64 }
+ }
+
+ pub fn i128() -> IntTy {
+ IntTy { signedness: Signedness::Signed, bitness: IntBitness::X128 }
+ }
+
+ pub fn usize() -> IntTy {
+ IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::Xsize }
+ }
+
+ pub fn u8() -> IntTy {
+ IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X8 }
+ }
+
+ pub fn u16() -> IntTy {
+ IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X16 }
+ }
+
+ pub fn u32() -> IntTy {
+ IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X32 }
+ }
+
+ pub fn u64() -> IntTy {
+ IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X64 }
+ }
+
+ pub fn u128() -> IntTy {
+ IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X128 }
+ }
+
+ /// Returns the Rust source name of the type, e.g. `"i32"` or `"usize"`.
+ pub fn ty_to_string(self) -> &'static str {
+ match (self.signedness, self.bitness) {
+ (Signedness::Signed, IntBitness::Xsize) => "isize",
+ (Signedness::Signed, IntBitness::X8) => "i8",
+ (Signedness::Signed, IntBitness::X16) => "i16",
+ (Signedness::Signed, IntBitness::X32) => "i32",
+ (Signedness::Signed, IntBitness::X64) => "i64",
+ (Signedness::Signed, IntBitness::X128) => "i128",
+ (Signedness::Unsigned, IntBitness::Xsize) => "usize",
+ (Signedness::Unsigned, IntBitness::X8) => "u8",
+ (Signedness::Unsigned, IntBitness::X16) => "u16",
+ (Signedness::Unsigned, IntBitness::X32) => "u32",
+ (Signedness::Unsigned, IntBitness::X64) => "u64",
+ (Signedness::Unsigned, IntBitness::X128) => "u128",
+ }
+ }
+}
+
+/// A primitive floating-point type, identified by its bit width.
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+pub struct FloatTy {
+ pub bitness: FloatBitness,
+}
+
+impl fmt::Debug for FloatTy {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // Debug output is deliberately the same as Display (`f32`/`f64`).
+ fmt::Display::fmt(self, f)
+ }
+}
+
+impl fmt::Display for FloatTy {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "{}", self.ty_to_string())
+ }
+}
+
+impl FloatTy {
+ pub fn f32() -> FloatTy {
+ FloatTy { bitness: FloatBitness::X32 }
+ }
+
+ pub fn f64() -> FloatTy {
+ FloatTy { bitness: FloatBitness::X64 }
+ }
+
+ /// Returns the Rust source name of the type: `"f32"` or `"f64"`.
+ pub fn ty_to_string(self) -> &'static str {
+ match self.bitness {
+ FloatBitness::X32 => "f32",
+ FloatBitness::X64 => "f64",
+ }
+ }
+}
+
+// Conversions from the `hir_def` builtin-type descriptions, which carry the
+// same signedness/bitness fields.
+impl From<BuiltinInt> for IntTy {
+ fn from(t: BuiltinInt) -> Self {
+ IntTy { signedness: t.signedness, bitness: t.bitness }
+ }
+}
+
+impl From<BuiltinFloat> for FloatTy {
+ fn from(t: BuiltinFloat) -> Self {
+ FloatTy { bitness: t.bitness }
+ }
+}
--- /dev/null
+//! Database used for testing `hir`.
+
+use std::{
+ fmt, panic,
+ sync::{Arc, Mutex},
+};
+
+use base_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast};
+use hir_def::{db::DefDatabase, ModuleId};
+use hir_expand::db::AstDatabase;
+use rustc_hash::{FxHashMap, FxHashSet};
+use syntax::TextRange;
+use test_utils::extract_annotations;
+
+#[salsa::database(
+ base_db::SourceDatabaseExtStorage,
+ base_db::SourceDatabaseStorage,
+ hir_expand::db::AstDatabaseStorage,
+ hir_def::db::InternDatabaseStorage,
+ hir_def::db::DefDatabaseStorage,
+ crate::db::HirDatabaseStorage
+)]
+#[derive(Default)]
+pub struct TestDB {
+ storage: salsa::Storage<TestDB>,
+ events: Mutex<Option<Vec<salsa::Event>>>,
+}
+impl fmt::Debug for TestDB {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("TestDB").finish()
+ }
+}
+
+impl Upcast<dyn AstDatabase> for TestDB {
+ fn upcast(&self) -> &(dyn AstDatabase + 'static) {
+ &*self
+ }
+}
+
+impl Upcast<dyn DefDatabase> for TestDB {
+ fn upcast(&self) -> &(dyn DefDatabase + 'static) {
+ &*self
+ }
+}
+
+impl salsa::Database for TestDB {
+ fn salsa_event(&self, event: salsa::Event) {
+ let mut events = self.events.lock().unwrap();
+ if let Some(events) = &mut *events {
+ events.push(event);
+ }
+ }
+}
+
+impl salsa::ParallelDatabase for TestDB {
+ fn snapshot(&self) -> salsa::Snapshot<TestDB> {
+ salsa::Snapshot::new(TestDB {
+ storage: self.storage.snapshot(),
+ events: Default::default(),
+ })
+ }
+}
+
+impl panic::RefUnwindSafe for TestDB {}
+
+impl FileLoader for TestDB {
+ fn file_text(&self, file_id: FileId) -> Arc<String> {
+ FileLoaderDelegate(self).file_text(file_id)
+ }
+ fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
+ FileLoaderDelegate(self).resolve_path(anchor, path)
+ }
+ fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
+ FileLoaderDelegate(self).relevant_crates(file_id)
+ }
+}
+
+impl TestDB {
+ pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
+ for &krate in self.relevant_crates(file_id).iter() {
+ let crate_def_map = self.crate_def_map(krate);
+ for (local_id, data) in crate_def_map.modules.iter() {
+ if data.origin.file_id() == Some(file_id) {
+ return ModuleId { krate, local_id };
+ }
+ }
+ }
+ panic!("Can't find module for file")
+ }
+
+ pub(crate) fn extract_annotations(&self) -> FxHashMap<FileId, Vec<(TextRange, String)>> {
+ let mut files = Vec::new();
+ let crate_graph = self.crate_graph();
+ for krate in crate_graph.iter() {
+ let crate_def_map = self.crate_def_map(krate);
+ for (module_id, _) in crate_def_map.modules.iter() {
+ let file_id = crate_def_map[module_id].origin.file_id();
+ files.extend(file_id)
+ }
+ }
+ files
+ .into_iter()
+ .filter_map(|file_id| {
+ let text = self.file_text(file_id);
+ let annotations = extract_annotations(&text);
+ if annotations.is_empty() {
+ return None;
+ }
+ Some((file_id, annotations))
+ })
+ .collect()
+ }
+}
+
+impl TestDB {
+ pub fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
+ *self.events.lock().unwrap() = Some(Vec::new());
+ f();
+ self.events.lock().unwrap().take().unwrap()
+ }
+
+ pub fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
+ let events = self.log(f);
+ events
+ .into_iter()
+ .filter_map(|e| match e.kind {
+ // This is pretty horrible, but `Debug` is the only way to inspect
+ // QueryDescriptor at the moment.
+ salsa::EventKind::WillExecute { database_key } => {
+ Some(format!("{:?}", database_key.debug(self)))
+ }
+ _ => None,
+ })
+ .collect()
+ }
+}
--- /dev/null
+mod never_type;
+mod coercion;
+mod regression;
+mod simple;
+mod patterns;
+mod traits;
+mod method_resolution;
+mod macros;
+mod display_source_code;
+
+use std::sync::Arc;
+
+use base_db::{fixture::WithFixture, FileRange, SourceDatabase, SourceDatabaseExt};
+use expect::Expect;
+use hir_def::{
+ body::{BodySourceMap, SyntheticSyntax},
+ child_by_source::ChildBySource,
+ db::DefDatabase,
+ item_scope::ItemScope,
+ keys,
+ nameres::CrateDefMap,
+ AssocItemId, DefWithBodyId, LocalModuleId, Lookup, ModuleDefId,
+};
+use hir_expand::{db::AstDatabase, InFile};
+use stdx::format_to;
+use syntax::{
+ algo,
+ ast::{self, AstNode},
+ SyntaxNode,
+};
+
+use crate::{
+ db::HirDatabase, display::HirDisplay, infer::TypeMismatch, test_db::TestDB, InferenceResult, Ty,
+};
+
+// These tests compare the inference results for all expressions in a file
+// against snapshots of the expected results using expect. Use
+// `env UPDATE_EXPECT=1 cargo test -p hir_ty` to update the snapshots.
+
+fn setup_tracing() -> tracing::subscriber::DefaultGuard {
+ use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry};
+ use tracing_tree::HierarchicalLayer;
+ let filter = EnvFilter::from_env("CHALK_DEBUG");
+ let layer = HierarchicalLayer::default()
+ .with_indent_lines(true)
+ .with_ansi(false)
+ .with_indent_amount(2)
+ .with_writer(std::io::stderr);
+ let subscriber = Registry::default().with(filter).with(layer);
+ tracing::subscriber::set_default(subscriber)
+}
+
+fn check_types(ra_fixture: &str) {
+ check_types_impl(ra_fixture, false)
+}
+
+fn check_types_source_code(ra_fixture: &str) {
+ check_types_impl(ra_fixture, true)
+}
+
+fn check_types_impl(ra_fixture: &str, display_source: bool) {
+ let _tracing = setup_tracing();
+ let db = TestDB::with_files(ra_fixture);
+ let mut checked_one = false;
+ for (file_id, annotations) in db.extract_annotations() {
+ for (range, expected) in annotations {
+ let ty = type_at_range(&db, FileRange { file_id, range });
+ let actual = if display_source {
+ let module = db.module_for_file(file_id);
+ ty.display_source_code(&db, module).unwrap()
+ } else {
+ ty.display(&db).to_string()
+ };
+ assert_eq!(expected, actual);
+ checked_one = true;
+ }
+ }
+ assert!(checked_one, "no `//^` annotations found");
+}
+
+fn type_at_range(db: &TestDB, pos: FileRange) -> Ty {
+ let file = db.parse(pos.file_id).ok().unwrap();
+ let expr = algo::find_node_at_range::<ast::Expr>(file.syntax(), pos.range).unwrap();
+ let fn_def = expr.syntax().ancestors().find_map(ast::Fn::cast).unwrap();
+ let module = db.module_for_file(pos.file_id);
+ let func = *module.child_by_source(db)[keys::FUNCTION]
+ .get(&InFile::new(pos.file_id.into(), fn_def))
+ .unwrap();
+
+ let (_body, source_map) = db.body_with_source_map(func.into());
+ if let Some(expr_id) = source_map.node_expr(InFile::new(pos.file_id.into(), &expr)) {
+ let infer = db.infer(func.into());
+ return infer[expr_id].clone();
+ }
+ panic!("Can't find expression")
+}
+
+fn infer(ra_fixture: &str) -> String {
+ infer_with_mismatches(ra_fixture, false)
+}
+
+fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
+ let _tracing = setup_tracing();
+ let (db, file_id) = TestDB::with_single_file(content);
+
+ let mut buf = String::new();
+
+ let mut infer_def = |inference_result: Arc<InferenceResult>,
+ body_source_map: Arc<BodySourceMap>| {
+ let mut types: Vec<(InFile<SyntaxNode>, &Ty)> = Vec::new();
+ let mut mismatches: Vec<(InFile<SyntaxNode>, &TypeMismatch)> = Vec::new();
+
+ for (pat, ty) in inference_result.type_of_pat.iter() {
+ let syntax_ptr = match body_source_map.pat_syntax(pat) {
+ Ok(sp) => {
+ let root = db.parse_or_expand(sp.file_id).unwrap();
+ sp.map(|ptr| {
+ ptr.either(
+ |it| it.to_node(&root).syntax().clone(),
+ |it| it.to_node(&root).syntax().clone(),
+ )
+ })
+ }
+ Err(SyntheticSyntax) => continue,
+ };
+ types.push((syntax_ptr, ty));
+ }
+
+ for (expr, ty) in inference_result.type_of_expr.iter() {
+ let node = match body_source_map.expr_syntax(expr) {
+ Ok(sp) => {
+ let root = db.parse_or_expand(sp.file_id).unwrap();
+ sp.map(|ptr| ptr.to_node(&root).syntax().clone())
+ }
+ Err(SyntheticSyntax) => continue,
+ };
+ types.push((node.clone(), ty));
+ if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr) {
+ mismatches.push((node, mismatch));
+ }
+ }
+
+ // sort ranges for consistency
+ types.sort_by_key(|(node, _)| {
+ let range = node.value.text_range();
+ (range.start(), range.end())
+ });
+ for (node, ty) in &types {
+ let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.value.clone()) {
+ (self_param.self_token().unwrap().text_range(), "self".to_string())
+ } else {
+ (node.value.text_range(), node.value.text().to_string().replace("\n", " "))
+ };
+ let macro_prefix = if node.file_id != file_id.into() { "!" } else { "" };
+ format_to!(
+ buf,
+ "{}{:?} '{}': {}\n",
+ macro_prefix,
+ range,
+ ellipsize(text, 15),
+ ty.display(&db)
+ );
+ }
+ if include_mismatches {
+ mismatches.sort_by_key(|(node, _)| {
+ let range = node.value.text_range();
+ (range.start(), range.end())
+ });
+ for (src_ptr, mismatch) in &mismatches {
+ let range = src_ptr.value.text_range();
+ let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" };
+ format_to!(
+ buf,
+ "{}{:?}: expected {}, got {}\n",
+ macro_prefix,
+ range,
+ mismatch.expected.display(&db),
+ mismatch.actual.display(&db),
+ );
+ }
+ }
+ };
+
+ let module = db.module_for_file(file_id);
+ let crate_def_map = db.crate_def_map(module.krate);
+
+ let mut defs: Vec<DefWithBodyId> = Vec::new();
+ visit_module(&db, &crate_def_map, module.local_id, &mut |it| defs.push(it));
+ defs.sort_by_key(|def| match def {
+ DefWithBodyId::FunctionId(it) => {
+ let loc = it.lookup(&db);
+ let tree = db.item_tree(loc.id.file_id);
+ tree.source(&db, loc.id).syntax().text_range().start()
+ }
+ DefWithBodyId::ConstId(it) => {
+ let loc = it.lookup(&db);
+ let tree = db.item_tree(loc.id.file_id);
+ tree.source(&db, loc.id).syntax().text_range().start()
+ }
+ DefWithBodyId::StaticId(it) => {
+ let loc = it.lookup(&db);
+ let tree = db.item_tree(loc.id.file_id);
+ tree.source(&db, loc.id).syntax().text_range().start()
+ }
+ });
+ for def in defs {
+ let (_body, source_map) = db.body_with_source_map(def);
+ let infer = db.infer(def);
+ infer_def(infer, source_map);
+ }
+
+ buf.truncate(buf.trim_end().len());
+ buf
+}
+
+fn visit_module(
+ db: &TestDB,
+ crate_def_map: &CrateDefMap,
+ module_id: LocalModuleId,
+ cb: &mut dyn FnMut(DefWithBodyId),
+) {
+ visit_scope(db, crate_def_map, &crate_def_map[module_id].scope, cb);
+ for impl_id in crate_def_map[module_id].scope.impls() {
+ let impl_data = db.impl_data(impl_id);
+ for &item in impl_data.items.iter() {
+ match item {
+ AssocItemId::FunctionId(it) => {
+ let def = it.into();
+ cb(def);
+ let body = db.body(def);
+ visit_scope(db, crate_def_map, &body.item_scope, cb);
+ }
+ AssocItemId::ConstId(it) => {
+ let def = it.into();
+ cb(def);
+ let body = db.body(def);
+ visit_scope(db, crate_def_map, &body.item_scope, cb);
+ }
+ AssocItemId::TypeAliasId(_) => (),
+ }
+ }
+ }
+
+ fn visit_scope(
+ db: &TestDB,
+ crate_def_map: &CrateDefMap,
+ scope: &ItemScope,
+ cb: &mut dyn FnMut(DefWithBodyId),
+ ) {
+ for decl in scope.declarations() {
+ match decl {
+ ModuleDefId::FunctionId(it) => {
+ let def = it.into();
+ cb(def);
+ let body = db.body(def);
+ visit_scope(db, crate_def_map, &body.item_scope, cb);
+ }
+ ModuleDefId::ConstId(it) => {
+ let def = it.into();
+ cb(def);
+ let body = db.body(def);
+ visit_scope(db, crate_def_map, &body.item_scope, cb);
+ }
+ ModuleDefId::StaticId(it) => {
+ let def = it.into();
+ cb(def);
+ let body = db.body(def);
+ visit_scope(db, crate_def_map, &body.item_scope, cb);
+ }
+ ModuleDefId::TraitId(it) => {
+ let trait_data = db.trait_data(it);
+ for &(_, item) in trait_data.items.iter() {
+ match item {
+ AssocItemId::FunctionId(it) => cb(it.into()),
+ AssocItemId::ConstId(it) => cb(it.into()),
+ AssocItemId::TypeAliasId(_) => (),
+ }
+ }
+ }
+ ModuleDefId::ModuleId(it) => visit_module(db, crate_def_map, it.local_id, cb),
+ _ => (),
+ }
+ }
+ }
+}
+
+fn ellipsize(mut text: String, max_len: usize) -> String {
+ if text.len() <= max_len {
+ return text;
+ }
+ let ellipsis = "...";
+ let e_len = ellipsis.len();
+ let mut prefix_len = (max_len - e_len) / 2;
+ while !text.is_char_boundary(prefix_len) {
+ prefix_len += 1;
+ }
+ let mut suffix_len = max_len - e_len - prefix_len;
+ while !text.is_char_boundary(text.len() - suffix_len) {
+ suffix_len += 1;
+ }
+ text.replace_range(prefix_len..text.len() - suffix_len, ellipsis);
+ text
+}
+
+#[test]
+fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
+ let (mut db, pos) = TestDB::with_position(
+ "
+ //- /lib.rs
+ fn foo() -> i32 {
+ <|>1 + 1
+ }
+ ",
+ );
+ {
+ let events = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id);
+ let crate_def_map = db.crate_def_map(module.krate);
+ visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ db.infer(def);
+ });
+ });
+ assert!(format!("{:?}", events).contains("infer"))
+ }
+
+ let new_text = "
+ fn foo() -> i32 {
+ 1
+ +
+ 1
+ }
+ "
+ .to_string();
+
+ db.set_file_text(pos.file_id, Arc::new(new_text));
+
+ {
+ let events = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id);
+ let crate_def_map = db.crate_def_map(module.krate);
+ visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ db.infer(def);
+ });
+ });
+ assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
+ }
+}
+
+fn check_infer(ra_fixture: &str, expect: Expect) {
+ let mut actual = infer(ra_fixture);
+ actual.push('\n');
+ expect.assert_eq(&actual);
+}
+
+fn check_infer_with_mismatches(ra_fixture: &str, expect: Expect) {
+ let mut actual = infer_with_mismatches(ra_fixture, true);
+ actual.push('\n');
+ expect.assert_eq(&actual);
+}
--- /dev/null
+use expect::expect;
+use test_utils::mark;
+
+use super::{check_infer, check_infer_with_mismatches};
+
+#[test]
+fn infer_block_expr_type_mismatch() {
+ check_infer(
+ r"
+ fn test() {
+ let a: i32 = { 1i64 };
+ }
+ ",
+ expect![[r"
+ 10..40 '{ ...4 }; }': ()
+ 20..21 'a': i32
+ 29..37 '{ 1i64 }': i64
+ 31..35 '1i64': i64
+ "]],
+ );
+}
+
+#[test]
+fn coerce_places() {
+ check_infer(
+ r#"
+ struct S<T> { a: T }
+
+ fn f<T>(_: &[T]) -> T { loop {} }
+ fn g<T>(_: S<&[T]>) -> T { loop {} }
+
+ fn gen<T>() -> *mut [T; 2] { loop {} }
+ fn test1<U>() -> *mut [U] {
+ gen()
+ }
+
+ fn test2() {
+ let arr: &[u8; 1] = &[1];
+
+ let a: &[_] = arr;
+ let b = f(arr);
+ let c: &[_] = { arr };
+ let d = g(S { a: arr });
+ let e: [&[_]; 1] = [arr];
+ let f: [&[_]; 2] = [arr; 2];
+ let g: (&[_], &[_]) = (arr, arr);
+ }
+
+ #[lang = "sized"]
+ pub trait Sized {}
+ #[lang = "unsize"]
+ pub trait Unsize<T: ?Sized> {}
+ #[lang = "coerce_unsized"]
+ pub trait CoerceUnsized<T> {}
+
+ impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
+ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
+ "#,
+ expect![[r"
+ 30..31 '_': &[T]
+ 44..55 '{ loop {} }': T
+ 46..53 'loop {}': !
+ 51..53 '{}': ()
+ 64..65 '_': S<&[T]>
+ 81..92 '{ loop {} }': T
+ 83..90 'loop {}': !
+ 88..90 '{}': ()
+ 121..132 '{ loop {} }': *mut [T; _]
+ 123..130 'loop {}': !
+ 128..130 '{}': ()
+ 159..172 '{ gen() }': *mut [U]
+ 165..168 'gen': fn gen<U>() -> *mut [U; _]
+ 165..170 'gen()': *mut [U; _]
+ 185..419 '{ ...rr); }': ()
+ 195..198 'arr': &[u8; _]
+ 211..215 '&[1]': &[u8; _]
+ 212..215 '[1]': [u8; _]
+ 213..214 '1': u8
+ 226..227 'a': &[u8]
+ 236..239 'arr': &[u8; _]
+ 249..250 'b': u8
+ 253..254 'f': fn f<u8>(&[u8]) -> u8
+ 253..259 'f(arr)': u8
+ 255..258 'arr': &[u8; _]
+ 269..270 'c': &[u8]
+ 279..286 '{ arr }': &[u8]
+ 281..284 'arr': &[u8; _]
+ 296..297 'd': u8
+ 300..301 'g': fn g<u8>(S<&[u8]>) -> u8
+ 300..315 'g(S { a: arr })': u8
+ 302..314 'S { a: arr }': S<&[u8]>
+ 309..312 'arr': &[u8; _]
+ 325..326 'e': [&[u8]; _]
+ 340..345 '[arr]': [&[u8]; _]
+ 341..344 'arr': &[u8; _]
+ 355..356 'f': [&[u8]; _]
+ 370..378 '[arr; 2]': [&[u8]; _]
+ 371..374 'arr': &[u8; _]
+ 376..377 '2': usize
+ 388..389 'g': (&[u8], &[u8])
+ 406..416 '(arr, arr)': (&[u8], &[u8])
+ 407..410 'arr': &[u8; _]
+ 412..415 'arr': &[u8; _]
+ "]],
+ );
+}
+
+#[test]
+fn infer_let_stmt_coerce() {
+ check_infer(
+ r"
+ fn test() {
+ let x: &[isize] = &[1];
+ let x: *const [isize] = &[1];
+ }
+ ",
+ expect![[r"
+ 10..75 '{ ...[1]; }': ()
+ 20..21 'x': &[isize]
+ 34..38 '&[1]': &[isize; _]
+ 35..38 '[1]': [isize; _]
+ 36..37 '1': isize
+ 48..49 'x': *const [isize]
+ 68..72 '&[1]': &[isize; _]
+ 69..72 '[1]': [isize; _]
+ 70..71 '1': isize
+ "]],
+ );
+}
+
+#[test]
+fn infer_custom_coerce_unsized() {
+ check_infer(
+ r#"
+ struct A<T: ?Sized>(*const T);
+ struct B<T: ?Sized>(*const T);
+ struct C<T: ?Sized> { inner: *const T }
+
+ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<B<U>> for B<T> {}
+ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<C<U>> for C<T> {}
+
+ fn foo1<T>(x: A<[T]>) -> A<[T]> { x }
+ fn foo2<T>(x: B<[T]>) -> B<[T]> { x }
+ fn foo3<T>(x: C<[T]>) -> C<[T]> { x }
+
+ fn test(a: A<[u8; 2]>, b: B<[u8; 2]>, c: C<[u8; 2]>) {
+ let d = foo1(a);
+ let e = foo2(b);
+ let f = foo3(c);
+ }
+
+
+ #[lang = "sized"]
+ pub trait Sized {}
+ #[lang = "unsize"]
+ pub trait Unsize<T: ?Sized> {}
+ #[lang = "coerce_unsized"]
+ pub trait CoerceUnsized<T> {}
+
+ impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
+ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
+ "#,
+ expect![[r"
+ 257..258 'x': A<[T]>
+ 278..283 '{ x }': A<[T]>
+ 280..281 'x': A<[T]>
+ 295..296 'x': B<[T]>
+ 316..321 '{ x }': B<[T]>
+ 318..319 'x': B<[T]>
+ 333..334 'x': C<[T]>
+ 354..359 '{ x }': C<[T]>
+ 356..357 'x': C<[T]>
+ 369..370 'a': A<[u8; _]>
+ 384..385 'b': B<[u8; _]>
+ 399..400 'c': C<[u8; _]>
+ 414..480 '{ ...(c); }': ()
+ 424..425 'd': A<[{unknown}]>
+ 428..432 'foo1': fn foo1<{unknown}>(A<[{unknown}]>) -> A<[{unknown}]>
+ 428..435 'foo1(a)': A<[{unknown}]>
+ 433..434 'a': A<[u8; _]>
+ 445..446 'e': B<[u8]>
+ 449..453 'foo2': fn foo2<u8>(B<[u8]>) -> B<[u8]>
+ 449..456 'foo2(b)': B<[u8]>
+ 454..455 'b': B<[u8; _]>
+ 466..467 'f': C<[u8]>
+ 470..474 'foo3': fn foo3<u8>(C<[u8]>) -> C<[u8]>
+ 470..477 'foo3(c)': C<[u8]>
+ 475..476 'c': C<[u8; _]>
+ "]],
+ );
+}
+
+#[test]
+fn infer_if_coerce() {
+ check_infer(
+ r#"
+ fn foo<T>(x: &[T]) -> &[T] { loop {} }
+ fn test() {
+ let x = if true {
+ foo(&[1])
+ } else {
+ &[1]
+ };
+ }
+
+
+ #[lang = "sized"]
+ pub trait Sized {}
+ #[lang = "unsize"]
+ pub trait Unsize<T: ?Sized> {}
+ "#,
+ expect![[r"
+ 10..11 'x': &[T]
+ 27..38 '{ loop {} }': &[T]
+ 29..36 'loop {}': !
+ 34..36 '{}': ()
+ 49..125 '{ ... }; }': ()
+ 59..60 'x': &[i32]
+ 63..122 'if tru... }': &[i32]
+ 66..70 'true': bool
+ 71..96 '{ ... }': &[i32]
+ 81..84 'foo': fn foo<i32>(&[i32]) -> &[i32]
+ 81..90 'foo(&[1])': &[i32]
+ 85..89 '&[1]': &[i32; _]
+ 86..89 '[1]': [i32; _]
+ 87..88 '1': i32
+ 102..122 '{ ... }': &[i32; _]
+ 112..116 '&[1]': &[i32; _]
+ 113..116 '[1]': [i32; _]
+ 114..115 '1': i32
+ "]],
+ );
+}
+
+#[test]
+fn infer_if_else_coerce() {
+ check_infer(
+ r#"
+ fn foo<T>(x: &[T]) -> &[T] { loop {} }
+ fn test() {
+ let x = if true {
+ &[1]
+ } else {
+ foo(&[1])
+ };
+ }
+
+ #[lang = "sized"]
+ pub trait Sized {}
+ #[lang = "unsize"]
+ pub trait Unsize<T: ?Sized> {}
+ #[lang = "coerce_unsized"]
+ pub trait CoerceUnsized<T> {}
+
+ impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
+ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
+ "#,
+ expect![[r"
+ 10..11 'x': &[T]
+ 27..38 '{ loop {} }': &[T]
+ 29..36 'loop {}': !
+ 34..36 '{}': ()
+ 49..125 '{ ... }; }': ()
+ 59..60 'x': &[i32]
+ 63..122 'if tru... }': &[i32]
+ 66..70 'true': bool
+ 71..91 '{ ... }': &[i32; _]
+ 81..85 '&[1]': &[i32; _]
+ 82..85 '[1]': [i32; _]
+ 83..84 '1': i32
+ 97..122 '{ ... }': &[i32]
+ 107..110 'foo': fn foo<i32>(&[i32]) -> &[i32]
+ 107..116 'foo(&[1])': &[i32]
+ 111..115 '&[1]': &[i32; _]
+ 112..115 '[1]': [i32; _]
+ 113..114 '1': i32
+ "]],
+ )
+}
+
+#[test]
+fn infer_match_first_coerce() {
+ check_infer(
+ r#"
+ fn foo<T>(x: &[T]) -> &[T] { loop {} }
+ fn test(i: i32) {
+ let x = match i {
+ 2 => foo(&[2]),
+ 1 => &[1],
+ _ => &[3],
+ };
+ }
+
+ #[lang = "sized"]
+ pub trait Sized {}
+ #[lang = "unsize"]
+ pub trait Unsize<T: ?Sized> {}
+ "#,
+ expect![[r"
+ 10..11 'x': &[T]
+ 27..38 '{ loop {} }': &[T]
+ 29..36 'loop {}': !
+ 34..36 '{}': ()
+ 47..48 'i': i32
+ 55..149 '{ ... }; }': ()
+ 65..66 'x': &[i32]
+ 69..146 'match ... }': &[i32]
+ 75..76 'i': i32
+ 87..88 '2': i32
+ 87..88 '2': i32
+ 92..95 'foo': fn foo<i32>(&[i32]) -> &[i32]
+ 92..101 'foo(&[2])': &[i32]
+ 96..100 '&[2]': &[i32; _]
+ 97..100 '[2]': [i32; _]
+ 98..99 '2': i32
+ 111..112 '1': i32
+ 111..112 '1': i32
+ 116..120 '&[1]': &[i32; _]
+ 117..120 '[1]': [i32; _]
+ 118..119 '1': i32
+ 130..131 '_': i32
+ 135..139 '&[3]': &[i32; _]
+ 136..139 '[3]': [i32; _]
+ 137..138 '3': i32
+ "]],
+ );
+}
+
+#[test]
+fn infer_match_second_coerce() {
+ check_infer(
+ r#"
+ fn foo<T>(x: &[T]) -> &[T] { loop {} }
+ fn test(i: i32) {
+ let x = match i {
+ 1 => &[1],
+ 2 => foo(&[2]),
+ _ => &[3],
+ };
+ }
+
+ #[lang = "sized"]
+ pub trait Sized {}
+ #[lang = "unsize"]
+ pub trait Unsize<T: ?Sized> {}
+ #[lang = "coerce_unsized"]
+ pub trait CoerceUnsized<T> {}
+
+ impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
+ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
+ "#,
+ expect![[r"
+ 10..11 'x': &[T]
+ 27..38 '{ loop {} }': &[T]
+ 29..36 'loop {}': !
+ 34..36 '{}': ()
+ 47..48 'i': i32
+ 55..149 '{ ... }; }': ()
+ 65..66 'x': &[i32]
+ 69..146 'match ... }': &[i32]
+ 75..76 'i': i32
+ 87..88 '1': i32
+ 87..88 '1': i32
+ 92..96 '&[1]': &[i32; _]
+ 93..96 '[1]': [i32; _]
+ 94..95 '1': i32
+ 106..107 '2': i32
+ 106..107 '2': i32
+ 111..114 'foo': fn foo<i32>(&[i32]) -> &[i32]
+ 111..120 'foo(&[2])': &[i32]
+ 115..119 '&[2]': &[i32; _]
+ 116..119 '[2]': [i32; _]
+ 117..118 '2': i32
+ 130..131 '_': i32
+ 135..139 '&[3]': &[i32; _]
+ 136..139 '[3]': [i32; _]
+ 137..138 '3': i32
+ "]],
+ );
+}
+
+#[test]
+fn coerce_merge_one_by_one1() {
+ mark::check!(coerce_merge_fail_fallback);
+
+ check_infer(
+ r"
+ fn test() {
+ let t = &mut 1;
+ let x = match 1 {
+ 1 => t as *mut i32,
+ 2 => t as &i32,
+ _ => t as *const i32,
+ };
+ }
+ ",
+ expect![[r"
+ 10..144 '{ ... }; }': ()
+ 20..21 't': &mut i32
+ 24..30 '&mut 1': &mut i32
+ 29..30 '1': i32
+ 40..41 'x': *const i32
+ 44..141 'match ... }': *const i32
+ 50..51 '1': i32
+ 62..63 '1': i32
+ 62..63 '1': i32
+ 67..68 't': &mut i32
+ 67..80 't as *mut i32': *mut i32
+ 90..91 '2': i32
+ 90..91 '2': i32
+ 95..96 't': &mut i32
+ 95..104 't as &i32': &i32
+ 114..115 '_': i32
+ 119..120 't': &mut i32
+ 119..134 't as *const i32': *const i32
+ "]],
+ );
+}
+
+#[test]
+fn return_coerce_unknown() {
+ check_infer_with_mismatches(
+ r"
+ fn foo() -> u32 {
+ return unknown;
+ }
+ ",
+ expect![[r"
+ 16..39 '{ ...own; }': u32
+ 22..36 'return unknown': !
+ 29..36 'unknown': u32
+ "]],
+ );
+}
+
+#[test]
+fn coerce_autoderef() {
+ check_infer_with_mismatches(
+ r"
+ struct Foo;
+ fn takes_ref_foo(x: &Foo) {}
+ fn test() {
+ takes_ref_foo(&Foo);
+ takes_ref_foo(&&Foo);
+ takes_ref_foo(&&&Foo);
+ }
+ ",
+ expect![[r"
+ 29..30 'x': &Foo
+ 38..40 '{}': ()
+ 51..132 '{ ...oo); }': ()
+ 57..70 'takes_ref_foo': fn takes_ref_foo(&Foo)
+ 57..76 'takes_...(&Foo)': ()
+ 71..75 '&Foo': &Foo
+ 72..75 'Foo': Foo
+ 82..95 'takes_ref_foo': fn takes_ref_foo(&Foo)
+ 82..102 'takes_...&&Foo)': ()
+ 96..101 '&&Foo': &&Foo
+ 97..101 '&Foo': &Foo
+ 98..101 'Foo': Foo
+ 108..121 'takes_ref_foo': fn takes_ref_foo(&Foo)
+ 108..129 'takes_...&&Foo)': ()
+ 122..128 '&&&Foo': &&&Foo
+ 123..128 '&&Foo': &&Foo
+ 124..128 '&Foo': &Foo
+ 125..128 'Foo': Foo
+ "]],
+ );
+}
+
+#[test]
+fn coerce_autoderef_generic() {
+ check_infer_with_mismatches(
+ r"
+ struct Foo;
+ fn takes_ref<T>(x: &T) -> T { *x }
+ fn test() {
+ takes_ref(&Foo);
+ takes_ref(&&Foo);
+ takes_ref(&&&Foo);
+ }
+ ",
+ expect![[r"
+ 28..29 'x': &T
+ 40..46 '{ *x }': T
+ 42..44 '*x': T
+ 43..44 'x': &T
+ 57..126 '{ ...oo); }': ()
+ 63..72 'takes_ref': fn takes_ref<Foo>(&Foo) -> Foo
+ 63..78 'takes_ref(&Foo)': Foo
+ 73..77 '&Foo': &Foo
+ 74..77 'Foo': Foo
+ 84..93 'takes_ref': fn takes_ref<&Foo>(&&Foo) -> &Foo
+ 84..100 'takes_...&&Foo)': &Foo
+ 94..99 '&&Foo': &&Foo
+ 95..99 '&Foo': &Foo
+ 96..99 'Foo': Foo
+ 106..115 'takes_ref': fn takes_ref<&&Foo>(&&&Foo) -> &&Foo
+ 106..123 'takes_...&&Foo)': &&Foo
+ 116..122 '&&&Foo': &&&Foo
+ 117..122 '&&Foo': &&Foo
+ 118..122 '&Foo': &Foo
+ 119..122 'Foo': Foo
+ "]],
+ );
+}
+
+#[test]
+fn coerce_autoderef_block() {
+ check_infer_with_mismatches(
+ r#"
+ struct String {}
+ #[lang = "deref"]
+ trait Deref { type Target; }
+ impl Deref for String { type Target = str; }
+ fn takes_ref_str(x: &str) {}
+ fn returns_string() -> String { loop {} }
+ fn test() {
+ takes_ref_str(&{ returns_string() });
+ }
+ "#,
+ expect![[r"
+ 126..127 'x': &str
+ 135..137 '{}': ()
+ 168..179 '{ loop {} }': String
+ 170..177 'loop {}': !
+ 175..177 '{}': ()
+ 190..235 '{ ... }); }': ()
+ 196..209 'takes_ref_str': fn takes_ref_str(&str)
+ 196..232 'takes_...g() })': ()
+ 210..231 '&{ ret...ng() }': &String
+ 211..231 '{ retu...ng() }': String
+ 213..227 'returns_string': fn returns_string() -> String
+ 213..229 'return...ring()': String
+ "]],
+ );
+}
+
+#[test]
+fn closure_return_coerce() {
+ check_infer_with_mismatches(
+ r"
+ fn foo() {
+ let x = || {
+ if true {
+ return &1u32;
+ }
+ &&1u32
+ };
+ }
+ ",
+ expect![[r"
+ 9..105 '{ ... }; }': ()
+ 19..20 'x': || -> &u32
+ 23..102 '|| { ... }': || -> &u32
+ 26..102 '{ ... }': &u32
+ 36..81 'if tru... }': ()
+ 39..43 'true': bool
+ 44..81 '{ ... }': ()
+ 58..70 'return &1u32': !
+ 65..70 '&1u32': &u32
+ 66..70 '1u32': u32
+ 90..96 '&&1u32': &&u32
+ 91..96 '&1u32': &u32
+ 92..96 '1u32': u32
+ "]],
+ );
+}
+
+#[test]
+fn coerce_fn_item_to_fn_ptr() {
+ check_infer_with_mismatches(
+ r"
+ fn foo(x: u32) -> isize { 1 }
+ fn test() {
+ let f: fn(u32) -> isize = foo;
+ }
+ ",
+ expect![[r"
+ 7..8 'x': u32
+ 24..29 '{ 1 }': isize
+ 26..27 '1': isize
+ 40..78 '{ ...foo; }': ()
+ 50..51 'f': fn(u32) -> isize
+ 72..75 'foo': fn foo(u32) -> isize
+ "]],
+ );
+}
+
+#[test]
+fn coerce_fn_items_in_match_arms() {
+ mark::check!(coerce_fn_reification);
+
+ check_infer_with_mismatches(
+ r"
+ fn foo1(x: u32) -> isize { 1 }
+ fn foo2(x: u32) -> isize { 2 }
+ fn foo3(x: u32) -> isize { 3 }
+ fn test() {
+ let x = match 1 {
+ 1 => foo1,
+ 2 => foo2,
+ _ => foo3,
+ };
+ }
+ ",
+ expect![[r"
+ 8..9 'x': u32
+ 25..30 '{ 1 }': isize
+ 27..28 '1': isize
+ 39..40 'x': u32
+ 56..61 '{ 2 }': isize
+ 58..59 '2': isize
+ 70..71 'x': u32
+ 87..92 '{ 3 }': isize
+ 89..90 '3': isize
+ 103..192 '{ ... }; }': ()
+ 113..114 'x': fn(u32) -> isize
+ 117..189 'match ... }': fn(u32) -> isize
+ 123..124 '1': i32
+ 135..136 '1': i32
+ 135..136 '1': i32
+ 140..144 'foo1': fn foo1(u32) -> isize
+ 154..155 '2': i32
+ 154..155 '2': i32
+ 159..163 'foo2': fn foo2(u32) -> isize
+ 173..174 '_': i32
+ 178..182 'foo3': fn foo3(u32) -> isize
+ "]],
+ );
+}
+
+#[test]
+fn coerce_closure_to_fn_ptr() {
+ check_infer_with_mismatches(
+ r"
+ fn test() {
+ let f: fn(u32) -> isize = |x| { 1 };
+ }
+ ",
+ expect![[r"
+ 10..54 '{ ...1 }; }': ()
+ 20..21 'f': fn(u32) -> isize
+ 42..51 '|x| { 1 }': |u32| -> isize
+ 43..44 'x': u32
+ 46..51 '{ 1 }': isize
+ 48..49 '1': isize
+ "]],
+ );
+}
+
+#[test]
+fn coerce_placeholder_ref() {
+ // placeholders should unify, even behind references
+ check_infer_with_mismatches(
+ r"
+ struct S<T> { t: T }
+ impl<TT> S<TT> {
+ fn get(&self) -> &TT {
+ &self.t
+ }
+ }
+ ",
+ expect![[r"
+ 50..54 'self': &S<TT>
+ 63..86 '{ ... }': &TT
+ 73..80 '&self.t': &TT
+ 74..78 'self': &S<TT>
+ 74..80 'self.t': TT
+ "]],
+ );
+}
+
+#[test]
+fn coerce_unsize_array() {
+ check_infer_with_mismatches(
+ r#"
+ #[lang = "unsize"]
+ pub trait Unsize<T> {}
+ #[lang = "coerce_unsized"]
+ pub trait CoerceUnsized<T> {}
+
+ impl<T: Unsize<U>, U> CoerceUnsized<&U> for &T {}
+
+ fn test() {
+ let f: &[usize] = &[1, 2, 3];
+ }
+ "#,
+ expect![[r"
+ 161..198 '{ ... 3]; }': ()
+ 171..172 'f': &[usize]
+ 185..195 '&[1, 2, 3]': &[usize; _]
+ 186..195 '[1, 2, 3]': [usize; _]
+ 187..188 '1': usize
+ 190..191 '2': usize
+ 193..194 '3': usize
+ "]],
+ );
+}
+
+#[test]
+fn coerce_unsize_trait_object_simple() {
+ check_infer_with_mismatches(
+ r#"
+ #[lang = "sized"]
+ pub trait Sized {}
+ #[lang = "unsize"]
+ pub trait Unsize<T> {}
+ #[lang = "coerce_unsized"]
+ pub trait CoerceUnsized<T> {}
+
+ impl<T: Unsize<U>, U> CoerceUnsized<&U> for &T {}
+
+ trait Foo<T, U> {}
+ trait Bar<U, T, X>: Foo<T, U> {}
+ trait Baz<T, X>: Bar<usize, T, X> {}
+
+ struct S<T, X>;
+ impl<T, X> Foo<T, usize> for S<T, X> {}
+ impl<T, X> Bar<usize, T, X> for S<T, X> {}
+ impl<T, X> Baz<T, X> for S<T, X> {}
+
+ fn test() {
+ let obj: &dyn Baz<i8, i16> = &S;
+ let obj: &dyn Bar<_, i8, i16> = &S;
+ let obj: &dyn Foo<i8, _> = &S;
+ }
+ "#,
+ expect![[r"
+ 424..539 '{ ... &S; }': ()
+ 434..437 'obj': &dyn Baz<i8, i16>
+ 459..461 '&S': &S<i8, i16>
+ 460..461 'S': S<i8, i16>
+ 471..474 'obj': &dyn Bar<usize, i8, i16>
+ 499..501 '&S': &S<i8, i16>
+ 500..501 'S': S<i8, i16>
+ 511..514 'obj': &dyn Foo<i8, usize>
+ 534..536 '&S': &S<i8, {unknown}>
+ 535..536 'S': S<i8, {unknown}>
+ "]],
+ );
+}
+
+#[test]
+// The rust reference says this should be possible, but rustc doesn't implement
+// it. We used to support it, but Chalk doesn't.
+#[ignore]
+fn coerce_unsize_trait_object_to_trait_object() {
+ check_infer_with_mismatches(
+ r#"
+ #[lang = "sized"]
+ pub trait Sized {}
+ #[lang = "unsize"]
+ pub trait Unsize<T> {}
+ #[lang = "coerce_unsized"]
+ pub trait CoerceUnsized<T> {}
+
+ impl<T: Unsize<U>, U> CoerceUnsized<&U> for &T {}
+
+ trait Foo<T, U> {}
+ trait Bar<U, T, X>: Foo<T, U> {}
+ trait Baz<T, X>: Bar<usize, T, X> {}
+
+ struct S<T, X>;
+ impl<T, X> Foo<T, usize> for S<T, X> {}
+ impl<T, X> Bar<usize, T, X> for S<T, X> {}
+ impl<T, X> Baz<T, X> for S<T, X> {}
+
+ fn test() {
+ let obj: &dyn Baz<i8, i16> = &S;
+ let obj: &dyn Bar<_, _, _> = obj;
+ let obj: &dyn Foo<_, _> = obj;
+ let obj2: &dyn Baz<i8, i16> = &S;
+ let _: &dyn Foo<_, _> = obj2;
+ }
+ "#,
+ expect![[r"
+ 424..609 '{ ...bj2; }': ()
+ 434..437 'obj': &dyn Baz<i8, i16>
+ 459..461 '&S': &S<i8, i16>
+ 460..461 'S': S<i8, i16>
+ 471..474 'obj': &dyn Bar<usize, i8, i16>
+ 496..499 'obj': &dyn Baz<i8, i16>
+ 509..512 'obj': &dyn Foo<i8, usize>
+ 531..534 'obj': &dyn Bar<usize, i8, i16>
+ 544..548 'obj2': &dyn Baz<i8, i16>
+ 570..572 '&S': &S<i8, i16>
+ 571..572 'S': S<i8, i16>
+ 582..583 '_': &dyn Foo<i8, usize>
+ 602..606 'obj2': &dyn Baz<i8, i16>
+ "]],
+ );
+}
+
+#[test]
+fn coerce_unsize_super_trait_cycle() {
+ check_infer_with_mismatches(
+ r#"
+ #[lang = "sized"]
+ pub trait Sized {}
+ #[lang = "unsize"]
+ pub trait Unsize<T> {}
+ #[lang = "coerce_unsized"]
+ pub trait CoerceUnsized<T> {}
+
+ impl<T: Unsize<U>, U> CoerceUnsized<&U> for &T {}
+
+ trait A {}
+ trait B: C + A {}
+ trait C: B {}
+ trait D: C
+
+ struct S;
+ impl A for S {}
+ impl B for S {}
+ impl C for S {}
+ impl D for S {}
+
+ fn test() {
+ let obj: &dyn D = &S;
+ let obj: &dyn A = &S;
+ }
+ "#,
+ expect![[r"
+ 328..383 '{ ... &S; }': ()
+ 338..341 'obj': &dyn D
+ 352..354 '&S': &S
+ 353..354 'S': S
+ 364..367 'obj': &dyn A
+ 378..380 '&S': &S
+ 379..380 'S': S
+ "]],
+ );
+}
+
+#[ignore]
+#[test]
+fn coerce_unsize_generic() {
+ // FIXME: Implement this
+ // https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
+ check_infer_with_mismatches(
+ r#"
+ #[lang = "unsize"]
+ pub trait Unsize<T> {}
+ #[lang = "coerce_unsized"]
+ pub trait CoerceUnsized<T> {}
+
+ impl<T: Unsize<U>, U> CoerceUnsized<&U> for &T {}
+
+ struct Foo<T> { t: T };
+ struct Bar<T>(Foo<T>);
+
+ fn test() {
+ let _: &Foo<[usize]> = &Foo { t: [1, 2, 3] };
+ let _: &Bar<[usize]> = &Bar(Foo { t: [1, 2, 3] });
+ }
+ "#,
+ expect![[r"
+ "]],
+ );
+}
--- /dev/null
+use super::check_types_source_code;
+
+#[test]
+fn qualify_path_to_submodule() {
+ check_types_source_code(
+ r#"
+mod foo {
+ pub struct Foo;
+}
+
+fn bar() {
+ let foo: foo::Foo = foo::Foo;
+ foo
+} //^ foo::Foo
+
+"#,
+ );
+}
+
+#[test]
+fn omit_default_type_parameters() {
+ check_types_source_code(
+ r#"
+struct Foo<T = u8> { t: T }
+fn main() {
+ let foo = Foo { t: 5u8 };
+ foo;
+} //^ Foo
+"#,
+ );
+
+ check_types_source_code(
+ r#"
+struct Foo<K, T = u8> { k: K, t: T }
+fn main() {
+ let foo = Foo { k: 400, t: 5u8 };
+ foo;
+} //^ Foo<i32>
+"#,
+ );
+}
--- /dev/null
+use std::fs;
+
+use expect::expect;
+use test_utils::project_dir;
+
+use super::{check_infer, check_types};
+
+#[test]
+fn cfg_impl_def() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:foo cfg:test
+use foo::S as T;
+struct S;
+
+#[cfg(test)]
+impl S {
+ fn foo1(&self) -> i32 { 0 }
+}
+
+#[cfg(not(test))]
+impl S {
+ fn foo2(&self) -> i32 { 0 }
+}
+
+fn test() {
+ let t = (S.foo1(), S.foo2(), T.foo3(), T.foo4());
+ t;
+} //^ (i32, {unknown}, i32, {unknown})
+
+//- /foo.rs crate:foo
+struct S;
+
+#[cfg(not(test))]
+impl S {
+ fn foo3(&self) -> i32 { 0 }
+}
+
+#[cfg(test)]
+impl S {
+ fn foo4(&self) -> i32 { 0 }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_macros_expanded() {
+ check_infer(
+ r#"
+ struct Foo(Vec<i32>);
+
+ macro_rules! foo {
+ ($($item:expr),*) => {
+ {
+ Foo(vec![$($item,)*])
+ }
+ };
+ }
+
+ fn main() {
+ let x = foo!(1,2);
+ }
+ "#,
+ expect![[r#"
+ !0..17 '{Foo(v...,2,])}': Foo
+ !1..4 'Foo': Foo({unknown}) -> Foo
+ !1..16 'Foo(vec![1,2,])': Foo
+ !5..15 'vec![1,2,]': {unknown}
+ 155..181 '{ ...,2); }': ()
+ 165..166 'x': Foo
+ "#]],
+ );
+}
+
+#[test]
+fn infer_legacy_textual_scoped_macros_expanded() {
+ check_infer(
+ r#"
+ struct Foo(Vec<i32>);
+
+ #[macro_use]
+ mod m {
+ macro_rules! foo {
+ ($($item:expr),*) => {
+ {
+ Foo(vec![$($item,)*])
+ }
+ };
+ }
+ }
+
+ fn main() {
+ let x = foo!(1,2);
+ let y = crate::foo!(1,2);
+ }
+ "#,
+ expect![[r#"
+ !0..17 '{Foo(v...,2,])}': Foo
+ !1..4 'Foo': Foo({unknown}) -> Foo
+ !1..16 'Foo(vec![1,2,])': Foo
+ !5..15 'vec![1,2,]': {unknown}
+ 194..250 '{ ...,2); }': ()
+ 204..205 'x': Foo
+ 227..228 'y': {unknown}
+ 231..247 'crate:...!(1,2)': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn infer_path_qualified_macros_expanded() {
+ check_infer(
+ r#"
+ #[macro_export]
+ macro_rules! foo {
+ () => { 42i32 }
+ }
+
+ mod m {
+ pub use super::foo as bar;
+ }
+
+ fn main() {
+ let x = crate::foo!();
+ let y = m::bar!();
+ }
+ "#,
+ expect![[r#"
+ !0..5 '42i32': i32
+ !0..5 '42i32': i32
+ 110..163 '{ ...!(); }': ()
+ 120..121 'x': i32
+ 147..148 'y': i32
+ "#]],
+ );
+}
+
+#[test]
+fn expr_macro_expanded_in_various_places() {
+ check_infer(
+ r#"
+ macro_rules! spam {
+ () => (1isize);
+ }
+
+ fn spam() {
+ spam!();
+ (spam!());
+ spam!().spam(spam!());
+ for _ in spam!() {}
+ || spam!();
+ while spam!() {}
+ break spam!();
+ return spam!();
+ match spam!() {
+ _ if spam!() => spam!(),
+ }
+ spam!()(spam!());
+ Spam { spam: spam!() };
+ spam!()[spam!()];
+ await spam!();
+ spam!() as usize;
+ &spam!();
+ -spam!();
+ spam!()..spam!();
+ spam!() + spam!();
+ }
+ "#,
+ expect![[r#"
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ !0..6 '1isize': isize
+ 53..456 '{ ...!(); }': ()
+ 87..108 'spam!(...am!())': {unknown}
+ 114..133 'for _ ...!() {}': ()
+ 118..119 '_': {unknown}
+ 131..133 '{}': ()
+ 138..148 '|| spam!()': || -> isize
+ 154..170 'while ...!() {}': ()
+ 168..170 '{}': ()
+ 175..188 'break spam!()': !
+ 194..208 'return spam!()': !
+ 214..268 'match ... }': isize
+ 238..239 '_': isize
+ 273..289 'spam!(...am!())': {unknown}
+ 295..317 'Spam {...m!() }': {unknown}
+ 323..339 'spam!(...am!()]': {unknown}
+ 364..380 'spam!(... usize': usize
+ 386..394 '&spam!()': &isize
+ 400..408 '-spam!()': isize
+ 414..430 'spam!(...pam!()': {unknown}
+ 436..453 'spam!(...pam!()': isize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_type_value_macro_having_same_name() {
+ check_infer(
+ r#"
+ #[macro_export]
+ macro_rules! foo {
+ () => {
+ mod foo {
+ pub use super::foo;
+ }
+ };
+ ($x:tt) => {
+ $x
+ };
+ }
+
+ foo!();
+
+ fn foo() {
+ let foo = foo::foo!(42i32);
+ }
+ "#,
+ expect![[r#"
+ !0..5 '42i32': i32
+ 170..205 '{ ...32); }': ()
+ 180..183 'foo': i32
+ "#]],
+ );
+}
+
+#[test]
+fn processes_impls_generated_by_macros() {
+ check_types(
+ r#"
+macro_rules! m {
+ ($ident:ident) => (impl Trait for $ident {})
+}
+trait Trait { fn foo(self) -> u128 {} }
+struct S;
+m!(S);
+fn test() { S.foo(); }
+ //^ u128
+"#,
+ );
+}
+
+#[test]
+fn infer_assoc_items_generated_by_macros() {
+ check_types(
+ r#"
+macro_rules! m {
+ () => (fn foo(&self) -> u128 {0})
+}
+struct S;
+impl S {
+ m!();
+}
+
+fn test() { S.foo(); }
+ //^ u128
+"#,
+ );
+}
+
+#[test]
+fn infer_assoc_items_generated_by_macros_chain() {
+ check_types(
+ r#"
+macro_rules! m_inner {
+ () => {fn foo(&self) -> u128 {0}}
+}
+macro_rules! m {
+ () => {m_inner!();}
+}
+
+struct S;
+impl S {
+ m!();
+}
+
+fn test() { S.foo(); }
+ //^ u128
+"#,
+ );
+}
+
+#[test]
+fn infer_macro_with_dollar_crate_is_correct_in_expr() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:foo
+fn test() {
+ let x = (foo::foo!(1), foo::foo!(2));
+ x;
+} //^ (i32, usize)
+
+//- /lib.rs crate:foo
+#[macro_export]
+macro_rules! foo {
+ (1) => { $crate::bar!() };
+ (2) => { 1 + $crate::baz() };
+}
+
+#[macro_export]
+macro_rules! bar {
+ () => { 42 }
+}
+
+pub fn baz() -> usize { 31usize }
+"#,
+ );
+}
+
+#[test]
+fn infer_macro_with_dollar_crate_is_correct_in_trait_associate_type() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:foo
+use foo::Trait;
+
+fn test() {
+ let msg = foo::Message(foo::MessageRef);
+ let r = msg.deref();
+ r;
+ //^ &MessageRef
+}
+
+//- /lib.rs crate:foo
+pub struct MessageRef;
+pub struct Message(MessageRef);
+
+pub trait Trait {
+ type Target;
+ fn deref(&self) -> &Self::Target;
+}
+
+#[macro_export]
+macro_rules! expand {
+ () => {
+ impl Trait for Message {
+ type Target = $crate::MessageRef;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+ }
+ }
+}
+
+expand!();
+"#,
+ );
+}
+
+#[test]
+fn infer_type_value_non_legacy_macro_use_as() {
+ check_infer(
+ r#"
+ mod m {
+ macro_rules! _foo {
+ ($x:ident) => { type $x = u64; }
+ }
+ pub(crate) use _foo as foo;
+ }
+
+ m::foo!(foo);
+ use foo as bar;
+ fn f() -> bar { 0 }
+ fn main() {
+ let _a = f();
+ }
+ "#,
+ expect![[r#"
+ 158..163 '{ 0 }': u64
+ 160..161 '0': u64
+ 174..196 '{ ...f(); }': ()
+ 184..186 '_a': u64
+ 190..191 'f': fn f() -> u64
+ 190..193 'f()': u64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_local_macro() {
+ check_infer(
+ r#"
+ fn main() {
+ macro_rules! foo {
+ () => { 1usize }
+ }
+ let _a = foo!();
+ }
+ "#,
+ expect![[r#"
+ !0..6 '1usize': usize
+ 10..89 '{ ...!(); }': ()
+ 16..65 'macro_... }': {unknown}
+ 74..76 '_a': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_local_inner_macros() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:foo
+fn test() {
+ let x = foo::foo!(1);
+ x;
+} //^ i32
+
+//- /lib.rs crate:foo
+#[macro_export(local_inner_macros)]
+macro_rules! foo {
+ (1) => { bar!() };
+}
+
+#[macro_export]
+macro_rules! bar {
+ () => { 42 }
+}
+
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_line() {
+ check_infer(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! line {() => {}}
+
+ fn main() {
+ let x = line!();
+ }
+ "#,
+ expect![[r#"
+ !0..1 '0': i32
+ 63..87 '{ ...!(); }': ()
+ 73..74 'x': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_file() {
+ check_infer(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! file {() => {}}
+
+ fn main() {
+ let x = file!();
+ }
+ "#,
+ expect![[r#"
+ !0..2 '""': &str
+ 63..87 '{ ...!(); }': ()
+ 73..74 'x': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_column() {
+ check_infer(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! column {() => {}}
+
+ fn main() {
+ let x = column!();
+ }
+ "#,
+ expect![[r#"
+ !0..1 '0': i32
+ 65..91 '{ ...!(); }': ()
+ 75..76 'x': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_concat() {
+ check_infer(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! concat {() => {}}
+
+ fn main() {
+ let x = concat!("hello", concat!("world", "!"));
+ }
+ "#,
+ expect![[r#"
+ !0..13 '"helloworld!"': &str
+ 65..121 '{ ...")); }': ()
+ 75..76 'x': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+include!("foo.rs");
+
+fn main() {
+ bar();
+} //^ u32
+
+//- /foo.rs
+fn bar() -> u32 {0}
+"#,
+ );
+}
+
+#[test]
+#[ignore]
+fn include_accidentally_quadratic() {
+ let file = project_dir().join("crates/syntax/test_data/accidentally_quadratic");
+ let big_file = fs::read_to_string(file).unwrap();
+ let big_file = vec![big_file; 10].join("\n");
+
+ let fixture = r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+include!("foo.rs");
+
+fn main() {
+ RegisterBlock { };
+ //^ RegisterBlock
+}
+ "#;
+ let fixture = format!("{}\n//- /foo.rs\n{}", fixture, big_file);
+ check_types(&fixture);
+}
+
+#[test]
+fn infer_builtin_macros_include_concat() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+#[rustc_builtin_macro]
+macro_rules! concat {() => {}}
+
+include!(concat!("f", "oo.rs"));
+
+fn main() {
+ bar();
+} //^ u32
+
+//- /foo.rs
+fn bar() -> u32 {0}
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include_concat_with_bad_env_should_failed() {
+ check_types(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+#[rustc_builtin_macro]
+macro_rules! concat {() => {}}
+
+#[rustc_builtin_macro]
+macro_rules! env {() => {}}
+
+include!(concat!(env!("OUT_DIR"), "/foo.rs"));
+
+fn main() {
+ bar();
+} //^ {unknown}
+
+//- /foo.rs
+fn bar() -> u32 {0}
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_include_itself_should_failed() {
+ check_types(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! include {() => {}}
+
+include!("main.rs");
+
+fn main() {
+ 0
+} //^ i32
+"#,
+ );
+}
+
+#[test]
+fn infer_builtin_macros_concat_with_lazy() {
+ check_infer(
+ r#"
+ macro_rules! hello {() => {"hello"}}
+
+ #[rustc_builtin_macro]
+ macro_rules! concat {() => {}}
+
+ fn main() {
+ let x = concat!(hello!(), concat!("world", "!"));
+ }
+ "#,
+ expect![[r#"
+ !0..13 '"helloworld!"': &str
+ 103..160 '{ ...")); }': ()
+ 113..114 'x': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_builtin_macros_env() {
+ check_infer(
+ r#"
+ //- /main.rs env:foo=bar
+ #[rustc_builtin_macro]
+ macro_rules! env {() => {}}
+
+ fn main() {
+ let x = env!("foo");
+ }
+ "#,
+ expect![[r#"
+ !0..22 '"__RA_...TED__"': &str
+ 62..90 '{ ...o"); }': ()
+ 72..73 'x': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_derive_clone_simple() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+#[derive(Clone)]
+struct S;
+fn test() {
+ S.clone();
+} //^ S
+
+//- /lib.rs crate:core
+#[prelude_import]
+use clone::*;
+mod clone {
+ trait Clone {
+ fn clone(&self) -> Self;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_derive_clone_in_core() {
+ check_types(
+ r#"
+//- /lib.rs crate:core
+#[prelude_import]
+use clone::*;
+mod clone {
+ trait Clone {
+ fn clone(&self) -> Self;
+ }
+}
+#[derive(Clone)]
+pub struct S;
+
+//- /main.rs crate:main deps:core
+use core::S;
+fn test() {
+ S.clone();
+} //^ S
+"#,
+ );
+}
+
+#[test]
+fn infer_derive_clone_with_params() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+#[derive(Clone)]
+struct S;
+#[derive(Clone)]
+struct Wrapper<T>(T);
+struct NonClone;
+fn test() {
+ (Wrapper(S).clone(), Wrapper(NonClone).clone());
+ //^ (Wrapper<S>, {unknown})
+}
+
+//- /lib.rs crate:core
+#[prelude_import]
+use clone::*;
+mod clone {
+ trait Clone {
+ fn clone(&self) -> Self;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_custom_derive_simple() {
+ // FIXME: currently this test does nothing
+ check_types(
+ r#"
+//- /main.rs crate:main
+use foo::Foo;
+
+#[derive(Foo)]
+struct S{}
+
+fn test() {
+ S{};
+} //^ S
+"#,
+ );
+}
+
+#[test]
+fn macro_in_arm() {
+ check_infer(
+ r#"
+ macro_rules! unit {
+ () => { () };
+ }
+
+ fn main() {
+ let x = match () {
+ unit!() => 92u32,
+ };
+ }
+ "#,
+ expect![[r#"
+ 51..110 '{ ... }; }': ()
+ 61..62 'x': u32
+ 65..107 'match ... }': u32
+ 71..73 '()': ()
+ 84..91 'unit!()': ()
+ 95..100 '92u32': u32
+ "#]],
+ );
+}
--- /dev/null
+use expect::expect;
+
+use super::{check_infer, check_types};
+
+#[test]
+fn infer_slice_method() {
+ check_infer(
+ r#"
+ #[lang = "slice"]
+ impl<T> [T] {
+ fn foo(&self) -> T {
+ loop {}
+ }
+ }
+
+ #[lang = "slice_alloc"]
+ impl<T> [T] {}
+
+ fn test(x: &[u8]) {
+ <[_]>::foo(x);
+ }
+ "#,
+ expect![[r#"
+ 44..48 'self': &[T]
+ 55..78 '{ ... }': T
+ 65..72 'loop {}': !
+ 70..72 '{}': ()
+ 130..131 'x': &[u8]
+ 140..162 '{ ...(x); }': ()
+ 146..156 '<[_]>::foo': fn foo<u8>(&[u8]) -> u8
+ 146..159 '<[_]>::foo(x)': u8
+ 157..158 'x': &[u8]
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_struct() {
+ check_infer(
+ r#"
+ struct A { x: u32 }
+
+ impl A {
+ fn new() -> A {
+ A { x: 0 }
+ }
+ }
+ fn test() {
+ let a = A::new();
+ a.x;
+ }
+ "#,
+ expect![[r#"
+ 48..74 '{ ... }': A
+ 58..68 'A { x: 0 }': A
+ 65..66 '0': u32
+ 87..121 '{ ...a.x; }': ()
+ 97..98 'a': A
+ 101..107 'A::new': fn new() -> A
+ 101..109 'A::new()': A
+ 115..116 'a': A
+ 115..118 'a.x': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_enum() {
+ check_infer(
+ r#"
+ enum A { B, C }
+
+ impl A {
+ pub fn b() -> A {
+ A::B
+ }
+ pub fn c() -> A {
+ A::C
+ }
+ }
+ fn test() {
+ let a = A::b();
+ a;
+ let c = A::c();
+ c;
+ }
+ "#,
+ expect![[r#"
+ 46..66 '{ ... }': A
+ 56..60 'A::B': A
+ 87..107 '{ ... }': A
+ 97..101 'A::C': A
+ 120..177 '{ ... c; }': ()
+ 130..131 'a': A
+ 134..138 'A::b': fn b() -> A
+ 134..140 'A::b()': A
+ 146..147 'a': A
+ 157..158 'c': A
+ 161..165 'A::c': fn c() -> A
+ 161..167 'A::c()': A
+ 173..174 'c': A
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_with_modules() {
+ check_infer(
+ r#"
+ mod a {
+ struct A;
+ impl A { pub fn thing() -> A { A {} }}
+ }
+
+ mod b {
+ struct B;
+ impl B { pub fn thing() -> u32 { 99 }}
+
+ mod c {
+ struct C;
+ impl C { pub fn thing() -> C { C {} }}
+ }
+ }
+ use b::c;
+
+ fn test() {
+ let x = a::A::thing();
+ let y = b::B::thing();
+ let z = c::C::thing();
+ }
+ "#,
+ expect![[r#"
+ 55..63 '{ A {} }': A
+ 57..61 'A {}': A
+ 125..131 '{ 99 }': u32
+ 127..129 '99': u32
+ 201..209 '{ C {} }': C
+ 203..207 'C {}': C
+ 240..324 '{ ...g(); }': ()
+ 250..251 'x': A
+ 254..265 'a::A::thing': fn thing() -> A
+ 254..267 'a::A::thing()': A
+ 277..278 'y': u32
+ 281..292 'b::B::thing': fn thing() -> u32
+ 281..294 'b::B::thing()': u32
+ 304..305 'z': C
+ 308..319 'c::C::thing': fn thing() -> C
+ 308..321 'c::C::thing()': C
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_generics() {
+ check_infer(
+ r#"
+ struct Gen<T> {
+ val: T
+ }
+
+ impl<T> Gen<T> {
+ pub fn make(val: T) -> Gen<T> {
+ Gen { val }
+ }
+ }
+
+ fn test() {
+ let a = Gen::make(0u32);
+ }
+ "#,
+ expect![[r#"
+ 63..66 'val': T
+ 81..108 '{ ... }': Gen<T>
+ 91..102 'Gen { val }': Gen<T>
+ 97..100 'val': T
+ 122..154 '{ ...32); }': ()
+ 132..133 'a': Gen<u32>
+ 136..145 'Gen::make': fn make<u32>(u32) -> Gen<u32>
+ 136..151 'Gen::make(0u32)': Gen<u32>
+ 146..150 '0u32': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_generics_without_args() {
+ check_infer(
+ r#"
+ struct Gen<T> {
+ val: T
+ }
+
+ impl<T> Gen<T> {
+ pub fn make() -> Gen<T> {
+ loop { }
+ }
+ }
+
+ fn test() {
+ let a = Gen::<u32>::make();
+ }
+ "#,
+ expect![[r#"
+ 75..99 '{ ... }': Gen<T>
+ 85..93 'loop { }': !
+ 90..93 '{ }': ()
+ 113..148 '{ ...e(); }': ()
+ 123..124 'a': Gen<u32>
+ 127..143 'Gen::<...::make': fn make<u32>() -> Gen<u32>
+ 127..145 'Gen::<...make()': Gen<u32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_generics_2_type_params_without_args() {
+ check_infer(
+ r#"
+ struct Gen<T, U> {
+ val: T,
+ val2: U,
+ }
+
+ impl<T> Gen<u32, T> {
+ pub fn make() -> Gen<u32,T> {
+ loop { }
+ }
+ }
+
+ fn test() {
+ let a = Gen::<u32, u64>::make();
+ }
+ "#,
+ expect![[r#"
+ 101..125 '{ ... }': Gen<u32, T>
+ 111..119 'loop { }': !
+ 116..119 '{ }': ()
+ 139..179 '{ ...e(); }': ()
+ 149..150 'a': Gen<u32, u64>
+ 153..174 'Gen::<...::make': fn make<u64>() -> Gen<u32, u64>
+ 153..176 'Gen::<...make()': Gen<u32, u64>
+ "#]],
+ );
+}
+
+#[test]
+fn cross_crate_associated_method_call() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:other_crate
+fn test() {
+ let x = other_crate::foo::S::thing();
+ x;
+} //^ i128
+
+//- /lib.rs crate:other_crate
+mod foo {
+ struct S;
+ impl S {
+ fn thing() -> i128 {}
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_trait_method_simple() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_infer(
+ r#"
+ trait Trait1 {
+ fn method(&self) -> u32;
+ }
+ struct S1;
+ impl Trait1 for S1 {}
+ trait Trait2 {
+ fn method(&self) -> i128;
+ }
+ struct S2;
+ impl Trait2 for S2 {}
+ fn test() {
+ S1.method(); // -> u32
+ S2.method(); // -> i128
+ }
+ "#,
+ expect![[r#"
+ 30..34 'self': &Self
+ 109..113 'self': &Self
+ 169..227 '{ ...i128 }': ()
+ 175..177 'S1': S1
+ 175..186 'S1.method()': u32
+ 202..204 'S2': S2
+ 202..213 'S2.method()': i128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_method_scoped() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_infer(
+ r#"
+ struct S;
+ mod foo {
+ pub trait Trait1 {
+ fn method(&self) -> u32;
+ }
+ impl Trait1 for super::S {}
+ }
+ mod bar {
+ pub trait Trait2 {
+ fn method(&self) -> i128;
+ }
+ impl Trait2 for super::S {}
+ }
+
+ mod foo_test {
+ use super::S;
+ use super::foo::Trait1;
+ fn test() {
+ S.method(); // -> u32
+ }
+ }
+
+ mod bar_test {
+ use super::S;
+ use super::bar::Trait2;
+ fn test() {
+ S.method(); // -> i128
+ }
+ }
+ "#,
+ expect![[r#"
+ 62..66 'self': &Self
+ 168..172 'self': &Self
+ 299..336 '{ ... }': ()
+ 309..310 'S': S
+ 309..319 'S.method()': u32
+ 415..453 '{ ... }': ()
+ 425..426 'S': S
+ 425..435 'S.method()': i128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_method_generic_1() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn method(&self) -> T;
+ }
+ struct S;
+ impl Trait<u32> for S {}
+ fn test() {
+ S.method();
+ }
+ "#,
+ expect![[r#"
+ 32..36 'self': &Self
+ 91..110 '{ ...d(); }': ()
+ 97..98 'S': S
+ 97..107 'S.method()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_method_generic_more_params() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_infer(
+ r#"
+ trait Trait<T1, T2, T3> {
+ fn method1(&self) -> (T1, T2, T3);
+ fn method2(&self) -> (T3, T2, T1);
+ }
+ struct S1;
+ impl Trait<u8, u16, u32> for S1 {}
+ struct S2;
+ impl<T> Trait<i8, i16, T> for S2 {}
+ fn test() {
+ S1.method1(); // u8, u16, u32
+ S1.method2(); // u32, u16, u8
+ S2.method1(); // i8, i16, {unknown}
+ S2.method2(); // {unknown}, i16, i8
+ }
+ "#,
+ expect![[r#"
+ 42..46 'self': &Self
+ 81..85 'self': &Self
+ 209..360 '{ ..., i8 }': ()
+ 215..217 'S1': S1
+ 215..227 'S1.method1()': (u8, u16, u32)
+ 249..251 'S1': S1
+ 249..261 'S1.method2()': (u32, u16, u8)
+ 283..285 'S2': S2
+ 283..295 'S2.method1()': (i8, i16, {unknown})
+ 323..325 'S2': S2
+ 323..335 'S2.method2()': ({unknown}, i16, i8)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_method_generic_2() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn method(&self) -> T;
+ }
+ struct S<T>(T);
+ impl<U> Trait<U> for S<U> {}
+ fn test() {
+ S(1u32).method();
+ }
+ "#,
+ expect![[r#"
+ 32..36 'self': &Self
+ 101..126 '{ ...d(); }': ()
+ 107..108 'S': S<u32>(u32) -> S<u32>
+ 107..114 'S(1u32)': S<u32>
+ 107..123 'S(1u32...thod()': u32
+ 109..113 '1u32': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method() {
+ check_infer(
+ r#"
+ trait Default {
+ fn default() -> Self;
+ }
+ struct S;
+ impl Default for S {}
+ fn test() {
+ let s1: S = Default::default();
+ let s2 = S::default();
+ let s3 = <S as Default>::default();
+ }
+ "#,
+ expect![[r#"
+ 86..192 '{ ...t(); }': ()
+ 96..98 's1': S
+ 104..120 'Defaul...efault': fn default<S>() -> S
+ 104..122 'Defaul...ault()': S
+ 132..134 's2': S
+ 137..147 'S::default': fn default<S>() -> S
+ 137..149 'S::default()': S
+ 159..161 's3': S
+ 164..187 '<S as ...efault': fn default<S>() -> S
+ 164..189 '<S as ...ault()': S
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_1() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make() -> T;
+ }
+ struct S;
+ impl Trait<u32> for S {}
+ struct G<T>;
+ impl<T> Trait<T> for G<T> {}
+ fn test() {
+ let a = S::make();
+ let b = G::<u64>::make();
+ let c: f64 = G::make();
+ }
+ "#,
+ expect![[r#"
+ 126..210 '{ ...e(); }': ()
+ 136..137 'a': u32
+ 140..147 'S::make': fn make<S, u32>() -> u32
+ 140..149 'S::make()': u32
+ 159..160 'b': u64
+ 163..177 'G::<u64>::make': fn make<G<u64>, u64>() -> u64
+ 163..179 'G::<u6...make()': u64
+ 189..190 'c': f64
+ 198..205 'G::make': fn make<G<f64>, f64>() -> f64
+ 198..207 'G::make()': f64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_2() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make<U>() -> (T, U);
+ }
+ struct S;
+ impl Trait<u32> for S {}
+ struct G<T>;
+ impl<T> Trait<T> for G<T> {}
+ fn test() {
+ let a = S::make::<i64>();
+ let b: (_, i64) = S::make();
+ let c = G::<u32>::make::<i64>();
+ let d: (u32, _) = G::make::<i64>();
+ let e: (u32, i64) = G::make();
+ }
+ "#,
+ expect![[r#"
+ 134..312 '{ ...e(); }': ()
+ 144..145 'a': (u32, i64)
+ 148..162 'S::make::<i64>': fn make<S, u32, i64>() -> (u32, i64)
+ 148..164 'S::mak...i64>()': (u32, i64)
+ 174..175 'b': (u32, i64)
+ 188..195 'S::make': fn make<S, u32, i64>() -> (u32, i64)
+ 188..197 'S::make()': (u32, i64)
+ 207..208 'c': (u32, i64)
+ 211..232 'G::<u3...:<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64)
+ 211..234 'G::<u3...i64>()': (u32, i64)
+ 244..245 'd': (u32, i64)
+ 258..272 'G::make::<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64)
+ 258..274 'G::mak...i64>()': (u32, i64)
+ 284..285 'e': (u32, i64)
+ 300..307 'G::make': fn make<G<u32>, u32, i64>() -> (u32, i64)
+ 300..309 'G::make()': (u32, i64)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_3() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make() -> (Self, T);
+ }
+ struct S<T>;
+ impl Trait<i64> for S<i32> {}
+ fn test() {
+ let a = S::make();
+ }
+ "#,
+ expect![[r#"
+ 100..126 '{ ...e(); }': ()
+ 110..111 'a': (S<i32>, i64)
+ 114..121 'S::make': fn make<S<i32>, i64>() -> (S<i32>, i64)
+ 114..123 'S::make()': (S<i32>, i64)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_4() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make() -> (Self, T);
+ }
+ struct S<T>;
+ impl Trait<i64> for S<u64> {}
+ impl Trait<i32> for S<u32> {}
+ fn test() {
+ let a: (S<u64>, _) = S::make();
+ let b: (_, i32) = S::make();
+ }
+ "#,
+ expect![[r#"
+ 130..202 '{ ...e(); }': ()
+ 140..141 'a': (S<u64>, i64)
+ 157..164 'S::make': fn make<S<u64>, i64>() -> (S<u64>, i64)
+ 157..166 'S::make()': (S<u64>, i64)
+ 176..177 'b': (S<u32>, i32)
+ 190..197 'S::make': fn make<S<u32>, i32>() -> (S<u32>, i32)
+ 190..199 'S::make()': (S<u32>, i32)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_5() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make<U>() -> (Self, T, U);
+ }
+ struct S<T>;
+ impl Trait<i64> for S<u64> {}
+ fn test() {
+ let a = <S as Trait<i64>>::make::<u8>();
+ let b: (S<u64>, _, _) = Trait::<i64>::make::<u8>();
+ }
+ "#,
+ expect![[r#"
+ 106..210 '{ ...>(); }': ()
+ 116..117 'a': (S<u64>, i64, u8)
+ 120..149 '<S as ...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
+ 120..151 '<S as ...<u8>()': (S<u64>, i64, u8)
+ 161..162 'b': (S<u64>, i64, u8)
+ 181..205 'Trait:...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
+ 181..207 'Trait:...<u8>()': (S<u64>, i64, u8)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_call_trait_method_on_generic_param_1() {
+ check_infer(
+ r#"
+ trait Trait {
+ fn method(&self) -> u32;
+ }
+ fn test<T: Trait>(t: T) {
+ t.method();
+ }
+ "#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 63..64 't': T
+ 69..88 '{ ...d(); }': ()
+ 75..76 't': T
+ 75..85 't.method()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_call_trait_method_on_generic_param_2() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn method(&self) -> T;
+ }
+ fn test<U, T: Trait<U>>(t: T) {
+ t.method();
+ }
+ "#,
+ expect![[r#"
+ 32..36 'self': &Self
+ 70..71 't': T
+ 76..95 '{ ...d(); }': ()
+ 82..83 't': T
+ 82..92 't.method()': U
+ "#]],
+ );
+}
+
+#[test]
+fn infer_with_multiple_trait_impls() {
+ check_infer(
+ r#"
+ trait Into<T> {
+ fn into(self) -> T;
+ }
+ struct S;
+ impl Into<u32> for S {}
+ impl Into<u64> for S {}
+ fn test() {
+ let x: u32 = S.into();
+ let y: u64 = S.into();
+ let z = Into::<u64>::into(S);
+ }
+ "#,
+ expect![[r#"
+ 28..32 'self': Self
+ 110..201 '{ ...(S); }': ()
+ 120..121 'x': u32
+ 129..130 'S': S
+ 129..137 'S.into()': u32
+ 147..148 'y': u64
+ 156..157 'S': S
+ 156..164 'S.into()': u64
+ 174..175 'z': u64
+ 178..195 'Into::...::into': fn into<S, u64>(S) -> u64
+ 178..198 'Into::...nto(S)': u64
+ 196..197 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn method_resolution_unify_impl_self_type() {
+ check_types(
+ r#"
+struct S<T>;
+impl S<u32> { fn foo(&self) -> u8 {} }
+impl S<i32> { fn foo(&self) -> i8 {} }
+fn test() { (S::<u32>.foo(), S::<i32>.foo()); }
+ //^ (u8, i8)
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_before_autoref() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(&self) -> i8 { 0 } }
+impl Trait for S { fn foo(self) -> u128 { 0 } }
+fn test() { S.foo(); }
+ //^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_by_value_before_autoref() {
+ check_types(
+ r#"
+trait Clone { fn clone(&self) -> Self; }
+struct S;
+impl Clone for S {}
+impl Clone for &S {}
+fn test() { (S.clone(), (&S).clone(), (&&S).clone()); }
+ //^ (S, S, &S)
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_before_autoderef() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(self) -> i8 { 0 } }
+impl Trait for &S { fn foo(self) -> u128 { 0 } }
+fn test() { (&S).foo(); }
+ //^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_impl_before_trait() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(self) -> i8 { 0 } }
+impl Trait for S { fn foo(self) -> u128 { 0 } }
+fn test() { S.foo(); }
+ //^ i8
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_impl_ref_before_trait() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(&self) -> i8 { 0 } }
+impl Trait for &S { fn foo(self) -> u128 { 0 } }
+fn test() { S.foo(); }
+ //^ i8
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_autoderef() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Trait for S { fn foo(self) -> u128 { 0 } }
+fn test() { (&S).foo(); }
+ //^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_unsize_array() {
+ check_types(
+ r#"
+#[lang = "slice"]
+impl<T> [T] {
+ fn len(&self) -> usize { loop {} }
+}
+fn test() {
+ let a = [1, 2, 3];
+ a.len();
+} //^ usize
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_from_prelude() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:other_crate
+struct S;
+impl Clone for S {}
+
+fn test() {
+ S.clone();
+ //^ S
+}
+
+//- /lib.rs crate:other_crate
+#[prelude_import] use foo::*;
+
+mod foo {
+ trait Clone {
+ fn clone(&self) -> Self;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_for_unknown_trait() {
+ // The blanket impl currently applies because we ignore the unresolved where clause
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl<T> Trait for T where T: UnknownTrait {}
+fn test() { (&S).foo(); }
+ //^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_not_met() {
+ // The blanket impl shouldn't apply because we can't prove S: Clone
+ // This is also to make sure that we don't resolve to the foo method just
+ // because that's the only method named foo we can find, which would make
+ // the below tests not work
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl<T> Trait for T where T: Clone {}
+fn test() { (&S).foo(); }
+ //^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_inline_not_met() {
+ // The blanket impl shouldn't apply because we can't prove S: Clone
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl<T: Clone> Trait for T {}
+fn test() { (&S).foo(); }
+ //^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_1() {
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Clone for S {}
+impl<T> Trait for T where T: Clone {}
+fn test() { S.foo(); }
+ //^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_2() {
+ check_types(
+ r#"
+trait Into<T> { fn into(self) -> T; }
+trait From<T> { fn from(other: T) -> Self; }
+struct S1;
+struct S2;
+impl From<S2> for S1 {}
+impl<T, U> Into<U> for T where U: From<T> {}
+fn test() { S2.into(); }
+ //^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_inline() {
+ check_types(
+ r#"
+trait Into<T> { fn into(self) -> T; }
+trait From<T> { fn from(other: T) -> Self; }
+struct S1;
+struct S2;
+impl From<S2> for S1 {}
+impl<T, U: From<T>> Into<U> for T {}
+fn test() { S2.into(); }
+ //^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_overloaded_method() {
+ test_utils::mark::check!(impl_self_type_match_without_receiver);
+ check_types(
+ r#"
+struct Wrapper<T>(T);
+struct Foo<T>(T);
+struct Bar<T>(T);
+
+impl<T> Wrapper<Foo<T>> {
+ pub fn new(foo_: T) -> Self {
+ Wrapper(Foo(foo_))
+ }
+}
+
+impl<T> Wrapper<Bar<T>> {
+ pub fn new(bar_: T) -> Self {
+ Wrapper(Bar(bar_))
+ }
+}
+
+fn main() {
+ let a = Wrapper::<Foo<f32>>::new(1.0);
+ let b = Wrapper::<Bar<f32>>::new(1.0);
+ (a, b);
+ //^ (Wrapper<Foo<f32>>, Wrapper<Bar<f32>>)
+}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_encountering_fn_type() {
+ check_types(
+ r#"
+//- /main.rs
+fn foo() {}
+trait FnOnce { fn call(self); }
+fn test() { foo.call(); }
+ //^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_non_parameter_type() {
+ check_types(
+ r#"
+mod a {
+ pub trait Foo {
+ fn foo(&self);
+ }
+}
+
+struct Wrapper<T>(T);
+fn foo<T>(t: Wrapper<T>)
+where
+ Wrapper<T>: a::Foo,
+{
+ t.foo();
+} //^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_3373() {
+ check_types(
+ r#"
+struct A<T>(T);
+
+impl A<i32> {
+ fn from(v: i32) -> A<i32> { A(v) }
+}
+
+fn main() {
+ A::from(3);
+} //^ A<i32>
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_slow() {
+ // this can get quite slow if we set the solver size limit too high
+ check_types(
+ r#"
+trait SendX {}
+
+struct S1; impl SendX for S1 {}
+struct S2; impl SendX for S2 {}
+struct U1;
+
+trait Trait { fn method(self); }
+
+struct X1<A, B> {}
+impl<A, B> SendX for X1<A, B> where A: SendX, B: SendX {}
+
+struct S<B, C> {}
+
+trait FnX {}
+
+impl<B, C> Trait for S<B, C> where C: FnX, B: SendX {}
+
+fn test() { (S {}).method(); }
+ //^ ()
+"#,
+ );
+}
+
+#[test]
+fn dyn_trait_super_trait_not_in_scope() {
+ check_infer(
+ r#"
+ mod m {
+ pub trait SuperTrait {
+ fn foo(&self) -> u32 { 0 }
+ }
+ }
+ trait Trait: m::SuperTrait {}
+
+ struct S;
+ impl m::SuperTrait for S {}
+ impl Trait for S {}
+
+ fn test(d: &dyn Trait) {
+ d.foo();
+ }
+ "#,
+ expect![[r#"
+ 51..55 'self': &Self
+ 64..69 '{ 0 }': u32
+ 66..67 '0': u32
+ 176..177 'd': &dyn Trait
+ 191..207 '{ ...o(); }': ()
+ 197..198 'd': &dyn Trait
+ 197..204 'd.foo()': u32
+ "#]],
+ );
+}
--- /dev/null
+use expect::expect;
+
+use super::{check_infer_with_mismatches, check_types};
+
+#[test]
+fn infer_never1() {
+ check_types(
+ r#"
+fn test() {
+ let t = return;
+ t;
+} //^ !
+"#,
+ );
+}
+
+#[test]
+fn infer_never2() {
+ check_types(
+ r#"
+fn gen<T>() -> T { loop {} }
+
+fn test() {
+ let a = gen();
+ if false { a } else { loop {} };
+ a;
+} //^ !
+"#,
+ );
+}
+
+#[test]
+fn infer_never3() {
+ check_types(
+ r#"
+fn gen<T>() -> T { loop {} }
+
+fn test() {
+ let a = gen();
+ if false { loop {} } else { a };
+ a;
+ //^ !
+}
+"#,
+ );
+}
+
+#[test]
+fn never_type_in_generic_args() {
+ check_types(
+ r#"
+enum Option<T> { None, Some(T) }
+
+fn test() {
+ let a = if true { Option::None } else { Option::Some(return) };
+ a;
+} //^ Option<!>
+"#,
+ );
+}
+
+#[test]
+fn never_type_can_be_reinferred1() {
+ check_types(
+ r#"
+fn gen<T>() -> T { loop {} }
+
+fn test() {
+ let a = gen();
+ if false { loop {} } else { a };
+ a;
+ //^ ()
+ if false { a };
+}
+"#,
+ );
+}
+
+#[test]
+fn never_type_can_be_reinferred2() {
+ check_types(
+ r#"
+enum Option<T> { None, Some(T) }
+
+fn test() {
+ let a = if true { Option::None } else { Option::Some(return) };
+ a;
+ //^ Option<i32>
+ match 42 {
+ 42 => a,
+ _ => Option::Some(42),
+ };
+}
+"#,
+ );
+}
+
+#[test]
+fn never_type_can_be_reinferred3() {
+ check_types(
+ r#"
+enum Option<T> { None, Some(T) }
+
+fn test() {
+ let a = if true { Option::None } else { Option::Some(return) };
+ a;
+ //^ Option<&str>
+ match 42 {
+ 42 => a,
+ _ => Option::Some("str"),
+ };
+}
+"#,
+ );
+}
+
+#[test]
+fn match_no_arm() {
+ check_types(
+ r#"
+enum Void {}
+
+fn test(a: Void) {
+ let t = match a {};
+ t;
+} //^ !
+"#,
+ );
+}
+
+#[test]
+fn match_unknown_arm() {
+ check_types(
+ r#"
+fn test(a: Option) {
+ let t = match 0 {
+ _ => unknown,
+ };
+ t;
+} //^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn if_never() {
+ check_types(
+ r#"
+fn test() {
+ let i = if true {
+ loop {}
+ } else {
+ 3.0
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn if_else_never() {
+ check_types(
+ r#"
+fn test(input: bool) {
+ let i = if input {
+ 2.0
+ } else {
+ return
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn match_first_arm_never() {
+ check_types(
+ r#"
+fn test(a: i32) {
+ let i = match a {
+ 1 => return,
+ 2 => 2.0,
+ 3 => loop {},
+ _ => 3.0,
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn match_second_arm_never() {
+ check_types(
+ r#"
+fn test(a: i32) {
+ let i = match a {
+ 1 => 3.0,
+ 2 => loop {},
+ 3 => 3.0,
+ _ => return,
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn match_all_arms_never() {
+ check_types(
+ r#"
+fn test(a: i32) {
+ let i = match a {
+ 2 => return,
+ _ => loop {},
+ };
+ i;
+} //^ !
+"#,
+ );
+}
+
+#[test]
+fn match_no_never_arms() {
+ check_types(
+ r#"
+fn test(a: i32) {
+ let i = match a {
+ 2 => 2.0,
+ _ => 3.0,
+ };
+ i;
+} //^ f64
+"#,
+ );
+}
+
+#[test]
+fn diverging_expression_1() {
+ check_infer_with_mismatches(
+ r"
+ //- /main.rs
+ fn test1() {
+ let x: u32 = return;
+ }
+ fn test2() {
+ let x: u32 = { return; };
+ }
+ fn test3() {
+ let x: u32 = loop {};
+ }
+ fn test4() {
+ let x: u32 = { loop {} };
+ }
+ fn test5() {
+ let x: u32 = { if true { loop {}; } else { loop {}; } };
+ }
+ fn test6() {
+ let x: u32 = { let y: u32 = { loop {}; }; };
+ }
+ ",
+ expect![[r"
+ 11..39 '{ ...urn; }': ()
+ 21..22 'x': u32
+ 30..36 'return': !
+ 51..84 '{ ...; }; }': ()
+ 61..62 'x': u32
+ 70..81 '{ return; }': u32
+ 72..78 'return': !
+ 96..125 '{ ... {}; }': ()
+ 106..107 'x': u32
+ 115..122 'loop {}': !
+ 120..122 '{}': ()
+ 137..170 '{ ...} }; }': ()
+ 147..148 'x': u32
+ 156..167 '{ loop {} }': u32
+ 158..165 'loop {}': !
+ 163..165 '{}': ()
+ 182..246 '{ ...} }; }': ()
+ 192..193 'x': u32
+ 201..243 '{ if t...}; } }': u32
+ 203..241 'if tru... {}; }': u32
+ 206..210 'true': bool
+ 211..223 '{ loop {}; }': u32
+ 213..220 'loop {}': !
+ 218..220 '{}': ()
+ 229..241 '{ loop {}; }': u32
+ 231..238 'loop {}': !
+ 236..238 '{}': ()
+ 258..310 '{ ...; }; }': ()
+ 268..269 'x': u32
+ 277..307 '{ let ...; }; }': u32
+ 283..284 'y': u32
+ 292..304 '{ loop {}; }': u32
+ 294..301 'loop {}': !
+ 299..301 '{}': ()
+ "]],
+ );
+}
+
+#[test]
+fn diverging_expression_2() {
+ check_infer_with_mismatches(
+ r#"
+ //- /main.rs
+ fn test1() {
+ // should give type mismatch
+ let x: u32 = { loop {}; "foo" };
+ }
+ "#,
+ expect![[r#"
+ 11..84 '{ ..." }; }': ()
+ 54..55 'x': u32
+ 63..81 '{ loop...foo" }': &str
+ 65..72 'loop {}': !
+ 70..72 '{}': ()
+ 74..79 '"foo"': &str
+ 63..81: expected u32, got &str
+ 74..79: expected u32, got &str
+ "#]],
+ );
+}
+
+#[test]
+fn diverging_expression_3_break() {
+ check_infer_with_mismatches(
+ r"
+ //- /main.rs
+ fn test1() {
+ // should give type mismatch
+ let x: u32 = { loop { break; } };
+ }
+ fn test2() {
+ // should give type mismatch
+ let x: u32 = { for a in b { break; }; };
+ // should give type mismatch as well
+ let x: u32 = { for a in b {}; };
+ // should give type mismatch as well
+ let x: u32 = { for a in b { return; }; };
+ }
+ fn test3() {
+ // should give type mismatch
+ let x: u32 = { while true { break; }; };
+ // should give type mismatch as well -- there's an implicit break, even if it's never hit
+ let x: u32 = { while true {}; };
+ // should give type mismatch as well
+ let x: u32 = { while true { return; }; };
+ }
+ ",
+ expect![[r"
+ 11..85 '{ ...} }; }': ()
+ 54..55 'x': u32
+ 63..82 '{ loop...k; } }': ()
+ 65..80 'loop { break; }': ()
+ 70..80 '{ break; }': ()
+ 72..77 'break': !
+ 63..82: expected u32, got ()
+ 65..80: expected u32, got ()
+ 97..343 '{ ...; }; }': ()
+ 140..141 'x': u32
+ 149..175 '{ for ...; }; }': ()
+ 151..172 'for a ...eak; }': ()
+ 155..156 'a': {unknown}
+ 160..161 'b': {unknown}
+ 162..172 '{ break; }': ()
+ 164..169 'break': !
+ 226..227 'x': u32
+ 235..253 '{ for ... {}; }': ()
+ 237..250 'for a in b {}': ()
+ 241..242 'a': {unknown}
+ 246..247 'b': {unknown}
+ 248..250 '{}': ()
+ 304..305 'x': u32
+ 313..340 '{ for ...; }; }': ()
+ 315..337 'for a ...urn; }': ()
+ 319..320 'a': {unknown}
+ 324..325 'b': {unknown}
+ 326..337 '{ return; }': ()
+ 328..334 'return': !
+ 149..175: expected u32, got ()
+ 235..253: expected u32, got ()
+ 313..340: expected u32, got ()
+ 355..654 '{ ...; }; }': ()
+ 398..399 'x': u32
+ 407..433 '{ whil...; }; }': ()
+ 409..430 'while ...eak; }': ()
+ 415..419 'true': bool
+ 420..430 '{ break; }': ()
+ 422..427 'break': !
+ 537..538 'x': u32
+ 546..564 '{ whil... {}; }': ()
+ 548..561 'while true {}': ()
+ 554..558 'true': bool
+ 559..561 '{}': ()
+ 615..616 'x': u32
+ 624..651 '{ whil...; }; }': ()
+ 626..648 'while ...urn; }': ()
+ 632..636 'true': bool
+ 637..648 '{ return; }': ()
+ 639..645 'return': !
+ 407..433: expected u32, got ()
+ 546..564: expected u32, got ()
+ 624..651: expected u32, got ()
+ "]],
+ );
+}
--- /dev/null
+use expect::expect;
+use test_utils::mark;
+
+use super::{check_infer, check_infer_with_mismatches};
+
+#[test]
+fn infer_pattern() {
+ check_infer(
+ r#"
+ fn test(x: &i32) {
+ let y = x;
+ let &z = x;
+ let a = z;
+ let (c, d) = (1, "hello");
+
+ for (e, f) in some_iter {
+ let g = e;
+ }
+
+ if let [val] = opt {
+ let h = val;
+ }
+
+ let lambda = |a: u64, b, c: i32| { a + b; c };
+
+ let ref ref_to_x = x;
+ let mut mut_x = x;
+ let ref mut mut_ref_to_x = x;
+ let k = mut_ref_to_x;
+ }
+ "#,
+ expect![[r#"
+ 8..9 'x': &i32
+ 17..368 '{ ...o_x; }': ()
+ 27..28 'y': &i32
+ 31..32 'x': &i32
+ 42..44 '&z': &i32
+ 43..44 'z': i32
+ 47..48 'x': &i32
+ 58..59 'a': i32
+ 62..63 'z': i32
+ 73..79 '(c, d)': (i32, &str)
+ 74..75 'c': i32
+ 77..78 'd': &str
+ 82..94 '(1, "hello")': (i32, &str)
+ 83..84 '1': i32
+ 86..93 '"hello"': &str
+ 101..151 'for (e... }': ()
+ 105..111 '(e, f)': ({unknown}, {unknown})
+ 106..107 'e': {unknown}
+ 109..110 'f': {unknown}
+ 115..124 'some_iter': {unknown}
+ 125..151 '{ ... }': ()
+ 139..140 'g': {unknown}
+ 143..144 'e': {unknown}
+ 157..204 'if let... }': ()
+ 164..169 '[val]': [{unknown}]
+ 165..168 'val': {unknown}
+ 172..175 'opt': [{unknown}]
+ 176..204 '{ ... }': ()
+ 190..191 'h': {unknown}
+ 194..197 'val': {unknown}
+ 214..220 'lambda': |u64, u64, i32| -> i32
+ 223..255 '|a: u6...b; c }': |u64, u64, i32| -> i32
+ 224..225 'a': u64
+ 232..233 'b': u64
+ 235..236 'c': i32
+ 243..255 '{ a + b; c }': i32
+ 245..246 'a': u64
+ 245..250 'a + b': u64
+ 249..250 'b': u64
+ 252..253 'c': i32
+ 266..278 'ref ref_to_x': &&i32
+ 281..282 'x': &i32
+ 292..301 'mut mut_x': &i32
+ 304..305 'x': &i32
+ 315..335 'ref mu...f_to_x': &mut &i32
+ 338..339 'x': &i32
+ 349..350 'k': &mut &i32
+ 353..365 'mut_ref_to_x': &mut &i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_literal_pattern() {
+ check_infer_with_mismatches(
+ r#"
+ fn any<T>() -> T { loop {} }
+ fn test(x: &i32) {
+ if let "foo" = any() {}
+ if let 1 = any() {}
+ if let 1u32 = any() {}
+ if let 1f32 = any() {}
+ if let 1.0 = any() {}
+ if let true = any() {}
+ }
+ "#,
+ expect![[r#"
+ 17..28 '{ loop {} }': T
+ 19..26 'loop {}': !
+ 24..26 '{}': ()
+ 37..38 'x': &i32
+ 46..208 '{ ...) {} }': ()
+ 52..75 'if let...y() {}': ()
+ 59..64 '"foo"': &str
+ 59..64 '"foo"': &str
+ 67..70 'any': fn any<&str>() -> &str
+ 67..72 'any()': &str
+ 73..75 '{}': ()
+ 80..99 'if let...y() {}': ()
+ 87..88 '1': i32
+ 87..88 '1': i32
+ 91..94 'any': fn any<i32>() -> i32
+ 91..96 'any()': i32
+ 97..99 '{}': ()
+ 104..126 'if let...y() {}': ()
+ 111..115 '1u32': u32
+ 111..115 '1u32': u32
+ 118..121 'any': fn any<u32>() -> u32
+ 118..123 'any()': u32
+ 124..126 '{}': ()
+ 131..153 'if let...y() {}': ()
+ 138..142 '1f32': f32
+ 138..142 '1f32': f32
+ 145..148 'any': fn any<f32>() -> f32
+ 145..150 'any()': f32
+ 151..153 '{}': ()
+ 158..179 'if let...y() {}': ()
+ 165..168 '1.0': f64
+ 165..168 '1.0': f64
+ 171..174 'any': fn any<f64>() -> f64
+ 171..176 'any()': f64
+ 177..179 '{}': ()
+ 184..206 'if let...y() {}': ()
+ 191..195 'true': bool
+ 191..195 'true': bool
+ 198..201 'any': fn any<bool>() -> bool
+ 198..203 'any()': bool
+ 204..206 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_range_pattern() {
+ check_infer_with_mismatches(
+ r#"
+ fn test(x: &i32) {
+ if let 1..76 = 2u32 {}
+ if let 1..=76 = 2u32 {}
+ }
+ "#,
+ expect![[r#"
+ 8..9 'x': &i32
+ 17..75 '{ ...2 {} }': ()
+ 23..45 'if let...u32 {}': ()
+ 30..35 '1..76': u32
+ 38..42 '2u32': u32
+ 43..45 '{}': ()
+ 50..73 'if let...u32 {}': ()
+ 57..63 '1..=76': u32
+ 66..70 '2u32': u32
+ 71..73 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_ergonomics() {
+ check_infer(
+ r#"
+ struct A<T>(T);
+
+ fn test() {
+ let A(n) = &A(1);
+ let A(n) = &mut A(1);
+ }
+ "#,
+ expect![[r#"
+ 27..78 '{ ...(1); }': ()
+ 37..41 'A(n)': A<i32>
+ 39..40 'n': &i32
+ 44..49 '&A(1)': &A<i32>
+ 45..46 'A': A<i32>(i32) -> A<i32>
+ 45..49 'A(1)': A<i32>
+ 47..48 '1': i32
+ 59..63 'A(n)': A<i32>
+ 61..62 'n': &mut i32
+ 66..75 '&mut A(1)': &mut A<i32>
+ 71..72 'A': A<i32>(i32) -> A<i32>
+ 71..75 'A(1)': A<i32>
+ 73..74 '1': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_ergonomics_ref() {
+ mark::check!(match_ergonomics_ref);
+ check_infer(
+ r#"
+ fn test() {
+ let v = &(1, &2);
+ let (_, &w) = v;
+ }
+ "#,
+ expect![[r#"
+ 10..56 '{ ...= v; }': ()
+ 20..21 'v': &(i32, &i32)
+ 24..32 '&(1, &2)': &(i32, &i32)
+ 25..32 '(1, &2)': (i32, &i32)
+ 26..27 '1': i32
+ 29..31 '&2': &i32
+ 30..31 '2': i32
+ 42..49 '(_, &w)': (i32, &i32)
+ 43..44 '_': i32
+ 46..48 '&w': &i32
+ 47..48 'w': i32
+ 52..53 'v': &(i32, &i32)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_slice() {
+ check_infer(
+ r#"
+ fn test() {
+ let slice: &[f64] = &[0.0];
+ match slice {
+ &[] => {},
+ &[a] => {
+ a;
+ },
+ &[b, c] => {
+ b;
+ c;
+ }
+ _ => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..209 '{ ... } }': ()
+ 20..25 'slice': &[f64]
+ 36..42 '&[0.0]': &[f64; _]
+ 37..42 '[0.0]': [f64; _]
+ 38..41 '0.0': f64
+ 48..207 'match ... }': ()
+ 54..59 'slice': &[f64]
+ 70..73 '&[]': &[f64]
+ 71..73 '[]': [f64]
+ 77..79 '{}': ()
+ 89..93 '&[a]': &[f64]
+ 90..93 '[a]': [f64]
+ 91..92 'a': f64
+ 97..123 '{ ... }': ()
+ 111..112 'a': f64
+ 133..140 '&[b, c]': &[f64]
+ 134..140 '[b, c]': [f64]
+ 135..136 'b': f64
+ 138..139 'c': f64
+ 144..185 '{ ... }': ()
+ 158..159 'b': f64
+ 173..174 'c': f64
+ 194..195 '_': &[f64]
+ 199..201 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_string_literal() {
+ check_infer_with_mismatches(
+ r#"
+ fn test() {
+ let s: &str = "hello";
+ match s {
+ "hello" => {}
+ _ => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..98 '{ ... } }': ()
+ 20..21 's': &str
+ 30..37 '"hello"': &str
+ 43..96 'match ... }': ()
+ 49..50 's': &str
+ 61..68 '"hello"': &str
+ 61..68 '"hello"': &str
+ 72..74 '{}': ()
+ 83..84 '_': &str
+ 88..90 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_or() {
+ check_infer_with_mismatches(
+ r#"
+ fn test() {
+ let s: &str = "hello";
+ match s {
+ "hello" | "world" => {}
+ _ => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..108 '{ ... } }': ()
+ 20..21 's': &str
+ 30..37 '"hello"': &str
+ 43..106 'match ... }': ()
+ 49..50 's': &str
+ 61..68 '"hello"': &str
+ 61..68 '"hello"': &str
+ 61..78 '"hello...world"': &str
+ 71..78 '"world"': &str
+ 71..78 '"world"': &str
+ 82..84 '{}': ()
+ 93..94 '_': &str
+ 98..100 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_pattern_match_arr() {
+ check_infer(
+ r#"
+ fn test() {
+ let arr: [f64; 2] = [0.0, 1.0];
+ match arr {
+ [1.0, a] => {
+ a;
+ },
+ [b, c] => {
+ b;
+ c;
+ }
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..179 '{ ... } }': ()
+ 20..23 'arr': [f64; _]
+ 36..46 '[0.0, 1.0]': [f64; _]
+ 37..40 '0.0': f64
+ 42..45 '1.0': f64
+ 52..177 'match ... }': ()
+ 58..61 'arr': [f64; _]
+ 72..80 '[1.0, a]': [f64; _]
+ 73..76 '1.0': f64
+ 73..76 '1.0': f64
+ 78..79 'a': f64
+ 84..110 '{ ... }': ()
+ 98..99 'a': f64
+ 120..126 '[b, c]': [f64; _]
+ 121..122 'b': f64
+ 124..125 'c': f64
+ 130..171 '{ ... }': ()
+ 144..145 'b': f64
+ 159..160 'c': f64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_adt_pattern() {
+ check_infer(
+ r#"
+ enum E {
+ A { x: usize },
+ B
+ }
+
+ struct S(u32, E);
+
+ fn test() {
+ let e = E::A { x: 3 };
+
+ let S(y, z) = foo;
+ let E::A { x: new_var } = e;
+
+ match e {
+ E::A { x } => x,
+ E::B if foo => 1,
+ E::B => 10,
+ };
+
+ let ref d @ E::A { .. } = e;
+ d;
+ }
+ "#,
+ expect![[r#"
+ 67..288 '{ ... d; }': ()
+ 77..78 'e': E
+ 81..94 'E::A { x: 3 }': E
+ 91..92 '3': usize
+ 105..112 'S(y, z)': S
+ 107..108 'y': u32
+ 110..111 'z': E
+ 115..118 'foo': S
+ 128..147 'E::A {..._var }': E
+ 138..145 'new_var': usize
+ 150..151 'e': E
+ 158..244 'match ... }': usize
+ 164..165 'e': E
+ 176..186 'E::A { x }': E
+ 183..184 'x': usize
+ 190..191 'x': usize
+ 201..205 'E::B': E
+ 209..212 'foo': bool
+ 216..217 '1': usize
+ 227..231 'E::B': E
+ 235..237 '10': usize
+ 255..274 'ref d ...{ .. }': &E
+ 263..274 'E::A { .. }': E
+ 277..278 'e': E
+ 284..285 'd': &E
+ "#]],
+ );
+}
+
+#[test]
+fn enum_variant_through_self_in_pattern() {
+ check_infer(
+ r#"
+ enum E {
+ A { x: usize },
+ B(usize),
+ C
+ }
+
+ impl E {
+ fn test() {
+ match (loop {}) {
+ Self::A { x } => { x; },
+ Self::B(x) => { x; },
+ Self::C => {},
+ };
+ }
+ }
+ "#,
+ expect![[r#"
+ 75..217 '{ ... }': ()
+ 85..210 'match ... }': ()
+ 92..99 'loop {}': !
+ 97..99 '{}': ()
+ 115..128 'Self::A { x }': E
+ 125..126 'x': usize
+ 132..138 '{ x; }': ()
+ 134..135 'x': usize
+ 152..162 'Self::B(x)': E
+ 160..161 'x': usize
+ 166..172 '{ x; }': ()
+ 168..169 'x': usize
+ 186..193 'Self::C': E
+ 197..199 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_generics_in_patterns() {
+ check_infer(
+ r#"
+ struct A<T> {
+ x: T,
+ }
+
+ enum Option<T> {
+ Some(T),
+ None,
+ }
+
+ fn test(a1: A<u32>, o: Option<u64>) {
+ let A { x: x2 } = a1;
+ let A::<i64> { x: x3 } = A { x: 1 };
+ match o {
+ Option::Some(t) => t,
+ _ => 1,
+ };
+ }
+ "#,
+ expect![[r#"
+ 78..80 'a1': A<u32>
+ 90..91 'o': Option<u64>
+ 106..243 '{ ... }; }': ()
+ 116..127 'A { x: x2 }': A<u32>
+ 123..125 'x2': u32
+ 130..132 'a1': A<u32>
+ 142..160 'A::<i6...: x3 }': A<i64>
+ 156..158 'x3': i64
+ 163..173 'A { x: 1 }': A<i64>
+ 170..171 '1': i64
+ 179..240 'match ... }': u64
+ 185..186 'o': Option<u64>
+ 197..212 'Option::Some(t)': Option<u64>
+ 210..211 't': u64
+ 216..217 't': u64
+ 227..228 '_': Option<u64>
+ 232..233 '1': u64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_const_pattern() {
+ check_infer_with_mismatches(
+ r#"
+ enum Option<T> { None }
+ use Option::None;
+ struct Foo;
+ const Bar: usize = 1;
+
+ fn test() {
+ let a: Option<u32> = None;
+ let b: Option<i64> = match a {
+ None => None,
+ };
+ let _: () = match () { Foo => Foo }; // Expected mismatch
+ let _: () = match () { Bar => Bar }; // Expected mismatch
+ }
+ "#,
+ expect![[r#"
+ 73..74 '1': usize
+ 87..309 '{ ...atch }': ()
+ 97..98 'a': Option<u32>
+ 114..118 'None': Option<u32>
+ 128..129 'b': Option<i64>
+ 145..182 'match ... }': Option<i64>
+ 151..152 'a': Option<u32>
+ 163..167 'None': Option<u32>
+ 171..175 'None': Option<i64>
+ 192..193 '_': ()
+ 200..223 'match ... Foo }': Foo
+ 206..208 '()': ()
+ 211..214 'Foo': Foo
+ 218..221 'Foo': Foo
+ 254..255 '_': ()
+ 262..285 'match ... Bar }': usize
+ 268..270 '()': ()
+ 273..276 'Bar': usize
+ 280..283 'Bar': usize
+ 200..223: expected (), got Foo
+ 262..285: expected (), got usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_guard() {
+ check_infer(
+ r#"
+struct S;
+impl S { fn foo(&self) -> bool { false } }
+
+fn main() {
+ match S {
+ s if s.foo() => (),
+ }
+}
+ "#,
+ expect![[r#"
+ 27..31 'self': &S
+ 41..50 '{ false }': bool
+ 43..48 'false': bool
+ 64..115 '{ ... } }': ()
+ 70..113 'match ... }': ()
+ 76..77 'S': S
+ 88..89 's': S
+ 93..94 's': S
+ 93..100 's.foo()': bool
+ 104..106 '()': ()
+ "#]],
+ )
+}
+
+#[test]
+fn match_ergonomics_in_closure_params() {
+ check_infer(
+ r#"
+ #[lang = "fn_once"]
+ trait FnOnce<Args> {
+ type Output;
+ }
+
+ fn foo<T, U, F: FnOnce(T) -> U>(t: T, f: F) -> U { loop {} }
+
+ fn test() {
+ foo(&(1, "a"), |&(x, y)| x); // normal, no match ergonomics
+ foo(&(1, "a"), |(x, y)| x);
+ }
+ "#,
+ expect![[r#"
+ 93..94 't': T
+ 99..100 'f': F
+ 110..121 '{ loop {} }': U
+ 112..119 'loop {}': !
+ 117..119 '{}': ()
+ 133..232 '{ ... x); }': ()
+ 139..142 'foo': fn foo<&(i32, &str), i32, |&(i32, &str)| -> i32>(&(i32, &str), |&(i32, &str)| -> i32) -> i32
+ 139..166 'foo(&(...y)| x)': i32
+ 143..152 '&(1, "a")': &(i32, &str)
+ 144..152 '(1, "a")': (i32, &str)
+ 145..146 '1': i32
+ 148..151 '"a"': &str
+ 154..165 '|&(x, y)| x': |&(i32, &str)| -> i32
+ 155..162 '&(x, y)': &(i32, &str)
+ 156..162 '(x, y)': (i32, &str)
+ 157..158 'x': i32
+ 160..161 'y': &str
+ 164..165 'x': i32
+ 203..206 'foo': fn foo<&(i32, &str), &i32, |&(i32, &str)| -> &i32>(&(i32, &str), |&(i32, &str)| -> &i32) -> &i32
+ 203..229 'foo(&(...y)| x)': &i32
+ 207..216 '&(1, "a")': &(i32, &str)
+ 208..216 '(1, "a")': (i32, &str)
+ 209..210 '1': i32
+ 212..215 '"a"': &str
+ 218..228 '|(x, y)| x': |&(i32, &str)| -> &i32
+ 219..225 '(x, y)': (i32, &str)
+ 220..221 'x': &i32
+ 223..224 'y': &&str
+ 227..228 'x': &i32
+ "#]],
+ );
+}
+
+#[test]
+fn slice_tail_pattern() {
+ check_infer(
+ r#"
+ fn foo(params: &[i32]) {
+ match params {
+ [head, tail @ ..] => {
+ }
+ }
+ }
+ "#,
+ expect![[r#"
+ 7..13 'params': &[i32]
+ 23..92 '{ ... } }': ()
+ 29..90 'match ... }': ()
+ 35..41 'params': &[i32]
+ 52..69 '[head,... @ ..]': [i32]
+ 53..57 'head': &i32
+ 59..68 'tail @ ..': &[i32]
+ 66..68 '..': [i32]
+ 73..84 '{ }': ()
+ "#]],
+ );
+}
--- /dev/null
+use expect::expect;
+use test_utils::mark;
+
+use super::{check_infer, check_types};
+
+#[test]
+fn bug_484() {
+ check_infer(
+ r#"
+ fn test() {
+ let x = if true {};
+ }
+ "#,
+ expect![[r#"
+ 10..37 '{ ... {}; }': ()
+ 20..21 'x': ()
+ 24..34 'if true {}': ()
+ 27..31 'true': bool
+ 32..34 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn no_panic_on_field_of_enum() {
+ check_infer(
+ r#"
+ enum X {}
+
+ fn test(x: X) {
+ x.some_field;
+ }
+ "#,
+ expect![[r#"
+ 19..20 'x': X
+ 25..46 '{ ...eld; }': ()
+ 31..32 'x': X
+ 31..43 'x.some_field': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn bug_585() {
+ check_infer(
+ r#"
+ fn test() {
+ X {};
+ match x {
+ A::B {} => (),
+ A::Y() => (),
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..88 '{ ... } }': ()
+ 16..20 'X {}': {unknown}
+ 26..86 'match ... }': ()
+ 32..33 'x': {unknown}
+ 44..51 'A::B {}': {unknown}
+ 55..57 '()': ()
+ 67..73 'A::Y()': {unknown}
+ 77..79 '()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn bug_651() {
+ check_infer(
+ r#"
+ fn quux() {
+ let y = 92;
+ 1 + y;
+ }
+ "#,
+ expect![[r#"
+ 10..40 '{ ...+ y; }': ()
+ 20..21 'y': i32
+ 24..26 '92': i32
+ 32..33 '1': i32
+ 32..37 '1 + y': i32
+ 36..37 'y': i32
+ "#]],
+ );
+}
+
+#[test]
+fn recursive_vars() {
+ mark::check!(type_var_cycles_resolve_completely);
+ mark::check!(type_var_cycles_resolve_as_possible);
+ check_infer(
+ r#"
+ fn test() {
+ let y = unknown;
+ [y, &y];
+ }
+ "#,
+ expect![[r#"
+ 10..47 '{ ...&y]; }': ()
+ 20..21 'y': &{unknown}
+ 24..31 'unknown': &{unknown}
+ 37..44 '[y, &y]': [&&{unknown}; _]
+ 38..39 'y': &{unknown}
+ 41..43 '&y': &&{unknown}
+ 42..43 'y': &{unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn recursive_vars_2() {
+ check_infer(
+ r#"
+ fn test() {
+ let x = unknown;
+ let y = unknown;
+ [(x, y), (&y, &x)];
+ }
+ "#,
+ expect![[r#"
+ 10..79 '{ ...x)]; }': ()
+ 20..21 'x': &&{unknown}
+ 24..31 'unknown': &&{unknown}
+ 41..42 'y': &&{unknown}
+ 45..52 'unknown': &&{unknown}
+ 58..76 '[(x, y..., &x)]': [(&&&{unknown}, &&&{unknown}); _]
+ 59..65 '(x, y)': (&&&{unknown}, &&&{unknown})
+ 60..61 'x': &&{unknown}
+ 63..64 'y': &&{unknown}
+ 67..75 '(&y, &x)': (&&&{unknown}, &&&{unknown})
+ 68..70 '&y': &&&{unknown}
+ 69..70 'y': &&{unknown}
+ 72..74 '&x': &&&{unknown}
+ 73..74 'x': &&{unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_1() {
+ // caused stack overflow, taken from std
+ check_infer(
+ r#"
+ enum Maybe<T> {
+ Real(T),
+ Fake,
+ }
+
+ fn write() {
+ match something_unknown {
+ Maybe::Real(ref mut something) => (),
+ }
+ }
+ "#,
+ expect![[r#"
+ 53..138 '{ ... } }': ()
+ 59..136 'match ... }': ()
+ 65..82 'someth...nknown': Maybe<{unknown}>
+ 93..123 'Maybe:...thing)': Maybe<{unknown}>
+ 105..122 'ref mu...ething': &mut {unknown}
+ 127..129 '()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_2() {
+ mark::check!(type_var_resolves_to_int_var);
+ // caused "equating two type variables, ...", taken from std
+ check_infer(
+ r#"
+ fn test_line_buffer() {
+ &[0, b'\n', 1, b'\n'];
+ }
+ "#,
+ expect![[r#"
+ 22..52 '{ ...n']; }': ()
+ 28..49 '&[0, b...b'\n']': &[u8; _]
+ 29..49 '[0, b'...b'\n']': [u8; _]
+ 30..31 '0': u8
+ 33..38 'b'\n'': u8
+ 40..41 '1': u8
+ 43..48 'b'\n'': u8
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_3() {
+ // taken from rustc
+ check_infer(
+ r#"
+ pub fn compute() {
+ match nope!() {
+ SizeSkeleton::Pointer { non_zero: true, tail } => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 17..107 '{ ... } }': ()
+ 23..105 'match ... }': ()
+ 29..36 'nope!()': {unknown}
+ 47..93 'SizeSk...tail }': {unknown}
+ 81..85 'true': bool
+ 81..85 'true': bool
+ 87..91 'tail': {unknown}
+ 97..99 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_4() {
+ // taken from rustc
+ check_infer(
+ r#"
+ pub fn primitive_type() {
+ match *self {
+ BorrowedRef { type_: Primitive(p), ..} => {},
+ }
+ }
+ "#,
+ expect![[r#"
+ 24..105 '{ ... } }': ()
+ 30..103 'match ... }': ()
+ 36..41 '*self': {unknown}
+ 37..41 'self': {unknown}
+ 52..90 'Borrow...), ..}': {unknown}
+ 73..85 'Primitive(p)': {unknown}
+ 83..84 'p': {unknown}
+ 94..96 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_5() {
+ // taken from rustc
+ check_infer(
+ r#"
+ fn extra_compiler_flags() {
+ for content in doesnt_matter {
+ let name = if doesnt_matter {
+ first
+ } else {
+ &content
+ };
+
+ let content = if ICE_REPORT_COMPILER_FLAGS_STRIP_VALUE.contains(&name) {
+ name
+ } else {
+ content
+ };
+ }
+ }
+ "#,
+ expect![[r#"
+ 26..322 '{ ... } }': ()
+ 32..320 'for co... }': ()
+ 36..43 'content': &{unknown}
+ 47..60 'doesnt_matter': {unknown}
+ 61..320 '{ ... }': ()
+ 75..79 'name': &&{unknown}
+ 82..166 'if doe... }': &&{unknown}
+ 85..98 'doesnt_matter': bool
+ 99..128 '{ ... }': &&{unknown}
+ 113..118 'first': &&{unknown}
+ 134..166 '{ ... }': &&{unknown}
+ 148..156 '&content': &&{unknown}
+ 149..156 'content': &{unknown}
+ 181..188 'content': &{unknown}
+ 191..313 'if ICE... }': &{unknown}
+ 194..231 'ICE_RE..._VALUE': {unknown}
+ 194..247 'ICE_RE...&name)': bool
+ 241..246 '&name': &&&{unknown}
+ 242..246 'name': &&{unknown}
+ 248..276 '{ ... }': &&{unknown}
+ 262..266 'name': &&{unknown}
+ 282..313 '{ ... }': &{unknown}
+ 296..303 'content': &{unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn infer_nested_generics_crash() {
+ // another crash found typechecking rustc
+ check_infer(
+ r#"
+ struct Canonical<V> {
+ value: V,
+ }
+ struct QueryResponse<V> {
+ value: V,
+ }
+ fn test<R>(query_response: Canonical<QueryResponse<R>>) {
+ &query_response.value;
+ }
+ "#,
+ expect![[r#"
+ 91..105 'query_response': Canonical<QueryResponse<R>>
+ 136..166 '{ ...lue; }': ()
+ 142..163 '&query....value': &QueryResponse<R>
+ 143..157 'query_response': Canonical<QueryResponse<R>>
+ 143..163 'query_....value': QueryResponse<R>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_paren_macro_call() {
+ check_infer(
+ r#"
+ macro_rules! bar { () => {0u32} }
+ fn test() {
+ let a = (bar!());
+ }
+ "#,
+ expect![[r#"
+ !0..4 '0u32': u32
+ 44..69 '{ ...()); }': ()
+ 54..55 'a': u32
+ "#]],
+ );
+}
+
+#[test]
+fn bug_1030() {
+ check_infer(
+ r#"
+ struct HashSet<T, H>;
+ struct FxHasher;
+ type FxHashSet<T> = HashSet<T, FxHasher>;
+
+ impl<T, H> HashSet<T, H> {
+ fn default() -> HashSet<T, H> {}
+ }
+
+ pub fn main_loop() {
+ FxHashSet::default();
+ }
+ "#,
+ expect![[r#"
+ 143..145 '{}': ()
+ 168..197 '{ ...t(); }': ()
+ 174..192 'FxHash...efault': fn default<{unknown}, FxHasher>() -> HashSet<{unknown}, FxHasher>
+ 174..194 'FxHash...ault()': HashSet<{unknown}, FxHasher>
+ "#]],
+ );
+}
+
+#[test]
+fn issue_2669() {
+ check_infer(
+ r#"
+ trait A {}
+ trait Write {}
+ struct Response<T> {}
+
+ trait D {
+ fn foo();
+ }
+
+ impl<T:A> D for Response<T> {
+ fn foo() {
+ end();
+ fn end<W: Write>() {
+ let _x: T = loop {};
+ }
+ }
+ }
+ "#,
+ expect![[r#"
+ 119..214 '{ ... }': ()
+ 129..132 'end': fn end<{unknown}>()
+ 129..134 'end()': ()
+ 163..208 '{ ... }': ()
+ 181..183 '_x': !
+ 190..197 'loop {}': !
+ 195..197 '{}': ()
+ "#]],
+ )
+}
+
+#[test]
+fn issue_2705() {
+ check_infer(
+ r#"
+ trait Trait {}
+ fn test() {
+ <Trait<u32>>::foo()
+ }
+ "#,
+ expect![[r#"
+ 25..52 '{ ...oo() }': ()
+ 31..48 '<Trait...>::foo': {unknown}
+ 31..50 '<Trait...:foo()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_2683_chars_impl() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+fn test() {
+ let chars: std::str::Chars<'_>;
+ (chars.next(), chars.nth(1));
+} //^ (Option<char>, Option<char>)
+
+//- /std.rs crate:std
+#[prelude_import]
+use prelude::*;
+
+pub mod prelude {
+ pub use crate::iter::Iterator;
+ pub use crate::option::Option;
+}
+
+pub mod iter {
+ pub use self::traits::Iterator;
+ pub mod traits {
+ pub use self::iterator::Iterator;
+
+ pub mod iterator {
+ pub trait Iterator {
+ type Item;
+ fn next(&mut self) -> Option<Self::Item>;
+ fn nth(&mut self, n: usize) -> Option<Self::Item> {}
+ }
+ }
+ }
+}
+
+pub mod option {
+ pub enum Option<T> {}
+}
+
+pub mod str {
+ pub struct Chars<'a> {}
+ impl<'a> Iterator for Chars<'a> {
+ type Item = char;
+ fn next(&mut self) -> Option<char> {}
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn issue_3642_bad_macro_stackover() {
+ check_types(
+ r#"
+#[macro_export]
+macro_rules! match_ast {
+ (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
+
+ (match ($node:expr) {
+ $( ast::$ast:ident($it:ident) => $res:expr, )*
+ _ => $catch_all:expr $(,)?
+ }) => {{
+ $( if let Some($it) = ast::$ast::cast($node.clone()) { $res } else )*
+ { $catch_all }
+ }};
+}
+
+fn main() {
+ let anchor = match_ast! {
+ //^ ()
+ match parent {
+ as => {},
+ _ => return None
+ }
+ };
+}"#,
+ );
+}
+
+#[test]
+fn issue_3999_slice() {
+ check_infer(
+ r#"
+ fn foo(params: &[usize]) {
+ match params {
+ [ps @ .., _] => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 7..13 'params': &[usize]
+ 25..80 '{ ... } }': ()
+ 31..78 'match ... }': ()
+ 37..43 'params': &[usize]
+ 54..66 '[ps @ .., _]': [usize]
+ 55..62 'ps @ ..': &[usize]
+ 60..62 '..': [usize]
+ 64..65 '_': usize
+ 70..72 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_3999_struct() {
+ // rust-analyzer should not panic on seeing this malformed
+ // record pattern.
+ check_infer(
+ r#"
+ struct Bar {
+ a: bool,
+ }
+ fn foo(b: Bar) {
+ match b {
+ Bar { a: .. } => {},
+ }
+ }
+ "#,
+ expect![[r#"
+ 35..36 'b': Bar
+ 43..95 '{ ... } }': ()
+ 49..93 'match ... }': ()
+ 55..56 'b': Bar
+ 67..80 'Bar { a: .. }': Bar
+ 76..78 '..': bool
+ 84..86 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4235_name_conflicts() {
+ check_infer(
+ r#"
+ struct FOO {}
+ static FOO:FOO = FOO {};
+
+ impl FOO {
+ fn foo(&self) {}
+ }
+
+ fn main() {
+ let a = &FOO;
+ a.foo();
+ }
+ "#,
+ expect![[r#"
+ 31..37 'FOO {}': FOO
+ 63..67 'self': &FOO
+ 69..71 '{}': ()
+ 85..119 '{ ...o(); }': ()
+ 95..96 'a': &FOO
+ 99..103 '&FOO': &FOO
+ 100..103 'FOO': FOO
+ 109..110 'a': &FOO
+ 109..116 'a.foo()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4465_dollar_crate_at_type() {
+ check_infer(
+ r#"
+ pub struct Foo {}
+ pub fn anything<T>() -> T {
+ loop {}
+ }
+ macro_rules! foo {
+ () => {{
+ let r: $crate::Foo = anything();
+ r
+ }};
+ }
+ fn main() {
+ let _a = foo!();
+ }
+ "#,
+ expect![[r#"
+ 44..59 '{ loop {} }': T
+ 50..57 'loop {}': !
+ 55..57 '{}': ()
+ !0..31 '{letr:...g();r}': Foo
+ !4..5 'r': Foo
+ !18..26 'anything': fn anything<Foo>() -> Foo
+ !18..28 'anything()': Foo
+ !29..30 'r': Foo
+ 163..187 '{ ...!(); }': ()
+ 173..175 '_a': Foo
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4053_diesel_where_clauses() {
+ check_infer(
+ r#"
+ trait BoxedDsl<DB> {
+ type Output;
+ fn internal_into_boxed(self) -> Self::Output;
+ }
+
+ struct SelectStatement<From, Select, Distinct, Where, Order, LimitOffset, GroupBy, Locking> {
+ order: Order,
+ }
+
+ trait QueryFragment<DB: Backend> {}
+
+ trait Into<T> { fn into(self) -> T; }
+
+ impl<F, S, D, W, O, LOf, DB> BoxedDsl<DB>
+ for SelectStatement<F, S, D, W, O, LOf, G>
+ where
+ O: Into<dyn QueryFragment<DB>>,
+ {
+ type Output = XXX;
+
+ fn internal_into_boxed(self) -> Self::Output {
+ self.order.into();
+ }
+ }
+ "#,
+ expect![[r#"
+ 65..69 'self': Self
+ 267..271 'self': Self
+ 466..470 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
+ 488..522 '{ ... }': ()
+ 498..502 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
+ 498..508 'self.order': O
+ 498..515 'self.o...into()': dyn QueryFragment<DB>
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4953() {
+ check_infer(
+ r#"
+ pub struct Foo(pub i64);
+ impl Foo {
+ fn test() -> Self { Self(0i64) }
+ }
+ "#,
+ expect![[r#"
+ 58..72 '{ Self(0i64) }': Foo
+ 60..64 'Self': Foo(i64) -> Foo
+ 60..70 'Self(0i64)': Foo
+ 65..69 '0i64': i64
+ "#]],
+ );
+ check_infer(
+ r#"
+ pub struct Foo<T>(pub T);
+ impl Foo<i64> {
+ fn test() -> Self { Self(0i64) }
+ }
+ "#,
+ expect![[r#"
+ 64..78 '{ Self(0i64) }': Foo<i64>
+ 66..70 'Self': Foo<i64>(i64) -> Foo<i64>
+ 66..76 'Self(0i64)': Foo<i64>
+ 71..75 '0i64': i64
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4931() {
+ check_infer(
+ r#"
+ trait Div<T> {
+ type Output;
+ }
+
+ trait CheckedDiv: Div<()> {}
+
+ trait PrimInt: CheckedDiv<Output = ()> {
+ fn pow(self);
+ }
+
+ fn check<T: PrimInt>(i: T) {
+ i.pow();
+ }
+ "#,
+ expect![[r#"
+ 117..121 'self': Self
+ 148..149 'i': T
+ 154..170 '{ ...w(); }': ()
+ 160..161 'i': T
+ 160..167 'i.pow()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4885() {
+ check_infer(
+ r#"
+ #[lang = "coerce_unsized"]
+ pub trait CoerceUnsized<T> {}
+
+ trait Future {
+ type Output;
+ }
+ trait Foo<R> {
+ type Bar;
+ }
+ fn foo<R, K>(key: &K) -> impl Future<Output = K::Bar>
+ where
+ K: Foo<R>,
+ {
+ bar(key)
+ }
+ fn bar<R, K>(key: &K) -> impl Future<Output = K::Bar>
+ where
+ K: Foo<R>,
+ {
+ }
+ "#,
+ expect![[r#"
+ 136..139 'key': &K
+ 198..214 '{ ...key) }': impl Future<Output = <K as Foo<R>>::Bar>
+ 204..207 'bar': fn bar<R, K>(&K) -> impl Future<Output = <K as Foo<R>>::Bar>
+ 204..212 'bar(key)': impl Future<Output = <K as Foo<R>>::Bar>
+ 208..211 'key': &K
+ 228..231 'key': &K
+ 290..293 '{ }': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4800() {
+ check_infer(
+ r#"
+ trait Debug {}
+
+ struct Foo<T>;
+
+ type E1<T> = (T, T, T);
+ type E2<T> = E1<E1<E1<(T, T, T)>>>;
+
+ impl Debug for Foo<E2<()>> {}
+
+ struct Request;
+
+ pub trait Future {
+ type Output;
+ }
+
+ pub struct PeerSet<D>;
+
+ impl<D> Service<Request> for PeerSet<D>
+ where
+ D: Discover,
+ D::Key: Debug,
+ {
+ type Error = ();
+ type Future = dyn Future<Output = Self::Error>;
+
+ fn call(&mut self) -> Self::Future {
+ loop {}
+ }
+ }
+
+ pub trait Discover {
+ type Key;
+ }
+
+ pub trait Service<Request> {
+ type Error;
+ type Future: Future<Output = Self::Error>;
+ fn call(&mut self) -> Self::Future;
+ }
+ "#,
+ expect![[r#"
+ 379..383 'self': &mut PeerSet<D>
+ 401..424 '{ ... }': dyn Future<Output = ()>
+ 411..418 'loop {}': !
+ 416..418 '{}': ()
+ 575..579 'self': &mut Self
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4966() {
+ check_infer(
+ r#"
+ pub trait IntoIterator {
+ type Item;
+ }
+
+ struct Repeat<A> { element: A }
+
+ struct Map<F> { f: F }
+
+ struct Vec<T> {}
+
+ #[lang = "deref"]
+ pub trait Deref {
+ type Target;
+ }
+
+ impl<T> Deref for Vec<T> {
+ type Target = [T];
+ }
+
+ fn from_iter<A, T: IntoIterator<Item = A>>(iter: T) -> Vec<A> {}
+
+ fn main() {
+ let inner = Map { f: |_: &f64| 0.0 };
+
+ let repeat = Repeat { element: inner };
+
+ let vec = from_iter(repeat);
+
+ vec.foo_bar();
+ }
+ "#,
+ expect![[r#"
+ 270..274 'iter': T
+ 289..291 '{}': ()
+ 303..447 '{ ...r(); }': ()
+ 313..318 'inner': Map<|&f64| -> f64>
+ 321..345 'Map { ... 0.0 }': Map<|&f64| -> f64>
+ 330..343 '|_: &f64| 0.0': |&f64| -> f64
+ 331..332 '_': &f64
+ 340..343 '0.0': f64
+ 356..362 'repeat': Repeat<Map<|&f64| -> f64>>
+ 365..390 'Repeat...nner }': Repeat<Map<|&f64| -> f64>>
+ 383..388 'inner': Map<|&f64| -> f64>
+ 401..404 'vec': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 407..416 'from_iter': fn from_iter<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>, Repeat<Map<|&f64| -> f64>>>(Repeat<Map<|&f64| -> f64>>) -> Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 407..424 'from_i...epeat)': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 417..423 'repeat': Repeat<Map<|&f64| -> f64>>
+ 431..434 'vec': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 431..444 'vec.foo_bar()': {unknown}
+ "#]],
+ );
+}
--- /dev/null
+use expect::expect;
+
+use super::{check_infer, check_types};
+
+#[test]
+fn infer_box() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+fn test() {
+ let x = box 1;
+ let t = (x, box x, box &1, box [1]);
+ t;
+} //^ (Box<i32>, Box<Box<i32>>, Box<&i32>, Box<[i32; _]>)
+
+//- /std.rs crate:std
+#[prelude_import] use prelude::*;
+mod prelude {}
+
+mod boxed {
+ #[lang = "owned_box"]
+ pub struct Box<T: ?Sized> {
+ inner: *mut T,
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_adt_self() {
+ check_types(
+ r#"
+enum Nat { Succ(Self), Demo(Nat), Zero }
+
+fn test() {
+ let foo: Nat = Nat::Zero;
+ if let Nat::Succ(x) = foo {
+ x
+ } //^ Nat
+}
+"#,
+ );
+}
+
+#[test]
+fn self_in_struct_lit() {
+ check_infer(
+ r#"
+ //- /main.rs
+ struct S<T> { x: T }
+
+ impl S<u32> {
+ fn foo() {
+ Self { x: 1 };
+ }
+ }
+ "#,
+ expect![[r#"
+ 49..79 '{ ... }': ()
+ 59..72 'Self { x: 1 }': S<u32>
+ 69..70 '1': u32
+ "#]],
+ );
+}
+
+#[test]
+fn type_alias_in_struct_lit() {
+ check_infer(
+ r#"
+ //- /main.rs
+ struct S<T> { x: T }
+
+ type SS = S<u32>;
+
+ fn foo() {
+ SS { x: 1 };
+ }
+ "#,
+ expect![[r#"
+ 50..70 '{ ...1 }; }': ()
+ 56..67 'SS { x: 1 }': S<u32>
+ 64..65 '1': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_ranges() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+fn test() {
+ let a = ..;
+ let b = 1..;
+ let c = ..2u32;
+ let d = 1..2usize;
+ let e = ..=10;
+ let f = 'a'..='z';
+
+ let t = (a, b, c, d, e, f);
+ t;
+} //^ (RangeFull, RangeFrom<i32>, RangeTo<u32>, Range<usize>, RangeToInclusive<i32>, RangeInclusive<char>)
+
+//- /core.rs crate:core
+#[prelude_import] use prelude::*;
+mod prelude {}
+
+pub mod ops {
+ pub struct Range<Idx> {
+ pub start: Idx,
+ pub end: Idx,
+ }
+ pub struct RangeFrom<Idx> {
+ pub start: Idx,
+ }
+ struct RangeFull;
+ pub struct RangeInclusive<Idx> {
+ start: Idx,
+ end: Idx,
+ is_empty: u8,
+ }
+ pub struct RangeTo<Idx> {
+ pub end: Idx,
+ }
+ pub struct RangeToInclusive<Idx> {
+ pub end: Idx,
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_while_let() {
+ check_types(
+ r#"
+enum Option<T> { Some(T), None }
+
+fn test() {
+ let foo: Option<f32> = None;
+ while let Option::Some(x) = foo {
+ x
+ } //^ f32
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_basics() {
+ check_infer(
+ r#"
+ fn test(a: u32, b: isize, c: !, d: &str) {
+ a;
+ b;
+ c;
+ d;
+ 1usize;
+ 1isize;
+ "test";
+ 1.0f32;
+ }"#,
+ expect![[r#"
+ 8..9 'a': u32
+ 16..17 'b': isize
+ 26..27 'c': !
+ 32..33 'd': &str
+ 41..120 '{ ...f32; }': ()
+ 47..48 'a': u32
+ 54..55 'b': isize
+ 61..62 'c': !
+ 68..69 'd': &str
+ 75..81 '1usize': usize
+ 87..93 '1isize': isize
+ 99..105 '"test"': &str
+ 111..117 '1.0f32': f32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_let() {
+ check_infer(
+ r#"
+ fn test() {
+ let a = 1isize;
+ let b: usize = 1;
+ let c = b;
+ let d: u32;
+ let e;
+ let f: i32 = e;
+ }
+ "#,
+ expect![[r#"
+ 10..117 '{ ...= e; }': ()
+ 20..21 'a': isize
+ 24..30 '1isize': isize
+ 40..41 'b': usize
+ 51..52 '1': usize
+ 62..63 'c': usize
+ 66..67 'b': usize
+ 77..78 'd': u32
+ 93..94 'e': i32
+ 104..105 'f': i32
+ 113..114 'e': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_paths() {
+ check_infer(
+ r#"
+ fn a() -> u32 { 1 }
+
+ mod b {
+ fn c() -> u32 { 1 }
+ }
+
+ fn test() {
+ a();
+ b::c();
+ }
+ "#,
+ expect![[r#"
+ 14..19 '{ 1 }': u32
+ 16..17 '1': u32
+ 47..52 '{ 1 }': u32
+ 49..50 '1': u32
+ 66..90 '{ ...c(); }': ()
+ 72..73 'a': fn a() -> u32
+ 72..75 'a()': u32
+ 81..85 'b::c': fn c() -> u32
+ 81..87 'b::c()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_path_type() {
+ check_infer(
+ r#"
+ struct S;
+
+ impl S {
+ fn foo() -> i32 { 1 }
+ }
+
+ fn test() {
+ S::foo();
+ <S>::foo();
+ }
+ "#,
+ expect![[r#"
+ 40..45 '{ 1 }': i32
+ 42..43 '1': i32
+ 59..92 '{ ...o(); }': ()
+ 65..71 'S::foo': fn foo() -> i32
+ 65..73 'S::foo()': i32
+ 79..87 '<S>::foo': fn foo() -> i32
+ 79..89 '<S>::foo()': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_struct() {
+ check_infer(
+ r#"
+ struct A {
+ b: B,
+ c: C,
+ }
+ struct B;
+ struct C(usize);
+
+ fn test() {
+ let c = C(1);
+ B;
+ let a: A = A { b: B, c: C(1) };
+ a.b;
+ a.c;
+ }
+ "#,
+ expect![[r#"
+ 71..153 '{ ...a.c; }': ()
+ 81..82 'c': C
+ 85..86 'C': C(usize) -> C
+ 85..89 'C(1)': C
+ 87..88 '1': usize
+ 95..96 'B': B
+ 106..107 'a': A
+ 113..132 'A { b:...C(1) }': A
+ 120..121 'B': B
+ 126..127 'C': C(usize) -> C
+ 126..130 'C(1)': C
+ 128..129 '1': usize
+ 138..139 'a': A
+ 138..141 'a.b': B
+ 147..148 'a': A
+ 147..150 'a.c': C
+ "#]],
+ );
+}
+
+#[test]
+fn infer_enum() {
+ check_infer(
+ r#"
+ enum E {
+ V1 { field: u32 },
+ V2
+ }
+ fn test() {
+ E::V1 { field: 1 };
+ E::V2;
+ }"#,
+ expect![[r#"
+ 51..89 '{ ...:V2; }': ()
+ 57..75 'E::V1 ...d: 1 }': E
+ 72..73 '1': u32
+ 81..86 'E::V2': E
+ "#]],
+ );
+}
+
+#[test]
+fn infer_union() {
+ check_infer(
+ r#"
+ union MyUnion {
+ foo: u32,
+ bar: f32,
+ }
+
+ fn test() {
+ let u = MyUnion { foo: 0 };
+ unsafe { baz(u); }
+ let u = MyUnion { bar: 0.0 };
+ unsafe { baz(u); }
+ }
+
+ unsafe fn baz(u: MyUnion) {
+ let inner = u.foo;
+ let inner = u.bar;
+ }
+ "#,
+ expect![[r#"
+ 57..172 '{ ...); } }': ()
+ 67..68 'u': MyUnion
+ 71..89 'MyUnio...o: 0 }': MyUnion
+ 86..87 '0': u32
+ 95..113 'unsafe...(u); }': ()
+ 102..113 '{ baz(u); }': ()
+ 104..107 'baz': fn baz(MyUnion)
+ 104..110 'baz(u)': ()
+ 108..109 'u': MyUnion
+ 122..123 'u': MyUnion
+ 126..146 'MyUnio... 0.0 }': MyUnion
+ 141..144 '0.0': f32
+ 152..170 'unsafe...(u); }': ()
+ 159..170 '{ baz(u); }': ()
+ 161..164 'baz': fn baz(MyUnion)
+ 161..167 'baz(u)': ()
+ 165..166 'u': MyUnion
+ 188..189 'u': MyUnion
+ 200..249 '{ ...bar; }': ()
+ 210..215 'inner': u32
+ 218..219 'u': MyUnion
+ 218..223 'u.foo': u32
+ 233..238 'inner': f32
+ 241..242 'u': MyUnion
+ 241..246 'u.bar': f32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_refs() {
+ check_infer(
+ r#"
+ fn test(a: &u32, b: &mut u32, c: *const u32, d: *mut u32) {
+ a;
+ *a;
+ &a;
+ &mut a;
+ b;
+ *b;
+ &b;
+ c;
+ *c;
+ d;
+ *d;
+ }
+ "#,
+ expect![[r#"
+ 8..9 'a': &u32
+ 17..18 'b': &mut u32
+ 30..31 'c': *const u32
+ 45..46 'd': *mut u32
+ 58..149 '{ ... *d; }': ()
+ 64..65 'a': &u32
+ 71..73 '*a': u32
+ 72..73 'a': &u32
+ 79..81 '&a': &&u32
+ 80..81 'a': &u32
+ 87..93 '&mut a': &mut &u32
+ 92..93 'a': &u32
+ 99..100 'b': &mut u32
+ 106..108 '*b': u32
+ 107..108 'b': &mut u32
+ 114..116 '&b': &&mut u32
+ 115..116 'b': &mut u32
+ 122..123 'c': *const u32
+ 129..131 '*c': u32
+ 130..131 'c': *const u32
+ 137..138 'd': *mut u32
+ 144..146 '*d': u32
+ 145..146 'd': *mut u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_raw_ref() {
+ check_infer(
+ r#"
+ fn test(a: i32) {
+ &raw mut a;
+ &raw const a;
+ }
+ "#,
+ expect![[r#"
+ 8..9 'a': i32
+ 16..53 '{ ...t a; }': ()
+ 22..32 '&raw mut a': *mut i32
+ 31..32 'a': i32
+ 38..50 '&raw const a': *const i32
+ 49..50 'a': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_literals() {
+ check_infer(
+ r##"
+ fn test() {
+ 5i32;
+ 5f32;
+ 5f64;
+ "hello";
+ b"bytes";
+ 'c';
+ b'b';
+ 3.14;
+ 5000;
+ false;
+ true;
+ r#"
+ //! doc
+ // non-doc
+ mod foo {}
+ "#;
+ br#"yolo"#;
+ }
+ "##,
+ expect![[r##"
+ 10..216 '{ ...o"#; }': ()
+ 16..20 '5i32': i32
+ 26..30 '5f32': f32
+ 36..40 '5f64': f64
+ 46..53 '"hello"': &str
+ 59..67 'b"bytes"': &[u8; _]
+ 73..76 ''c'': char
+ 82..86 'b'b'': u8
+ 92..96 '3.14': f64
+ 102..106 '5000': i32
+ 112..117 'false': bool
+ 123..127 'true': bool
+ 133..197 'r#" ... "#': &str
+ 203..213 'br#"yolo"#': &[u8; _]
+ "##]],
+ );
+}
+
+#[test]
+fn infer_unary_op() {
+ check_infer(
+ r#"
+ enum SomeType {}
+
+ fn test(x: SomeType) {
+ let b = false;
+ let c = !b;
+ let a = 100;
+ let d: i128 = -a;
+ let e = -100;
+ let f = !!!true;
+ let g = !42;
+ let h = !10u32;
+ let j = !a;
+ -3.14;
+ !3;
+ -x;
+ !x;
+ -"hello";
+ !"hello";
+ }
+ "#,
+ expect![[r#"
+ 26..27 'x': SomeType
+ 39..271 '{ ...lo"; }': ()
+ 49..50 'b': bool
+ 53..58 'false': bool
+ 68..69 'c': bool
+ 72..74 '!b': bool
+ 73..74 'b': bool
+ 84..85 'a': i128
+ 88..91 '100': i128
+ 101..102 'd': i128
+ 111..113 '-a': i128
+ 112..113 'a': i128
+ 123..124 'e': i32
+ 127..131 '-100': i32
+ 128..131 '100': i32
+ 141..142 'f': bool
+ 145..152 '!!!true': bool
+ 146..152 '!!true': bool
+ 147..152 '!true': bool
+ 148..152 'true': bool
+ 162..163 'g': i32
+ 166..169 '!42': i32
+ 167..169 '42': i32
+ 179..180 'h': u32
+ 183..189 '!10u32': u32
+ 184..189 '10u32': u32
+ 199..200 'j': i128
+ 203..205 '!a': i128
+ 204..205 'a': i128
+ 211..216 '-3.14': f64
+ 212..216 '3.14': f64
+ 222..224 '!3': i32
+ 223..224 '3': i32
+ 230..232 '-x': {unknown}
+ 231..232 'x': SomeType
+ 238..240 '!x': {unknown}
+ 239..240 'x': SomeType
+ 246..254 '-"hello"': {unknown}
+ 247..254 '"hello"': &str
+ 260..268 '!"hello"': {unknown}
+ 261..268 '"hello"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_backwards() {
+ check_infer(
+ r#"
+ fn takes_u32(x: u32) {}
+
+ struct S { i32_field: i32 }
+
+ fn test() -> &mut &f64 {
+ let a = unknown_function();
+ takes_u32(a);
+ let b = unknown_function();
+ S { i32_field: b };
+ let c = unknown_function();
+ &mut &c
+ }
+ "#,
+ expect![[r#"
+ 13..14 'x': u32
+ 21..23 '{}': ()
+ 77..230 '{ ...t &c }': &mut &f64
+ 87..88 'a': u32
+ 91..107 'unknow...nction': {unknown}
+ 91..109 'unknow...tion()': u32
+ 115..124 'takes_u32': fn takes_u32(u32)
+ 115..127 'takes_u32(a)': ()
+ 125..126 'a': u32
+ 137..138 'b': i32
+ 141..157 'unknow...nction': {unknown}
+ 141..159 'unknow...tion()': i32
+ 165..183 'S { i3...d: b }': S
+ 180..181 'b': i32
+ 193..194 'c': f64
+ 197..213 'unknow...nction': {unknown}
+ 197..215 'unknow...tion()': f64
+ 221..228 '&mut &c': &mut &f64
+ 226..228 '&c': &f64
+ 227..228 'c': f64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_self() {
+ check_infer(
+ r#"
+ struct S;
+
+ impl S {
+ fn test(&self) {
+ self;
+ }
+ fn test2(self: &Self) {
+ self;
+ }
+ fn test3() -> Self {
+ S {}
+ }
+ fn test4() -> Self {
+ Self {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 33..37 'self': &S
+ 39..60 '{ ... }': ()
+ 49..53 'self': &S
+ 74..78 'self': &S
+ 87..108 '{ ... }': ()
+ 97..101 'self': &S
+ 132..152 '{ ... }': S
+ 142..146 'S {}': S
+ 176..199 '{ ... }': S
+ 186..193 'Self {}': S
+ "#]],
+ );
+}
+
+#[test]
+fn infer_self_as_path() {
+ check_infer(
+ r#"
+ struct S1;
+ struct S2(isize);
+ enum E {
+ V1,
+ V2(u32),
+ }
+
+ impl S1 {
+ fn test() {
+ Self;
+ }
+ }
+ impl S2 {
+ fn test() {
+ Self(1);
+ }
+ }
+ impl E {
+ fn test() {
+ Self::V1;
+ Self::V2(1);
+ }
+ }
+ "#,
+ expect![[r#"
+ 86..107 '{ ... }': ()
+ 96..100 'Self': S1
+ 134..158 '{ ... }': ()
+ 144..148 'Self': S2(isize) -> S2
+ 144..151 'Self(1)': S2
+ 149..150 '1': isize
+ 184..230 '{ ... }': ()
+ 194..202 'Self::V1': E
+ 212..220 'Self::V2': V2(u32) -> E
+ 212..223 'Self::V2(1)': E
+ 221..222 '1': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_binary_op() {
+ check_infer(
+ r#"
+ fn f(x: bool) -> i32 {
+ 0i32
+ }
+
+ fn test() -> bool {
+ let x = a && b;
+ let y = true || false;
+ let z = x == y;
+ let t = x != y;
+ let minus_forty: isize = -40isize;
+ let h = minus_forty <= CONST_2;
+ let c = f(z || y) + 5;
+ let d = b;
+ let g = minus_forty ^= i;
+ let ten: usize = 10;
+ let ten_is_eleven = ten == some_num;
+
+ ten < 3
+ }
+ "#,
+ expect![[r#"
+ 5..6 'x': bool
+ 21..33 '{ 0i32 }': i32
+ 27..31 '0i32': i32
+ 53..369 '{ ... < 3 }': bool
+ 63..64 'x': bool
+ 67..68 'a': bool
+ 67..73 'a && b': bool
+ 72..73 'b': bool
+ 83..84 'y': bool
+ 87..91 'true': bool
+ 87..100 'true || false': bool
+ 95..100 'false': bool
+ 110..111 'z': bool
+ 114..115 'x': bool
+ 114..120 'x == y': bool
+ 119..120 'y': bool
+ 130..131 't': bool
+ 134..135 'x': bool
+ 134..140 'x != y': bool
+ 139..140 'y': bool
+ 150..161 'minus_forty': isize
+ 171..179 '-40isize': isize
+ 172..179 '40isize': isize
+ 189..190 'h': bool
+ 193..204 'minus_forty': isize
+ 193..215 'minus_...ONST_2': bool
+ 208..215 'CONST_2': isize
+ 225..226 'c': i32
+ 229..230 'f': fn f(bool) -> i32
+ 229..238 'f(z || y)': i32
+ 229..242 'f(z || y) + 5': i32
+ 231..232 'z': bool
+ 231..237 'z || y': bool
+ 236..237 'y': bool
+ 241..242 '5': i32
+ 252..253 'd': {unknown}
+ 256..257 'b': {unknown}
+ 267..268 'g': ()
+ 271..282 'minus_forty': isize
+ 271..287 'minus_...y ^= i': ()
+ 286..287 'i': isize
+ 297..300 'ten': usize
+ 310..312 '10': usize
+ 322..335 'ten_is_eleven': bool
+ 338..341 'ten': usize
+ 338..353 'ten == some_num': bool
+ 345..353 'some_num': usize
+ 360..363 'ten': usize
+ 360..367 'ten < 3': bool
+ 366..367 '3': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_shift_op() {
+ check_infer(
+ r#"
+ fn test() {
+ 1u32 << 5u8;
+ 1u32 >> 5u8;
+ }
+ "#,
+ expect![[r#"
+ 10..47 '{ ...5u8; }': ()
+ 16..20 '1u32': u32
+ 16..27 '1u32 << 5u8': u32
+ 24..27 '5u8': u8
+ 33..37 '1u32': u32
+ 33..44 '1u32 >> 5u8': u32
+ 41..44 '5u8': u8
+ "#]],
+ );
+}
+
+#[test]
+fn infer_field_autoderef() {
+ check_infer(
+ r#"
+ struct A {
+ b: B,
+ }
+ struct B;
+
+ fn test1(a: A) {
+ let a1 = a;
+ a1.b;
+ let a2 = &a;
+ a2.b;
+ let a3 = &mut a;
+ a3.b;
+ let a4 = &&&&&&&a;
+ a4.b;
+ let a5 = &mut &&mut &&mut a;
+ a5.b;
+ }
+
+ fn test2(a1: *const A, a2: *mut A) {
+ a1.b;
+ a2.b;
+ }
+ "#,
+ expect![[r#"
+ 43..44 'a': A
+ 49..212 '{ ...5.b; }': ()
+ 59..61 'a1': A
+ 64..65 'a': A
+ 71..73 'a1': A
+ 71..75 'a1.b': B
+ 85..87 'a2': &A
+ 90..92 '&a': &A
+ 91..92 'a': A
+ 98..100 'a2': &A
+ 98..102 'a2.b': B
+ 112..114 'a3': &mut A
+ 117..123 '&mut a': &mut A
+ 122..123 'a': A
+ 129..131 'a3': &mut A
+ 129..133 'a3.b': B
+ 143..145 'a4': &&&&&&&A
+ 148..156 '&&&&&&&a': &&&&&&&A
+ 149..156 '&&&&&&a': &&&&&&A
+ 150..156 '&&&&&a': &&&&&A
+ 151..156 '&&&&a': &&&&A
+ 152..156 '&&&a': &&&A
+ 153..156 '&&a': &&A
+ 154..156 '&a': &A
+ 155..156 'a': A
+ 162..164 'a4': &&&&&&&A
+ 162..166 'a4.b': B
+ 176..178 'a5': &mut &&mut &&mut A
+ 181..199 '&mut &...&mut a': &mut &&mut &&mut A
+ 186..199 '&&mut &&mut a': &&mut &&mut A
+ 187..199 '&mut &&mut a': &mut &&mut A
+ 192..199 '&&mut a': &&mut A
+ 193..199 '&mut a': &mut A
+ 198..199 'a': A
+ 205..207 'a5': &mut &&mut &&mut A
+ 205..209 'a5.b': B
+ 223..225 'a1': *const A
+ 237..239 'a2': *mut A
+ 249..272 '{ ...2.b; }': ()
+ 255..257 'a1': *const A
+ 255..259 'a1.b': B
+ 265..267 'a2': *mut A
+ 265..269 'a2.b': B
+ "#]],
+ );
+}
+
+#[test]
+fn infer_argument_autoderef() {
+ check_infer(
+ r#"
+ #[lang = "deref"]
+ pub trait Deref {
+ type Target;
+ fn deref(&self) -> &Self::Target;
+ }
+
+ struct A<T>(T);
+
+ impl<T> A<T> {
+ fn foo(&self) -> &T {
+ &self.0
+ }
+ }
+
+ struct B<T>(T);
+
+ impl<T> Deref for B<T> {
+ type Target = T;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+ }
+
+ fn test() {
+ let t = A::foo(&&B(B(A(42))));
+ }
+ "#,
+ expect![[r#"
+ 67..71 'self': &Self
+ 138..142 'self': &A<T>
+ 150..173 '{ ... }': &T
+ 160..167 '&self.0': &T
+ 161..165 'self': &A<T>
+ 161..167 'self.0': T
+ 254..258 'self': &B<T>
+ 277..300 '{ ... }': &T
+ 287..294 '&self.0': &T
+ 288..292 'self': &B<T>
+ 288..294 'self.0': T
+ 314..352 '{ ...))); }': ()
+ 324..325 't': &i32
+ 328..334 'A::foo': fn foo<i32>(&A<i32>) -> &i32
+ 328..349 'A::foo...42))))': &i32
+ 335..348 '&&B(B(A(42)))': &&B<B<A<i32>>>
+ 336..348 '&B(B(A(42)))': &B<B<A<i32>>>
+ 337..338 'B': B<B<A<i32>>>(B<A<i32>>) -> B<B<A<i32>>>
+ 337..348 'B(B(A(42)))': B<B<A<i32>>>
+ 339..340 'B': B<A<i32>>(A<i32>) -> B<A<i32>>
+ 339..347 'B(A(42))': B<A<i32>>
+ 341..342 'A': A<i32>(i32) -> A<i32>
+ 341..346 'A(42)': A<i32>
+ 343..345 '42': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_method_argument_autoderef() {
+ check_infer(
+ r#"
+ #[lang = "deref"]
+ pub trait Deref {
+ type Target;
+ fn deref(&self) -> &Self::Target;
+ }
+
+ struct A<T>(*mut T);
+
+ impl<T> A<T> {
+ fn foo(&self, x: &A<T>) -> &T {
+ &*x.0
+ }
+ }
+
+ struct B<T>(T);
+
+ impl<T> Deref for B<T> {
+ type Target = T;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+ }
+
+ fn test(a: A<i32>) {
+ let t = A(0 as *mut _).foo(&&B(B(a)));
+ }
+ "#,
+ expect![[r#"
+ 67..71 'self': &Self
+ 143..147 'self': &A<T>
+ 149..150 'x': &A<T>
+ 165..186 '{ ... }': &T
+ 175..180 '&*x.0': &T
+ 176..180 '*x.0': T
+ 177..178 'x': &A<T>
+ 177..180 'x.0': *mut T
+ 267..271 'self': &B<T>
+ 290..313 '{ ... }': &T
+ 300..307 '&self.0': &T
+ 301..305 'self': &B<T>
+ 301..307 'self.0': T
+ 325..326 'a': A<i32>
+ 336..382 '{ ...))); }': ()
+ 346..347 't': &i32
+ 350..351 'A': A<i32>(*mut i32) -> A<i32>
+ 350..364 'A(0 as *mut _)': A<i32>
+ 350..379 'A(0 as...B(a)))': &i32
+ 352..353 '0': i32
+ 352..363 '0 as *mut _': *mut i32
+ 369..378 '&&B(B(a))': &&B<B<A<i32>>>
+ 370..378 '&B(B(a))': &B<B<A<i32>>>
+ 371..372 'B': B<B<A<i32>>>(B<A<i32>>) -> B<B<A<i32>>>
+ 371..378 'B(B(a))': B<B<A<i32>>>
+ 373..374 'B': B<A<i32>>(A<i32>) -> B<A<i32>>
+ 373..377 'B(a)': B<A<i32>>
+ 375..376 'a': A<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_in_elseif() {
+ check_infer(
+ r#"
+ struct Foo { field: i32 }
+ fn main(foo: Foo) {
+ if true {
+
+ } else if false {
+ foo.field
+ }
+ }
+ "#,
+ expect![[r#"
+ 34..37 'foo': Foo
+ 44..108 '{ ... } }': ()
+ 50..106 'if tru... }': ()
+ 53..57 'true': bool
+ 58..66 '{ }': ()
+ 72..106 'if fal... }': i32
+ 75..80 'false': bool
+ 81..106 '{ ... }': i32
+ 91..94 'foo': Foo
+ 91..100 'foo.field': i32
+ "#]],
+ )
+}
+
+#[test]
+fn infer_if_match_with_return() {
+ check_infer(
+ r#"
+ fn foo() {
+ let _x1 = if true {
+ 1
+ } else {
+ return;
+ };
+ let _x2 = if true {
+ 2
+ } else {
+ return
+ };
+ let _x3 = match true {
+ true => 3,
+ _ => {
+ return;
+ }
+ };
+ let _x4 = match true {
+ true => 4,
+ _ => return
+ };
+ }"#,
+ expect![[r#"
+ 9..322 '{ ... }; }': ()
+ 19..22 '_x1': i32
+ 25..79 'if tru... }': i32
+ 28..32 'true': bool
+ 33..50 '{ ... }': i32
+ 43..44 '1': i32
+ 56..79 '{ ... }': i32
+ 66..72 'return': !
+ 89..92 '_x2': i32
+ 95..148 'if tru... }': i32
+ 98..102 'true': bool
+ 103..120 '{ ... }': i32
+ 113..114 '2': i32
+ 126..148 '{ ... }': !
+ 136..142 'return': !
+ 158..161 '_x3': i32
+ 164..246 'match ... }': i32
+ 170..174 'true': bool
+ 185..189 'true': bool
+ 185..189 'true': bool
+ 193..194 '3': i32
+ 204..205 '_': bool
+ 209..240 '{ ... }': i32
+ 223..229 'return': !
+ 256..259 '_x4': i32
+ 262..319 'match ... }': i32
+ 268..272 'true': bool
+ 283..287 'true': bool
+ 283..287 'true': bool
+ 291..292 '4': i32
+ 302..303 '_': bool
+ 307..313 'return': !
+ "#]],
+ )
+}
+
+#[test]
+fn infer_inherent_method() {
+ check_infer(
+ r#"
+ struct A;
+
+ impl A {
+ fn foo(self, x: u32) -> i32 {}
+ }
+
+ mod b {
+ impl super::A {
+ fn bar(&self, x: u64) -> i64 {}
+ }
+ }
+
+ fn test(a: A) {
+ a.foo(1);
+ (&a).bar(1);
+ a.bar(1);
+ }
+ "#,
+ expect![[r#"
+ 31..35 'self': A
+ 37..38 'x': u32
+ 52..54 '{}': ()
+ 102..106 'self': &A
+ 108..109 'x': u64
+ 123..125 '{}': ()
+ 143..144 'a': A
+ 149..197 '{ ...(1); }': ()
+ 155..156 'a': A
+ 155..163 'a.foo(1)': i32
+ 161..162 '1': u32
+ 169..180 '(&a).bar(1)': i64
+ 170..172 '&a': &A
+ 171..172 'a': A
+ 178..179 '1': u64
+ 186..187 'a': A
+ 186..194 'a.bar(1)': i64
+ 192..193 '1': u64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_inherent_method_str() {
+ check_infer(
+ r#"
+ #[lang = "str"]
+ impl str {
+ fn foo(&self) -> i32 {}
+ }
+
+ fn test() {
+ "foo".foo();
+ }
+ "#,
+ expect![[r#"
+ 39..43 'self': &str
+ 52..54 '{}': ()
+ 68..88 '{ ...o(); }': ()
+ 74..79 '"foo"': &str
+ 74..85 '"foo".foo()': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_tuple() {
+ check_infer(
+ r#"
+ fn test(x: &str, y: isize) {
+ let a: (u32, &str) = (1, "a");
+ let b = (a, x);
+ let c = (y, x);
+ let d = (c, x);
+ let e = (1, "e");
+ let f = (e, "d");
+ }
+ "#,
+ expect![[r#"
+ 8..9 'x': &str
+ 17..18 'y': isize
+ 27..169 '{ ...d"); }': ()
+ 37..38 'a': (u32, &str)
+ 54..62 '(1, "a")': (u32, &str)
+ 55..56 '1': u32
+ 58..61 '"a"': &str
+ 72..73 'b': ((u32, &str), &str)
+ 76..82 '(a, x)': ((u32, &str), &str)
+ 77..78 'a': (u32, &str)
+ 80..81 'x': &str
+ 92..93 'c': (isize, &str)
+ 96..102 '(y, x)': (isize, &str)
+ 97..98 'y': isize
+ 100..101 'x': &str
+ 112..113 'd': ((isize, &str), &str)
+ 116..122 '(c, x)': ((isize, &str), &str)
+ 117..118 'c': (isize, &str)
+ 120..121 'x': &str
+ 132..133 'e': (i32, &str)
+ 136..144 '(1, "e")': (i32, &str)
+ 137..138 '1': i32
+ 140..143 '"e"': &str
+ 154..155 'f': ((i32, &str), &str)
+ 158..166 '(e, "d")': ((i32, &str), &str)
+ 159..160 'e': (i32, &str)
+ 162..165 '"d"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_array() {
+ check_infer(
+ r#"
+ fn test(x: &str, y: isize) {
+ let a = [x];
+ let b = [a, a];
+ let c = [b, b];
+
+ let d = [y, 1, 2, 3];
+ let d = [1, y, 2, 3];
+ let e = [y];
+ let f = [d, d];
+ let g = [e, e];
+
+ let h = [1, 2];
+ let i = ["a", "b"];
+
+ let b = [a, ["b"]];
+ let x: [u8; 0] = [];
+ }
+ "#,
+ expect![[r#"
+ 8..9 'x': &str
+ 17..18 'y': isize
+ 27..292 '{ ... []; }': ()
+ 37..38 'a': [&str; _]
+ 41..44 '[x]': [&str; _]
+ 42..43 'x': &str
+ 54..55 'b': [[&str; _]; _]
+ 58..64 '[a, a]': [[&str; _]; _]
+ 59..60 'a': [&str; _]
+ 62..63 'a': [&str; _]
+ 74..75 'c': [[[&str; _]; _]; _]
+ 78..84 '[b, b]': [[[&str; _]; _]; _]
+ 79..80 'b': [[&str; _]; _]
+ 82..83 'b': [[&str; _]; _]
+ 95..96 'd': [isize; _]
+ 99..111 '[y, 1, 2, 3]': [isize; _]
+ 100..101 'y': isize
+ 103..104 '1': isize
+ 106..107 '2': isize
+ 109..110 '3': isize
+ 121..122 'd': [isize; _]
+ 125..137 '[1, y, 2, 3]': [isize; _]
+ 126..127 '1': isize
+ 129..130 'y': isize
+ 132..133 '2': isize
+ 135..136 '3': isize
+ 147..148 'e': [isize; _]
+ 151..154 '[y]': [isize; _]
+ 152..153 'y': isize
+ 164..165 'f': [[isize; _]; _]
+ 168..174 '[d, d]': [[isize; _]; _]
+ 169..170 'd': [isize; _]
+ 172..173 'd': [isize; _]
+ 184..185 'g': [[isize; _]; _]
+ 188..194 '[e, e]': [[isize; _]; _]
+ 189..190 'e': [isize; _]
+ 192..193 'e': [isize; _]
+ 205..206 'h': [i32; _]
+ 209..215 '[1, 2]': [i32; _]
+ 210..211 '1': i32
+ 213..214 '2': i32
+ 225..226 'i': [&str; _]
+ 229..239 '["a", "b"]': [&str; _]
+ 230..233 '"a"': &str
+ 235..238 '"b"': &str
+ 250..251 'b': [[&str; _]; _]
+ 254..264 '[a, ["b"]]': [[&str; _]; _]
+ 255..256 'a': [&str; _]
+ 258..263 '["b"]': [&str; _]
+ 259..262 '"b"': &str
+ 274..275 'x': [u8; _]
+ 287..289 '[]': [u8; _]
+ "#]],
+ );
+}
+
+#[test]
+fn infer_struct_generics() {
+ check_infer(
+ r#"
+ struct A<T> {
+ x: T,
+ }
+
+ fn test(a1: A<u32>, i: i32) {
+ a1.x;
+ let a2 = A { x: i };
+ a2.x;
+ let a3 = A::<i128> { x: 1 };
+ a3.x;
+ }
+ "#,
+ expect![[r#"
+ 35..37 'a1': A<u32>
+ 47..48 'i': i32
+ 55..146 '{ ...3.x; }': ()
+ 61..63 'a1': A<u32>
+ 61..65 'a1.x': u32
+ 75..77 'a2': A<i32>
+ 80..90 'A { x: i }': A<i32>
+ 87..88 'i': i32
+ 96..98 'a2': A<i32>
+ 96..100 'a2.x': i32
+ 110..112 'a3': A<i128>
+ 115..133 'A::<i1...x: 1 }': A<i128>
+ 130..131 '1': i128
+ 139..141 'a3': A<i128>
+ 139..143 'a3.x': i128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_tuple_struct_generics() {
+ check_infer(
+ r#"
+ struct A<T>(T);
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ A(42);
+ A(42u128);
+ Some("x");
+ Option::Some("x");
+ None;
+ let x: Option<i64> = None;
+ }
+ "#,
+ expect![[r#"
+ 75..183 '{ ...one; }': ()
+ 81..82 'A': A<i32>(i32) -> A<i32>
+ 81..86 'A(42)': A<i32>
+ 83..85 '42': i32
+ 92..93 'A': A<u128>(u128) -> A<u128>
+ 92..101 'A(42u128)': A<u128>
+ 94..100 '42u128': u128
+ 107..111 'Some': Some<&str>(&str) -> Option<&str>
+ 107..116 'Some("x")': Option<&str>
+ 112..115 '"x"': &str
+ 122..134 'Option::Some': Some<&str>(&str) -> Option<&str>
+ 122..139 'Option...e("x")': Option<&str>
+ 135..138 '"x"': &str
+ 145..149 'None': Option<{unknown}>
+ 159..160 'x': Option<i64>
+ 176..180 'None': Option<i64>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_function_generics() {
+ check_infer(
+ r#"
+ fn id<T>(t: T) -> T { t }
+
+ fn test() {
+ id(1u32);
+ id::<i128>(1);
+ let x: u64 = id(1);
+ }
+ "#,
+ expect![[r#"
+ 9..10 't': T
+ 20..25 '{ t }': T
+ 22..23 't': T
+ 37..97 '{ ...(1); }': ()
+ 43..45 'id': fn id<u32>(u32) -> u32
+ 43..51 'id(1u32)': u32
+ 46..50 '1u32': u32
+ 57..67 'id::<i128>': fn id<i128>(i128) -> i128
+ 57..70 'id::<i128>(1)': i128
+ 68..69 '1': i128
+ 80..81 'x': u64
+ 89..91 'id': fn id<u64>(u64) -> u64
+ 89..94 'id(1)': u64
+ 92..93 '1': u64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_impl_generics_basic() {
+ check_infer(
+ r#"
+ struct A<T1, T2> {
+ x: T1,
+ y: T2,
+ }
+ impl<Y, X> A<X, Y> {
+ fn x(self) -> X {
+ self.x
+ }
+ fn y(self) -> Y {
+ self.y
+ }
+ fn z<T>(self, t: T) -> (X, Y, T) {
+ (self.x, self.y, t)
+ }
+ }
+
+ fn test() -> i128 {
+ let a = A { x: 1u64, y: 1i64 };
+ a.x();
+ a.y();
+ a.z(1i128);
+ a.z::<u128>(1);
+ }
+ "#,
+ expect![[r#"
+ 73..77 'self': A<X, Y>
+ 84..106 '{ ... }': X
+ 94..98 'self': A<X, Y>
+ 94..100 'self.x': X
+ 116..120 'self': A<X, Y>
+ 127..149 '{ ... }': Y
+ 137..141 'self': A<X, Y>
+ 137..143 'self.y': Y
+ 162..166 'self': A<X, Y>
+ 168..169 't': T
+ 187..222 '{ ... }': (X, Y, T)
+ 197..216 '(self.....y, t)': (X, Y, T)
+ 198..202 'self': A<X, Y>
+ 198..204 'self.x': X
+ 206..210 'self': A<X, Y>
+ 206..212 'self.y': Y
+ 214..215 't': T
+ 244..341 '{ ...(1); }': ()
+ 254..255 'a': A<u64, i64>
+ 258..280 'A { x:...1i64 }': A<u64, i64>
+ 265..269 '1u64': u64
+ 274..278 '1i64': i64
+ 286..287 'a': A<u64, i64>
+ 286..291 'a.x()': u64
+ 297..298 'a': A<u64, i64>
+ 297..302 'a.y()': i64
+ 308..309 'a': A<u64, i64>
+ 308..318 'a.z(1i128)': (u64, i64, i128)
+ 312..317 '1i128': i128
+ 324..325 'a': A<u64, i64>
+ 324..338 'a.z::<u128>(1)': (u64, i64, u128)
+ 336..337 '1': u128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_impl_generics_with_autoderef() {
+ check_infer(
+ r#"
+ enum Option<T> {
+ Some(T),
+ None,
+ }
+ impl<T> Option<T> {
+ fn as_ref(&self) -> Option<&T> {}
+ }
+ fn test(o: Option<u32>) {
+ (&o).as_ref();
+ o.as_ref();
+ }
+ "#,
+ expect![[r#"
+ 77..81 'self': &Option<T>
+ 97..99 '{}': ()
+ 110..111 'o': Option<u32>
+ 126..164 '{ ...f(); }': ()
+ 132..145 '(&o).as_ref()': Option<&u32>
+ 133..135 '&o': &Option<u32>
+ 134..135 'o': Option<u32>
+ 151..152 'o': Option<u32>
+ 151..161 'o.as_ref()': Option<&u32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_generic_chain() {
+ check_infer(
+ r#"
+ struct A<T> {
+ x: T,
+ }
+ impl<T2> A<T2> {
+ fn x(self) -> T2 {
+ self.x
+ }
+ }
+ fn id<T>(t: T) -> T { t }
+
+ fn test() -> i128 {
+ let x = 1;
+ let y = id(x);
+ let a = A { x: id(y) };
+ let z = id(a.x);
+ let b = A { x: z };
+ b.x()
+ }
+ "#,
+ expect![[r#"
+ 52..56 'self': A<T2>
+ 64..86 '{ ... }': T2
+ 74..78 'self': A<T2>
+ 74..80 'self.x': T2
+ 98..99 't': T
+ 109..114 '{ t }': T
+ 111..112 't': T
+ 134..254 '{ ....x() }': i128
+ 144..145 'x': i128
+ 148..149 '1': i128
+ 159..160 'y': i128
+ 163..165 'id': fn id<i128>(i128) -> i128
+ 163..168 'id(x)': i128
+ 166..167 'x': i128
+ 178..179 'a': A<i128>
+ 182..196 'A { x: id(y) }': A<i128>
+ 189..191 'id': fn id<i128>(i128) -> i128
+ 189..194 'id(y)': i128
+ 192..193 'y': i128
+ 206..207 'z': i128
+ 210..212 'id': fn id<i128>(i128) -> i128
+ 210..217 'id(a.x)': i128
+ 213..214 'a': A<i128>
+ 213..216 'a.x': i128
+ 227..228 'b': A<i128>
+ 231..241 'A { x: z }': A<i128>
+ 238..239 'z': i128
+ 247..248 'b': A<i128>
+ 247..252 'b.x()': i128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_const() {
+ check_infer(
+ r#"
+ struct Struct;
+
+ impl Struct {
+ const FOO: u32 = 1;
+ }
+
+ enum Enum {}
+
+ impl Enum {
+ const BAR: u32 = 2;
+ }
+
+ trait Trait {
+ const ID: u32;
+ }
+
+ struct TraitTest;
+
+ impl Trait for TraitTest {
+ const ID: u32 = 5;
+ }
+
+ fn test() {
+ let x = Struct::FOO;
+ let y = Enum::BAR;
+ let z = TraitTest::ID;
+ }
+ "#,
+ expect![[r#"
+ 51..52 '1': u32
+ 104..105 '2': u32
+ 212..213 '5': u32
+ 228..306 '{ ...:ID; }': ()
+ 238..239 'x': u32
+ 242..253 'Struct::FOO': u32
+ 263..264 'y': u32
+ 267..276 'Enum::BAR': u32
+ 286..287 'z': u32
+ 290..303 'TraitTest::ID': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_type_alias() {
+ check_infer(
+ r#"
+ struct A<X, Y> { x: X, y: Y }
+ type Foo = A<u32, i128>;
+ type Bar<T> = A<T, u128>;
+ type Baz<U, V> = A<V, U>;
+ fn test(x: Foo, y: Bar<&str>, z: Baz<i8, u8>) {
+ x.x;
+ x.y;
+ y.x;
+ y.y;
+ z.x;
+ z.y;
+ }
+ "#,
+ expect![[r#"
+ 115..116 'x': A<u32, i128>
+ 123..124 'y': A<&str, u128>
+ 137..138 'z': A<u8, i8>
+ 153..210 '{ ...z.y; }': ()
+ 159..160 'x': A<u32, i128>
+ 159..162 'x.x': u32
+ 168..169 'x': A<u32, i128>
+ 168..171 'x.y': i128
+ 177..178 'y': A<&str, u128>
+ 177..180 'y.x': &str
+ 186..187 'y': A<&str, u128>
+ 186..189 'y.y': u128
+ 195..196 'z': A<u8, i8>
+ 195..198 'z.x': u8
+ 204..205 'z': A<u8, i8>
+ 204..207 'z.y': i8
+ "#]],
+ )
+}
+
+#[test]
+fn recursive_type_alias() {
+ check_infer(
+ r#"
+ struct A<X> {}
+ type Foo = Foo;
+ type Bar = A<Bar>;
+ fn test(x: Foo) {}
+ "#,
+ expect![[r#"
+ 58..59 'x': {unknown}
+ 66..68 '{}': ()
+ "#]],
+ )
+}
+
+#[test]
+fn infer_type_param() {
+ check_infer(
+ r#"
+ fn id<T>(x: T) -> T {
+ x
+ }
+
+ fn clone<T>(x: &T) -> T {
+ *x
+ }
+
+ fn test() {
+ let y = 10u32;
+ id(y);
+ let x: bool = clone(z);
+ id::<i128>(1);
+ }
+ "#,
+ expect![[r#"
+ 9..10 'x': T
+ 20..29 '{ x }': T
+ 26..27 'x': T
+ 43..44 'x': &T
+ 55..65 '{ *x }': T
+ 61..63 '*x': T
+ 62..63 'x': &T
+ 77..157 '{ ...(1); }': ()
+ 87..88 'y': u32
+ 91..96 '10u32': u32
+ 102..104 'id': fn id<u32>(u32) -> u32
+ 102..107 'id(y)': u32
+ 105..106 'y': u32
+ 117..118 'x': bool
+ 127..132 'clone': fn clone<bool>(&bool) -> bool
+ 127..135 'clone(z)': bool
+ 133..134 'z': &bool
+ 141..151 'id::<i128>': fn id<i128>(i128) -> i128
+ 141..154 'id::<i128>(1)': i128
+ 152..153 '1': i128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_const() {
+ check_infer(
+ r#"
+ struct Foo;
+ impl Foo { const ASSOC_CONST: u32 = 0; }
+ const GLOBAL_CONST: u32 = 101;
+ fn test() {
+ const LOCAL_CONST: u32 = 99;
+ let x = LOCAL_CONST;
+ let z = GLOBAL_CONST;
+ let id = Foo::ASSOC_CONST;
+ }
+ "#,
+ expect![[r#"
+ 48..49 '0': u32
+ 79..82 '101': u32
+ 94..212 '{ ...NST; }': ()
+ 137..138 'x': u32
+ 141..152 'LOCAL_CONST': u32
+ 162..163 'z': u32
+ 166..178 'GLOBAL_CONST': u32
+ 188..190 'id': u32
+ 193..209 'Foo::A..._CONST': u32
+ 125..127 '99': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_static() {
+ check_infer(
+ r#"
+ static GLOBAL_STATIC: u32 = 101;
+ static mut GLOBAL_STATIC_MUT: u32 = 101;
+ fn test() {
+ static LOCAL_STATIC: u32 = 99;
+ static mut LOCAL_STATIC_MUT: u32 = 99;
+ let x = LOCAL_STATIC;
+ let y = LOCAL_STATIC_MUT;
+ let z = GLOBAL_STATIC;
+ let w = GLOBAL_STATIC_MUT;
+ }
+ "#,
+ expect![[r#"
+ 28..31 '101': u32
+ 69..72 '101': u32
+ 84..279 '{ ...MUT; }': ()
+ 172..173 'x': u32
+ 176..188 'LOCAL_STATIC': u32
+ 198..199 'y': u32
+ 202..218 'LOCAL_...IC_MUT': u32
+ 228..229 'z': u32
+ 232..245 'GLOBAL_STATIC': u32
+ 255..256 'w': u32
+ 259..276 'GLOBAL...IC_MUT': u32
+ 117..119 '99': u32
+ 160..162 '99': u32
+ "#]],
+ );
+}
+
+#[test]
+fn shadowing_primitive() {
+ check_types(
+ r#"
+struct i32;
+struct Foo;
+
+impl i32 { fn foo(&self) -> Foo { Foo } }
+
+fn main() {
+ let x: i32 = i32;
+ x.foo();
+ //^ Foo
+}"#,
+ );
+}
+
+#[test]
+fn not_shadowing_primitive_by_module() {
+ check_types(
+ r#"
+//- /str.rs
+fn foo() {}
+
+//- /main.rs
+mod str;
+fn foo() -> &'static str { "" }
+
+fn main() {
+ foo();
+ //^ &str
+}"#,
+ );
+}
+
+#[test]
+fn not_shadowing_module_by_primitive() {
+ check_types(
+ r#"
+//- /str.rs
+fn foo() -> u32 {0}
+
+//- /main.rs
+mod str;
+fn foo() -> &'static str { "" }
+
+fn main() {
+ str::foo();
+ //^ u32
+}"#,
+ );
+}
+
+// This test is actually testing the shadowing behavior within hir_def. It
+// lives here because the testing infrastructure in hir_def isn't currently
+// capable of asserting the necessary conditions.
+#[test]
+fn should_be_shadowing_imports() {
+ check_types(
+ r#"
+mod a {
+ pub fn foo() -> i8 {0}
+ pub struct foo { a: i8 }
+}
+mod b { pub fn foo () -> u8 {0} }
+mod c { pub struct foo { a: u8 } }
+mod d {
+ pub use super::a::*;
+ pub use super::c::foo;
+ pub use super::b::foo;
+}
+
+fn main() {
+ d::foo();
+ //^ u8
+ d::foo{a:0};
+ //^ u8
+}"#,
+ );
+}
+
+#[test]
+fn closure_return() {
+ check_infer(
+ r#"
+ fn foo() -> u32 {
+ let x = || -> usize { return 1; };
+ }
+ "#,
+ expect![[r#"
+ 16..58 '{ ...; }; }': ()
+ 26..27 'x': || -> usize
+ 30..55 '|| -> ...n 1; }': || -> usize
+ 42..55 '{ return 1; }': usize
+ 44..52 'return 1': !
+ 51..52 '1': usize
+ "#]],
+ );
+}
+
+#[test]
+fn closure_return_unit() {
+ check_infer(
+ r#"
+ fn foo() -> u32 {
+ let x = || { return; };
+ }
+ "#,
+ expect![[r#"
+ 16..47 '{ ...; }; }': ()
+ 26..27 'x': || -> ()
+ 30..44 '|| { return; }': || -> ()
+ 33..44 '{ return; }': ()
+ 35..41 'return': !
+ "#]],
+ );
+}
+
+#[test]
+fn closure_return_inferred() {
+ check_infer(
+ r#"
+ fn foo() -> u32 {
+ let x = || { "test" };
+ }
+ "#,
+ expect![[r#"
+ 16..46 '{ ..." }; }': ()
+ 26..27 'x': || -> &str
+ 30..43 '|| { "test" }': || -> &str
+ 33..43 '{ "test" }': &str
+ 35..41 '"test"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn fn_pointer_return() {
+ check_infer(
+ r#"
+ struct Vtable {
+ method: fn(),
+ }
+
+ fn main() {
+ let vtable = Vtable { method: || {} };
+ let m = vtable.method;
+ }
+ "#,
+ expect![[r#"
+ 47..120 '{ ...hod; }': ()
+ 57..63 'vtable': Vtable
+ 66..90 'Vtable...| {} }': Vtable
+ 83..88 '|| {}': || -> ()
+ 86..88 '{}': ()
+ 100..101 'm': fn()
+ 104..110 'vtable': Vtable
+ 104..117 'vtable.method': fn()
+ "#]],
+ );
+}
+
+#[test]
+fn effects_smoke_test() {
+ check_infer(
+ r#"
+ fn main() {
+ let x = unsafe { 92 };
+ let y = async { async { () }.await };
+ let z = try { () };
+ let t = 'a: { 92 };
+ }
+ "#,
+ expect![[r#"
+ 10..130 '{ ...2 }; }': ()
+ 20..21 'x': i32
+ 24..37 'unsafe { 92 }': i32
+ 31..37 '{ 92 }': i32
+ 33..35 '92': i32
+ 47..48 'y': {unknown}
+ 57..79 '{ asyn...wait }': {unknown}
+ 59..77 'async ....await': {unknown}
+ 65..71 '{ () }': ()
+ 67..69 '()': ()
+ 89..90 'z': {unknown}
+ 93..103 'try { () }': {unknown}
+ 97..103 '{ () }': ()
+ 99..101 '()': ()
+ 113..114 't': i32
+ 121..127 '{ 92 }': i32
+ 123..125 '92': i32
+ "#]],
+ )
+}
+
+#[test]
+fn infer_generic_from_later_assignment() {
+ check_infer(
+ r#"
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ let mut end = None;
+ loop {
+ end = Some(true);
+ }
+ }
+ "#,
+ expect![[r#"
+ 59..129 '{ ... } }': ()
+ 69..76 'mut end': Option<bool>
+ 79..83 'None': Option<bool>
+ 89..127 'loop {... }': !
+ 94..127 '{ ... }': ()
+ 104..107 'end': Option<bool>
+ 104..120 'end = ...(true)': ()
+ 110..114 'Some': Some<bool>(bool) -> Option<bool>
+ 110..120 'Some(true)': Option<bool>
+ 115..119 'true': bool
+ "#]],
+ );
+}
+
+#[test]
+fn infer_loop_break_with_val() {
+ check_infer(
+ r#"
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ let x = loop {
+ if false {
+ break None;
+ }
+
+ break Some(true);
+ };
+ }
+ "#,
+ expect![[r#"
+ 59..168 '{ ... }; }': ()
+ 69..70 'x': Option<bool>
+ 73..165 'loop {... }': Option<bool>
+ 78..165 '{ ... }': ()
+ 88..132 'if fal... }': ()
+ 91..96 'false': bool
+ 97..132 '{ ... }': ()
+ 111..121 'break None': !
+ 117..121 'None': Option<bool>
+ 142..158 'break ...(true)': !
+ 148..152 'Some': Some<bool>(bool) -> Option<bool>
+ 148..158 'Some(true)': Option<bool>
+ 153..157 'true': bool
+ "#]],
+ );
+}
+
+#[test]
+fn infer_loop_break_without_val() {
+ check_infer(
+ r#"
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ let x = loop {
+ if false {
+ break;
+ }
+ };
+ }
+ "#,
+ expect![[r#"
+ 59..136 '{ ... }; }': ()
+ 69..70 'x': ()
+ 73..133 'loop {... }': ()
+ 78..133 '{ ... }': ()
+ 88..127 'if fal... }': ()
+ 91..96 'false': bool
+ 97..127 '{ ... }': ()
+ 111..116 'break': !
+ "#]],
+ );
+}
+
+#[test]
+fn infer_labelled_break_with_val() {
+ check_infer(
+ r#"
+ fn foo() {
+ let _x = || 'outer: loop {
+ let inner = 'inner: loop {
+ let i = Default::default();
+ if (break 'outer i) {
+ loop { break 'inner 5i8; };
+ } else if true {
+ break 'inner 6;
+ }
+ break 7;
+ };
+ break inner < 8;
+ };
+ }
+ "#,
+ expect![[r#"
+ 9..335 '{ ... }; }': ()
+ 19..21 '_x': || -> bool
+ 24..332 '|| 'ou... }': || -> bool
+ 27..332 ''outer... }': bool
+ 40..332 '{ ... }': ()
+ 54..59 'inner': i8
+ 62..300 ''inner... }': i8
+ 75..300 '{ ... }': ()
+ 93..94 'i': bool
+ 97..113 'Defaul...efault': {unknown}
+ 97..115 'Defaul...ault()': bool
+ 129..269 'if (br... }': ()
+ 133..147 'break 'outer i': !
+ 146..147 'i': bool
+ 149..208 '{ ... }': ()
+ 167..193 'loop {...5i8; }': !
+ 172..193 '{ brea...5i8; }': ()
+ 174..190 'break ...er 5i8': !
+ 187..190 '5i8': i8
+ 214..269 'if tru... }': ()
+ 217..221 'true': bool
+ 222..269 '{ ... }': ()
+ 240..254 'break 'inner 6': !
+ 253..254 '6': i8
+ 282..289 'break 7': !
+ 288..289 '7': i8
+ 310..325 'break inner < 8': !
+ 316..321 'inner': i8
+ 316..325 'inner < 8': bool
+ 324..325 '8': i8
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default() {
+ check_infer(
+ r#"
+ struct Thing<T = ()> { t: T }
+ enum OtherThing<T = ()> {
+ One { t: T },
+ Two(T),
+ }
+
+ fn test(t1: Thing, t2: OtherThing, t3: Thing<i32>, t4: OtherThing<i32>) {
+ t1.t;
+ t3.t;
+ match t2 {
+ OtherThing::One { t } => { t; },
+ OtherThing::Two(t) => { t; },
+ }
+ match t4 {
+ OtherThing::One { t } => { t; },
+ OtherThing::Two(t) => { t; },
+ }
+ }
+ "#,
+ expect![[r#"
+ 97..99 't1': Thing<()>
+ 108..110 't2': OtherThing<()>
+ 124..126 't3': Thing<i32>
+ 140..142 't4': OtherThing<i32>
+ 161..384 '{ ... } }': ()
+ 167..169 't1': Thing<()>
+ 167..171 't1.t': ()
+ 177..179 't3': Thing<i32>
+ 177..181 't3.t': i32
+ 187..282 'match ... }': ()
+ 193..195 't2': OtherThing<()>
+ 206..227 'OtherT... { t }': OtherThing<()>
+ 224..225 't': ()
+ 231..237 '{ t; }': ()
+ 233..234 't': ()
+ 247..265 'OtherT...Two(t)': OtherThing<()>
+ 263..264 't': ()
+ 269..275 '{ t; }': ()
+ 271..272 't': ()
+ 287..382 'match ... }': ()
+ 293..295 't4': OtherThing<i32>
+ 306..327 'OtherT... { t }': OtherThing<i32>
+ 324..325 't': i32
+ 331..337 '{ t; }': ()
+ 333..334 't': i32
+ 347..365 'OtherT...Two(t)': OtherThing<i32>
+ 363..364 't': i32
+ 369..375 '{ t; }': ()
+ 371..372 't': i32
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default_in_struct_literal() {
+ check_infer(
+ r#"
+ struct Thing<T = ()> { t: T }
+ enum OtherThing<T = ()> {
+ One { t: T },
+ Two(T),
+ }
+
+ fn test() {
+ let x = Thing { t: loop {} };
+ let y = Thing { t: () };
+ let z = Thing { t: 1i32 };
+ if let Thing { t } = z {
+ t;
+ }
+
+ let a = OtherThing::One { t: 1i32 };
+ let b = OtherThing::Two(1i32);
+ }
+ "#,
+ expect![[r#"
+ 99..319 '{ ...32); }': ()
+ 109..110 'x': Thing<!>
+ 113..133 'Thing ...p {} }': Thing<!>
+ 124..131 'loop {}': !
+ 129..131 '{}': ()
+ 143..144 'y': Thing<()>
+ 147..162 'Thing { t: () }': Thing<()>
+ 158..160 '()': ()
+ 172..173 'z': Thing<i32>
+ 176..193 'Thing ...1i32 }': Thing<i32>
+ 187..191 '1i32': i32
+ 199..240 'if let... }': ()
+ 206..217 'Thing { t }': Thing<i32>
+ 214..215 't': i32
+ 220..221 'z': Thing<i32>
+ 222..240 '{ ... }': ()
+ 232..233 't': i32
+ 250..251 'a': OtherThing<i32>
+ 254..281 'OtherT...1i32 }': OtherThing<i32>
+ 275..279 '1i32': i32
+ 291..292 'b': OtherThing<i32>
+ 295..310 'OtherThing::Two': Two<i32>(i32) -> OtherThing<i32>
+ 295..316 'OtherT...(1i32)': OtherThing<i32>
+ 311..315 '1i32': i32
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default_depending_on_other_type_arg() {
+ // FIXME: the {unknown} is a bug
+ check_infer(
+ r#"
+ struct Thing<T = u128, F = fn() -> T> { t: T }
+
+ fn test(t1: Thing<u32>, t2: Thing) {
+ t1;
+ t2;
+ Thing::<_> { t: 1u32 };
+ }
+ "#,
+ expect![[r#"
+ 56..58 't1': Thing<u32, fn() -> u32>
+ 72..74 't2': Thing<u128, fn() -> u128>
+ 83..130 '{ ...2 }; }': ()
+ 89..91 't1': Thing<u32, fn() -> u32>
+ 97..99 't2': Thing<u128, fn() -> u128>
+ 105..127 'Thing:...1u32 }': Thing<u32, fn() -> {unknown}>
+ 121..125 '1u32': u32
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default_depending_on_other_type_arg_forward() {
+ // the {unknown} here is intentional, as defaults are not allowed to
+ // refer to type parameters coming later
+ check_infer(
+ r#"
+ struct Thing<F = fn() -> T, T = u128> { t: T }
+
+ fn test(t1: Thing) {
+ t1;
+ }
+ "#,
+ expect![[r#"
+ 56..58 't1': Thing<fn() -> {unknown}, u128>
+ 67..78 '{ t1; }': ()
+ 73..75 't1': Thing<fn() -> {unknown}, u128>
+ "#]],
+ );
+}
--- /dev/null
+use expect::expect;
+use test_utils::mark;
+
+use super::{check_infer, check_infer_with_mismatches, check_types};
+
+#[test]
+fn infer_await() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+struct IntFuture;
+
+impl Future for IntFuture {
+ type Output = u64;
+}
+
+fn test() {
+ let r = IntFuture;
+ let v = r.await;
+ v;
+} //^ u64
+
+//- /core.rs crate:core
+#[prelude_import] use future::*;
+mod future {
+ #[lang = "future_trait"]
+ trait Future {
+ type Output;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_async() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+async fn foo() -> u64 {
+ 128
+}
+
+fn test() {
+ let r = foo();
+ let v = r.await;
+ v;
+} //^ u64
+
+//- /core.rs crate:core
+#[prelude_import] use future::*;
+mod future {
+ #[lang = "future_trait"]
+ trait Future {
+ type Output;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_desugar_async() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+async fn foo() -> u64 {
+ 128
+}
+
+fn test() {
+ let r = foo();
+ r;
+} //^ impl Future<Output = u64>
+
+//- /core.rs crate:core
+#[prelude_import] use future::*;
+mod future {
+ trait Future {
+ type Output;
+ }
+}
+
+"#,
+ );
+}
+
+#[test]
+fn infer_try() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+fn test() {
+ let r: Result<i32, u64> = Result::Ok(1);
+ let v = r?;
+ v;
+} //^ i32
+
+//- /core.rs crate:core
+#[prelude_import] use ops::*;
+mod ops {
+ trait Try {
+ type Ok;
+ type Error;
+ }
+}
+
+#[prelude_import] use result::*;
+mod result {
+ enum Result<O, E> {
+ Ok(O),
+ Err(E)
+ }
+
+ impl<O, E> crate::ops::Try for Result<O, E> {
+ type Ok = O;
+ type Error = E;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_for_loop() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core,alloc
+use alloc::collections::Vec;
+
+fn test() {
+ let v = Vec::new();
+ v.push("foo");
+ for x in v {
+ x;
+ } //^ &str
+}
+
+//- /core.rs crate:core
+#[prelude_import] use iter::*;
+mod iter {
+ trait IntoIterator {
+ type Item;
+ }
+}
+
+//- /alloc.rs crate:alloc deps:core
+mod collections {
+ struct Vec<T> {}
+ impl<T> Vec<T> {
+ fn new() -> Self { Vec {} }
+ fn push(&mut self, t: T) { }
+ }
+
+ impl<T> IntoIterator for Vec<T> {
+ type Item=T;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_neg() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+struct Bar;
+struct Foo;
+
+impl std::ops::Neg for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = -a;
+ b;
+} //^ Foo
+
+//- /std.rs crate:std
+#[prelude_import] use ops::*;
+mod ops {
+ #[lang = "neg"]
+ pub trait Neg {
+ type Output;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_not() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+struct Bar;
+struct Foo;
+
+impl std::ops::Not for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = !a;
+ b;
+} //^ Foo
+
+//- /std.rs crate:std
+#[prelude_import] use ops::*;
+mod ops {
+ #[lang = "not"]
+ pub trait Not {
+ type Output;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_from_bound_1() {
+ check_infer(
+ r#"
+ trait Trait<T> {}
+ struct S<T>(T);
+ impl<U> Trait<U> for S<U> {}
+ fn foo<T: Trait<u32>>(t: T) {}
+ fn test() {
+ let s = S(unknown);
+ foo(s);
+ }
+ "#,
+ expect![[r#"
+ 85..86 't': T
+ 91..93 '{}': ()
+ 104..143 '{ ...(s); }': ()
+ 114..115 's': S<u32>
+ 118..119 'S': S<u32>(u32) -> S<u32>
+ 118..128 'S(unknown)': S<u32>
+ 120..127 'unknown': u32
+ 134..137 'foo': fn foo<S<u32>>(S<u32>)
+ 134..140 'foo(s)': ()
+ 138..139 's': S<u32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_from_bound_2() {
+ check_infer(
+ r#"
+ trait Trait<T> {}
+ struct S<T>(T);
+ impl<U> Trait<U> for S<U> {}
+ fn foo<U, T: Trait<U>>(t: T) -> U {}
+ fn test() {
+ let s = S(unknown);
+ let x: u32 = foo(s);
+ }
+ "#,
+ expect![[r#"
+ 86..87 't': T
+ 97..99 '{}': ()
+ 110..162 '{ ...(s); }': ()
+ 120..121 's': S<u32>
+ 124..125 'S': S<u32>(u32) -> S<u32>
+ 124..134 'S(unknown)': S<u32>
+ 126..133 'unknown': u32
+ 144..145 'x': u32
+ 153..156 'foo': fn foo<u32, S<u32>>(S<u32>) -> u32
+ 153..159 'foo(s)': u32
+ 157..158 's': S<u32>
+ "#]],
+ );
+}
+
+#[test]
+fn trait_default_method_self_bound_implements_trait() {
+ mark::check!(trait_self_implements_self);
+ check_infer(
+ r#"
+ trait Trait {
+ fn foo(&self) -> i64;
+ fn bar(&self) -> {
+ let x = self.foo();
+ }
+ }
+ "#,
+ expect![[r#"
+ 26..30 'self': &Self
+ 52..56 'self': &Self
+ 61..96 '{ ... }': ()
+ 75..76 'x': i64
+ 79..83 'self': &Self
+ 79..89 'self.foo()': i64
+ "#]],
+ );
+}
+
+#[test]
+fn trait_default_method_self_bound_implements_super_trait() {
+ check_infer(
+ r#"
+ trait SuperTrait {
+ fn foo(&self) -> i64;
+ }
+ trait Trait: SuperTrait {
+ fn bar(&self) -> {
+ let x = self.foo();
+ }
+ }
+ "#,
+ expect![[r#"
+ 31..35 'self': &Self
+ 85..89 'self': &Self
+ 94..129 '{ ... }': ()
+ 108..109 'x': i64
+ 112..116 'self': &Self
+ 112..122 'self.foo()': i64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_project_associated_type() {
+ check_infer(
+ r#"
+ trait Iterable {
+ type Item;
+ }
+ struct S;
+ impl Iterable for S { type Item = u32; }
+ fn test<T: Iterable>() {
+ let x: <S as Iterable>::Item = 1;
+ let y: <T as Iterable>::Item = no_matter;
+ let z: T::Item = no_matter;
+ let a: <T>::Item = no_matter;
+ }
+ "#,
+ expect![[r#"
+ 108..261 '{ ...ter; }': ()
+ 118..119 'x': u32
+ 145..146 '1': u32
+ 156..157 'y': Iterable::Item<T>
+ 183..192 'no_matter': Iterable::Item<T>
+ 202..203 'z': Iterable::Item<T>
+ 215..224 'no_matter': Iterable::Item<T>
+ 234..235 'a': Iterable::Item<T>
+ 249..258 'no_matter': Iterable::Item<T>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_return_associated_type() {
+ check_infer(
+ r#"
+ trait Iterable {
+ type Item;
+ }
+ struct S;
+ impl Iterable for S { type Item = u32; }
+ fn foo1<T: Iterable>(t: T) -> T::Item {}
+ fn foo2<T: Iterable>(t: T) -> <T as Iterable>::Item {}
+ fn foo3<T: Iterable>(t: T) -> <T>::Item {}
+ fn test() {
+ let x = foo1(S);
+ let y = foo2(S);
+ let z = foo3(S);
+ }
+ "#,
+ expect![[r#"
+ 106..107 't': T
+ 123..125 '{}': ()
+ 147..148 't': T
+ 178..180 '{}': ()
+ 202..203 't': T
+ 221..223 '{}': ()
+ 234..300 '{ ...(S); }': ()
+ 244..245 'x': u32
+ 248..252 'foo1': fn foo1<S>(S) -> <S as Iterable>::Item
+ 248..255 'foo1(S)': u32
+ 253..254 'S': S
+ 265..266 'y': u32
+ 269..273 'foo2': fn foo2<S>(S) -> <S as Iterable>::Item
+ 269..276 'foo2(S)': u32
+ 274..275 'S': S
+ 286..287 'z': u32
+ 290..294 'foo3': fn foo3<S>(S) -> <S as Iterable>::Item
+ 290..297 'foo3(S)': u32
+ 295..296 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_type_bound() {
+ check_infer(
+ r#"
+ trait Iterable {
+ type Item;
+ }
+ fn test<T: Iterable<Item=u32>>() {
+ let y: T::Item = unknown;
+ }
+ "#,
+ expect![[r#"
+ 67..100 '{ ...own; }': ()
+ 77..78 'y': u32
+ 90..97 'unknown': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_const_body() {
+ check_infer(
+ r#"
+ const A: u32 = 1 + 1;
+ static B: u64 = { let x = 1; x };
+ "#,
+ expect![[r#"
+ 15..16 '1': u32
+ 15..20 '1 + 1': u32
+ 19..20 '1': u32
+ 38..54 '{ let ...1; x }': u64
+ 44..45 'x': u64
+ 48..49 '1': u64
+ 51..52 'x': u64
+ "#]],
+ );
+}
+
+#[test]
+fn tuple_struct_fields() {
+ check_infer(
+ r#"
+ struct S(i32, u64);
+ fn test() -> u64 {
+ let a = S(4, 6);
+ let b = a.0;
+ a.1
+ }
+ "#,
+ expect![[r#"
+ 37..86 '{ ... a.1 }': u64
+ 47..48 'a': S
+ 51..52 'S': S(i32, u64) -> S
+ 51..58 'S(4, 6)': S
+ 53..54 '4': i32
+ 56..57 '6': u64
+ 68..69 'b': i32
+ 72..73 'a': S
+ 72..75 'a.0': i32
+ 81..82 'a': S
+ 81..84 'a.1': u64
+ "#]],
+ );
+}
+
+#[test]
+fn tuple_struct_with_fn() {
+ check_infer(
+ r#"
+ struct S(fn(u32) -> u64);
+ fn test() -> u64 {
+ let a = S(|i| 2*i);
+ let b = a.0(4);
+ a.0(2)
+ }
+ "#,
+ expect![[r#"
+ 43..101 '{ ...0(2) }': u64
+ 53..54 'a': S
+ 57..58 'S': S(fn(u32) -> u64) -> S
+ 57..67 'S(|i| 2*i)': S
+ 59..66 '|i| 2*i': |u32| -> u64
+ 60..61 'i': u32
+ 63..64 '2': u32
+ 63..66 '2*i': u32
+ 65..66 'i': u32
+ 77..78 'b': u64
+ 81..82 'a': S
+ 81..84 'a.0': fn(u32) -> u64
+ 81..87 'a.0(4)': u64
+ 85..86 '4': u32
+ 93..94 'a': S
+ 93..96 'a.0': fn(u32) -> u64
+ 93..99 'a.0(2)': u64
+ 97..98 '2': u32
+ "#]],
+ );
+}
+
+#[test]
+fn indexing_arrays() {
+ check_infer(
+ "fn main() { &mut [9][2]; }",
+ expect![[r#"
+ 10..26 '{ &mut...[2]; }': ()
+ 12..23 '&mut [9][2]': &mut {unknown}
+ 17..20 '[9]': [i32; _]
+ 17..23 '[9][2]': {unknown}
+ 18..19 '9': i32
+ 21..22 '2': i32
+ "#]],
+ )
+}
+
+#[test]
+fn infer_ops_index() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+struct Bar;
+struct Foo;
+
+impl std::ops::Index<u32> for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = a[1u32];
+ b;
+} //^ Foo
+
+//- /std.rs crate:std
+#[prelude_import] use ops::*;
+mod ops {
+ #[lang = "index"]
+ pub trait Index<Idx> {
+ type Output;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_index_int() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+struct Bar;
+struct Foo;
+
+impl std::ops::Index<u32> for Bar {
+ type Output = Foo;
+}
+
+struct Range;
+impl std::ops::Index<Range> for Bar {
+ type Output = Bar;
+}
+
+fn test() {
+ let a = Bar;
+ let b = a[1];
+ b;
+ //^ Foo
+}
+
+//- /std.rs crate:std
+#[prelude_import] use ops::*;
+mod ops {
+ #[lang = "index"]
+ pub trait Index<Idx> {
+ type Output;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_index_autoderef() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+fn test() {
+ let a = &[1u32, 2, 3];
+ let b = a[1u32];
+ b;
+} //^ u32
+
+//- /std.rs crate:std
+impl<T> ops::Index<u32> for [T] {
+ type Output = T;
+}
+
+#[prelude_import] use ops::*;
+mod ops {
+ #[lang = "index"]
+ pub trait Index<Idx> {
+ type Output;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn deref_trait() {
+ check_types(
+ r#"
+#[lang = "deref"]
+trait Deref {
+ type Target;
+ fn deref(&self) -> &Self::Target;
+}
+
+struct Arc<T>;
+impl<T> Deref for Arc<T> {
+ type Target = T;
+}
+
+struct S;
+impl S {
+ fn foo(&self) -> u128 {}
+}
+
+fn test(s: Arc<S>) {
+ (*s, s.foo());
+} //^ (S, u128)
+"#,
+ );
+}
+
+#[test]
+fn deref_trait_with_inference_var() {
+ check_types(
+ r#"
+//- /main.rs
+#[lang = "deref"]
+trait Deref {
+ type Target;
+ fn deref(&self) -> &Self::Target;
+}
+
+struct Arc<T>;
+fn new_arc<T>() -> Arc<T> {}
+impl<T> Deref for Arc<T> {
+ type Target = T;
+}
+
+struct S;
+fn foo(a: Arc<S>) {}
+
+fn test() {
+ let a = new_arc();
+ let b = (*a);
+ //^ S
+ foo(a);
+}
+"#,
+ );
+}
+
+#[test]
+fn deref_trait_infinite_recursion() {
+ check_types(
+ r#"
+#[lang = "deref"]
+trait Deref {
+ type Target;
+ fn deref(&self) -> &Self::Target;
+}
+
+struct S;
+
+impl Deref for S {
+ type Target = S;
+}
+
+fn test(s: S) {
+ s.foo();
+} //^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn deref_trait_with_question_mark_size() {
+ check_types(
+ r#"
+#[lang = "deref"]
+trait Deref {
+ type Target;
+ fn deref(&self) -> &Self::Target;
+}
+
+struct Arc<T>;
+impl<T> Deref for Arc<T> {
+ type Target = T;
+}
+
+struct S;
+impl S {
+ fn foo(&self) -> u128 {}
+}
+
+fn test(s: Arc<S>) {
+ (*s, s.foo());
+} //^ (S, u128)
+"#,
+ );
+}
+
+#[test]
+fn obligation_from_function_clause() {
+ check_types(
+ r#"
+struct S;
+
+trait Trait<T> {}
+impl Trait<u32> for S {}
+
+fn foo<T: Trait<U>, U>(t: T) -> U {}
+
+fn test(s: S) {
+ (foo(s));
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn obligation_from_method_clause() {
+ check_types(
+ r#"
+//- /main.rs
+struct S;
+
+trait Trait<T> {}
+impl Trait<isize> for S {}
+
+struct O;
+impl O {
+ fn foo<T: Trait<U>, U>(&self, t: T) -> U {}
+}
+
+fn test() {
+ O.foo(S);
+} //^ isize
+"#,
+ );
+}
+
+#[test]
+fn obligation_from_self_method_clause() {
+ check_types(
+ r#"
+struct S;
+
+trait Trait<T> {}
+impl Trait<i64> for S {}
+
+impl S {
+ fn foo<U>(&self) -> U where Self: Trait<U> {}
+}
+
+fn test() {
+ S.foo();
+} //^ i64
+"#,
+ );
+}
+
+#[test]
+fn obligation_from_impl_clause() {
+ check_types(
+ r#"
+struct S;
+
+trait Trait<T> {}
+impl Trait<&str> for S {}
+
+struct O<T>;
+impl<U, T: Trait<U>> O<T> {
+ fn foo(&self) -> U {}
+}
+
+fn test(o: O<S>) {
+ o.foo();
+} //^ &str
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_1() {
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Clone for S {}
+impl<T> Trait for T where T: Clone {}
+fn test<T: Clone>(t: T) { t.foo(); }
+ //^ u128
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_1_not_met() {
+ check_types(
+ r#"
+//- /main.rs
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Clone for S {}
+impl<T> Trait for T where T: Clone {}
+fn test<T>(t: T) { t.foo(); }
+ //^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_2() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Trait for S {}
+fn test<T: Trait>(t: T) { t.foo(); }
+ //^ u128
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_2_not_met() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Trait for S {}
+fn test<T>(t: T) { t.foo(); }
+ //^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_deref() {
+ check_types(
+ r#"
+#[lang = "deref"]
+trait Deref {
+ type Target;
+}
+trait Trait {}
+impl<T> Deref for T where T: Trait {
+ type Target = i128;
+}
+fn test<T: Trait>(t: T) { (*t); }
+ //^ i128
+"#,
+ );
+}
+
+#[test]
+fn associated_type_placeholder() {
+    // inside the generic function, the associated type gets normalized to a placeholder `ApplyL::Out<T>` [https://rust-lang.github.io/rustc-guide/traits/associated-types.html#placeholder-associated-types].
+ check_types(
+ r#"
+pub trait ApplyL {
+ type Out;
+}
+
+pub struct RefMutL<T>;
+
+impl<T> ApplyL for RefMutL<T> {
+ type Out = <T as ApplyL>::Out;
+}
+
+fn test<T: ApplyL>() {
+ let y: <RefMutL<T> as ApplyL>::Out = no_matter;
+ y;
+} //^ ApplyL::Out<T>
+"#,
+ );
+}
+
+#[test]
+fn associated_type_placeholder_2() {
+ check_types(
+ r#"
+pub trait ApplyL {
+ type Out;
+}
+fn foo<T: ApplyL>(t: T) -> <T as ApplyL>::Out;
+
+fn test<T: ApplyL>(t: T) {
+ let y = foo(t);
+ y;
+} //^ ApplyL::Out<T>
+"#,
+ );
+}
+
+#[test]
+fn argument_impl_trait() {
+ check_infer_with_mismatches(
+ r#"
+ trait Trait<T> {
+ fn foo(&self) -> T;
+ fn foo2(&self) -> i64;
+ }
+ fn bar(x: impl Trait<u16>) {}
+ struct S<T>(T);
+ impl<T> Trait<T> for S<T> {}
+
+ fn test(x: impl Trait<u64>, y: &impl Trait<u32>) {
+ x;
+ y;
+ let z = S(1);
+ bar(z);
+ x.foo();
+ y.foo();
+ z.foo();
+ x.foo2();
+ y.foo2();
+ z.foo2();
+ }
+ "#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 54..58 'self': &Self
+ 77..78 'x': impl Trait<u16>
+ 97..99 '{}': ()
+ 154..155 'x': impl Trait<u64>
+ 174..175 'y': &impl Trait<u32>
+ 195..323 '{ ...2(); }': ()
+ 201..202 'x': impl Trait<u64>
+ 208..209 'y': &impl Trait<u32>
+ 219..220 'z': S<u16>
+ 223..224 'S': S<u16>(u16) -> S<u16>
+ 223..227 'S(1)': S<u16>
+ 225..226 '1': u16
+ 233..236 'bar': fn bar(S<u16>)
+ 233..239 'bar(z)': ()
+ 237..238 'z': S<u16>
+ 245..246 'x': impl Trait<u64>
+ 245..252 'x.foo()': u64
+ 258..259 'y': &impl Trait<u32>
+ 258..265 'y.foo()': u32
+ 271..272 'z': S<u16>
+ 271..278 'z.foo()': u16
+ 284..285 'x': impl Trait<u64>
+ 284..292 'x.foo2()': i64
+ 298..299 'y': &impl Trait<u32>
+ 298..306 'y.foo2()': i64
+ 312..313 'z': S<u16>
+ 312..320 'z.foo2()': i64
+ "#]],
+ );
+}
+
+#[test]
+fn argument_impl_trait_type_args_1() {
+ check_infer_with_mismatches(
+ r#"
+ trait Trait {}
+ trait Foo {
+ // this function has an implicit Self param, an explicit type param,
+ // and an implicit impl Trait param!
+ fn bar<T>(x: impl Trait) -> T { loop {} }
+ }
+ fn foo<T>(x: impl Trait) -> T { loop {} }
+ struct S;
+ impl Trait for S {}
+ struct F;
+ impl Foo for F {}
+
+ fn test() {
+ Foo::bar(S);
+ <F as Foo>::bar(S);
+ F::bar(S);
+ Foo::bar::<u32>(S);
+ <F as Foo>::bar::<u32>(S);
+
+ foo(S);
+ foo::<u32>(S);
+ foo::<u32, i32>(S); // we should ignore the extraneous i32
+ }
+ "#,
+ expect![[r#"
+ 155..156 'x': impl Trait
+ 175..186 '{ loop {} }': T
+ 177..184 'loop {}': !
+ 182..184 '{}': ()
+ 199..200 'x': impl Trait
+ 219..230 '{ loop {} }': T
+ 221..228 'loop {}': !
+ 226..228 '{}': ()
+ 300..509 '{ ... i32 }': ()
+ 306..314 'Foo::bar': fn bar<{unknown}, {unknown}>(S) -> {unknown}
+ 306..317 'Foo::bar(S)': {unknown}
+ 315..316 'S': S
+ 323..338 '<F as Foo>::bar': fn bar<F, {unknown}>(S) -> {unknown}
+ 323..341 '<F as ...bar(S)': {unknown}
+ 339..340 'S': S
+ 347..353 'F::bar': fn bar<F, {unknown}>(S) -> {unknown}
+ 347..356 'F::bar(S)': {unknown}
+ 354..355 'S': S
+ 362..377 'Foo::bar::<u32>': fn bar<{unknown}, u32>(S) -> u32
+ 362..380 'Foo::b...32>(S)': u32
+ 378..379 'S': S
+ 386..408 '<F as ...:<u32>': fn bar<F, u32>(S) -> u32
+ 386..411 '<F as ...32>(S)': u32
+ 409..410 'S': S
+ 418..421 'foo': fn foo<{unknown}>(S) -> {unknown}
+ 418..424 'foo(S)': {unknown}
+ 422..423 'S': S
+ 430..440 'foo::<u32>': fn foo<u32>(S) -> u32
+ 430..443 'foo::<u32>(S)': u32
+ 441..442 'S': S
+ 449..464 'foo::<u32, i32>': fn foo<u32>(S) -> u32
+ 449..467 'foo::<...32>(S)': u32
+ 465..466 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn argument_impl_trait_type_args_2() {
+ check_infer_with_mismatches(
+ r#"
+ trait Trait {}
+ struct S;
+ impl Trait for S {}
+ struct F<T>;
+ impl<T> F<T> {
+ fn foo<U>(self, x: impl Trait) -> (T, U) { loop {} }
+ }
+
+ fn test() {
+ F.foo(S);
+ F::<u32>.foo(S);
+ F::<u32>.foo::<i32>(S);
+ F::<u32>.foo::<i32, u32>(S); // extraneous argument should be ignored
+ }
+ "#,
+ expect![[r#"
+ 87..91 'self': F<T>
+ 93..94 'x': impl Trait
+ 118..129 '{ loop {} }': (T, U)
+ 120..127 'loop {}': !
+ 125..127 '{}': ()
+ 143..283 '{ ...ored }': ()
+ 149..150 'F': F<{unknown}>
+ 149..157 'F.foo(S)': ({unknown}, {unknown})
+ 155..156 'S': S
+ 163..171 'F::<u32>': F<u32>
+ 163..178 'F::<u32>.foo(S)': (u32, {unknown})
+ 176..177 'S': S
+ 184..192 'F::<u32>': F<u32>
+ 184..206 'F::<u3...32>(S)': (u32, i32)
+ 204..205 'S': S
+ 212..220 'F::<u32>': F<u32>
+ 212..239 'F::<u3...32>(S)': (u32, i32)
+ 237..238 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn argument_impl_trait_to_fn_pointer() {
+ check_infer_with_mismatches(
+ r#"
+ trait Trait {}
+ fn foo(x: impl Trait) { loop {} }
+ struct S;
+ impl Trait for S {}
+
+ fn test() {
+ let f: fn(S) -> () = foo;
+ }
+ "#,
+ expect![[r#"
+ 22..23 'x': impl Trait
+ 37..48 '{ loop {} }': ()
+ 39..46 'loop {}': !
+ 44..46 '{}': ()
+ 90..123 '{ ...foo; }': ()
+ 100..101 'f': fn(S)
+ 117..120 'foo': fn foo(S)
+ "#]],
+ );
+}
+
+#[test]
+fn impl_trait() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn foo(&self) -> T;
+ fn foo2(&self) -> i64;
+ }
+ fn bar() -> impl Trait<u64> {}
+
+ fn test(x: impl Trait<u64>, y: &impl Trait<u64>) {
+ x;
+ y;
+ let z = bar();
+ x.foo();
+ y.foo();
+ z.foo();
+ x.foo2();
+ y.foo2();
+ z.foo2();
+ }
+ "#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 54..58 'self': &Self
+ 98..100 '{}': ()
+ 110..111 'x': impl Trait<u64>
+ 130..131 'y': &impl Trait<u64>
+ 151..268 '{ ...2(); }': ()
+ 157..158 'x': impl Trait<u64>
+ 164..165 'y': &impl Trait<u64>
+ 175..176 'z': impl Trait<u64>
+ 179..182 'bar': fn bar() -> impl Trait<u64>
+ 179..184 'bar()': impl Trait<u64>
+ 190..191 'x': impl Trait<u64>
+ 190..197 'x.foo()': u64
+ 203..204 'y': &impl Trait<u64>
+ 203..210 'y.foo()': u64
+ 216..217 'z': impl Trait<u64>
+ 216..223 'z.foo()': u64
+ 229..230 'x': impl Trait<u64>
+ 229..237 'x.foo2()': i64
+ 243..244 'y': &impl Trait<u64>
+ 243..251 'y.foo2()': i64
+ 257..258 'z': impl Trait<u64>
+ 257..265 'z.foo2()': i64
+ "#]],
+ );
+}
+
+#[test]
+fn simple_return_pos_impl_trait() {
+ mark::check!(lower_rpit);
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn foo(&self) -> T;
+ }
+ fn bar() -> impl Trait<u64> { loop {} }
+
+ fn test() {
+ let a = bar();
+ a.foo();
+ }
+ "#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 71..82 '{ loop {} }': !
+ 73..80 'loop {}': !
+ 78..80 '{}': ()
+ 94..129 '{ ...o(); }': ()
+ 104..105 'a': impl Trait<u64>
+ 108..111 'bar': fn bar() -> impl Trait<u64>
+ 108..113 'bar()': impl Trait<u64>
+ 119..120 'a': impl Trait<u64>
+ 119..126 'a.foo()': u64
+ "#]],
+ );
+}
+
+#[test]
+fn more_return_pos_impl_trait() {
+ check_infer(
+ r#"
+ trait Iterator {
+ type Item;
+ fn next(&mut self) -> Self::Item;
+ }
+ trait Trait<T> {
+ fn foo(&self) -> T;
+ }
+ fn bar() -> (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>) { loop {} }
+ fn baz<T>(t: T) -> (impl Iterator<Item = impl Trait<T>>, impl Trait<T>) { loop {} }
+
+ fn test() {
+ let (a, b) = bar();
+ a.next().foo();
+ b.foo();
+ let (c, d) = baz(1u128);
+ c.next().foo();
+ d.foo();
+ }
+ "#,
+ expect![[r#"
+ 49..53 'self': &mut Self
+ 101..105 'self': &Self
+ 184..195 '{ loop {} }': ({unknown}, {unknown})
+ 186..193 'loop {}': !
+ 191..193 '{}': ()
+ 206..207 't': T
+ 268..279 '{ loop {} }': ({unknown}, {unknown})
+ 270..277 'loop {}': !
+ 275..277 '{}': ()
+ 291..413 '{ ...o(); }': ()
+ 301..307 '(a, b)': (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>)
+ 302..303 'a': impl Iterator<Item = impl Trait<u32>>
+ 305..306 'b': impl Trait<u64>
+ 310..313 'bar': fn bar() -> (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>)
+ 310..315 'bar()': (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>)
+ 321..322 'a': impl Iterator<Item = impl Trait<u32>>
+ 321..329 'a.next()': impl Trait<u32>
+ 321..335 'a.next().foo()': u32
+ 341..342 'b': impl Trait<u64>
+ 341..348 'b.foo()': u64
+ 358..364 '(c, d)': (impl Iterator<Item = impl Trait<u128>>, impl Trait<u128>)
+ 359..360 'c': impl Iterator<Item = impl Trait<u128>>
+ 362..363 'd': impl Trait<u128>
+ 367..370 'baz': fn baz<u128>(u128) -> (impl Iterator<Item = impl Trait<u128>>, impl Trait<u128>)
+ 367..377 'baz(1u128)': (impl Iterator<Item = impl Trait<u128>>, impl Trait<u128>)
+ 371..376 '1u128': u128
+ 383..384 'c': impl Iterator<Item = impl Trait<u128>>
+ 383..391 'c.next()': impl Trait<u128>
+ 383..397 'c.next().foo()': u128
+ 403..404 'd': impl Trait<u128>
+ 403..410 'd.foo()': u128
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_trait() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn foo(&self) -> T;
+ fn foo2(&self) -> i64;
+ }
+ fn bar() -> dyn Trait<u64> {}
+
+ fn test(x: dyn Trait<u64>, y: &dyn Trait<u64>) {
+ x;
+ y;
+ let z = bar();
+ x.foo();
+ y.foo();
+ z.foo();
+ x.foo2();
+ y.foo2();
+ z.foo2();
+ }
+ "#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 54..58 'self': &Self
+ 97..99 '{}': ()
+ 109..110 'x': dyn Trait<u64>
+ 128..129 'y': &dyn Trait<u64>
+ 148..265 '{ ...2(); }': ()
+ 154..155 'x': dyn Trait<u64>
+ 161..162 'y': &dyn Trait<u64>
+ 172..173 'z': dyn Trait<u64>
+ 176..179 'bar': fn bar() -> dyn Trait<u64>
+ 176..181 'bar()': dyn Trait<u64>
+ 187..188 'x': dyn Trait<u64>
+ 187..194 'x.foo()': u64
+ 200..201 'y': &dyn Trait<u64>
+ 200..207 'y.foo()': u64
+ 213..214 'z': dyn Trait<u64>
+ 213..220 'z.foo()': u64
+ 226..227 'x': dyn Trait<u64>
+ 226..234 'x.foo2()': i64
+ 240..241 'y': &dyn Trait<u64>
+ 240..248 'y.foo2()': i64
+ 254..255 'z': dyn Trait<u64>
+ 254..262 'z.foo2()': i64
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_trait_in_impl() {
+ check_infer(
+ r#"
+ trait Trait<T, U> {
+ fn foo(&self) -> (T, U);
+ }
+ struct S<T, U> {}
+ impl<T, U> S<T, U> {
+ fn bar(&self) -> &dyn Trait<T, U> { loop {} }
+ }
+ trait Trait2<T, U> {
+ fn baz(&self) -> (T, U);
+ }
+ impl<T, U> Trait2<T, U> for dyn Trait<T, U> { }
+
+ fn test(s: S<u32, i32>) {
+ s.bar().baz();
+ }
+ "#,
+ expect![[r#"
+ 32..36 'self': &Self
+ 102..106 'self': &S<T, U>
+ 128..139 '{ loop {} }': &dyn Trait<T, U>
+ 130..137 'loop {}': !
+ 135..137 '{}': ()
+ 175..179 'self': &Self
+ 251..252 's': S<u32, i32>
+ 267..289 '{ ...z(); }': ()
+ 273..274 's': S<u32, i32>
+ 273..280 's.bar()': &dyn Trait<u32, i32>
+ 273..286 's.bar().baz()': (u32, i32)
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_trait_bare() {
+ check_infer(
+ r#"
+ trait Trait {
+ fn foo(&self) -> u64;
+ }
+ fn bar() -> Trait {}
+
+ fn test(x: Trait, y: &Trait) -> u64 {
+ x;
+ y;
+ let z = bar();
+ x.foo();
+ y.foo();
+ z.foo();
+ }
+ "#,
+ expect![[r#"
+ 26..30 'self': &Self
+ 60..62 '{}': ()
+ 72..73 'x': dyn Trait
+ 82..83 'y': &dyn Trait
+ 100..175 '{ ...o(); }': ()
+ 106..107 'x': dyn Trait
+ 113..114 'y': &dyn Trait
+ 124..125 'z': dyn Trait
+ 128..131 'bar': fn bar() -> dyn Trait
+ 128..133 'bar()': dyn Trait
+ 139..140 'x': dyn Trait
+ 139..146 'x.foo()': u64
+ 152..153 'y': &dyn Trait
+ 152..159 'y.foo()': u64
+ 165..166 'z': dyn Trait
+ 165..172 'z.foo()': u64
+ "#]],
+ );
+}
+
+#[test]
+fn weird_bounds() {
+ check_infer(
+ r#"
+ trait Trait {}
+ fn test(a: impl Trait + 'lifetime, b: impl 'lifetime, c: impl (Trait), d: impl ('lifetime), e: impl ?Sized, f: impl Trait + ?Sized) {}
+ "#,
+ expect![[r#"
+ 23..24 'a': impl Trait + {error}
+ 50..51 'b': impl {error}
+ 69..70 'c': impl Trait
+ 86..87 'd': impl {error}
+ 107..108 'e': impl {error}
+ 123..124 'f': impl Trait + {error}
+ 147..149 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+#[ignore]
+fn error_bound_chalk() {
+ check_types(
+ r#"
+trait Trait {
+ fn foo(&self) -> u32 {}
+}
+
+fn test(x: (impl Trait + UnknownTrait)) {
+ x.foo();
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn assoc_type_bindings() {
+ check_infer(
+ r#"
+ trait Trait {
+ type Type;
+ }
+
+ fn get<T: Trait>(t: T) -> <T as Trait>::Type {}
+ fn get2<U, T: Trait<Type = U>>(t: T) -> U {}
+ fn set<T: Trait<Type = u64>>(t: T) -> T {t}
+
+ struct S<T>;
+ impl<T> Trait for S<T> { type Type = T; }
+
+ fn test<T: Trait<Type = u32>>(x: T, y: impl Trait<Type = i64>) {
+ get(x);
+ get2(x);
+ get(y);
+ get2(y);
+ get(set(S));
+ get2(set(S));
+ get2(S::<str>);
+ }
+ "#,
+ expect![[r#"
+ 49..50 't': T
+ 77..79 '{}': ()
+ 111..112 't': T
+ 122..124 '{}': ()
+ 154..155 't': T
+ 165..168 '{t}': T
+ 166..167 't': T
+ 256..257 'x': T
+ 262..263 'y': impl Trait<Type = i64>
+ 289..397 '{ ...r>); }': ()
+ 295..298 'get': fn get<T>(T) -> <T as Trait>::Type
+ 295..301 'get(x)': u32
+ 299..300 'x': T
+ 307..311 'get2': fn get2<u32, T>(T) -> u32
+ 307..314 'get2(x)': u32
+ 312..313 'x': T
+ 320..323 'get': fn get<impl Trait<Type = i64>>(impl Trait<Type = i64>) -> <impl Trait<Type = i64> as Trait>::Type
+ 320..326 'get(y)': i64
+ 324..325 'y': impl Trait<Type = i64>
+ 332..336 'get2': fn get2<i64, impl Trait<Type = i64>>(impl Trait<Type = i64>) -> i64
+ 332..339 'get2(y)': i64
+ 337..338 'y': impl Trait<Type = i64>
+ 345..348 'get': fn get<S<u64>>(S<u64>) -> <S<u64> as Trait>::Type
+ 345..356 'get(set(S))': u64
+ 349..352 'set': fn set<S<u64>>(S<u64>) -> S<u64>
+ 349..355 'set(S)': S<u64>
+ 353..354 'S': S<u64>
+ 362..366 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64
+ 362..374 'get2(set(S))': u64
+ 367..370 'set': fn set<S<u64>>(S<u64>) -> S<u64>
+ 367..373 'set(S)': S<u64>
+ 371..372 'S': S<u64>
+ 380..384 'get2': fn get2<str, S<str>>(S<str>) -> str
+ 380..394 'get2(S::<str>)': str
+ 385..393 'S::<str>': S<str>
+ "#]],
+ );
+}
+
+#[test]
+fn impl_trait_assoc_binding_projection_bug() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+pub trait Language {
+ type Kind;
+}
+pub enum RustLanguage {}
+impl Language for RustLanguage {
+ type Kind = SyntaxKind;
+}
+struct SyntaxNode<L> {}
+fn foo() -> impl Iterator<Item = SyntaxNode<RustLanguage>> {}
+
+trait Clone {
+ fn clone(&self) -> Self;
+}
+
+fn api_walkthrough() {
+ for node in foo() {
+ node.clone();
+ } //^ {unknown}
+}
+
+//- /std.rs crate:std
+#[prelude_import] use iter::*;
+mod iter {
+ trait IntoIterator {
+ type Item;
+ }
+ trait Iterator {
+ type Item;
+ }
+ impl<T: Iterator> IntoIterator for T {
+ type Item = <T as Iterator>::Item;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn projection_eq_within_chalk() {
+ check_infer(
+ r#"
+ trait Trait1 {
+ type Type;
+ }
+ trait Trait2<T> {
+ fn foo(self) -> T;
+ }
+ impl<T, U> Trait2<T> for U where U: Trait1<Type = T> {}
+
+ fn test<T: Trait1<Type = u32>>(x: T) {
+ x.foo();
+ }
+ "#,
+ expect![[r#"
+ 61..65 'self': Self
+ 163..164 'x': T
+ 169..185 '{ ...o(); }': ()
+ 175..176 'x': T
+ 175..182 'x.foo()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn where_clause_trait_in_scope_for_method_resolution() {
+ check_types(
+ r#"
+mod foo {
+ trait Trait {
+ fn foo(&self) -> u32 {}
+ }
+}
+
+fn test<T: foo::Trait>(x: T) {
+ x.foo();
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn super_trait_method_resolution() {
+ check_infer(
+ r#"
+ mod foo {
+ trait SuperTrait {
+ fn foo(&self) -> u32 {}
+ }
+ }
+ trait Trait1: foo::SuperTrait {}
+ trait Trait2 where Self: foo::SuperTrait {}
+
+ fn test<T: Trait1, U: Trait2>(x: T, y: U) {
+ x.foo();
+ y.foo();
+ }
+ "#,
+ expect![[r#"
+ 49..53 'self': &Self
+ 62..64 '{}': ()
+ 181..182 'x': T
+ 187..188 'y': U
+ 193..222 '{ ...o(); }': ()
+ 199..200 'x': T
+ 199..206 'x.foo()': u32
+ 212..213 'y': U
+ 212..219 'y.foo()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn super_trait_impl_trait_method_resolution() {
+ check_infer(
+ r#"
+ mod foo {
+ trait SuperTrait {
+ fn foo(&self) -> u32 {}
+ }
+ }
+ trait Trait1: foo::SuperTrait {}
+
+ fn test(x: &impl Trait1) {
+ x.foo();
+ }
+ "#,
+ expect![[r#"
+ 49..53 'self': &Self
+ 62..64 '{}': ()
+ 115..116 'x': &impl Trait1
+ 132..148 '{ ...o(); }': ()
+ 138..139 'x': &impl Trait1
+ 138..145 'x.foo()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn super_trait_cycle() {
+ // This just needs to not crash
+ check_infer(
+ r#"
+ trait A: B {}
+ trait B: A {}
+
+ fn test<T: A>(x: T) {
+ x.foo();
+ }
+ "#,
+ expect![[r#"
+ 43..44 'x': T
+ 49..65 '{ ...o(); }': ()
+ 55..56 'x': T
+ 55..62 'x.foo()': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn super_trait_assoc_type_bounds() {
+ check_infer(
+ r#"
+ trait SuperTrait { type Type; }
+ trait Trait where Self: SuperTrait {}
+
+ fn get2<U, T: Trait<Type = U>>(t: T) -> U {}
+ fn set<T: Trait<Type = u64>>(t: T) -> T {t}
+
+ struct S<T>;
+ impl<T> SuperTrait for S<T> { type Type = T; }
+ impl<T> Trait for S<T> {}
+
+ fn test() {
+ get2(set(S));
+ }
+ "#,
+ expect![[r#"
+ 102..103 't': T
+ 113..115 '{}': ()
+ 145..146 't': T
+ 156..159 '{t}': T
+ 157..158 't': T
+ 258..279 '{ ...S)); }': ()
+ 264..268 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64
+ 264..276 'get2(set(S))': u64
+ 269..272 'set': fn set<S<u64>>(S<u64>) -> S<u64>
+ 269..275 'set(S)': S<u64>
+ 273..274 'S': S<u64>
+ "#]],
+ );
+}
+
+#[test]
+fn fn_trait() {
+ check_infer(
+ r#"
+ trait FnOnce<Args> {
+ type Output;
+
+ fn call_once(self, args: Args) -> <Self as FnOnce<Args>>::Output;
+ }
+
+ fn test<F: FnOnce(u32, u64) -> u128>(f: F) {
+ f.call_once((1, 2));
+ }
+ "#,
+ expect![[r#"
+ 56..60 'self': Self
+ 62..66 'args': Args
+ 149..150 'f': F
+ 155..183 '{ ...2)); }': ()
+ 161..162 'f': F
+ 161..180 'f.call...1, 2))': u128
+ 173..179 '(1, 2)': (u32, u64)
+ 174..175 '1': u32
+ 177..178 '2': u64
+ "#]],
+ );
+}
+
+#[test]
+fn fn_ptr_and_item() {
+ check_infer(
+ r#"
+ #[lang="fn_once"]
+ trait FnOnce<Args> {
+ type Output;
+
+ fn call_once(self, args: Args) -> Self::Output;
+ }
+
+ trait Foo<T> {
+ fn foo(&self) -> T;
+ }
+
+ struct Bar<T>(T);
+
+ impl<A1, R, F: FnOnce(A1) -> R> Foo<(A1, R)> for Bar<F> {
+ fn foo(&self) -> (A1, R) {}
+ }
+
+ enum Opt<T> { None, Some(T) }
+ impl<T> Opt<T> {
+ fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Opt<U> {}
+ }
+
+ fn test() {
+ let bar: Bar<fn(u8) -> u32>;
+ bar.foo();
+
+ let opt: Opt<u8>;
+ let f: fn(u8) -> u32;
+ opt.map(f);
+ }
+ "#,
+ expect![[r#"
+ 74..78 'self': Self
+ 80..84 'args': Args
+ 139..143 'self': &Self
+ 243..247 'self': &Bar<F>
+ 260..262 '{}': ()
+ 346..350 'self': Opt<T>
+ 352..353 'f': F
+ 368..370 '{}': ()
+ 384..500 '{ ...(f); }': ()
+ 394..397 'bar': Bar<fn(u8) -> u32>
+ 423..426 'bar': Bar<fn(u8) -> u32>
+ 423..432 'bar.foo()': (u8, u32)
+ 443..446 'opt': Opt<u8>
+ 465..466 'f': fn(u8) -> u32
+ 487..490 'opt': Opt<u8>
+ 487..497 'opt.map(f)': Opt<u32>
+ 495..496 'f': fn(u8) -> u32
+ "#]],
+ );
+}
+
+#[test]
+fn fn_trait_deref_with_ty_default() {
+ check_infer(
+ r#"
+ #[lang = "deref"]
+ trait Deref {
+ type Target;
+
+ fn deref(&self) -> &Self::Target;
+ }
+
+ #[lang="fn_once"]
+ trait FnOnce<Args> {
+ type Output;
+
+ fn call_once(self, args: Args) -> Self::Output;
+ }
+
+ struct Foo;
+
+ impl Foo {
+ fn foo(&self) -> usize {}
+ }
+
+ struct Lazy<T, F = fn() -> T>(F);
+
+ impl<T, F> Lazy<T, F> {
+ pub fn new(f: F) -> Lazy<T, F> {}
+ }
+
+ impl<T, F: FnOnce() -> T> Deref for Lazy<T, F> {
+ type Target = T;
+ }
+
+ fn test() {
+ let lazy1: Lazy<Foo, _> = Lazy::new(|| Foo);
+ let r1 = lazy1.foo();
+
+ fn make_foo_fn() -> Foo {}
+ let make_foo_fn_ptr: fn() -> Foo = make_foo_fn;
+ let lazy2: Lazy<Foo, _> = Lazy::new(make_foo_fn_ptr);
+ let r2 = lazy2.foo();
+ }
+ "#,
+ expect![[r#"
+ 64..68 'self': &Self
+ 165..169 'self': Self
+ 171..175 'args': Args
+ 239..243 'self': &Foo
+ 254..256 '{}': ()
+ 334..335 'f': F
+ 354..356 '{}': ()
+ 443..689 '{ ...o(); }': ()
+ 453..458 'lazy1': Lazy<Foo, || -> Foo>
+ 475..484 'Lazy::new': fn new<Foo, || -> Foo>(|| -> Foo) -> Lazy<Foo, || -> Foo>
+ 475..492 'Lazy::...| Foo)': Lazy<Foo, || -> Foo>
+ 485..491 '|| Foo': || -> Foo
+ 488..491 'Foo': Foo
+ 502..504 'r1': usize
+ 507..512 'lazy1': Lazy<Foo, || -> Foo>
+ 507..518 'lazy1.foo()': usize
+ 560..575 'make_foo_fn_ptr': fn() -> Foo
+ 591..602 'make_foo_fn': fn make_foo_fn() -> Foo
+ 612..617 'lazy2': Lazy<Foo, fn() -> Foo>
+ 634..643 'Lazy::new': fn new<Foo, fn() -> Foo>(fn() -> Foo) -> Lazy<Foo, fn() -> Foo>
+ 634..660 'Lazy::...n_ptr)': Lazy<Foo, fn() -> Foo>
+ 644..659 'make_foo_fn_ptr': fn() -> Foo
+ 670..672 'r2': usize
+ 675..680 'lazy2': Lazy<Foo, fn() -> Foo>
+ 675..686 'lazy2.foo()': usize
+ 549..551 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn closure_1() {
+ check_infer(
+ r#"
+ #[lang = "fn_once"]
+ trait FnOnce<Args> {
+ type Output;
+ }
+
+ enum Option<T> { Some(T), None }
+ impl<T> Option<T> {
+ fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Option<U> {}
+ }
+
+ fn test() {
+ let x = Option::Some(1u32);
+ x.map(|v| v + 1);
+ x.map(|_v| 1u64);
+ let y: Option<i64> = x.map(|_v| 1);
+ }
+ "#,
+ expect![[r#"
+ 147..151 'self': Option<T>
+ 153..154 'f': F
+ 172..174 '{}': ()
+ 188..307 '{ ... 1); }': ()
+ 198..199 'x': Option<u32>
+ 202..214 'Option::Some': Some<u32>(u32) -> Option<u32>
+ 202..220 'Option...(1u32)': Option<u32>
+ 215..219 '1u32': u32
+ 226..227 'x': Option<u32>
+ 226..242 'x.map(...v + 1)': Option<u32>
+ 232..241 '|v| v + 1': |u32| -> u32
+ 233..234 'v': u32
+ 236..237 'v': u32
+ 236..241 'v + 1': u32
+ 240..241 '1': u32
+ 248..249 'x': Option<u32>
+ 248..264 'x.map(... 1u64)': Option<u64>
+ 254..263 '|_v| 1u64': |u32| -> u64
+ 255..257 '_v': u32
+ 259..263 '1u64': u64
+ 274..275 'y': Option<i64>
+ 291..292 'x': Option<u32>
+ 291..304 'x.map(|_v| 1)': Option<i64>
+ 297..303 '|_v| 1': |u32| -> i64
+ 298..300 '_v': u32
+ 302..303 '1': i64
+ "#]],
+ );
+}
+
+#[test]
+fn closure_2() {
+ check_infer(
+ r#"
+ trait FnOnce<Args> {
+ type Output;
+ }
+
+ fn test<F: FnOnce(u32) -> u64>(f: F) {
+ f(1);
+ let g = |v| v + 1;
+ g(1u64);
+ let h = |v| 1u128 + v;
+ }
+ "#,
+ expect![[r#"
+ 72..73 'f': F
+ 78..154 '{ ...+ v; }': ()
+ 84..85 'f': F
+ 84..88 'f(1)': {unknown}
+ 86..87 '1': i32
+ 98..99 'g': |u64| -> i32
+ 102..111 '|v| v + 1': |u64| -> i32
+ 103..104 'v': u64
+ 106..107 'v': u64
+ 106..111 'v + 1': i32
+ 110..111 '1': i32
+ 117..118 'g': |u64| -> i32
+ 117..124 'g(1u64)': i32
+ 119..123 '1u64': u64
+ 134..135 'h': |u128| -> u128
+ 138..151 '|v| 1u128 + v': |u128| -> u128
+ 139..140 'v': u128
+ 142..147 '1u128': u128
+ 142..151 '1u128 + v': u128
+ 150..151 'v': u128
+ "#]],
+ );
+}
+
+#[test]
+fn closure_as_argument_inference_order() {
+ check_infer(
+ r#"
+ #[lang = "fn_once"]
+ trait FnOnce<Args> {
+ type Output;
+ }
+
+ fn foo1<T, U, F: FnOnce(T) -> U>(x: T, f: F) -> U {}
+ fn foo2<T, U, F: FnOnce(T) -> U>(f: F, x: T) -> U {}
+
+ struct S;
+ impl S {
+ fn method(self) -> u64;
+
+ fn foo1<T, U, F: FnOnce(T) -> U>(self, x: T, f: F) -> U {}
+ fn foo2<T, U, F: FnOnce(T) -> U>(self, f: F, x: T) -> U {}
+ }
+
+ fn test() {
+ let x1 = foo1(S, |s| s.method());
+ let x2 = foo2(|s| s.method(), S);
+ let x3 = S.foo1(S, |s| s.method());
+ let x4 = S.foo2(|s| s.method(), S);
+ }
+ "#,
+ expect![[r#"
+ 94..95 'x': T
+ 100..101 'f': F
+ 111..113 '{}': ()
+ 147..148 'f': F
+ 153..154 'x': T
+ 164..166 '{}': ()
+ 201..205 'self': S
+ 253..257 'self': S
+ 259..260 'x': T
+ 265..266 'f': F
+ 276..278 '{}': ()
+ 316..320 'self': S
+ 322..323 'f': F
+ 328..329 'x': T
+ 339..341 '{}': ()
+ 355..514 '{ ... S); }': ()
+ 365..367 'x1': u64
+ 370..374 'foo1': fn foo1<S, u64, |S| -> u64>(S, |S| -> u64) -> u64
+ 370..393 'foo1(S...hod())': u64
+ 375..376 'S': S
+ 378..392 '|s| s.method()': |S| -> u64
+ 379..380 's': S
+ 382..383 's': S
+ 382..392 's.method()': u64
+ 403..405 'x2': u64
+ 408..412 'foo2': fn foo2<S, u64, |S| -> u64>(|S| -> u64, S) -> u64
+ 408..431 'foo2(|...(), S)': u64
+ 413..427 '|s| s.method()': |S| -> u64
+ 414..415 's': S
+ 417..418 's': S
+ 417..427 's.method()': u64
+ 429..430 'S': S
+ 441..443 'x3': u64
+ 446..447 'S': S
+ 446..471 'S.foo1...hod())': u64
+ 453..454 'S': S
+ 456..470 '|s| s.method()': |S| -> u64
+ 457..458 's': S
+ 460..461 's': S
+ 460..470 's.method()': u64
+ 481..483 'x4': u64
+ 486..487 'S': S
+ 486..511 'S.foo2...(), S)': u64
+ 493..507 '|s| s.method()': |S| -> u64
+ 494..495 's': S
+ 497..498 's': S
+ 497..507 's.method()': u64
+ 509..510 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn fn_item_fn_trait() {
+ check_types(
+ r#"
+#[lang = "fn_once"]
+trait FnOnce<Args> {
+ type Output;
+}
+
+struct S;
+
+fn foo() -> S {}
+
+fn takes_closure<U, F: FnOnce() -> U>(f: F) -> U { f() }
+
+fn test() {
+ takes_closure(foo);
+} //^^^^^^^^^^^^^^^^^^ S
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_1() {
+ check_types(
+ r#"
+//- /main.rs
+trait Trait {
+ type Item;
+}
+
+trait Trait2 {
+ fn foo(&self) -> u32;
+}
+
+fn test<T: Trait>() where T::Item: Trait2 {
+ let x: T::Item = no_matter;
+ x.foo();
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_2() {
+ check_types(
+ r#"
+trait Trait<T> {
+ type Item;
+}
+
+trait Trait2 {
+ fn foo(&self) -> u32;
+}
+
+fn test<T, U>() where T::Item: Trait2, T: Trait<U::Item>, U: Trait<()> {
+ let x: T::Item = no_matter;
+ x.foo();
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_on_impl_self() {
+ check_infer(
+ r#"
+ //- /main.rs
+ trait Trait {
+ type Item;
+
+ fn f(&self, x: Self::Item);
+ }
+
+ struct S;
+
+ impl Trait for S {
+ type Item = u32;
+ fn f(&self, x: Self::Item) { let y = x; }
+ }
+
+ struct S2;
+
+ impl Trait for S2 {
+ type Item = i32;
+ fn f(&self, x: <Self>::Item) { let y = x; }
+ }
+ "#,
+ expect![[r#"
+ 40..44 'self': &Self
+ 46..47 'x': Trait::Item<Self>
+ 126..130 'self': &S
+ 132..133 'x': u32
+ 147..161 '{ let y = x; }': ()
+ 153..154 'y': u32
+ 157..158 'x': u32
+ 228..232 'self': &S2
+ 234..235 'x': i32
+ 251..265 '{ let y = x; }': ()
+ 257..258 'y': i32
+ 261..262 'x': i32
+ "#]],
+ );
+}
+
+#[test]
+fn unselected_projection_on_trait_self() {
+ check_types(
+ r#"
+trait Trait {
+ type Item;
+
+ fn f(&self) -> Self::Item { loop {} }
+}
+
+struct S;
+impl Trait for S {
+ type Item = u32;
+}
+
+fn test() {
+ S.f();
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_chalk_fold() {
+ check_types(
+ r#"
+trait Interner {}
+trait Fold<I: Interner, TI = I> {
+ type Result;
+}
+
+struct Ty<I: Interner> {}
+impl<I: Interner, TI: Interner> Fold<I, TI> for Ty<I> {
+ type Result = Ty<TI>;
+}
+
+fn fold<I: Interner, T>(interner: &I, t: T) -> T::Result
+where
+ T: Fold<I, I>,
+{
+ loop {}
+}
+
+fn foo<I: Interner>(interner: &I, t: Ty<I>) {
+ fold(interner, t);
+} //^ Ty<I>
+"#,
+ );
+}
+
+#[test]
+fn trait_impl_self_ty() {
+ check_types(
+ r#"
+trait Trait<T> {
+ fn foo(&self);
+}
+
+struct S;
+
+impl Trait<Self> for S {}
+
+fn test() {
+ S.foo();
+} //^ ()
+"#,
+ );
+}
+
+#[test]
+fn trait_impl_self_ty_cycle() {
+ check_types(
+ r#"
+trait Trait {
+ fn foo(&self);
+}
+
+struct S<T>;
+
+impl Trait for S<Self> {}
+
+fn test() {
+ S.foo();
+} //^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_cycle_1() {
+ // this is a legitimate cycle
+ check_types(
+ r#"
+trait Trait {
+ type Item;
+}
+
+trait Trait2<T> {}
+
+fn test<T: Trait>() where T: Trait2<T::Item> {
+ let x: T::Item = no_matter;
+} //^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_cycle_2() {
+ // this is a legitimate cycle
+ check_types(
+ r#"
+//- /main.rs
+trait Trait<T> {
+ type Item;
+}
+
+fn test<T, U>() where T: Trait<U::Item>, U: Trait<T::Item> {
+ let x: T::Item = no_matter;
+} //^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn inline_assoc_type_bounds_1() {
+ check_types(
+ r#"
+trait Iterator {
+ type Item;
+}
+trait OtherTrait<T> {
+ fn foo(&self) -> T;
+}
+
+// workaround for Chalk assoc type normalization problems
+pub struct S<T>;
+impl<T: Iterator> Iterator for S<T> {
+ type Item = <T as Iterator>::Item;
+}
+
+fn test<I: Iterator<Item: OtherTrait<u32>>>() {
+ let x: <S<I> as Iterator>::Item;
+ x.foo();
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn inline_assoc_type_bounds_2() {
+ check_types(
+ r#"
+trait Iterator {
+ type Item;
+}
+
+fn test<I: Iterator<Item: Iterator<Item = u32>>>() {
+ let x: <<I as Iterator>::Item as Iterator>::Item;
+ x;
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn proc_macro_server_types() {
+ check_infer(
+ r#"
+ macro_rules! with_api {
+ ($S:ident, $self:ident, $m:ident) => {
+ $m! {
+ TokenStream {
+ fn new() -> $S::TokenStream;
+ },
+ Group {
+ },
+ }
+ };
+ }
+ macro_rules! associated_item {
+ (type TokenStream) =>
+ (type TokenStream: 'static;);
+ (type Group) =>
+ (type Group: 'static;);
+ ($($item:tt)*) => ($($item)*;)
+ }
+ macro_rules! declare_server_traits {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ pub trait Types {
+ $(associated_item!(type $name);)*
+ }
+
+ $(pub trait $name: Types {
+ $(associated_item!(fn $method($($arg: $arg_ty),*) $(-> $ret_ty)?);)*
+ })*
+
+ pub trait Server: Types $(+ $name)* {}
+ impl<S: Types $(+ $name)*> Server for S {}
+ }
+ }
+
+ with_api!(Self, self_, declare_server_traits);
+ struct G {}
+ struct T {}
+ struct Rustc;
+ impl Types for Rustc {
+ type TokenStream = T;
+ type Group = G;
+ }
+
+ fn make<T>() -> T { loop {} }
+ impl TokenStream for Rustc {
+ fn new() -> Self::TokenStream {
+ let group: Self::Group = make();
+ make()
+ }
+ }
+ "#,
+ expect![[r#"
+ 1061..1072 '{ loop {} }': T
+ 1063..1070 'loop {}': !
+ 1068..1070 '{}': ()
+ 1136..1199 '{ ... }': T
+ 1150..1155 'group': G
+ 1171..1175 'make': fn make<G>() -> G
+ 1171..1177 'make()': G
+ 1187..1191 'make': fn make<T>() -> T
+ 1187..1193 'make()': T
+ "#]],
+ );
+}
+
+#[test]
+fn unify_impl_trait() {
+ check_infer_with_mismatches(
+ r#"
+ trait Trait<T> {}
+
+ fn foo(x: impl Trait<u32>) { loop {} }
+ fn bar<T>(x: impl Trait<T>) -> T { loop {} }
+
+ struct S<T>(T);
+ impl<T> Trait<T> for S<T> {}
+
+ fn default<T>() -> T { loop {} }
+
+ fn test() -> impl Trait<i32> {
+ let s1 = S(default());
+ foo(s1);
+ let x: i32 = bar(S(default()));
+ S(default())
+ }
+ "#,
+ expect![[r#"
+ 26..27 'x': impl Trait<u32>
+ 46..57 '{ loop {} }': ()
+ 48..55 'loop {}': !
+ 53..55 '{}': ()
+ 68..69 'x': impl Trait<T>
+ 91..102 '{ loop {} }': T
+ 93..100 'loop {}': !
+ 98..100 '{}': ()
+ 171..182 '{ loop {} }': T
+ 173..180 'loop {}': !
+ 178..180 '{}': ()
+ 213..309 '{ ...t()) }': S<{unknown}>
+ 223..225 's1': S<u32>
+ 228..229 'S': S<u32>(u32) -> S<u32>
+ 228..240 'S(default())': S<u32>
+ 230..237 'default': fn default<u32>() -> u32
+ 230..239 'default()': u32
+ 246..249 'foo': fn foo(S<u32>)
+ 246..253 'foo(s1)': ()
+ 250..252 's1': S<u32>
+ 263..264 'x': i32
+ 272..275 'bar': fn bar<i32>(S<i32>) -> i32
+ 272..289 'bar(S(...lt()))': i32
+ 276..277 'S': S<i32>(i32) -> S<i32>
+ 276..288 'S(default())': S<i32>
+ 278..285 'default': fn default<i32>() -> i32
+ 278..287 'default()': i32
+ 295..296 'S': S<{unknown}>({unknown}) -> S<{unknown}>
+ 295..307 'S(default())': S<{unknown}>
+ 297..304 'default': fn default<{unknown}>() -> {unknown}
+ 297..306 'default()': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn assoc_types_from_bounds() {
+ check_infer(
+ r#"
+ //- /main.rs
+ #[lang = "fn_once"]
+ trait FnOnce<Args> {
+ type Output;
+ }
+
+ trait T {
+ type O;
+ }
+
+ impl T for () {
+ type O = ();
+ }
+
+ fn f<X, F>(_v: F)
+ where
+ X: T,
+ F: FnOnce(&X::O),
+ { }
+
+ fn main() {
+ f::<(), _>(|z| { z; });
+ }
+ "#,
+ expect![[r#"
+ 133..135 '_v': F
+ 178..181 '{ }': ()
+ 193..224 '{ ... }); }': ()
+ 199..209 'f::<(), _>': fn f<(), |&()| -> ()>(|&()| -> ())
+ 199..221 'f::<()... z; })': ()
+ 210..220 '|z| { z; }': |&()| -> ()
+ 211..212 'z': &()
+ 214..220 '{ z; }': ()
+ 216..217 'z': &()
+ "#]],
+ );
+}
+
+#[test]
+fn associated_type_bound() {
+ check_types(
+ r#"
+pub trait Trait {
+ type Item: OtherTrait<u32>;
+}
+pub trait OtherTrait<T> {
+ fn foo(&self) -> T;
+}
+
+// this is just a workaround for chalk#234
+pub struct S<T>;
+impl<T: Trait> Trait for S<T> {
+ type Item = <T as Trait>::Item;
+}
+
+fn test<T: Trait>() {
+ let y: <S<T> as Trait>::Item = no_matter;
+ y.foo();
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn dyn_trait_through_chalk() {
+ check_types(
+ r#"
+struct Box<T> {}
+#[lang = "deref"]
+trait Deref {
+ type Target;
+}
+impl<T> Deref for Box<T> {
+ type Target = T;
+}
+trait Trait {
+ fn foo(&self);
+}
+
+fn test(x: Box<dyn Trait>) {
+ x.foo();
+} //^ ()
+"#,
+ );
+}
+
+#[test]
+fn string_to_owned() {
+ check_types(
+ r#"
+struct String {}
+pub trait ToOwned {
+ type Owned;
+ fn to_owned(&self) -> Self::Owned;
+}
+impl ToOwned for str {
+ type Owned = String;
+}
+fn test() {
+ "foo".to_owned();
+} //^ String
+"#,
+ );
+}
+
+#[test]
+fn iterator_chain() {
+ check_infer(
+ r#"
+ //- /main.rs
+ #[lang = "fn_once"]
+ trait FnOnce<Args> {
+ type Output;
+ }
+ #[lang = "fn_mut"]
+ trait FnMut<Args>: FnOnce<Args> { }
+
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ pub trait Iterator {
+ type Item;
+
+ fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
+ where
+ F: FnMut(Self::Item) -> Option<B>,
+ { loop {} }
+
+ fn for_each<F>(self, f: F)
+ where
+ F: FnMut(Self::Item),
+ { loop {} }
+ }
+
+ pub trait IntoIterator {
+ type Item;
+ type IntoIter: Iterator<Item = Self::Item>;
+ fn into_iter(self) -> Self::IntoIter;
+ }
+
+ pub struct FilterMap<I, F> { }
+ impl<B, I: Iterator, F> Iterator for FilterMap<I, F>
+ where
+ F: FnMut(I::Item) -> Option<B>,
+ {
+ type Item = B;
+ }
+
+ #[stable(feature = "rust1", since = "1.0.0")]
+ impl<I: Iterator> IntoIterator for I {
+ type Item = I::Item;
+ type IntoIter = I;
+
+ fn into_iter(self) -> I {
+ self
+ }
+ }
+
+ struct Vec<T> {}
+ impl<T> Vec<T> {
+ fn new() -> Self { loop {} }
+ }
+
+ impl<T> IntoIterator for Vec<T> {
+ type Item = T;
+ type IntoIter = IntoIter<T>;
+ }
+
+ pub struct IntoIter<T> { }
+ impl<T> Iterator for IntoIter<T> {
+ type Item = T;
+ }
+
+ fn main() {
+ Vec::<i32>::new().into_iter()
+ .filter_map(|x| if x > 0 { Some(x as u32) } else { None })
+ .for_each(|y| { y; });
+ }
+ "#,
+ expect![[r#"
+ 226..230 'self': Self
+ 232..233 'f': F
+ 317..328 '{ loop {} }': FilterMap<Self, F>
+ 319..326 'loop {}': !
+ 324..326 '{}': ()
+ 349..353 'self': Self
+ 355..356 'f': F
+ 405..416 '{ loop {} }': ()
+ 407..414 'loop {}': !
+ 412..414 '{}': ()
+ 525..529 'self': Self
+ 854..858 'self': I
+ 865..885 '{ ... }': I
+ 875..879 'self': I
+ 944..955 '{ loop {} }': Vec<T>
+ 946..953 'loop {}': !
+ 951..953 '{}': ()
+ 1142..1269 '{ ... }); }': ()
+ 1148..1163 'Vec::<i32>::new': fn new<i32>() -> Vec<i32>
+ 1148..1165 'Vec::<...:new()': Vec<i32>
+ 1148..1177 'Vec::<...iter()': IntoIter<i32>
+ 1148..1240 'Vec::<...one })': FilterMap<IntoIter<i32>, |i32| -> Option<u32>>
+ 1148..1266 'Vec::<... y; })': ()
+ 1194..1239 '|x| if...None }': |i32| -> Option<u32>
+ 1195..1196 'x': i32
+ 1198..1239 'if x >...None }': Option<u32>
+ 1201..1202 'x': i32
+ 1201..1206 'x > 0': bool
+ 1205..1206 '0': i32
+ 1207..1225 '{ Some...u32) }': Option<u32>
+ 1209..1213 'Some': Some<u32>(u32) -> Option<u32>
+ 1209..1223 'Some(x as u32)': Option<u32>
+ 1214..1215 'x': i32
+ 1214..1222 'x as u32': u32
+ 1231..1239 '{ None }': Option<u32>
+ 1233..1237 'None': Option<u32>
+ 1255..1265 '|y| { y; }': |u32| -> ()
+ 1256..1257 'y': u32
+ 1259..1265 '{ y; }': ()
+ 1261..1262 'y': u32
+ "#]],
+ );
+}
+
+#[test]
+fn nested_assoc() {
+ check_types(
+ r#"
+struct Bar;
+struct Foo;
+
+trait A {
+ type OutputA;
+}
+
+impl A for Bar {
+ type OutputA = Foo;
+}
+
+trait B {
+ type Output;
+ fn foo() -> Self::Output;
+}
+
+impl<T:A> B for T {
+ type Output = T::OutputA;
+ fn foo() -> Self::Output { loop {} }
+}
+
+fn main() {
+ Bar::foo();
+} //^ Foo
+"#,
+ );
+}
+
+#[test]
+fn trait_object_no_coercion() {
+ check_infer_with_mismatches(
+ r#"
+ trait Foo {}
+
+ fn foo(x: &dyn Foo) {}
+
+ fn test(x: &dyn Foo) {
+ foo(x);
+ }
+ "#,
+ expect![[r#"
+ 21..22 'x': &dyn Foo
+ 34..36 '{}': ()
+ 46..47 'x': &dyn Foo
+ 59..74 '{ foo(x); }': ()
+ 65..68 'foo': fn foo(&dyn Foo)
+ 65..71 'foo(x)': ()
+ 69..70 'x': &dyn Foo
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_copy() {
+ check_infer_with_mismatches(
+ r#"
+ #[lang = "copy"]
+ trait Copy {}
+
+ struct IsCopy;
+ impl Copy for IsCopy {}
+ struct NotCopy;
+
+ trait Test { fn test(&self) -> bool; }
+ impl<T: Copy> Test for T {}
+
+ fn test() {
+ IsCopy.test();
+ NotCopy.test();
+ (IsCopy, IsCopy).test();
+ (IsCopy, NotCopy).test();
+ }
+ "#,
+ expect![[r#"
+ 110..114 'self': &Self
+ 166..267 '{ ...t(); }': ()
+ 172..178 'IsCopy': IsCopy
+ 172..185 'IsCopy.test()': bool
+ 191..198 'NotCopy': NotCopy
+ 191..205 'NotCopy.test()': {unknown}
+ 211..227 '(IsCop...sCopy)': (IsCopy, IsCopy)
+ 211..234 '(IsCop...test()': bool
+ 212..218 'IsCopy': IsCopy
+ 220..226 'IsCopy': IsCopy
+ 240..257 '(IsCop...tCopy)': (IsCopy, NotCopy)
+ 240..264 '(IsCop...test()': {unknown}
+ 241..247 'IsCopy': IsCopy
+ 249..256 'NotCopy': NotCopy
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_fn_def_copy() {
+ check_infer_with_mismatches(
+ r#"
+ #[lang = "copy"]
+ trait Copy {}
+
+ fn foo() {}
+ fn bar<T: Copy>(T) -> T {}
+ struct Struct(usize);
+ enum Enum { Variant(usize) }
+
+ trait Test { fn test(&self) -> bool; }
+ impl<T: Copy> Test for T {}
+
+ fn test() {
+ foo.test();
+ bar.test();
+ Struct.test();
+ Enum::Variant.test();
+ }
+ "#,
+ expect![[r#"
+ 41..43 '{}': ()
+ 60..61 'T': {unknown}
+ 68..70 '{}': ()
+ 68..70: expected T, got ()
+ 145..149 'self': &Self
+ 201..281 '{ ...t(); }': ()
+ 207..210 'foo': fn foo()
+ 207..217 'foo.test()': bool
+ 223..226 'bar': fn bar<{unknown}>({unknown}) -> {unknown}
+ 223..233 'bar.test()': bool
+ 239..245 'Struct': Struct(usize) -> Struct
+ 239..252 'Struct.test()': bool
+ 258..271 'Enum::Variant': Variant(usize) -> Enum
+ 258..278 'Enum::...test()': bool
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_fn_ptr_copy() {
+ check_infer_with_mismatches(
+ r#"
+ #[lang = "copy"]
+ trait Copy {}
+
+ trait Test { fn test(&self) -> bool; }
+ impl<T: Copy> Test for T {}
+
+ fn test(f1: fn(), f2: fn(usize) -> u8, f3: fn(u8, u8) -> &u8) {
+ f1.test();
+ f2.test();
+ f3.test();
+ }
+ "#,
+ expect![[r#"
+ 54..58 'self': &Self
+ 108..110 'f1': fn()
+ 118..120 'f2': fn(usize) -> u8
+ 139..141 'f3': fn(u8, u8) -> &u8
+ 162..210 '{ ...t(); }': ()
+ 168..170 'f1': fn()
+ 168..177 'f1.test()': bool
+ 183..185 'f2': fn(usize) -> u8
+ 183..192 'f2.test()': bool
+ 198..200 'f3': fn(u8, u8) -> &u8
+ 198..207 'f3.test()': bool
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_sized() {
+ check_infer_with_mismatches(
+ r#"
+ #[lang = "sized"]
+ trait Sized {}
+
+ trait Test { fn test(&self) -> bool; }
+ impl<T: Sized> Test for T {}
+
+ fn test() {
+ 1u8.test();
+ (*"foo").test(); // not Sized
+ (1u8, 1u8).test();
+ (1u8, *"foo").test(); // not Sized
+ }
+ "#,
+ expect![[r#"
+ 56..60 'self': &Self
+ 113..228 '{ ...ized }': ()
+ 119..122 '1u8': u8
+ 119..129 '1u8.test()': bool
+ 135..150 '(*"foo").test()': {unknown}
+ 136..142 '*"foo"': str
+ 137..142 '"foo"': &str
+ 169..179 '(1u8, 1u8)': (u8, u8)
+ 169..186 '(1u8, ...test()': bool
+ 170..173 '1u8': u8
+ 175..178 '1u8': u8
+ 192..205 '(1u8, *"foo")': (u8, str)
+ 192..212 '(1u8, ...test()': {unknown}
+ 193..196 '1u8': u8
+ 198..204 '*"foo"': str
+ 199..204 '"foo"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn integer_range_iterate() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+fn test() {
+ for x in 0..100 { x; }
+} //^ i32
+
+//- /core.rs crate:core
+pub mod ops {
+ pub struct Range<Idx> {
+ pub start: Idx,
+ pub end: Idx,
+ }
+}
+
+pub mod iter {
+ pub trait Iterator {
+ type Item;
+ }
+
+ pub trait IntoIterator {
+ type Item;
+ type IntoIter: Iterator<Item = Self::Item>;
+ }
+
+ impl<T> IntoIterator for T where T: Iterator {
+ type Item = <T as Iterator>::Item;
+ type IntoIter = Self;
+ }
+}
+
+trait Step {}
+impl Step for i32 {}
+impl Step for i64 {}
+
+impl<A: Step> iter::Iterator for ops::Range<A> {
+ type Item = A;
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_closure_arg() {
+ check_infer(
+ r#"
+ //- /lib.rs
+
+ enum Option<T> {
+ None,
+ Some(T)
+ }
+
+ fn foo() {
+ let s = Option::None;
+ let f = |x: Option<i32>| {};
+ (&f)(s)
+ }
+ "#,
+ expect![[r#"
+ 52..126 '{ ...)(s) }': ()
+ 62..63 's': Option<i32>
+ 66..78 'Option::None': Option<i32>
+ 88..89 'f': |Option<i32>| -> ()
+ 92..111 '|x: Op...2>| {}': |Option<i32>| -> ()
+ 93..94 'x': Option<i32>
+ 109..111 '{}': ()
+ 117..124 '(&f)(s)': ()
+ 118..120 '&f': &|Option<i32>| -> ()
+ 119..120 'f': |Option<i32>| -> ()
+ 122..123 's': Option<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_fn_trait_arg() {
+ check_infer(
+ r#"
+ //- /lib.rs deps:std
+
+ #[lang = "fn_once"]
+ pub trait FnOnce<Args> {
+ type Output;
+
+ extern "rust-call" fn call_once(&self, args: Args) -> Self::Output;
+ }
+
+ #[lang = "fn"]
+ pub trait Fn<Args>:FnOnce<Args> {
+ extern "rust-call" fn call(&self, args: Args) -> Self::Output;
+ }
+
+ enum Option<T> {
+ None,
+ Some(T)
+ }
+
+ fn foo<F, T>(f: F) -> T
+ where
+ F: Fn(Option<i32>) -> T,
+ {
+ let s = None;
+ f(s)
+ }
+ "#,
+ expect![[r#"
+ 101..105 'self': &Self
+ 107..111 'args': Args
+ 220..224 'self': &Self
+ 226..230 'args': Args
+ 313..314 'f': F
+ 359..389 '{ ...f(s) }': T
+ 369..370 's': Option<i32>
+ 373..377 'None': Option<i32>
+ 383..384 'f': F
+ 383..387 'f(s)': T
+ 385..386 's': Option<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_box_fn_arg() {
+ check_infer(
+ r#"
+ //- /lib.rs deps:std
+
+ #[lang = "fn_once"]
+ pub trait FnOnce<Args> {
+ type Output;
+
+ extern "rust-call" fn call_once(self, args: Args) -> Self::Output;
+ }
+
+ #[lang = "deref"]
+ pub trait Deref {
+ type Target: ?Sized;
+
+ fn deref(&self) -> &Self::Target;
+ }
+
+ #[lang = "owned_box"]
+ pub struct Box<T: ?Sized> {
+ inner: *mut T,
+ }
+
+ impl<T: ?Sized> Deref for Box<T> {
+ type Target = T;
+
+ fn deref(&self) -> &T {
+ &self.inner
+ }
+ }
+
+ enum Option<T> {
+ None,
+ Some(T)
+ }
+
+ fn foo() {
+ let s = Option::None;
+ let f: Box<dyn FnOnce(&Option<i32>)> = box (|ps| {});
+ f(&s)
+ }
+ "#,
+ expect![[r#"
+ 100..104 'self': Self
+ 106..110 'args': Args
+ 214..218 'self': &Self
+ 384..388 'self': &Box<T>
+ 396..423 '{ ... }': &T
+ 406..417 '&self.inner': &*mut T
+ 407..411 'self': &Box<T>
+ 407..417 'self.inner': *mut T
+ 478..575 '{ ...(&s) }': FnOnce::Output<dyn FnOnce<(&Option<i32>,)>, (&Option<i32>,)>
+ 488..489 's': Option<i32>
+ 492..504 'Option::None': Option<i32>
+ 514..515 'f': Box<dyn FnOnce<(&Option<i32>,)>>
+ 549..562 'box (|ps| {})': Box<|{unknown}| -> ()>
+ 554..561 '|ps| {}': |{unknown}| -> ()
+ 555..557 'ps': {unknown}
+ 559..561 '{}': ()
+ 568..569 'f': Box<dyn FnOnce<(&Option<i32>,)>>
+ 568..573 'f(&s)': FnOnce::Output<dyn FnOnce<(&Option<i32>,)>, (&Option<i32>,)>
+ 570..572 '&s': &Option<i32>
+ 571..572 's': Option<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_dyn_fn_output() {
+ check_types(
+ r#"
+#[lang = "fn_once"]
+pub trait FnOnce<Args> {
+ type Output;
+ extern "rust-call" fn call_once(self, args: Args) -> Self::Output;
+}
+
+#[lang = "fn"]
+pub trait Fn<Args>: FnOnce<Args> {
+ extern "rust-call" fn call(&self, args: Args) -> Self::Output;
+}
+
+fn foo() {
+ let f: &dyn Fn() -> i32;
+ f();
+ //^^^ i32
+}"#,
+ );
+}
+
+#[test]
+fn infer_dyn_fn_once_output() {
+ check_types(
+ r#"
+#[lang = "fn_once"]
+pub trait FnOnce<Args> {
+ type Output;
+ extern "rust-call" fn call_once(self, args: Args) -> Self::Output;
+}
+
+fn foo() {
+ let f: dyn FnOnce() -> i32;
+ f();
+ //^^^ i32
+}"#,
+ );
+}
+
+#[test]
+fn variable_kinds_1() {
+ check_types(
+ r#"
+trait Trait<T> { fn get(self, t: T) -> T; }
+struct S;
+impl Trait<u128> for S {}
+impl Trait<f32> for S {}
+fn test() {
+ S.get(1);
+ //^^^^^^^^ u128
+ S.get(1.);
+ //^^^^^^^^ f32
+}
+ "#,
+ );
+}
+
+#[test]
+fn variable_kinds_2() {
+ check_types(
+ r#"
+trait Trait { fn get(self) -> Self; }
+impl Trait for u128 {}
+impl Trait for f32 {}
+fn test() {
+ 1.get();
+ //^^^^^^^ u128
+ (1.).get();
+ //^^^^^^^^^^ f32
+}
+ "#,
+ );
+}
+
+#[test]
+fn underscore_import() {
+ check_types(
+ r#"
+mod tr {
+ pub trait Tr {
+ fn method(&self) -> u8 { 0 }
+ }
+}
+
+struct Tr;
+impl crate::tr::Tr for Tr {}
+
+use crate::tr::Tr as _;
+fn test() {
+ Tr.method();
+ //^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
--- /dev/null
+//! Trait solving using Chalk.
+use std::sync::Arc;
+
+use base_db::CrateId;
+use chalk_ir::cast::Cast;
+use chalk_solve::Solver;
+use hir_def::{lang_item::LangItemTarget, TraitId};
+
+use crate::{db::HirDatabase, DebruijnIndex, Substs};
+
+use super::{Canonical, GenericPredicate, HirDisplay, ProjectionTy, TraitRef, Ty, TypeWalk};
+
+use self::chalk::{from_chalk, Interner, ToChalk};
+
+pub(crate) mod chalk;
+
+// This controls the maximum size of types Chalk considers. If we set this too
+// high, we can run into slow edge cases; if we set it too low, Chalk won't
+// find some solutions.
+// FIXME this is currently hardcoded in the recursive solver
+// const CHALK_SOLVER_MAX_SIZE: usize = 10;
+
+/// This controls how much 'time' we give the Chalk solver before giving up.
+const CHALK_SOLVER_FUEL: i32 = 100;
+
+#[derive(Debug, Copy, Clone)]
+struct ChalkContext<'a> {
+ db: &'a dyn HirDatabase,
+ krate: CrateId,
+}
+
+fn create_chalk_solver() -> chalk_recursive::RecursiveSolver<Interner> {
+ let overflow_depth = 100;
+ let caching_enabled = true;
+ chalk_recursive::RecursiveSolver::new(overflow_depth, caching_enabled)
+}
+
+/// A set of clauses that we assume to be true. E.g. if we are inside this function:
+/// ```rust
+/// fn foo<T: Default>(t: T) {}
+/// ```
+/// we assume that `T: Default`.
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub struct TraitEnvironment {
+ pub predicates: Vec<GenericPredicate>,
+}
+
+impl TraitEnvironment {
+ /// Returns trait refs with the given self type which are supposed to hold
+ /// in this trait env. E.g. if we are in `foo<T: SomeTrait>()`, this will
+ /// find that `T: SomeTrait` if we call it for `T`.
+ pub(crate) fn trait_predicates_for_self_ty<'a>(
+ &'a self,
+ ty: &'a Ty,
+ ) -> impl Iterator<Item = &'a TraitRef> + 'a {
+ self.predicates.iter().filter_map(move |pred| match pred {
+ GenericPredicate::Implemented(tr) if tr.self_ty() == ty => Some(tr),
+ _ => None,
+ })
+ }
+}
+
+/// Something (usually a goal), along with an environment.
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub struct InEnvironment<T> {
+ pub environment: Arc<TraitEnvironment>,
+ pub value: T,
+}
+
+impl<T> InEnvironment<T> {
+ pub fn new(environment: Arc<TraitEnvironment>, value: T) -> InEnvironment<T> {
+ InEnvironment { environment, value }
+ }
+}
+
+/// Something that needs to be proven (by Chalk) during type checking, e.g. that
+/// a certain type implements a certain trait. Proving the Obligation might
+/// result in additional information about inference variables.
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum Obligation {
+ /// Prove that a certain type implements a trait (the type is the `Self` type
+ /// parameter to the `TraitRef`).
+ Trait(TraitRef),
+ Projection(ProjectionPredicate),
+}
+
+impl Obligation {
+ pub fn from_predicate(predicate: GenericPredicate) -> Option<Obligation> {
+ match predicate {
+ GenericPredicate::Implemented(trait_ref) => Some(Obligation::Trait(trait_ref)),
+ GenericPredicate::Projection(projection_pred) => {
+ Some(Obligation::Projection(projection_pred))
+ }
+ GenericPredicate::Error => None,
+ }
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub struct ProjectionPredicate {
+ pub projection_ty: ProjectionTy,
+ pub ty: Ty,
+}
+
+impl TypeWalk for ProjectionPredicate {
+ fn walk(&self, f: &mut impl FnMut(&Ty)) {
+ self.projection_ty.walk(f);
+ self.ty.walk(f);
+ }
+
+ fn walk_mut_binders(
+ &mut self,
+ f: &mut impl FnMut(&mut Ty, DebruijnIndex),
+ binders: DebruijnIndex,
+ ) {
+ self.projection_ty.walk_mut_binders(f, binders);
+ self.ty.walk_mut_binders(f, binders);
+ }
+}
+
+/// Solve a trait goal using Chalk.
+pub(crate) fn trait_solve_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ goal: Canonical<InEnvironment<Obligation>>,
+) -> Option<Solution> {
+ let _p = profile::span("trait_solve_query").detail(|| match &goal.value.value {
+ Obligation::Trait(it) => db.trait_data(it.trait_).name.to_string(),
+ Obligation::Projection(_) => "projection".to_string(),
+ });
+ log::info!("trait_solve_query({})", goal.value.value.display(db));
+
+ if let Obligation::Projection(pred) = &goal.value.value {
+ if let Ty::Bound(_) = &pred.projection_ty.parameters[0] {
+ // Hack: don't ask Chalk to normalize with an unknown self type, it'll say that's impossible
+ return Some(Solution::Ambig(Guidance::Unknown));
+ }
+ }
+
+ let canonical = goal.to_chalk(db).cast(&Interner);
+
+ // We currently don't deal with universes (I think / hope they're not yet
+ // relevant for our use cases?)
+ let u_canonical = chalk_ir::UCanonical { canonical, universes: 1 };
+ let solution = solve(db, krate, &u_canonical);
+ solution.map(|solution| solution_from_chalk(db, solution))
+}
+
+fn solve(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal<Interner>>>,
+) -> Option<chalk_solve::Solution<Interner>> {
+ let context = ChalkContext { db, krate };
+ log::debug!("solve goal: {:?}", goal);
+ let mut solver = create_chalk_solver();
+
+ let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL);
+
+ let should_continue = || {
+ context.db.check_canceled();
+ let remaining = fuel.get();
+ fuel.set(remaining - 1);
+ if remaining == 0 {
+ log::debug!("fuel exhausted");
+ }
+ remaining > 0
+ };
+ let mut solve = || {
+ let solution = solver.solve_limited(&context, goal, should_continue);
+ log::debug!("solve({:?}) => {:?}", goal, solution);
+ solution
+ };
+ // don't set the TLS for Chalk unless Chalk debugging is active, to make
+ // extra sure we only use it for debugging
+ let solution =
+ if is_chalk_debug() { chalk::tls::set_current_program(db, solve) } else { solve() };
+
+ solution
+}
+
+fn is_chalk_debug() -> bool {
+ std::env::var("CHALK_DEBUG").is_ok()
+}
+
+fn solution_from_chalk(
+ db: &dyn HirDatabase,
+ solution: chalk_solve::Solution<Interner>,
+) -> Solution {
+ let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<Interner>>| {
+ let result = from_chalk(db, subst);
+ SolutionVariables(result)
+ };
+ match solution {
+ chalk_solve::Solution::Unique(constr_subst) => {
+ let subst = chalk_ir::Canonical {
+ value: constr_subst.value.subst,
+ binders: constr_subst.binders,
+ };
+ Solution::Unique(convert_subst(subst))
+ }
+ chalk_solve::Solution::Ambig(chalk_solve::Guidance::Definite(subst)) => {
+ Solution::Ambig(Guidance::Definite(convert_subst(subst)))
+ }
+ chalk_solve::Solution::Ambig(chalk_solve::Guidance::Suggested(subst)) => {
+ Solution::Ambig(Guidance::Suggested(convert_subst(subst)))
+ }
+ chalk_solve::Solution::Ambig(chalk_solve::Guidance::Unknown) => {
+ Solution::Ambig(Guidance::Unknown)
+ }
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct SolutionVariables(pub Canonical<Substs>);
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+/// A (possible) solution for a proposed goal.
+pub enum Solution {
+ /// The goal indeed holds, and there is a unique value for all existential
+ /// variables.
+ Unique(SolutionVariables),
+
+ /// The goal may be provable in multiple ways, but regardless we may have some guidance
+ /// for type inference. In this case, we don't return any lifetime
+ /// constraints, since we have not "committed" to any particular solution
+ /// yet.
+ Ambig(Guidance),
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+/// When a goal holds ambiguously (e.g., because there are multiple possible
+/// solutions), we issue a set of *guidance* back to type inference.
+pub enum Guidance {
+ /// The existential variables *must* have the given values if the goal is
+ /// ever to hold, but that alone isn't enough to guarantee the goal will
+ /// actually hold.
+ Definite(SolutionVariables),
+
+ /// There are multiple plausible values for the existentials, but the ones
+ /// here are suggested as the preferred choice heuristically. These should
+ /// be used for inference fallback only.
+ Suggested(SolutionVariables),
+
+ /// There's no useful information to feed back to type inference
+ Unknown,
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum FnTrait {
+ FnOnce,
+ FnMut,
+ Fn,
+}
+
+impl FnTrait {
+ fn lang_item_name(self) -> &'static str {
+ match self {
+ FnTrait::FnOnce => "fn_once",
+ FnTrait::FnMut => "fn_mut",
+ FnTrait::Fn => "fn",
+ }
+ }
+
+ pub fn get_id(&self, db: &dyn HirDatabase, krate: CrateId) -> Option<TraitId> {
+ let target = db.lang_item(krate, self.lang_item_name().into())?;
+ match target {
+ LangItemTarget::TraitId(t) => Some(t),
+ _ => None,
+ }
+ }
+}
--- /dev/null
+//! Conversion code from/to Chalk.
+use std::sync::Arc;
+
+use log::debug;
+
+use chalk_ir::{fold::shift::Shift, CanonicalVarKinds, GenericArg, TypeName};
+use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait};
+
+use base_db::{salsa::InternKey, CrateId};
+use hir_def::{
+ lang_item::{lang_attr, LangItemTarget},
+ AssocContainerId, AssocItemId, HasModule, Lookup, TypeAliasId,
+};
+
+use super::ChalkContext;
+use crate::{
+ db::HirDatabase,
+ display::HirDisplay,
+ method_resolution::{TyFingerprint, ALL_FLOAT_FPS, ALL_INT_FPS},
+ utils::generics,
+ CallableDefId, DebruijnIndex, FnSig, GenericPredicate, Substs, Ty, TypeCtor,
+};
+use mapping::{
+ convert_where_clauses, generic_predicate_to_inline_bound, make_binders, TypeAliasAsValue,
+};
+
+pub use self::interner::*;
+
+pub(super) mod tls;
+mod interner;
+mod mapping;
+
+pub(super) trait ToChalk {
+ type Chalk;
+ fn to_chalk(self, db: &dyn HirDatabase) -> Self::Chalk;
+ fn from_chalk(db: &dyn HirDatabase, chalk: Self::Chalk) -> Self;
+}
+
+pub(super) fn from_chalk<T, ChalkT>(db: &dyn HirDatabase, chalk: ChalkT) -> T
+where
+ T: ToChalk<Chalk = ChalkT>,
+{
+ T::from_chalk(db, chalk)
+}
+
+impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
+ fn associated_ty_data(&self, id: AssocTypeId) -> Arc<AssociatedTyDatum> {
+ self.db.associated_ty_data(id)
+ }
+ fn trait_datum(&self, trait_id: TraitId) -> Arc<TraitDatum> {
+ self.db.trait_datum(self.krate, trait_id)
+ }
+ fn adt_datum(&self, struct_id: AdtId) -> Arc<StructDatum> {
+ self.db.struct_datum(self.krate, struct_id)
+ }
+ fn adt_repr(&self, _struct_id: AdtId) -> rust_ir::AdtRepr {
+ rust_ir::AdtRepr { repr_c: false, repr_packed: false }
+ }
+ fn impl_datum(&self, impl_id: ImplId) -> Arc<ImplDatum> {
+ self.db.impl_datum(self.krate, impl_id)
+ }
+
+ fn fn_def_datum(
+ &self,
+ fn_def_id: chalk_ir::FnDefId<Interner>,
+ ) -> Arc<rust_ir::FnDefDatum<Interner>> {
+ self.db.fn_def_datum(self.krate, fn_def_id)
+ }
+
+ fn impls_for_trait(
+ &self,
+ trait_id: TraitId,
+ parameters: &[GenericArg<Interner>],
+ binders: &CanonicalVarKinds<Interner>,
+ ) -> Vec<ImplId> {
+ debug!("impls_for_trait {:?}", trait_id);
+ let trait_: hir_def::TraitId = from_chalk(self.db, trait_id);
+
+ let ty: Ty = from_chalk(self.db, parameters[0].assert_ty_ref(&Interner).clone());
+
+ fn binder_kind(ty: &Ty, binders: &CanonicalVarKinds<Interner>) -> Option<chalk_ir::TyKind> {
+ if let Ty::Bound(bv) = ty {
+ let binders = binders.as_slice(&Interner);
+ if bv.debruijn == DebruijnIndex::INNERMOST {
+ if let chalk_ir::VariableKind::Ty(tk) = binders[bv.index].kind {
+ return Some(tk);
+ }
+ }
+ }
+ None
+ }
+
+ let self_ty_fp = TyFingerprint::for_impl(&ty);
+ let fps: &[TyFingerprint] = match binder_kind(&ty, binders) {
+ Some(chalk_ir::TyKind::Integer) => &ALL_INT_FPS,
+ Some(chalk_ir::TyKind::Float) => &ALL_FLOAT_FPS,
+ _ => self_ty_fp.as_ref().map(std::slice::from_ref).unwrap_or(&[]),
+ };
+
+ // Note: Since we're using impls_for_trait, only impls where the trait
+ // can be resolved should ever reach Chalk. `impl_datum` relies on that
+ // and will panic if the trait can't be resolved.
+ let in_deps = self.db.trait_impls_in_deps(self.krate);
+ let in_self = self.db.trait_impls_in_crate(self.krate);
+ let impl_maps = [in_deps, in_self];
+
+ let id_to_chalk = |id: hir_def::ImplId| id.to_chalk(self.db);
+
+ let result: Vec<_> = if fps.is_empty() {
+ debug!("Unrestricted search for {:?} impls...", trait_);
+ impl_maps
+ .iter()
+ .flat_map(|crate_impl_defs| crate_impl_defs.for_trait(trait_).map(id_to_chalk))
+ .collect()
+ } else {
+ impl_maps
+ .iter()
+ .flat_map(|crate_impl_defs| {
+ fps.iter().flat_map(move |fp| {
+ crate_impl_defs.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk)
+ })
+ })
+ .collect()
+ };
+
+ debug!("impls_for_trait returned {} impls", result.len());
+ result
+ }
+ fn impl_provided_for(&self, auto_trait_id: TraitId, struct_id: AdtId) -> bool {
+ debug!("impl_provided_for {:?}, {:?}", auto_trait_id, struct_id);
+ false // FIXME
+ }
+ fn associated_ty_value(&self, id: AssociatedTyValueId) -> Arc<AssociatedTyValue> {
+ self.db.associated_ty_value(self.krate, id)
+ }
+
+ fn custom_clauses(&self) -> Vec<chalk_ir::ProgramClause<Interner>> {
+ vec![]
+ }
+ fn local_impls_to_coherence_check(&self, _trait_id: TraitId) -> Vec<ImplId> {
+ // We don't do coherence checking (yet)
+ unimplemented!()
+ }
+ fn interner(&self) -> &Interner {
+ &Interner
+ }
+ fn well_known_trait_id(
+ &self,
+ well_known_trait: rust_ir::WellKnownTrait,
+ ) -> Option<chalk_ir::TraitId<Interner>> {
+ let lang_attr = lang_attr_from_well_known_trait(well_known_trait);
+ let trait_ = match self.db.lang_item(self.krate, lang_attr.into()) {
+ Some(LangItemTarget::TraitId(trait_)) => trait_,
+ _ => return None,
+ };
+ Some(trait_.to_chalk(self.db))
+ }
+
+ fn program_clauses_for_env(
+ &self,
+ environment: &chalk_ir::Environment<Interner>,
+ ) -> chalk_ir::ProgramClauses<Interner> {
+ self.db.program_clauses_for_chalk_env(self.krate, environment.clone())
+ }
+
+ fn opaque_ty_data(&self, id: chalk_ir::OpaqueTyId<Interner>) -> Arc<OpaqueTyDatum> {
+ let interned_id = crate::db::InternedOpaqueTyId::from(id);
+ let full_id = self.db.lookup_intern_impl_trait_id(interned_id);
+ let (func, idx) = match full_id {
+ crate::OpaqueTyId::ReturnTypeImplTrait(func, idx) => (func, idx),
+ };
+ let datas =
+ self.db.return_type_impl_traits(func).expect("impl trait id without impl traits");
+ let data = &datas.value.impl_traits[idx as usize];
+ let bound = OpaqueTyDatumBound {
+ bounds: make_binders(
+ data.bounds
+ .value
+ .iter()
+ .cloned()
+ .filter(|b| !b.is_error())
+ .map(|b| b.to_chalk(self.db))
+ .collect(),
+ 1,
+ ),
+ where_clauses: make_binders(vec![], 0),
+ };
+ let num_vars = datas.num_binders;
+ Arc::new(OpaqueTyDatum { opaque_ty_id: id, bound: make_binders(bound, num_vars) })
+ }
+
+ fn hidden_opaque_type(&self, _id: chalk_ir::OpaqueTyId<Interner>) -> chalk_ir::Ty<Interner> {
+ // FIXME: actually provide the hidden type; it is relevant for auto traits
+ Ty::Unknown.to_chalk(self.db)
+ }
+
+ fn is_object_safe(&self, _trait_id: chalk_ir::TraitId<Interner>) -> bool {
+ // FIXME: implement actual object safety
+ true
+ }
+
+ fn closure_kind(
+ &self,
+ _closure_id: chalk_ir::ClosureId<Interner>,
+ _substs: &chalk_ir::Substitution<Interner>,
+ ) -> rust_ir::ClosureKind {
+ // Fn is the closure kind that implements all three traits
+ rust_ir::ClosureKind::Fn
+ }
+ fn closure_inputs_and_output(
+ &self,
+ _closure_id: chalk_ir::ClosureId<Interner>,
+ substs: &chalk_ir::Substitution<Interner>,
+ ) -> chalk_ir::Binders<rust_ir::FnDefInputsAndOutputDatum<Interner>> {
+ let sig_ty: Ty =
+ from_chalk(self.db, substs.at(&Interner, 0).assert_ty_ref(&Interner).clone());
+ let sig = FnSig::from_fn_ptr_substs(
+ &sig_ty.substs().expect("first closure param should be fn ptr"),
+ false,
+ );
+ let io = rust_ir::FnDefInputsAndOutputDatum {
+ argument_types: sig.params().iter().map(|ty| ty.clone().to_chalk(self.db)).collect(),
+ return_type: sig.ret().clone().to_chalk(self.db),
+ };
+ make_binders(io.shifted_in(&Interner), 0)
+ }
+ fn closure_upvars(
+ &self,
+ _closure_id: chalk_ir::ClosureId<Interner>,
+ _substs: &chalk_ir::Substitution<Interner>,
+ ) -> chalk_ir::Binders<chalk_ir::Ty<Interner>> {
+ let ty = Ty::unit().to_chalk(self.db);
+ make_binders(ty, 0)
+ }
+ fn closure_fn_substitution(
+ &self,
+ _closure_id: chalk_ir::ClosureId<Interner>,
+ _substs: &chalk_ir::Substitution<Interner>,
+ ) -> chalk_ir::Substitution<Interner> {
+ Substs::empty().to_chalk(self.db)
+ }
+
+ fn trait_name(&self, _trait_id: chalk_ir::TraitId<Interner>) -> String {
+ unimplemented!()
+ }
+ fn adt_name(&self, _struct_id: chalk_ir::AdtId<Interner>) -> String {
+ unimplemented!()
+ }
+ fn assoc_type_name(&self, _assoc_ty_id: chalk_ir::AssocTypeId<Interner>) -> String {
+ unimplemented!()
+ }
+ fn opaque_type_name(&self, _opaque_ty_id: chalk_ir::OpaqueTyId<Interner>) -> String {
+ unimplemented!()
+ }
+ fn fn_def_name(&self, _fn_def_id: chalk_ir::FnDefId<Interner>) -> String {
+ unimplemented!()
+ }
+}
+
+pub(crate) fn program_clauses_for_chalk_env_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ environment: chalk_ir::Environment<Interner>,
+) -> chalk_ir::ProgramClauses<Interner> {
+ chalk_solve::program_clauses_for_env(&ChalkContext { db, krate }, &environment)
+}
+
+pub(crate) fn associated_ty_data_query(
+ db: &dyn HirDatabase,
+ id: AssocTypeId,
+) -> Arc<AssociatedTyDatum> {
+ debug!("associated_ty_data {:?}", id);
+ let type_alias: TypeAliasId = from_chalk(db, id);
+ let trait_ = match type_alias.lookup(db.upcast()).container {
+ AssocContainerId::TraitId(t) => t,
+ _ => panic!("associated type not in trait"),
+ };
+
+ // Lower bounds -- we could/should maybe move this to a separate query in `lower`
+ let type_alias_data = db.type_alias_data(type_alias);
+ let generic_params = generics(db.upcast(), type_alias.into());
+ let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST);
+ let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast());
+ let ctx = crate::TyLoweringContext::new(db, &resolver)
+ .with_type_param_mode(crate::lower::TypeParamLoweringMode::Variable);
+ let self_ty = Ty::Bound(crate::BoundVar::new(crate::DebruijnIndex::INNERMOST, 0));
+ let bounds = type_alias_data
+ .bounds
+ .iter()
+ .flat_map(|bound| GenericPredicate::from_type_bound(&ctx, bound, self_ty.clone()))
+ .filter_map(|pred| generic_predicate_to_inline_bound(db, &pred, &self_ty))
+ .map(|bound| make_binders(bound.shifted_in(&Interner), 0))
+ .collect();
+
+ let where_clauses = convert_where_clauses(db, type_alias.into(), &bound_vars);
+ let bound_data = rust_ir::AssociatedTyDatumBound { bounds, where_clauses };
+ let datum = AssociatedTyDatum {
+ trait_id: trait_.to_chalk(db),
+ id,
+ name: type_alias,
+ binders: make_binders(bound_data, generic_params.len()),
+ };
+ Arc::new(datum)
+}
+
+pub(crate) fn trait_datum_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ trait_id: TraitId,
+) -> Arc<TraitDatum> {
+ debug!("trait_datum {:?}", trait_id);
+ let trait_: hir_def::TraitId = from_chalk(db, trait_id);
+ let trait_data = db.trait_data(trait_);
+ debug!("trait {:?} = {:?}", trait_id, trait_data.name);
+ let generic_params = generics(db.upcast(), trait_.into());
+ let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST);
+ let flags = rust_ir::TraitFlags {
+ auto: trait_data.auto,
+ upstream: trait_.lookup(db.upcast()).container.module(db.upcast()).krate != krate,
+ non_enumerable: true,
+ coinductive: false, // only relevant for Chalk testing
+ // FIXME: set these flags correctly
+ marker: false,
+ fundamental: false,
+ };
+ let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
+ let associated_ty_ids =
+ trait_data.associated_types().map(|type_alias| type_alias.to_chalk(db)).collect();
+ let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses };
+ let well_known =
+ lang_attr(db.upcast(), trait_).and_then(|name| well_known_trait_from_lang_attr(&name));
+ let trait_datum = TraitDatum {
+ id: trait_id,
+ binders: make_binders(trait_datum_bound, bound_vars.len()),
+ flags,
+ associated_ty_ids,
+ well_known,
+ };
+ Arc::new(trait_datum)
+}
+
+fn well_known_trait_from_lang_attr(name: &str) -> Option<WellKnownTrait> {
+ Some(match name {
+ "sized" => WellKnownTrait::Sized,
+ "copy" => WellKnownTrait::Copy,
+ "clone" => WellKnownTrait::Clone,
+ "drop" => WellKnownTrait::Drop,
+ "fn_once" => WellKnownTrait::FnOnce,
+ "fn_mut" => WellKnownTrait::FnMut,
+ "fn" => WellKnownTrait::Fn,
+ "unsize" => WellKnownTrait::Unsize,
+ _ => return None,
+ })
+}
+
+fn lang_attr_from_well_known_trait(attr: WellKnownTrait) -> &'static str {
+ match attr {
+ WellKnownTrait::Sized => "sized",
+ WellKnownTrait::Copy => "copy",
+ WellKnownTrait::Clone => "clone",
+ WellKnownTrait::Drop => "drop",
+ WellKnownTrait::FnOnce => "fn_once",
+ WellKnownTrait::FnMut => "fn_mut",
+ WellKnownTrait::Fn => "fn",
+ WellKnownTrait::Unsize => "unsize",
+ }
+}
+
+pub(crate) fn struct_datum_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ struct_id: AdtId,
+) -> Arc<StructDatum> {
+ debug!("struct_datum {:?}", struct_id);
+ let type_ctor: TypeCtor = from_chalk(db, TypeName::Adt(struct_id));
+ debug!("struct {:?} = {:?}", struct_id, type_ctor);
+ let num_params = type_ctor.num_ty_params(db);
+ let upstream = type_ctor.krate(db) != Some(krate);
+ let where_clauses = type_ctor
+ .as_generic_def()
+ .map(|generic_def| {
+ let generic_params = generics(db.upcast(), generic_def);
+ let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST);
+ convert_where_clauses(db, generic_def, &bound_vars)
+ })
+ .unwrap_or_else(Vec::new);
+ let flags = rust_ir::AdtFlags {
+ upstream,
+ // FIXME set fundamental and phantom_data flags correctly
+ fundamental: false,
+ phantom_data: false,
+ };
+ // FIXME provide enum variants properly (for auto traits)
+ let variant = rust_ir::AdtVariantDatum {
+ fields: Vec::new(), // FIXME add fields (only relevant for auto traits),
+ };
+ let struct_datum_bound = rust_ir::AdtDatumBound { variants: vec![variant], where_clauses };
+ let struct_datum = StructDatum {
+ // FIXME set ADT kind
+ kind: rust_ir::AdtKind::Struct,
+ id: struct_id,
+ binders: make_binders(struct_datum_bound, num_params),
+ flags,
+ };
+ Arc::new(struct_datum)
+}
+
+pub(crate) fn impl_datum_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ impl_id: ImplId,
+) -> Arc<ImplDatum> {
+ let _p = profile::span("impl_datum");
+ debug!("impl_datum {:?}", impl_id);
+ let impl_: hir_def::ImplId = from_chalk(db, impl_id);
+ impl_def_datum(db, krate, impl_id, impl_)
+}
+
+fn impl_def_datum(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ chalk_id: ImplId,
+ impl_id: hir_def::ImplId,
+) -> Arc<ImplDatum> {
+ let trait_ref = db
+ .impl_trait(impl_id)
+ // ImplIds for impls where the trait ref can't be resolved should never reach Chalk
+ .expect("invalid impl passed to Chalk")
+ .value;
+ let impl_data = db.impl_data(impl_id);
+
+ let generic_params = generics(db.upcast(), impl_id.into());
+ let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST);
+ let trait_ = trait_ref.trait_;
+ let impl_type = if impl_id.lookup(db.upcast()).container.module(db.upcast()).krate == krate {
+ rust_ir::ImplType::Local
+ } else {
+ rust_ir::ImplType::External
+ };
+ let where_clauses = convert_where_clauses(db, impl_id.into(), &bound_vars);
+ let negative = impl_data.is_negative;
+ debug!(
+ "impl {:?}: {}{} where {:?}",
+ chalk_id,
+ if negative { "!" } else { "" },
+ trait_ref.display(db),
+ where_clauses
+ );
+ let trait_ref = trait_ref.to_chalk(db);
+
+ let polarity = if negative { rust_ir::Polarity::Negative } else { rust_ir::Polarity::Positive };
+
+ let impl_datum_bound = rust_ir::ImplDatumBound { trait_ref, where_clauses };
+ let trait_data = db.trait_data(trait_);
+ let associated_ty_value_ids = impl_data
+ .items
+ .iter()
+ .filter_map(|item| match item {
+ AssocItemId::TypeAliasId(type_alias) => Some(*type_alias),
+ _ => None,
+ })
+ .filter(|&type_alias| {
+ // don't include associated types that don't exist in the trait
+ let name = &db.type_alias_data(type_alias).name;
+ trait_data.associated_type_by_name(name).is_some()
+ })
+ .map(|type_alias| TypeAliasAsValue(type_alias).to_chalk(db))
+ .collect();
+ debug!("impl_datum: {:?}", impl_datum_bound);
+ let impl_datum = ImplDatum {
+ binders: make_binders(impl_datum_bound, bound_vars.len()),
+ impl_type,
+ polarity,
+ associated_ty_value_ids,
+ };
+ Arc::new(impl_datum)
+}
+
+pub(crate) fn associated_ty_value_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ id: AssociatedTyValueId,
+) -> Arc<AssociatedTyValue> {
+ let type_alias: TypeAliasAsValue = from_chalk(db, id);
+ type_alias_associated_ty_value(db, krate, type_alias.0)
+}
+
+fn type_alias_associated_ty_value(
+ db: &dyn HirDatabase,
+ _krate: CrateId,
+ type_alias: TypeAliasId,
+) -> Arc<AssociatedTyValue> {
+ let type_alias_data = db.type_alias_data(type_alias);
+ let impl_id = match type_alias.lookup(db.upcast()).container {
+ AssocContainerId::ImplId(it) => it,
+ _ => panic!("assoc ty value should be in impl"),
+ };
+
+ let trait_ref = db.impl_trait(impl_id).expect("assoc ty value should not exist").value; // we don't return any assoc ty values if the impl'd trait can't be resolved
+
+ let assoc_ty = db
+ .trait_data(trait_ref.trait_)
+ .associated_type_by_name(&type_alias_data.name)
+ .expect("assoc ty value should not exist"); // validated when building the impl data as well
+ let ty = db.ty(type_alias.into());
+ let value_bound = rust_ir::AssociatedTyValueBound { ty: ty.value.to_chalk(db) };
+ let value = rust_ir::AssociatedTyValue {
+ impl_id: impl_id.to_chalk(db),
+ associated_ty_id: assoc_ty.to_chalk(db),
+ value: make_binders(value_bound, ty.num_binders),
+ };
+ Arc::new(value)
+}
+
+pub(crate) fn fn_def_datum_query(
+ db: &dyn HirDatabase,
+ _krate: CrateId,
+ fn_def_id: FnDefId,
+) -> Arc<FnDefDatum> {
+ let callable_def: CallableDefId = from_chalk(db, fn_def_id);
+ let generic_params = generics(db.upcast(), callable_def.into());
+ let sig = db.callable_item_signature(callable_def);
+ let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST);
+ let where_clauses = convert_where_clauses(db, callable_def.into(), &bound_vars);
+ let bound = rust_ir::FnDefDatumBound {
+ // Note: Chalk doesn't actually use this information yet as far as I am aware, but we provide it anyway
+ inputs_and_output: make_binders(
+ rust_ir::FnDefInputsAndOutputDatum {
+ argument_types: sig
+ .value
+ .params()
+ .iter()
+ .map(|ty| ty.clone().to_chalk(db))
+ .collect(),
+ return_type: sig.value.ret().clone().to_chalk(db),
+ }
+ .shifted_in(&Interner),
+ 0,
+ ),
+ where_clauses,
+ };
+ let datum = FnDefDatum {
+ id: fn_def_id,
+ abi: (),
+ safety: chalk_ir::Safety::Safe,
+ variadic: sig.value.is_varargs,
+ binders: make_binders(bound, sig.num_binders),
+ };
+ Arc::new(datum)
+}
+
+impl From<FnDefId> for crate::db::InternedCallableDefId {
+ fn from(fn_def_id: FnDefId) -> Self {
+ InternKey::from_intern_id(fn_def_id.0)
+ }
+}
+
+impl From<crate::db::InternedCallableDefId> for FnDefId {
+ fn from(callable_def_id: crate::db::InternedCallableDefId) -> Self {
+ chalk_ir::FnDefId(callable_def_id.as_intern_id())
+ }
+}
+
+impl From<OpaqueTyId> for crate::db::InternedOpaqueTyId {
+ fn from(id: OpaqueTyId) -> Self {
+ InternKey::from_intern_id(id.0)
+ }
+}
+
+impl From<crate::db::InternedOpaqueTyId> for OpaqueTyId {
+ fn from(id: crate::db::InternedOpaqueTyId) -> Self {
+ chalk_ir::OpaqueTyId(id.as_intern_id())
+ }
+}
+
+impl From<chalk_ir::ClosureId<Interner>> for crate::db::ClosureId {
+ fn from(id: chalk_ir::ClosureId<Interner>) -> Self {
+ Self::from_intern_id(id.0)
+ }
+}
+
+impl From<crate::db::ClosureId> for chalk_ir::ClosureId<Interner> {
+ fn from(id: crate::db::ClosureId) -> Self {
+ chalk_ir::ClosureId(id.as_intern_id())
+ }
+}
--- /dev/null
+//! Implementation of the Chalk `Interner` trait, which allows customizing the
+//! representation of the various objects Chalk deals with (types, goals etc.).
+
+use super::tls;
+use base_db::salsa::InternId;
+use chalk_ir::{GenericArg, Goal, GoalData};
+use hir_def::TypeAliasId;
+use std::{fmt, sync::Arc};
+
+#[derive(Debug, Copy, Clone, Hash, PartialOrd, Ord, PartialEq, Eq)]
+pub struct Interner;
+
+pub type AssocTypeId = chalk_ir::AssocTypeId<Interner>;
+pub type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum<Interner>;
+pub type TraitId = chalk_ir::TraitId<Interner>;
+pub type TraitDatum = chalk_solve::rust_ir::TraitDatum<Interner>;
+pub type AdtId = chalk_ir::AdtId<Interner>;
+pub type StructDatum = chalk_solve::rust_ir::AdtDatum<Interner>;
+pub type ImplId = chalk_ir::ImplId<Interner>;
+pub type ImplDatum = chalk_solve::rust_ir::ImplDatum<Interner>;
+pub type AssociatedTyValueId = chalk_solve::rust_ir::AssociatedTyValueId<Interner>;
+pub type AssociatedTyValue = chalk_solve::rust_ir::AssociatedTyValue<Interner>;
+pub type FnDefId = chalk_ir::FnDefId<Interner>;
+pub type FnDefDatum = chalk_solve::rust_ir::FnDefDatum<Interner>;
+pub type OpaqueTyId = chalk_ir::OpaqueTyId<Interner>;
+pub type OpaqueTyDatum = chalk_solve::rust_ir::OpaqueTyDatum<Interner>;
+
+impl chalk_ir::interner::Interner for Interner {
+ type InternedType = Box<chalk_ir::TyData<Self>>; // FIXME use Arc?
+ type InternedLifetime = chalk_ir::LifetimeData<Self>;
+ type InternedConst = Arc<chalk_ir::ConstData<Self>>;
+ type InternedConcreteConst = ();
+ type InternedGenericArg = chalk_ir::GenericArgData<Self>;
+ type InternedGoal = Arc<GoalData<Self>>;
+ type InternedGoals = Vec<Goal<Self>>;
+ type InternedSubstitution = Vec<GenericArg<Self>>;
+ type InternedProgramClause = chalk_ir::ProgramClauseData<Self>;
+ type InternedProgramClauses = Arc<[chalk_ir::ProgramClause<Self>]>;
+ type InternedQuantifiedWhereClauses = Vec<chalk_ir::QuantifiedWhereClause<Self>>;
+ type InternedVariableKinds = Vec<chalk_ir::VariableKind<Self>>;
+ type InternedCanonicalVarKinds = Vec<chalk_ir::CanonicalVarKind<Self>>;
+ type InternedConstraints = Vec<chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>>;
+ type DefId = InternId;
+ type InternedAdtId = hir_def::AdtId;
+ type Identifier = TypeAliasId;
+ type FnAbi = ();
+
+ fn debug_adt_id(type_kind_id: AdtId, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_struct_id(type_kind_id, fmt)))
+ }
+
+ fn debug_trait_id(type_kind_id: TraitId, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_trait_id(type_kind_id, fmt)))
+ }
+
+ fn debug_assoc_type_id(id: AssocTypeId, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_assoc_type_id(id, fmt)))
+ }
+
+ fn debug_alias(
+ alias: &chalk_ir::AliasTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_alias(alias, fmt)))
+ }
+
+ fn debug_projection_ty(
+ proj: &chalk_ir::ProjectionTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_projection_ty(proj, fmt)))
+ }
+
+ fn debug_opaque_ty(
+ opaque_ty: &chalk_ir::OpaqueTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_opaque_ty(opaque_ty, fmt)))
+ }
+
+ fn debug_opaque_ty_id(
+ opaque_ty_id: chalk_ir::OpaqueTyId<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_opaque_ty_id(opaque_ty_id, fmt)))
+ }
+
+ fn debug_ty(ty: &chalk_ir::Ty<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_ty(ty, fmt)))
+ }
+
+ fn debug_lifetime(
+ lifetime: &chalk_ir::Lifetime<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_lifetime(lifetime, fmt)))
+ }
+
+ fn debug_generic_arg(
+ parameter: &GenericArg<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_generic_arg(parameter, fmt)))
+ }
+
+ fn debug_goal(goal: &Goal<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_goal(goal, fmt)))
+ }
+
+ fn debug_goals(
+ goals: &chalk_ir::Goals<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_goals(goals, fmt)))
+ }
+
+ fn debug_program_clause_implication(
+ pci: &chalk_ir::ProgramClauseImplication<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_program_clause_implication(pci, fmt)))
+ }
+
+ fn debug_application_ty(
+ application_ty: &chalk_ir::ApplicationTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_application_ty(application_ty, fmt)))
+ }
+
+ fn debug_substitution(
+ substitution: &chalk_ir::Substitution<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_substitution(substitution, fmt)))
+ }
+
+ fn debug_separator_trait_ref(
+ separator_trait_ref: &chalk_ir::SeparatorTraitRef<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| {
+ Some(prog?.debug_separator_trait_ref(separator_trait_ref, fmt))
+ })
+ }
+
+ fn debug_fn_def_id(
+ fn_def_id: chalk_ir::FnDefId<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_fn_def_id(fn_def_id, fmt)))
+ }
+ fn debug_const(
+ constant: &chalk_ir::Const<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_const(constant, fmt)))
+ }
+ fn debug_variable_kinds(
+ variable_kinds: &chalk_ir::VariableKinds<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_variable_kinds(variable_kinds, fmt)))
+ }
+ fn debug_variable_kinds_with_angles(
+ variable_kinds: &chalk_ir::VariableKinds<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| {
+ Some(prog?.debug_variable_kinds_with_angles(variable_kinds, fmt))
+ })
+ }
+ fn debug_canonical_var_kinds(
+ canonical_var_kinds: &chalk_ir::CanonicalVarKinds<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| {
+ Some(prog?.debug_canonical_var_kinds(canonical_var_kinds, fmt))
+ })
+ }
+ fn debug_program_clause(
+ clause: &chalk_ir::ProgramClause<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_program_clause(clause, fmt)))
+ }
+ fn debug_program_clauses(
+ clauses: &chalk_ir::ProgramClauses<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_program_clauses(clauses, fmt)))
+ }
+ fn debug_quantified_where_clauses(
+ clauses: &chalk_ir::QuantifiedWhereClauses<Self>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ tls::with_current_program(|prog| Some(prog?.debug_quantified_where_clauses(clauses, fmt)))
+ }
+
+ fn intern_ty(&self, ty: chalk_ir::TyData<Self>) -> Box<chalk_ir::TyData<Self>> {
+ Box::new(ty)
+ }
+
+ fn ty_data<'a>(&self, ty: &'a Box<chalk_ir::TyData<Self>>) -> &'a chalk_ir::TyData<Self> {
+ ty
+ }
+
+ fn intern_lifetime(
+ &self,
+ lifetime: chalk_ir::LifetimeData<Self>,
+ ) -> chalk_ir::LifetimeData<Self> {
+ lifetime
+ }
+
+ fn lifetime_data<'a>(
+ &self,
+ lifetime: &'a chalk_ir::LifetimeData<Self>,
+ ) -> &'a chalk_ir::LifetimeData<Self> {
+ lifetime
+ }
+
+ fn intern_const(&self, constant: chalk_ir::ConstData<Self>) -> Arc<chalk_ir::ConstData<Self>> {
+ Arc::new(constant)
+ }
+
+ fn const_data<'a>(
+ &self,
+ constant: &'a Arc<chalk_ir::ConstData<Self>>,
+ ) -> &'a chalk_ir::ConstData<Self> {
+ constant
+ }
+
+ fn const_eq(&self, _ty: &Box<chalk_ir::TyData<Self>>, _c1: &(), _c2: &()) -> bool {
+ true
+ }
+
+ fn intern_generic_arg(
+ &self,
+ parameter: chalk_ir::GenericArgData<Self>,
+ ) -> chalk_ir::GenericArgData<Self> {
+ parameter
+ }
+
+ fn generic_arg_data<'a>(
+ &self,
+ parameter: &'a chalk_ir::GenericArgData<Self>,
+ ) -> &'a chalk_ir::GenericArgData<Self> {
+ parameter
+ }
+
+ fn intern_goal(&self, goal: GoalData<Self>) -> Arc<GoalData<Self>> {
+ Arc::new(goal)
+ }
+
+ fn intern_goals<E>(
+ &self,
+ data: impl IntoIterator<Item = Result<Goal<Self>, E>>,
+ ) -> Result<Self::InternedGoals, E> {
+ data.into_iter().collect()
+ }
+
+ fn goal_data<'a>(&self, goal: &'a Arc<GoalData<Self>>) -> &'a GoalData<Self> {
+ goal
+ }
+
+ fn goals_data<'a>(&self, goals: &'a Vec<Goal<Interner>>) -> &'a [Goal<Interner>] {
+ goals
+ }
+
+ fn intern_substitution<E>(
+ &self,
+ data: impl IntoIterator<Item = Result<GenericArg<Self>, E>>,
+ ) -> Result<Vec<GenericArg<Self>>, E> {
+ data.into_iter().collect()
+ }
+
+ fn substitution_data<'a>(
+ &self,
+ substitution: &'a Vec<GenericArg<Self>>,
+ ) -> &'a [GenericArg<Self>] {
+ substitution
+ }
+
+ fn intern_program_clause(
+ &self,
+ data: chalk_ir::ProgramClauseData<Self>,
+ ) -> chalk_ir::ProgramClauseData<Self> {
+ data
+ }
+
+ fn program_clause_data<'a>(
+ &self,
+ clause: &'a chalk_ir::ProgramClauseData<Self>,
+ ) -> &'a chalk_ir::ProgramClauseData<Self> {
+ clause
+ }
+
+ fn intern_program_clauses<E>(
+ &self,
+ data: impl IntoIterator<Item = Result<chalk_ir::ProgramClause<Self>, E>>,
+ ) -> Result<Arc<[chalk_ir::ProgramClause<Self>]>, E> {
+ data.into_iter().collect()
+ }
+
+ fn program_clauses_data<'a>(
+ &self,
+ clauses: &'a Arc<[chalk_ir::ProgramClause<Self>]>,
+ ) -> &'a [chalk_ir::ProgramClause<Self>] {
+ &clauses
+ }
+
+ fn intern_quantified_where_clauses<E>(
+ &self,
+ data: impl IntoIterator<Item = Result<chalk_ir::QuantifiedWhereClause<Self>, E>>,
+ ) -> Result<Self::InternedQuantifiedWhereClauses, E> {
+ data.into_iter().collect()
+ }
+
+ fn quantified_where_clauses_data<'a>(
+ &self,
+ clauses: &'a Self::InternedQuantifiedWhereClauses,
+ ) -> &'a [chalk_ir::QuantifiedWhereClause<Self>] {
+ clauses
+ }
+
+ fn intern_generic_arg_kinds<E>(
+ &self,
+ data: impl IntoIterator<Item = Result<chalk_ir::VariableKind<Self>, E>>,
+ ) -> Result<Self::InternedVariableKinds, E> {
+ data.into_iter().collect()
+ }
+
+ fn variable_kinds_data<'a>(
+ &self,
+ parameter_kinds: &'a Self::InternedVariableKinds,
+ ) -> &'a [chalk_ir::VariableKind<Self>] {
+ ¶meter_kinds
+ }
+
+ fn intern_canonical_var_kinds<E>(
+ &self,
+ data: impl IntoIterator<Item = Result<chalk_ir::CanonicalVarKind<Self>, E>>,
+ ) -> Result<Self::InternedCanonicalVarKinds, E> {
+ data.into_iter().collect()
+ }
+
+ fn canonical_var_kinds_data<'a>(
+ &self,
+ canonical_var_kinds: &'a Self::InternedCanonicalVarKinds,
+ ) -> &'a [chalk_ir::CanonicalVarKind<Self>] {
+ &canonical_var_kinds
+ }
+
+ fn intern_constraints<E>(
+ &self,
+ data: impl IntoIterator<Item = Result<chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>, E>>,
+ ) -> Result<Self::InternedConstraints, E> {
+ data.into_iter().collect()
+ }
+
+ fn constraints_data<'a>(
+ &self,
+ constraints: &'a Self::InternedConstraints,
+ ) -> &'a [chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>] {
+ constraints
+ }
+ fn debug_closure_id(
+ _fn_def_id: chalk_ir::ClosureId<Self>,
+ _fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ None
+ }
+ fn debug_constraints(
+ _clauses: &chalk_ir::Constraints<Self>,
+ _fmt: &mut fmt::Formatter<'_>,
+ ) -> Option<fmt::Result> {
+ None
+ }
+}
+
+impl chalk_ir::interner::HasInterner for Interner {
+ type Interner = Self;
+}
--- /dev/null
+//! This module contains the implementations of the `ToChalk` trait, which
+//! handles conversion between our data types and their corresponding types in
+//! Chalk (in both directions); plus some helper functions for more specialized
+//! conversions.
+
+use chalk_ir::{
+ cast::Cast, fold::shift::Shift, interner::HasInterner, PlaceholderIndex, Scalar, TypeName,
+ UniverseIndex,
+};
+use chalk_solve::rust_ir;
+
+use base_db::salsa::InternKey;
+use hir_def::{type_ref::Mutability, AssocContainerId, GenericDefId, Lookup, TypeAliasId};
+
+use crate::{
+ db::HirDatabase,
+ primitive::{FloatBitness, FloatTy, IntBitness, IntTy, Signedness},
+ traits::{Canonical, Obligation},
+ ApplicationTy, CallableDefId, GenericPredicate, InEnvironment, OpaqueTy, OpaqueTyId,
+ ProjectionPredicate, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TyKind, TypeCtor,
+};
+
+use super::interner::*;
+use super::*;
+
+impl ToChalk for Ty {
+ type Chalk = chalk_ir::Ty<Interner>;
+ fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Ty<Interner> {
+ match self {
+ Ty::Apply(apply_ty) => match apply_ty.ctor {
+ TypeCtor::Ref(m) => ref_to_chalk(db, m, apply_ty.parameters),
+ TypeCtor::Array => array_to_chalk(db, apply_ty.parameters),
+ TypeCtor::FnPtr { num_args: _, is_varargs } => {
+ let substitution = apply_ty.parameters.to_chalk(db).shifted_in(&Interner);
+ chalk_ir::TyData::Function(chalk_ir::FnPointer {
+ num_binders: 0,
+ abi: (),
+ safety: chalk_ir::Safety::Safe,
+ variadic: is_varargs,
+ substitution,
+ })
+ .intern(&Interner)
+ }
+ _ => {
+ let name = apply_ty.ctor.to_chalk(db);
+ let substitution = apply_ty.parameters.to_chalk(db);
+ chalk_ir::ApplicationTy { name, substitution }.cast(&Interner).intern(&Interner)
+ }
+ },
+ Ty::Projection(proj_ty) => {
+ let associated_ty_id = proj_ty.associated_ty.to_chalk(db);
+ let substitution = proj_ty.parameters.to_chalk(db);
+ chalk_ir::AliasTy::Projection(chalk_ir::ProjectionTy {
+ associated_ty_id,
+ substitution,
+ })
+ .cast(&Interner)
+ .intern(&Interner)
+ }
+ Ty::Placeholder(id) => {
+ let interned_id = db.intern_type_param_id(id);
+ PlaceholderIndex {
+ ui: UniverseIndex::ROOT,
+ idx: interned_id.as_intern_id().as_usize(),
+ }
+ .to_ty::<Interner>(&Interner)
+ }
+ Ty::Bound(idx) => chalk_ir::TyData::BoundVar(idx).intern(&Interner),
+ Ty::Infer(_infer_ty) => panic!("uncanonicalized infer ty"),
+ Ty::Dyn(predicates) => {
+ let where_clauses = chalk_ir::QuantifiedWhereClauses::from_iter(
+ &Interner,
+ predicates.iter().filter(|p| !p.is_error()).cloned().map(|p| p.to_chalk(db)),
+ );
+ let bounded_ty = chalk_ir::DynTy {
+ bounds: make_binders(where_clauses, 1),
+ lifetime: FAKE_PLACEHOLDER.to_lifetime(&Interner),
+ };
+ chalk_ir::TyData::Dyn(bounded_ty).intern(&Interner)
+ }
+ Ty::Opaque(opaque_ty) => {
+ let opaque_ty_id = opaque_ty.opaque_ty_id.to_chalk(db);
+ let substitution = opaque_ty.parameters.to_chalk(db);
+ chalk_ir::TyData::Alias(chalk_ir::AliasTy::Opaque(chalk_ir::OpaqueTy {
+ opaque_ty_id,
+ substitution,
+ }))
+ .intern(&Interner)
+ }
+ Ty::Unknown => {
+ let substitution = chalk_ir::Substitution::empty(&Interner);
+ let name = TypeName::Error;
+ chalk_ir::ApplicationTy { name, substitution }.cast(&Interner).intern(&Interner)
+ }
+ }
+ }
+ fn from_chalk(db: &dyn HirDatabase, chalk: chalk_ir::Ty<Interner>) -> Self {
+ match chalk.data(&Interner).clone() {
+ chalk_ir::TyData::Apply(apply_ty) => match apply_ty.name {
+ TypeName::Error => Ty::Unknown,
+ TypeName::Ref(m) => ref_from_chalk(db, m, apply_ty.substitution),
+ TypeName::Array => array_from_chalk(db, apply_ty.substitution),
+ _ => {
+ let ctor = from_chalk(db, apply_ty.name);
+ let parameters = from_chalk(db, apply_ty.substitution);
+ Ty::Apply(ApplicationTy { ctor, parameters })
+ }
+ },
+ chalk_ir::TyData::Placeholder(idx) => {
+ assert_eq!(idx.ui, UniverseIndex::ROOT);
+ let interned_id = crate::db::GlobalTypeParamId::from_intern_id(
+ crate::salsa::InternId::from(idx.idx),
+ );
+ Ty::Placeholder(db.lookup_intern_type_param_id(interned_id))
+ }
+ chalk_ir::TyData::Alias(chalk_ir::AliasTy::Projection(proj)) => {
+ let associated_ty = from_chalk(db, proj.associated_ty_id);
+ let parameters = from_chalk(db, proj.substitution);
+ Ty::Projection(ProjectionTy { associated_ty, parameters })
+ }
+ chalk_ir::TyData::Alias(chalk_ir::AliasTy::Opaque(opaque_ty)) => {
+ let impl_trait_id = from_chalk(db, opaque_ty.opaque_ty_id);
+ let parameters = from_chalk(db, opaque_ty.substitution);
+ Ty::Opaque(OpaqueTy { opaque_ty_id: impl_trait_id, parameters })
+ }
+ chalk_ir::TyData::Function(chalk_ir::FnPointer {
+ num_binders,
+ variadic,
+ substitution,
+ ..
+ }) => {
+ assert_eq!(num_binders, 0);
+ let parameters: Substs = from_chalk(
+ db,
+ substitution.shifted_out(&Interner).expect("fn ptr should have no binders"),
+ );
+ Ty::Apply(ApplicationTy {
+ ctor: TypeCtor::FnPtr {
+ num_args: (parameters.len() - 1) as u16,
+ is_varargs: variadic,
+ },
+ parameters,
+ })
+ }
+ chalk_ir::TyData::BoundVar(idx) => Ty::Bound(idx),
+ chalk_ir::TyData::InferenceVar(_iv, _kind) => Ty::Unknown,
+ chalk_ir::TyData::Dyn(where_clauses) => {
+ assert_eq!(where_clauses.bounds.binders.len(&Interner), 1);
+ let predicates = where_clauses
+ .bounds
+ .skip_binders()
+ .iter(&Interner)
+ .map(|c| from_chalk(db, c.clone()))
+ .collect();
+ Ty::Dyn(predicates)
+ }
+ }
+ }
+}
+
+const FAKE_PLACEHOLDER: PlaceholderIndex =
+ PlaceholderIndex { ui: UniverseIndex::ROOT, idx: usize::MAX };
+
+/// We currently don't model lifetimes, but Chalk does. So, we have to insert a
+/// fake lifetime here, because Chalk's built-in logic may expect it to be there.
+fn ref_to_chalk(
+ db: &dyn HirDatabase,
+ mutability: Mutability,
+ subst: Substs,
+) -> chalk_ir::Ty<Interner> {
+ let arg = subst[0].clone().to_chalk(db);
+ let lifetime = FAKE_PLACEHOLDER.to_lifetime(&Interner);
+ chalk_ir::ApplicationTy {
+ name: TypeName::Ref(mutability.to_chalk(db)),
+ substitution: chalk_ir::Substitution::from_iter(
+ &Interner,
+ vec![lifetime.cast(&Interner), arg.cast(&Interner)],
+ ),
+ }
+ .intern(&Interner)
+}
+
+/// Here we remove the lifetime from the type we got from Chalk.
+fn ref_from_chalk(
+ db: &dyn HirDatabase,
+ mutability: chalk_ir::Mutability,
+ subst: chalk_ir::Substitution<Interner>,
+) -> Ty {
+ let tys = subst
+ .iter(&Interner)
+ .filter_map(|p| Some(from_chalk(db, p.ty(&Interner)?.clone())))
+ .collect();
+ Ty::apply(TypeCtor::Ref(from_chalk(db, mutability)), Substs(tys))
+}
+
+/// We currently don't model constants, but Chalk does. So, we have to insert a
+/// fake constant here, because Chalk's built-in logic may expect it to be there.
+fn array_to_chalk(db: &dyn HirDatabase, subst: Substs) -> chalk_ir::Ty<Interner> {
+ let arg = subst[0].clone().to_chalk(db);
+ let usize_ty = chalk_ir::ApplicationTy {
+ name: TypeName::Scalar(Scalar::Uint(chalk_ir::UintTy::Usize)),
+ substitution: chalk_ir::Substitution::empty(&Interner),
+ }
+ .intern(&Interner);
+ let const_ = FAKE_PLACEHOLDER.to_const(&Interner, usize_ty);
+ chalk_ir::ApplicationTy {
+ name: TypeName::Array,
+ substitution: chalk_ir::Substitution::from_iter(
+ &Interner,
+ vec![arg.cast(&Interner), const_.cast(&Interner)],
+ ),
+ }
+ .intern(&Interner)
+}
+
+/// Here we remove the const from the type we got from Chalk.
+fn array_from_chalk(db: &dyn HirDatabase, subst: chalk_ir::Substitution<Interner>) -> Ty {
+ let tys = subst
+ .iter(&Interner)
+ .filter_map(|p| Some(from_chalk(db, p.ty(&Interner)?.clone())))
+ .collect();
+ Ty::apply(TypeCtor::Array, Substs(tys))
+}
+
+impl ToChalk for Substs {
+ type Chalk = chalk_ir::Substitution<Interner>;
+
+ fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Substitution<Interner> {
+ chalk_ir::Substitution::from_iter(&Interner, self.iter().map(|ty| ty.clone().to_chalk(db)))
+ }
+
+ fn from_chalk(db: &dyn HirDatabase, parameters: chalk_ir::Substitution<Interner>) -> Substs {
+ let tys = parameters
+ .iter(&Interner)
+ .map(|p| match p.ty(&Interner) {
+ Some(ty) => from_chalk(db, ty.clone()),
+ None => unimplemented!(),
+ })
+ .collect();
+ Substs(tys)
+ }
+}
+
+impl ToChalk for TraitRef {
+ type Chalk = chalk_ir::TraitRef<Interner>;
+
+ fn to_chalk(self: TraitRef, db: &dyn HirDatabase) -> chalk_ir::TraitRef<Interner> {
+ let trait_id = self.trait_.to_chalk(db);
+ let substitution = self.substs.to_chalk(db);
+ chalk_ir::TraitRef { trait_id, substitution }
+ }
+
+ fn from_chalk(db: &dyn HirDatabase, trait_ref: chalk_ir::TraitRef<Interner>) -> Self {
+ let trait_ = from_chalk(db, trait_ref.trait_id);
+ let substs = from_chalk(db, trait_ref.substitution);
+ TraitRef { trait_, substs }
+ }
+}
+
+impl ToChalk for hir_def::TraitId {
+ type Chalk = TraitId;
+
+ fn to_chalk(self, _db: &dyn HirDatabase) -> TraitId {
+ chalk_ir::TraitId(self.as_intern_id())
+ }
+
+ fn from_chalk(_db: &dyn HirDatabase, trait_id: TraitId) -> hir_def::TraitId {
+ InternKey::from_intern_id(trait_id.0)
+ }
+}
+
+impl ToChalk for OpaqueTyId {
+ type Chalk = chalk_ir::OpaqueTyId<Interner>;
+
+ fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::OpaqueTyId<Interner> {
+ db.intern_impl_trait_id(self).into()
+ }
+
+ fn from_chalk(
+ db: &dyn HirDatabase,
+ opaque_ty_id: chalk_ir::OpaqueTyId<Interner>,
+ ) -> OpaqueTyId {
+ db.lookup_intern_impl_trait_id(opaque_ty_id.into())
+ }
+}
+
+impl ToChalk for TypeCtor {
+ type Chalk = TypeName<Interner>;
+
+ fn to_chalk(self, db: &dyn HirDatabase) -> TypeName<Interner> {
+ match self {
+ TypeCtor::AssociatedType(type_alias) => {
+ let type_id = type_alias.to_chalk(db);
+ TypeName::AssociatedType(type_id)
+ }
+
+ TypeCtor::OpaqueType(impl_trait_id) => {
+ let id = impl_trait_id.to_chalk(db);
+ TypeName::OpaqueType(id)
+ }
+
+ TypeCtor::Bool => TypeName::Scalar(Scalar::Bool),
+ TypeCtor::Char => TypeName::Scalar(Scalar::Char),
+ TypeCtor::Int(int_ty) => TypeName::Scalar(int_ty_to_chalk(int_ty)),
+ TypeCtor::Float(FloatTy { bitness: FloatBitness::X32 }) => {
+ TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F32))
+ }
+ TypeCtor::Float(FloatTy { bitness: FloatBitness::X64 }) => {
+ TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F64))
+ }
+
+ TypeCtor::Tuple { cardinality } => TypeName::Tuple(cardinality.into()),
+ TypeCtor::RawPtr(mutability) => TypeName::Raw(mutability.to_chalk(db)),
+ TypeCtor::Slice => TypeName::Slice,
+ TypeCtor::Array => TypeName::Array,
+ TypeCtor::Ref(mutability) => TypeName::Ref(mutability.to_chalk(db)),
+ TypeCtor::Str => TypeName::Str,
+ TypeCtor::FnDef(callable_def) => {
+ let id = callable_def.to_chalk(db);
+ TypeName::FnDef(id)
+ }
+ TypeCtor::Never => TypeName::Never,
+
+ TypeCtor::Closure { def, expr } => {
+ let closure_id = db.intern_closure((def, expr));
+ TypeName::Closure(closure_id.into())
+ }
+
+ TypeCtor::Adt(adt_id) => TypeName::Adt(chalk_ir::AdtId(adt_id)),
+
+ TypeCtor::FnPtr { .. } => {
+ // This should not be reached, since Chalk doesn't represent
+ // function pointers with TypeName
+ unreachable!()
+ }
+ }
+ }
+
+ fn from_chalk(db: &dyn HirDatabase, type_name: TypeName<Interner>) -> TypeCtor {
+ match type_name {
+ TypeName::Adt(struct_id) => TypeCtor::Adt(struct_id.0),
+ TypeName::AssociatedType(type_id) => TypeCtor::AssociatedType(from_chalk(db, type_id)),
+ TypeName::OpaqueType(opaque_type_id) => {
+ TypeCtor::OpaqueType(from_chalk(db, opaque_type_id))
+ }
+
+ TypeName::Scalar(Scalar::Bool) => TypeCtor::Bool,
+ TypeName::Scalar(Scalar::Char) => TypeCtor::Char,
+ TypeName::Scalar(Scalar::Int(int_ty)) => TypeCtor::Int(IntTy {
+ signedness: Signedness::Signed,
+ bitness: bitness_from_chalk_int(int_ty),
+ }),
+ TypeName::Scalar(Scalar::Uint(uint_ty)) => TypeCtor::Int(IntTy {
+ signedness: Signedness::Unsigned,
+ bitness: bitness_from_chalk_uint(uint_ty),
+ }),
+ TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F32)) => {
+ TypeCtor::Float(FloatTy { bitness: FloatBitness::X32 })
+ }
+ TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F64)) => {
+ TypeCtor::Float(FloatTy { bitness: FloatBitness::X64 })
+ }
+ TypeName::Tuple(cardinality) => TypeCtor::Tuple { cardinality: cardinality as u16 },
+ TypeName::Raw(mutability) => TypeCtor::RawPtr(from_chalk(db, mutability)),
+ TypeName::Slice => TypeCtor::Slice,
+ TypeName::Ref(mutability) => TypeCtor::Ref(from_chalk(db, mutability)),
+ TypeName::Str => TypeCtor::Str,
+ TypeName::Never => TypeCtor::Never,
+
+ TypeName::FnDef(fn_def_id) => {
+ let callable_def = from_chalk(db, fn_def_id);
+ TypeCtor::FnDef(callable_def)
+ }
+ TypeName::Array => TypeCtor::Array,
+
+ TypeName::Closure(id) => {
+ let id: crate::db::ClosureId = id.into();
+ let (def, expr) = db.lookup_intern_closure(id);
+ TypeCtor::Closure { def, expr }
+ }
+
+ TypeName::Error => {
+ // this should not be reached, since we don't represent TypeName::Error with TypeCtor
+ unreachable!()
+ }
+ }
+ }
+}
+
+fn bitness_from_chalk_uint(uint_ty: chalk_ir::UintTy) -> IntBitness {
+ use chalk_ir::UintTy;
+
+ match uint_ty {
+ UintTy::Usize => IntBitness::Xsize,
+ UintTy::U8 => IntBitness::X8,
+ UintTy::U16 => IntBitness::X16,
+ UintTy::U32 => IntBitness::X32,
+ UintTy::U64 => IntBitness::X64,
+ UintTy::U128 => IntBitness::X128,
+ }
+}
+
+fn bitness_from_chalk_int(int_ty: chalk_ir::IntTy) -> IntBitness {
+ use chalk_ir::IntTy;
+
+ match int_ty {
+ IntTy::Isize => IntBitness::Xsize,
+ IntTy::I8 => IntBitness::X8,
+ IntTy::I16 => IntBitness::X16,
+ IntTy::I32 => IntBitness::X32,
+ IntTy::I64 => IntBitness::X64,
+ IntTy::I128 => IntBitness::X128,
+ }
+}
+
+fn int_ty_to_chalk(int_ty: IntTy) -> Scalar {
+ use chalk_ir::{IntTy, UintTy};
+
+ match int_ty.signedness {
+ Signedness::Signed => Scalar::Int(match int_ty.bitness {
+ IntBitness::Xsize => IntTy::Isize,
+ IntBitness::X8 => IntTy::I8,
+ IntBitness::X16 => IntTy::I16,
+ IntBitness::X32 => IntTy::I32,
+ IntBitness::X64 => IntTy::I64,
+ IntBitness::X128 => IntTy::I128,
+ }),
+ Signedness::Unsigned => Scalar::Uint(match int_ty.bitness {
+ IntBitness::Xsize => UintTy::Usize,
+ IntBitness::X8 => UintTy::U8,
+ IntBitness::X16 => UintTy::U16,
+ IntBitness::X32 => UintTy::U32,
+ IntBitness::X64 => UintTy::U64,
+ IntBitness::X128 => UintTy::U128,
+ }),
+ }
+}
+
+impl ToChalk for Mutability {
+ type Chalk = chalk_ir::Mutability;
+ fn to_chalk(self, _db: &dyn HirDatabase) -> Self::Chalk {
+ match self {
+ Mutability::Shared => chalk_ir::Mutability::Not,
+ Mutability::Mut => chalk_ir::Mutability::Mut,
+ }
+ }
+ fn from_chalk(_db: &dyn HirDatabase, chalk: Self::Chalk) -> Self {
+ match chalk {
+ chalk_ir::Mutability::Mut => Mutability::Mut,
+ chalk_ir::Mutability::Not => Mutability::Shared,
+ }
+ }
+}
+
+impl ToChalk for hir_def::ImplId {
+ type Chalk = ImplId;
+
+ fn to_chalk(self, _db: &dyn HirDatabase) -> ImplId {
+ chalk_ir::ImplId(self.as_intern_id())
+ }
+
+ fn from_chalk(_db: &dyn HirDatabase, impl_id: ImplId) -> hir_def::ImplId {
+ InternKey::from_intern_id(impl_id.0)
+ }
+}
+
+impl ToChalk for CallableDefId {
+ type Chalk = FnDefId;
+
+ fn to_chalk(self, db: &dyn HirDatabase) -> FnDefId {
+ db.intern_callable_def(self).into()
+ }
+
+ fn from_chalk(db: &dyn HirDatabase, fn_def_id: FnDefId) -> CallableDefId {
+ db.lookup_intern_callable_def(fn_def_id.into())
+ }
+}
+
+impl ToChalk for TypeAliasId {
+ type Chalk = AssocTypeId;
+
+ fn to_chalk(self, _db: &dyn HirDatabase) -> AssocTypeId {
+ chalk_ir::AssocTypeId(self.as_intern_id())
+ }
+
+ fn from_chalk(_db: &dyn HirDatabase, type_alias_id: AssocTypeId) -> TypeAliasId {
+ InternKey::from_intern_id(type_alias_id.0)
+ }
+}
+
+pub struct TypeAliasAsValue(pub TypeAliasId);
+
+impl ToChalk for TypeAliasAsValue {
+ type Chalk = AssociatedTyValueId;
+
+ fn to_chalk(self, _db: &dyn HirDatabase) -> AssociatedTyValueId {
+ rust_ir::AssociatedTyValueId(self.0.as_intern_id())
+ }
+
+ fn from_chalk(
+ _db: &dyn HirDatabase,
+ assoc_ty_value_id: AssociatedTyValueId,
+ ) -> TypeAliasAsValue {
+ TypeAliasAsValue(TypeAliasId::from_intern_id(assoc_ty_value_id.0))
+ }
+}
+
+impl ToChalk for GenericPredicate {
+ type Chalk = chalk_ir::QuantifiedWhereClause<Interner>;
+
+ fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::QuantifiedWhereClause<Interner> {
+ match self {
+ GenericPredicate::Implemented(trait_ref) => {
+ let chalk_trait_ref = trait_ref.to_chalk(db);
+ let chalk_trait_ref = chalk_trait_ref.shifted_in(&Interner);
+ make_binders(chalk_ir::WhereClause::Implemented(chalk_trait_ref), 0)
+ }
+ GenericPredicate::Projection(projection_pred) => {
+ let ty = projection_pred.ty.to_chalk(db).shifted_in(&Interner);
+ let projection = projection_pred.projection_ty.to_chalk(db).shifted_in(&Interner);
+ let alias = chalk_ir::AliasTy::Projection(projection);
+ make_binders(chalk_ir::WhereClause::AliasEq(chalk_ir::AliasEq { alias, ty }), 0)
+ }
+ GenericPredicate::Error => panic!("tried passing GenericPredicate::Error to Chalk"),
+ }
+ }
+
+ fn from_chalk(
+ db: &dyn HirDatabase,
+ where_clause: chalk_ir::QuantifiedWhereClause<Interner>,
+ ) -> GenericPredicate {
+ // we don't produce any where clauses with binders and can't currently deal with them
+ match where_clause
+ .skip_binders()
+ .shifted_out(&Interner)
+ .expect("unexpected bound vars in where clause")
+ {
+ chalk_ir::WhereClause::Implemented(tr) => {
+ GenericPredicate::Implemented(from_chalk(db, tr))
+ }
+ chalk_ir::WhereClause::AliasEq(projection_eq) => {
+ let projection_ty = from_chalk(
+ db,
+ match projection_eq.alias {
+ chalk_ir::AliasTy::Projection(p) => p,
+ _ => unimplemented!(),
+ },
+ );
+ let ty = from_chalk(db, projection_eq.ty);
+ GenericPredicate::Projection(ProjectionPredicate { projection_ty, ty })
+ }
+
+ chalk_ir::WhereClause::LifetimeOutlives(_) => {
+ // we shouldn't get these from Chalk
+ panic!("encountered LifetimeOutlives from Chalk")
+ }
+
+ chalk_ir::WhereClause::TypeOutlives(_) => {
+ // we shouldn't get these from Chalk
+ panic!("encountered TypeOutlives from Chalk")
+ }
+ }
+ }
+}
+
+impl ToChalk for ProjectionTy {
+ type Chalk = chalk_ir::ProjectionTy<Interner>;
+
+ fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::ProjectionTy<Interner> {
+ chalk_ir::ProjectionTy {
+ associated_ty_id: self.associated_ty.to_chalk(db),
+ substitution: self.parameters.to_chalk(db),
+ }
+ }
+
+ fn from_chalk(
+ db: &dyn HirDatabase,
+ projection_ty: chalk_ir::ProjectionTy<Interner>,
+ ) -> ProjectionTy {
+ ProjectionTy {
+ associated_ty: from_chalk(db, projection_ty.associated_ty_id),
+ parameters: from_chalk(db, projection_ty.substitution),
+ }
+ }
+}
+
+impl ToChalk for ProjectionPredicate {
+ type Chalk = chalk_ir::AliasEq<Interner>;
+
+ fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::AliasEq<Interner> {
+ chalk_ir::AliasEq {
+ alias: chalk_ir::AliasTy::Projection(self.projection_ty.to_chalk(db)),
+ ty: self.ty.to_chalk(db),
+ }
+ }
+
+ fn from_chalk(_db: &dyn HirDatabase, _normalize: chalk_ir::AliasEq<Interner>) -> Self {
+ unimplemented!()
+ }
+}
+
+impl ToChalk for Obligation {
+ type Chalk = chalk_ir::DomainGoal<Interner>;
+
+ fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::DomainGoal<Interner> {
+ match self {
+ Obligation::Trait(tr) => tr.to_chalk(db).cast(&Interner),
+ Obligation::Projection(pr) => pr.to_chalk(db).cast(&Interner),
+ }
+ }
+
+ fn from_chalk(_db: &dyn HirDatabase, _goal: chalk_ir::DomainGoal<Interner>) -> Self {
+ unimplemented!()
+ }
+}
+
+impl<T> ToChalk for Canonical<T>
+where
+ T: ToChalk,
+ T::Chalk: HasInterner<Interner = Interner>,
+{
+ type Chalk = chalk_ir::Canonical<T::Chalk>;
+
+ fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Canonical<T::Chalk> {
+ let kinds = self
+ .kinds
+ .iter()
+ .map(|k| match k {
+ TyKind::General => chalk_ir::TyKind::General,
+ TyKind::Integer => chalk_ir::TyKind::Integer,
+ TyKind::Float => chalk_ir::TyKind::Float,
+ })
+ .map(|tk| {
+ chalk_ir::CanonicalVarKind::new(
+ chalk_ir::VariableKind::Ty(tk),
+ chalk_ir::UniverseIndex::ROOT,
+ )
+ });
+ let value = self.value.to_chalk(db);
+ chalk_ir::Canonical {
+ value,
+ binders: chalk_ir::CanonicalVarKinds::from_iter(&Interner, kinds),
+ }
+ }
+
+ fn from_chalk(db: &dyn HirDatabase, canonical: chalk_ir::Canonical<T::Chalk>) -> Canonical<T> {
+ let kinds = canonical
+ .binders
+ .iter(&Interner)
+ .map(|k| match k.kind {
+ chalk_ir::VariableKind::Ty(tk) => match tk {
+ chalk_ir::TyKind::General => TyKind::General,
+ chalk_ir::TyKind::Integer => TyKind::Integer,
+ chalk_ir::TyKind::Float => TyKind::Float,
+ },
+ chalk_ir::VariableKind::Lifetime => panic!("unexpected lifetime from Chalk"),
+ chalk_ir::VariableKind::Const(_) => panic!("unexpected const from Chalk"),
+ })
+ .collect();
+ Canonical { kinds, value: from_chalk(db, canonical.value) }
+ }
+}
+
+impl ToChalk for Arc<TraitEnvironment> {
+ type Chalk = chalk_ir::Environment<Interner>;
+
+ fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Environment<Interner> {
+ let mut clauses = Vec::new();
+ for pred in &self.predicates {
+ if pred.is_error() {
+ // for env, we just ignore errors
+ continue;
+ }
+ let program_clause: chalk_ir::ProgramClause<Interner> =
+ pred.clone().to_chalk(db).cast(&Interner);
+ clauses.push(program_clause.into_from_env_clause(&Interner));
+ }
+ chalk_ir::Environment::new(&Interner).add_clauses(&Interner, clauses)
+ }
+
+ fn from_chalk(
+ _db: &dyn HirDatabase,
+ _env: chalk_ir::Environment<Interner>,
+ ) -> Arc<TraitEnvironment> {
+ unimplemented!()
+ }
+}
+
+impl<T: ToChalk> ToChalk for InEnvironment<T>
+where
+ T::Chalk: chalk_ir::interner::HasInterner<Interner = Interner>,
+{
+ type Chalk = chalk_ir::InEnvironment<T::Chalk>;
+
+ fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::InEnvironment<T::Chalk> {
+ chalk_ir::InEnvironment {
+ environment: self.environment.to_chalk(db),
+ goal: self.value.to_chalk(db),
+ }
+ }
+
+ fn from_chalk(
+ db: &dyn HirDatabase,
+ in_env: chalk_ir::InEnvironment<T::Chalk>,
+ ) -> InEnvironment<T> {
+ InEnvironment {
+ environment: from_chalk(db, in_env.environment),
+ value: from_chalk(db, in_env.goal),
+ }
+ }
+}
+
+pub(super) fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T>
+where
+ T: HasInterner<Interner = Interner>,
+{
+ chalk_ir::Binders::new(
+ chalk_ir::VariableKinds::from_iter(
+ &Interner,
+ std::iter::repeat(chalk_ir::VariableKind::Ty(chalk_ir::TyKind::General)).take(num_vars),
+ ),
+ value,
+ )
+}
+
+pub(super) fn convert_where_clauses(
+ db: &dyn HirDatabase,
+ def: GenericDefId,
+ substs: &Substs,
+) -> Vec<chalk_ir::QuantifiedWhereClause<Interner>> {
+ let generic_predicates = db.generic_predicates(def);
+ let mut result = Vec::with_capacity(generic_predicates.len());
+ for pred in generic_predicates.iter() {
+ if pred.value.is_error() {
+ // skip errored predicates completely
+ continue;
+ }
+ result.push(pred.clone().subst(substs).to_chalk(db));
+ }
+ result
+}
+
+pub(super) fn generic_predicate_to_inline_bound(
+ db: &dyn HirDatabase,
+ pred: &GenericPredicate,
+ self_ty: &Ty,
+) -> Option<rust_ir::InlineBound<Interner>> {
+ // An InlineBound is like a GenericPredicate, except the self type is left out.
+ // We don't have a special type for this, but Chalk does.
+ match pred {
+ GenericPredicate::Implemented(trait_ref) => {
+ if &trait_ref.substs[0] != self_ty {
+ // we can only convert predicates back to type bounds if they
+ // have the expected self type
+ return None;
+ }
+ let args_no_self = trait_ref.substs[1..]
+ .iter()
+ .map(|ty| ty.clone().to_chalk(db).cast(&Interner))
+ .collect();
+ let trait_bound =
+ rust_ir::TraitBound { trait_id: trait_ref.trait_.to_chalk(db), args_no_self };
+ Some(rust_ir::InlineBound::TraitBound(trait_bound))
+ }
+ GenericPredicate::Projection(proj) => {
+ if &proj.projection_ty.parameters[0] != self_ty {
+ return None;
+ }
+ let trait_ = match proj.projection_ty.associated_ty.lookup(db.upcast()).container {
+ AssocContainerId::TraitId(t) => t,
+ _ => panic!("associated type not in trait"),
+ };
+ let args_no_self = proj.projection_ty.parameters[1..]
+ .iter()
+ .map(|ty| ty.clone().to_chalk(db).cast(&Interner))
+ .collect();
+ let alias_eq_bound = rust_ir::AliasEqBound {
+ value: proj.ty.clone().to_chalk(db),
+ trait_bound: rust_ir::TraitBound { trait_id: trait_.to_chalk(db), args_no_self },
+ associated_ty_id: proj.projection_ty.associated_ty.to_chalk(db),
+ parameters: Vec::new(), // FIXME we don't support generic associated types yet
+ };
+ Some(rust_ir::InlineBound::AliasEqBound(alias_eq_bound))
+ }
+ GenericPredicate::Error => None,
+ }
+}
--- /dev/null
+//! Implementation of Chalk debug helper functions using TLS.
+use std::fmt;
+
+use chalk_ir::{AliasTy, GenericArg, Goal, Goals, Lifetime, ProgramClauseImplication, TypeName};
+use itertools::Itertools;
+
+use super::{from_chalk, Interner};
+use crate::{db::HirDatabase, CallableDefId, TypeCtor};
+use hir_def::{AdtId, AssocContainerId, DefWithBodyId, Lookup, TypeAliasId};
+
+pub use unsafe_tls::{set_current_program, with_current_program};
+
+pub struct DebugContext<'a>(&'a dyn HirDatabase);
+
+impl DebugContext<'_> {
+ pub fn debug_struct_id(
+ &self,
+ id: super::AdtId,
+ f: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let type_ctor: TypeCtor = from_chalk(self.0, TypeName::Adt(id));
+ match type_ctor {
+ TypeCtor::Bool => write!(f, "bool")?,
+ TypeCtor::Char => write!(f, "char")?,
+ TypeCtor::Int(t) => write!(f, "{}", t)?,
+ TypeCtor::Float(t) => write!(f, "{}", t)?,
+ TypeCtor::Str => write!(f, "str")?,
+ TypeCtor::Slice => write!(f, "slice")?,
+ TypeCtor::Array => write!(f, "array")?,
+ TypeCtor::RawPtr(m) => write!(f, "*{}", m.as_keyword_for_ptr())?,
+ TypeCtor::Ref(m) => write!(f, "&{}", m.as_keyword_for_ref())?,
+ TypeCtor::Never => write!(f, "!")?,
+ TypeCtor::Tuple { .. } => {
+ write!(f, "()")?;
+ }
+ TypeCtor::FnPtr { .. } => {
+ write!(f, "fn")?;
+ }
+ TypeCtor::FnDef(def) => {
+ let name = match def {
+ CallableDefId::FunctionId(ff) => self.0.function_data(ff).name.clone(),
+ CallableDefId::StructId(s) => self.0.struct_data(s).name.clone(),
+ CallableDefId::EnumVariantId(e) => {
+ let enum_data = self.0.enum_data(e.parent);
+ enum_data.variants[e.local_id].name.clone()
+ }
+ };
+ match def {
+ CallableDefId::FunctionId(_) => write!(f, "{{fn {}}}", name)?,
+ CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {
+ write!(f, "{{ctor {}}}", name)?
+ }
+ }
+ }
+ TypeCtor::Adt(def_id) => {
+ let name = match def_id {
+ AdtId::StructId(it) => self.0.struct_data(it).name.clone(),
+ AdtId::UnionId(it) => self.0.union_data(it).name.clone(),
+ AdtId::EnumId(it) => self.0.enum_data(it).name.clone(),
+ };
+ write!(f, "{}", name)?;
+ }
+ TypeCtor::AssociatedType(type_alias) => {
+ let trait_ = match type_alias.lookup(self.0.upcast()).container {
+ AssocContainerId::TraitId(it) => it,
+ _ => panic!("not an associated type"),
+ };
+ let trait_name = self.0.trait_data(trait_).name.clone();
+ let name = self.0.type_alias_data(type_alias).name.clone();
+ write!(f, "{}::{}", trait_name, name)?;
+ }
+ TypeCtor::OpaqueType(opaque_ty_id) => match opaque_ty_id {
+ crate::OpaqueTyId::ReturnTypeImplTrait(func, idx) => {
+ write!(f, "{{impl trait {} of {:?}}}", idx, func)?;
+ }
+ },
+ TypeCtor::Closure { def, expr } => {
+ write!(f, "{{closure {:?} in ", expr.into_raw())?;
+ match def {
+ DefWithBodyId::FunctionId(func) => {
+ write!(f, "fn {}", self.0.function_data(func).name)?
+ }
+ DefWithBodyId::StaticId(s) => {
+ if let Some(name) = self.0.static_data(s).name.as_ref() {
+ write!(f, "body of static {}", name)?;
+ } else {
+ write!(f, "body of unnamed static {:?}", s)?;
+ }
+ }
+ DefWithBodyId::ConstId(c) => {
+ if let Some(name) = self.0.const_data(c).name.as_ref() {
+ write!(f, "body of const {}", name)?;
+ } else {
+ write!(f, "body of unnamed const {:?}", c)?;
+ }
+ }
+ };
+ write!(f, "}}")?;
+ }
+ }
+ Ok(())
+ }
+
+ pub fn debug_trait_id(
+ &self,
+ id: super::TraitId,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let trait_: hir_def::TraitId = from_chalk(self.0, id);
+ let trait_data = self.0.trait_data(trait_);
+ write!(fmt, "{}", trait_data.name)
+ }
+
+ pub fn debug_assoc_type_id(
+ &self,
+ id: super::AssocTypeId,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let type_alias: TypeAliasId = from_chalk(self.0, id);
+ let type_alias_data = self.0.type_alias_data(type_alias);
+ let trait_ = match type_alias.lookup(self.0.upcast()).container {
+ AssocContainerId::TraitId(t) => t,
+ _ => panic!("associated type not in trait"),
+ };
+ let trait_data = self.0.trait_data(trait_);
+ write!(fmt, "{}::{}", trait_data.name, type_alias_data.name)
+ }
+
+ pub fn debug_opaque_ty_id(
+ &self,
+ opaque_ty_id: chalk_ir::OpaqueTyId<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ fmt.debug_struct("OpaqueTyId").field("index", &opaque_ty_id.0).finish()
+ }
+
+ pub fn debug_alias(
+ &self,
+ alias_ty: &AliasTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ match alias_ty {
+ AliasTy::Projection(projection_ty) => self.debug_projection_ty(projection_ty, fmt),
+ AliasTy::Opaque(opaque_ty) => self.debug_opaque_ty(opaque_ty, fmt),
+ }
+ }
+
+ pub fn debug_projection_ty(
+ &self,
+ projection_ty: &chalk_ir::ProjectionTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let type_alias: TypeAliasId = from_chalk(self.0, projection_ty.associated_ty_id);
+ let type_alias_data = self.0.type_alias_data(type_alias);
+ let trait_ = match type_alias.lookup(self.0.upcast()).container {
+ AssocContainerId::TraitId(t) => t,
+ _ => panic!("associated type not in trait"),
+ };
+ let trait_data = self.0.trait_data(trait_);
+ let params = projection_ty.substitution.as_slice(&Interner);
+ write!(fmt, "<{:?} as {}", &params[0], trait_data.name,)?;
+ if params.len() > 1 {
+ write!(
+ fmt,
+ "<{}>",
+ &params[1..].iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))),
+ )?;
+ }
+ write!(fmt, ">::{}", type_alias_data.name)
+ }
+
+ pub fn debug_opaque_ty(
+ &self,
+ opaque_ty: &chalk_ir::OpaqueTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ write!(fmt, "{:?}", opaque_ty.opaque_ty_id)
+ }
+
+ pub fn debug_ty(
+ &self,
+ ty: &chalk_ir::Ty<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ write!(fmt, "{:?}", ty.data(&Interner))
+ }
+
+ pub fn debug_lifetime(
+ &self,
+ lifetime: &Lifetime<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ write!(fmt, "{:?}", lifetime.data(&Interner))
+ }
+
+ pub fn debug_generic_arg(
+ &self,
+ parameter: &GenericArg<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ write!(fmt, "{:?}", parameter.data(&Interner).inner_debug())
+ }
+
+ pub fn debug_goal(
+ &self,
+ goal: &Goal<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let goal_data = goal.data(&Interner);
+ write!(fmt, "{:?}", goal_data)
+ }
+
+ pub fn debug_goals(
+ &self,
+ goals: &Goals<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ write!(fmt, "{:?}", goals.debug(&Interner))
+ }
+
+ pub fn debug_program_clause_implication(
+ &self,
+ pci: &ProgramClauseImplication<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ write!(fmt, "{:?}", pci.debug(&Interner))
+ }
+
+ pub fn debug_application_ty(
+ &self,
+ application_ty: &chalk_ir::ApplicationTy<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ write!(fmt, "{:?}", application_ty.debug(&Interner))
+ }
+
+ pub fn debug_substitution(
+ &self,
+ substitution: &chalk_ir::Substitution<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ write!(fmt, "{:?}", substitution.debug(&Interner))
+ }
+
+ pub fn debug_separator_trait_ref(
+ &self,
+ separator_trait_ref: &chalk_ir::SeparatorTraitRef<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ write!(fmt, "{:?}", separator_trait_ref.debug(&Interner))
+ }
+
+ pub fn debug_fn_def_id(
+ &self,
+ fn_def_id: chalk_ir::FnDefId<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> Result<(), fmt::Error> {
+ let def: CallableDefId = from_chalk(self.0, fn_def_id);
+ let name = match def {
+ CallableDefId::FunctionId(ff) => self.0.function_data(ff).name.clone(),
+ CallableDefId::StructId(s) => self.0.struct_data(s).name.clone(),
+ CallableDefId::EnumVariantId(e) => {
+ let enum_data = self.0.enum_data(e.parent);
+ enum_data.variants[e.local_id].name.clone()
+ }
+ };
+ match def {
+ CallableDefId::FunctionId(_) => write!(fmt, "{{fn {}}}", name),
+ CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {
+ write!(fmt, "{{ctor {}}}", name)
+ }
+ }
+ }
+
+ pub fn debug_const(
+ &self,
+ _constant: &chalk_ir::Const<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> fmt::Result {
+ write!(fmt, "const")
+ }
+
+ pub fn debug_variable_kinds(
+ &self,
+ variable_kinds: &chalk_ir::VariableKinds<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> fmt::Result {
+ write!(fmt, "{:?}", variable_kinds.as_slice(&Interner))
+ }
+ pub fn debug_variable_kinds_with_angles(
+ &self,
+ variable_kinds: &chalk_ir::VariableKinds<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> fmt::Result {
+ write!(fmt, "{:?}", variable_kinds.inner_debug(&Interner))
+ }
+ pub fn debug_canonical_var_kinds(
+ &self,
+ canonical_var_kinds: &chalk_ir::CanonicalVarKinds<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> fmt::Result {
+ write!(fmt, "{:?}", canonical_var_kinds.as_slice(&Interner))
+ }
+ pub fn debug_program_clause(
+ &self,
+ clause: &chalk_ir::ProgramClause<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> fmt::Result {
+ write!(fmt, "{:?}", clause.data(&Interner))
+ }
+ pub fn debug_program_clauses(
+ &self,
+ clauses: &chalk_ir::ProgramClauses<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> fmt::Result {
+ write!(fmt, "{:?}", clauses.as_slice(&Interner))
+ }
+ pub fn debug_quantified_where_clauses(
+ &self,
+ clauses: &chalk_ir::QuantifiedWhereClauses<Interner>,
+ fmt: &mut fmt::Formatter<'_>,
+ ) -> fmt::Result {
+ write!(fmt, "{:?}", clauses.as_slice(&Interner))
+ }
+}
+
+mod unsafe_tls {
+ use super::DebugContext;
+ use crate::db::HirDatabase;
+ use scoped_tls::scoped_thread_local;
+
+ scoped_thread_local!(static PROGRAM: DebugContext);
+
+ pub fn with_current_program<R>(
+ op: impl for<'a> FnOnce(Option<&'a DebugContext<'a>>) -> R,
+ ) -> R {
+ if PROGRAM.is_set() {
+ PROGRAM.with(|prog| op(Some(prog)))
+ } else {
+ op(None)
+ }
+ }
+
+ pub fn set_current_program<OP, R>(p: &dyn HirDatabase, op: OP) -> R
+ where
+ OP: FnOnce() -> R,
+ {
+ let ctx = DebugContext(p);
+ // we're transmuting the lifetime in the DebugContext to static. This is
+ // fine because we only keep the reference for the lifetime of this
+ // function, *and* the only way to access the context is through
+ // `with_current_program`, which hides the lifetime through the `for`
+ // type.
+ let static_p: &DebugContext<'static> =
+ unsafe { std::mem::transmute::<&DebugContext, &DebugContext<'static>>(&ctx) };
+ PROGRAM.set(static_p, || op())
+ }
+}
--- /dev/null
+//! Helper functions for working with def, which don't need to be a separate
+//! query, but can't be computed directly from `*Data` (ie, which need a `db`).
+use std::sync::Arc;
+
+use hir_def::generics::WherePredicateTarget;
+use hir_def::{
+ adt::VariantData,
+ db::DefDatabase,
+ generics::{GenericParams, TypeParamData, TypeParamProvenance},
+ path::Path,
+ resolver::{HasResolver, TypeNs},
+ type_ref::TypeRef,
+ AssocContainerId, GenericDefId, Lookup, TraitId, TypeAliasId, TypeParamId, VariantId,
+};
+use hir_expand::name::{name, Name};
+
+use crate::{db::HirDatabase, GenericPredicate, TraitRef};
+
+fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> Vec<TraitId> {
+ let resolver = trait_.resolver(db);
+ // returning the iterator directly doesn't easily work because of
+ // lifetime problems, but since there usually shouldn't be more than a
+ // few direct traits this should be fine (we could even use some kind of
+ // SmallVec if performance is a concern)
+ let generic_params = db.generic_params(trait_.into());
+ let trait_self = generic_params.find_trait_self_param();
+ generic_params
+ .where_predicates
+ .iter()
+ .filter_map(|pred| match &pred.target {
+ WherePredicateTarget::TypeRef(TypeRef::Path(p)) if p == &Path::from(name![Self]) => {
+ pred.bound.as_path()
+ }
+ WherePredicateTarget::TypeParam(local_id) if Some(*local_id) == trait_self => {
+ pred.bound.as_path()
+ }
+ _ => None,
+ })
+ .filter_map(|path| match resolver.resolve_path_in_type_ns_fully(db, path.mod_path()) {
+ Some(TypeNs::TraitId(t)) => Some(t),
+ _ => None,
+ })
+ .collect()
+}
+
+fn direct_super_trait_refs(db: &dyn HirDatabase, trait_ref: &TraitRef) -> Vec<TraitRef> {
+ // returning the iterator directly doesn't easily work because of
+ // lifetime problems, but since there usually shouldn't be more than a
+ // few direct traits this should be fine (we could even use some kind of
+ // SmallVec if performance is a concern)
+ let generic_params = db.generic_params(trait_ref.trait_.into());
+ let trait_self = match generic_params.find_trait_self_param() {
+ Some(p) => TypeParamId { parent: trait_ref.trait_.into(), local_id: p },
+ None => return Vec::new(),
+ };
+ db.generic_predicates_for_param(trait_self)
+ .iter()
+ .filter_map(|pred| {
+ pred.as_ref().filter_map(|pred| match pred {
+ GenericPredicate::Implemented(tr) => Some(tr.clone()),
+ _ => None,
+ })
+ })
+ .map(|pred| pred.subst(&trait_ref.substs))
+ .collect()
+}
+
+/// Returns an iterator over the whole super trait hierarchy (including the
+/// trait itself).
+pub(super) fn all_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> Vec<TraitId> {
+ // we need to take care a bit here to avoid infinite loops in case of cycles
+ // (i.e. if we have `trait A: B; trait B: A;`)
+ let mut result = vec![trait_];
+ let mut i = 0;
+ while i < result.len() {
+ let t = result[i];
+ // yeah this is quadratic, but trait hierarchies should be flat
+ // enough that this doesn't matter
+ for tt in direct_super_traits(db, t) {
+ if !result.contains(&tt) {
+ result.push(tt);
+ }
+ }
+ i += 1;
+ }
+ result
+}
+
+/// Given a trait ref (`Self: Trait`), builds all the implied trait refs for
+/// super traits. The original trait ref will be included. So the difference to
+/// `all_super_traits` is that we keep track of type parameters; for example if
+/// we have `Self: Trait<u32, i32>` and `Trait<T, U>: OtherTrait<U>` we'll get
+/// `Self: OtherTrait<i32>`.
+pub(super) fn all_super_trait_refs(db: &dyn HirDatabase, trait_ref: TraitRef) -> Vec<TraitRef> {
+ // we need to take care a bit here to avoid infinite loops in case of cycles
+ // (i.e. if we have `trait A: B; trait B: A;`)
+ let mut result = vec![trait_ref];
+ let mut i = 0;
+ while i < result.len() {
+ let t = &result[i];
+ // yeah this is quadratic, but trait hierarchies should be flat
+ // enough that this doesn't matter
+ for tt in direct_super_trait_refs(db, t) {
+ if !result.iter().any(|tr| tr.trait_ == tt.trait_) {
+ result.push(tt);
+ }
+ }
+ i += 1;
+ }
+ result
+}
+
+pub(super) fn associated_type_by_name_including_super_traits(
+ db: &dyn HirDatabase,
+ trait_ref: TraitRef,
+ name: &Name,
+) -> Option<(TraitRef, TypeAliasId)> {
+ all_super_trait_refs(db, trait_ref).into_iter().find_map(|t| {
+ let assoc_type = db.trait_data(t.trait_).associated_type_by_name(name)?;
+ Some((t, assoc_type))
+ })
+}
+
+pub(super) fn variant_data(db: &dyn DefDatabase, var: VariantId) -> Arc<VariantData> {
+ match var {
+ VariantId::StructId(it) => db.struct_data(it).variant_data.clone(),
+ VariantId::UnionId(it) => db.union_data(it).variant_data.clone(),
+ VariantId::EnumVariantId(it) => {
+ db.enum_data(it.parent).variants[it.local_id].variant_data.clone()
+ }
+ }
+}
+
+/// Helper for mutating `Arc<[T]>` (i.e. `Arc::make_mut` for Arc slices).
+/// The underlying values are cloned if there are other strong references.
+pub(crate) fn make_mut_slice<T: Clone>(a: &mut Arc<[T]>) -> &mut [T] {
+ if Arc::get_mut(a).is_none() {
+ *a = a.iter().cloned().collect();
+ }
+ Arc::get_mut(a).unwrap()
+}
+
+pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
+ let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def)));
+ Generics { def, params: db.generic_params(def), parent_generics }
+}
+
+#[derive(Debug)]
+pub(crate) struct Generics {
+ def: GenericDefId,
+ pub(crate) params: Arc<GenericParams>,
+ parent_generics: Option<Box<Generics>>,
+}
+
+impl Generics {
+ pub(crate) fn iter<'a>(
+ &'a self,
+ ) -> impl Iterator<Item = (TypeParamId, &'a TypeParamData)> + 'a {
+ self.parent_generics
+ .as_ref()
+ .into_iter()
+ .flat_map(|it| {
+ it.params
+ .types
+ .iter()
+ .map(move |(local_id, p)| (TypeParamId { parent: it.def, local_id }, p))
+ })
+ .chain(
+ self.params
+ .types
+ .iter()
+ .map(move |(local_id, p)| (TypeParamId { parent: self.def, local_id }, p)),
+ )
+ }
+
+ pub(crate) fn iter_parent<'a>(
+ &'a self,
+ ) -> impl Iterator<Item = (TypeParamId, &'a TypeParamData)> + 'a {
+ self.parent_generics.as_ref().into_iter().flat_map(|it| {
+ it.params
+ .types
+ .iter()
+ .map(move |(local_id, p)| (TypeParamId { parent: it.def, local_id }, p))
+ })
+ }
+
+ pub(crate) fn len(&self) -> usize {
+ self.len_split().0
+ }
+
+ /// (total, parents, child)
+ pub(crate) fn len_split(&self) -> (usize, usize, usize) {
+ let parent = self.parent_generics.as_ref().map_or(0, |p| p.len());
+ let child = self.params.types.len();
+ (parent + child, parent, child)
+ }
+
+ /// (parent total, self param, type param list, impl trait)
+ pub(crate) fn provenance_split(&self) -> (usize, usize, usize, usize) {
+ let parent = self.parent_generics.as_ref().map_or(0, |p| p.len());
+ let self_params = self
+ .params
+ .types
+ .iter()
+ .filter(|(_, p)| p.provenance == TypeParamProvenance::TraitSelf)
+ .count();
+ let list_params = self
+ .params
+ .types
+ .iter()
+ .filter(|(_, p)| p.provenance == TypeParamProvenance::TypeParamList)
+ .count();
+ let impl_trait_params = self
+ .params
+ .types
+ .iter()
+ .filter(|(_, p)| p.provenance == TypeParamProvenance::ArgumentImplTrait)
+ .count();
+ (parent, self_params, list_params, impl_trait_params)
+ }
+
+ pub(crate) fn param_idx(&self, param: TypeParamId) -> Option<usize> {
+ Some(self.find_param(param)?.0)
+ }
+
+ fn find_param(&self, param: TypeParamId) -> Option<(usize, &TypeParamData)> {
+ if param.parent == self.def {
+ let (idx, (_local_id, data)) = self
+ .params
+ .types
+ .iter()
+ .enumerate()
+ .find(|(_, (idx, _))| *idx == param.local_id)
+ .unwrap();
+ let (_total, parent_len, _child) = self.len_split();
+ Some((parent_len + idx, data))
+ } else {
+ self.parent_generics.as_ref().and_then(|g| g.find_param(param))
+ }
+ }
+}
+
+fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<GenericDefId> {
+ let container = match def {
+ GenericDefId::FunctionId(it) => it.lookup(db).container,
+ GenericDefId::TypeAliasId(it) => it.lookup(db).container,
+ GenericDefId::ConstId(it) => it.lookup(db).container,
+ GenericDefId::EnumVariantId(it) => return Some(it.parent.into()),
+ GenericDefId::AdtId(_) | GenericDefId::TraitId(_) | GenericDefId::ImplId(_) => return None,
+ };
+
+ match container {
+ AssocContainerId::ImplId(it) => Some(it.into()),
+ AssocContainerId::TraitId(it) => Some(it.into()),
+ AssocContainerId::ContainerId(_) => None,
+ }
+}
profile = { path = "../profile" }
hir_expand = { path = "../hir_expand" }
hir_def = { path = "../hir_def" }
-hir_ty = { path = "../ra_hir_ty", package = "ra_hir_ty" }
+hir_ty = { path = "../hir_ty" }
+++ /dev/null
-[package]
-edition = "2018"
-name = "ra_hir_ty"
-version = "0.1.0"
-authors = ["rust-analyzer developers"]
-license = "MIT OR Apache-2.0"
-
-[lib]
-doctest = false
-
-[dependencies]
-itertools = "0.9.0"
-arrayvec = "0.5.1"
-smallvec = "1.2.0"
-ena = "0.14.0"
-log = "0.4.8"
-rustc-hash = "1.1.0"
-
-stdx = { path = "../stdx" }
-
-hir_def = { path = "../hir_def" }
-hir_expand = { path = "../hir_expand" }
-arena = { path = "../arena" }
-base_db = { path = "../base_db" }
-profile = { path = "../profile" }
-syntax = { path = "../syntax" }
-test_utils = { path = "../test_utils" }
-
-scoped-tls = "1"
-
-chalk-solve = { version = "0.21.0" }
-chalk-ir = { version = "0.21.0" }
-chalk-recursive = { version = "0.21.0" }
-
-[dev-dependencies]
-expect = { path = "../expect" }
-
-tracing = "0.1"
-tracing-subscriber = { version = "0.2", default-features = false, features = ["env-filter", "registry"] }
-tracing-tree = { version = "0.1.4" }
+++ /dev/null
-//! In certain situations, rust automatically inserts derefs as necessary: for
-//! example, field accesses `foo.bar` still work when `foo` is actually a
-//! reference to a type with the field `bar`. This is an approximation of the
-//! logic in rustc (which lives in librustc_typeck/check/autoderef.rs).
-
-use std::iter::successors;
-
-use base_db::CrateId;
-use hir_def::lang_item::LangItemTarget;
-use hir_expand::name::name;
-use log::{info, warn};
-
-use crate::{
- db::HirDatabase,
- traits::{InEnvironment, Solution},
- utils::generics,
- BoundVar, Canonical, DebruijnIndex, Obligation, Substs, TraitRef, Ty,
-};
-
-const AUTODEREF_RECURSION_LIMIT: usize = 10;
-
-pub fn autoderef<'a>(
- db: &'a dyn HirDatabase,
- krate: Option<CrateId>,
- ty: InEnvironment<Canonical<Ty>>,
-) -> impl Iterator<Item = Canonical<Ty>> + 'a {
- let InEnvironment { value: ty, environment } = ty;
- successors(Some(ty), move |ty| {
- deref(db, krate?, InEnvironment { value: ty, environment: environment.clone() })
- })
- .take(AUTODEREF_RECURSION_LIMIT)
-}
-
-pub(crate) fn deref(
- db: &dyn HirDatabase,
- krate: CrateId,
- ty: InEnvironment<&Canonical<Ty>>,
-) -> Option<Canonical<Ty>> {
- if let Some(derefed) = ty.value.value.builtin_deref() {
- Some(Canonical { value: derefed, kinds: ty.value.kinds.clone() })
- } else {
- deref_by_trait(db, krate, ty)
- }
-}
-
-fn deref_by_trait(
- db: &dyn HirDatabase,
- krate: CrateId,
- ty: InEnvironment<&Canonical<Ty>>,
-) -> Option<Canonical<Ty>> {
- let deref_trait = match db.lang_item(krate, "deref".into())? {
- LangItemTarget::TraitId(it) => it,
- _ => return None,
- };
- let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?;
-
- let generic_params = generics(db.upcast(), target.into());
- if generic_params.len() != 1 {
- // the Target type + Deref trait should only have one generic parameter,
- // namely Deref's Self type
- return None;
- }
-
- // FIXME make the Canonical / bound var handling nicer
-
- let parameters =
- Substs::build_for_generics(&generic_params).push(ty.value.value.clone()).build();
-
- // Check that the type implements Deref at all
- let trait_ref = TraitRef { trait_: deref_trait, substs: parameters.clone() };
- let implements_goal = Canonical {
- kinds: ty.value.kinds.clone(),
- value: InEnvironment {
- value: Obligation::Trait(trait_ref),
- environment: ty.environment.clone(),
- },
- };
- if db.trait_solve(krate, implements_goal).is_none() {
- return None;
- }
-
- // Now do the assoc type projection
- let projection = super::traits::ProjectionPredicate {
- ty: Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, ty.value.kinds.len())),
- projection_ty: super::ProjectionTy { associated_ty: target, parameters },
- };
-
- let obligation = super::Obligation::Projection(projection);
-
- let in_env = InEnvironment { value: obligation, environment: ty.environment };
-
- let canonical =
- Canonical::new(in_env, ty.value.kinds.iter().copied().chain(Some(super::TyKind::General)));
-
- let solution = db.trait_solve(krate, canonical)?;
-
- match &solution {
- Solution::Unique(vars) => {
- // FIXME: vars may contain solutions for any inference variables
- // that happened to be inside ty. To correctly handle these, we
- // would have to pass the solution up to the inference context, but
- // that requires a larger refactoring (especially if the deref
- // happens during method resolution). So for the moment, we just
- // check that we're not in the situation we're we would actually
- // need to handle the values of the additional variables, i.e.
- // they're just being 'passed through'. In the 'standard' case where
- // we have `impl<T> Deref for Foo<T> { Target = T }`, that should be
- // the case.
-
- // FIXME: if the trait solver decides to truncate the type, these
- // assumptions will be broken. We would need to properly introduce
- // new variables in that case
-
- for i in 1..vars.0.kinds.len() {
- if vars.0.value[i - 1] != Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i - 1))
- {
- warn!("complex solution for derefing {:?}: {:?}, ignoring", ty.value, solution);
- return None;
- }
- }
- Some(Canonical {
- value: vars.0.value[vars.0.value.len() - 1].clone(),
- kinds: vars.0.kinds.clone(),
- })
- }
- Solution::Ambig(_) => {
- info!("Ambiguous solution for derefing {:?}: {:?}", ty.value, solution);
- None
- }
- }
-}
+++ /dev/null
-//! FIXME: write short doc here
-
-use std::sync::Arc;
-
-use arena::map::ArenaMap;
-use base_db::{impl_intern_key, salsa, CrateId, Upcast};
-use hir_def::{
- db::DefDatabase, expr::ExprId, DefWithBodyId, FunctionId, GenericDefId, ImplId, LocalFieldId,
- TypeParamId, VariantId,
-};
-
-use crate::{
- method_resolution::{InherentImpls, TraitImpls},
- traits::chalk,
- Binders, CallableDefId, GenericPredicate, InferenceResult, OpaqueTyId, PolyFnSig,
- ReturnTypeImplTraits, TraitRef, Ty, TyDefId, ValueTyDefId,
-};
-use hir_expand::name::Name;
-
-#[salsa::query_group(HirDatabaseStorage)]
-pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
- #[salsa::invoke(infer_wait)]
- #[salsa::transparent]
- fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
-
- #[salsa::invoke(crate::infer::infer_query)]
- fn infer_query(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
-
- #[salsa::invoke(crate::lower::ty_query)]
- #[salsa::cycle(crate::lower::ty_recover)]
- fn ty(&self, def: TyDefId) -> Binders<Ty>;
-
- #[salsa::invoke(crate::lower::value_ty_query)]
- fn value_ty(&self, def: ValueTyDefId) -> Binders<Ty>;
-
- #[salsa::invoke(crate::lower::impl_self_ty_query)]
- #[salsa::cycle(crate::lower::impl_self_ty_recover)]
- fn impl_self_ty(&self, def: ImplId) -> Binders<Ty>;
-
- #[salsa::invoke(crate::lower::impl_trait_query)]
- fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>;
-
- #[salsa::invoke(crate::lower::field_types_query)]
- fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>>;
-
- #[salsa::invoke(crate::callable_item_sig)]
- fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;
-
- #[salsa::invoke(crate::lower::return_type_impl_traits)]
- fn return_type_impl_traits(
- &self,
- def: FunctionId,
- ) -> Option<Arc<Binders<ReturnTypeImplTraits>>>;
-
- #[salsa::invoke(crate::lower::generic_predicates_for_param_query)]
- #[salsa::cycle(crate::lower::generic_predicates_for_param_recover)]
- fn generic_predicates_for_param(
- &self,
- param_id: TypeParamId,
- ) -> Arc<[Binders<GenericPredicate>]>;
-
- #[salsa::invoke(crate::lower::generic_predicates_query)]
- fn generic_predicates(&self, def: GenericDefId) -> Arc<[Binders<GenericPredicate>]>;
-
- #[salsa::invoke(crate::lower::generic_defaults_query)]
- fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders<Ty>]>;
-
- #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
- fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc<InherentImpls>;
-
- #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)]
- fn trait_impls_in_crate(&self, krate: CrateId) -> Arc<TraitImpls>;
-
- #[salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
- fn trait_impls_in_deps(&self, krate: CrateId) -> Arc<TraitImpls>;
-
- // Interned IDs for Chalk integration
- #[salsa::interned]
- fn intern_callable_def(&self, callable_def: CallableDefId) -> InternedCallableDefId;
- #[salsa::interned]
- fn intern_type_param_id(&self, param_id: TypeParamId) -> GlobalTypeParamId;
- #[salsa::interned]
- fn intern_impl_trait_id(&self, id: OpaqueTyId) -> InternedOpaqueTyId;
- #[salsa::interned]
- fn intern_closure(&self, id: (DefWithBodyId, ExprId)) -> ClosureId;
-
- #[salsa::invoke(chalk::associated_ty_data_query)]
- fn associated_ty_data(&self, id: chalk::AssocTypeId) -> Arc<chalk::AssociatedTyDatum>;
-
- #[salsa::invoke(chalk::trait_datum_query)]
- fn trait_datum(&self, krate: CrateId, trait_id: chalk::TraitId) -> Arc<chalk::TraitDatum>;
-
- #[salsa::invoke(chalk::struct_datum_query)]
- fn struct_datum(&self, krate: CrateId, struct_id: chalk::AdtId) -> Arc<chalk::StructDatum>;
-
- #[salsa::invoke(crate::traits::chalk::impl_datum_query)]
- fn impl_datum(&self, krate: CrateId, impl_id: chalk::ImplId) -> Arc<chalk::ImplDatum>;
-
- #[salsa::invoke(crate::traits::chalk::fn_def_datum_query)]
- fn fn_def_datum(&self, krate: CrateId, fn_def_id: chalk::FnDefId) -> Arc<chalk::FnDefDatum>;
-
- #[salsa::invoke(crate::traits::chalk::associated_ty_value_query)]
- fn associated_ty_value(
- &self,
- krate: CrateId,
- id: chalk::AssociatedTyValueId,
- ) -> Arc<chalk::AssociatedTyValue>;
-
- #[salsa::invoke(crate::traits::trait_solve_query)]
- fn trait_solve(
- &self,
- krate: CrateId,
- goal: crate::Canonical<crate::InEnvironment<crate::Obligation>>,
- ) -> Option<crate::traits::Solution>;
-
- #[salsa::invoke(crate::traits::chalk::program_clauses_for_chalk_env_query)]
- fn program_clauses_for_chalk_env(
- &self,
- krate: CrateId,
- env: chalk_ir::Environment<chalk::Interner>,
- ) -> chalk_ir::ProgramClauses<chalk::Interner>;
-}
-
-fn infer_wait(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
- let _p = profile::span("infer:wait").detail(|| match def {
- DefWithBodyId::FunctionId(it) => db.function_data(it).name.to_string(),
- DefWithBodyId::StaticId(it) => {
- db.static_data(it).name.clone().unwrap_or_else(Name::missing).to_string()
- }
- DefWithBodyId::ConstId(it) => {
- db.const_data(it).name.clone().unwrap_or_else(Name::missing).to_string()
- }
- });
- db.infer_query(def)
-}
-
-#[test]
-fn hir_database_is_object_safe() {
- fn _assert_object_safe(_: &dyn HirDatabase) {}
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct GlobalTypeParamId(salsa::InternId);
-impl_intern_key!(GlobalTypeParamId);
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct InternedOpaqueTyId(salsa::InternId);
-impl_intern_key!(InternedOpaqueTyId);
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct ClosureId(salsa::InternId);
-impl_intern_key!(ClosureId);
-
-/// This exists just for Chalk, because Chalk just has a single `FnDefId` where
-/// we have different IDs for struct and enum variant constructors.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub struct InternedCallableDefId(salsa::InternId);
-impl_intern_key!(InternedCallableDefId);
+++ /dev/null
-//! FIXME: write short doc here
-mod expr;
-mod match_check;
-mod unsafe_check;
-
-use std::any::Any;
-
-use hir_def::DefWithBodyId;
-use hir_expand::diagnostics::{Diagnostic, DiagnosticSink};
-use hir_expand::{name::Name, HirFileId, InFile};
-use stdx::format_to;
-use syntax::{ast, AstPtr, SyntaxNodePtr};
-
-use crate::db::HirDatabase;
-
-pub use crate::diagnostics::expr::{record_literal_missing_fields, record_pattern_missing_fields};
-
-pub fn validate_body(db: &dyn HirDatabase, owner: DefWithBodyId, sink: &mut DiagnosticSink<'_>) {
- let _p = profile::span("validate_body");
- let infer = db.infer(owner);
- infer.add_diagnostics(db, owner, sink);
- let mut validator = expr::ExprValidator::new(owner, infer.clone(), sink);
- validator.validate_body(db);
- let mut validator = unsafe_check::UnsafeValidator::new(owner, infer, sink);
- validator.validate_body(db);
-}
-
-#[derive(Debug)]
-pub struct NoSuchField {
- pub file: HirFileId,
- pub field: AstPtr<ast::RecordExprField>,
-}
-
-impl Diagnostic for NoSuchField {
- fn message(&self) -> String {
- "no such field".to_string()
- }
-
- fn display_source(&self) -> InFile<SyntaxNodePtr> {
- InFile::new(self.file, self.field.clone().into())
- }
-
- fn as_any(&self) -> &(dyn Any + Send + 'static) {
- self
- }
-}
-
-#[derive(Debug)]
-pub struct MissingFields {
- pub file: HirFileId,
- pub field_list_parent: AstPtr<ast::RecordExpr>,
- pub field_list_parent_path: Option<AstPtr<ast::Path>>,
- pub missed_fields: Vec<Name>,
-}
-
-impl Diagnostic for MissingFields {
- fn message(&self) -> String {
- let mut buf = String::from("Missing structure fields:\n");
- for field in &self.missed_fields {
- format_to!(buf, "- {}\n", field);
- }
- buf
- }
-
- fn display_source(&self) -> InFile<SyntaxNodePtr> {
- InFile {
- file_id: self.file,
- value: self
- .field_list_parent_path
- .clone()
- .map(SyntaxNodePtr::from)
- .unwrap_or_else(|| self.field_list_parent.clone().into()),
- }
- }
-
- fn as_any(&self) -> &(dyn Any + Send + 'static) {
- self
- }
-}
-
-#[derive(Debug)]
-pub struct MissingPatFields {
- pub file: HirFileId,
- pub field_list_parent: AstPtr<ast::RecordPat>,
- pub field_list_parent_path: Option<AstPtr<ast::Path>>,
- pub missed_fields: Vec<Name>,
-}
-
-impl Diagnostic for MissingPatFields {
- fn message(&self) -> String {
- let mut buf = String::from("Missing structure fields:\n");
- for field in &self.missed_fields {
- format_to!(buf, "- {}\n", field);
- }
- buf
- }
- fn display_source(&self) -> InFile<SyntaxNodePtr> {
- InFile {
- file_id: self.file,
- value: self
- .field_list_parent_path
- .clone()
- .map(SyntaxNodePtr::from)
- .unwrap_or_else(|| self.field_list_parent.clone().into()),
- }
- }
- fn as_any(&self) -> &(dyn Any + Send + 'static) {
- self
- }
-}
-
-#[derive(Debug)]
-pub struct MissingMatchArms {
- pub file: HirFileId,
- pub match_expr: AstPtr<ast::Expr>,
- pub arms: AstPtr<ast::MatchArmList>,
-}
-
-impl Diagnostic for MissingMatchArms {
- fn message(&self) -> String {
- String::from("Missing match arm")
- }
- fn display_source(&self) -> InFile<SyntaxNodePtr> {
- InFile { file_id: self.file, value: self.match_expr.clone().into() }
- }
- fn as_any(&self) -> &(dyn Any + Send + 'static) {
- self
- }
-}
-
-#[derive(Debug)]
-pub struct MissingOkInTailExpr {
- pub file: HirFileId,
- pub expr: AstPtr<ast::Expr>,
-}
-
-impl Diagnostic for MissingOkInTailExpr {
- fn message(&self) -> String {
- "wrap return expression in Ok".to_string()
- }
- fn display_source(&self) -> InFile<SyntaxNodePtr> {
- InFile { file_id: self.file, value: self.expr.clone().into() }
- }
- fn as_any(&self) -> &(dyn Any + Send + 'static) {
- self
- }
-}
-
-#[derive(Debug)]
-pub struct BreakOutsideOfLoop {
- pub file: HirFileId,
- pub expr: AstPtr<ast::Expr>,
-}
-
-impl Diagnostic for BreakOutsideOfLoop {
- fn message(&self) -> String {
- "break outside of loop".to_string()
- }
- fn display_source(&self) -> InFile<SyntaxNodePtr> {
- InFile { file_id: self.file, value: self.expr.clone().into() }
- }
- fn as_any(&self) -> &(dyn Any + Send + 'static) {
- self
- }
-}
-
-#[derive(Debug)]
-pub struct MissingUnsafe {
- pub file: HirFileId,
- pub expr: AstPtr<ast::Expr>,
-}
-
-impl Diagnostic for MissingUnsafe {
- fn message(&self) -> String {
- format!("This operation is unsafe and requires an unsafe function or block")
- }
- fn display_source(&self) -> InFile<SyntaxNodePtr> {
- InFile { file_id: self.file, value: self.expr.clone().into() }
- }
- fn as_any(&self) -> &(dyn Any + Send + 'static) {
- self
- }
-}
-
-#[derive(Debug)]
-pub struct MismatchedArgCount {
- pub file: HirFileId,
- pub call_expr: AstPtr<ast::Expr>,
- pub expected: usize,
- pub found: usize,
-}
-
-impl Diagnostic for MismatchedArgCount {
- fn message(&self) -> String {
- let s = if self.expected == 1 { "" } else { "s" };
- format!("Expected {} argument{}, found {}", self.expected, s, self.found)
- }
- fn display_source(&self) -> InFile<SyntaxNodePtr> {
- InFile { file_id: self.file, value: self.call_expr.clone().into() }
- }
- fn as_any(&self) -> &(dyn Any + Send + 'static) {
- self
- }
- fn is_experimental(&self) -> bool {
- true
- }
-}
-
-#[cfg(test)]
-mod tests {
- use base_db::{fixture::WithFixture, FileId, SourceDatabase, SourceDatabaseExt};
- use hir_def::{db::DefDatabase, AssocItemId, ModuleDefId};
- use hir_expand::{
- db::AstDatabase,
- diagnostics::{Diagnostic, DiagnosticSinkBuilder},
- };
- use rustc_hash::FxHashMap;
- use syntax::{TextRange, TextSize};
-
- use crate::{diagnostics::validate_body, test_db::TestDB};
-
- impl TestDB {
- fn diagnostics<F: FnMut(&dyn Diagnostic)>(&self, mut cb: F) {
- let crate_graph = self.crate_graph();
- for krate in crate_graph.iter() {
- let crate_def_map = self.crate_def_map(krate);
-
- let mut fns = Vec::new();
- for (module_id, _) in crate_def_map.modules.iter() {
- for decl in crate_def_map[module_id].scope.declarations() {
- if let ModuleDefId::FunctionId(f) = decl {
- fns.push(f)
- }
- }
-
- for impl_id in crate_def_map[module_id].scope.impls() {
- let impl_data = self.impl_data(impl_id);
- for item in impl_data.items.iter() {
- if let AssocItemId::FunctionId(f) = item {
- fns.push(*f)
- }
- }
- }
- }
-
- for f in fns {
- let mut sink = DiagnosticSinkBuilder::new().build(&mut cb);
- validate_body(self, f.into(), &mut sink);
- }
- }
- }
- }
-
- pub(crate) fn check_diagnostics(ra_fixture: &str) {
- let db = TestDB::with_files(ra_fixture);
- let annotations = db.extract_annotations();
-
- let mut actual: FxHashMap<FileId, Vec<(TextRange, String)>> = FxHashMap::default();
- db.diagnostics(|d| {
- let src = d.display_source();
- let root = db.parse_or_expand(src.file_id).unwrap();
- // FIXME: macros...
- let file_id = src.file_id.original_file(&db);
- let range = src.value.to_node(&root).text_range();
- let message = d.message().to_owned();
- actual.entry(file_id).or_default().push((range, message));
- });
-
- for (file_id, diags) in actual.iter_mut() {
- diags.sort_by_key(|it| it.0.start());
- let text = db.file_text(*file_id);
- // For multiline spans, place them on line start
- for (range, content) in diags {
- if text[*range].contains('\n') {
- *range = TextRange::new(range.start(), range.start() + TextSize::from(1));
- *content = format!("... {}", content);
- }
- }
- }
-
- assert_eq!(annotations, actual);
- }
-
- #[test]
- fn no_such_field_diagnostics() {
- check_diagnostics(
- r#"
-struct S { foo: i32, bar: () }
-impl S {
- fn new() -> S {
- S {
- //^ Missing structure fields:
- //| - bar
- foo: 92,
- baz: 62,
- //^^^^^^^ no such field
- }
- }
-}
-"#,
- );
- }
- #[test]
- fn no_such_field_with_feature_flag_diagnostics() {
- check_diagnostics(
- r#"
-//- /lib.rs crate:foo cfg:feature=foo
-struct MyStruct {
- my_val: usize,
- #[cfg(feature = "foo")]
- bar: bool,
-}
-
-impl MyStruct {
- #[cfg(feature = "foo")]
- pub(crate) fn new(my_val: usize, bar: bool) -> Self {
- Self { my_val, bar }
- }
- #[cfg(not(feature = "foo"))]
- pub(crate) fn new(my_val: usize, _bar: bool) -> Self {
- Self { my_val }
- }
-}
-"#,
- );
- }
-
- #[test]
- fn no_such_field_enum_with_feature_flag_diagnostics() {
- check_diagnostics(
- r#"
-//- /lib.rs crate:foo cfg:feature=foo
-enum Foo {
- #[cfg(not(feature = "foo"))]
- Buz,
- #[cfg(feature = "foo")]
- Bar,
- Baz
-}
-
-fn test_fn(f: Foo) {
- match f {
- Foo::Bar => {},
- Foo::Baz => {},
- }
-}
-"#,
- );
- }
-
- #[test]
- fn no_such_field_with_feature_flag_diagnostics_on_struct_lit() {
- check_diagnostics(
- r#"
-//- /lib.rs crate:foo cfg:feature=foo
-struct S {
- #[cfg(feature = "foo")]
- foo: u32,
- #[cfg(not(feature = "foo"))]
- bar: u32,
-}
-
-impl S {
- #[cfg(feature = "foo")]
- fn new(foo: u32) -> Self {
- Self { foo }
- }
- #[cfg(not(feature = "foo"))]
- fn new(bar: u32) -> Self {
- Self { bar }
- }
- fn new2(bar: u32) -> Self {
- #[cfg(feature = "foo")]
- { Self { foo: bar } }
- #[cfg(not(feature = "foo"))]
- { Self { bar } }
- }
- fn new2(val: u32) -> Self {
- Self {
- #[cfg(feature = "foo")]
- foo: val,
- #[cfg(not(feature = "foo"))]
- bar: val,
- }
- }
-}
-"#,
- );
- }
-
- #[test]
- fn no_such_field_with_type_macro() {
- check_diagnostics(
- r#"
-macro_rules! Type { () => { u32 }; }
-struct Foo { bar: Type![] }
-
-impl Foo {
- fn new() -> Self {
- Foo { bar: 0 }
- }
-}
-"#,
- );
- }
-
- #[test]
- fn missing_record_pat_field_diagnostic() {
- check_diagnostics(
- r#"
-struct S { foo: i32, bar: () }
-fn baz(s: S) {
- let S { foo: _ } = s;
- //^ Missing structure fields:
- //| - bar
-}
-"#,
- );
- }
-
- #[test]
- fn missing_record_pat_field_no_diagnostic_if_not_exhaustive() {
- check_diagnostics(
- r"
-struct S { foo: i32, bar: () }
-fn baz(s: S) -> i32 {
- match s {
- S { foo, .. } => foo,
- }
-}
-",
- )
- }
-
- #[test]
- fn break_outside_of_loop() {
- check_diagnostics(
- r#"
-fn foo() { break; }
- //^^^^^ break outside of loop
-"#,
- );
- }
-}
+++ /dev/null
-//! FIXME: write short doc here
-
-use std::sync::Arc;
-
-use hir_def::{path::path, resolver::HasResolver, AdtId, DefWithBodyId};
-use hir_expand::diagnostics::DiagnosticSink;
-use rustc_hash::FxHashSet;
-use syntax::{ast, AstPtr};
-
-use crate::{
- db::HirDatabase,
- diagnostics::{
- match_check::{is_useful, MatchCheckCtx, Matrix, PatStack, Usefulness},
- MismatchedArgCount, MissingFields, MissingMatchArms, MissingOkInTailExpr, MissingPatFields,
- },
- utils::variant_data,
- ApplicationTy, InferenceResult, Ty, TypeCtor,
-};
-
-pub use hir_def::{
- body::{
- scope::{ExprScopes, ScopeEntry, ScopeId},
- Body, BodySourceMap, ExprPtr, ExprSource, PatPtr, PatSource,
- },
- expr::{
- ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp,
- MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp,
- },
- src::HasSource,
- LocalFieldId, Lookup, VariantId,
-};
-
-pub(super) struct ExprValidator<'a, 'b: 'a> {
- owner: DefWithBodyId,
- infer: Arc<InferenceResult>,
- sink: &'a mut DiagnosticSink<'b>,
-}
-
-impl<'a, 'b> ExprValidator<'a, 'b> {
- pub(super) fn new(
- owner: DefWithBodyId,
- infer: Arc<InferenceResult>,
- sink: &'a mut DiagnosticSink<'b>,
- ) -> ExprValidator<'a, 'b> {
- ExprValidator { owner, infer, sink }
- }
-
- pub(super) fn validate_body(&mut self, db: &dyn HirDatabase) {
- let body = db.body(self.owner.into());
-
- for (id, expr) in body.exprs.iter() {
- if let Some((variant_def, missed_fields, true)) =
- record_literal_missing_fields(db, &self.infer, id, expr)
- {
- self.create_record_literal_missing_fields_diagnostic(
- id,
- db,
- variant_def,
- missed_fields,
- );
- }
-
- match expr {
- Expr::Match { expr, arms } => {
- self.validate_match(id, *expr, arms, db, self.infer.clone());
- }
- Expr::Call { .. } | Expr::MethodCall { .. } => {
- self.validate_call(db, id, expr);
- }
- _ => {}
- }
- }
- for (id, pat) in body.pats.iter() {
- if let Some((variant_def, missed_fields, true)) =
- record_pattern_missing_fields(db, &self.infer, id, pat)
- {
- self.create_record_pattern_missing_fields_diagnostic(
- id,
- db,
- variant_def,
- missed_fields,
- );
- }
- }
- let body_expr = &body[body.body_expr];
- if let Expr::Block { tail: Some(t), .. } = body_expr {
- self.validate_results_in_tail_expr(body.body_expr, *t, db);
- }
- }
-
- fn create_record_literal_missing_fields_diagnostic(
- &mut self,
- id: ExprId,
- db: &dyn HirDatabase,
- variant_def: VariantId,
- missed_fields: Vec<LocalFieldId>,
- ) {
- // XXX: only look at source_map if we do have missing fields
- let (_, source_map) = db.body_with_source_map(self.owner.into());
-
- if let Ok(source_ptr) = source_map.expr_syntax(id) {
- let root = source_ptr.file_syntax(db.upcast());
- if let ast::Expr::RecordExpr(record_expr) = &source_ptr.value.to_node(&root) {
- if let Some(_) = record_expr.record_expr_field_list() {
- let variant_data = variant_data(db.upcast(), variant_def);
- let missed_fields = missed_fields
- .into_iter()
- .map(|idx| variant_data.fields()[idx].name.clone())
- .collect();
- self.sink.push(MissingFields {
- file: source_ptr.file_id,
- field_list_parent: AstPtr::new(&record_expr),
- field_list_parent_path: record_expr.path().map(|path| AstPtr::new(&path)),
- missed_fields,
- })
- }
- }
- }
- }
-
- fn create_record_pattern_missing_fields_diagnostic(
- &mut self,
- id: PatId,
- db: &dyn HirDatabase,
- variant_def: VariantId,
- missed_fields: Vec<LocalFieldId>,
- ) {
- // XXX: only look at source_map if we do have missing fields
- let (_, source_map) = db.body_with_source_map(self.owner.into());
-
- if let Ok(source_ptr) = source_map.pat_syntax(id) {
- if let Some(expr) = source_ptr.value.as_ref().left() {
- let root = source_ptr.file_syntax(db.upcast());
- if let ast::Pat::RecordPat(record_pat) = expr.to_node(&root) {
- if let Some(_) = record_pat.record_pat_field_list() {
- let variant_data = variant_data(db.upcast(), variant_def);
- let missed_fields = missed_fields
- .into_iter()
- .map(|idx| variant_data.fields()[idx].name.clone())
- .collect();
- self.sink.push(MissingPatFields {
- file: source_ptr.file_id,
- field_list_parent: AstPtr::new(&record_pat),
- field_list_parent_path: record_pat
- .path()
- .map(|path| AstPtr::new(&path)),
- missed_fields,
- })
- }
- }
- }
- }
- }
-
- fn validate_call(&mut self, db: &dyn HirDatabase, call_id: ExprId, expr: &Expr) -> Option<()> {
- // Check that the number of arguments matches the number of parameters.
-
- // FIXME: Due to shortcomings in the current type system implementation, only emit this
- // diagnostic if there are no type mismatches in the containing function.
- if self.infer.type_mismatches.iter().next().is_some() {
- return Some(());
- }
-
- let is_method_call = matches!(expr, Expr::MethodCall { .. });
- let (sig, args) = match expr {
- Expr::Call { callee, args } => {
- let callee = &self.infer.type_of_expr[*callee];
- let sig = callee.callable_sig(db)?;
- (sig, args.clone())
- }
- Expr::MethodCall { receiver, args, .. } => {
- let mut args = args.clone();
- args.insert(0, *receiver);
-
- // FIXME: note that we erase information about substs here. This
- // is not right, but, luckily, doesn't matter as we care only
- // about the number of params
- let callee = self.infer.method_resolution(call_id)?;
- let sig = db.callable_item_signature(callee.into()).value;
-
- (sig, args)
- }
- _ => return None,
- };
-
- if sig.is_varargs {
- return None;
- }
-
- let params = sig.params();
-
- let mut param_count = params.len();
- let mut arg_count = args.len();
-
- if arg_count != param_count {
- let (_, source_map) = db.body_with_source_map(self.owner.into());
- if let Ok(source_ptr) = source_map.expr_syntax(call_id) {
- if is_method_call {
- param_count -= 1;
- arg_count -= 1;
- }
- self.sink.push(MismatchedArgCount {
- file: source_ptr.file_id,
- call_expr: source_ptr.value,
- expected: param_count,
- found: arg_count,
- });
- }
- }
-
- None
- }
-
- fn validate_match(
- &mut self,
- id: ExprId,
- match_expr: ExprId,
- arms: &[MatchArm],
- db: &dyn HirDatabase,
- infer: Arc<InferenceResult>,
- ) {
- let (body, source_map): (Arc<Body>, Arc<BodySourceMap>) =
- db.body_with_source_map(self.owner.into());
-
- let match_expr_ty = match infer.type_of_expr.get(match_expr) {
- Some(ty) => ty,
- // If we can't resolve the type of the match expression
- // we cannot perform exhaustiveness checks.
- None => return,
- };
-
- let cx = MatchCheckCtx { match_expr, body, infer: infer.clone(), db };
- let pats = arms.iter().map(|arm| arm.pat);
-
- let mut seen = Matrix::empty();
- for pat in pats {
- if let Some(pat_ty) = infer.type_of_pat.get(pat) {
- // We only include patterns whose type matches the type
- // of the match expression. If we had a InvalidMatchArmPattern
- // diagnostic or similar we could raise that in an else
- // block here.
- //
- // When comparing the types, we also have to consider that rustc
- // will automatically de-reference the match expression type if
- // necessary.
- //
- // FIXME we should use the type checker for this.
- if pat_ty == match_expr_ty
- || match_expr_ty
- .as_reference()
- .map(|(match_expr_ty, _)| match_expr_ty == pat_ty)
- .unwrap_or(false)
- {
- // If we had a NotUsefulMatchArm diagnostic, we could
- // check the usefulness of each pattern as we added it
- // to the matrix here.
- let v = PatStack::from_pattern(pat);
- seen.push(&cx, v);
- continue;
- }
- }
-
- // If we can't resolve the type of a pattern, or the pattern type doesn't
- // fit the match expression, we skip this diagnostic. Skipping the entire
- // diagnostic rather than just not including this match arm is preferred
- // to avoid the chance of false positives.
- return;
- }
-
- match is_useful(&cx, &seen, &PatStack::from_wild()) {
- Ok(Usefulness::Useful) => (),
- // if a wildcard pattern is not useful, then all patterns are covered
- Ok(Usefulness::NotUseful) => return,
- // this path is for unimplemented checks, so we err on the side of not
- // reporting any errors
- _ => return,
- }
-
- if let Ok(source_ptr) = source_map.expr_syntax(id) {
- let root = source_ptr.file_syntax(db.upcast());
- if let ast::Expr::MatchExpr(match_expr) = &source_ptr.value.to_node(&root) {
- if let (Some(match_expr), Some(arms)) =
- (match_expr.expr(), match_expr.match_arm_list())
- {
- self.sink.push(MissingMatchArms {
- file: source_ptr.file_id,
- match_expr: AstPtr::new(&match_expr),
- arms: AstPtr::new(&arms),
- })
- }
- }
- }
- }
-
- fn validate_results_in_tail_expr(&mut self, body_id: ExprId, id: ExprId, db: &dyn HirDatabase) {
- // the mismatch will be on the whole block currently
- let mismatch = match self.infer.type_mismatch_for_expr(body_id) {
- Some(m) => m,
- None => return,
- };
-
- let core_result_path = path![core::result::Result];
-
- let resolver = self.owner.resolver(db.upcast());
- let core_result_enum = match resolver.resolve_known_enum(db.upcast(), &core_result_path) {
- Some(it) => it,
- _ => return,
- };
-
- let core_result_ctor = TypeCtor::Adt(AdtId::EnumId(core_result_enum));
- let params = match &mismatch.expected {
- Ty::Apply(ApplicationTy { ctor, parameters }) if ctor == &core_result_ctor => {
- parameters
- }
- _ => return,
- };
-
- if params.len() == 2 && params[0] == mismatch.actual {
- let (_, source_map) = db.body_with_source_map(self.owner.into());
-
- if let Ok(source_ptr) = source_map.expr_syntax(id) {
- self.sink
- .push(MissingOkInTailExpr { file: source_ptr.file_id, expr: source_ptr.value });
- }
- }
- }
-}
-
-pub fn record_literal_missing_fields(
- db: &dyn HirDatabase,
- infer: &InferenceResult,
- id: ExprId,
- expr: &Expr,
-) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
- let (fields, exhausitve) = match expr {
- Expr::RecordLit { path: _, fields, spread } => (fields, spread.is_none()),
- _ => return None,
- };
-
- let variant_def = infer.variant_resolution_for_expr(id)?;
- if let VariantId::UnionId(_) = variant_def {
- return None;
- }
-
- let variant_data = variant_data(db.upcast(), variant_def);
-
- let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
- let missed_fields: Vec<LocalFieldId> = variant_data
- .fields()
- .iter()
- .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
- .collect();
- if missed_fields.is_empty() {
- return None;
- }
- Some((variant_def, missed_fields, exhausitve))
-}
-
-pub fn record_pattern_missing_fields(
- db: &dyn HirDatabase,
- infer: &InferenceResult,
- id: PatId,
- pat: &Pat,
-) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
- let (fields, exhaustive) = match pat {
- Pat::Record { path: _, args, ellipsis } => (args, !ellipsis),
- _ => return None,
- };
-
- let variant_def = infer.variant_resolution_for_pat(id)?;
- if let VariantId::UnionId(_) = variant_def {
- return None;
- }
-
- let variant_data = variant_data(db.upcast(), variant_def);
-
- let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
- let missed_fields: Vec<LocalFieldId> = variant_data
- .fields()
- .iter()
- .filter_map(|(f, d)| if specified_fields.contains(&d.name) { None } else { Some(f) })
- .collect();
- if missed_fields.is_empty() {
- return None;
- }
- Some((variant_def, missed_fields, exhaustive))
-}
-
-#[cfg(test)]
-mod tests {
- use crate::diagnostics::tests::check_diagnostics;
-
- #[test]
- fn simple_free_fn_zero() {
- check_diagnostics(
- r#"
-fn zero() {}
-fn f() { zero(1); }
- //^^^^^^^ Expected 0 arguments, found 1
-"#,
- );
-
- check_diagnostics(
- r#"
-fn zero() {}
-fn f() { zero(); }
-"#,
- );
- }
-
- #[test]
- fn simple_free_fn_one() {
- check_diagnostics(
- r#"
-fn one(arg: u8) {}
-fn f() { one(); }
- //^^^^^ Expected 1 argument, found 0
-"#,
- );
-
- check_diagnostics(
- r#"
-fn one(arg: u8) {}
-fn f() { one(1); }
-"#,
- );
- }
-
- #[test]
- fn method_as_fn() {
- check_diagnostics(
- r#"
-struct S;
-impl S { fn method(&self) {} }
-
-fn f() {
- S::method();
-} //^^^^^^^^^^^ Expected 1 argument, found 0
-"#,
- );
-
- check_diagnostics(
- r#"
-struct S;
-impl S { fn method(&self) {} }
-
-fn f() {
- S::method(&S);
- S.method();
-}
-"#,
- );
- }
-
- #[test]
- fn method_with_arg() {
- check_diagnostics(
- r#"
-struct S;
-impl S { fn method(&self, arg: u8) {} }
-
- fn f() {
- S.method();
- } //^^^^^^^^^^ Expected 1 argument, found 0
- "#,
- );
-
- check_diagnostics(
- r#"
-struct S;
-impl S { fn method(&self, arg: u8) {} }
-
-fn f() {
- S::method(&S, 0);
- S.method(1);
-}
-"#,
- );
- }
-
- #[test]
- fn tuple_struct() {
- check_diagnostics(
- r#"
-struct Tup(u8, u16);
-fn f() {
- Tup(0);
-} //^^^^^^ Expected 2 arguments, found 1
-"#,
- )
- }
-
- #[test]
- fn enum_variant() {
- check_diagnostics(
- r#"
-enum En { Variant(u8, u16), }
-fn f() {
- En::Variant(0);
-} //^^^^^^^^^^^^^^ Expected 2 arguments, found 1
-"#,
- )
- }
-
- #[test]
- fn enum_variant_type_macro() {
- check_diagnostics(
- r#"
-macro_rules! Type {
- () => { u32 };
-}
-enum Foo {
- Bar(Type![])
-}
-impl Foo {
- fn new() {
- Foo::Bar(0);
- Foo::Bar(0, 1);
- //^^^^^^^^^^^^^^ Expected 1 argument, found 2
- Foo::Bar();
- //^^^^^^^^^^ Expected 1 argument, found 0
- }
-}
- "#,
- );
- }
-
- #[test]
- fn varargs() {
- check_diagnostics(
- r#"
-extern "C" {
- fn fixed(fixed: u8);
- fn varargs(fixed: u8, ...);
- fn varargs2(...);
-}
-
-fn f() {
- unsafe {
- fixed(0);
- fixed(0, 1);
- //^^^^^^^^^^^ Expected 1 argument, found 2
- varargs(0);
- varargs(0, 1);
- varargs2();
- varargs2(0);
- varargs2(0, 1);
- }
-}
- "#,
- )
- }
-
- #[test]
- fn arg_count_lambda() {
- check_diagnostics(
- r#"
-fn main() {
- let f = |()| ();
- f();
- //^^^ Expected 1 argument, found 0
- f(());
- f((), ());
- //^^^^^^^^^ Expected 1 argument, found 2
-}
-"#,
- )
- }
-}
+++ /dev/null
-//! This module implements match statement exhaustiveness checking and usefulness checking
-//! for match arms.
-//!
-//! It is modeled on the rustc module `librustc_mir_build::hair::pattern::_match`, which
-//! contains very detailed documentation about the algorithms used here. I've duplicated
-//! most of that documentation below.
-//!
-//! This file includes the logic for exhaustiveness and usefulness checking for
-//! pattern-matching. Specifically, given a list of patterns for a type, we can
-//! tell whether:
-//! - (a) the patterns cover every possible constructor for the type (exhaustiveness).
-//! - (b) each pattern is necessary (usefulness).
-//!
-//! The algorithm implemented here is a modified version of the one described in
-//! <http://moscova.inria.fr/~maranget/papers/warn/index.html>.
-//! However, to save future implementors from reading the original paper, we
-//! summarise the algorithm here to hopefully save time and be a little clearer
-//! (without being so rigorous).
-//!
-//! The core of the algorithm revolves about a "usefulness" check. In particular, we
-//! are trying to compute a predicate `U(P, p)` where `P` is a list of patterns (we refer to this as
-//! a matrix). `U(P, p)` represents whether, given an existing list of patterns
-//! `P_1 ..= P_m`, adding a new pattern `p` will be "useful" (that is, cover previously-
-//! uncovered values of the type).
-//!
-//! If we have this predicate, then we can easily compute both exhaustiveness of an
-//! entire set of patterns and the individual usefulness of each one.
-//! (a) the set of patterns is exhaustive iff `U(P, _)` is false (i.e., adding a wildcard
-//! match doesn't increase the number of values we're matching)
-//! (b) a pattern `P_i` is not useful if `U(P[0..=(i-1), P_i)` is false (i.e., adding a
-//! pattern to those that have come before it doesn't increase the number of values
-//! we're matching).
-//!
-//! During the course of the algorithm, the rows of the matrix won't just be individual patterns,
-//! but rather partially-deconstructed patterns in the form of a list of patterns. The paper
-//! calls those pattern-vectors, and we will call them pattern-stacks. The same holds for the
-//! new pattern `p`.
-//!
-//! For example, say we have the following:
-//!
-//! ```ignore
-//! // x: (Option<bool>, Result<()>)
-//! match x {
-//! (Some(true), _) => (),
-//! (None, Err(())) => (),
-//! (None, Err(_)) => (),
-//! }
-//! ```
-//!
-//! Here, the matrix `P` starts as:
-//!
-//! ```text
-//! [
-//! [(Some(true), _)],
-//! [(None, Err(()))],
-//! [(None, Err(_))],
-//! ]
-//! ```
-//!
-//! We can tell it's not exhaustive, because `U(P, _)` is true (we're not covering
-//! `[(Some(false), _)]`, for instance). In addition, row 3 is not useful, because
-//! all the values it covers are already covered by row 2.
-//!
-//! A list of patterns can be thought of as a stack, because we are mainly interested in the top of
-//! the stack at any given point, and we can pop or apply constructors to get new pattern-stacks.
-//! To match the paper, the top of the stack is at the beginning / on the left.
-//!
-//! There are two important operations on pattern-stacks necessary to understand the algorithm:
-//!
-//! 1. We can pop a given constructor off the top of a stack. This operation is called
-//! `specialize`, and is denoted `S(c, p)` where `c` is a constructor (like `Some` or
-//! `None`) and `p` a pattern-stack.
-//! If the pattern on top of the stack can cover `c`, this removes the constructor and
-//! pushes its arguments onto the stack. It also expands OR-patterns into distinct patterns.
-//! Otherwise the pattern-stack is discarded.
-//! This essentially filters those pattern-stacks whose top covers the constructor `c` and
-//! discards the others.
-//!
-//! For example, the first pattern above initially gives a stack `[(Some(true), _)]`. If we
-//! pop the tuple constructor, we are left with `[Some(true), _]`, and if we then pop the
-//! `Some` constructor we get `[true, _]`. If we had popped `None` instead, we would get
-//! nothing back.
-//!
-//! This returns zero or more new pattern-stacks, as follows. We look at the pattern `p_1`
-//! on top of the stack, and we have four cases:
-//!
-//! * 1.1. `p_1 = c(r_1, .., r_a)`, i.e. the top of the stack has constructor `c`. We push onto
-//! the stack the arguments of this constructor, and return the result:
-//!
-//! r_1, .., r_a, p_2, .., p_n
-//!
-//! * 1.2. `p_1 = c'(r_1, .., r_a')` where `c ≠c'`. We discard the current stack and return
-//! nothing.
-//! * 1.3. `p_1 = _`. We push onto the stack as many wildcards as the constructor `c` has
-//! arguments (its arity), and return the resulting stack:
-//!
-//! _, .., _, p_2, .., p_n
-//!
-//! * 1.4. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting stack:
-//!
-//! S(c, (r_1, p_2, .., p_n))
-//! S(c, (r_2, p_2, .., p_n))
-//!
-//! 2. We can pop a wildcard off the top of the stack. This is called `D(p)`, where `p` is
-//! a pattern-stack.
-//! This is used when we know there are missing constructor cases, but there might be
-//! existing wildcard patterns, so to check the usefulness of the matrix, we have to check
-//! all its *other* components.
-//!
-//! It is computed as follows. We look at the pattern `p_1` on top of the stack,
-//! and we have three cases:
-//! * 1.1. `p_1 = c(r_1, .., r_a)`. We discard the current stack and return nothing.
-//! * 1.2. `p_1 = _`. We return the rest of the stack:
-//!
-//! p_2, .., p_n
-//!
-//! * 1.3. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting stack:
-//!
-//! D((r_1, p_2, .., p_n))
-//! D((r_2, p_2, .., p_n))
-//!
-//! Note that the OR-patterns are not always used directly in Rust, but are used to derive the
-//! exhaustive integer matching rules, so they're written here for posterity.
-//!
-//! Both those operations extend straightforwardly to a list or pattern-stacks, i.e. a matrix, by
-//! working row-by-row. Popping a constructor ends up keeping only the matrix rows that start with
-//! the given constructor, and popping a wildcard keeps those rows that start with a wildcard.
-//!
-//!
-//! The algorithm for computing `U`
-//! -------------------------------
-//! The algorithm is inductive (on the number of columns: i.e., components of tuple patterns).
-//! That means we're going to check the components from left-to-right, so the algorithm
-//! operates principally on the first component of the matrix and new pattern-stack `p`.
-//! This algorithm is realised in the `is_useful` function.
-//!
-//! Base case (`n = 0`, i.e., an empty tuple pattern):
-//! - If `P` already contains an empty pattern (i.e., if the number of patterns `m > 0`), then
-//! `U(P, p)` is false.
-//! - Otherwise, `P` must be empty, so `U(P, p)` is true.
-//!
-//! Inductive step (`n > 0`, i.e., whether there's at least one column [which may then be expanded
-//! into further columns later]). We're going to match on the top of the new pattern-stack, `p_1`:
-//!
-//! - If `p_1 == c(r_1, .., r_a)`, i.e. we have a constructor pattern.
-//! Then, the usefulness of `p_1` can be reduced to whether it is useful when
-//! we ignore all the patterns in the first column of `P` that involve other constructors.
-//! This is where `S(c, P)` comes in:
-//!
-//! ```text
-//! U(P, p) := U(S(c, P), S(c, p))
-//! ```
-//!
-//! This special case is handled in `is_useful_specialized`.
-//!
-//! For example, if `P` is:
-//!
-//! ```text
-//! [
-//! [Some(true), _],
-//! [None, 0],
-//! ]
-//! ```
-//!
-//! and `p` is `[Some(false), 0]`, then we don't care about row 2 since we know `p` only
-//! matches values that row 2 doesn't. For row 1 however, we need to dig into the
-//! arguments of `Some` to know whether some new value is covered. So we compute
-//! `U([[true, _]], [false, 0])`.
-//!
-//! - If `p_1 == _`, then we look at the list of constructors that appear in the first component of
-//! the rows of `P`:
-//! - If there are some constructors that aren't present, then we might think that the
-//! wildcard `_` is useful, since it covers those constructors that weren't covered
-//! before.
-//! That's almost correct, but only works if there were no wildcards in those first
-//! components. So we need to check that `p` is useful with respect to the rows that
-//! start with a wildcard, if there are any. This is where `D` comes in:
-//! `U(P, p) := U(D(P), D(p))`
-//!
-//! For example, if `P` is:
-//! ```text
-//! [
-//! [_, true, _],
-//! [None, false, 1],
-//! ]
-//! ```
-//! and `p` is `[_, false, _]`, the `Some` constructor doesn't appear in `P`. So if we
-//! only had row 2, we'd know that `p` is useful. However row 1 starts with a
-//! wildcard, so we need to check whether `U([[true, _]], [false, 1])`.
-//!
-//! - Otherwise, all possible constructors (for the relevant type) are present. In this
-//! case we must check whether the wildcard pattern covers any unmatched value. For
-//! that, we can think of the `_` pattern as a big OR-pattern that covers all
-//! possible constructors. For `Option`, that would mean `_ = None | Some(_)` for
-//! example. The wildcard pattern is useful in this case if it is useful when
-//! specialized to one of the possible constructors. So we compute:
-//! `U(P, p) := ∃(k ϵ constructors) U(S(k, P), S(k, p))`
-//!
-//! For example, if `P` is:
-//! ```text
-//! [
-//! [Some(true), _],
-//! [None, false],
-//! ]
-//! ```
-//! and `p` is `[_, false]`, both `None` and `Some` constructors appear in the first
-//! components of `P`. We will therefore try popping both constructors in turn: we
-//! compute `U([[true, _]], [_, false])` for the `Some` constructor, and `U([[false]],
-//! [false])` for the `None` constructor. The first case returns true, so we know that
-//! `p` is useful for `P`. Indeed, it matches `[Some(false), _]` that wasn't matched
-//! before.
-//!
-//! - If `p_1 == r_1 | r_2`, then the usefulness depends on each `r_i` separately:
-//!
-//! ```text
-//! U(P, p) := U(P, (r_1, p_2, .., p_n))
-//! || U(P, (r_2, p_2, .., p_n))
-//! ```
-use std::sync::Arc;
-
-use arena::Idx;
-use hir_def::{
- adt::VariantData,
- body::Body,
- expr::{Expr, Literal, Pat, PatId},
- AdtId, EnumVariantId, VariantId,
-};
-use smallvec::{smallvec, SmallVec};
-
-use crate::{db::HirDatabase, ApplicationTy, InferenceResult, Ty, TypeCtor};
-
-#[derive(Debug, Clone, Copy)]
-/// Either a pattern from the source code being analyzed, represented as
-/// as `PatId`, or a `Wild` pattern which is created as an intermediate
-/// step in the match checking algorithm and thus is not backed by a
-/// real `PatId`.
-///
-/// Note that it is totally valid for the `PatId` variant to contain
-/// a `PatId` which resolves to a `Wild` pattern, if that wild pattern
-/// exists in the source code being analyzed.
-enum PatIdOrWild {
- PatId(PatId),
- Wild,
-}
-
-impl PatIdOrWild {
- fn as_pat(self, cx: &MatchCheckCtx) -> Pat {
- match self {
- PatIdOrWild::PatId(id) => cx.body.pats[id].clone(),
- PatIdOrWild::Wild => Pat::Wild,
- }
- }
-
- fn as_id(self) -> Option<PatId> {
- match self {
- PatIdOrWild::PatId(id) => Some(id),
- PatIdOrWild::Wild => None,
- }
- }
-}
-
-impl From<PatId> for PatIdOrWild {
- fn from(pat_id: PatId) -> Self {
- Self::PatId(pat_id)
- }
-}
-
-impl From<&PatId> for PatIdOrWild {
- fn from(pat_id: &PatId) -> Self {
- Self::PatId(*pat_id)
- }
-}
-
-#[derive(Debug, Clone, Copy, PartialEq)]
-pub(super) enum MatchCheckErr {
- NotImplemented,
- MalformedMatchArm,
- /// Used when type inference cannot resolve the type of
- /// a pattern or expression.
- Unknown,
-}
-
-/// The return type of `is_useful` is either an indication of usefulness
-/// of the match arm, or an error in the case the match statement
-/// is made up of types for which exhaustiveness checking is currently
-/// not completely implemented.
-///
-/// The `std::result::Result` type is used here rather than a custom enum
-/// to allow the use of `?`.
-pub(super) type MatchCheckResult<T> = Result<T, MatchCheckErr>;
-
-#[derive(Debug)]
-/// A row in a Matrix.
-///
-/// This type is modeled from the struct of the same name in `rustc`.
-pub(super) struct PatStack(PatStackInner);
-type PatStackInner = SmallVec<[PatIdOrWild; 2]>;
-
-impl PatStack {
- pub(super) fn from_pattern(pat_id: PatId) -> PatStack {
- Self(smallvec!(pat_id.into()))
- }
-
- pub(super) fn from_wild() -> PatStack {
- Self(smallvec!(PatIdOrWild::Wild))
- }
-
- fn from_slice(slice: &[PatIdOrWild]) -> PatStack {
- Self(SmallVec::from_slice(slice))
- }
-
- fn from_vec(v: PatStackInner) -> PatStack {
- Self(v)
- }
-
- fn get_head(&self) -> Option<PatIdOrWild> {
- self.0.first().copied()
- }
-
- fn tail(&self) -> &[PatIdOrWild] {
- self.0.get(1..).unwrap_or(&[])
- }
-
- fn to_tail(&self) -> PatStack {
- Self::from_slice(self.tail())
- }
-
- fn replace_head_with<I, T>(&self, pats: I) -> PatStack
- where
- I: Iterator<Item = T>,
- T: Into<PatIdOrWild>,
- {
- let mut patterns: PatStackInner = smallvec![];
- for pat in pats {
- patterns.push(pat.into());
- }
- for pat in &self.0[1..] {
- patterns.push(*pat);
- }
- PatStack::from_vec(patterns)
- }
-
- /// Computes `D(self)`.
- ///
- /// See the module docs and the associated documentation in rustc for details.
- fn specialize_wildcard(&self, cx: &MatchCheckCtx) -> Option<PatStack> {
- if matches!(self.get_head()?.as_pat(cx), Pat::Wild) {
- Some(self.to_tail())
- } else {
- None
- }
- }
-
- /// Computes `S(constructor, self)`.
- ///
- /// See the module docs and the associated documentation in rustc for details.
- fn specialize_constructor(
- &self,
- cx: &MatchCheckCtx,
- constructor: &Constructor,
- ) -> MatchCheckResult<Option<PatStack>> {
- let head = match self.get_head() {
- Some(head) => head,
- None => return Ok(None),
- };
-
- let head_pat = head.as_pat(cx);
- let result = match (head_pat, constructor) {
- (Pat::Tuple { args: ref pat_ids, ellipsis }, Constructor::Tuple { arity: _ }) => {
- if ellipsis.is_some() {
- // If there are ellipsis here, we should add the correct number of
- // Pat::Wild patterns to `pat_ids`. We should be able to use the
- // constructors arity for this, but at the time of writing we aren't
- // correctly calculating this arity when ellipsis are present.
- return Err(MatchCheckErr::NotImplemented);
- }
-
- Some(self.replace_head_with(pat_ids.iter()))
- }
- (Pat::Lit(lit_expr), Constructor::Bool(constructor_val)) => {
- match cx.body.exprs[lit_expr] {
- Expr::Literal(Literal::Bool(pat_val)) if *constructor_val == pat_val => {
- Some(self.to_tail())
- }
- // it was a bool but the value doesn't match
- Expr::Literal(Literal::Bool(_)) => None,
- // perhaps this is actually unreachable given we have
- // already checked that these match arms have the appropriate type?
- _ => return Err(MatchCheckErr::NotImplemented),
- }
- }
- (Pat::Wild, constructor) => Some(self.expand_wildcard(cx, constructor)?),
- (Pat::Path(_), Constructor::Enum(constructor)) => {
- // unit enum variants become `Pat::Path`
- let pat_id = head.as_id().expect("we know this isn't a wild");
- if !enum_variant_matches(cx, pat_id, *constructor) {
- None
- } else {
- Some(self.to_tail())
- }
- }
- (
- Pat::TupleStruct { args: ref pat_ids, ellipsis, .. },
- Constructor::Enum(enum_constructor),
- ) => {
- let pat_id = head.as_id().expect("we know this isn't a wild");
- if !enum_variant_matches(cx, pat_id, *enum_constructor) {
- None
- } else {
- let constructor_arity = constructor.arity(cx)?;
- if let Some(ellipsis_position) = ellipsis {
- // If there are ellipsis in the pattern, the ellipsis must take the place
- // of at least one sub-pattern, so `pat_ids` should be smaller than the
- // constructor arity.
- if pat_ids.len() < constructor_arity {
- let mut new_patterns: Vec<PatIdOrWild> = vec![];
-
- for pat_id in &pat_ids[0..ellipsis_position] {
- new_patterns.push((*pat_id).into());
- }
-
- for _ in 0..(constructor_arity - pat_ids.len()) {
- new_patterns.push(PatIdOrWild::Wild);
- }
-
- for pat_id in &pat_ids[ellipsis_position..pat_ids.len()] {
- new_patterns.push((*pat_id).into());
- }
-
- Some(self.replace_head_with(new_patterns.into_iter()))
- } else {
- return Err(MatchCheckErr::MalformedMatchArm);
- }
- } else {
- // If there is no ellipsis in the tuple pattern, the number
- // of patterns must equal the constructor arity.
- if pat_ids.len() == constructor_arity {
- Some(self.replace_head_with(pat_ids.into_iter()))
- } else {
- return Err(MatchCheckErr::MalformedMatchArm);
- }
- }
- }
- }
- (Pat::Record { args: ref arg_patterns, .. }, Constructor::Enum(e)) => {
- let pat_id = head.as_id().expect("we know this isn't a wild");
- if !enum_variant_matches(cx, pat_id, *e) {
- None
- } else {
- match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() {
- VariantData::Record(struct_field_arena) => {
- // Here we treat any missing fields in the record as the wild pattern, as
- // if the record has ellipsis. We want to do this here even if the
- // record does not contain ellipsis, because it allows us to continue
- // enforcing exhaustiveness for the rest of the match statement.
- //
- // Creating the diagnostic for the missing field in the pattern
- // should be done in a different diagnostic.
- let patterns = struct_field_arena.iter().map(|(_, struct_field)| {
- arg_patterns
- .iter()
- .find(|pat| pat.name == struct_field.name)
- .map(|pat| PatIdOrWild::from(pat.pat))
- .unwrap_or(PatIdOrWild::Wild)
- });
-
- Some(self.replace_head_with(patterns))
- }
- _ => return Err(MatchCheckErr::Unknown),
- }
- }
- }
- (Pat::Or(_), _) => return Err(MatchCheckErr::NotImplemented),
- (_, _) => return Err(MatchCheckErr::NotImplemented),
- };
-
- Ok(result)
- }
-
- /// A special case of `specialize_constructor` where the head of the pattern stack
- /// is a Wild pattern.
- ///
- /// Replaces the Wild pattern at the head of the pattern stack with N Wild patterns
- /// (N >= 0), where N is the arity of the given constructor.
- fn expand_wildcard(
- &self,
- cx: &MatchCheckCtx,
- constructor: &Constructor,
- ) -> MatchCheckResult<PatStack> {
- assert_eq!(
- Pat::Wild,
- self.get_head().expect("expand_wildcard called on empty PatStack").as_pat(cx),
- "expand_wildcard must only be called on PatStack with wild at head",
- );
-
- let mut patterns: PatStackInner = smallvec![];
-
- for _ in 0..constructor.arity(cx)? {
- patterns.push(PatIdOrWild::Wild);
- }
-
- for pat in &self.0[1..] {
- patterns.push(*pat);
- }
-
- Ok(PatStack::from_vec(patterns))
- }
-}
-
-/// A collection of PatStack.
-///
-/// This type is modeled from the struct of the same name in `rustc`.
-pub(super) struct Matrix(Vec<PatStack>);
-
-impl Matrix {
- pub(super) fn empty() -> Self {
- Self(vec![])
- }
-
- pub(super) fn push(&mut self, cx: &MatchCheckCtx, row: PatStack) {
- if let Some(Pat::Or(pat_ids)) = row.get_head().map(|pat_id| pat_id.as_pat(cx)) {
- // Or patterns are expanded here
- for pat_id in pat_ids {
- self.0.push(PatStack::from_pattern(pat_id));
- }
- } else {
- self.0.push(row);
- }
- }
-
- fn is_empty(&self) -> bool {
- self.0.is_empty()
- }
-
- fn heads(&self) -> Vec<PatIdOrWild> {
- self.0.iter().flat_map(|p| p.get_head()).collect()
- }
-
- /// Computes `D(self)` for each contained PatStack.
- ///
- /// See the module docs and the associated documentation in rustc for details.
- fn specialize_wildcard(&self, cx: &MatchCheckCtx) -> Self {
- Self::collect(cx, self.0.iter().filter_map(|r| r.specialize_wildcard(cx)))
- }
-
- /// Computes `S(constructor, self)` for each contained PatStack.
- ///
- /// See the module docs and the associated documentation in rustc for details.
- fn specialize_constructor(
- &self,
- cx: &MatchCheckCtx,
- constructor: &Constructor,
- ) -> MatchCheckResult<Self> {
- let mut new_matrix = Matrix::empty();
- for pat in &self.0 {
- if let Some(pat) = pat.specialize_constructor(cx, constructor)? {
- new_matrix.push(cx, pat);
- }
- }
-
- Ok(new_matrix)
- }
-
- fn collect<T: IntoIterator<Item = PatStack>>(cx: &MatchCheckCtx, iter: T) -> Self {
- let mut matrix = Matrix::empty();
-
- for pat in iter {
- // using push ensures we expand or-patterns
- matrix.push(cx, pat);
- }
-
- matrix
- }
-}
-
-#[derive(Clone, Debug, PartialEq)]
-/// An indication of the usefulness of a given match arm, where
-/// usefulness is defined as matching some patterns which were
-/// not matched by an prior match arms.
-///
-/// We may eventually need an `Unknown` variant here.
-pub(super) enum Usefulness {
- Useful,
- NotUseful,
-}
-
-pub(super) struct MatchCheckCtx<'a> {
- pub(super) match_expr: Idx<Expr>,
- pub(super) body: Arc<Body>,
- pub(super) infer: Arc<InferenceResult>,
- pub(super) db: &'a dyn HirDatabase,
-}
-
-/// Given a set of patterns `matrix`, and pattern to consider `v`, determines
-/// whether `v` is useful. A pattern is useful if it covers cases which were
-/// not previously covered.
-///
-/// When calling this function externally (that is, not the recursive calls) it
-/// expected that you have already type checked the match arms. All patterns in
-/// matrix should be the same type as v, as well as they should all be the same
-/// type as the match expression.
-pub(super) fn is_useful(
- cx: &MatchCheckCtx,
- matrix: &Matrix,
- v: &PatStack,
-) -> MatchCheckResult<Usefulness> {
- // Handle two special cases:
- // - enum with no variants
- // - `!` type
- // In those cases, no match arm is useful.
- match cx.infer[cx.match_expr].strip_references() {
- Ty::Apply(ApplicationTy { ctor: TypeCtor::Adt(AdtId::EnumId(enum_id)), .. }) => {
- if cx.db.enum_data(*enum_id).variants.is_empty() {
- return Ok(Usefulness::NotUseful);
- }
- }
- Ty::Apply(ApplicationTy { ctor: TypeCtor::Never, .. }) => {
- return Ok(Usefulness::NotUseful);
- }
- _ => (),
- }
-
- let head = match v.get_head() {
- Some(head) => head,
- None => {
- let result = if matrix.is_empty() { Usefulness::Useful } else { Usefulness::NotUseful };
-
- return Ok(result);
- }
- };
-
- if let Pat::Or(pat_ids) = head.as_pat(cx) {
- let mut found_unimplemented = false;
- let any_useful = pat_ids.iter().any(|&pat_id| {
- let v = PatStack::from_pattern(pat_id);
-
- match is_useful(cx, matrix, &v) {
- Ok(Usefulness::Useful) => true,
- Ok(Usefulness::NotUseful) => false,
- _ => {
- found_unimplemented = true;
- false
- }
- }
- });
-
- return if any_useful {
- Ok(Usefulness::Useful)
- } else if found_unimplemented {
- Err(MatchCheckErr::NotImplemented)
- } else {
- Ok(Usefulness::NotUseful)
- };
- }
-
- if let Some(constructor) = pat_constructor(cx, head)? {
- let matrix = matrix.specialize_constructor(&cx, &constructor)?;
- let v = v
- .specialize_constructor(&cx, &constructor)?
- .expect("we know this can't fail because we get the constructor from `v.head()` above");
-
- is_useful(&cx, &matrix, &v)
- } else {
- // expanding wildcard
- let mut used_constructors: Vec<Constructor> = vec![];
- for pat in matrix.heads() {
- if let Some(constructor) = pat_constructor(cx, pat)? {
- used_constructors.push(constructor);
- }
- }
-
- // We assume here that the first constructor is the "correct" type. Since we
- // only care about the "type" of the constructor (i.e. if it is a bool we
- // don't care about the value), this assumption should be valid as long as
- // the match statement is well formed. We currently uphold this invariant by
- // filtering match arms before calling `is_useful`, only passing in match arms
- // whose type matches the type of the match expression.
- match &used_constructors.first() {
- Some(constructor) if all_constructors_covered(&cx, constructor, &used_constructors) => {
- // If all constructors are covered, then we need to consider whether
- // any values are covered by this wildcard.
- //
- // For example, with matrix '[[Some(true)], [None]]', all
- // constructors are covered (`Some`/`None`), so we need
- // to perform specialization to see that our wildcard will cover
- // the `Some(false)` case.
- //
- // Here we create a constructor for each variant and then check
- // usefulness after specializing for that constructor.
- let mut found_unimplemented = false;
- for constructor in constructor.all_constructors(cx) {
- let matrix = matrix.specialize_constructor(&cx, &constructor)?;
- let v = v.expand_wildcard(&cx, &constructor)?;
-
- match is_useful(&cx, &matrix, &v) {
- Ok(Usefulness::Useful) => return Ok(Usefulness::Useful),
- Ok(Usefulness::NotUseful) => continue,
- _ => found_unimplemented = true,
- };
- }
-
- if found_unimplemented {
- Err(MatchCheckErr::NotImplemented)
- } else {
- Ok(Usefulness::NotUseful)
- }
- }
- _ => {
- // Either not all constructors are covered, or the only other arms
- // are wildcards. Either way, this pattern is useful if it is useful
- // when compared to those arms with wildcards.
- let matrix = matrix.specialize_wildcard(&cx);
- let v = v.to_tail();
-
- is_useful(&cx, &matrix, &v)
- }
- }
- }
-}
-
-#[derive(Debug, Clone, Copy)]
-/// Similar to TypeCtor, but includes additional information about the specific
-/// value being instantiated. For example, TypeCtor::Bool doesn't contain the
-/// boolean value.
-enum Constructor {
- Bool(bool),
- Tuple { arity: usize },
- Enum(EnumVariantId),
-}
-
-impl Constructor {
- fn arity(&self, cx: &MatchCheckCtx) -> MatchCheckResult<usize> {
- let arity = match self {
- Constructor::Bool(_) => 0,
- Constructor::Tuple { arity } => *arity,
- Constructor::Enum(e) => {
- match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() {
- VariantData::Tuple(struct_field_data) => struct_field_data.len(),
- VariantData::Record(struct_field_data) => struct_field_data.len(),
- VariantData::Unit => 0,
- }
- }
- };
-
- Ok(arity)
- }
-
- fn all_constructors(&self, cx: &MatchCheckCtx) -> Vec<Constructor> {
- match self {
- Constructor::Bool(_) => vec![Constructor::Bool(true), Constructor::Bool(false)],
- Constructor::Tuple { .. } => vec![*self],
- Constructor::Enum(e) => cx
- .db
- .enum_data(e.parent)
- .variants
- .iter()
- .map(|(local_id, _)| {
- Constructor::Enum(EnumVariantId { parent: e.parent, local_id })
- })
- .collect(),
- }
- }
-}
-
-/// Returns the constructor for the given pattern. Should only return None
-/// in the case of a Wild pattern.
-fn pat_constructor(cx: &MatchCheckCtx, pat: PatIdOrWild) -> MatchCheckResult<Option<Constructor>> {
- let res = match pat.as_pat(cx) {
- Pat::Wild => None,
- // FIXME somehow create the Tuple constructor with the proper arity. If there are
- // ellipsis, the arity is not equal to the number of patterns.
- Pat::Tuple { args: pats, ellipsis } if ellipsis.is_none() => {
- Some(Constructor::Tuple { arity: pats.len() })
- }
- Pat::Lit(lit_expr) => match cx.body.exprs[lit_expr] {
- Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)),
- _ => return Err(MatchCheckErr::NotImplemented),
- },
- Pat::TupleStruct { .. } | Pat::Path(_) | Pat::Record { .. } => {
- let pat_id = pat.as_id().expect("we already know this pattern is not a wild");
- let variant_id =
- cx.infer.variant_resolution_for_pat(pat_id).ok_or(MatchCheckErr::Unknown)?;
- match variant_id {
- VariantId::EnumVariantId(enum_variant_id) => {
- Some(Constructor::Enum(enum_variant_id))
- }
- _ => return Err(MatchCheckErr::NotImplemented),
- }
- }
- _ => return Err(MatchCheckErr::NotImplemented),
- };
-
- Ok(res)
-}
-
-fn all_constructors_covered(
- cx: &MatchCheckCtx,
- constructor: &Constructor,
- used_constructors: &[Constructor],
-) -> bool {
- match constructor {
- Constructor::Tuple { arity } => {
- used_constructors.iter().any(|constructor| match constructor {
- Constructor::Tuple { arity: used_arity } => arity == used_arity,
- _ => false,
- })
- }
- Constructor::Bool(_) => {
- if used_constructors.is_empty() {
- return false;
- }
-
- let covers_true =
- used_constructors.iter().any(|c| matches!(c, Constructor::Bool(true)));
- let covers_false =
- used_constructors.iter().any(|c| matches!(c, Constructor::Bool(false)));
-
- covers_true && covers_false
- }
- Constructor::Enum(e) => cx.db.enum_data(e.parent).variants.iter().all(|(id, _)| {
- for constructor in used_constructors {
- if let Constructor::Enum(e) = constructor {
- if id == e.local_id {
- return true;
- }
- }
- }
-
- false
- }),
- }
-}
-
-fn enum_variant_matches(cx: &MatchCheckCtx, pat_id: PatId, enum_variant_id: EnumVariantId) -> bool {
- Some(enum_variant_id.into()) == cx.infer.variant_resolution_for_pat(pat_id)
-}
-
-#[cfg(test)]
-mod tests {
- use crate::diagnostics::tests::check_diagnostics;
-
- #[test]
- fn empty_tuple() {
- check_diagnostics(
- r#"
-fn main() {
- match () { }
- //^^ Missing match arm
- match (()) { }
- //^^^^ Missing match arm
-
- match () { _ => (), }
- match () { () => (), }
- match (()) { (()) => (), }
-}
-"#,
- );
- }
-
- #[test]
- fn tuple_of_two_empty_tuple() {
- check_diagnostics(
- r#"
-fn main() {
- match ((), ()) { }
- //^^^^^^^^ Missing match arm
-
- match ((), ()) { ((), ()) => (), }
-}
-"#,
- );
- }
-
- #[test]
- fn boolean() {
- check_diagnostics(
- r#"
-fn test_main() {
- match false { }
- //^^^^^ Missing match arm
- match false { true => (), }
- //^^^^^ Missing match arm
- match (false, true) {}
- //^^^^^^^^^^^^^ Missing match arm
- match (false, true) { (true, true) => (), }
- //^^^^^^^^^^^^^ Missing match arm
- match (false, true) {
- //^^^^^^^^^^^^^ Missing match arm
- (false, true) => (),
- (false, false) => (),
- (true, false) => (),
- }
- match (false, true) { (true, _x) => (), }
- //^^^^^^^^^^^^^ Missing match arm
-
- match false { true => (), false => (), }
- match (false, true) {
- (false, _) => (),
- (true, false) => (),
- (_, true) => (),
- }
- match (false, true) {
- (true, true) => (),
- (true, false) => (),
- (false, true) => (),
- (false, false) => (),
- }
- match (false, true) {
- (true, _x) => (),
- (false, true) => (),
- (false, false) => (),
- }
- match (false, true, false) {
- (false, ..) => (),
- (true, ..) => (),
- }
- match (false, true, false) {
- (.., false) => (),
- (.., true) => (),
- }
- match (false, true, false) { (..) => (), }
-}
-"#,
- );
- }
-
- #[test]
- fn tuple_of_tuple_and_bools() {
- check_diagnostics(
- r#"
-fn main() {
- match (false, ((), false)) {}
- //^^^^^^^^^^^^^^^^^^^^ Missing match arm
- match (false, ((), false)) { (true, ((), true)) => (), }
- //^^^^^^^^^^^^^^^^^^^^ Missing match arm
- match (false, ((), false)) { (true, _) => (), }
- //^^^^^^^^^^^^^^^^^^^^ Missing match arm
-
- match (false, ((), false)) {
- (true, ((), true)) => (),
- (true, ((), false)) => (),
- (false, ((), true)) => (),
- (false, ((), false)) => (),
- }
- match (false, ((), false)) {
- (true, ((), true)) => (),
- (true, ((), false)) => (),
- (false, _) => (),
- }
-}
-"#,
- );
- }
-
- #[test]
- fn enums() {
- check_diagnostics(
- r#"
-enum Either { A, B, }
-
-fn main() {
- match Either::A { }
- //^^^^^^^^^ Missing match arm
- match Either::B { Either::A => (), }
- //^^^^^^^^^ Missing match arm
-
- match &Either::B {
- //^^^^^^^^^^ Missing match arm
- Either::A => (),
- }
-
- match Either::B {
- Either::A => (), Either::B => (),
- }
- match &Either::B {
- Either::A => (), Either::B => (),
- }
-}
-"#,
- );
- }
-
- #[test]
- fn enum_containing_bool() {
- check_diagnostics(
- r#"
-enum Either { A(bool), B }
-
-fn main() {
- match Either::B { }
- //^^^^^^^^^ Missing match arm
- match Either::B {
- //^^^^^^^^^ Missing match arm
- Either::A(true) => (), Either::B => ()
- }
-
- match Either::B {
- Either::A(true) => (),
- Either::A(false) => (),
- Either::B => (),
- }
- match Either::B {
- Either::B => (),
- _ => (),
- }
- match Either::B {
- Either::A(_) => (),
- Either::B => (),
- }
-
-}
- "#,
- );
- }
-
- #[test]
- fn enum_different_sizes() {
- check_diagnostics(
- r#"
-enum Either { A(bool), B(bool, bool) }
-
-fn main() {
- match Either::A(false) {
- //^^^^^^^^^^^^^^^^ Missing match arm
- Either::A(_) => (),
- Either::B(false, _) => (),
- }
-
- match Either::A(false) {
- Either::A(_) => (),
- Either::B(true, _) => (),
- Either::B(false, _) => (),
- }
- match Either::A(false) {
- Either::A(true) | Either::A(false) => (),
- Either::B(true, _) => (),
- Either::B(false, _) => (),
- }
-}
-"#,
- );
- }
-
- #[test]
- fn tuple_of_enum_no_diagnostic() {
- check_diagnostics(
- r#"
-enum Either { A(bool), B(bool, bool) }
-enum Either2 { C, D }
-
-fn main() {
- match (Either::A(false), Either2::C) {
- (Either::A(true), _) | (Either::A(false), _) => (),
- (Either::B(true, _), Either2::C) => (),
- (Either::B(false, _), Either2::C) => (),
- (Either::B(_, _), Either2::D) => (),
- }
-}
-"#,
- );
- }
-
- #[test]
- fn mismatched_types() {
- // Match statements with arms that don't match the
- // expression pattern do not fire this diagnostic.
- check_diagnostics(
- r#"
-enum Either { A, B }
-enum Either2 { C, D }
-
-fn main() {
- match Either::A {
- Either2::C => (),
- Either2::D => (),
- }
- match (true, false) {
- (true, false, true) => (),
- (true) => (),
- }
- match (0) { () => () }
- match Unresolved::Bar { Unresolved::Baz => () }
-}
- "#,
- );
- }
-
- #[test]
- fn malformed_match_arm_tuple_enum_missing_pattern() {
- // We are testing to be sure we don't panic here when the match
- // arm `Either::B` is missing its pattern.
- check_diagnostics(
- r#"
-enum Either { A, B(u32) }
-
-fn main() {
- match Either::A {
- Either::A => (),
- Either::B() => (),
- }
-}
-"#,
- );
- }
-
- #[test]
- fn expr_diverges() {
- check_diagnostics(
- r#"
-enum Either { A, B }
-
-fn main() {
- match loop {} {
- Either::A => (),
- Either::B => (),
- }
- match loop {} {
- Either::A => (),
- }
- match loop { break Foo::A } {
- //^^^^^^^^^^^^^^^^^^^^^ Missing match arm
- Either::A => (),
- }
- match loop { break Foo::A } {
- Either::A => (),
- Either::B => (),
- }
-}
-"#,
- );
- }
-
- #[test]
- fn expr_partially_diverges() {
- check_diagnostics(
- r#"
-enum Either<T> { A(T), B }
-
-fn foo() -> Either<!> { Either::B }
-fn main() -> u32 {
- match foo() {
- Either::A(val) => val,
- Either::B => 0,
- }
-}
-"#,
- );
- }
-
- #[test]
- fn enum_record() {
- check_diagnostics(
- r#"
-enum Either { A { foo: bool }, B }
-
-fn main() {
- let a = Either::A { foo: true };
- match a { }
- //^ Missing match arm
- match a { Either::A { foo: true } => () }
- //^ Missing match arm
- match a {
- Either::A { } => (),
- //^^^^^^^^^ Missing structure fields:
- // | - foo
- Either::B => (),
- }
- match a {
- //^ Missing match arm
- Either::A { } => (),
- } //^^^^^^^^^ Missing structure fields:
- // | - foo
-
- match a {
- Either::A { foo: true } => (),
- Either::A { foo: false } => (),
- Either::B => (),
- }
- match a {
- Either::A { foo: _ } => (),
- Either::B => (),
- }
-}
-"#,
- );
- }
-
- #[test]
- fn enum_record_fields_out_of_order() {
- check_diagnostics(
- r#"
-enum Either {
- A { foo: bool, bar: () },
- B,
-}
-
-fn main() {
- let a = Either::A { foo: true, bar: () };
- match a {
- //^ Missing match arm
- Either::A { bar: (), foo: false } => (),
- Either::A { foo: true, bar: () } => (),
- }
-
- match a {
- Either::A { bar: (), foo: false } => (),
- Either::A { foo: true, bar: () } => (),
- Either::B => (),
- }
-}
-"#,
- );
- }
-
- #[test]
- fn enum_record_ellipsis() {
- check_diagnostics(
- r#"
-enum Either {
- A { foo: bool, bar: bool },
- B,
-}
-
-fn main() {
- let a = Either::B;
- match a {
- //^ Missing match arm
- Either::A { foo: true, .. } => (),
- Either::B => (),
- }
- match a {
- //^ Missing match arm
- Either::A { .. } => (),
- }
-
- match a {
- Either::A { foo: true, .. } => (),
- Either::A { foo: false, .. } => (),
- Either::B => (),
- }
-
- match a {
- Either::A { .. } => (),
- Either::B => (),
- }
-}
-"#,
- );
- }
-
- #[test]
- fn enum_tuple_partial_ellipsis() {
- check_diagnostics(
- r#"
-enum Either {
- A(bool, bool, bool, bool),
- B,
-}
-
-fn main() {
- match Either::B {
- //^^^^^^^^^ Missing match arm
- Either::A(true, .., true) => (),
- Either::A(true, .., false) => (),
- Either::A(false, .., false) => (),
- Either::B => (),
- }
- match Either::B {
- //^^^^^^^^^ Missing match arm
- Either::A(true, .., true) => (),
- Either::A(true, .., false) => (),
- Either::A(.., true) => (),
- Either::B => (),
- }
-
- match Either::B {
- Either::A(true, .., true) => (),
- Either::A(true, .., false) => (),
- Either::A(false, .., true) => (),
- Either::A(false, .., false) => (),
- Either::B => (),
- }
- match Either::B {
- Either::A(true, .., true) => (),
- Either::A(true, .., false) => (),
- Either::A(.., true) => (),
- Either::A(.., false) => (),
- Either::B => (),
- }
-}
-"#,
- );
- }
-
- #[test]
- fn never() {
- check_diagnostics(
- r#"
-enum Never {}
-
-fn enum_(never: Never) {
- match never {}
-}
-fn enum_ref(never: &Never) {
- match never {}
-}
-fn bang(never: !) {
- match never {}
-}
-"#,
- );
- }
-
- #[test]
- fn or_pattern_panic() {
- check_diagnostics(
- r#"
-pub enum Category { Infinity, Zero }
-
-fn panic(a: Category, b: Category) {
- match (a, b) {
- (Category::Zero | Category::Infinity, _) => (),
- (_, Category::Zero | Category::Infinity) => (),
- }
-
- // FIXME: This is a false positive, but the code used to cause a panic in the match checker,
- // so this acts as a regression test for that.
- match (a, b) {
- //^^^^^^ Missing match arm
- (Category::Infinity, Category::Infinity) | (Category::Zero, Category::Zero) => (),
- (Category::Infinity | Category::Zero, _) => (),
- }
-}
-"#,
- );
- }
-
- mod false_negatives {
- //! The implementation of match checking here is a work in progress. As we roll this out, we
- //! prefer false negatives to false positives (ideally there would be no false positives). This
- //! test module should document known false negatives. Eventually we will have a complete
- //! implementation of match checking and this module will be empty.
- //!
- //! The reasons for documenting known false negatives:
- //!
- //! 1. It acts as a backlog of work that can be done to improve the behavior of the system.
- //! 2. It ensures the code doesn't panic when handling these cases.
- use super::*;
-
- #[test]
- fn integers() {
- // We don't currently check integer exhaustiveness.
- check_diagnostics(
- r#"
-fn main() {
- match 5 {
- 10 => (),
- 11..20 => (),
- }
-}
-"#,
- );
- }
-
- #[test]
- fn internal_or() {
- // We do not currently handle patterns with internal `or`s.
- check_diagnostics(
- r#"
-fn main() {
- enum Either { A(bool), B }
- match Either::B {
- Either::A(true | false) => (),
- }
-}
-"#,
- );
- }
-
- #[test]
- fn tuple_of_bools_with_ellipsis_at_end_missing_arm() {
- // We don't currently handle tuple patterns with ellipsis.
- check_diagnostics(
- r#"
-fn main() {
- match (false, true, false) {
- (false, ..) => (),
- }
-}
-"#,
- );
- }
-
- #[test]
- fn tuple_of_bools_with_ellipsis_at_beginning_missing_arm() {
- // We don't currently handle tuple patterns with ellipsis.
- check_diagnostics(
- r#"
-fn main() {
- match (false, true, false) {
- (.., false) => (),
- }
-}
-"#,
- );
- }
-
- #[test]
- fn struct_missing_arm() {
- // We don't currently handle structs.
- check_diagnostics(
- r#"
-struct Foo { a: bool }
-fn main(f: Foo) {
- match f { Foo { a: true } => () }
-}
-"#,
- );
- }
- }
-}
+++ /dev/null
-//! Provides validations for unsafe code. Currently checks if unsafe functions are missing
-//! unsafe blocks.
-
-use std::sync::Arc;
-
-use hir_def::{
- body::Body,
- expr::{Expr, ExprId, UnaryOp},
- resolver::{resolver_for_expr, ResolveValueResult, ValueNs},
- DefWithBodyId,
-};
-use hir_expand::diagnostics::DiagnosticSink;
-
-use crate::{
- db::HirDatabase, diagnostics::MissingUnsafe, lower::CallableDefId, ApplicationTy,
- InferenceResult, Ty, TypeCtor,
-};
-
-pub(super) struct UnsafeValidator<'a, 'b: 'a> {
- owner: DefWithBodyId,
- infer: Arc<InferenceResult>,
- sink: &'a mut DiagnosticSink<'b>,
-}
-
-impl<'a, 'b> UnsafeValidator<'a, 'b> {
- pub(super) fn new(
- owner: DefWithBodyId,
- infer: Arc<InferenceResult>,
- sink: &'a mut DiagnosticSink<'b>,
- ) -> UnsafeValidator<'a, 'b> {
- UnsafeValidator { owner, infer, sink }
- }
-
- pub(super) fn validate_body(&mut self, db: &dyn HirDatabase) {
- let def = self.owner.into();
- let unsafe_expressions = unsafe_expressions(db, self.infer.as_ref(), def);
- let is_unsafe = match self.owner {
- DefWithBodyId::FunctionId(it) => db.function_data(it).is_unsafe,
- DefWithBodyId::StaticId(_) | DefWithBodyId::ConstId(_) => false,
- };
- if is_unsafe
- || unsafe_expressions
- .iter()
- .filter(|unsafe_expr| !unsafe_expr.inside_unsafe_block)
- .count()
- == 0
- {
- return;
- }
-
- let (_, body_source) = db.body_with_source_map(def);
- for unsafe_expr in unsafe_expressions {
- if !unsafe_expr.inside_unsafe_block {
- if let Ok(in_file) = body_source.as_ref().expr_syntax(unsafe_expr.expr) {
- self.sink.push(MissingUnsafe { file: in_file.file_id, expr: in_file.value })
- }
- }
- }
- }
-}
-
-pub struct UnsafeExpr {
- pub expr: ExprId,
- pub inside_unsafe_block: bool,
-}
-
-pub fn unsafe_expressions(
- db: &dyn HirDatabase,
- infer: &InferenceResult,
- def: DefWithBodyId,
-) -> Vec<UnsafeExpr> {
- let mut unsafe_exprs = vec![];
- let body = db.body(def);
- walk_unsafe(&mut unsafe_exprs, db, infer, def, &body, body.body_expr, false);
-
- unsafe_exprs
-}
-
-fn walk_unsafe(
- unsafe_exprs: &mut Vec<UnsafeExpr>,
- db: &dyn HirDatabase,
- infer: &InferenceResult,
- def: DefWithBodyId,
- body: &Body,
- current: ExprId,
- inside_unsafe_block: bool,
-) {
- let expr = &body.exprs[current];
- match expr {
- Expr::Call { callee, .. } => {
- let ty = &infer[*callee];
- if let &Ty::Apply(ApplicationTy {
- ctor: TypeCtor::FnDef(CallableDefId::FunctionId(func)),
- ..
- }) = ty
- {
- if db.function_data(func).is_unsafe {
- unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block });
- }
- }
- }
- Expr::Path(path) => {
- let resolver = resolver_for_expr(db.upcast(), def, current);
- let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path.mod_path());
- if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id))) = value_or_partial {
- if db.static_data(id).mutable {
- unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block });
- }
- }
- }
- Expr::MethodCall { .. } => {
- if infer
- .method_resolution(current)
- .map(|func| db.function_data(func).is_unsafe)
- .unwrap_or(false)
- {
- unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block });
- }
- }
- Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
- if let Ty::Apply(ApplicationTy { ctor: TypeCtor::RawPtr(..), .. }) = &infer[*expr] {
- unsafe_exprs.push(UnsafeExpr { expr: current, inside_unsafe_block });
- }
- }
- Expr::Unsafe { body: child } => {
- return walk_unsafe(unsafe_exprs, db, infer, def, body, *child, true);
- }
- _ => {}
- }
-
- expr.walk_child_exprs(|child| {
- walk_unsafe(unsafe_exprs, db, infer, def, body, child, inside_unsafe_block);
- });
-}
-
-#[cfg(test)]
-mod tests {
- use crate::diagnostics::tests::check_diagnostics;
-
- #[test]
- fn missing_unsafe_diagnostic_with_raw_ptr() {
- check_diagnostics(
- r#"
-fn main() {
- let x = &5 as *const usize;
- unsafe { let y = *x; }
- let z = *x;
-} //^^ This operation is unsafe and requires an unsafe function or block
-"#,
- )
- }
-
- #[test]
- fn missing_unsafe_diagnostic_with_unsafe_call() {
- check_diagnostics(
- r#"
-struct HasUnsafe;
-
-impl HasUnsafe {
- unsafe fn unsafe_fn(&self) {
- let x = &5 as *const usize;
- let y = *x;
- }
-}
-
-unsafe fn unsafe_fn() {
- let x = &5 as *const usize;
- let y = *x;
-}
-
-fn main() {
- unsafe_fn();
- //^^^^^^^^^^^ This operation is unsafe and requires an unsafe function or block
- HasUnsafe.unsafe_fn();
- //^^^^^^^^^^^^^^^^^^^^^ This operation is unsafe and requires an unsafe function or block
- unsafe {
- unsafe_fn();
- HasUnsafe.unsafe_fn();
- }
-}
-"#,
- );
- }
-
- #[test]
- fn missing_unsafe_diagnostic_with_static_mut() {
- check_diagnostics(
- r#"
-struct Ty {
- a: u8,
-}
-
-static mut static_mut: Ty = Ty { a: 0 };
-
-fn main() {
- let x = static_mut.a;
- //^^^^^^^^^^ This operation is unsafe and requires an unsafe function or block
- unsafe {
- let x = static_mut.a;
- }
-}
-"#,
- );
- }
-}
+++ /dev/null
-//! FIXME: write short doc here
-
-use std::fmt;
-
-use crate::{
- db::HirDatabase, utils::generics, ApplicationTy, CallableDefId, FnSig, GenericPredicate,
- Obligation, OpaqueTyId, ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
-};
-use hir_def::{
- find_path, generics::TypeParamProvenance, item_scope::ItemInNs, AdtId, AssocContainerId,
- Lookup, ModuleId,
-};
-use hir_expand::name::Name;
-
-pub struct HirFormatter<'a> {
- pub db: &'a dyn HirDatabase,
- fmt: &'a mut dyn fmt::Write,
- buf: String,
- curr_size: usize,
- pub(crate) max_size: Option<usize>,
- omit_verbose_types: bool,
- display_target: DisplayTarget,
-}
-
-pub trait HirDisplay {
- fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError>;
-
- /// Returns a `Display`able type that is human-readable.
- /// Use this for showing types to the user (e.g. diagnostics)
- fn display<'a>(&'a self, db: &'a dyn HirDatabase) -> HirDisplayWrapper<'a, Self>
- where
- Self: Sized,
- {
- HirDisplayWrapper {
- db,
- t: self,
- max_size: None,
- omit_verbose_types: false,
- display_target: DisplayTarget::Diagnostics,
- }
- }
-
- /// Returns a `Display`able type that is human-readable and tries to be succinct.
- /// Use this for showing types to the user where space is constrained (e.g. doc popups)
- fn display_truncated<'a>(
- &'a self,
- db: &'a dyn HirDatabase,
- max_size: Option<usize>,
- ) -> HirDisplayWrapper<'a, Self>
- where
- Self: Sized,
- {
- HirDisplayWrapper {
- db,
- t: self,
- max_size,
- omit_verbose_types: true,
- display_target: DisplayTarget::Diagnostics,
- }
- }
-
- /// Returns a String representation of `self` that can be inserted into the given module.
- /// Use this when generating code (e.g. assists)
- fn display_source_code<'a>(
- &'a self,
- db: &'a dyn HirDatabase,
- module_id: ModuleId,
- ) -> Result<String, DisplaySourceCodeError> {
- let mut result = String::new();
- match self.hir_fmt(&mut HirFormatter {
- db,
- fmt: &mut result,
- buf: String::with_capacity(20),
- curr_size: 0,
- max_size: None,
- omit_verbose_types: false,
- display_target: DisplayTarget::SourceCode { module_id },
- }) {
- Ok(()) => {}
- Err(HirDisplayError::FmtError) => panic!("Writing to String can't fail!"),
- Err(HirDisplayError::DisplaySourceCodeError(e)) => return Err(e),
- };
- Ok(result)
- }
-}
-
-impl<'a> HirFormatter<'a> {
- pub fn write_joined<T: HirDisplay>(
- &mut self,
- iter: impl IntoIterator<Item = T>,
- sep: &str,
- ) -> Result<(), HirDisplayError> {
- let mut first = true;
- for e in iter {
- if !first {
- write!(self, "{}", sep)?;
- }
- first = false;
- e.hir_fmt(self)?;
- }
- Ok(())
- }
-
- /// This allows using the `write!` macro directly with a `HirFormatter`.
- pub fn write_fmt(&mut self, args: fmt::Arguments) -> Result<(), HirDisplayError> {
- // We write to a buffer first to track output size
- self.buf.clear();
- fmt::write(&mut self.buf, args)?;
- self.curr_size += self.buf.len();
-
- // Then we write to the internal formatter from the buffer
- self.fmt.write_str(&self.buf).map_err(HirDisplayError::from)
- }
-
- pub fn should_truncate(&self) -> bool {
- if let Some(max_size) = self.max_size {
- self.curr_size >= max_size
- } else {
- false
- }
- }
-
- pub fn omit_verbose_types(&self) -> bool {
- self.omit_verbose_types
- }
-}
-
-#[derive(Clone, Copy)]
-enum DisplayTarget {
- /// Display types for inlays, doc popups, autocompletion, etc...
- /// Showing `{unknown}` or not qualifying paths is fine here.
- /// There's no reason for this to fail.
- Diagnostics,
- /// Display types for inserting them in source files.
- /// The generated code should compile, so paths need to be qualified.
- SourceCode { module_id: ModuleId },
-}
-
-impl DisplayTarget {
- fn is_source_code(&self) -> bool {
- matches!(self, Self::SourceCode {..})
- }
-}
-
-#[derive(Debug)]
-pub enum DisplaySourceCodeError {
- PathNotFound,
-}
-
-pub enum HirDisplayError {
- /// Errors that can occur when generating source code
- DisplaySourceCodeError(DisplaySourceCodeError),
- /// `FmtError` is required to be compatible with std::fmt::Display
- FmtError,
-}
-impl From<fmt::Error> for HirDisplayError {
- fn from(_: fmt::Error) -> Self {
- Self::FmtError
- }
-}
-
-pub struct HirDisplayWrapper<'a, T> {
- db: &'a dyn HirDatabase,
- t: &'a T,
- max_size: Option<usize>,
- omit_verbose_types: bool,
- display_target: DisplayTarget,
-}
-
-impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T>
-where
- T: HirDisplay,
-{
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self.t.hir_fmt(&mut HirFormatter {
- db: self.db,
- fmt: f,
- buf: String::with_capacity(20),
- curr_size: 0,
- max_size: self.max_size,
- omit_verbose_types: self.omit_verbose_types,
- display_target: self.display_target,
- }) {
- Ok(()) => Ok(()),
- Err(HirDisplayError::FmtError) => Err(fmt::Error),
- Err(HirDisplayError::DisplaySourceCodeError(_)) => {
- // This should never happen
- panic!("HirDisplay failed when calling Display::fmt!")
- }
- }
- }
-}
-
-const TYPE_HINT_TRUNCATION: &str = "…";
-
-impl HirDisplay for &Ty {
- fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
- HirDisplay::hir_fmt(*self, f)
- }
-}
-
-impl HirDisplay for ApplicationTy {
- fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
- if f.should_truncate() {
- return write!(f, "{}", TYPE_HINT_TRUNCATION);
- }
-
- match self.ctor {
- TypeCtor::Bool => write!(f, "bool")?,
- TypeCtor::Char => write!(f, "char")?,
- TypeCtor::Int(t) => write!(f, "{}", t)?,
- TypeCtor::Float(t) => write!(f, "{}", t)?,
- TypeCtor::Str => write!(f, "str")?,
- TypeCtor::Slice => {
- let t = self.parameters.as_single();
- write!(f, "[{}]", t.display(f.db))?;
- }
- TypeCtor::Array => {
- let t = self.parameters.as_single();
- write!(f, "[{}; _]", t.display(f.db))?;
- }
- TypeCtor::RawPtr(m) => {
- let t = self.parameters.as_single();
- write!(f, "*{}{}", m.as_keyword_for_ptr(), t.display(f.db))?;
- }
- TypeCtor::Ref(m) => {
- let t = self.parameters.as_single();
- let ty_display = if f.omit_verbose_types() {
- t.display_truncated(f.db, f.max_size)
- } else {
- t.display(f.db)
- };
- write!(f, "&{}{}", m.as_keyword_for_ref(), ty_display)?;
- }
- TypeCtor::Never => write!(f, "!")?,
- TypeCtor::Tuple { .. } => {
- let ts = &self.parameters;
- if ts.len() == 1 {
- write!(f, "({},)", ts[0].display(f.db))?;
- } else {
- write!(f, "(")?;
- f.write_joined(&*ts.0, ", ")?;
- write!(f, ")")?;
- }
- }
- TypeCtor::FnPtr { is_varargs, .. } => {
- let sig = FnSig::from_fn_ptr_substs(&self.parameters, is_varargs);
- write!(f, "fn(")?;
- f.write_joined(sig.params(), ", ")?;
- if is_varargs {
- if sig.params().is_empty() {
- write!(f, "...")?;
- } else {
- write!(f, ", ...")?;
- }
- }
- write!(f, ")")?;
- let ret = sig.ret();
- if *ret != Ty::unit() {
- let ret_display = if f.omit_verbose_types() {
- ret.display_truncated(f.db, f.max_size)
- } else {
- ret.display(f.db)
- };
- write!(f, " -> {}", ret_display)?;
- }
- }
- TypeCtor::FnDef(def) => {
- let sig = f.db.callable_item_signature(def).subst(&self.parameters);
- match def {
- CallableDefId::FunctionId(ff) => {
- write!(f, "fn {}", f.db.function_data(ff).name)?
- }
- CallableDefId::StructId(s) => write!(f, "{}", f.db.struct_data(s).name)?,
- CallableDefId::EnumVariantId(e) => {
- write!(f, "{}", f.db.enum_data(e.parent).variants[e.local_id].name)?
- }
- };
- if self.parameters.len() > 0 {
- let generics = generics(f.db.upcast(), def.into());
- let (parent_params, self_param, type_params, _impl_trait_params) =
- generics.provenance_split();
- let total_len = parent_params + self_param + type_params;
- // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
- if total_len > 0 {
- write!(f, "<")?;
- f.write_joined(&self.parameters.0[..total_len], ", ")?;
- write!(f, ">")?;
- }
- }
- write!(f, "(")?;
- f.write_joined(sig.params(), ", ")?;
- write!(f, ")")?;
- let ret = sig.ret();
- if *ret != Ty::unit() {
- let ret_display = if f.omit_verbose_types() {
- ret.display_truncated(f.db, f.max_size)
- } else {
- ret.display(f.db)
- };
- write!(f, " -> {}", ret_display)?;
- }
- }
- TypeCtor::Adt(def_id) => {
- match f.display_target {
- DisplayTarget::Diagnostics => {
- let name = match def_id {
- AdtId::StructId(it) => f.db.struct_data(it).name.clone(),
- AdtId::UnionId(it) => f.db.union_data(it).name.clone(),
- AdtId::EnumId(it) => f.db.enum_data(it).name.clone(),
- };
- write!(f, "{}", name)?;
- }
- DisplayTarget::SourceCode { module_id } => {
- if let Some(path) = find_path::find_path(
- f.db.upcast(),
- ItemInNs::Types(def_id.into()),
- module_id,
- ) {
- write!(f, "{}", path)?;
- } else {
- return Err(HirDisplayError::DisplaySourceCodeError(
- DisplaySourceCodeError::PathNotFound,
- ));
- }
- }
- }
-
- if self.parameters.len() > 0 {
- let parameters_to_write =
- if f.display_target.is_source_code() || f.omit_verbose_types() {
- match self
- .ctor
- .as_generic_def()
- .map(|generic_def_id| f.db.generic_defaults(generic_def_id))
- .filter(|defaults| !defaults.is_empty())
- {
- None => self.parameters.0.as_ref(),
- Some(default_parameters) => {
- let mut default_from = 0;
- for (i, parameter) in self.parameters.iter().enumerate() {
- match (parameter, default_parameters.get(i)) {
- (&Ty::Unknown, _) | (_, None) => {
- default_from = i + 1;
- }
- (_, Some(default_parameter)) => {
- let actual_default = default_parameter
- .clone()
- .subst(&self.parameters.prefix(i));
- if parameter != &actual_default {
- default_from = i + 1;
- }
- }
- }
- }
- &self.parameters.0[0..default_from]
- }
- }
- } else {
- self.parameters.0.as_ref()
- };
- if !parameters_to_write.is_empty() {
- write!(f, "<")?;
- f.write_joined(parameters_to_write, ", ")?;
- write!(f, ">")?;
- }
- }
- }
- TypeCtor::AssociatedType(type_alias) => {
- let trait_ = match type_alias.lookup(f.db.upcast()).container {
- AssocContainerId::TraitId(it) => it,
- _ => panic!("not an associated type"),
- };
- let trait_ = f.db.trait_data(trait_);
- let type_alias = f.db.type_alias_data(type_alias);
- write!(f, "{}::{}", trait_.name, type_alias.name)?;
- if self.parameters.len() > 0 {
- write!(f, "<")?;
- f.write_joined(&*self.parameters.0, ", ")?;
- write!(f, ">")?;
- }
- }
- TypeCtor::OpaqueType(opaque_ty_id) => {
- let bounds = match opaque_ty_id {
- OpaqueTyId::ReturnTypeImplTrait(func, idx) => {
- let datas =
- f.db.return_type_impl_traits(func).expect("impl trait id without data");
- let data = (*datas)
- .as_ref()
- .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
- data.subst(&self.parameters)
- }
- };
- write!(f, "impl ")?;
- write_bounds_like_dyn_trait(&bounds.value, f)?;
- // FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution
- }
- TypeCtor::Closure { .. } => {
- let sig = self.parameters[0].callable_sig(f.db);
- if let Some(sig) = sig {
- if sig.params().is_empty() {
- write!(f, "||")?;
- } else if f.omit_verbose_types() {
- write!(f, "|{}|", TYPE_HINT_TRUNCATION)?;
- } else {
- write!(f, "|")?;
- f.write_joined(sig.params(), ", ")?;
- write!(f, "|")?;
- };
-
- let ret_display = if f.omit_verbose_types() {
- sig.ret().display_truncated(f.db, f.max_size)
- } else {
- sig.ret().display(f.db)
- };
- write!(f, " -> {}", ret_display)?;
- } else {
- write!(f, "{{closure}}")?;
- }
- }
- }
- Ok(())
- }
-}
-
-impl HirDisplay for ProjectionTy {
- fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
- if f.should_truncate() {
- return write!(f, "{}", TYPE_HINT_TRUNCATION);
- }
-
- let trait_ = f.db.trait_data(self.trait_(f.db));
- write!(f, "<{} as {}", self.parameters[0].display(f.db), trait_.name)?;
- if self.parameters.len() > 1 {
- write!(f, "<")?;
- f.write_joined(&self.parameters[1..], ", ")?;
- write!(f, ">")?;
- }
- write!(f, ">::{}", f.db.type_alias_data(self.associated_ty).name)?;
- Ok(())
- }
-}
-
-impl HirDisplay for Ty {
- fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
- if f.should_truncate() {
- return write!(f, "{}", TYPE_HINT_TRUNCATION);
- }
-
- match self {
- Ty::Apply(a_ty) => a_ty.hir_fmt(f)?,
- Ty::Projection(p_ty) => p_ty.hir_fmt(f)?,
- Ty::Placeholder(id) => {
- let generics = generics(f.db.upcast(), id.parent);
- let param_data = &generics.params.types[id.local_id];
- match param_data.provenance {
- TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
- write!(f, "{}", param_data.name.clone().unwrap_or_else(Name::missing))?
- }
- TypeParamProvenance::ArgumentImplTrait => {
- write!(f, "impl ")?;
- let bounds = f.db.generic_predicates_for_param(*id);
- let substs = Substs::type_params_for_generics(&generics);
- write_bounds_like_dyn_trait(
- &bounds.iter().map(|b| b.clone().subst(&substs)).collect::<Vec<_>>(),
- f,
- )?;
- }
- }
- }
- Ty::Bound(idx) => write!(f, "?{}.{}", idx.debruijn.depth(), idx.index)?,
- Ty::Dyn(predicates) => {
- write!(f, "dyn ")?;
- write_bounds_like_dyn_trait(predicates, f)?;
- }
- Ty::Opaque(opaque_ty) => {
- let bounds = match opaque_ty.opaque_ty_id {
- OpaqueTyId::ReturnTypeImplTrait(func, idx) => {
- let datas =
- f.db.return_type_impl_traits(func).expect("impl trait id without data");
- let data = (*datas)
- .as_ref()
- .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
- data.subst(&opaque_ty.parameters)
- }
- };
- write!(f, "impl ")?;
- write_bounds_like_dyn_trait(&bounds.value, f)?;
- }
- Ty::Unknown => write!(f, "{{unknown}}")?,
- Ty::Infer(..) => write!(f, "_")?,
- }
- Ok(())
- }
-}
-
-fn write_bounds_like_dyn_trait(
- predicates: &[GenericPredicate],
- f: &mut HirFormatter,
-) -> Result<(), HirDisplayError> {
- // Note: This code is written to produce nice results (i.e.
- // corresponding to surface Rust) for types that can occur in
- // actual Rust. It will have weird results if the predicates
- // aren't as expected (i.e. self types = $0, projection
- // predicates for a certain trait come after the Implemented
- // predicate for that trait).
- let mut first = true;
- let mut angle_open = false;
- for p in predicates.iter() {
- match p {
- GenericPredicate::Implemented(trait_ref) => {
- if angle_open {
- write!(f, ">")?;
- }
- if !first {
- write!(f, " + ")?;
- }
- // We assume that the self type is $0 (i.e. the
- // existential) here, which is the only thing that's
- // possible in actual Rust, and hence don't print it
- write!(f, "{}", f.db.trait_data(trait_ref.trait_).name)?;
- if trait_ref.substs.len() > 1 {
- write!(f, "<")?;
- f.write_joined(&trait_ref.substs[1..], ", ")?;
- // there might be assoc type bindings, so we leave the angle brackets open
- angle_open = true;
- }
- }
- GenericPredicate::Projection(projection_pred) => {
- // in types in actual Rust, these will always come
- // after the corresponding Implemented predicate
- if angle_open {
- write!(f, ", ")?;
- } else {
- write!(f, "<")?;
- angle_open = true;
- }
- let type_alias = f.db.type_alias_data(projection_pred.projection_ty.associated_ty);
- write!(f, "{} = ", type_alias.name)?;
- projection_pred.ty.hir_fmt(f)?;
- }
- GenericPredicate::Error => {
- if angle_open {
- // impl Trait<X, {error}>
- write!(f, ", ")?;
- } else if !first {
- // impl Trait + {error}
- write!(f, " + ")?;
- }
- p.hir_fmt(f)?;
- }
- }
- first = false;
- }
- if angle_open {
- write!(f, ">")?;
- }
- Ok(())
-}
-
-impl TraitRef {
- fn hir_fmt_ext(&self, f: &mut HirFormatter, use_as: bool) -> Result<(), HirDisplayError> {
- if f.should_truncate() {
- return write!(f, "{}", TYPE_HINT_TRUNCATION);
- }
-
- self.substs[0].hir_fmt(f)?;
- if use_as {
- write!(f, " as ")?;
- } else {
- write!(f, ": ")?;
- }
- write!(f, "{}", f.db.trait_data(self.trait_).name)?;
- if self.substs.len() > 1 {
- write!(f, "<")?;
- f.write_joined(&self.substs[1..], ", ")?;
- write!(f, ">")?;
- }
- Ok(())
- }
-}
-
-impl HirDisplay for TraitRef {
- fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
- self.hir_fmt_ext(f, false)
- }
-}
-
-impl HirDisplay for &GenericPredicate {
- fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
- HirDisplay::hir_fmt(*self, f)
- }
-}
-
-impl HirDisplay for GenericPredicate {
- fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
- if f.should_truncate() {
- return write!(f, "{}", TYPE_HINT_TRUNCATION);
- }
-
- match self {
- GenericPredicate::Implemented(trait_ref) => trait_ref.hir_fmt(f)?,
- GenericPredicate::Projection(projection_pred) => {
- write!(f, "<")?;
- projection_pred.projection_ty.trait_ref(f.db).hir_fmt_ext(f, true)?;
- write!(
- f,
- ">::{} = {}",
- f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name,
- projection_pred.ty.display(f.db)
- )?;
- }
- GenericPredicate::Error => write!(f, "{{error}}")?,
- }
- Ok(())
- }
-}
-
-impl HirDisplay for Obligation {
- fn hir_fmt(&self, f: &mut HirFormatter) -> Result<(), HirDisplayError> {
- Ok(match self {
- Obligation::Trait(tr) => write!(f, "Implements({})", tr.display(f.db))?,
- Obligation::Projection(proj) => write!(
- f,
- "Normalize({} => {})",
- proj.projection_ty.display(f.db),
- proj.ty.display(f.db)
- )?,
- })
- }
-}
+++ /dev/null
-//! Type inference, i.e. the process of walking through the code and determining
-//! the type of each expression and pattern.
-//!
-//! For type inference, compare the implementations in rustc (the various
-//! check_* methods in librustc_typeck/check/mod.rs are a good entry point) and
-//! IntelliJ-Rust (org.rust.lang.core.types.infer). Our entry point for
-//! inference here is the `infer` function, which infers the types of all
-//! expressions in a given function.
-//!
-//! During inference, types (i.e. the `Ty` struct) can contain type 'variables'
-//! which represent currently unknown types; as we walk through the expressions,
-//! we might determine that certain variables need to be equal to each other, or
-//! to certain types. To record this, we use the union-find implementation from
-//! the `ena` crate, which is extracted from rustc.
-
-use std::borrow::Cow;
-use std::mem;
-use std::ops::Index;
-use std::sync::Arc;
-
-use arena::map::ArenaMap;
-use hir_def::{
- body::Body,
- data::{ConstData, FunctionData, StaticData},
- expr::{BindingAnnotation, ExprId, PatId},
- lang_item::LangItemTarget,
- path::{path, Path},
- resolver::{HasResolver, Resolver, TypeNs},
- type_ref::{Mutability, TypeRef},
- AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, Lookup, TraitId,
- TypeAliasId, VariantId,
-};
-use hir_expand::{diagnostics::DiagnosticSink, name::name};
-use rustc_hash::FxHashMap;
-use stdx::impl_from;
-use syntax::SmolStr;
-
-use super::{
- primitive::{FloatTy, IntTy},
- traits::{Guidance, Obligation, ProjectionPredicate, Solution},
- InEnvironment, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk,
-};
-use crate::{
- db::HirDatabase, infer::diagnostics::InferenceDiagnostic, lower::ImplTraitLoweringMode,
-};
-
-pub(crate) use unify::unify;
-
-macro_rules! ty_app {
- ($ctor:pat, $param:pat) => {
- crate::Ty::Apply(crate::ApplicationTy { ctor: $ctor, parameters: $param })
- };
- ($ctor:pat) => {
- ty_app!($ctor, _)
- };
-}
-
-mod unify;
-mod path;
-mod expr;
-mod pat;
-mod coerce;
-
-/// The entry point of type inference.
-pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
- let _p = profile::span("infer_query");
- let resolver = def.resolver(db.upcast());
- let mut ctx = InferenceContext::new(db, def, resolver);
-
- match def {
- DefWithBodyId::ConstId(c) => ctx.collect_const(&db.const_data(c)),
- DefWithBodyId::FunctionId(f) => ctx.collect_fn(&db.function_data(f)),
- DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_data(s)),
- }
-
- ctx.infer_body();
-
- Arc::new(ctx.resolve_all())
-}
-
-#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
-enum ExprOrPatId {
- ExprId(ExprId),
- PatId(PatId),
-}
-impl_from!(ExprId, PatId for ExprOrPatId);
-
-/// Binding modes inferred for patterns.
-/// https://doc.rust-lang.org/reference/patterns.html#binding-modes
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-enum BindingMode {
- Move,
- Ref(Mutability),
-}
-
-impl BindingMode {
- pub fn convert(annotation: BindingAnnotation) -> BindingMode {
- match annotation {
- BindingAnnotation::Unannotated | BindingAnnotation::Mutable => BindingMode::Move,
- BindingAnnotation::Ref => BindingMode::Ref(Mutability::Shared),
- BindingAnnotation::RefMut => BindingMode::Ref(Mutability::Mut),
- }
- }
-}
-
-impl Default for BindingMode {
- fn default() -> Self {
- BindingMode::Move
- }
-}
-
-/// A mismatch between an expected and an inferred type.
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub struct TypeMismatch {
- pub expected: Ty,
- pub actual: Ty,
-}
-
-/// The result of type inference: A mapping from expressions and patterns to types.
-#[derive(Clone, PartialEq, Eq, Debug, Default)]
-pub struct InferenceResult {
- /// For each method call expr, records the function it resolves to.
- method_resolutions: FxHashMap<ExprId, FunctionId>,
- /// For each field access expr, records the field it resolves to.
- field_resolutions: FxHashMap<ExprId, FieldId>,
- /// For each field in record literal, records the field it resolves to.
- record_field_resolutions: FxHashMap<ExprId, FieldId>,
- record_field_pat_resolutions: FxHashMap<PatId, FieldId>,
- /// For each struct literal, records the variant it resolves to.
- variant_resolutions: FxHashMap<ExprOrPatId, VariantId>,
- /// For each associated item record what it resolves to
- assoc_resolutions: FxHashMap<ExprOrPatId, AssocItemId>,
- diagnostics: Vec<InferenceDiagnostic>,
- pub type_of_expr: ArenaMap<ExprId, Ty>,
- pub type_of_pat: ArenaMap<PatId, Ty>,
- pub(super) type_mismatches: ArenaMap<ExprId, TypeMismatch>,
-}
-
-impl InferenceResult {
- pub fn method_resolution(&self, expr: ExprId) -> Option<FunctionId> {
- self.method_resolutions.get(&expr).copied()
- }
- pub fn field_resolution(&self, expr: ExprId) -> Option<FieldId> {
- self.field_resolutions.get(&expr).copied()
- }
- pub fn record_field_resolution(&self, expr: ExprId) -> Option<FieldId> {
- self.record_field_resolutions.get(&expr).copied()
- }
- pub fn record_field_pat_resolution(&self, pat: PatId) -> Option<FieldId> {
- self.record_field_pat_resolutions.get(&pat).copied()
- }
- pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantId> {
- self.variant_resolutions.get(&id.into()).copied()
- }
- pub fn variant_resolution_for_pat(&self, id: PatId) -> Option<VariantId> {
- self.variant_resolutions.get(&id.into()).copied()
- }
- pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option<AssocItemId> {
- self.assoc_resolutions.get(&id.into()).copied()
- }
- pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<AssocItemId> {
- self.assoc_resolutions.get(&id.into()).copied()
- }
- pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> {
- self.type_mismatches.get(expr)
- }
- pub fn add_diagnostics(
- &self,
- db: &dyn HirDatabase,
- owner: DefWithBodyId,
- sink: &mut DiagnosticSink,
- ) {
- self.diagnostics.iter().for_each(|it| it.add_to(db, owner, sink))
- }
-}
-
-impl Index<ExprId> for InferenceResult {
- type Output = Ty;
-
- fn index(&self, expr: ExprId) -> &Ty {
- self.type_of_expr.get(expr).unwrap_or(&Ty::Unknown)
- }
-}
-
-impl Index<PatId> for InferenceResult {
- type Output = Ty;
-
- fn index(&self, pat: PatId) -> &Ty {
- self.type_of_pat.get(pat).unwrap_or(&Ty::Unknown)
- }
-}
-
-/// The inference context contains all information needed during type inference.
-#[derive(Clone, Debug)]
-struct InferenceContext<'a> {
- db: &'a dyn HirDatabase,
- owner: DefWithBodyId,
- body: Arc<Body>,
- resolver: Resolver,
- table: unify::InferenceTable,
- trait_env: Arc<TraitEnvironment>,
- obligations: Vec<Obligation>,
- result: InferenceResult,
- /// The return type of the function being inferred, or the closure if we're
- /// currently within one.
- ///
- /// We might consider using a nested inference context for checking
- /// closures, but currently this is the only field that will change there,
- /// so it doesn't make sense.
- return_ty: Ty,
- diverges: Diverges,
- breakables: Vec<BreakableContext>,
-}
-
-#[derive(Clone, Debug)]
-struct BreakableContext {
- pub may_break: bool,
- pub break_ty: Ty,
- pub label: Option<name::Name>,
-}
-
-fn find_breakable<'c>(
- ctxs: &'c mut [BreakableContext],
- label: Option<&name::Name>,
-) -> Option<&'c mut BreakableContext> {
- match label {
- Some(_) => ctxs.iter_mut().rev().find(|ctx| ctx.label.as_ref() == label),
- None => ctxs.last_mut(),
- }
-}
-
-impl<'a> InferenceContext<'a> {
- fn new(db: &'a dyn HirDatabase, owner: DefWithBodyId, resolver: Resolver) -> Self {
- InferenceContext {
- result: InferenceResult::default(),
- table: unify::InferenceTable::new(),
- obligations: Vec::default(),
- return_ty: Ty::Unknown, // set in collect_fn_signature
- trait_env: TraitEnvironment::lower(db, &resolver),
- db,
- owner,
- body: db.body(owner),
- resolver,
- diverges: Diverges::Maybe,
- breakables: Vec::new(),
- }
- }
-
- fn resolve_all(mut self) -> InferenceResult {
- // FIXME resolve obligations as well (use Guidance if necessary)
- let mut result = std::mem::take(&mut self.result);
- for ty in result.type_of_expr.values_mut() {
- let resolved = self.table.resolve_ty_completely(mem::replace(ty, Ty::Unknown));
- *ty = resolved;
- }
- for ty in result.type_of_pat.values_mut() {
- let resolved = self.table.resolve_ty_completely(mem::replace(ty, Ty::Unknown));
- *ty = resolved;
- }
- result
- }
-
- fn write_expr_ty(&mut self, expr: ExprId, ty: Ty) {
- self.result.type_of_expr.insert(expr, ty);
- }
-
- fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId) {
- self.result.method_resolutions.insert(expr, func);
- }
-
- fn write_field_resolution(&mut self, expr: ExprId, field: FieldId) {
- self.result.field_resolutions.insert(expr, field);
- }
-
- fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantId) {
- self.result.variant_resolutions.insert(id, variant);
- }
-
- fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: AssocItemId) {
- self.result.assoc_resolutions.insert(id, item);
- }
-
- fn write_pat_ty(&mut self, pat: PatId, ty: Ty) {
- self.result.type_of_pat.insert(pat, ty);
- }
-
- fn push_diagnostic(&mut self, diagnostic: InferenceDiagnostic) {
- self.result.diagnostics.push(diagnostic);
- }
-
- fn make_ty_with_mode(
- &mut self,
- type_ref: &TypeRef,
- impl_trait_mode: ImplTraitLoweringMode,
- ) -> Ty {
- // FIXME use right resolver for block
- let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
- .with_impl_trait_mode(impl_trait_mode);
- let ty = Ty::from_hir(&ctx, type_ref);
- let ty = self.insert_type_vars(ty);
- self.normalize_associated_types_in(ty)
- }
-
- fn make_ty(&mut self, type_ref: &TypeRef) -> Ty {
- self.make_ty_with_mode(type_ref, ImplTraitLoweringMode::Disallowed)
- }
-
- /// Replaces Ty::Unknown by a new type var, so we can maybe still infer it.
- fn insert_type_vars_shallow(&mut self, ty: Ty) -> Ty {
- match ty {
- Ty::Unknown => self.table.new_type_var(),
- _ => ty,
- }
- }
-
- fn insert_type_vars(&mut self, ty: Ty) -> Ty {
- ty.fold(&mut |ty| self.insert_type_vars_shallow(ty))
- }
-
- fn resolve_obligations_as_possible(&mut self) {
- let obligations = mem::replace(&mut self.obligations, Vec::new());
- for obligation in obligations {
- let in_env = InEnvironment::new(self.trait_env.clone(), obligation.clone());
- let canonicalized = self.canonicalizer().canonicalize_obligation(in_env);
- let solution =
- self.db.trait_solve(self.resolver.krate().unwrap(), canonicalized.value.clone());
-
- match solution {
- Some(Solution::Unique(substs)) => {
- canonicalized.apply_solution(self, substs.0);
- }
- Some(Solution::Ambig(Guidance::Definite(substs))) => {
- canonicalized.apply_solution(self, substs.0);
- self.obligations.push(obligation);
- }
- Some(_) => {
- // FIXME use this when trying to resolve everything at the end
- self.obligations.push(obligation);
- }
- None => {
- // FIXME obligation cannot be fulfilled => diagnostic
- }
- };
- }
- }
-
- fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
- self.table.unify(ty1, ty2)
- }
-
- /// Resolves the type as far as currently possible, replacing type variables
- /// by their known types. All types returned by the infer_* functions should
- /// be resolved as far as possible, i.e. contain no type variables with
- /// known type.
- fn resolve_ty_as_possible(&mut self, ty: Ty) -> Ty {
- self.resolve_obligations_as_possible();
-
- self.table.resolve_ty_as_possible(ty)
- }
-
- fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> {
- self.table.resolve_ty_shallow(ty)
- }
-
- fn resolve_associated_type(&mut self, inner_ty: Ty, assoc_ty: Option<TypeAliasId>) -> Ty {
- self.resolve_associated_type_with_params(inner_ty, assoc_ty, &[])
- }
-
- fn resolve_associated_type_with_params(
- &mut self,
- inner_ty: Ty,
- assoc_ty: Option<TypeAliasId>,
- params: &[Ty],
- ) -> Ty {
- match assoc_ty {
- Some(res_assoc_ty) => {
- let trait_ = match res_assoc_ty.lookup(self.db.upcast()).container {
- hir_def::AssocContainerId::TraitId(trait_) => trait_,
- _ => panic!("resolve_associated_type called with non-associated type"),
- };
- let ty = self.table.new_type_var();
- let substs = Substs::build_for_def(self.db, res_assoc_ty)
- .push(inner_ty)
- .fill(params.iter().cloned())
- .build();
- let trait_ref = TraitRef { trait_, substs: substs.clone() };
- let projection = ProjectionPredicate {
- ty: ty.clone(),
- projection_ty: ProjectionTy { associated_ty: res_assoc_ty, parameters: substs },
- };
- self.obligations.push(Obligation::Trait(trait_ref));
- self.obligations.push(Obligation::Projection(projection));
- self.resolve_ty_as_possible(ty)
- }
- None => Ty::Unknown,
- }
- }
-
- /// Recurses through the given type, normalizing associated types mentioned
- /// in it by replacing them by type variables and registering obligations to
- /// resolve later. This should be done once for every type we get from some
- /// type annotation (e.g. from a let type annotation, field type or function
- /// call). `make_ty` handles this already, but e.g. for field types we need
- /// to do it as well.
- fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty {
- let ty = self.resolve_ty_as_possible(ty);
- ty.fold(&mut |ty| match ty {
- Ty::Projection(proj_ty) => self.normalize_projection_ty(proj_ty),
- _ => ty,
- })
- }
-
- fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty {
- let var = self.table.new_type_var();
- let predicate = ProjectionPredicate { projection_ty: proj_ty, ty: var.clone() };
- let obligation = Obligation::Projection(predicate);
- self.obligations.push(obligation);
- var
- }
-
- fn resolve_variant(&mut self, path: Option<&Path>) -> (Ty, Option<VariantId>) {
- let path = match path {
- Some(path) => path,
- None => return (Ty::Unknown, None),
- };
- let resolver = &self.resolver;
- let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
- // FIXME: this should resolve assoc items as well, see this example:
- // https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521
- let (resolution, unresolved) =
- match resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
- Some(it) => it,
- None => return (Ty::Unknown, None),
- };
- return match resolution {
- TypeNs::AdtId(AdtId::StructId(strukt)) => {
- let substs = Ty::substs_from_path(&ctx, path, strukt.into(), true);
- let ty = self.db.ty(strukt.into());
- let ty = self.insert_type_vars(ty.subst(&substs));
- forbid_unresolved_segments((ty, Some(strukt.into())), unresolved)
- }
- TypeNs::AdtId(AdtId::UnionId(u)) => {
- let substs = Ty::substs_from_path(&ctx, path, u.into(), true);
- let ty = self.db.ty(u.into());
- let ty = self.insert_type_vars(ty.subst(&substs));
- forbid_unresolved_segments((ty, Some(u.into())), unresolved)
- }
- TypeNs::EnumVariantId(var) => {
- let substs = Ty::substs_from_path(&ctx, path, var.into(), true);
- let ty = self.db.ty(var.parent.into());
- let ty = self.insert_type_vars(ty.subst(&substs));
- forbid_unresolved_segments((ty, Some(var.into())), unresolved)
- }
- TypeNs::SelfType(impl_id) => {
- let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
- let substs = Substs::type_params_for_generics(&generics);
- let ty = self.db.impl_self_ty(impl_id).subst(&substs);
- match unresolved {
- None => {
- let variant = ty_variant(&ty);
- (ty, variant)
- }
- Some(1) => {
- let segment = path.mod_path().segments.last().unwrap();
- // this could be an enum variant or associated type
- if let Some((AdtId::EnumId(enum_id), _)) = ty.as_adt() {
- let enum_data = self.db.enum_data(enum_id);
- if let Some(local_id) = enum_data.variant(segment) {
- let variant = EnumVariantId { parent: enum_id, local_id };
- return (ty, Some(variant.into()));
- }
- }
- // FIXME potentially resolve assoc type
- (Ty::Unknown, None)
- }
- Some(_) => {
- // FIXME diagnostic
- (Ty::Unknown, None)
- }
- }
- }
- TypeNs::TypeAliasId(it) => {
- let substs = Substs::build_for_def(self.db, it)
- .fill(std::iter::repeat_with(|| self.table.new_type_var()))
- .build();
- let ty = self.db.ty(it.into()).subst(&substs);
- let variant = ty_variant(&ty);
- forbid_unresolved_segments((ty, variant), unresolved)
- }
- TypeNs::AdtSelfType(_) => {
- // FIXME this could happen in array size expressions, once we're checking them
- (Ty::Unknown, None)
- }
- TypeNs::GenericParam(_) => {
- // FIXME potentially resolve assoc type
- (Ty::Unknown, None)
- }
- TypeNs::AdtId(AdtId::EnumId(_)) | TypeNs::BuiltinType(_) | TypeNs::TraitId(_) => {
- // FIXME diagnostic
- (Ty::Unknown, None)
- }
- };
-
- fn forbid_unresolved_segments(
- result: (Ty, Option<VariantId>),
- unresolved: Option<usize>,
- ) -> (Ty, Option<VariantId>) {
- if unresolved.is_none() {
- result
- } else {
- // FIXME diagnostic
- (Ty::Unknown, None)
- }
- }
-
- fn ty_variant(ty: &Ty) -> Option<VariantId> {
- ty.as_adt().and_then(|(adt_id, _)| match adt_id {
- AdtId::StructId(s) => Some(VariantId::StructId(s)),
- AdtId::UnionId(u) => Some(VariantId::UnionId(u)),
- AdtId::EnumId(_) => {
- // FIXME Error E0071, expected struct, variant or union type, found enum `Foo`
- None
- }
- })
- }
- }
-
- fn collect_const(&mut self, data: &ConstData) {
- self.return_ty = self.make_ty(&data.type_ref);
- }
-
- fn collect_static(&mut self, data: &StaticData) {
- self.return_ty = self.make_ty(&data.type_ref);
- }
-
- fn collect_fn(&mut self, data: &FunctionData) {
- let body = Arc::clone(&self.body); // avoid borrow checker problem
- let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
- .with_impl_trait_mode(ImplTraitLoweringMode::Param);
- let param_tys =
- data.params.iter().map(|type_ref| Ty::from_hir(&ctx, type_ref)).collect::<Vec<_>>();
- for (ty, pat) in param_tys.into_iter().zip(body.params.iter()) {
- let ty = self.insert_type_vars(ty);
- let ty = self.normalize_associated_types_in(ty);
-
- self.infer_pat(*pat, &ty, BindingMode::default());
- }
- let return_ty = self.make_ty_with_mode(&data.ret_type, ImplTraitLoweringMode::Disallowed); // FIXME implement RPIT
- self.return_ty = return_ty;
- }
-
- fn infer_body(&mut self) {
- self.infer_expr_coerce(self.body.body_expr, &Expectation::has_type(self.return_ty.clone()));
- }
-
- fn resolve_lang_item(&self, name: &str) -> Option<LangItemTarget> {
- let krate = self.resolver.krate()?;
- let name = SmolStr::new_inline_from_ascii(name.len(), name.as_bytes());
- self.db.lang_item(krate, name)
- }
-
- fn resolve_into_iter_item(&self) -> Option<TypeAliasId> {
- let path = path![core::iter::IntoIterator];
- let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
- self.db.trait_data(trait_).associated_type_by_name(&name![Item])
- }
-
- fn resolve_ops_try_ok(&self) -> Option<TypeAliasId> {
- let path = path![core::ops::Try];
- let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
- self.db.trait_data(trait_).associated_type_by_name(&name![Ok])
- }
-
- fn resolve_ops_neg_output(&self) -> Option<TypeAliasId> {
- let trait_ = self.resolve_lang_item("neg")?.as_trait()?;
- self.db.trait_data(trait_).associated_type_by_name(&name![Output])
- }
-
- fn resolve_ops_not_output(&self) -> Option<TypeAliasId> {
- let trait_ = self.resolve_lang_item("not")?.as_trait()?;
- self.db.trait_data(trait_).associated_type_by_name(&name![Output])
- }
-
- fn resolve_future_future_output(&self) -> Option<TypeAliasId> {
- let trait_ = self.resolve_lang_item("future_trait")?.as_trait()?;
- self.db.trait_data(trait_).associated_type_by_name(&name![Output])
- }
-
- fn resolve_boxed_box(&self) -> Option<AdtId> {
- let struct_ = self.resolve_lang_item("owned_box")?.as_struct()?;
- Some(struct_.into())
- }
-
- fn resolve_range_full(&self) -> Option<AdtId> {
- let path = path![core::ops::RangeFull];
- let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
- Some(struct_.into())
- }
-
- fn resolve_range(&self) -> Option<AdtId> {
- let path = path![core::ops::Range];
- let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
- Some(struct_.into())
- }
-
- fn resolve_range_inclusive(&self) -> Option<AdtId> {
- let path = path![core::ops::RangeInclusive];
- let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
- Some(struct_.into())
- }
-
- fn resolve_range_from(&self) -> Option<AdtId> {
- let path = path![core::ops::RangeFrom];
- let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
- Some(struct_.into())
- }
-
- fn resolve_range_to(&self) -> Option<AdtId> {
- let path = path![core::ops::RangeTo];
- let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
- Some(struct_.into())
- }
-
- fn resolve_range_to_inclusive(&self) -> Option<AdtId> {
- let path = path![core::ops::RangeToInclusive];
- let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
- Some(struct_.into())
- }
-
- fn resolve_ops_index(&self) -> Option<TraitId> {
- self.resolve_lang_item("index")?.as_trait()
- }
-
- fn resolve_ops_index_output(&self) -> Option<TypeAliasId> {
- let trait_ = self.resolve_ops_index()?;
- self.db.trait_data(trait_).associated_type_by_name(&name![Output])
- }
-}
-
-/// The kinds of placeholders we need during type inference. There's separate
-/// values for general types, and for integer and float variables. The latter
-/// two are used for inference of literal values (e.g. `100` could be one of
-/// several integer types).
-#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
-pub enum InferTy {
- TypeVar(unify::TypeVarId),
- IntVar(unify::TypeVarId),
- FloatVar(unify::TypeVarId),
- MaybeNeverTypeVar(unify::TypeVarId),
-}
-
-impl InferTy {
- fn to_inner(self) -> unify::TypeVarId {
- match self {
- InferTy::TypeVar(ty)
- | InferTy::IntVar(ty)
- | InferTy::FloatVar(ty)
- | InferTy::MaybeNeverTypeVar(ty) => ty,
- }
- }
-
- fn fallback_value(self) -> Ty {
- match self {
- InferTy::TypeVar(..) => Ty::Unknown,
- InferTy::IntVar(..) => Ty::simple(TypeCtor::Int(IntTy::i32())),
- InferTy::FloatVar(..) => Ty::simple(TypeCtor::Float(FloatTy::f64())),
- InferTy::MaybeNeverTypeVar(..) => Ty::simple(TypeCtor::Never),
- }
- }
-}
-
-/// When inferring an expression, we propagate downward whatever type hint we
-/// are able in the form of an `Expectation`.
-#[derive(Clone, PartialEq, Eq, Debug)]
-struct Expectation {
- ty: Ty,
- /// See the `rvalue_hint` method.
- rvalue_hint: bool,
-}
-
-impl Expectation {
- /// The expectation that the type of the expression needs to equal the given
- /// type.
- fn has_type(ty: Ty) -> Self {
- Expectation { ty, rvalue_hint: false }
- }
-
- /// The following explanation is copied straight from rustc:
- /// Provides an expectation for an rvalue expression given an *optional*
- /// hint, which is not required for type safety (the resulting type might
- /// be checked higher up, as is the case with `&expr` and `box expr`), but
- /// is useful in determining the concrete type.
- ///
- /// The primary use case is where the expected type is a fat pointer,
- /// like `&[isize]`. For example, consider the following statement:
- ///
- /// let x: &[isize] = &[1, 2, 3];
- ///
- /// In this case, the expected type for the `&[1, 2, 3]` expression is
- /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
- /// expectation `ExpectHasType([isize])`, that would be too strong --
- /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`.
- /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced
- /// to the type `&[isize]`. Therefore, we propagate this more limited hint,
- /// which still is useful, because it informs integer literals and the like.
- /// See the test case `test/ui/coerce-expect-unsized.rs` and #20169
- /// for examples of where this comes up,.
- fn rvalue_hint(ty: Ty) -> Self {
- Expectation { ty, rvalue_hint: true }
- }
-
- /// This expresses no expectation on the type.
- fn none() -> Self {
- Expectation { ty: Ty::Unknown, rvalue_hint: false }
- }
-
- fn coercion_target(&self) -> &Ty {
- if self.rvalue_hint {
- &Ty::Unknown
- } else {
- &self.ty
- }
- }
-}
-
-#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
-enum Diverges {
- Maybe,
- Always,
-}
-
-impl Diverges {
- fn is_always(self) -> bool {
- self == Diverges::Always
- }
-}
-
-impl std::ops::BitAnd for Diverges {
- type Output = Self;
- fn bitand(self, other: Self) -> Self {
- std::cmp::min(self, other)
- }
-}
-
-impl std::ops::BitOr for Diverges {
- type Output = Self;
- fn bitor(self, other: Self) -> Self {
- std::cmp::max(self, other)
- }
-}
-
-impl std::ops::BitAndAssign for Diverges {
- fn bitand_assign(&mut self, other: Self) {
- *self = *self & other;
- }
-}
-
-impl std::ops::BitOrAssign for Diverges {
- fn bitor_assign(&mut self, other: Self) {
- *self = *self | other;
- }
-}
-
-mod diagnostics {
- use hir_def::{expr::ExprId, DefWithBodyId};
- use hir_expand::diagnostics::DiagnosticSink;
-
- use crate::{
- db::HirDatabase,
- diagnostics::{BreakOutsideOfLoop, NoSuchField},
- };
-
- #[derive(Debug, PartialEq, Eq, Clone)]
- pub(super) enum InferenceDiagnostic {
- NoSuchField { expr: ExprId, field: usize },
- BreakOutsideOfLoop { expr: ExprId },
- }
-
- impl InferenceDiagnostic {
- pub(super) fn add_to(
- &self,
- db: &dyn HirDatabase,
- owner: DefWithBodyId,
- sink: &mut DiagnosticSink,
- ) {
- match self {
- InferenceDiagnostic::NoSuchField { expr, field } => {
- let (_, source_map) = db.body_with_source_map(owner);
- let field = source_map.field_syntax(*expr, *field);
- sink.push(NoSuchField { file: field.file_id, field: field.value })
- }
- InferenceDiagnostic::BreakOutsideOfLoop { expr } => {
- let (_, source_map) = db.body_with_source_map(owner);
- let ptr = source_map
- .expr_syntax(*expr)
- .expect("break outside of loop in synthetic syntax");
- sink.push(BreakOutsideOfLoop { file: ptr.file_id, expr: ptr.value })
- }
- }
- }
- }
-}
+++ /dev/null
-//! Coercion logic. Coercions are certain type conversions that can implicitly
-//! happen in certain places, e.g. weakening `&mut` to `&` or deref coercions
-//! like going from `&Vec<T>` to `&[T]`.
-//!
-//! See: https://doc.rust-lang.org/nomicon/coercions.html
-
-use hir_def::{lang_item::LangItemTarget, type_ref::Mutability};
-use test_utils::mark;
-
-use crate::{autoderef, traits::Solution, Obligation, Substs, TraitRef, Ty, TypeCtor};
-
-use super::{unify::TypeVarValue, InEnvironment, InferTy, InferenceContext};
-
-impl<'a> InferenceContext<'a> {
- /// Unify two types, but may coerce the first one to the second one
- /// using "implicit coercion rules" if needed.
- pub(super) fn coerce(&mut self, from_ty: &Ty, to_ty: &Ty) -> bool {
- let from_ty = self.resolve_ty_shallow(from_ty).into_owned();
- let to_ty = self.resolve_ty_shallow(to_ty);
- self.coerce_inner(from_ty, &to_ty)
- }
-
- /// Merge two types from different branches, with possible coercion.
- ///
- /// Mostly this means trying to coerce one to the other, but
- /// - if we have two function types for different functions, we need to
- /// coerce both to function pointers;
- /// - if we were concerned with lifetime subtyping, we'd need to look for a
- /// least upper bound.
- pub(super) fn coerce_merge_branch(&mut self, ty1: &Ty, ty2: &Ty) -> Ty {
- if self.coerce(ty1, ty2) {
- ty2.clone()
- } else if self.coerce(ty2, ty1) {
- ty1.clone()
- } else {
- if let (ty_app!(TypeCtor::FnDef(_)), ty_app!(TypeCtor::FnDef(_))) = (ty1, ty2) {
- mark::hit!(coerce_fn_reification);
- // Special case: two function types. Try to coerce both to
- // pointers to have a chance at getting a match. See
- // https://github.com/rust-lang/rust/blob/7b805396bf46dce972692a6846ce2ad8481c5f85/src/librustc_typeck/check/coercion.rs#L877-L916
- let sig1 = ty1.callable_sig(self.db).expect("FnDef without callable sig");
- let sig2 = ty2.callable_sig(self.db).expect("FnDef without callable sig");
- let ptr_ty1 = Ty::fn_ptr(sig1);
- let ptr_ty2 = Ty::fn_ptr(sig2);
- self.coerce_merge_branch(&ptr_ty1, &ptr_ty2)
- } else {
- mark::hit!(coerce_merge_fail_fallback);
- ty1.clone()
- }
- }
- }
-
- fn coerce_inner(&mut self, mut from_ty: Ty, to_ty: &Ty) -> bool {
- match (&from_ty, to_ty) {
- // Never type will make type variable to fallback to Never Type instead of Unknown.
- (ty_app!(TypeCtor::Never), Ty::Infer(InferTy::TypeVar(tv))) => {
- let var = self.table.new_maybe_never_type_var();
- self.table.var_unification_table.union_value(*tv, TypeVarValue::Known(var));
- return true;
- }
- (ty_app!(TypeCtor::Never), _) => return true,
-
- // Trivial cases, this should go after `never` check to
- // avoid infer result type to be never
- _ => {
- if self.table.unify_inner_trivial(&from_ty, &to_ty, 0) {
- return true;
- }
- }
- }
-
- // Pointer weakening and function to pointer
- match (&mut from_ty, to_ty) {
- // `*mut T`, `&mut T, `&T`` -> `*const T`
- // `&mut T` -> `&T`
- // `&mut T` -> `*mut T`
- (ty_app!(c1@TypeCtor::RawPtr(_)), ty_app!(c2@TypeCtor::RawPtr(Mutability::Shared)))
- | (ty_app!(c1@TypeCtor::Ref(_)), ty_app!(c2@TypeCtor::RawPtr(Mutability::Shared)))
- | (ty_app!(c1@TypeCtor::Ref(_)), ty_app!(c2@TypeCtor::Ref(Mutability::Shared)))
- | (ty_app!(c1@TypeCtor::Ref(Mutability::Mut)), ty_app!(c2@TypeCtor::RawPtr(_))) => {
- *c1 = *c2;
- }
-
- // Illegal mutablity conversion
- (
- ty_app!(TypeCtor::RawPtr(Mutability::Shared)),
- ty_app!(TypeCtor::RawPtr(Mutability::Mut)),
- )
- | (
- ty_app!(TypeCtor::Ref(Mutability::Shared)),
- ty_app!(TypeCtor::Ref(Mutability::Mut)),
- ) => return false,
-
- // `{function_type}` -> `fn()`
- (ty_app!(TypeCtor::FnDef(_)), ty_app!(TypeCtor::FnPtr { .. })) => {
- match from_ty.callable_sig(self.db) {
- None => return false,
- Some(sig) => {
- from_ty = Ty::fn_ptr(sig);
- }
- }
- }
-
- (ty_app!(TypeCtor::Closure { .. }, params), ty_app!(TypeCtor::FnPtr { .. })) => {
- from_ty = params[0].clone();
- }
-
- _ => {}
- }
-
- if let Some(ret) = self.try_coerce_unsized(&from_ty, &to_ty) {
- return ret;
- }
-
- // Auto Deref if cannot coerce
- match (&from_ty, to_ty) {
- // FIXME: DerefMut
- (ty_app!(TypeCtor::Ref(_), st1), ty_app!(TypeCtor::Ref(_), st2)) => {
- self.unify_autoderef_behind_ref(&st1[0], &st2[0])
- }
-
- // Otherwise, normal unify
- _ => self.unify(&from_ty, to_ty),
- }
- }
-
- /// Coerce a type using `from_ty: CoerceUnsized<ty_ty>`
- ///
- /// See: https://doc.rust-lang.org/nightly/std/marker/trait.CoerceUnsized.html
- fn try_coerce_unsized(&mut self, from_ty: &Ty, to_ty: &Ty) -> Option<bool> {
- let krate = self.resolver.krate().unwrap();
- let coerce_unsized_trait = match self.db.lang_item(krate, "coerce_unsized".into()) {
- Some(LangItemTarget::TraitId(trait_)) => trait_,
- _ => return None,
- };
-
- let generic_params = crate::utils::generics(self.db.upcast(), coerce_unsized_trait.into());
- if generic_params.len() != 2 {
- // The CoerceUnsized trait should have two generic params: Self and T.
- return None;
- }
-
- let substs = Substs::build_for_generics(&generic_params)
- .push(from_ty.clone())
- .push(to_ty.clone())
- .build();
- let trait_ref = TraitRef { trait_: coerce_unsized_trait, substs };
- let goal = InEnvironment::new(self.trait_env.clone(), Obligation::Trait(trait_ref));
-
- let canonicalizer = self.canonicalizer();
- let canonicalized = canonicalizer.canonicalize_obligation(goal);
-
- let solution = self.db.trait_solve(krate, canonicalized.value.clone())?;
-
- match solution {
- Solution::Unique(v) => {
- canonicalized.apply_solution(self, v.0);
- }
- _ => return None,
- };
-
- Some(true)
- }
-
- /// Unify `from_ty` to `to_ty` with optional auto Deref
- ///
- /// Note that the parameters are already stripped the outer reference.
- fn unify_autoderef_behind_ref(&mut self, from_ty: &Ty, to_ty: &Ty) -> bool {
- let canonicalized = self.canonicalizer().canonicalize_ty(from_ty.clone());
- let to_ty = self.resolve_ty_shallow(&to_ty);
- // FIXME: Auto DerefMut
- for derefed_ty in autoderef::autoderef(
- self.db,
- self.resolver.krate(),
- InEnvironment {
- value: canonicalized.value.clone(),
- environment: self.trait_env.clone(),
- },
- ) {
- let derefed_ty = canonicalized.decanonicalize_ty(derefed_ty.value);
- match (&*self.resolve_ty_shallow(&derefed_ty), &*to_ty) {
- // Stop when constructor matches.
- (ty_app!(from_ctor, st1), ty_app!(to_ctor, st2)) if from_ctor == to_ctor => {
- // It will not recurse to `coerce`.
- return self.table.unify_substs(st1, st2, 0);
- }
- _ => {
- if self.table.unify_inner_trivial(&derefed_ty, &to_ty, 0) {
- return true;
- }
- }
- }
- }
-
- false
- }
-}
+++ /dev/null
-//! Type inference for expressions.
-
-use std::iter::{repeat, repeat_with};
-use std::{mem, sync::Arc};
-
-use hir_def::{
- builtin_type::Signedness,
- expr::{Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp},
- path::{GenericArg, GenericArgs},
- resolver::resolver_for_expr,
- AdtId, AssocContainerId, FieldId, Lookup,
-};
-use hir_expand::name::{name, Name};
-use syntax::ast::RangeOp;
-
-use crate::{
- autoderef, method_resolution, op,
- traits::{FnTrait, InEnvironment},
- utils::{generics, variant_data, Generics},
- ApplicationTy, Binders, CallableDefId, InferTy, IntTy, Mutability, Obligation, Rawness, Substs,
- TraitRef, Ty, TypeCtor,
-};
-
-use super::{
- find_breakable, BindingMode, BreakableContext, Diverges, Expectation, InferenceContext,
- InferenceDiagnostic, TypeMismatch,
-};
-
-impl<'a> InferenceContext<'a> {
- pub(super) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
- let ty = self.infer_expr_inner(tgt_expr, expected);
- if ty.is_never() {
- // Any expression that produces a value of type `!` must have diverged
- self.diverges = Diverges::Always;
- }
- let could_unify = self.unify(&ty, &expected.ty);
- if !could_unify {
- self.result.type_mismatches.insert(
- tgt_expr,
- TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() },
- );
- }
- self.resolve_ty_as_possible(ty)
- }
-
- /// Infer type of expression with possibly implicit coerce to the expected type.
- /// Return the type after possible coercion.
- pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
- let ty = self.infer_expr_inner(expr, &expected);
- let ty = if !self.coerce(&ty, &expected.coercion_target()) {
- self.result
- .type_mismatches
- .insert(expr, TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() });
- // Return actual type when type mismatch.
- // This is needed for diagnostic when return type mismatch.
- ty
- } else if expected.coercion_target() == &Ty::Unknown {
- ty
- } else {
- expected.ty.clone()
- };
-
- self.resolve_ty_as_possible(ty)
- }
-
- fn callable_sig_from_fn_trait(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
- let krate = self.resolver.krate()?;
- let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?;
- let output_assoc_type =
- self.db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?;
- let generic_params = generics(self.db.upcast(), fn_once_trait.into());
- if generic_params.len() != 2 {
- return None;
- }
-
- let mut param_builder = Substs::builder(num_args);
- let mut arg_tys = vec![];
- for _ in 0..num_args {
- let arg = self.table.new_type_var();
- param_builder = param_builder.push(arg.clone());
- arg_tys.push(arg);
- }
- let parameters = param_builder.build();
- let arg_ty = Ty::Apply(ApplicationTy {
- ctor: TypeCtor::Tuple { cardinality: num_args as u16 },
- parameters,
- });
- let substs =
- Substs::build_for_generics(&generic_params).push(ty.clone()).push(arg_ty).build();
-
- let trait_env = Arc::clone(&self.trait_env);
- let implements_fn_trait =
- Obligation::Trait(TraitRef { trait_: fn_once_trait, substs: substs.clone() });
- let goal = self.canonicalizer().canonicalize_obligation(InEnvironment {
- value: implements_fn_trait.clone(),
- environment: trait_env,
- });
- if self.db.trait_solve(krate, goal.value).is_some() {
- self.obligations.push(implements_fn_trait);
- let output_proj_ty =
- crate::ProjectionTy { associated_ty: output_assoc_type, parameters: substs };
- let return_ty = self.normalize_projection_ty(output_proj_ty);
- Some((arg_tys, return_ty))
- } else {
- None
- }
- }
-
- pub fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
- match ty.callable_sig(self.db) {
- Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())),
- None => self.callable_sig_from_fn_trait(ty, num_args),
- }
- }
-
- fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
- let body = Arc::clone(&self.body); // avoid borrow checker problem
- let ty = match &body[tgt_expr] {
- Expr::Missing => Ty::Unknown,
- Expr::If { condition, then_branch, else_branch } => {
- // if let is desugared to match, so this is always simple if
- self.infer_expr(*condition, &Expectation::has_type(Ty::simple(TypeCtor::Bool)));
-
- let condition_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
- let mut both_arms_diverge = Diverges::Always;
-
- let then_ty = self.infer_expr_inner(*then_branch, &expected);
- both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe);
- let else_ty = match else_branch {
- Some(else_branch) => self.infer_expr_inner(*else_branch, &expected),
- None => Ty::unit(),
- };
- both_arms_diverge &= self.diverges;
-
- self.diverges = condition_diverges | both_arms_diverge;
-
- self.coerce_merge_branch(&then_ty, &else_ty)
- }
- Expr::Block { statements, tail, .. } => {
- // FIXME: Breakable block inference
- self.infer_block(statements, *tail, expected)
- }
- Expr::Unsafe { body } => self.infer_expr(*body, expected),
- Expr::TryBlock { body } => {
- let _inner = self.infer_expr(*body, expected);
- // FIXME should be std::result::Result<{inner}, _>
- Ty::Unknown
- }
- Expr::Loop { body, label } => {
- self.breakables.push(BreakableContext {
- may_break: false,
- break_ty: self.table.new_type_var(),
- label: label.clone(),
- });
- self.infer_expr(*body, &Expectation::has_type(Ty::unit()));
-
- let ctxt = self.breakables.pop().expect("breakable stack broken");
- if ctxt.may_break {
- self.diverges = Diverges::Maybe;
- }
-
- if ctxt.may_break {
- ctxt.break_ty
- } else {
- Ty::simple(TypeCtor::Never)
- }
- }
- Expr::While { condition, body, label } => {
- self.breakables.push(BreakableContext {
- may_break: false,
- break_ty: Ty::Unknown,
- label: label.clone(),
- });
- // while let is desugared to a match loop, so this is always simple while
- self.infer_expr(*condition, &Expectation::has_type(Ty::simple(TypeCtor::Bool)));
- self.infer_expr(*body, &Expectation::has_type(Ty::unit()));
- let _ctxt = self.breakables.pop().expect("breakable stack broken");
- // the body may not run, so it diverging doesn't mean we diverge
- self.diverges = Diverges::Maybe;
- Ty::unit()
- }
- Expr::For { iterable, body, pat, label } => {
- let iterable_ty = self.infer_expr(*iterable, &Expectation::none());
-
- self.breakables.push(BreakableContext {
- may_break: false,
- break_ty: Ty::Unknown,
- label: label.clone(),
- });
- let pat_ty =
- self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item());
-
- self.infer_pat(*pat, &pat_ty, BindingMode::default());
-
- self.infer_expr(*body, &Expectation::has_type(Ty::unit()));
- let _ctxt = self.breakables.pop().expect("breakable stack broken");
- // the body may not run, so it diverging doesn't mean we diverge
- self.diverges = Diverges::Maybe;
- Ty::unit()
- }
- Expr::Lambda { body, args, ret_type, arg_types } => {
- assert_eq!(args.len(), arg_types.len());
-
- let mut sig_tys = Vec::new();
-
- // collect explicitly written argument types
- for arg_type in arg_types.iter() {
- let arg_ty = if let Some(type_ref) = arg_type {
- self.make_ty(type_ref)
- } else {
- self.table.new_type_var()
- };
- sig_tys.push(arg_ty);
- }
-
- // add return type
- let ret_ty = match ret_type {
- Some(type_ref) => self.make_ty(type_ref),
- None => self.table.new_type_var(),
- };
- sig_tys.push(ret_ty.clone());
- let sig_ty = Ty::apply(
- TypeCtor::FnPtr { num_args: sig_tys.len() as u16 - 1, is_varargs: false },
- Substs(sig_tys.clone().into()),
- );
- let closure_ty =
- Ty::apply_one(TypeCtor::Closure { def: self.owner, expr: tgt_expr }, sig_ty);
-
- // Eagerly try to relate the closure type with the expected
- // type, otherwise we often won't have enough information to
- // infer the body.
- self.coerce(&closure_ty, &expected.ty);
-
- // Now go through the argument patterns
- for (arg_pat, arg_ty) in args.iter().zip(sig_tys) {
- let resolved = self.resolve_ty_as_possible(arg_ty);
- self.infer_pat(*arg_pat, &resolved, BindingMode::default());
- }
-
- let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
- let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
-
- self.infer_expr_coerce(*body, &Expectation::has_type(ret_ty));
-
- self.diverges = prev_diverges;
- self.return_ty = prev_ret_ty;
-
- closure_ty
- }
- Expr::Call { callee, args } => {
- let callee_ty = self.infer_expr(*callee, &Expectation::none());
- let canonicalized = self.canonicalizer().canonicalize_ty(callee_ty.clone());
- let mut derefs = autoderef(
- self.db,
- self.resolver.krate(),
- InEnvironment {
- value: canonicalized.value.clone(),
- environment: self.trait_env.clone(),
- },
- );
- let (param_tys, ret_ty): (Vec<Ty>, Ty) = derefs
- .find_map(|callee_deref_ty| {
- self.callable_sig(
- &canonicalized.decanonicalize_ty(callee_deref_ty.value),
- args.len(),
- )
- })
- .unwrap_or((Vec::new(), Ty::Unknown));
- self.register_obligations_for_call(&callee_ty);
- self.check_call_arguments(args, ¶m_tys);
- self.normalize_associated_types_in(ret_ty)
- }
- Expr::MethodCall { receiver, args, method_name, generic_args } => self
- .infer_method_call(tgt_expr, *receiver, &args, &method_name, generic_args.as_ref()),
- Expr::Match { expr, arms } => {
- let input_ty = self.infer_expr(*expr, &Expectation::none());
-
- let mut result_ty = if arms.is_empty() {
- Ty::simple(TypeCtor::Never)
- } else {
- self.table.new_type_var()
- };
-
- let matchee_diverges = self.diverges;
- let mut all_arms_diverge = Diverges::Always;
-
- for arm in arms {
- self.diverges = Diverges::Maybe;
- let _pat_ty = self.infer_pat(arm.pat, &input_ty, BindingMode::default());
- if let Some(guard_expr) = arm.guard {
- self.infer_expr(
- guard_expr,
- &Expectation::has_type(Ty::simple(TypeCtor::Bool)),
- );
- }
-
- let arm_ty = self.infer_expr_inner(arm.expr, &expected);
- all_arms_diverge &= self.diverges;
- result_ty = self.coerce_merge_branch(&result_ty, &arm_ty);
- }
-
- self.diverges = matchee_diverges | all_arms_diverge;
-
- result_ty
- }
- Expr::Path(p) => {
- // FIXME this could be more efficient...
- let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
- self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or(Ty::Unknown)
- }
- Expr::Continue { .. } => Ty::simple(TypeCtor::Never),
- Expr::Break { expr, label } => {
- let val_ty = if let Some(expr) = expr {
- self.infer_expr(*expr, &Expectation::none())
- } else {
- Ty::unit()
- };
-
- let last_ty =
- if let Some(ctxt) = find_breakable(&mut self.breakables, label.as_ref()) {
- ctxt.break_ty.clone()
- } else {
- Ty::Unknown
- };
-
- let merged_type = self.coerce_merge_branch(&last_ty, &val_ty);
-
- if let Some(ctxt) = find_breakable(&mut self.breakables, label.as_ref()) {
- ctxt.break_ty = merged_type;
- ctxt.may_break = true;
- } else {
- self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
- expr: tgt_expr,
- });
- }
-
- Ty::simple(TypeCtor::Never)
- }
- Expr::Return { expr } => {
- if let Some(expr) = expr {
- self.infer_expr_coerce(*expr, &Expectation::has_type(self.return_ty.clone()));
- } else {
- let unit = Ty::unit();
- self.coerce(&unit, &self.return_ty.clone());
- }
- Ty::simple(TypeCtor::Never)
- }
- Expr::RecordLit { path, fields, spread } => {
- let (ty, def_id) = self.resolve_variant(path.as_ref());
- if let Some(variant) = def_id {
- self.write_variant_resolution(tgt_expr.into(), variant);
- }
-
- self.unify(&ty, &expected.ty);
-
- let substs = ty.substs().unwrap_or_else(Substs::empty);
- let field_types = def_id.map(|it| self.db.field_types(it)).unwrap_or_default();
- let variant_data = def_id.map(|it| variant_data(self.db.upcast(), it));
- for (field_idx, field) in fields.iter().enumerate() {
- let field_def =
- variant_data.as_ref().and_then(|it| match it.field(&field.name) {
- Some(local_id) => Some(FieldId { parent: def_id.unwrap(), local_id }),
- None => {
- self.push_diagnostic(InferenceDiagnostic::NoSuchField {
- expr: tgt_expr,
- field: field_idx,
- });
- None
- }
- });
- if let Some(field_def) = field_def {
- self.result.record_field_resolutions.insert(field.expr, field_def);
- }
- let field_ty = field_def
- .map_or(Ty::Unknown, |it| field_types[it.local_id].clone().subst(&substs));
- self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty));
- }
- if let Some(expr) = spread {
- self.infer_expr(*expr, &Expectation::has_type(ty.clone()));
- }
- ty
- }
- Expr::Field { expr, name } => {
- let receiver_ty = self.infer_expr_inner(*expr, &Expectation::none());
- let canonicalized = self.canonicalizer().canonicalize_ty(receiver_ty);
- let ty = autoderef::autoderef(
- self.db,
- self.resolver.krate(),
- InEnvironment {
- value: canonicalized.value.clone(),
- environment: self.trait_env.clone(),
- },
- )
- .find_map(|derefed_ty| match canonicalized.decanonicalize_ty(derefed_ty.value) {
- Ty::Apply(a_ty) => match a_ty.ctor {
- TypeCtor::Tuple { .. } => name
- .as_tuple_index()
- .and_then(|idx| a_ty.parameters.0.get(idx).cloned()),
- TypeCtor::Adt(AdtId::StructId(s)) => {
- self.db.struct_data(s).variant_data.field(name).map(|local_id| {
- let field = FieldId { parent: s.into(), local_id };
- self.write_field_resolution(tgt_expr, field);
- self.db.field_types(s.into())[field.local_id]
- .clone()
- .subst(&a_ty.parameters)
- })
- }
- TypeCtor::Adt(AdtId::UnionId(u)) => {
- self.db.union_data(u).variant_data.field(name).map(|local_id| {
- let field = FieldId { parent: u.into(), local_id };
- self.write_field_resolution(tgt_expr, field);
- self.db.field_types(u.into())[field.local_id]
- .clone()
- .subst(&a_ty.parameters)
- })
- }
- _ => None,
- },
- _ => None,
- })
- .unwrap_or(Ty::Unknown);
- let ty = self.insert_type_vars(ty);
- self.normalize_associated_types_in(ty)
- }
- Expr::Await { expr } => {
- let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
- self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
- }
- Expr::Try { expr } => {
- let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
- self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok())
- }
- Expr::Cast { expr, type_ref } => {
- let _inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
- let cast_ty = self.make_ty(type_ref);
- // FIXME check the cast...
- cast_ty
- }
- Expr::Ref { expr, rawness, mutability } => {
- let expectation = if let Some((exp_inner, exp_rawness, exp_mutability)) =
- &expected.ty.as_reference_or_ptr()
- {
- if *exp_mutability == Mutability::Mut && *mutability == Mutability::Shared {
- // FIXME: throw type error - expected mut reference but found shared ref,
- // which cannot be coerced
- }
- if *exp_rawness == Rawness::Ref && *rawness == Rawness::RawPtr {
- // FIXME: throw type error - expected reference but found ptr,
- // which cannot be coerced
- }
- Expectation::rvalue_hint(Ty::clone(exp_inner))
- } else {
- Expectation::none()
- };
- let inner_ty = self.infer_expr_inner(*expr, &expectation);
- let ty = match rawness {
- Rawness::RawPtr => TypeCtor::RawPtr(*mutability),
- Rawness::Ref => TypeCtor::Ref(*mutability),
- };
- Ty::apply_one(ty, inner_ty)
- }
- Expr::Box { expr } => {
- let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
- if let Some(box_) = self.resolve_boxed_box() {
- Ty::apply_one(TypeCtor::Adt(box_), inner_ty)
- } else {
- Ty::Unknown
- }
- }
- Expr::UnaryOp { expr, op } => {
- let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
- match op {
- UnaryOp::Deref => match self.resolver.krate() {
- Some(krate) => {
- let canonicalized = self.canonicalizer().canonicalize_ty(inner_ty);
- match autoderef::deref(
- self.db,
- krate,
- InEnvironment {
- value: &canonicalized.value,
- environment: self.trait_env.clone(),
- },
- ) {
- Some(derefed_ty) => {
- canonicalized.decanonicalize_ty(derefed_ty.value)
- }
- None => Ty::Unknown,
- }
- }
- None => Ty::Unknown,
- },
- UnaryOp::Neg => {
- match &inner_ty {
- // Fast path for builtins
- Ty::Apply(ApplicationTy {
- ctor: TypeCtor::Int(IntTy { signedness: Signedness::Signed, .. }),
- ..
- })
- | Ty::Apply(ApplicationTy { ctor: TypeCtor::Float(_), .. })
- | Ty::Infer(InferTy::IntVar(..))
- | Ty::Infer(InferTy::FloatVar(..)) => inner_ty,
- // Otherwise we resolve via the std::ops::Neg trait
- _ => self
- .resolve_associated_type(inner_ty, self.resolve_ops_neg_output()),
- }
- }
- UnaryOp::Not => {
- match &inner_ty {
- // Fast path for builtins
- Ty::Apply(ApplicationTy { ctor: TypeCtor::Bool, .. })
- | Ty::Apply(ApplicationTy { ctor: TypeCtor::Int(_), .. })
- | Ty::Infer(InferTy::IntVar(..)) => inner_ty,
- // Otherwise we resolve via the std::ops::Not trait
- _ => self
- .resolve_associated_type(inner_ty, self.resolve_ops_not_output()),
- }
- }
- }
- }
- Expr::BinaryOp { lhs, rhs, op } => match op {
- Some(op) => {
- let lhs_expectation = match op {
- BinaryOp::LogicOp(..) => Expectation::has_type(Ty::simple(TypeCtor::Bool)),
- _ => Expectation::none(),
- };
- let lhs_ty = self.infer_expr(*lhs, &lhs_expectation);
- // FIXME: find implementation of trait corresponding to operation
- // symbol and resolve associated `Output` type
- let rhs_expectation = op::binary_op_rhs_expectation(*op, lhs_ty.clone());
- let rhs_ty = self.infer_expr(*rhs, &Expectation::has_type(rhs_expectation));
-
- // FIXME: similar as above, return ty is often associated trait type
- op::binary_op_return_ty(*op, lhs_ty, rhs_ty)
- }
- _ => Ty::Unknown,
- },
- Expr::Range { lhs, rhs, range_type } => {
- let lhs_ty = lhs.map(|e| self.infer_expr_inner(e, &Expectation::none()));
- let rhs_expect = lhs_ty
- .as_ref()
- .map_or_else(Expectation::none, |ty| Expectation::has_type(ty.clone()));
- let rhs_ty = rhs.map(|e| self.infer_expr(e, &rhs_expect));
- match (range_type, lhs_ty, rhs_ty) {
- (RangeOp::Exclusive, None, None) => match self.resolve_range_full() {
- Some(adt) => Ty::simple(TypeCtor::Adt(adt)),
- None => Ty::Unknown,
- },
- (RangeOp::Exclusive, None, Some(ty)) => match self.resolve_range_to() {
- Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
- None => Ty::Unknown,
- },
- (RangeOp::Inclusive, None, Some(ty)) => {
- match self.resolve_range_to_inclusive() {
- Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
- None => Ty::Unknown,
- }
- }
- (RangeOp::Exclusive, Some(_), Some(ty)) => match self.resolve_range() {
- Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
- None => Ty::Unknown,
- },
- (RangeOp::Inclusive, Some(_), Some(ty)) => {
- match self.resolve_range_inclusive() {
- Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
- None => Ty::Unknown,
- }
- }
- (RangeOp::Exclusive, Some(ty), None) => match self.resolve_range_from() {
- Some(adt) => Ty::apply_one(TypeCtor::Adt(adt), ty),
- None => Ty::Unknown,
- },
- (RangeOp::Inclusive, _, None) => Ty::Unknown,
- }
- }
- Expr::Index { base, index } => {
- let base_ty = self.infer_expr_inner(*base, &Expectation::none());
- let index_ty = self.infer_expr(*index, &Expectation::none());
-
- if let (Some(index_trait), Some(krate)) =
- (self.resolve_ops_index(), self.resolver.krate())
- {
- let canonicalized = self.canonicalizer().canonicalize_ty(base_ty);
- let self_ty = method_resolution::resolve_indexing_op(
- self.db,
- &canonicalized.value,
- self.trait_env.clone(),
- krate,
- index_trait,
- );
- let self_ty =
- self_ty.map_or(Ty::Unknown, |t| canonicalized.decanonicalize_ty(t.value));
- self.resolve_associated_type_with_params(
- self_ty,
- self.resolve_ops_index_output(),
- &[index_ty],
- )
- } else {
- Ty::Unknown
- }
- }
- Expr::Tuple { exprs } => {
- let mut tys = match &expected.ty {
- ty_app!(TypeCtor::Tuple { .. }, st) => st
- .iter()
- .cloned()
- .chain(repeat_with(|| self.table.new_type_var()))
- .take(exprs.len())
- .collect::<Vec<_>>(),
- _ => (0..exprs.len()).map(|_| self.table.new_type_var()).collect(),
- };
-
- for (expr, ty) in exprs.iter().zip(tys.iter_mut()) {
- self.infer_expr_coerce(*expr, &Expectation::has_type(ty.clone()));
- }
-
- Ty::apply(TypeCtor::Tuple { cardinality: tys.len() as u16 }, Substs(tys.into()))
- }
- Expr::Array(array) => {
- let elem_ty = match &expected.ty {
- ty_app!(TypeCtor::Array, st) | ty_app!(TypeCtor::Slice, st) => {
- st.as_single().clone()
- }
- _ => self.table.new_type_var(),
- };
-
- match array {
- Array::ElementList(items) => {
- for expr in items.iter() {
- self.infer_expr_coerce(*expr, &Expectation::has_type(elem_ty.clone()));
- }
- }
- Array::Repeat { initializer, repeat } => {
- self.infer_expr_coerce(
- *initializer,
- &Expectation::has_type(elem_ty.clone()),
- );
- self.infer_expr(
- *repeat,
- &Expectation::has_type(Ty::simple(TypeCtor::Int(IntTy::usize()))),
- );
- }
- }
-
- Ty::apply_one(TypeCtor::Array, elem_ty)
- }
- Expr::Literal(lit) => match lit {
- Literal::Bool(..) => Ty::simple(TypeCtor::Bool),
- Literal::String(..) => {
- Ty::apply_one(TypeCtor::Ref(Mutability::Shared), Ty::simple(TypeCtor::Str))
- }
- Literal::ByteString(..) => {
- let byte_type = Ty::simple(TypeCtor::Int(IntTy::u8()));
- let array_type = Ty::apply_one(TypeCtor::Array, byte_type);
- Ty::apply_one(TypeCtor::Ref(Mutability::Shared), array_type)
- }
- Literal::Char(..) => Ty::simple(TypeCtor::Char),
- Literal::Int(_v, ty) => match ty {
- Some(int_ty) => Ty::simple(TypeCtor::Int((*int_ty).into())),
- None => self.table.new_integer_var(),
- },
- Literal::Float(_v, ty) => match ty {
- Some(float_ty) => Ty::simple(TypeCtor::Float((*float_ty).into())),
- None => self.table.new_float_var(),
- },
- },
- };
- // use a new type variable if we got Ty::Unknown here
- let ty = self.insert_type_vars_shallow(ty);
- let ty = self.resolve_ty_as_possible(ty);
- self.write_expr_ty(tgt_expr, ty.clone());
- ty
- }
-
- fn infer_block(
- &mut self,
- statements: &[Statement],
- tail: Option<ExprId>,
- expected: &Expectation,
- ) -> Ty {
- for stmt in statements {
- match stmt {
- Statement::Let { pat, type_ref, initializer } => {
- let decl_ty =
- type_ref.as_ref().map(|tr| self.make_ty(tr)).unwrap_or(Ty::Unknown);
-
- // Always use the declared type when specified
- let mut ty = decl_ty.clone();
-
- if let Some(expr) = initializer {
- let actual_ty =
- self.infer_expr_coerce(*expr, &Expectation::has_type(decl_ty.clone()));
- if decl_ty == Ty::Unknown {
- ty = actual_ty;
- }
- }
-
- let ty = self.resolve_ty_as_possible(ty);
- self.infer_pat(*pat, &ty, BindingMode::default());
- }
- Statement::Expr(expr) => {
- self.infer_expr(*expr, &Expectation::none());
- }
- }
- }
-
- let ty = if let Some(expr) = tail {
- self.infer_expr_coerce(expr, expected)
- } else {
- // Citing rustc: if there is no explicit tail expression,
- // that is typically equivalent to a tail expression
- // of `()` -- except if the block diverges. In that
- // case, there is no value supplied from the tail
- // expression (assuming there are no other breaks,
- // this implies that the type of the block will be
- // `!`).
- if self.diverges.is_always() {
- // we don't even make an attempt at coercion
- self.table.new_maybe_never_type_var()
- } else {
- self.coerce(&Ty::unit(), expected.coercion_target());
- Ty::unit()
- }
- };
- ty
- }
-
- fn infer_method_call(
- &mut self,
- tgt_expr: ExprId,
- receiver: ExprId,
- args: &[ExprId],
- method_name: &Name,
- generic_args: Option<&GenericArgs>,
- ) -> Ty {
- let receiver_ty = self.infer_expr(receiver, &Expectation::none());
- let canonicalized_receiver = self.canonicalizer().canonicalize_ty(receiver_ty.clone());
-
- let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
-
- let resolved = self.resolver.krate().and_then(|krate| {
- method_resolution::lookup_method(
- &canonicalized_receiver.value,
- self.db,
- self.trait_env.clone(),
- krate,
- &traits_in_scope,
- method_name,
- )
- });
- let (derefed_receiver_ty, method_ty, def_generics) = match resolved {
- Some((ty, func)) => {
- let ty = canonicalized_receiver.decanonicalize_ty(ty);
- self.write_method_resolution(tgt_expr, func);
- (ty, self.db.value_ty(func.into()), Some(generics(self.db.upcast(), func.into())))
- }
- None => (receiver_ty, Binders::new(0, Ty::Unknown), None),
- };
- let substs = self.substs_for_method_call(def_generics, generic_args, &derefed_receiver_ty);
- let method_ty = method_ty.subst(&substs);
- let method_ty = self.insert_type_vars(method_ty);
- self.register_obligations_for_call(&method_ty);
- let (expected_receiver_ty, param_tys, ret_ty) = match method_ty.callable_sig(self.db) {
- Some(sig) => {
- if !sig.params().is_empty() {
- (sig.params()[0].clone(), sig.params()[1..].to_vec(), sig.ret().clone())
- } else {
- (Ty::Unknown, Vec::new(), sig.ret().clone())
- }
- }
- None => (Ty::Unknown, Vec::new(), Ty::Unknown),
- };
- // Apply autoref so the below unification works correctly
- // FIXME: return correct autorefs from lookup_method
- let actual_receiver_ty = match expected_receiver_ty.as_reference() {
- Some((_, mutability)) => Ty::apply_one(TypeCtor::Ref(mutability), derefed_receiver_ty),
- _ => derefed_receiver_ty,
- };
- self.unify(&expected_receiver_ty, &actual_receiver_ty);
-
- self.check_call_arguments(args, ¶m_tys);
- self.normalize_associated_types_in(ret_ty)
- }
-
- fn check_call_arguments(&mut self, args: &[ExprId], param_tys: &[Ty]) {
- // Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 --
- // We do this in a pretty awful way: first we type-check any arguments
- // that are not closures, then we type-check the closures. This is so
- // that we have more information about the types of arguments when we
- // type-check the functions. This isn't really the right way to do this.
- for &check_closures in &[false, true] {
- let param_iter = param_tys.iter().cloned().chain(repeat(Ty::Unknown));
- for (&arg, param_ty) in args.iter().zip(param_iter) {
- let is_closure = matches!(&self.body[arg], Expr::Lambda { .. });
- if is_closure != check_closures {
- continue;
- }
-
- let param_ty = self.normalize_associated_types_in(param_ty);
- self.infer_expr_coerce(arg, &Expectation::has_type(param_ty.clone()));
- }
- }
- }
-
- fn substs_for_method_call(
- &mut self,
- def_generics: Option<Generics>,
- generic_args: Option<&GenericArgs>,
- receiver_ty: &Ty,
- ) -> Substs {
- let (parent_params, self_params, type_params, impl_trait_params) =
- def_generics.as_ref().map_or((0, 0, 0, 0), |g| g.provenance_split());
- assert_eq!(self_params, 0); // method shouldn't have another Self param
- let total_len = parent_params + type_params + impl_trait_params;
- let mut substs = Vec::with_capacity(total_len);
- // Parent arguments are unknown, except for the receiver type
- if let Some(parent_generics) = def_generics.as_ref().map(|p| p.iter_parent()) {
- for (_id, param) in parent_generics {
- if param.provenance == hir_def::generics::TypeParamProvenance::TraitSelf {
- substs.push(receiver_ty.clone());
- } else {
- substs.push(Ty::Unknown);
- }
- }
- }
- // handle provided type arguments
- if let Some(generic_args) = generic_args {
- // if args are provided, it should be all of them, but we can't rely on that
- for arg in generic_args.args.iter().take(type_params) {
- match arg {
- GenericArg::Type(type_ref) => {
- let ty = self.make_ty(type_ref);
- substs.push(ty);
- }
- }
- }
- };
- let supplied_params = substs.len();
- for _ in supplied_params..total_len {
- substs.push(Ty::Unknown);
- }
- assert_eq!(substs.len(), total_len);
- Substs(substs.into())
- }
-
- fn register_obligations_for_call(&mut self, callable_ty: &Ty) {
- if let Ty::Apply(a_ty) = callable_ty {
- if let TypeCtor::FnDef(def) = a_ty.ctor {
- let generic_predicates = self.db.generic_predicates(def.into());
- for predicate in generic_predicates.iter() {
- let predicate = predicate.clone().subst(&a_ty.parameters);
- if let Some(obligation) = Obligation::from_predicate(predicate) {
- self.obligations.push(obligation);
- }
- }
- // add obligation for trait implementation, if this is a trait method
- match def {
- CallableDefId::FunctionId(f) => {
- if let AssocContainerId::TraitId(trait_) =
- f.lookup(self.db.upcast()).container
- {
- // construct a TraitDef
- let substs = a_ty
- .parameters
- .prefix(generics(self.db.upcast(), trait_.into()).len());
- self.obligations.push(Obligation::Trait(TraitRef { trait_, substs }));
- }
- }
- CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {}
- }
- }
- }
- }
-}
+++ /dev/null
-//! Type inference for patterns.
-
-use std::iter::repeat;
-use std::sync::Arc;
-
-use hir_def::{
- expr::{BindingAnnotation, Expr, Literal, Pat, PatId, RecordFieldPat},
- path::Path,
- type_ref::Mutability,
- FieldId,
-};
-use hir_expand::name::Name;
-use test_utils::mark;
-
-use super::{BindingMode, Expectation, InferenceContext};
-use crate::{utils::variant_data, Substs, Ty, TypeCtor};
-
-impl<'a> InferenceContext<'a> {
- fn infer_tuple_struct_pat(
- &mut self,
- path: Option<&Path>,
- subpats: &[PatId],
- expected: &Ty,
- default_bm: BindingMode,
- id: PatId,
- ) -> Ty {
- let (ty, def) = self.resolve_variant(path);
- let var_data = def.map(|it| variant_data(self.db.upcast(), it));
- if let Some(variant) = def {
- self.write_variant_resolution(id.into(), variant);
- }
- self.unify(&ty, expected);
-
- let substs = ty.substs().unwrap_or_else(Substs::empty);
-
- let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
-
- for (i, &subpat) in subpats.iter().enumerate() {
- let expected_ty = var_data
- .as_ref()
- .and_then(|d| d.field(&Name::new_tuple_field(i)))
- .map_or(Ty::Unknown, |field| field_tys[field].clone().subst(&substs));
- let expected_ty = self.normalize_associated_types_in(expected_ty);
- self.infer_pat(subpat, &expected_ty, default_bm);
- }
-
- ty
- }
-
- fn infer_record_pat(
- &mut self,
- path: Option<&Path>,
- subpats: &[RecordFieldPat],
- expected: &Ty,
- default_bm: BindingMode,
- id: PatId,
- ) -> Ty {
- let (ty, def) = self.resolve_variant(path);
- let var_data = def.map(|it| variant_data(self.db.upcast(), it));
- if let Some(variant) = def {
- self.write_variant_resolution(id.into(), variant);
- }
-
- self.unify(&ty, expected);
-
- let substs = ty.substs().unwrap_or_else(Substs::empty);
-
- let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
- for subpat in subpats {
- let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name));
- if let Some(local_id) = matching_field {
- let field_def = FieldId { parent: def.unwrap(), local_id };
- self.result.record_field_pat_resolutions.insert(subpat.pat, field_def);
- }
-
- let expected_ty =
- matching_field.map_or(Ty::Unknown, |field| field_tys[field].clone().subst(&substs));
- let expected_ty = self.normalize_associated_types_in(expected_ty);
- self.infer_pat(subpat.pat, &expected_ty, default_bm);
- }
-
- ty
- }
-
- pub(super) fn infer_pat(
- &mut self,
- pat: PatId,
- mut expected: &Ty,
- mut default_bm: BindingMode,
- ) -> Ty {
- let body = Arc::clone(&self.body); // avoid borrow checker problem
-
- if is_non_ref_pat(&body, pat) {
- while let Some((inner, mutability)) = expected.as_reference() {
- expected = inner;
- default_bm = match default_bm {
- BindingMode::Move => BindingMode::Ref(mutability),
- BindingMode::Ref(Mutability::Shared) => BindingMode::Ref(Mutability::Shared),
- BindingMode::Ref(Mutability::Mut) => BindingMode::Ref(mutability),
- }
- }
- } else if let Pat::Ref { .. } = &body[pat] {
- mark::hit!(match_ergonomics_ref);
- // When you encounter a `&pat` pattern, reset to Move.
- // This is so that `w` is by value: `let (_, &w) = &(1, &2);`
- default_bm = BindingMode::Move;
- }
-
- // Lose mutability.
- let default_bm = default_bm;
- let expected = expected;
-
- let ty = match &body[pat] {
- Pat::Tuple { ref args, .. } => {
- let expectations = match expected.as_tuple() {
- Some(parameters) => &*parameters.0,
- _ => &[],
- };
- let expectations_iter = expectations.iter().chain(repeat(&Ty::Unknown));
-
- let inner_tys = args
- .iter()
- .zip(expectations_iter)
- .map(|(&pat, ty)| self.infer_pat(pat, ty, default_bm))
- .collect();
-
- Ty::apply(TypeCtor::Tuple { cardinality: args.len() as u16 }, Substs(inner_tys))
- }
- Pat::Or(ref pats) => {
- if let Some((first_pat, rest)) = pats.split_first() {
- let ty = self.infer_pat(*first_pat, expected, default_bm);
- for pat in rest {
- self.infer_pat(*pat, expected, default_bm);
- }
- ty
- } else {
- Ty::Unknown
- }
- }
- Pat::Ref { pat, mutability } => {
- let expectation = match expected.as_reference() {
- Some((inner_ty, exp_mut)) => {
- if *mutability != exp_mut {
- // FIXME: emit type error?
- }
- inner_ty
- }
- _ => &Ty::Unknown,
- };
- let subty = self.infer_pat(*pat, expectation, default_bm);
- Ty::apply_one(TypeCtor::Ref(*mutability), subty)
- }
- Pat::TupleStruct { path: p, args: subpats, .. } => {
- self.infer_tuple_struct_pat(p.as_ref(), subpats, expected, default_bm, pat)
- }
- Pat::Record { path: p, args: fields, ellipsis: _ } => {
- self.infer_record_pat(p.as_ref(), fields, expected, default_bm, pat)
- }
- Pat::Path(path) => {
- // FIXME use correct resolver for the surrounding expression
- let resolver = self.resolver.clone();
- self.infer_path(&resolver, &path, pat.into()).unwrap_or(Ty::Unknown)
- }
- Pat::Bind { mode, name: _, subpat } => {
- let mode = if mode == &BindingAnnotation::Unannotated {
- default_bm
- } else {
- BindingMode::convert(*mode)
- };
- let inner_ty = if let Some(subpat) = subpat {
- self.infer_pat(*subpat, expected, default_bm)
- } else {
- expected.clone()
- };
- let inner_ty = self.insert_type_vars_shallow(inner_ty);
-
- let bound_ty = match mode {
- BindingMode::Ref(mutability) => {
- Ty::apply_one(TypeCtor::Ref(mutability), inner_ty.clone())
- }
- BindingMode::Move => inner_ty.clone(),
- };
- let bound_ty = self.resolve_ty_as_possible(bound_ty);
- self.write_pat_ty(pat, bound_ty);
- return inner_ty;
- }
- Pat::Slice { prefix, slice, suffix } => {
- let (container_ty, elem_ty) = match &expected {
- ty_app!(TypeCtor::Array, st) => (TypeCtor::Array, st.as_single().clone()),
- ty_app!(TypeCtor::Slice, st) => (TypeCtor::Slice, st.as_single().clone()),
- _ => (TypeCtor::Slice, Ty::Unknown),
- };
-
- for pat_id in prefix.iter().chain(suffix) {
- self.infer_pat(*pat_id, &elem_ty, default_bm);
- }
-
- let pat_ty = Ty::apply_one(container_ty, elem_ty);
- if let Some(slice_pat_id) = slice {
- self.infer_pat(*slice_pat_id, &pat_ty, default_bm);
- }
-
- pat_ty
- }
- Pat::Wild => expected.clone(),
- Pat::Range { start, end } => {
- let start_ty = self.infer_expr(*start, &Expectation::has_type(expected.clone()));
- let end_ty = self.infer_expr(*end, &Expectation::has_type(start_ty));
- end_ty
- }
- Pat::Lit(expr) => self.infer_expr(*expr, &Expectation::has_type(expected.clone())),
- Pat::Missing => Ty::Unknown,
- };
- // use a new type variable if we got Ty::Unknown here
- let ty = self.insert_type_vars_shallow(ty);
- if !self.unify(&ty, expected) {
- // FIXME record mismatch, we need to change the type of self.type_mismatches for that
- }
- let ty = self.resolve_ty_as_possible(ty);
- self.write_pat_ty(pat, ty.clone());
- ty
- }
-}
-
-fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
- match &body[pat] {
- Pat::Tuple { .. }
- | Pat::TupleStruct { .. }
- | Pat::Record { .. }
- | Pat::Range { .. }
- | Pat::Slice { .. } => true,
- Pat::Or(pats) => pats.iter().all(|p| is_non_ref_pat(body, *p)),
- // FIXME: Path/Lit might actually evaluate to ref, but inference is unimplemented.
- Pat::Path(..) => true,
- Pat::Lit(expr) => match body[*expr] {
- Expr::Literal(Literal::String(..)) => false,
- _ => true,
- },
- Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Missing => false,
- }
-}
+++ /dev/null
-//! Path expression resolution.
-
-use std::iter;
-
-use hir_def::{
- path::{Path, PathSegment},
- resolver::{ResolveValueResult, Resolver, TypeNs, ValueNs},
- AdtId, AssocContainerId, AssocItemId, EnumVariantId, Lookup,
-};
-use hir_expand::name::Name;
-
-use crate::{method_resolution, Substs, Ty, ValueTyDefId};
-
-use super::{ExprOrPatId, InferenceContext, TraitRef};
-
-impl<'a> InferenceContext<'a> {
- pub(super) fn infer_path(
- &mut self,
- resolver: &Resolver,
- path: &Path,
- id: ExprOrPatId,
- ) -> Option<Ty> {
- let ty = self.resolve_value_path(resolver, path, id)?;
- let ty = self.insert_type_vars(ty);
- let ty = self.normalize_associated_types_in(ty);
- Some(ty)
- }
-
- fn resolve_value_path(
- &mut self,
- resolver: &Resolver,
- path: &Path,
- id: ExprOrPatId,
- ) -> Option<Ty> {
- let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
- if path.segments().is_empty() {
- // This can't actually happen syntax-wise
- return None;
- }
- let ty = self.make_ty(type_ref);
- let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
- let ctx = crate::lower::TyLoweringContext::new(self.db, &resolver);
- let (ty, _) = Ty::from_type_relative_path(&ctx, ty, None, remaining_segments_for_ty);
- self.resolve_ty_assoc_item(
- ty,
- &path.segments().last().expect("path had at least one segment").name,
- id,
- )?
- } else {
- let value_or_partial =
- resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path())?;
-
- match value_or_partial {
- ResolveValueResult::ValueNs(it) => (it, None),
- ResolveValueResult::Partial(def, remaining_index) => {
- self.resolve_assoc_item(def, path, remaining_index, id)?
- }
- }
- };
-
- let typable: ValueTyDefId = match value {
- ValueNs::LocalBinding(pat) => {
- let ty = self.result.type_of_pat.get(pat)?.clone();
- let ty = self.resolve_ty_as_possible(ty);
- return Some(ty);
- }
- ValueNs::FunctionId(it) => it.into(),
- ValueNs::ConstId(it) => it.into(),
- ValueNs::StaticId(it) => it.into(),
- ValueNs::StructId(it) => {
- self.write_variant_resolution(id, it.into());
-
- it.into()
- }
- ValueNs::EnumVariantId(it) => {
- self.write_variant_resolution(id, it.into());
-
- it.into()
- }
- ValueNs::ImplSelf(impl_id) => {
- let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
- let substs = Substs::type_params_for_generics(&generics);
- let ty = self.db.impl_self_ty(impl_id).subst(&substs);
- if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() {
- let ty = self.db.value_ty(struct_id.into()).subst(&substs);
- return Some(ty);
- } else {
- // FIXME: diagnostic, invalid Self reference
- return None;
- }
- }
- };
-
- let ty = self.db.value_ty(typable);
- // self_subst is just for the parent
- let parent_substs = self_subst.unwrap_or_else(Substs::empty);
- let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
- let substs = Ty::substs_from_path(&ctx, path, typable, true);
- let full_substs = Substs::builder(substs.len())
- .use_parent_substs(&parent_substs)
- .fill(substs.0[parent_substs.len()..].iter().cloned())
- .build();
- let ty = ty.subst(&full_substs);
- Some(ty)
- }
-
- fn resolve_assoc_item(
- &mut self,
- def: TypeNs,
- path: &Path,
- remaining_index: usize,
- id: ExprOrPatId,
- ) -> Option<(ValueNs, Option<Substs>)> {
- assert!(remaining_index < path.segments().len());
- // there may be more intermediate segments between the resolved one and
- // the end. Only the last segment needs to be resolved to a value; from
- // the segments before that, we need to get either a type or a trait ref.
-
- let resolved_segment = path.segments().get(remaining_index - 1).unwrap();
- let remaining_segments = path.segments().skip(remaining_index);
- let is_before_last = remaining_segments.len() == 1;
-
- match (def, is_before_last) {
- (TypeNs::TraitId(trait_), true) => {
- let segment =
- remaining_segments.last().expect("there should be at least one segment here");
- let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
- let trait_ref = TraitRef::from_resolved_path(&ctx, trait_, resolved_segment, None);
- self.resolve_trait_assoc_item(trait_ref, segment, id)
- }
- (def, _) => {
- // Either we already have a type (e.g. `Vec::new`), or we have a
- // trait but it's not the last segment, so the next segment
- // should resolve to an associated type of that trait (e.g. `<T
- // as Iterator>::Item::default`)
- let remaining_segments_for_ty =
- remaining_segments.take(remaining_segments.len() - 1);
- let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
- let (ty, _) = Ty::from_partly_resolved_hir_path(
- &ctx,
- def,
- resolved_segment,
- remaining_segments_for_ty,
- true,
- );
- if let Ty::Unknown = ty {
- return None;
- }
-
- let ty = self.insert_type_vars(ty);
- let ty = self.normalize_associated_types_in(ty);
-
- let segment =
- remaining_segments.last().expect("there should be at least one segment here");
-
- self.resolve_ty_assoc_item(ty, &segment.name, id)
- }
- }
- }
-
- fn resolve_trait_assoc_item(
- &mut self,
- trait_ref: TraitRef,
- segment: PathSegment<'_>,
- id: ExprOrPatId,
- ) -> Option<(ValueNs, Option<Substs>)> {
- let trait_ = trait_ref.trait_;
- let item =
- self.db.trait_data(trait_).items.iter().map(|(_name, id)| (*id)).find_map(|item| {
- match item {
- AssocItemId::FunctionId(func) => {
- if segment.name == &self.db.function_data(func).name {
- Some(AssocItemId::FunctionId(func))
- } else {
- None
- }
- }
-
- AssocItemId::ConstId(konst) => {
- if self
- .db
- .const_data(konst)
- .name
- .as_ref()
- .map_or(false, |n| n == segment.name)
- {
- Some(AssocItemId::ConstId(konst))
- } else {
- None
- }
- }
- AssocItemId::TypeAliasId(_) => None,
- }
- })?;
- let def = match item {
- AssocItemId::FunctionId(f) => ValueNs::FunctionId(f),
- AssocItemId::ConstId(c) => ValueNs::ConstId(c),
- AssocItemId::TypeAliasId(_) => unreachable!(),
- };
-
- self.write_assoc_resolution(id, item);
- Some((def, Some(trait_ref.substs)))
- }
-
- fn resolve_ty_assoc_item(
- &mut self,
- ty: Ty,
- name: &Name,
- id: ExprOrPatId,
- ) -> Option<(ValueNs, Option<Substs>)> {
- if let Ty::Unknown = ty {
- return None;
- }
-
- if let Some(result) = self.resolve_enum_variant_on_ty(&ty, name, id) {
- return Some(result);
- }
-
- let canonical_ty = self.canonicalizer().canonicalize_ty(ty.clone());
- let krate = self.resolver.krate()?;
- let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
-
- method_resolution::iterate_method_candidates(
- &canonical_ty.value,
- self.db,
- self.trait_env.clone(),
- krate,
- &traits_in_scope,
- Some(name),
- method_resolution::LookupMode::Path,
- move |_ty, item| {
- let (def, container) = match item {
- AssocItemId::FunctionId(f) => {
- (ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container)
- }
- AssocItemId::ConstId(c) => {
- (ValueNs::ConstId(c), c.lookup(self.db.upcast()).container)
- }
- AssocItemId::TypeAliasId(_) => unreachable!(),
- };
- let substs = match container {
- AssocContainerId::ImplId(impl_id) => {
- let impl_substs = Substs::build_for_def(self.db, impl_id)
- .fill(iter::repeat_with(|| self.table.new_type_var()))
- .build();
- let impl_self_ty = self.db.impl_self_ty(impl_id).subst(&impl_substs);
- self.unify(&impl_self_ty, &ty);
- Some(impl_substs)
- }
- AssocContainerId::TraitId(trait_) => {
- // we're picking this method
- let trait_substs = Substs::build_for_def(self.db, trait_)
- .push(ty.clone())
- .fill(std::iter::repeat_with(|| self.table.new_type_var()))
- .build();
- self.obligations.push(super::Obligation::Trait(TraitRef {
- trait_,
- substs: trait_substs.clone(),
- }));
- Some(trait_substs)
- }
- AssocContainerId::ContainerId(_) => None,
- };
-
- self.write_assoc_resolution(id, item);
- Some((def, substs))
- },
- )
- }
-
- fn resolve_enum_variant_on_ty(
- &mut self,
- ty: &Ty,
- name: &Name,
- id: ExprOrPatId,
- ) -> Option<(ValueNs, Option<Substs>)> {
- let (enum_id, subst) = match ty.as_adt() {
- Some((AdtId::EnumId(e), subst)) => (e, subst),
- _ => return None,
- };
- let enum_data = self.db.enum_data(enum_id);
- let local_id = enum_data.variant(name)?;
- let variant = EnumVariantId { parent: enum_id, local_id };
- self.write_variant_resolution(id, variant.into());
- Some((ValueNs::EnumVariantId(variant), Some(subst.clone())))
- }
-}
+++ /dev/null
-//! Unification and canonicalization logic.
-
-use std::borrow::Cow;
-
-use ena::unify::{InPlaceUnificationTable, NoError, UnifyKey, UnifyValue};
-
-use test_utils::mark;
-
-use super::{InferenceContext, Obligation};
-use crate::{
- BoundVar, Canonical, DebruijnIndex, GenericPredicate, InEnvironment, InferTy, Substs, Ty,
- TyKind, TypeCtor, TypeWalk,
-};
-
-impl<'a> InferenceContext<'a> {
- pub(super) fn canonicalizer<'b>(&'b mut self) -> Canonicalizer<'a, 'b>
- where
- 'a: 'b,
- {
- Canonicalizer { ctx: self, free_vars: Vec::new(), var_stack: Vec::new() }
- }
-}
-
-pub(super) struct Canonicalizer<'a, 'b>
-where
- 'a: 'b,
-{
- ctx: &'b mut InferenceContext<'a>,
- free_vars: Vec<InferTy>,
- /// A stack of type variables that is used to detect recursive types (which
- /// are an error, but we need to protect against them to avoid stack
- /// overflows).
- var_stack: Vec<TypeVarId>,
-}
-
-#[derive(Debug)]
-pub(super) struct Canonicalized<T> {
- pub value: Canonical<T>,
- free_vars: Vec<InferTy>,
-}
-
-impl<'a, 'b> Canonicalizer<'a, 'b>
-where
- 'a: 'b,
-{
- fn add(&mut self, free_var: InferTy) -> usize {
- self.free_vars.iter().position(|&v| v == free_var).unwrap_or_else(|| {
- let next_index = self.free_vars.len();
- self.free_vars.push(free_var);
- next_index
- })
- }
-
- fn do_canonicalize<T: TypeWalk>(&mut self, t: T, binders: DebruijnIndex) -> T {
- t.fold_binders(
- &mut |ty, binders| match ty {
- Ty::Infer(tv) => {
- let inner = tv.to_inner();
- if self.var_stack.contains(&inner) {
- // recursive type
- return tv.fallback_value();
- }
- if let Some(known_ty) =
- self.ctx.table.var_unification_table.inlined_probe_value(inner).known()
- {
- self.var_stack.push(inner);
- let result = self.do_canonicalize(known_ty.clone(), binders);
- self.var_stack.pop();
- result
- } else {
- let root = self.ctx.table.var_unification_table.find(inner);
- let free_var = match tv {
- InferTy::TypeVar(_) => InferTy::TypeVar(root),
- InferTy::IntVar(_) => InferTy::IntVar(root),
- InferTy::FloatVar(_) => InferTy::FloatVar(root),
- InferTy::MaybeNeverTypeVar(_) => InferTy::MaybeNeverTypeVar(root),
- };
- let position = self.add(free_var);
- Ty::Bound(BoundVar::new(binders, position))
- }
- }
- _ => ty,
- },
- binders,
- )
- }
-
- fn into_canonicalized<T>(self, result: T) -> Canonicalized<T> {
- let kinds = self
- .free_vars
- .iter()
- .map(|v| match v {
- // mapping MaybeNeverTypeVar to the same kind as general ones
- // should be fine, because as opposed to int or float type vars,
- // they don't restrict what kind of type can go into them, they
- // just affect fallback.
- InferTy::TypeVar(_) | InferTy::MaybeNeverTypeVar(_) => TyKind::General,
- InferTy::IntVar(_) => TyKind::Integer,
- InferTy::FloatVar(_) => TyKind::Float,
- })
- .collect();
- Canonicalized { value: Canonical { value: result, kinds }, free_vars: self.free_vars }
- }
-
- pub(crate) fn canonicalize_ty(mut self, ty: Ty) -> Canonicalized<Ty> {
- let result = self.do_canonicalize(ty, DebruijnIndex::INNERMOST);
- self.into_canonicalized(result)
- }
-
- pub(crate) fn canonicalize_obligation(
- mut self,
- obligation: InEnvironment<Obligation>,
- ) -> Canonicalized<InEnvironment<Obligation>> {
- let result = match obligation.value {
- Obligation::Trait(tr) => {
- Obligation::Trait(self.do_canonicalize(tr, DebruijnIndex::INNERMOST))
- }
- Obligation::Projection(pr) => {
- Obligation::Projection(self.do_canonicalize(pr, DebruijnIndex::INNERMOST))
- }
- };
- self.into_canonicalized(InEnvironment {
- value: result,
- environment: obligation.environment,
- })
- }
-}
-
-impl<T> Canonicalized<T> {
- pub fn decanonicalize_ty(&self, mut ty: Ty) -> Ty {
- ty.walk_mut_binders(
- &mut |ty, binders| {
- if let &mut Ty::Bound(bound) = ty {
- if bound.debruijn >= binders {
- *ty = Ty::Infer(self.free_vars[bound.index]);
- }
- }
- },
- DebruijnIndex::INNERMOST,
- );
- ty
- }
-
- pub fn apply_solution(&self, ctx: &mut InferenceContext<'_>, solution: Canonical<Substs>) {
- // the solution may contain new variables, which we need to convert to new inference vars
- let new_vars = Substs(
- solution
- .kinds
- .iter()
- .map(|k| match k {
- TyKind::General => ctx.table.new_type_var(),
- TyKind::Integer => ctx.table.new_integer_var(),
- TyKind::Float => ctx.table.new_float_var(),
- })
- .collect(),
- );
- for (i, ty) in solution.value.into_iter().enumerate() {
- let var = self.free_vars[i];
- // eagerly replace projections in the type; we may be getting types
- // e.g. from where clauses where this hasn't happened yet
- let ty = ctx.normalize_associated_types_in(ty.clone().subst_bound_vars(&new_vars));
- ctx.table.unify(&Ty::Infer(var), &ty);
- }
- }
-}
-
-pub fn unify(tys: &Canonical<(Ty, Ty)>) -> Option<Substs> {
- let mut table = InferenceTable::new();
- let vars = Substs(
- tys.kinds
- .iter()
- // we always use type vars here because we want everything to
- // fallback to Unknown in the end (kind of hacky, as below)
- .map(|_| table.new_type_var())
- .collect(),
- );
- let ty1_with_vars = tys.value.0.clone().subst_bound_vars(&vars);
- let ty2_with_vars = tys.value.1.clone().subst_bound_vars(&vars);
- if !table.unify(&ty1_with_vars, &ty2_with_vars) {
- return None;
- }
- // default any type vars that weren't unified back to their original bound vars
- // (kind of hacky)
- for (i, var) in vars.iter().enumerate() {
- if &*table.resolve_ty_shallow(var) == var {
- table.unify(var, &Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i)));
- }
- }
- Some(
- Substs::builder(tys.kinds.len())
- .fill(vars.iter().map(|v| table.resolve_ty_completely(v.clone())))
- .build(),
- )
-}
-
-#[derive(Clone, Debug)]
-pub(crate) struct InferenceTable {
- pub(super) var_unification_table: InPlaceUnificationTable<TypeVarId>,
-}
-
-impl InferenceTable {
- pub fn new() -> Self {
- InferenceTable { var_unification_table: InPlaceUnificationTable::new() }
- }
-
- pub fn new_type_var(&mut self) -> Ty {
- Ty::Infer(InferTy::TypeVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
- }
-
- pub fn new_integer_var(&mut self) -> Ty {
- Ty::Infer(InferTy::IntVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
- }
-
- pub fn new_float_var(&mut self) -> Ty {
- Ty::Infer(InferTy::FloatVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
- }
-
- pub fn new_maybe_never_type_var(&mut self) -> Ty {
- Ty::Infer(InferTy::MaybeNeverTypeVar(
- self.var_unification_table.new_key(TypeVarValue::Unknown),
- ))
- }
-
- pub fn resolve_ty_completely(&mut self, ty: Ty) -> Ty {
- self.resolve_ty_completely_inner(&mut Vec::new(), ty)
- }
-
- pub fn resolve_ty_as_possible(&mut self, ty: Ty) -> Ty {
- self.resolve_ty_as_possible_inner(&mut Vec::new(), ty)
- }
-
- pub fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
- self.unify_inner(ty1, ty2, 0)
- }
-
- pub fn unify_substs(&mut self, substs1: &Substs, substs2: &Substs, depth: usize) -> bool {
- substs1.0.iter().zip(substs2.0.iter()).all(|(t1, t2)| self.unify_inner(t1, t2, depth))
- }
-
- fn unify_inner(&mut self, ty1: &Ty, ty2: &Ty, depth: usize) -> bool {
- if depth > 1000 {
- // prevent stackoverflows
- panic!("infinite recursion in unification");
- }
- if ty1 == ty2 {
- return true;
- }
- // try to resolve type vars first
- let ty1 = self.resolve_ty_shallow(ty1);
- let ty2 = self.resolve_ty_shallow(ty2);
- match (&*ty1, &*ty2) {
- (Ty::Apply(a_ty1), Ty::Apply(a_ty2)) if a_ty1.ctor == a_ty2.ctor => {
- self.unify_substs(&a_ty1.parameters, &a_ty2.parameters, depth + 1)
- }
-
- _ => self.unify_inner_trivial(&ty1, &ty2, depth),
- }
- }
-
- pub(super) fn unify_inner_trivial(&mut self, ty1: &Ty, ty2: &Ty, depth: usize) -> bool {
- match (ty1, ty2) {
- (Ty::Unknown, _) | (_, Ty::Unknown) => true,
-
- (Ty::Placeholder(p1), Ty::Placeholder(p2)) if *p1 == *p2 => true,
-
- (Ty::Dyn(dyn1), Ty::Dyn(dyn2)) if dyn1.len() == dyn2.len() => {
- for (pred1, pred2) in dyn1.iter().zip(dyn2.iter()) {
- if !self.unify_preds(pred1, pred2, depth + 1) {
- return false;
- }
- }
- true
- }
-
- (Ty::Infer(InferTy::TypeVar(tv1)), Ty::Infer(InferTy::TypeVar(tv2)))
- | (Ty::Infer(InferTy::IntVar(tv1)), Ty::Infer(InferTy::IntVar(tv2)))
- | (Ty::Infer(InferTy::FloatVar(tv1)), Ty::Infer(InferTy::FloatVar(tv2)))
- | (
- Ty::Infer(InferTy::MaybeNeverTypeVar(tv1)),
- Ty::Infer(InferTy::MaybeNeverTypeVar(tv2)),
- ) => {
- // both type vars are unknown since we tried to resolve them
- self.var_unification_table.union(*tv1, *tv2);
- true
- }
-
- // The order of MaybeNeverTypeVar matters here.
- // Unifying MaybeNeverTypeVar and TypeVar will let the latter become MaybeNeverTypeVar.
- // Unifying MaybeNeverTypeVar and other concrete type will let the former become it.
- (Ty::Infer(InferTy::TypeVar(tv)), other)
- | (other, Ty::Infer(InferTy::TypeVar(tv)))
- | (Ty::Infer(InferTy::MaybeNeverTypeVar(tv)), other)
- | (other, Ty::Infer(InferTy::MaybeNeverTypeVar(tv)))
- | (Ty::Infer(InferTy::IntVar(tv)), other @ ty_app!(TypeCtor::Int(_)))
- | (other @ ty_app!(TypeCtor::Int(_)), Ty::Infer(InferTy::IntVar(tv)))
- | (Ty::Infer(InferTy::FloatVar(tv)), other @ ty_app!(TypeCtor::Float(_)))
- | (other @ ty_app!(TypeCtor::Float(_)), Ty::Infer(InferTy::FloatVar(tv))) => {
- // the type var is unknown since we tried to resolve it
- self.var_unification_table.union_value(*tv, TypeVarValue::Known(other.clone()));
- true
- }
-
- _ => false,
- }
- }
-
- fn unify_preds(
- &mut self,
- pred1: &GenericPredicate,
- pred2: &GenericPredicate,
- depth: usize,
- ) -> bool {
- match (pred1, pred2) {
- (GenericPredicate::Implemented(tr1), GenericPredicate::Implemented(tr2))
- if tr1.trait_ == tr2.trait_ =>
- {
- self.unify_substs(&tr1.substs, &tr2.substs, depth + 1)
- }
- (GenericPredicate::Projection(proj1), GenericPredicate::Projection(proj2))
- if proj1.projection_ty.associated_ty == proj2.projection_ty.associated_ty =>
- {
- self.unify_substs(
- &proj1.projection_ty.parameters,
- &proj2.projection_ty.parameters,
- depth + 1,
- ) && self.unify_inner(&proj1.ty, &proj2.ty, depth + 1)
- }
- _ => false,
- }
- }
-
- /// If `ty` is a type variable with known type, returns that type;
- /// otherwise, return ty.
- pub fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> {
- let mut ty = Cow::Borrowed(ty);
- // The type variable could resolve to a int/float variable. Hence try
- // resolving up to three times; each type of variable shouldn't occur
- // more than once
- for i in 0..3 {
- if i > 0 {
- mark::hit!(type_var_resolves_to_int_var);
- }
- match &*ty {
- Ty::Infer(tv) => {
- let inner = tv.to_inner();
- match self.var_unification_table.inlined_probe_value(inner).known() {
- Some(known_ty) => {
- // The known_ty can't be a type var itself
- ty = Cow::Owned(known_ty.clone());
- }
- _ => return ty,
- }
- }
- _ => return ty,
- }
- }
- log::error!("Inference variable still not resolved: {:?}", ty);
- ty
- }
-
- /// Resolves the type as far as currently possible, replacing type variables
- /// by their known types. All types returned by the infer_* functions should
- /// be resolved as far as possible, i.e. contain no type variables with
- /// known type.
- fn resolve_ty_as_possible_inner(&mut self, tv_stack: &mut Vec<TypeVarId>, ty: Ty) -> Ty {
- ty.fold(&mut |ty| match ty {
- Ty::Infer(tv) => {
- let inner = tv.to_inner();
- if tv_stack.contains(&inner) {
- mark::hit!(type_var_cycles_resolve_as_possible);
- // recursive type
- return tv.fallback_value();
- }
- if let Some(known_ty) =
- self.var_unification_table.inlined_probe_value(inner).known()
- {
- // known_ty may contain other variables that are known by now
- tv_stack.push(inner);
- let result = self.resolve_ty_as_possible_inner(tv_stack, known_ty.clone());
- tv_stack.pop();
- result
- } else {
- ty
- }
- }
- _ => ty,
- })
- }
-
- /// Resolves the type completely; type variables without known type are
- /// replaced by Ty::Unknown.
- fn resolve_ty_completely_inner(&mut self, tv_stack: &mut Vec<TypeVarId>, ty: Ty) -> Ty {
- ty.fold(&mut |ty| match ty {
- Ty::Infer(tv) => {
- let inner = tv.to_inner();
- if tv_stack.contains(&inner) {
- mark::hit!(type_var_cycles_resolve_completely);
- // recursive type
- return tv.fallback_value();
- }
- if let Some(known_ty) =
- self.var_unification_table.inlined_probe_value(inner).known()
- {
- // known_ty may contain other variables that are known by now
- tv_stack.push(inner);
- let result = self.resolve_ty_completely_inner(tv_stack, known_ty.clone());
- tv_stack.pop();
- result
- } else {
- tv.fallback_value()
- }
- }
- _ => ty,
- })
- }
-}
-
-/// The ID of a type variable.
-#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
-pub struct TypeVarId(pub(super) u32);
-
-impl UnifyKey for TypeVarId {
- type Value = TypeVarValue;
-
- fn index(&self) -> u32 {
- self.0
- }
-
- fn from_index(i: u32) -> Self {
- TypeVarId(i)
- }
-
- fn tag() -> &'static str {
- "TypeVarId"
- }
-}
-
-/// The value of a type variable: either we already know the type, or we don't
-/// know it yet.
-#[derive(Clone, PartialEq, Eq, Debug)]
-pub enum TypeVarValue {
- Known(Ty),
- Unknown,
-}
-
-impl TypeVarValue {
- fn known(&self) -> Option<&Ty> {
- match self {
- TypeVarValue::Known(ty) => Some(ty),
- TypeVarValue::Unknown => None,
- }
- }
-}
-
-impl UnifyValue for TypeVarValue {
- type Error = NoError;
-
- fn unify_values(value1: &Self, value2: &Self) -> Result<Self, NoError> {
- match (value1, value2) {
- // We should never equate two type variables, both of which have
- // known types. Instead, we recursively equate those types.
- (TypeVarValue::Known(t1), TypeVarValue::Known(t2)) => panic!(
- "equating two type variables, both of which have known types: {:?} and {:?}",
- t1, t2
- ),
-
- // If one side is known, prefer that one.
- (TypeVarValue::Known(..), TypeVarValue::Unknown) => Ok(value1.clone()),
- (TypeVarValue::Unknown, TypeVarValue::Known(..)) => Ok(value2.clone()),
-
- (TypeVarValue::Unknown, TypeVarValue::Unknown) => Ok(TypeVarValue::Unknown),
- }
- }
-}
+++ /dev/null
-//! The type system. We currently use this to infer types for completion, hover
-//! information and various assists.
-
-#[allow(unused)]
-macro_rules! eprintln {
- ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
-}
-
-mod autoderef;
-pub mod primitive;
-pub mod traits;
-pub mod method_resolution;
-mod op;
-mod lower;
-pub(crate) mod infer;
-pub(crate) mod utils;
-
-pub mod display;
-pub mod db;
-pub mod diagnostics;
-
-#[cfg(test)]
-mod tests;
-#[cfg(test)]
-mod test_db;
-
-use std::{iter, mem, ops::Deref, sync::Arc};
-
-use base_db::{salsa, CrateId};
-use hir_def::{
- expr::ExprId,
- type_ref::{Mutability, Rawness},
- AdtId, AssocContainerId, DefWithBodyId, GenericDefId, HasModule, Lookup, TraitId, TypeAliasId,
- TypeParamId,
-};
-use itertools::Itertools;
-
-use crate::{
- db::HirDatabase,
- display::HirDisplay,
- primitive::{FloatTy, IntTy},
- utils::{generics, make_mut_slice, Generics},
-};
-
-pub use autoderef::autoderef;
-pub use infer::{InferTy, InferenceResult};
-pub use lower::CallableDefId;
-pub use lower::{
- associated_type_shorthand_candidates, callable_item_sig, ImplTraitLoweringMode, TyDefId,
- TyLoweringContext, ValueTyDefId,
-};
-pub use traits::{InEnvironment, Obligation, ProjectionPredicate, TraitEnvironment};
-
-pub use chalk_ir::{BoundVar, DebruijnIndex};
-
-/// A type constructor or type name: this might be something like the primitive
-/// type `bool`, a struct like `Vec`, or things like function pointers or
-/// tuples.
-#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
-pub enum TypeCtor {
- /// The primitive boolean type. Written as `bool`.
- Bool,
-
- /// The primitive character type; holds a Unicode scalar value
- /// (a non-surrogate code point). Written as `char`.
- Char,
-
- /// A primitive integer type. For example, `i32`.
- Int(IntTy),
-
- /// A primitive floating-point type. For example, `f64`.
- Float(FloatTy),
-
- /// Structures, enumerations and unions.
- Adt(AdtId),
-
- /// The pointee of a string slice. Written as `str`.
- Str,
-
- /// The pointee of an array slice. Written as `[T]`.
- Slice,
-
- /// An array with the given length. Written as `[T; n]`.
- Array,
-
- /// A raw pointer. Written as `*mut T` or `*const T`
- RawPtr(Mutability),
-
- /// A reference; a pointer with an associated lifetime. Written as
- /// `&'a mut T` or `&'a T`.
- Ref(Mutability),
-
- /// The anonymous type of a function declaration/definition. Each
- /// function has a unique type, which is output (for a function
- /// named `foo` returning an `i32`) as `fn() -> i32 {foo}`.
- ///
- /// This includes tuple struct / enum variant constructors as well.
- ///
- /// For example the type of `bar` here:
- ///
- /// ```
- /// fn foo() -> i32 { 1 }
- /// let bar = foo; // bar: fn() -> i32 {foo}
- /// ```
- FnDef(CallableDefId),
-
- /// A pointer to a function. Written as `fn() -> i32`.
- ///
- /// For example the type of `bar` here:
- ///
- /// ```
- /// fn foo() -> i32 { 1 }
- /// let bar: fn() -> i32 = foo;
- /// ```
- // FIXME make this a Ty variant like in Chalk
- FnPtr { num_args: u16, is_varargs: bool },
-
- /// The never type `!`.
- Never,
-
- /// A tuple type. For example, `(i32, bool)`.
- Tuple { cardinality: u16 },
-
- /// Represents an associated item like `Iterator::Item`. This is used
- /// when we have tried to normalize a projection like `T::Item` but
- /// couldn't find a better representation. In that case, we generate
- /// an **application type** like `(Iterator::Item)<T>`.
- AssociatedType(TypeAliasId),
-
- /// This represents a placeholder for an opaque type in situations where we
- /// don't know the hidden type (i.e. currently almost always). This is
- /// analogous to the `AssociatedType` type constructor. As with that one,
- /// these are only produced by Chalk.
- OpaqueType(OpaqueTyId),
-
- /// The type of a specific closure.
- ///
- /// The closure signature is stored in a `FnPtr` type in the first type
- /// parameter.
- Closure { def: DefWithBodyId, expr: ExprId },
-}
-
-impl TypeCtor {
- pub fn num_ty_params(self, db: &dyn HirDatabase) -> usize {
- match self {
- TypeCtor::Bool
- | TypeCtor::Char
- | TypeCtor::Int(_)
- | TypeCtor::Float(_)
- | TypeCtor::Str
- | TypeCtor::Never => 0,
- TypeCtor::Slice
- | TypeCtor::Array
- | TypeCtor::RawPtr(_)
- | TypeCtor::Ref(_)
- | TypeCtor::Closure { .. } // 1 param representing the signature of the closure
- => 1,
- TypeCtor::Adt(adt) => {
- let generic_params = generics(db.upcast(), adt.into());
- generic_params.len()
- }
- TypeCtor::FnDef(callable) => {
- let generic_params = generics(db.upcast(), callable.into());
- generic_params.len()
- }
- TypeCtor::AssociatedType(type_alias) => {
- let generic_params = generics(db.upcast(), type_alias.into());
- generic_params.len()
- }
- TypeCtor::OpaqueType(opaque_ty_id) => {
- match opaque_ty_id {
- OpaqueTyId::ReturnTypeImplTrait(func, _) => {
- let generic_params = generics(db.upcast(), func.into());
- generic_params.len()
- }
- }
- }
- TypeCtor::FnPtr { num_args, is_varargs: _ } => num_args as usize + 1,
- TypeCtor::Tuple { cardinality } => cardinality as usize,
- }
- }
-
- pub fn krate(self, db: &dyn HirDatabase) -> Option<CrateId> {
- match self {
- TypeCtor::Bool
- | TypeCtor::Char
- | TypeCtor::Int(_)
- | TypeCtor::Float(_)
- | TypeCtor::Str
- | TypeCtor::Never
- | TypeCtor::Slice
- | TypeCtor::Array
- | TypeCtor::RawPtr(_)
- | TypeCtor::Ref(_)
- | TypeCtor::FnPtr { .. }
- | TypeCtor::Tuple { .. } => None,
- // Closure's krate is irrelevant for coherence I would think?
- TypeCtor::Closure { .. } => None,
- TypeCtor::Adt(adt) => Some(adt.module(db.upcast()).krate),
- TypeCtor::FnDef(callable) => Some(callable.krate(db)),
- TypeCtor::AssociatedType(type_alias) => {
- Some(type_alias.lookup(db.upcast()).module(db.upcast()).krate)
- }
- TypeCtor::OpaqueType(opaque_ty_id) => match opaque_ty_id {
- OpaqueTyId::ReturnTypeImplTrait(func, _) => {
- Some(func.lookup(db.upcast()).module(db.upcast()).krate)
- }
- },
- }
- }
-
- pub fn as_generic_def(self) -> Option<GenericDefId> {
- match self {
- TypeCtor::Bool
- | TypeCtor::Char
- | TypeCtor::Int(_)
- | TypeCtor::Float(_)
- | TypeCtor::Str
- | TypeCtor::Never
- | TypeCtor::Slice
- | TypeCtor::Array
- | TypeCtor::RawPtr(_)
- | TypeCtor::Ref(_)
- | TypeCtor::FnPtr { .. }
- | TypeCtor::Tuple { .. }
- | TypeCtor::Closure { .. } => None,
- TypeCtor::Adt(adt) => Some(adt.into()),
- TypeCtor::FnDef(callable) => Some(callable.into()),
- TypeCtor::AssociatedType(type_alias) => Some(type_alias.into()),
- TypeCtor::OpaqueType(_impl_trait_id) => None,
- }
- }
-}
-
-/// A nominal type with (maybe 0) type parameters. This might be a primitive
-/// type like `bool`, a struct, tuple, function pointer, reference or
-/// several other things.
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub struct ApplicationTy {
- pub ctor: TypeCtor,
- pub parameters: Substs,
-}
-
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub struct OpaqueTy {
- pub opaque_ty_id: OpaqueTyId,
- pub parameters: Substs,
-}
-
-/// A "projection" type corresponds to an (unnormalized)
-/// projection like `<P0 as Trait<P1..Pn>>::Foo`. Note that the
-/// trait and all its parameters are fully known.
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub struct ProjectionTy {
- pub associated_ty: TypeAliasId,
- pub parameters: Substs,
-}
-
-impl ProjectionTy {
- pub fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef {
- TraitRef { trait_: self.trait_(db), substs: self.parameters.clone() }
- }
-
- fn trait_(&self, db: &dyn HirDatabase) -> TraitId {
- match self.associated_ty.lookup(db.upcast()).container {
- AssocContainerId::TraitId(it) => it,
- _ => panic!("projection ty without parent trait"),
- }
- }
-}
-
-impl TypeWalk for ProjectionTy {
- fn walk(&self, f: &mut impl FnMut(&Ty)) {
- self.parameters.walk(f);
- }
-
- fn walk_mut_binders(
- &mut self,
- f: &mut impl FnMut(&mut Ty, DebruijnIndex),
- binders: DebruijnIndex,
- ) {
- self.parameters.walk_mut_binders(f, binders);
- }
-}
-
-/// A type.
-///
-/// See also the `TyKind` enum in rustc (librustc/ty/sty.rs), which represents
-/// the same thing (but in a different way).
-///
-/// This should be cheap to clone.
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub enum Ty {
- /// A nominal type with (maybe 0) type parameters. This might be a primitive
- /// type like `bool`, a struct, tuple, function pointer, reference or
- /// several other things.
- Apply(ApplicationTy),
-
- /// A "projection" type corresponds to an (unnormalized)
- /// projection like `<P0 as Trait<P1..Pn>>::Foo`. Note that the
- /// trait and all its parameters are fully known.
- Projection(ProjectionTy),
-
- /// An opaque type (`impl Trait`).
- ///
- /// This is currently only used for return type impl trait; each instance of
- /// `impl Trait` in a return type gets its own ID.
- Opaque(OpaqueTy),
-
- /// A placeholder for a type parameter; for example, `T` in `fn f<T>(x: T)
- /// {}` when we're type-checking the body of that function. In this
- /// situation, we know this stands for *some* type, but don't know the exact
- /// type.
- Placeholder(TypeParamId),
-
- /// A bound type variable. This is used in various places: when representing
- /// some polymorphic type like the type of function `fn f<T>`, the type
- /// parameters get turned into variables; during trait resolution, inference
- /// variables get turned into bound variables and back; and in `Dyn` the
- /// `Self` type is represented with a bound variable as well.
- Bound(BoundVar),
-
- /// A type variable used during type checking.
- Infer(InferTy),
-
- /// A trait object (`dyn Trait` or bare `Trait` in pre-2018 Rust).
- ///
- /// The predicates are quantified over the `Self` type, i.e. `Ty::Bound(0)`
- /// represents the `Self` type inside the bounds. This is currently
- /// implicit; Chalk has the `Binders` struct to make it explicit, but it
- /// didn't seem worth the overhead yet.
- Dyn(Arc<[GenericPredicate]>),
-
- /// A placeholder for a type which could not be computed; this is propagated
- /// to avoid useless error messages. Doubles as a placeholder where type
- /// variables are inserted before type checking, since we want to try to
- /// infer a better type here anyway -- for the IDE use case, we want to try
- /// to infer as much as possible even in the presence of type errors.
- Unknown,
-}
-
-/// A list of substitutions for generic parameters.
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub struct Substs(Arc<[Ty]>);
-
-impl TypeWalk for Substs {
- fn walk(&self, f: &mut impl FnMut(&Ty)) {
- for t in self.0.iter() {
- t.walk(f);
- }
- }
-
- fn walk_mut_binders(
- &mut self,
- f: &mut impl FnMut(&mut Ty, DebruijnIndex),
- binders: DebruijnIndex,
- ) {
- for t in make_mut_slice(&mut self.0) {
- t.walk_mut_binders(f, binders);
- }
- }
-}
-
-impl Substs {
- pub fn empty() -> Substs {
- Substs(Arc::new([]))
- }
-
- pub fn single(ty: Ty) -> Substs {
- Substs(Arc::new([ty]))
- }
-
- pub fn prefix(&self, n: usize) -> Substs {
- Substs(self.0[..std::cmp::min(self.0.len(), n)].into())
- }
-
- pub fn suffix(&self, n: usize) -> Substs {
- Substs(self.0[self.0.len() - std::cmp::min(self.0.len(), n)..].into())
- }
-
- pub fn as_single(&self) -> &Ty {
- if self.0.len() != 1 {
- panic!("expected substs of len 1, got {:?}", self);
- }
- &self.0[0]
- }
-
- /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`).
- pub(crate) fn type_params_for_generics(generic_params: &Generics) -> Substs {
- Substs(generic_params.iter().map(|(id, _)| Ty::Placeholder(id)).collect())
- }
-
- /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`).
- pub fn type_params(db: &dyn HirDatabase, def: impl Into<GenericDefId>) -> Substs {
- let params = generics(db.upcast(), def.into());
- Substs::type_params_for_generics(¶ms)
- }
-
- /// Return Substs that replace each parameter by a bound variable.
- pub(crate) fn bound_vars(generic_params: &Generics, debruijn: DebruijnIndex) -> Substs {
- Substs(
- generic_params
- .iter()
- .enumerate()
- .map(|(idx, _)| Ty::Bound(BoundVar::new(debruijn, idx)))
- .collect(),
- )
- }
-
- pub fn build_for_def(db: &dyn HirDatabase, def: impl Into<GenericDefId>) -> SubstsBuilder {
- let def = def.into();
- let params = generics(db.upcast(), def);
- let param_count = params.len();
- Substs::builder(param_count)
- }
-
- pub(crate) fn build_for_generics(generic_params: &Generics) -> SubstsBuilder {
- Substs::builder(generic_params.len())
- }
-
- pub fn build_for_type_ctor(db: &dyn HirDatabase, type_ctor: TypeCtor) -> SubstsBuilder {
- Substs::builder(type_ctor.num_ty_params(db))
- }
-
- fn builder(param_count: usize) -> SubstsBuilder {
- SubstsBuilder { vec: Vec::with_capacity(param_count), param_count }
- }
-}
-
-/// Return an index of a parameter in the generic type parameter list by it's id.
-pub fn param_idx(db: &dyn HirDatabase, id: TypeParamId) -> Option<usize> {
- generics(db.upcast(), id.parent).param_idx(id)
-}
-
-#[derive(Debug, Clone)]
-pub struct SubstsBuilder {
- vec: Vec<Ty>,
- param_count: usize,
-}
-
-impl SubstsBuilder {
- pub fn build(self) -> Substs {
- assert_eq!(self.vec.len(), self.param_count);
- Substs(self.vec.into())
- }
-
- pub fn push(mut self, ty: Ty) -> Self {
- self.vec.push(ty);
- self
- }
-
- fn remaining(&self) -> usize {
- self.param_count - self.vec.len()
- }
-
- pub fn fill_with_bound_vars(self, debruijn: DebruijnIndex, starting_from: usize) -> Self {
- self.fill((starting_from..).map(|idx| Ty::Bound(BoundVar::new(debruijn, idx))))
- }
-
- pub fn fill_with_unknown(self) -> Self {
- self.fill(iter::repeat(Ty::Unknown))
- }
-
- pub fn fill(mut self, filler: impl Iterator<Item = Ty>) -> Self {
- self.vec.extend(filler.take(self.remaining()));
- assert_eq!(self.remaining(), 0);
- self
- }
-
- pub fn use_parent_substs(mut self, parent_substs: &Substs) -> Self {
- assert!(self.vec.is_empty());
- assert!(parent_substs.len() <= self.param_count);
- self.vec.extend(parent_substs.iter().cloned());
- self
- }
-}
-
-impl Deref for Substs {
- type Target = [Ty];
-
- fn deref(&self) -> &[Ty] {
- &self.0
- }
-}
-
-#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
-pub struct Binders<T> {
- pub num_binders: usize,
- pub value: T,
-}
-
-impl<T> Binders<T> {
- pub fn new(num_binders: usize, value: T) -> Self {
- Self { num_binders, value }
- }
-
- pub fn as_ref(&self) -> Binders<&T> {
- Binders { num_binders: self.num_binders, value: &self.value }
- }
-
- pub fn map<U>(self, f: impl FnOnce(T) -> U) -> Binders<U> {
- Binders { num_binders: self.num_binders, value: f(self.value) }
- }
-
- pub fn filter_map<U>(self, f: impl FnOnce(T) -> Option<U>) -> Option<Binders<U>> {
- Some(Binders { num_binders: self.num_binders, value: f(self.value)? })
- }
-}
-
-impl<T: Clone> Binders<&T> {
- pub fn cloned(&self) -> Binders<T> {
- Binders { num_binders: self.num_binders, value: self.value.clone() }
- }
-}
-
-impl<T: TypeWalk> Binders<T> {
- /// Substitutes all variables.
- pub fn subst(self, subst: &Substs) -> T {
- assert_eq!(subst.len(), self.num_binders);
- self.value.subst_bound_vars(subst)
- }
-
- /// Substitutes just a prefix of the variables (shifting the rest).
- pub fn subst_prefix(self, subst: &Substs) -> Binders<T> {
- assert!(subst.len() < self.num_binders);
- Binders::new(self.num_binders - subst.len(), self.value.subst_bound_vars(subst))
- }
-}
-
-impl<T: TypeWalk> TypeWalk for Binders<T> {
- fn walk(&self, f: &mut impl FnMut(&Ty)) {
- self.value.walk(f);
- }
-
- fn walk_mut_binders(
- &mut self,
- f: &mut impl FnMut(&mut Ty, DebruijnIndex),
- binders: DebruijnIndex,
- ) {
- self.value.walk_mut_binders(f, binders.shifted_in())
- }
-}
-
-/// A trait with type parameters. This includes the `Self`, so this represents a concrete type implementing the trait.
-/// Name to be bikeshedded: TraitBound? TraitImplements?
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub struct TraitRef {
- /// FIXME name?
- pub trait_: TraitId,
- pub substs: Substs,
-}
-
-impl TraitRef {
- pub fn self_ty(&self) -> &Ty {
- &self.substs[0]
- }
-}
-
-impl TypeWalk for TraitRef {
- fn walk(&self, f: &mut impl FnMut(&Ty)) {
- self.substs.walk(f);
- }
-
- fn walk_mut_binders(
- &mut self,
- f: &mut impl FnMut(&mut Ty, DebruijnIndex),
- binders: DebruijnIndex,
- ) {
- self.substs.walk_mut_binders(f, binders);
- }
-}
-
-/// Like `generics::WherePredicate`, but with resolved types: A condition on the
-/// parameters of a generic item.
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum GenericPredicate {
- /// The given trait needs to be implemented for its type parameters.
- Implemented(TraitRef),
- /// An associated type bindings like in `Iterator<Item = T>`.
- Projection(ProjectionPredicate),
- /// We couldn't resolve the trait reference. (If some type parameters can't
- /// be resolved, they will just be Unknown).
- Error,
-}
-
-impl GenericPredicate {
- pub fn is_error(&self) -> bool {
- matches!(self, GenericPredicate::Error)
- }
-
- pub fn is_implemented(&self) -> bool {
- matches!(self, GenericPredicate::Implemented(_))
- }
-
- pub fn trait_ref(&self, db: &dyn HirDatabase) -> Option<TraitRef> {
- match self {
- GenericPredicate::Implemented(tr) => Some(tr.clone()),
- GenericPredicate::Projection(proj) => Some(proj.projection_ty.trait_ref(db)),
- GenericPredicate::Error => None,
- }
- }
-}
-
-impl TypeWalk for GenericPredicate {
- fn walk(&self, f: &mut impl FnMut(&Ty)) {
- match self {
- GenericPredicate::Implemented(trait_ref) => trait_ref.walk(f),
- GenericPredicate::Projection(projection_pred) => projection_pred.walk(f),
- GenericPredicate::Error => {}
- }
- }
-
- fn walk_mut_binders(
- &mut self,
- f: &mut impl FnMut(&mut Ty, DebruijnIndex),
- binders: DebruijnIndex,
- ) {
- match self {
- GenericPredicate::Implemented(trait_ref) => trait_ref.walk_mut_binders(f, binders),
- GenericPredicate::Projection(projection_pred) => {
- projection_pred.walk_mut_binders(f, binders)
- }
- GenericPredicate::Error => {}
- }
- }
-}
-
-/// Basically a claim (currently not validated / checked) that the contained
-/// type / trait ref contains no inference variables; any inference variables it
-/// contained have been replaced by bound variables, and `kinds` tells us how
-/// many there are and whether they were normal or float/int variables. This is
-/// used to erase irrelevant differences between types before using them in
-/// queries.
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct Canonical<T> {
- pub value: T,
- pub kinds: Arc<[TyKind]>,
-}
-
-impl<T> Canonical<T> {
- pub fn new(value: T, kinds: impl IntoIterator<Item = TyKind>) -> Self {
- Self { value, kinds: kinds.into_iter().collect() }
- }
-}
-
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
-pub enum TyKind {
- General,
- Integer,
- Float,
-}
-
-/// A function signature as seen by type inference: Several parameter types and
-/// one return type.
-#[derive(Clone, PartialEq, Eq, Debug)]
-pub struct FnSig {
- params_and_return: Arc<[Ty]>,
- is_varargs: bool,
-}
-
-/// A polymorphic function signature.
-pub type PolyFnSig = Binders<FnSig>;
-
-impl FnSig {
- pub fn from_params_and_return(mut params: Vec<Ty>, ret: Ty, is_varargs: bool) -> FnSig {
- params.push(ret);
- FnSig { params_and_return: params.into(), is_varargs }
- }
-
- pub fn from_fn_ptr_substs(substs: &Substs, is_varargs: bool) -> FnSig {
- FnSig { params_and_return: Arc::clone(&substs.0), is_varargs }
- }
-
- pub fn params(&self) -> &[Ty] {
- &self.params_and_return[0..self.params_and_return.len() - 1]
- }
-
- pub fn ret(&self) -> &Ty {
- &self.params_and_return[self.params_and_return.len() - 1]
- }
-}
-
-impl TypeWalk for FnSig {
- fn walk(&self, f: &mut impl FnMut(&Ty)) {
- for t in self.params_and_return.iter() {
- t.walk(f);
- }
- }
-
- fn walk_mut_binders(
- &mut self,
- f: &mut impl FnMut(&mut Ty, DebruijnIndex),
- binders: DebruijnIndex,
- ) {
- for t in make_mut_slice(&mut self.params_and_return) {
- t.walk_mut_binders(f, binders);
- }
- }
-}
-
-impl Ty {
- pub fn simple(ctor: TypeCtor) -> Ty {
- Ty::Apply(ApplicationTy { ctor, parameters: Substs::empty() })
- }
- pub fn apply_one(ctor: TypeCtor, param: Ty) -> Ty {
- Ty::Apply(ApplicationTy { ctor, parameters: Substs::single(param) })
- }
- pub fn apply(ctor: TypeCtor, parameters: Substs) -> Ty {
- Ty::Apply(ApplicationTy { ctor, parameters })
- }
- pub fn unit() -> Self {
- Ty::apply(TypeCtor::Tuple { cardinality: 0 }, Substs::empty())
- }
- pub fn fn_ptr(sig: FnSig) -> Self {
- Ty::apply(
- TypeCtor::FnPtr { num_args: sig.params().len() as u16, is_varargs: sig.is_varargs },
- Substs(sig.params_and_return),
- )
- }
-
- pub fn as_reference(&self) -> Option<(&Ty, Mutability)> {
- match self {
- Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(mutability), parameters }) => {
- Some((parameters.as_single(), *mutability))
- }
- _ => None,
- }
- }
-
- pub fn as_reference_or_ptr(&self) -> Option<(&Ty, Rawness, Mutability)> {
- match self {
- Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(mutability), parameters }) => {
- Some((parameters.as_single(), Rawness::Ref, *mutability))
- }
- Ty::Apply(ApplicationTy { ctor: TypeCtor::RawPtr(mutability), parameters }) => {
- Some((parameters.as_single(), Rawness::RawPtr, *mutability))
- }
- _ => None,
- }
- }
-
- pub fn strip_references(&self) -> &Ty {
- let mut t: &Ty = self;
-
- while let Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(_mutability), parameters }) = t {
- t = parameters.as_single();
- }
-
- t
- }
-
- pub fn as_adt(&self) -> Option<(AdtId, &Substs)> {
- match self {
- Ty::Apply(ApplicationTy { ctor: TypeCtor::Adt(adt_def), parameters }) => {
- Some((*adt_def, parameters))
- }
- _ => None,
- }
- }
-
- pub fn as_tuple(&self) -> Option<&Substs> {
- match self {
- Ty::Apply(ApplicationTy { ctor: TypeCtor::Tuple { .. }, parameters }) => {
- Some(parameters)
- }
- _ => None,
- }
- }
-
- pub fn is_never(&self) -> bool {
- matches!(self, Ty::Apply(ApplicationTy { ctor: TypeCtor::Never, .. }))
- }
-
- /// If this is a `dyn Trait` type, this returns the `Trait` part.
- pub fn dyn_trait_ref(&self) -> Option<&TraitRef> {
- match self {
- Ty::Dyn(bounds) => bounds.get(0).and_then(|b| match b {
- GenericPredicate::Implemented(trait_ref) => Some(trait_ref),
- _ => None,
- }),
- _ => None,
- }
- }
-
- /// If this is a `dyn Trait`, returns that trait.
- pub fn dyn_trait(&self) -> Option<TraitId> {
- self.dyn_trait_ref().map(|it| it.trait_)
- }
-
- fn builtin_deref(&self) -> Option<Ty> {
- match self {
- Ty::Apply(a_ty) => match a_ty.ctor {
- TypeCtor::Ref(..) => Some(Ty::clone(a_ty.parameters.as_single())),
- TypeCtor::RawPtr(..) => Some(Ty::clone(a_ty.parameters.as_single())),
- _ => None,
- },
- _ => None,
- }
- }
-
- pub fn callable_sig(&self, db: &dyn HirDatabase) -> Option<FnSig> {
- match self {
- Ty::Apply(a_ty) => match a_ty.ctor {
- TypeCtor::FnPtr { is_varargs, .. } => {
- Some(FnSig::from_fn_ptr_substs(&a_ty.parameters, is_varargs))
- }
- TypeCtor::FnDef(def) => {
- let sig = db.callable_item_signature(def);
- Some(sig.subst(&a_ty.parameters))
- }
- TypeCtor::Closure { .. } => {
- let sig_param = &a_ty.parameters[0];
- sig_param.callable_sig(db)
- }
- _ => None,
- },
- _ => None,
- }
- }
-
- /// If this is a type with type parameters (an ADT or function), replaces
- /// the `Substs` for these type parameters with the given ones. (So e.g. if
- /// `self` is `Option<_>` and the substs contain `u32`, we'll have
- /// `Option<u32>` afterwards.)
- pub fn apply_substs(self, substs: Substs) -> Ty {
- match self {
- Ty::Apply(ApplicationTy { ctor, parameters: previous_substs }) => {
- assert_eq!(previous_substs.len(), substs.len());
- Ty::Apply(ApplicationTy { ctor, parameters: substs })
- }
- _ => self,
- }
- }
-
- /// Returns the type parameters of this type if it has some (i.e. is an ADT
- /// or function); so if `self` is `Option<u32>`, this returns the `u32`.
- pub fn substs(&self) -> Option<Substs> {
- match self {
- Ty::Apply(ApplicationTy { parameters, .. }) => Some(parameters.clone()),
- _ => None,
- }
- }
-
- pub fn impl_trait_bounds(&self, db: &dyn HirDatabase) -> Option<Vec<GenericPredicate>> {
- match self {
- Ty::Opaque(opaque_ty) => {
- let predicates = match opaque_ty.opaque_ty_id {
- OpaqueTyId::ReturnTypeImplTrait(func, idx) => {
- db.return_type_impl_traits(func).map(|it| {
- let data = (*it)
- .as_ref()
- .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
- data.subst(&opaque_ty.parameters)
- })
- }
- };
-
- predicates.map(|it| it.value)
- }
- Ty::Placeholder(id) => {
- let generic_params = db.generic_params(id.parent);
- let param_data = &generic_params.types[id.local_id];
- match param_data.provenance {
- hir_def::generics::TypeParamProvenance::ArgumentImplTrait => {
- let predicates = db
- .generic_predicates_for_param(*id)
- .into_iter()
- .map(|pred| pred.value.clone())
- .collect_vec();
-
- Some(predicates)
- }
- _ => None,
- }
- }
- _ => None,
- }
- }
-
- pub fn associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<TraitId> {
- match self {
- Ty::Apply(ApplicationTy { ctor: TypeCtor::AssociatedType(type_alias_id), .. }) => {
- match type_alias_id.lookup(db.upcast()).container {
- AssocContainerId::TraitId(trait_id) => Some(trait_id),
- _ => None,
- }
- }
- Ty::Projection(projection_ty) => {
- match projection_ty.associated_ty.lookup(db.upcast()).container {
- AssocContainerId::TraitId(trait_id) => Some(trait_id),
- _ => None,
- }
- }
- _ => None,
- }
- }
-}
-
-/// This allows walking structures that contain types to do something with those
-/// types, similar to Chalk's `Fold` trait.
-pub trait TypeWalk {
- fn walk(&self, f: &mut impl FnMut(&Ty));
- fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) {
- self.walk_mut_binders(&mut |ty, _binders| f(ty), DebruijnIndex::INNERMOST);
- }
- /// Walk the type, counting entered binders.
- ///
- /// `Ty::Bound` variables use DeBruijn indexing, which means that 0 refers
- /// to the innermost binder, 1 to the next, etc.. So when we want to
- /// substitute a certain bound variable, we can't just walk the whole type
- /// and blindly replace each instance of a certain index; when we 'enter'
- /// things that introduce new bound variables, we have to keep track of
- /// that. Currently, the only thing that introduces bound variables on our
- /// side are `Ty::Dyn` and `Ty::Opaque`, which each introduce a bound
- /// variable for the self type.
- fn walk_mut_binders(
- &mut self,
- f: &mut impl FnMut(&mut Ty, DebruijnIndex),
- binders: DebruijnIndex,
- );
-
- fn fold_binders(
- mut self,
- f: &mut impl FnMut(Ty, DebruijnIndex) -> Ty,
- binders: DebruijnIndex,
- ) -> Self
- where
- Self: Sized,
- {
- self.walk_mut_binders(
- &mut |ty_mut, binders| {
- let ty = mem::replace(ty_mut, Ty::Unknown);
- *ty_mut = f(ty, binders);
- },
- binders,
- );
- self
- }
-
- fn fold(mut self, f: &mut impl FnMut(Ty) -> Ty) -> Self
- where
- Self: Sized,
- {
- self.walk_mut(&mut |ty_mut| {
- let ty = mem::replace(ty_mut, Ty::Unknown);
- *ty_mut = f(ty);
- });
- self
- }
-
- /// Substitutes `Ty::Bound` vars with the given substitution.
- fn subst_bound_vars(self, substs: &Substs) -> Self
- where
- Self: Sized,
- {
- self.subst_bound_vars_at_depth(substs, DebruijnIndex::INNERMOST)
- }
-
- /// Substitutes `Ty::Bound` vars with the given substitution.
- fn subst_bound_vars_at_depth(mut self, substs: &Substs, depth: DebruijnIndex) -> Self
- where
- Self: Sized,
- {
- self.walk_mut_binders(
- &mut |ty, binders| {
- if let &mut Ty::Bound(bound) = ty {
- if bound.debruijn >= binders {
- *ty = substs.0[bound.index].clone().shift_bound_vars(binders);
- }
- }
- },
- depth,
- );
- self
- }
-
- /// Shifts up debruijn indices of `Ty::Bound` vars by `n`.
- fn shift_bound_vars(self, n: DebruijnIndex) -> Self
- where
- Self: Sized,
- {
- self.fold_binders(
- &mut |ty, binders| match ty {
- Ty::Bound(bound) if bound.debruijn >= binders => {
- Ty::Bound(bound.shifted_in_from(n))
- }
- ty => ty,
- },
- DebruijnIndex::INNERMOST,
- )
- }
-}
-
-impl TypeWalk for Ty {
- fn walk(&self, f: &mut impl FnMut(&Ty)) {
- match self {
- Ty::Apply(a_ty) => {
- for t in a_ty.parameters.iter() {
- t.walk(f);
- }
- }
- Ty::Projection(p_ty) => {
- for t in p_ty.parameters.iter() {
- t.walk(f);
- }
- }
- Ty::Dyn(predicates) => {
- for p in predicates.iter() {
- p.walk(f);
- }
- }
- Ty::Opaque(o_ty) => {
- for t in o_ty.parameters.iter() {
- t.walk(f);
- }
- }
- Ty::Placeholder { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {}
- }
- f(self);
- }
-
- fn walk_mut_binders(
- &mut self,
- f: &mut impl FnMut(&mut Ty, DebruijnIndex),
- binders: DebruijnIndex,
- ) {
- match self {
- Ty::Apply(a_ty) => {
- a_ty.parameters.walk_mut_binders(f, binders);
- }
- Ty::Projection(p_ty) => {
- p_ty.parameters.walk_mut_binders(f, binders);
- }
- Ty::Dyn(predicates) => {
- for p in make_mut_slice(predicates) {
- p.walk_mut_binders(f, binders.shifted_in());
- }
- }
- Ty::Opaque(o_ty) => {
- o_ty.parameters.walk_mut_binders(f, binders);
- }
- Ty::Placeholder { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {}
- }
- f(self, binders);
- }
-}
-
-impl<T: TypeWalk> TypeWalk for Vec<T> {
- fn walk(&self, f: &mut impl FnMut(&Ty)) {
- for t in self {
- t.walk(f);
- }
- }
- fn walk_mut_binders(
- &mut self,
- f: &mut impl FnMut(&mut Ty, DebruijnIndex),
- binders: DebruijnIndex,
- ) {
- for t in self {
- t.walk_mut_binders(f, binders);
- }
- }
-}
-
-#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
-pub enum OpaqueTyId {
- ReturnTypeImplTrait(hir_def::FunctionId, u16),
-}
-
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub struct ReturnTypeImplTraits {
- pub(crate) impl_traits: Vec<ReturnTypeImplTrait>,
-}
-
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub(crate) struct ReturnTypeImplTrait {
- pub bounds: Binders<Vec<GenericPredicate>>,
-}
+++ /dev/null
-//! Methods for lowering the HIR to types. There are two main cases here:
-//!
-//! - Lowering a type reference like `&usize` or `Option<foo::bar::Baz>` to a
-//! type: The entry point for this is `Ty::from_hir`.
-//! - Building the type for an item: This happens through the `type_for_def` query.
-//!
-//! This usually involves resolving names, collecting generic arguments etc.
-use std::{iter, sync::Arc};
-
-use arena::map::ArenaMap;
-use base_db::CrateId;
-use hir_def::{
- adt::StructKind,
- builtin_type::BuiltinType,
- generics::{TypeParamProvenance, WherePredicate, WherePredicateTarget},
- path::{GenericArg, Path, PathSegment, PathSegments},
- resolver::{HasResolver, Resolver, TypeNs},
- type_ref::{TypeBound, TypeRef},
- AdtId, AssocContainerId, AssocItemId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId,
- HasModule, ImplId, LocalFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeParamId,
- UnionId, VariantId,
-};
-use hir_expand::name::Name;
-use smallvec::SmallVec;
-use stdx::impl_from;
-use test_utils::mark;
-
-use crate::{
- db::HirDatabase,
- primitive::{FloatTy, IntTy},
- utils::{
- all_super_trait_refs, associated_type_by_name_including_super_traits, generics,
- make_mut_slice, variant_data,
- },
- Binders, BoundVar, DebruijnIndex, FnSig, GenericPredicate, OpaqueTy, OpaqueTyId, PolyFnSig,
- ProjectionPredicate, ProjectionTy, ReturnTypeImplTrait, ReturnTypeImplTraits, Substs,
- TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk,
-};
-
-#[derive(Debug)]
-pub struct TyLoweringContext<'a> {
- pub db: &'a dyn HirDatabase,
- pub resolver: &'a Resolver,
- in_binders: DebruijnIndex,
- /// Note: Conceptually, it's thinkable that we could be in a location where
- /// some type params should be represented as placeholders, and others
- /// should be converted to variables. I think in practice, this isn't
- /// possible currently, so this should be fine for now.
- pub type_param_mode: TypeParamLoweringMode,
- pub impl_trait_mode: ImplTraitLoweringMode,
- impl_trait_counter: std::cell::Cell<u16>,
- /// When turning `impl Trait` into opaque types, we have to collect the
- /// bounds at the same time to get the IDs correct (without becoming too
- /// complicated). I don't like using interior mutability (as for the
- /// counter), but I've tried and failed to make the lifetimes work for
- /// passing around a `&mut TyLoweringContext`. The core problem is that
- /// we're grouping the mutable data (the counter and this field) together
- /// with the immutable context (the references to the DB and resolver).
- /// Splitting this up would be a possible fix.
- opaque_type_data: std::cell::RefCell<Vec<ReturnTypeImplTrait>>,
-}
-
-impl<'a> TyLoweringContext<'a> {
- pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver) -> Self {
- let impl_trait_counter = std::cell::Cell::new(0);
- let impl_trait_mode = ImplTraitLoweringMode::Disallowed;
- let type_param_mode = TypeParamLoweringMode::Placeholder;
- let in_binders = DebruijnIndex::INNERMOST;
- let opaque_type_data = std::cell::RefCell::new(Vec::new());
- Self {
- db,
- resolver,
- in_binders,
- impl_trait_mode,
- impl_trait_counter,
- type_param_mode,
- opaque_type_data,
- }
- }
-
- pub fn with_debruijn<T>(
- &self,
- debruijn: DebruijnIndex,
- f: impl FnOnce(&TyLoweringContext) -> T,
- ) -> T {
- let opaque_ty_data_vec = self.opaque_type_data.replace(Vec::new());
- let new_ctx = Self {
- in_binders: debruijn,
- impl_trait_counter: std::cell::Cell::new(self.impl_trait_counter.get()),
- opaque_type_data: std::cell::RefCell::new(opaque_ty_data_vec),
- ..*self
- };
- let result = f(&new_ctx);
- self.impl_trait_counter.set(new_ctx.impl_trait_counter.get());
- self.opaque_type_data.replace(new_ctx.opaque_type_data.into_inner());
- result
- }
-
- pub fn with_shifted_in<T>(
- &self,
- debruijn: DebruijnIndex,
- f: impl FnOnce(&TyLoweringContext) -> T,
- ) -> T {
- self.with_debruijn(self.in_binders.shifted_in_from(debruijn), f)
- }
-
- pub fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self {
- Self { impl_trait_mode, ..self }
- }
-
- pub fn with_type_param_mode(self, type_param_mode: TypeParamLoweringMode) -> Self {
- Self { type_param_mode, ..self }
- }
-}
-
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
-pub enum ImplTraitLoweringMode {
- /// `impl Trait` gets lowered into an opaque type that doesn't unify with
- /// anything except itself. This is used in places where values flow 'out',
- /// i.e. for arguments of the function we're currently checking, and return
- /// types of functions we're calling.
- Opaque,
- /// `impl Trait` gets lowered into a type variable. Used for argument
- /// position impl Trait when inside the respective function, since it allows
- /// us to support that without Chalk.
- Param,
- /// `impl Trait` gets lowered into a variable that can unify with some
- /// type. This is used in places where values flow 'in', i.e. for arguments
- /// of functions we're calling, and the return type of the function we're
- /// currently checking.
- Variable,
- /// `impl Trait` is disallowed and will be an error.
- Disallowed,
-}
-
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
-pub enum TypeParamLoweringMode {
- Placeholder,
- Variable,
-}
-
-impl Ty {
- pub fn from_hir(ctx: &TyLoweringContext<'_>, type_ref: &TypeRef) -> Self {
- Ty::from_hir_ext(ctx, type_ref).0
- }
- pub fn from_hir_ext(ctx: &TyLoweringContext<'_>, type_ref: &TypeRef) -> (Self, Option<TypeNs>) {
- let mut res = None;
- let ty = match type_ref {
- TypeRef::Never => Ty::simple(TypeCtor::Never),
- TypeRef::Tuple(inner) => {
- let inner_tys: Arc<[Ty]> = inner.iter().map(|tr| Ty::from_hir(ctx, tr)).collect();
- Ty::apply(
- TypeCtor::Tuple { cardinality: inner_tys.len() as u16 },
- Substs(inner_tys),
- )
- }
- TypeRef::Path(path) => {
- let (ty, res_) = Ty::from_hir_path(ctx, path);
- res = res_;
- ty
- }
- TypeRef::RawPtr(inner, mutability) => {
- let inner_ty = Ty::from_hir(ctx, inner);
- Ty::apply_one(TypeCtor::RawPtr(*mutability), inner_ty)
- }
- TypeRef::Array(inner) => {
- let inner_ty = Ty::from_hir(ctx, inner);
- Ty::apply_one(TypeCtor::Array, inner_ty)
- }
- TypeRef::Slice(inner) => {
- let inner_ty = Ty::from_hir(ctx, inner);
- Ty::apply_one(TypeCtor::Slice, inner_ty)
- }
- TypeRef::Reference(inner, mutability) => {
- let inner_ty = Ty::from_hir(ctx, inner);
- Ty::apply_one(TypeCtor::Ref(*mutability), inner_ty)
- }
- TypeRef::Placeholder => Ty::Unknown,
- TypeRef::Fn(params, is_varargs) => {
- let sig = Substs(params.iter().map(|tr| Ty::from_hir(ctx, tr)).collect());
- Ty::apply(
- TypeCtor::FnPtr { num_args: sig.len() as u16 - 1, is_varargs: *is_varargs },
- sig,
- )
- }
- TypeRef::DynTrait(bounds) => {
- let self_ty = Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, 0));
- let predicates = ctx.with_shifted_in(DebruijnIndex::ONE, |ctx| {
- bounds
- .iter()
- .flat_map(|b| GenericPredicate::from_type_bound(ctx, b, self_ty.clone()))
- .collect()
- });
- Ty::Dyn(predicates)
- }
- TypeRef::ImplTrait(bounds) => {
- match ctx.impl_trait_mode {
- ImplTraitLoweringMode::Opaque => {
- let idx = ctx.impl_trait_counter.get();
- ctx.impl_trait_counter.set(idx + 1);
-
- assert!(idx as usize == ctx.opaque_type_data.borrow().len());
- // this dance is to make sure the data is in the right
- // place even if we encounter more opaque types while
- // lowering the bounds
- ctx.opaque_type_data
- .borrow_mut()
- .push(ReturnTypeImplTrait { bounds: Binders::new(1, Vec::new()) });
- // We don't want to lower the bounds inside the binders
- // we're currently in, because they don't end up inside
- // those binders. E.g. when we have `impl Trait<impl
- // OtherTrait<T>>`, the `impl OtherTrait<T>` can't refer
- // to the self parameter from `impl Trait`, and the
- // bounds aren't actually stored nested within each
- // other, but separately. So if the `T` refers to a type
- // parameter of the outer function, it's just one binder
- // away instead of two.
- let actual_opaque_type_data = ctx
- .with_debruijn(DebruijnIndex::INNERMOST, |ctx| {
- ReturnTypeImplTrait::from_hir(ctx, &bounds)
- });
- ctx.opaque_type_data.borrow_mut()[idx as usize] = actual_opaque_type_data;
-
- let func = match ctx.resolver.generic_def() {
- Some(GenericDefId::FunctionId(f)) => f,
- _ => panic!("opaque impl trait lowering in non-function"),
- };
- let impl_trait_id = OpaqueTyId::ReturnTypeImplTrait(func, idx);
- let generics = generics(ctx.db.upcast(), func.into());
- let parameters = Substs::bound_vars(&generics, ctx.in_binders);
- Ty::Opaque(OpaqueTy { opaque_ty_id: impl_trait_id, parameters })
- }
- ImplTraitLoweringMode::Param => {
- let idx = ctx.impl_trait_counter.get();
- // FIXME we're probably doing something wrong here
- ctx.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16);
- if let Some(def) = ctx.resolver.generic_def() {
- let generics = generics(ctx.db.upcast(), def);
- let param = generics
- .iter()
- .filter(|(_, data)| {
- data.provenance == TypeParamProvenance::ArgumentImplTrait
- })
- .nth(idx as usize)
- .map_or(Ty::Unknown, |(id, _)| Ty::Placeholder(id));
- param
- } else {
- Ty::Unknown
- }
- }
- ImplTraitLoweringMode::Variable => {
- let idx = ctx.impl_trait_counter.get();
- // FIXME we're probably doing something wrong here
- ctx.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16);
- let (parent_params, self_params, list_params, _impl_trait_params) =
- if let Some(def) = ctx.resolver.generic_def() {
- let generics = generics(ctx.db.upcast(), def);
- generics.provenance_split()
- } else {
- (0, 0, 0, 0)
- };
- Ty::Bound(BoundVar::new(
- ctx.in_binders,
- idx as usize + parent_params + self_params + list_params,
- ))
- }
- ImplTraitLoweringMode::Disallowed => {
- // FIXME: report error
- Ty::Unknown
- }
- }
- }
- TypeRef::Error => Ty::Unknown,
- };
- (ty, res)
- }
-
- /// This is only for `generic_predicates_for_param`, where we can't just
- /// lower the self types of the predicates since that could lead to cycles.
- /// So we just check here if the `type_ref` resolves to a generic param, and which.
- fn from_hir_only_param(ctx: &TyLoweringContext<'_>, type_ref: &TypeRef) -> Option<TypeParamId> {
- let path = match type_ref {
- TypeRef::Path(path) => path,
- _ => return None,
- };
- if path.type_anchor().is_some() {
- return None;
- }
- if path.segments().len() > 1 {
- return None;
- }
- let resolution =
- match ctx.resolver.resolve_path_in_type_ns(ctx.db.upcast(), path.mod_path()) {
- Some((it, None)) => it,
- _ => return None,
- };
- if let TypeNs::GenericParam(param_id) = resolution {
- Some(param_id)
- } else {
- None
- }
- }
-
- pub(crate) fn from_type_relative_path(
- ctx: &TyLoweringContext<'_>,
- ty: Ty,
- // We need the original resolution to lower `Self::AssocTy` correctly
- res: Option<TypeNs>,
- remaining_segments: PathSegments<'_>,
- ) -> (Ty, Option<TypeNs>) {
- if remaining_segments.len() == 1 {
- // resolve unselected assoc types
- let segment = remaining_segments.first().unwrap();
- (Ty::select_associated_type(ctx, res, segment), None)
- } else if remaining_segments.len() > 1 {
- // FIXME report error (ambiguous associated type)
- (Ty::Unknown, None)
- } else {
- (ty, res)
- }
- }
-
- pub(crate) fn from_partly_resolved_hir_path(
- ctx: &TyLoweringContext<'_>,
- resolution: TypeNs,
- resolved_segment: PathSegment<'_>,
- remaining_segments: PathSegments<'_>,
- infer_args: bool,
- ) -> (Ty, Option<TypeNs>) {
- let ty = match resolution {
- TypeNs::TraitId(trait_) => {
- // if this is a bare dyn Trait, we'll directly put the required ^0 for the self type in there
- let self_ty = if remaining_segments.len() == 0 {
- Some(Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, 0)))
- } else {
- None
- };
- let trait_ref =
- TraitRef::from_resolved_path(ctx, trait_, resolved_segment, self_ty);
- let ty = if remaining_segments.len() == 1 {
- let segment = remaining_segments.first().unwrap();
- let found = associated_type_by_name_including_super_traits(
- ctx.db,
- trait_ref,
- &segment.name,
- );
- match found {
- Some((super_trait_ref, associated_ty)) => {
- // FIXME handle type parameters on the segment
- Ty::Projection(ProjectionTy {
- associated_ty,
- parameters: super_trait_ref.substs,
- })
- }
- None => {
- // FIXME: report error (associated type not found)
- Ty::Unknown
- }
- }
- } else if remaining_segments.len() > 1 {
- // FIXME report error (ambiguous associated type)
- Ty::Unknown
- } else {
- Ty::Dyn(Arc::new([GenericPredicate::Implemented(trait_ref)]))
- };
- return (ty, None);
- }
- TypeNs::GenericParam(param_id) => {
- let generics = generics(
- ctx.db.upcast(),
- ctx.resolver.generic_def().expect("generics in scope"),
- );
- match ctx.type_param_mode {
- TypeParamLoweringMode::Placeholder => Ty::Placeholder(param_id),
- TypeParamLoweringMode::Variable => {
- let idx = generics.param_idx(param_id).expect("matching generics");
- Ty::Bound(BoundVar::new(ctx.in_binders, idx))
- }
- }
- }
- TypeNs::SelfType(impl_id) => {
- let generics = generics(ctx.db.upcast(), impl_id.into());
- let substs = match ctx.type_param_mode {
- TypeParamLoweringMode::Placeholder => {
- Substs::type_params_for_generics(&generics)
- }
- TypeParamLoweringMode::Variable => {
- Substs::bound_vars(&generics, ctx.in_binders)
- }
- };
- ctx.db.impl_self_ty(impl_id).subst(&substs)
- }
- TypeNs::AdtSelfType(adt) => {
- let generics = generics(ctx.db.upcast(), adt.into());
- let substs = match ctx.type_param_mode {
- TypeParamLoweringMode::Placeholder => {
- Substs::type_params_for_generics(&generics)
- }
- TypeParamLoweringMode::Variable => {
- Substs::bound_vars(&generics, ctx.in_binders)
- }
- };
- ctx.db.ty(adt.into()).subst(&substs)
- }
-
- TypeNs::AdtId(it) => {
- Ty::from_hir_path_inner(ctx, resolved_segment, it.into(), infer_args)
- }
- TypeNs::BuiltinType(it) => {
- Ty::from_hir_path_inner(ctx, resolved_segment, it.into(), infer_args)
- }
- TypeNs::TypeAliasId(it) => {
- Ty::from_hir_path_inner(ctx, resolved_segment, it.into(), infer_args)
- }
- // FIXME: report error
- TypeNs::EnumVariantId(_) => return (Ty::Unknown, None),
- };
-
- Ty::from_type_relative_path(ctx, ty, Some(resolution), remaining_segments)
- }
-
- pub(crate) fn from_hir_path(ctx: &TyLoweringContext<'_>, path: &Path) -> (Ty, Option<TypeNs>) {
- // Resolve the path (in type namespace)
- if let Some(type_ref) = path.type_anchor() {
- let (ty, res) = Ty::from_hir_ext(ctx, &type_ref);
- return Ty::from_type_relative_path(ctx, ty, res, path.segments());
- }
- let (resolution, remaining_index) =
- match ctx.resolver.resolve_path_in_type_ns(ctx.db.upcast(), path.mod_path()) {
- Some(it) => it,
- None => return (Ty::Unknown, None),
- };
- let (resolved_segment, remaining_segments) = match remaining_index {
- None => (
- path.segments().last().expect("resolved path has at least one element"),
- PathSegments::EMPTY,
- ),
- Some(i) => (path.segments().get(i - 1).unwrap(), path.segments().skip(i)),
- };
- Ty::from_partly_resolved_hir_path(
- ctx,
- resolution,
- resolved_segment,
- remaining_segments,
- false,
- )
- }
-
- fn select_associated_type(
- ctx: &TyLoweringContext<'_>,
- res: Option<TypeNs>,
- segment: PathSegment<'_>,
- ) -> Ty {
- if let Some(res) = res {
- let ty =
- associated_type_shorthand_candidates(ctx.db, res, move |name, t, associated_ty| {
- if name == segment.name {
- let substs = match ctx.type_param_mode {
- TypeParamLoweringMode::Placeholder => {
- // if we're lowering to placeholders, we have to put
- // them in now
- let s = Substs::type_params(
- ctx.db,
- ctx.resolver.generic_def().expect(
- "there should be generics if there's a generic param",
- ),
- );
- t.substs.clone().subst_bound_vars(&s)
- }
- TypeParamLoweringMode::Variable => t.substs.clone(),
- };
- // We need to shift in the bound vars, since
- // associated_type_shorthand_candidates does not do that
- let substs = substs.shift_bound_vars(ctx.in_binders);
- // FIXME handle type parameters on the segment
- return Some(Ty::Projection(ProjectionTy {
- associated_ty,
- parameters: substs,
- }));
- }
-
- None
- });
-
- ty.unwrap_or(Ty::Unknown)
- } else {
- Ty::Unknown
- }
- }
-
- fn from_hir_path_inner(
- ctx: &TyLoweringContext<'_>,
- segment: PathSegment<'_>,
- typable: TyDefId,
- infer_args: bool,
- ) -> Ty {
- let generic_def = match typable {
- TyDefId::BuiltinType(_) => None,
- TyDefId::AdtId(it) => Some(it.into()),
- TyDefId::TypeAliasId(it) => Some(it.into()),
- };
- let substs = substs_from_path_segment(ctx, segment, generic_def, infer_args);
- ctx.db.ty(typable).subst(&substs)
- }
-
- /// Collect generic arguments from a path into a `Substs`. See also
- /// `create_substs_for_ast_path` and `def_to_ty` in rustc.
- pub(super) fn substs_from_path(
- ctx: &TyLoweringContext<'_>,
- path: &Path,
- // Note that we don't call `db.value_type(resolved)` here,
- // `ValueTyDefId` is just a convenient way to pass generics and
- // special-case enum variants
- resolved: ValueTyDefId,
- infer_args: bool,
- ) -> Substs {
- let last = path.segments().last().expect("path should have at least one segment");
- let (segment, generic_def) = match resolved {
- ValueTyDefId::FunctionId(it) => (last, Some(it.into())),
- ValueTyDefId::StructId(it) => (last, Some(it.into())),
- ValueTyDefId::UnionId(it) => (last, Some(it.into())),
- ValueTyDefId::ConstId(it) => (last, Some(it.into())),
- ValueTyDefId::StaticId(_) => (last, None),
- ValueTyDefId::EnumVariantId(var) => {
- // the generic args for an enum variant may be either specified
- // on the segment referring to the enum, or on the segment
- // referring to the variant. So `Option::<T>::None` and
- // `Option::None::<T>` are both allowed (though the former is
- // preferred). See also `def_ids_for_path_segments` in rustc.
- let len = path.segments().len();
- let penultimate = if len >= 2 { path.segments().get(len - 2) } else { None };
- let segment = match penultimate {
- Some(segment) if segment.args_and_bindings.is_some() => segment,
- _ => last,
- };
- (segment, Some(var.parent.into()))
- }
- };
- substs_from_path_segment(ctx, segment, generic_def, infer_args)
- }
-}
-
-fn substs_from_path_segment(
- ctx: &TyLoweringContext<'_>,
- segment: PathSegment<'_>,
- def_generic: Option<GenericDefId>,
- infer_args: bool,
-) -> Substs {
- let mut substs = Vec::new();
- let def_generics = def_generic.map(|def| generics(ctx.db.upcast(), def));
-
- let (parent_params, self_params, type_params, impl_trait_params) =
- def_generics.map_or((0, 0, 0, 0), |g| g.provenance_split());
- let total_len = parent_params + self_params + type_params + impl_trait_params;
-
- substs.extend(iter::repeat(Ty::Unknown).take(parent_params));
-
- let mut had_explicit_args = false;
-
- if let Some(generic_args) = &segment.args_and_bindings {
- if !generic_args.has_self_type {
- substs.extend(iter::repeat(Ty::Unknown).take(self_params));
- }
- let expected_num =
- if generic_args.has_self_type { self_params + type_params } else { type_params };
- let skip = if generic_args.has_self_type && self_params == 0 { 1 } else { 0 };
- // if args are provided, it should be all of them, but we can't rely on that
- for arg in generic_args.args.iter().skip(skip).take(expected_num) {
- match arg {
- GenericArg::Type(type_ref) => {
- had_explicit_args = true;
- let ty = Ty::from_hir(ctx, type_ref);
- substs.push(ty);
- }
- }
- }
- }
-
- // handle defaults. In expression or pattern path segments without
- // explicitly specified type arguments, missing type arguments are inferred
- // (i.e. defaults aren't used).
- if !infer_args || had_explicit_args {
- if let Some(def_generic) = def_generic {
- let defaults = ctx.db.generic_defaults(def_generic);
- assert_eq!(total_len, defaults.len());
-
- for default_ty in defaults.iter().skip(substs.len()) {
- // each default can depend on the previous parameters
- let substs_so_far = Substs(substs.clone().into());
- substs.push(default_ty.clone().subst(&substs_so_far));
- }
- }
- }
-
- // add placeholders for args that were not provided
- // FIXME: emit diagnostics in contexts where this is not allowed
- for _ in substs.len()..total_len {
- substs.push(Ty::Unknown);
- }
- assert_eq!(substs.len(), total_len);
-
- Substs(substs.into())
-}
-
-impl TraitRef {
- fn from_path(
- ctx: &TyLoweringContext<'_>,
- path: &Path,
- explicit_self_ty: Option<Ty>,
- ) -> Option<Self> {
- let resolved =
- match ctx.resolver.resolve_path_in_type_ns_fully(ctx.db.upcast(), path.mod_path())? {
- TypeNs::TraitId(tr) => tr,
- _ => return None,
- };
- let segment = path.segments().last().expect("path should have at least one segment");
- Some(TraitRef::from_resolved_path(ctx, resolved, segment, explicit_self_ty))
- }
-
- pub(crate) fn from_resolved_path(
- ctx: &TyLoweringContext<'_>,
- resolved: TraitId,
- segment: PathSegment<'_>,
- explicit_self_ty: Option<Ty>,
- ) -> Self {
- let mut substs = TraitRef::substs_from_path(ctx, segment, resolved);
- if let Some(self_ty) = explicit_self_ty {
- make_mut_slice(&mut substs.0)[0] = self_ty;
- }
- TraitRef { trait_: resolved, substs }
- }
-
- fn from_hir(
- ctx: &TyLoweringContext<'_>,
- type_ref: &TypeRef,
- explicit_self_ty: Option<Ty>,
- ) -> Option<Self> {
- let path = match type_ref {
- TypeRef::Path(path) => path,
- _ => return None,
- };
- TraitRef::from_path(ctx, path, explicit_self_ty)
- }
-
- fn substs_from_path(
- ctx: &TyLoweringContext<'_>,
- segment: PathSegment<'_>,
- resolved: TraitId,
- ) -> Substs {
- substs_from_path_segment(ctx, segment, Some(resolved.into()), false)
- }
-
- pub(crate) fn from_type_bound(
- ctx: &TyLoweringContext<'_>,
- bound: &TypeBound,
- self_ty: Ty,
- ) -> Option<TraitRef> {
- match bound {
- TypeBound::Path(path) => TraitRef::from_path(ctx, path, Some(self_ty)),
- TypeBound::Error => None,
- }
- }
-}
-
-impl GenericPredicate {
- pub(crate) fn from_where_predicate<'a>(
- ctx: &'a TyLoweringContext<'a>,
- where_predicate: &'a WherePredicate,
- ) -> impl Iterator<Item = GenericPredicate> + 'a {
- let self_ty = match &where_predicate.target {
- WherePredicateTarget::TypeRef(type_ref) => Ty::from_hir(ctx, type_ref),
- WherePredicateTarget::TypeParam(param_id) => {
- let generic_def = ctx.resolver.generic_def().expect("generics in scope");
- let generics = generics(ctx.db.upcast(), generic_def);
- let param_id = hir_def::TypeParamId { parent: generic_def, local_id: *param_id };
- match ctx.type_param_mode {
- TypeParamLoweringMode::Placeholder => Ty::Placeholder(param_id),
- TypeParamLoweringMode::Variable => {
- let idx = generics.param_idx(param_id).expect("matching generics");
- Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, idx))
- }
- }
- }
- };
- GenericPredicate::from_type_bound(ctx, &where_predicate.bound, self_ty)
- }
-
- pub(crate) fn from_type_bound<'a>(
- ctx: &'a TyLoweringContext<'a>,
- bound: &'a TypeBound,
- self_ty: Ty,
- ) -> impl Iterator<Item = GenericPredicate> + 'a {
- let trait_ref = TraitRef::from_type_bound(ctx, bound, self_ty);
- iter::once(trait_ref.clone().map_or(GenericPredicate::Error, GenericPredicate::Implemented))
- .chain(
- trait_ref
- .into_iter()
- .flat_map(move |tr| assoc_type_bindings_from_type_bound(ctx, bound, tr)),
- )
- }
-}
-
-fn assoc_type_bindings_from_type_bound<'a>(
- ctx: &'a TyLoweringContext<'a>,
- bound: &'a TypeBound,
- trait_ref: TraitRef,
-) -> impl Iterator<Item = GenericPredicate> + 'a {
- let last_segment = match bound {
- TypeBound::Path(path) => path.segments().last(),
- TypeBound::Error => None,
- };
- last_segment
- .into_iter()
- .flat_map(|segment| segment.args_and_bindings.into_iter())
- .flat_map(|args_and_bindings| args_and_bindings.bindings.iter())
- .flat_map(move |binding| {
- let found = associated_type_by_name_including_super_traits(
- ctx.db,
- trait_ref.clone(),
- &binding.name,
- );
- let (super_trait_ref, associated_ty) = match found {
- None => return SmallVec::<[GenericPredicate; 1]>::new(),
- Some(t) => t,
- };
- let projection_ty = ProjectionTy { associated_ty, parameters: super_trait_ref.substs };
- let mut preds = SmallVec::with_capacity(
- binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),
- );
- if let Some(type_ref) = &binding.type_ref {
- let ty = Ty::from_hir(ctx, type_ref);
- let projection_predicate =
- ProjectionPredicate { projection_ty: projection_ty.clone(), ty };
- preds.push(GenericPredicate::Projection(projection_predicate));
- }
- for bound in &binding.bounds {
- preds.extend(GenericPredicate::from_type_bound(
- ctx,
- bound,
- Ty::Projection(projection_ty.clone()),
- ));
- }
- preds
- })
-}
-
-impl ReturnTypeImplTrait {
- fn from_hir(ctx: &TyLoweringContext, bounds: &[TypeBound]) -> Self {
- mark::hit!(lower_rpit);
- let self_ty = Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, 0));
- let predicates = ctx.with_shifted_in(DebruijnIndex::ONE, |ctx| {
- bounds
- .iter()
- .flat_map(|b| GenericPredicate::from_type_bound(ctx, b, self_ty.clone()))
- .collect()
- });
- ReturnTypeImplTrait { bounds: Binders::new(1, predicates) }
- }
-}
-
-fn count_impl_traits(type_ref: &TypeRef) -> usize {
- let mut count = 0;
- type_ref.walk(&mut |type_ref| {
- if matches!(type_ref, TypeRef::ImplTrait(_)) {
- count += 1;
- }
- });
- count
-}
-
-/// Build the signature of a callable item (function, struct or enum variant).
-pub fn callable_item_sig(db: &dyn HirDatabase, def: CallableDefId) -> PolyFnSig {
- match def {
- CallableDefId::FunctionId(f) => fn_sig_for_fn(db, f),
- CallableDefId::StructId(s) => fn_sig_for_struct_constructor(db, s),
- CallableDefId::EnumVariantId(e) => fn_sig_for_enum_variant_constructor(db, e),
- }
-}
-
-pub fn associated_type_shorthand_candidates<R>(
- db: &dyn HirDatabase,
- res: TypeNs,
- mut cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option<R>,
-) -> Option<R> {
- let traits_from_env: Vec<_> = match res {
- TypeNs::SelfType(impl_id) => match db.impl_trait(impl_id) {
- None => vec![],
- Some(trait_ref) => vec![trait_ref.value],
- },
- TypeNs::GenericParam(param_id) => {
- let predicates = db.generic_predicates_for_param(param_id);
- let mut traits_: Vec<_> = predicates
- .iter()
- .filter_map(|pred| match &pred.value {
- GenericPredicate::Implemented(tr) => Some(tr.clone()),
- _ => None,
- })
- .collect();
- // Handle `Self::Type` referring to own associated type in trait definitions
- if let GenericDefId::TraitId(trait_id) = param_id.parent {
- let generics = generics(db.upcast(), trait_id.into());
- if generics.params.types[param_id.local_id].provenance
- == TypeParamProvenance::TraitSelf
- {
- let trait_ref = TraitRef {
- trait_: trait_id,
- substs: Substs::bound_vars(&generics, DebruijnIndex::INNERMOST),
- };
- traits_.push(trait_ref);
- }
- }
- traits_
- }
- _ => vec![],
- };
-
- for t in traits_from_env.into_iter().flat_map(move |t| all_super_trait_refs(db, t)) {
- let data = db.trait_data(t.trait_);
-
- for (name, assoc_id) in &data.items {
- match assoc_id {
- AssocItemId::TypeAliasId(alias) => {
- if let Some(result) = cb(name, &t, *alias) {
- return Some(result);
- }
- }
- AssocItemId::FunctionId(_) | AssocItemId::ConstId(_) => {}
- }
- }
- }
-
- None
-}
-
-/// Build the type of all specific fields of a struct or enum variant.
-pub(crate) fn field_types_query(
- db: &dyn HirDatabase,
- variant_id: VariantId,
-) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>> {
- let var_data = variant_data(db.upcast(), variant_id);
- let (resolver, def): (_, GenericDefId) = match variant_id {
- VariantId::StructId(it) => (it.resolver(db.upcast()), it.into()),
- VariantId::UnionId(it) => (it.resolver(db.upcast()), it.into()),
- VariantId::EnumVariantId(it) => (it.parent.resolver(db.upcast()), it.parent.into()),
- };
- let generics = generics(db.upcast(), def);
- let mut res = ArenaMap::default();
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
- for (field_id, field_data) in var_data.fields().iter() {
- res.insert(field_id, Binders::new(generics.len(), Ty::from_hir(&ctx, &field_data.type_ref)))
- }
- Arc::new(res)
-}
-
-/// This query exists only to be used when resolving short-hand associated types
-/// like `T::Item`.
-///
-/// See the analogous query in rustc and its comment:
-/// https://github.com/rust-lang/rust/blob/9150f844e2624eb013ec78ca08c1d416e6644026/src/librustc_typeck/astconv.rs#L46
-/// This is a query mostly to handle cycles somewhat gracefully; e.g. the
-/// following bounds are disallowed: `T: Foo<U::Item>, U: Foo<T::Item>`, but
-/// these are fine: `T: Foo<U::Item>, U: Foo<()>`.
-pub(crate) fn generic_predicates_for_param_query(
- db: &dyn HirDatabase,
- param_id: TypeParamId,
-) -> Arc<[Binders<GenericPredicate>]> {
- let resolver = param_id.parent.resolver(db.upcast());
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
- let generics = generics(db.upcast(), param_id.parent);
- resolver
- .where_predicates_in_scope()
- // we have to filter out all other predicates *first*, before attempting to lower them
- .filter(|pred| match &pred.target {
- WherePredicateTarget::TypeRef(type_ref) => {
- Ty::from_hir_only_param(&ctx, type_ref) == Some(param_id)
- }
- WherePredicateTarget::TypeParam(local_id) => *local_id == param_id.local_id,
- })
- .flat_map(|pred| {
- GenericPredicate::from_where_predicate(&ctx, pred)
- .map(|p| Binders::new(generics.len(), p))
- })
- .collect()
-}
-
-pub(crate) fn generic_predicates_for_param_recover(
- _db: &dyn HirDatabase,
- _cycle: &[String],
- _param_id: &TypeParamId,
-) -> Arc<[Binders<GenericPredicate>]> {
- Arc::new([])
-}
-
-impl TraitEnvironment {
- pub fn lower(db: &dyn HirDatabase, resolver: &Resolver) -> Arc<TraitEnvironment> {
- let ctx = TyLoweringContext::new(db, &resolver)
- .with_type_param_mode(TypeParamLoweringMode::Placeholder);
- let mut predicates = resolver
- .where_predicates_in_scope()
- .flat_map(|pred| GenericPredicate::from_where_predicate(&ctx, pred))
- .collect::<Vec<_>>();
-
- if let Some(def) = resolver.generic_def() {
- let container: Option<AssocContainerId> = match def {
- // FIXME: is there a function for this?
- GenericDefId::FunctionId(f) => Some(f.lookup(db.upcast()).container),
- GenericDefId::AdtId(_) => None,
- GenericDefId::TraitId(_) => None,
- GenericDefId::TypeAliasId(t) => Some(t.lookup(db.upcast()).container),
- GenericDefId::ImplId(_) => None,
- GenericDefId::EnumVariantId(_) => None,
- GenericDefId::ConstId(c) => Some(c.lookup(db.upcast()).container),
- };
- if let Some(AssocContainerId::TraitId(trait_id)) = container {
- // add `Self: Trait<T1, T2, ...>` to the environment in trait
- // function default implementations (and hypothetical code
- // inside consts or type aliases)
- test_utils::mark::hit!(trait_self_implements_self);
- let substs = Substs::type_params(db, trait_id);
- let trait_ref = TraitRef { trait_: trait_id, substs };
- let pred = GenericPredicate::Implemented(trait_ref);
-
- predicates.push(pred);
- }
- }
-
- Arc::new(TraitEnvironment { predicates })
- }
-}
-
-/// Resolve the where clause(s) of an item with generics.
-pub(crate) fn generic_predicates_query(
- db: &dyn HirDatabase,
- def: GenericDefId,
-) -> Arc<[Binders<GenericPredicate>]> {
- let resolver = def.resolver(db.upcast());
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
- let generics = generics(db.upcast(), def);
- resolver
- .where_predicates_in_scope()
- .flat_map(|pred| {
- GenericPredicate::from_where_predicate(&ctx, pred)
- .map(|p| Binders::new(generics.len(), p))
- })
- .collect()
-}
-
-/// Resolve the default type params from generics
-pub(crate) fn generic_defaults_query(
- db: &dyn HirDatabase,
- def: GenericDefId,
-) -> Arc<[Binders<Ty>]> {
- let resolver = def.resolver(db.upcast());
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
- let generic_params = generics(db.upcast(), def);
-
- let defaults = generic_params
- .iter()
- .enumerate()
- .map(|(idx, (_, p))| {
- let mut ty = p.default.as_ref().map_or(Ty::Unknown, |t| Ty::from_hir(&ctx, t));
-
- // Each default can only refer to previous parameters.
- ty.walk_mut_binders(
- &mut |ty, binders| match ty {
- Ty::Bound(BoundVar { debruijn, index }) if *debruijn == binders => {
- if *index >= idx {
- // type variable default referring to parameter coming
- // after it. This is forbidden (FIXME: report
- // diagnostic)
- *ty = Ty::Unknown;
- }
- }
- _ => {}
- },
- DebruijnIndex::INNERMOST,
- );
-
- Binders::new(idx, ty)
- })
- .collect();
-
- defaults
-}
-
-fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
- let data = db.function_data(def);
- let resolver = def.resolver(db.upcast());
- let ctx_params = TyLoweringContext::new(db, &resolver)
- .with_impl_trait_mode(ImplTraitLoweringMode::Variable)
- .with_type_param_mode(TypeParamLoweringMode::Variable);
- let params = data.params.iter().map(|tr| Ty::from_hir(&ctx_params, tr)).collect::<Vec<_>>();
- let ctx_ret = TyLoweringContext::new(db, &resolver)
- .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
- .with_type_param_mode(TypeParamLoweringMode::Variable);
- let ret = Ty::from_hir(&ctx_ret, &data.ret_type);
- let generics = generics(db.upcast(), def.into());
- let num_binders = generics.len();
- Binders::new(num_binders, FnSig::from_params_and_return(params, ret, data.is_varargs))
-}
-
-/// Build the declared type of a function. This should not need to look at the
-/// function body.
-fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> Binders<Ty> {
- let generics = generics(db.upcast(), def.into());
- let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST);
- Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs))
-}
-
-/// Build the declared type of a const.
-fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders<Ty> {
- let data = db.const_data(def);
- let generics = generics(db.upcast(), def.into());
- let resolver = def.resolver(db.upcast());
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
-
- Binders::new(generics.len(), Ty::from_hir(&ctx, &data.type_ref))
-}
-
-/// Build the declared type of a static.
-fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> Binders<Ty> {
- let data = db.static_data(def);
- let resolver = def.resolver(db.upcast());
- let ctx = TyLoweringContext::new(db, &resolver);
-
- Binders::new(0, Ty::from_hir(&ctx, &data.type_ref))
-}
-
-/// Build the declared type of a static.
-fn type_for_builtin(def: BuiltinType) -> Ty {
- Ty::simple(match def {
- BuiltinType::Char => TypeCtor::Char,
- BuiltinType::Bool => TypeCtor::Bool,
- BuiltinType::Str => TypeCtor::Str,
- BuiltinType::Int(t) => TypeCtor::Int(IntTy::from(t).into()),
- BuiltinType::Float(t) => TypeCtor::Float(FloatTy::from(t).into()),
- })
-}
-
-fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnSig {
- let struct_data = db.struct_data(def);
- let fields = struct_data.variant_data.fields();
- let resolver = def.resolver(db.upcast());
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
- let params =
- fields.iter().map(|(_, field)| Ty::from_hir(&ctx, &field.type_ref)).collect::<Vec<_>>();
- let ret = type_for_adt(db, def.into());
- Binders::new(ret.num_binders, FnSig::from_params_and_return(params, ret.value, false))
-}
-
-/// Build the type of a tuple struct constructor.
-fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Binders<Ty> {
- let struct_data = db.struct_data(def);
- if let StructKind::Unit = struct_data.variant_data.kind() {
- return type_for_adt(db, def.into());
- }
- let generics = generics(db.upcast(), def.into());
- let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST);
- Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs))
-}
-
-fn fn_sig_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> PolyFnSig {
- let enum_data = db.enum_data(def.parent);
- let var_data = &enum_data.variants[def.local_id];
- let fields = var_data.variant_data.fields();
- let resolver = def.parent.resolver(db.upcast());
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
- let params =
- fields.iter().map(|(_, field)| Ty::from_hir(&ctx, &field.type_ref)).collect::<Vec<_>>();
- let ret = type_for_adt(db, def.parent.into());
- Binders::new(ret.num_binders, FnSig::from_params_and_return(params, ret.value, false))
-}
-
-/// Build the type of a tuple enum variant constructor.
-fn type_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> Binders<Ty> {
- let enum_data = db.enum_data(def.parent);
- let var_data = &enum_data.variants[def.local_id].variant_data;
- if let StructKind::Unit = var_data.kind() {
- return type_for_adt(db, def.parent.into());
- }
- let generics = generics(db.upcast(), def.parent.into());
- let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST);
- Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs))
-}
-
-fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
- let generics = generics(db.upcast(), adt.into());
- let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST);
- Binders::new(substs.len(), Ty::apply(TypeCtor::Adt(adt), substs))
-}
-
-fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
- let generics = generics(db.upcast(), t.into());
- let resolver = t.resolver(db.upcast());
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
- let type_ref = &db.type_alias_data(t).type_ref;
- let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST);
- let inner = Ty::from_hir(&ctx, type_ref.as_ref().unwrap_or(&TypeRef::Error));
- Binders::new(substs.len(), inner)
-}
-
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub enum CallableDefId {
- FunctionId(FunctionId),
- StructId(StructId),
- EnumVariantId(EnumVariantId),
-}
-impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId);
-
-impl CallableDefId {
- pub fn krate(self, db: &dyn HirDatabase) -> CrateId {
- let db = db.upcast();
- match self {
- CallableDefId::FunctionId(f) => f.lookup(db).module(db),
- CallableDefId::StructId(s) => s.lookup(db).container.module(db),
- CallableDefId::EnumVariantId(e) => e.parent.lookup(db).container.module(db),
- }
- .krate
- }
-}
-
-impl From<CallableDefId> for GenericDefId {
- fn from(def: CallableDefId) -> GenericDefId {
- match def {
- CallableDefId::FunctionId(f) => f.into(),
- CallableDefId::StructId(s) => s.into(),
- CallableDefId::EnumVariantId(e) => e.into(),
- }
- }
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub enum TyDefId {
- BuiltinType(BuiltinType),
- AdtId(AdtId),
- TypeAliasId(TypeAliasId),
-}
-impl_from!(BuiltinType, AdtId(StructId, EnumId, UnionId), TypeAliasId for TyDefId);
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub enum ValueTyDefId {
- FunctionId(FunctionId),
- StructId(StructId),
- UnionId(UnionId),
- EnumVariantId(EnumVariantId),
- ConstId(ConstId),
- StaticId(StaticId),
-}
-impl_from!(FunctionId, StructId, UnionId, EnumVariantId, ConstId, StaticId for ValueTyDefId);
-
-/// Build the declared type of an item. This depends on the namespace; e.g. for
-/// `struct Foo(usize)`, we have two types: The type of the struct itself, and
-/// the constructor function `(usize) -> Foo` which lives in the values
-/// namespace.
-pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders<Ty> {
- match def {
- TyDefId::BuiltinType(it) => Binders::new(0, type_for_builtin(it)),
- TyDefId::AdtId(it) => type_for_adt(db, it),
- TyDefId::TypeAliasId(it) => type_for_type_alias(db, it),
- }
-}
-
-pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &[String], def: &TyDefId) -> Binders<Ty> {
- let num_binders = match *def {
- TyDefId::BuiltinType(_) => 0,
- TyDefId::AdtId(it) => generics(db.upcast(), it.into()).len(),
- TyDefId::TypeAliasId(it) => generics(db.upcast(), it.into()).len(),
- };
- Binders::new(num_binders, Ty::Unknown)
-}
-
-pub(crate) fn value_ty_query(db: &dyn HirDatabase, def: ValueTyDefId) -> Binders<Ty> {
- match def {
- ValueTyDefId::FunctionId(it) => type_for_fn(db, it),
- ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it),
- ValueTyDefId::UnionId(it) => type_for_adt(db, it.into()),
- ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it),
- ValueTyDefId::ConstId(it) => type_for_const(db, it),
- ValueTyDefId::StaticId(it) => type_for_static(db, it),
- }
-}
-
-pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binders<Ty> {
- let impl_data = db.impl_data(impl_id);
- let resolver = impl_id.resolver(db.upcast());
- let generics = generics(db.upcast(), impl_id.into());
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
- Binders::new(generics.len(), Ty::from_hir(&ctx, &impl_data.target_type))
-}
-
-pub(crate) fn impl_self_ty_recover(
- db: &dyn HirDatabase,
- _cycle: &[String],
- impl_id: &ImplId,
-) -> Binders<Ty> {
- let generics = generics(db.upcast(), (*impl_id).into());
- Binders::new(generics.len(), Ty::Unknown)
-}
-
-pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option<Binders<TraitRef>> {
- let impl_data = db.impl_data(impl_id);
- let resolver = impl_id.resolver(db.upcast());
- let ctx =
- TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
- let self_ty = db.impl_self_ty(impl_id);
- let target_trait = impl_data.target_trait.as_ref()?;
- Some(Binders::new(
- self_ty.num_binders,
- TraitRef::from_hir(&ctx, target_trait, Some(self_ty.value))?,
- ))
-}
-
-pub(crate) fn return_type_impl_traits(
- db: &dyn HirDatabase,
- def: hir_def::FunctionId,
-) -> Option<Arc<Binders<ReturnTypeImplTraits>>> {
- // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe
- let data = db.function_data(def);
- let resolver = def.resolver(db.upcast());
- let ctx_ret = TyLoweringContext::new(db, &resolver)
- .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
- .with_type_param_mode(TypeParamLoweringMode::Variable);
- let _ret = Ty::from_hir(&ctx_ret, &data.ret_type);
- let generics = generics(db.upcast(), def.into());
- let num_binders = generics.len();
- let return_type_impl_traits =
- ReturnTypeImplTraits { impl_traits: ctx_ret.opaque_type_data.into_inner() };
- if return_type_impl_traits.impl_traits.is_empty() {
- None
- } else {
- Some(Arc::new(Binders::new(num_binders, return_type_impl_traits)))
- }
-}
+++ /dev/null
-//! This module is concerned with finding methods that a given type provides.
-//! For details about how this works in rustc, see the method lookup page in the
-//! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html)
-//! and the corresponding code mostly in librustc_typeck/check/method/probe.rs.
-use std::{iter, sync::Arc};
-
-use arrayvec::ArrayVec;
-use base_db::CrateId;
-use hir_def::{
- builtin_type::{IntBitness, Signedness},
- lang_item::LangItemTarget,
- type_ref::Mutability,
- AssocContainerId, AssocItemId, FunctionId, HasModule, ImplId, Lookup, TraitId,
-};
-use hir_expand::name::Name;
-use rustc_hash::{FxHashMap, FxHashSet};
-
-use super::Substs;
-use crate::{
- autoderef,
- db::HirDatabase,
- primitive::{FloatBitness, FloatTy, IntTy},
- utils::all_super_traits,
- ApplicationTy, Canonical, DebruijnIndex, InEnvironment, TraitEnvironment, TraitRef, Ty, TyKind,
- TypeCtor, TypeWalk,
-};
-
-/// This is used as a key for indexing impls.
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub enum TyFingerprint {
- Apply(TypeCtor),
-}
-
-impl TyFingerprint {
- /// Creates a TyFingerprint for looking up an impl. Only certain types can
- /// have impls: if we have some `struct S`, we can have an `impl S`, but not
- /// `impl &S`. Hence, this will return `None` for reference types and such.
- pub(crate) fn for_impl(ty: &Ty) -> Option<TyFingerprint> {
- match ty {
- Ty::Apply(a_ty) => Some(TyFingerprint::Apply(a_ty.ctor)),
- _ => None,
- }
- }
-}
-
-pub(crate) const ALL_INT_FPS: [TyFingerprint; 12] = [
- TyFingerprint::Apply(TypeCtor::Int(IntTy {
- signedness: Signedness::Unsigned,
- bitness: IntBitness::X8,
- })),
- TyFingerprint::Apply(TypeCtor::Int(IntTy {
- signedness: Signedness::Unsigned,
- bitness: IntBitness::X16,
- })),
- TyFingerprint::Apply(TypeCtor::Int(IntTy {
- signedness: Signedness::Unsigned,
- bitness: IntBitness::X32,
- })),
- TyFingerprint::Apply(TypeCtor::Int(IntTy {
- signedness: Signedness::Unsigned,
- bitness: IntBitness::X64,
- })),
- TyFingerprint::Apply(TypeCtor::Int(IntTy {
- signedness: Signedness::Unsigned,
- bitness: IntBitness::X128,
- })),
- TyFingerprint::Apply(TypeCtor::Int(IntTy {
- signedness: Signedness::Unsigned,
- bitness: IntBitness::Xsize,
- })),
- TyFingerprint::Apply(TypeCtor::Int(IntTy {
- signedness: Signedness::Signed,
- bitness: IntBitness::X8,
- })),
- TyFingerprint::Apply(TypeCtor::Int(IntTy {
- signedness: Signedness::Signed,
- bitness: IntBitness::X16,
- })),
- TyFingerprint::Apply(TypeCtor::Int(IntTy {
- signedness: Signedness::Signed,
- bitness: IntBitness::X32,
- })),
- TyFingerprint::Apply(TypeCtor::Int(IntTy {
- signedness: Signedness::Signed,
- bitness: IntBitness::X64,
- })),
- TyFingerprint::Apply(TypeCtor::Int(IntTy {
- signedness: Signedness::Signed,
- bitness: IntBitness::X128,
- })),
- TyFingerprint::Apply(TypeCtor::Int(IntTy {
- signedness: Signedness::Signed,
- bitness: IntBitness::Xsize,
- })),
-];
-
-pub(crate) const ALL_FLOAT_FPS: [TyFingerprint; 2] = [
- TyFingerprint::Apply(TypeCtor::Float(FloatTy { bitness: FloatBitness::X32 })),
- TyFingerprint::Apply(TypeCtor::Float(FloatTy { bitness: FloatBitness::X64 })),
-];
-
-/// Trait impls defined or available in some crate.
-#[derive(Debug, Eq, PartialEq)]
-pub struct TraitImpls {
- // If the `Option<TyFingerprint>` is `None`, the impl may apply to any self type.
- map: FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Vec<ImplId>>>,
-}
-
-impl TraitImpls {
- pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
- let _p = profile::span("trait_impls_in_crate_query");
- let mut impls = Self { map: FxHashMap::default() };
-
- let crate_def_map = db.crate_def_map(krate);
- for (_module_id, module_data) in crate_def_map.modules.iter() {
- for impl_id in module_data.scope.impls() {
- let target_trait = match db.impl_trait(impl_id) {
- Some(tr) => tr.value.trait_,
- None => continue,
- };
- let self_ty = db.impl_self_ty(impl_id);
- let self_ty_fp = TyFingerprint::for_impl(&self_ty.value);
- impls
- .map
- .entry(target_trait)
- .or_default()
- .entry(self_ty_fp)
- .or_default()
- .push(impl_id);
- }
- }
-
- Arc::new(impls)
- }
-
- pub(crate) fn trait_impls_in_deps_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
- let _p = profile::span("trait_impls_in_deps_query");
- let crate_graph = db.crate_graph();
- let mut res = Self { map: FxHashMap::default() };
-
- for krate in crate_graph.transitive_deps(krate) {
- res.merge(&db.trait_impls_in_crate(krate));
- }
-
- Arc::new(res)
- }
-
- fn merge(&mut self, other: &Self) {
- for (trait_, other_map) in &other.map {
- let map = self.map.entry(*trait_).or_default();
- for (fp, impls) in other_map {
- let vec = map.entry(*fp).or_default();
- vec.extend(impls);
- }
- }
- }
-
- /// Queries all impls of the given trait.
- pub fn for_trait(&self, trait_: TraitId) -> impl Iterator<Item = ImplId> + '_ {
- self.map
- .get(&trait_)
- .into_iter()
- .flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
- }
-
- /// Queries all impls of `trait_` that may apply to `self_ty`.
- pub fn for_trait_and_self_ty(
- &self,
- trait_: TraitId,
- self_ty: TyFingerprint,
- ) -> impl Iterator<Item = ImplId> + '_ {
- self.map
- .get(&trait_)
- .into_iter()
- .flat_map(move |map| map.get(&None).into_iter().chain(map.get(&Some(self_ty))))
- .flat_map(|v| v.iter().copied())
- }
-
- pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
- self.map.values().flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
- }
-}
-
-/// Inherent impls defined in some crate.
-///
-/// Inherent impls can only be defined in the crate that also defines the self type of the impl
-/// (note that some primitives are considered to be defined by both libcore and liballoc).
-///
-/// This makes inherent impl lookup easier than trait impl lookup since we only have to consider a
-/// single crate.
-#[derive(Debug, Eq, PartialEq)]
-pub struct InherentImpls {
- map: FxHashMap<TyFingerprint, Vec<ImplId>>,
-}
-
-impl InherentImpls {
- pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
- let mut map: FxHashMap<_, Vec<_>> = FxHashMap::default();
-
- let crate_def_map = db.crate_def_map(krate);
- for (_module_id, module_data) in crate_def_map.modules.iter() {
- for impl_id in module_data.scope.impls() {
- let data = db.impl_data(impl_id);
- if data.target_trait.is_some() {
- continue;
- }
-
- let self_ty = db.impl_self_ty(impl_id);
- if let Some(fp) = TyFingerprint::for_impl(&self_ty.value) {
- map.entry(fp).or_default().push(impl_id);
- }
- }
- }
-
- Arc::new(Self { map })
- }
-
- pub fn for_self_ty(&self, self_ty: &Ty) -> &[ImplId] {
- match TyFingerprint::for_impl(self_ty) {
- Some(fp) => self.map.get(&fp).map(|vec| vec.as_ref()).unwrap_or(&[]),
- None => &[],
- }
- }
-
- pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
- self.map.values().flat_map(|v| v.iter().copied())
- }
-}
-
-impl Ty {
- pub fn def_crates(
- &self,
- db: &dyn HirDatabase,
- cur_crate: CrateId,
- ) -> Option<ArrayVec<[CrateId; 2]>> {
- // Types like slice can have inherent impls in several crates, (core and alloc).
- // The corresponding impls are marked with lang items, so we can use them to find the required crates.
- macro_rules! lang_item_crate {
- ($($name:expr),+ $(,)?) => {{
- let mut v = ArrayVec::<[LangItemTarget; 2]>::new();
- $(
- v.extend(db.lang_item(cur_crate, $name.into()));
- )+
- v
- }};
- }
-
- let lang_item_targets = match self {
- Ty::Apply(a_ty) => match a_ty.ctor {
- TypeCtor::Adt(def_id) => {
- return Some(std::iter::once(def_id.module(db.upcast()).krate).collect())
- }
- TypeCtor::Bool => lang_item_crate!("bool"),
- TypeCtor::Char => lang_item_crate!("char"),
- TypeCtor::Float(f) => match f.bitness {
- // There are two lang items: one in libcore (fXX) and one in libstd (fXX_runtime)
- FloatBitness::X32 => lang_item_crate!("f32", "f32_runtime"),
- FloatBitness::X64 => lang_item_crate!("f64", "f64_runtime"),
- },
- TypeCtor::Int(i) => lang_item_crate!(i.ty_to_string()),
- TypeCtor::Str => lang_item_crate!("str_alloc", "str"),
- TypeCtor::Slice => lang_item_crate!("slice_alloc", "slice"),
- TypeCtor::RawPtr(Mutability::Shared) => lang_item_crate!("const_ptr"),
- TypeCtor::RawPtr(Mutability::Mut) => lang_item_crate!("mut_ptr"),
- _ => return None,
- },
- _ => return None,
- };
- let res = lang_item_targets
- .into_iter()
- .filter_map(|it| match it {
- LangItemTarget::ImplDefId(it) => Some(it),
- _ => None,
- })
- .map(|it| it.lookup(db.upcast()).container.module(db.upcast()).krate)
- .collect();
- Some(res)
- }
-}
-/// Look up the method with the given name, returning the actual autoderefed
-/// receiver type (but without autoref applied yet).
-pub(crate) fn lookup_method(
- ty: &Canonical<Ty>,
- db: &dyn HirDatabase,
- env: Arc<TraitEnvironment>,
- krate: CrateId,
- traits_in_scope: &FxHashSet<TraitId>,
- name: &Name,
-) -> Option<(Ty, FunctionId)> {
- iterate_method_candidates(
- ty,
- db,
- env,
- krate,
- &traits_in_scope,
- Some(name),
- LookupMode::MethodCall,
- |ty, f| match f {
- AssocItemId::FunctionId(f) => Some((ty.clone(), f)),
- _ => None,
- },
- )
-}
-
-/// Whether we're looking up a dotted method call (like `v.len()`) or a path
-/// (like `Vec::new`).
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
-pub enum LookupMode {
- /// Looking up a method call like `v.len()`: We only consider candidates
- /// that have a `self` parameter, and do autoderef.
- MethodCall,
- /// Looking up a path like `Vec::new` or `Vec::default`: We consider all
- /// candidates including associated constants, but don't do autoderef.
- Path,
-}
-
-// This would be nicer if it just returned an iterator, but that runs into
-// lifetime problems, because we need to borrow temp `CrateImplDefs`.
-// FIXME add a context type here?
-pub fn iterate_method_candidates<T>(
- ty: &Canonical<Ty>,
- db: &dyn HirDatabase,
- env: Arc<TraitEnvironment>,
- krate: CrateId,
- traits_in_scope: &FxHashSet<TraitId>,
- name: Option<&Name>,
- mode: LookupMode,
- mut callback: impl FnMut(&Ty, AssocItemId) -> Option<T>,
-) -> Option<T> {
- let mut slot = None;
- iterate_method_candidates_impl(
- ty,
- db,
- env,
- krate,
- traits_in_scope,
- name,
- mode,
- &mut |ty, item| {
- assert!(slot.is_none());
- slot = callback(ty, item);
- slot.is_some()
- },
- );
- slot
-}
-
-fn iterate_method_candidates_impl(
- ty: &Canonical<Ty>,
- db: &dyn HirDatabase,
- env: Arc<TraitEnvironment>,
- krate: CrateId,
- traits_in_scope: &FxHashSet<TraitId>,
- name: Option<&Name>,
- mode: LookupMode,
- callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool,
-) -> bool {
- match mode {
- LookupMode::MethodCall => {
- // For method calls, rust first does any number of autoderef, and then one
- // autoref (i.e. when the method takes &self or &mut self). We just ignore
- // the autoref currently -- when we find a method matching the given name,
- // we assume it fits.
-
- // Also note that when we've got a receiver like &S, even if the method we
- // find in the end takes &self, we still do the autoderef step (just as
- // rustc does an autoderef and then autoref again).
- let ty = InEnvironment { value: ty.clone(), environment: env.clone() };
-
- // We have to be careful about the order we're looking at candidates
- // in here. Consider the case where we're resolving `x.clone()`
- // where `x: &Vec<_>`. This resolves to the clone method with self
- // type `Vec<_>`, *not* `&_`. I.e. we need to consider methods where
- // the receiver type exactly matches before cases where we have to
- // do autoref. But in the autoderef steps, the `&_` self type comes
- // up *before* the `Vec<_>` self type.
- //
- // On the other hand, we don't want to just pick any by-value method
- // before any by-autoref method; it's just that we need to consider
- // the methods by autoderef order of *receiver types*, not *self
- // types*.
-
- let deref_chain = autoderef_method_receiver(db, krate, ty);
- for i in 0..deref_chain.len() {
- if iterate_method_candidates_with_autoref(
- &deref_chain[i..],
- db,
- env.clone(),
- krate,
- traits_in_scope,
- name,
- callback,
- ) {
- return true;
- }
- }
- false
- }
- LookupMode::Path => {
- // No autoderef for path lookups
- iterate_method_candidates_for_self_ty(
- &ty,
- db,
- env,
- krate,
- traits_in_scope,
- name,
- callback,
- )
- }
- }
-}
-
-fn iterate_method_candidates_with_autoref(
- deref_chain: &[Canonical<Ty>],
- db: &dyn HirDatabase,
- env: Arc<TraitEnvironment>,
- krate: CrateId,
- traits_in_scope: &FxHashSet<TraitId>,
- name: Option<&Name>,
- mut callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool,
-) -> bool {
- if iterate_method_candidates_by_receiver(
- &deref_chain[0],
- &deref_chain[1..],
- db,
- env.clone(),
- krate,
- &traits_in_scope,
- name,
- &mut callback,
- ) {
- return true;
- }
- let refed = Canonical {
- kinds: deref_chain[0].kinds.clone(),
- value: Ty::apply_one(TypeCtor::Ref(Mutability::Shared), deref_chain[0].value.clone()),
- };
- if iterate_method_candidates_by_receiver(
- &refed,
- deref_chain,
- db,
- env.clone(),
- krate,
- &traits_in_scope,
- name,
- &mut callback,
- ) {
- return true;
- }
- let ref_muted = Canonical {
- kinds: deref_chain[0].kinds.clone(),
- value: Ty::apply_one(TypeCtor::Ref(Mutability::Mut), deref_chain[0].value.clone()),
- };
- if iterate_method_candidates_by_receiver(
- &ref_muted,
- deref_chain,
- db,
- env,
- krate,
- &traits_in_scope,
- name,
- &mut callback,
- ) {
- return true;
- }
- false
-}
-
-fn iterate_method_candidates_by_receiver(
- receiver_ty: &Canonical<Ty>,
- rest_of_deref_chain: &[Canonical<Ty>],
- db: &dyn HirDatabase,
- env: Arc<TraitEnvironment>,
- krate: CrateId,
- traits_in_scope: &FxHashSet<TraitId>,
- name: Option<&Name>,
- mut callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool,
-) -> bool {
- // We're looking for methods with *receiver* type receiver_ty. These could
- // be found in any of the derefs of receiver_ty, so we have to go through
- // that.
- for self_ty in std::iter::once(receiver_ty).chain(rest_of_deref_chain) {
- if iterate_inherent_methods(self_ty, db, name, Some(receiver_ty), krate, &mut callback) {
- return true;
- }
- }
- for self_ty in std::iter::once(receiver_ty).chain(rest_of_deref_chain) {
- if iterate_trait_method_candidates(
- self_ty,
- db,
- env.clone(),
- krate,
- &traits_in_scope,
- name,
- Some(receiver_ty),
- &mut callback,
- ) {
- return true;
- }
- }
- false
-}
-
-fn iterate_method_candidates_for_self_ty(
- self_ty: &Canonical<Ty>,
- db: &dyn HirDatabase,
- env: Arc<TraitEnvironment>,
- krate: CrateId,
- traits_in_scope: &FxHashSet<TraitId>,
- name: Option<&Name>,
- mut callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool,
-) -> bool {
- if iterate_inherent_methods(self_ty, db, name, None, krate, &mut callback) {
- return true;
- }
- iterate_trait_method_candidates(self_ty, db, env, krate, traits_in_scope, name, None, callback)
-}
-
-fn iterate_trait_method_candidates(
- self_ty: &Canonical<Ty>,
- db: &dyn HirDatabase,
- env: Arc<TraitEnvironment>,
- krate: CrateId,
- traits_in_scope: &FxHashSet<TraitId>,
- name: Option<&Name>,
- receiver_ty: Option<&Canonical<Ty>>,
- callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool,
-) -> bool {
- // if ty is `dyn Trait`, the trait doesn't need to be in scope
- let inherent_trait =
- self_ty.value.dyn_trait().into_iter().flat_map(|t| all_super_traits(db.upcast(), t));
- let env_traits = if let Ty::Placeholder(_) = self_ty.value {
- // if we have `T: Trait` in the param env, the trait doesn't need to be in scope
- env.trait_predicates_for_self_ty(&self_ty.value)
- .map(|tr| tr.trait_)
- .flat_map(|t| all_super_traits(db.upcast(), t))
- .collect()
- } else {
- Vec::new()
- };
- let traits =
- inherent_trait.chain(env_traits.into_iter()).chain(traits_in_scope.iter().copied());
- 'traits: for t in traits {
- let data = db.trait_data(t);
-
- // we'll be lazy about checking whether the type implements the
- // trait, but if we find out it doesn't, we'll skip the rest of the
- // iteration
- let mut known_implemented = false;
- for (_name, item) in data.items.iter() {
- if !is_valid_candidate(db, name, receiver_ty, *item, self_ty) {
- continue;
- }
- if !known_implemented {
- let goal = generic_implements_goal(db, env.clone(), t, self_ty.clone());
- if db.trait_solve(krate, goal).is_none() {
- continue 'traits;
- }
- }
- known_implemented = true;
- if callback(&self_ty.value, *item) {
- return true;
- }
- }
- }
- false
-}
-
-fn iterate_inherent_methods(
- self_ty: &Canonical<Ty>,
- db: &dyn HirDatabase,
- name: Option<&Name>,
- receiver_ty: Option<&Canonical<Ty>>,
- krate: CrateId,
- callback: &mut dyn FnMut(&Ty, AssocItemId) -> bool,
-) -> bool {
- let def_crates = match self_ty.value.def_crates(db, krate) {
- Some(k) => k,
- None => return false,
- };
- for krate in def_crates {
- let impls = db.inherent_impls_in_crate(krate);
-
- for &impl_def in impls.for_self_ty(&self_ty.value) {
- for &item in db.impl_data(impl_def).items.iter() {
- if !is_valid_candidate(db, name, receiver_ty, item, self_ty) {
- continue;
- }
- // we have to check whether the self type unifies with the type
- // that the impl is for. If we have a receiver type, this
- // already happens in `is_valid_candidate` above; if not, we
- // check it here
- if receiver_ty.is_none() && inherent_impl_substs(db, impl_def, self_ty).is_none() {
- test_utils::mark::hit!(impl_self_type_match_without_receiver);
- continue;
- }
- if callback(&self_ty.value, item) {
- return true;
- }
- }
- }
- }
- false
-}
-
-/// Returns the self type for the index trait call.
-pub fn resolve_indexing_op(
- db: &dyn HirDatabase,
- ty: &Canonical<Ty>,
- env: Arc<TraitEnvironment>,
- krate: CrateId,
- index_trait: TraitId,
-) -> Option<Canonical<Ty>> {
- let ty = InEnvironment { value: ty.clone(), environment: env.clone() };
- let deref_chain = autoderef_method_receiver(db, krate, ty);
- for ty in deref_chain {
- let goal = generic_implements_goal(db, env.clone(), index_trait, ty.clone());
- if db.trait_solve(krate, goal).is_some() {
- return Some(ty);
- }
- }
- None
-}
-
-fn is_valid_candidate(
- db: &dyn HirDatabase,
- name: Option<&Name>,
- receiver_ty: Option<&Canonical<Ty>>,
- item: AssocItemId,
- self_ty: &Canonical<Ty>,
-) -> bool {
- match item {
- AssocItemId::FunctionId(m) => {
- let data = db.function_data(m);
- if let Some(name) = name {
- if &data.name != name {
- return false;
- }
- }
- if let Some(receiver_ty) = receiver_ty {
- if !data.has_self_param {
- return false;
- }
- let transformed_receiver_ty = match transform_receiver_ty(db, m, self_ty) {
- Some(ty) => ty,
- None => return false,
- };
- if transformed_receiver_ty != receiver_ty.value {
- return false;
- }
- }
- true
- }
- AssocItemId::ConstId(c) => {
- let data = db.const_data(c);
- name.map_or(true, |name| data.name.as_ref() == Some(name)) && receiver_ty.is_none()
- }
- _ => false,
- }
-}
-
-pub(crate) fn inherent_impl_substs(
- db: &dyn HirDatabase,
- impl_id: ImplId,
- self_ty: &Canonical<Ty>,
-) -> Option<Substs> {
- // we create a var for each type parameter of the impl; we need to keep in
- // mind here that `self_ty` might have vars of its own
- let vars = Substs::build_for_def(db, impl_id)
- .fill_with_bound_vars(DebruijnIndex::INNERMOST, self_ty.kinds.len())
- .build();
- let self_ty_with_vars = db.impl_self_ty(impl_id).subst(&vars);
- let mut kinds = self_ty.kinds.to_vec();
- kinds.extend(iter::repeat(TyKind::General).take(vars.len()));
- let tys = Canonical { kinds: kinds.into(), value: (self_ty_with_vars, self_ty.value.clone()) };
- let substs = super::infer::unify(&tys);
- // We only want the substs for the vars we added, not the ones from self_ty.
- // Also, if any of the vars we added are still in there, we replace them by
- // Unknown. I think this can only really happen if self_ty contained
- // Unknown, and in that case we want the result to contain Unknown in those
- // places again.
- substs.map(|s| fallback_bound_vars(s.suffix(vars.len()), self_ty.kinds.len()))
-}
-
-/// This replaces any 'free' Bound vars in `s` (i.e. those with indices past
-/// num_vars_to_keep) by `Ty::Unknown`.
-fn fallback_bound_vars(s: Substs, num_vars_to_keep: usize) -> Substs {
- s.fold_binders(
- &mut |ty, binders| {
- if let Ty::Bound(bound) = &ty {
- if bound.index >= num_vars_to_keep && bound.debruijn >= binders {
- Ty::Unknown
- } else {
- ty
- }
- } else {
- ty
- }
- },
- DebruijnIndex::INNERMOST,
- )
-}
-
-fn transform_receiver_ty(
- db: &dyn HirDatabase,
- function_id: FunctionId,
- self_ty: &Canonical<Ty>,
-) -> Option<Ty> {
- let substs = match function_id.lookup(db.upcast()).container {
- AssocContainerId::TraitId(_) => Substs::build_for_def(db, function_id)
- .push(self_ty.value.clone())
- .fill_with_unknown()
- .build(),
- AssocContainerId::ImplId(impl_id) => inherent_impl_substs(db, impl_id, &self_ty)?,
- AssocContainerId::ContainerId(_) => unreachable!(),
- };
- let sig = db.callable_item_signature(function_id.into());
- Some(sig.value.params()[0].clone().subst_bound_vars(&substs))
-}
-
-pub fn implements_trait(
- ty: &Canonical<Ty>,
- db: &dyn HirDatabase,
- env: Arc<TraitEnvironment>,
- krate: CrateId,
- trait_: TraitId,
-) -> bool {
- let goal = generic_implements_goal(db, env, trait_, ty.clone());
- let solution = db.trait_solve(krate, goal);
-
- solution.is_some()
-}
-
-/// This creates Substs for a trait with the given Self type and type variables
-/// for all other parameters, to query Chalk with it.
-fn generic_implements_goal(
- db: &dyn HirDatabase,
- env: Arc<TraitEnvironment>,
- trait_: TraitId,
- self_ty: Canonical<Ty>,
-) -> Canonical<InEnvironment<super::Obligation>> {
- let mut kinds = self_ty.kinds.to_vec();
- let substs = super::Substs::build_for_def(db, trait_)
- .push(self_ty.value)
- .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len())
- .build();
- kinds.extend(iter::repeat(TyKind::General).take(substs.len() - 1));
- let trait_ref = TraitRef { trait_, substs };
- let obligation = super::Obligation::Trait(trait_ref);
- Canonical { kinds: kinds.into(), value: InEnvironment::new(env, obligation) }
-}
-
-fn autoderef_method_receiver(
- db: &dyn HirDatabase,
- krate: CrateId,
- ty: InEnvironment<Canonical<Ty>>,
-) -> Vec<Canonical<Ty>> {
- let mut deref_chain: Vec<_> = autoderef::autoderef(db, Some(krate), ty).collect();
- // As a last step, we can do array unsizing (that's the only unsizing that rustc does for method receivers!)
- if let Some(Ty::Apply(ApplicationTy { ctor: TypeCtor::Array, parameters })) =
- deref_chain.last().map(|ty| &ty.value)
- {
- let kinds = deref_chain.last().unwrap().kinds.clone();
- let unsized_ty = Ty::apply(TypeCtor::Slice, parameters.clone());
- deref_chain.push(Canonical { value: unsized_ty, kinds })
- }
- deref_chain
-}
+++ /dev/null
-//! Helper functions for binary operator type inference.
-use hir_def::expr::{ArithOp, BinaryOp, CmpOp};
-
-use super::{InferTy, Ty, TypeCtor};
-use crate::ApplicationTy;
-
-pub(super) fn binary_op_return_ty(op: BinaryOp, lhs_ty: Ty, rhs_ty: Ty) -> Ty {
- match op {
- BinaryOp::LogicOp(_) | BinaryOp::CmpOp(_) => Ty::simple(TypeCtor::Bool),
- BinaryOp::Assignment { .. } => Ty::unit(),
- BinaryOp::ArithOp(ArithOp::Shl) | BinaryOp::ArithOp(ArithOp::Shr) => match lhs_ty {
- Ty::Apply(ApplicationTy { ctor, .. }) => match ctor {
- TypeCtor::Int(..) | TypeCtor::Float(..) => lhs_ty,
- _ => Ty::Unknown,
- },
- Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => lhs_ty,
- _ => Ty::Unknown,
- },
- BinaryOp::ArithOp(_) => match rhs_ty {
- Ty::Apply(ApplicationTy { ctor, .. }) => match ctor {
- TypeCtor::Int(..) | TypeCtor::Float(..) => rhs_ty,
- _ => Ty::Unknown,
- },
- Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => rhs_ty,
- _ => Ty::Unknown,
- },
- }
-}
-
-pub(super) fn binary_op_rhs_expectation(op: BinaryOp, lhs_ty: Ty) -> Ty {
- match op {
- BinaryOp::LogicOp(..) => Ty::simple(TypeCtor::Bool),
- BinaryOp::Assignment { op: None } => lhs_ty,
- BinaryOp::CmpOp(CmpOp::Eq { .. }) => match lhs_ty {
- Ty::Apply(ApplicationTy { ctor, .. }) => match ctor {
- TypeCtor::Int(..)
- | TypeCtor::Float(..)
- | TypeCtor::Str
- | TypeCtor::Char
- | TypeCtor::Bool => lhs_ty,
- _ => Ty::Unknown,
- },
- Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => lhs_ty,
- _ => Ty::Unknown,
- },
- BinaryOp::ArithOp(ArithOp::Shl) | BinaryOp::ArithOp(ArithOp::Shr) => Ty::Unknown,
- BinaryOp::CmpOp(CmpOp::Ord { .. })
- | BinaryOp::Assignment { op: Some(_) }
- | BinaryOp::ArithOp(_) => match lhs_ty {
- Ty::Apply(ApplicationTy { ctor, .. }) => match ctor {
- TypeCtor::Int(..) | TypeCtor::Float(..) => lhs_ty,
- _ => Ty::Unknown,
- },
- Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => lhs_ty,
- _ => Ty::Unknown,
- },
- }
-}
+++ /dev/null
-//! Defines primitive types, which have a couple of peculiarities:
-//!
-//! * during type inference, they can be uncertain (ie, `let x = 92;`)
-//! * they don't belong to any particular crate.
-
-use std::fmt;
-
-pub use hir_def::builtin_type::{BuiltinFloat, BuiltinInt, FloatBitness, IntBitness, Signedness};
-
-#[derive(Copy, Clone, Eq, PartialEq, Hash)]
-pub struct IntTy {
- pub signedness: Signedness,
- pub bitness: IntBitness,
-}
-
-impl fmt::Debug for IntTy {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- fmt::Display::fmt(self, f)
- }
-}
-
-impl fmt::Display for IntTy {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- write!(f, "{}", self.ty_to_string())
- }
-}
-
-impl IntTy {
- pub fn isize() -> IntTy {
- IntTy { signedness: Signedness::Signed, bitness: IntBitness::Xsize }
- }
-
- pub fn i8() -> IntTy {
- IntTy { signedness: Signedness::Signed, bitness: IntBitness::X8 }
- }
-
- pub fn i16() -> IntTy {
- IntTy { signedness: Signedness::Signed, bitness: IntBitness::X16 }
- }
-
- pub fn i32() -> IntTy {
- IntTy { signedness: Signedness::Signed, bitness: IntBitness::X32 }
- }
-
- pub fn i64() -> IntTy {
- IntTy { signedness: Signedness::Signed, bitness: IntBitness::X64 }
- }
-
- pub fn i128() -> IntTy {
- IntTy { signedness: Signedness::Signed, bitness: IntBitness::X128 }
- }
-
- pub fn usize() -> IntTy {
- IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::Xsize }
- }
-
- pub fn u8() -> IntTy {
- IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X8 }
- }
-
- pub fn u16() -> IntTy {
- IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X16 }
- }
-
- pub fn u32() -> IntTy {
- IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X32 }
- }
-
- pub fn u64() -> IntTy {
- IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X64 }
- }
-
- pub fn u128() -> IntTy {
- IntTy { signedness: Signedness::Unsigned, bitness: IntBitness::X128 }
- }
-
- pub fn ty_to_string(self) -> &'static str {
- match (self.signedness, self.bitness) {
- (Signedness::Signed, IntBitness::Xsize) => "isize",
- (Signedness::Signed, IntBitness::X8) => "i8",
- (Signedness::Signed, IntBitness::X16) => "i16",
- (Signedness::Signed, IntBitness::X32) => "i32",
- (Signedness::Signed, IntBitness::X64) => "i64",
- (Signedness::Signed, IntBitness::X128) => "i128",
- (Signedness::Unsigned, IntBitness::Xsize) => "usize",
- (Signedness::Unsigned, IntBitness::X8) => "u8",
- (Signedness::Unsigned, IntBitness::X16) => "u16",
- (Signedness::Unsigned, IntBitness::X32) => "u32",
- (Signedness::Unsigned, IntBitness::X64) => "u64",
- (Signedness::Unsigned, IntBitness::X128) => "u128",
- }
- }
-}
-
-#[derive(Copy, Clone, PartialEq, Eq, Hash)]
-pub struct FloatTy {
- pub bitness: FloatBitness,
-}
-
-impl fmt::Debug for FloatTy {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- fmt::Display::fmt(self, f)
- }
-}
-
-impl fmt::Display for FloatTy {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- write!(f, "{}", self.ty_to_string())
- }
-}
-
-impl FloatTy {
- pub fn f32() -> FloatTy {
- FloatTy { bitness: FloatBitness::X32 }
- }
-
- pub fn f64() -> FloatTy {
- FloatTy { bitness: FloatBitness::X64 }
- }
-
- pub fn ty_to_string(self) -> &'static str {
- match self.bitness {
- FloatBitness::X32 => "f32",
- FloatBitness::X64 => "f64",
- }
- }
-}
-
-impl From<BuiltinInt> for IntTy {
- fn from(t: BuiltinInt) -> Self {
- IntTy { signedness: t.signedness, bitness: t.bitness }
- }
-}
-
-impl From<BuiltinFloat> for FloatTy {
- fn from(t: BuiltinFloat) -> Self {
- FloatTy { bitness: t.bitness }
- }
-}
+++ /dev/null
-//! Database used for testing `hir`.
-
-use std::{
- fmt, panic,
- sync::{Arc, Mutex},
-};
-
-use base_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast};
-use hir_def::{db::DefDatabase, ModuleId};
-use hir_expand::db::AstDatabase;
-use rustc_hash::{FxHashMap, FxHashSet};
-use syntax::TextRange;
-use test_utils::extract_annotations;
-
-#[salsa::database(
- base_db::SourceDatabaseExtStorage,
- base_db::SourceDatabaseStorage,
- hir_expand::db::AstDatabaseStorage,
- hir_def::db::InternDatabaseStorage,
- hir_def::db::DefDatabaseStorage,
- crate::db::HirDatabaseStorage
-)]
-#[derive(Default)]
-pub struct TestDB {
- storage: salsa::Storage<TestDB>,
- events: Mutex<Option<Vec<salsa::Event>>>,
-}
-impl fmt::Debug for TestDB {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("TestDB").finish()
- }
-}
-
-impl Upcast<dyn AstDatabase> for TestDB {
- fn upcast(&self) -> &(dyn AstDatabase + 'static) {
- &*self
- }
-}
-
-impl Upcast<dyn DefDatabase> for TestDB {
- fn upcast(&self) -> &(dyn DefDatabase + 'static) {
- &*self
- }
-}
-
-impl salsa::Database for TestDB {
- fn salsa_event(&self, event: salsa::Event) {
- let mut events = self.events.lock().unwrap();
- if let Some(events) = &mut *events {
- events.push(event);
- }
- }
-}
-
-impl salsa::ParallelDatabase for TestDB {
- fn snapshot(&self) -> salsa::Snapshot<TestDB> {
- salsa::Snapshot::new(TestDB {
- storage: self.storage.snapshot(),
- events: Default::default(),
- })
- }
-}
-
-impl panic::RefUnwindSafe for TestDB {}
-
-impl FileLoader for TestDB {
- fn file_text(&self, file_id: FileId) -> Arc<String> {
- FileLoaderDelegate(self).file_text(file_id)
- }
- fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
- FileLoaderDelegate(self).resolve_path(anchor, path)
- }
- fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
- FileLoaderDelegate(self).relevant_crates(file_id)
- }
-}
-
-impl TestDB {
- pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
- for &krate in self.relevant_crates(file_id).iter() {
- let crate_def_map = self.crate_def_map(krate);
- for (local_id, data) in crate_def_map.modules.iter() {
- if data.origin.file_id() == Some(file_id) {
- return ModuleId { krate, local_id };
- }
- }
- }
- panic!("Can't find module for file")
- }
-
- pub(crate) fn extract_annotations(&self) -> FxHashMap<FileId, Vec<(TextRange, String)>> {
- let mut files = Vec::new();
- let crate_graph = self.crate_graph();
- for krate in crate_graph.iter() {
- let crate_def_map = self.crate_def_map(krate);
- for (module_id, _) in crate_def_map.modules.iter() {
- let file_id = crate_def_map[module_id].origin.file_id();
- files.extend(file_id)
- }
- }
- files
- .into_iter()
- .filter_map(|file_id| {
- let text = self.file_text(file_id);
- let annotations = extract_annotations(&text);
- if annotations.is_empty() {
- return None;
- }
- Some((file_id, annotations))
- })
- .collect()
- }
-}
-
-impl TestDB {
- pub fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
- *self.events.lock().unwrap() = Some(Vec::new());
- f();
- self.events.lock().unwrap().take().unwrap()
- }
-
- pub fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
- let events = self.log(f);
- events
- .into_iter()
- .filter_map(|e| match e.kind {
- // This pretty horrible, but `Debug` is the only way to inspect
- // QueryDescriptor at the moment.
- salsa::EventKind::WillExecute { database_key } => {
- Some(format!("{:?}", database_key.debug(self)))
- }
- _ => None,
- })
- .collect()
- }
-}
+++ /dev/null
-mod never_type;
-mod coercion;
-mod regression;
-mod simple;
-mod patterns;
-mod traits;
-mod method_resolution;
-mod macros;
-mod display_source_code;
-
-use std::sync::Arc;
-
-use base_db::{fixture::WithFixture, FileRange, SourceDatabase, SourceDatabaseExt};
-use expect::Expect;
-use hir_def::{
- body::{BodySourceMap, SyntheticSyntax},
- child_by_source::ChildBySource,
- db::DefDatabase,
- item_scope::ItemScope,
- keys,
- nameres::CrateDefMap,
- AssocItemId, DefWithBodyId, LocalModuleId, Lookup, ModuleDefId,
-};
-use hir_expand::{db::AstDatabase, InFile};
-use stdx::format_to;
-use syntax::{
- algo,
- ast::{self, AstNode},
- SyntaxNode,
-};
-
-use crate::{
- db::HirDatabase, display::HirDisplay, infer::TypeMismatch, test_db::TestDB, InferenceResult, Ty,
-};
-
-// These tests compare the inference results for all expressions in a file
-// against snapshots of the expected results using expect. Use
-// `env UPDATE_EXPECT=1 cargo test -p ra_hir_ty` to update the snapshots.
-
-fn setup_tracing() -> tracing::subscriber::DefaultGuard {
- use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry};
- use tracing_tree::HierarchicalLayer;
- let filter = EnvFilter::from_env("CHALK_DEBUG");
- let layer = HierarchicalLayer::default()
- .with_indent_lines(true)
- .with_ansi(false)
- .with_indent_amount(2)
- .with_writer(std::io::stderr);
- let subscriber = Registry::default().with(filter).with(layer);
- tracing::subscriber::set_default(subscriber)
-}
-
-fn check_types(ra_fixture: &str) {
- check_types_impl(ra_fixture, false)
-}
-
-fn check_types_source_code(ra_fixture: &str) {
- check_types_impl(ra_fixture, true)
-}
-
-fn check_types_impl(ra_fixture: &str, display_source: bool) {
- let _tracing = setup_tracing();
- let db = TestDB::with_files(ra_fixture);
- let mut checked_one = false;
- for (file_id, annotations) in db.extract_annotations() {
- for (range, expected) in annotations {
- let ty = type_at_range(&db, FileRange { file_id, range });
- let actual = if display_source {
- let module = db.module_for_file(file_id);
- ty.display_source_code(&db, module).unwrap()
- } else {
- ty.display(&db).to_string()
- };
- assert_eq!(expected, actual);
- checked_one = true;
- }
- }
- assert!(checked_one, "no `//^` annotations found");
-}
-
-fn type_at_range(db: &TestDB, pos: FileRange) -> Ty {
- let file = db.parse(pos.file_id).ok().unwrap();
- let expr = algo::find_node_at_range::<ast::Expr>(file.syntax(), pos.range).unwrap();
- let fn_def = expr.syntax().ancestors().find_map(ast::Fn::cast).unwrap();
- let module = db.module_for_file(pos.file_id);
- let func = *module.child_by_source(db)[keys::FUNCTION]
- .get(&InFile::new(pos.file_id.into(), fn_def))
- .unwrap();
-
- let (_body, source_map) = db.body_with_source_map(func.into());
- if let Some(expr_id) = source_map.node_expr(InFile::new(pos.file_id.into(), &expr)) {
- let infer = db.infer(func.into());
- return infer[expr_id].clone();
- }
- panic!("Can't find expression")
-}
-
-fn infer(ra_fixture: &str) -> String {
- infer_with_mismatches(ra_fixture, false)
-}
-
-fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
- let _tracing = setup_tracing();
- let (db, file_id) = TestDB::with_single_file(content);
-
- let mut buf = String::new();
-
- let mut infer_def = |inference_result: Arc<InferenceResult>,
- body_source_map: Arc<BodySourceMap>| {
- let mut types: Vec<(InFile<SyntaxNode>, &Ty)> = Vec::new();
- let mut mismatches: Vec<(InFile<SyntaxNode>, &TypeMismatch)> = Vec::new();
-
- for (pat, ty) in inference_result.type_of_pat.iter() {
- let syntax_ptr = match body_source_map.pat_syntax(pat) {
- Ok(sp) => {
- let root = db.parse_or_expand(sp.file_id).unwrap();
- sp.map(|ptr| {
- ptr.either(
- |it| it.to_node(&root).syntax().clone(),
- |it| it.to_node(&root).syntax().clone(),
- )
- })
- }
- Err(SyntheticSyntax) => continue,
- };
- types.push((syntax_ptr, ty));
- }
-
- for (expr, ty) in inference_result.type_of_expr.iter() {
- let node = match body_source_map.expr_syntax(expr) {
- Ok(sp) => {
- let root = db.parse_or_expand(sp.file_id).unwrap();
- sp.map(|ptr| ptr.to_node(&root).syntax().clone())
- }
- Err(SyntheticSyntax) => continue,
- };
- types.push((node.clone(), ty));
- if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr) {
- mismatches.push((node, mismatch));
- }
- }
-
- // sort ranges for consistency
- types.sort_by_key(|(node, _)| {
- let range = node.value.text_range();
- (range.start(), range.end())
- });
- for (node, ty) in &types {
- let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.value.clone()) {
- (self_param.self_token().unwrap().text_range(), "self".to_string())
- } else {
- (node.value.text_range(), node.value.text().to_string().replace("\n", " "))
- };
- let macro_prefix = if node.file_id != file_id.into() { "!" } else { "" };
- format_to!(
- buf,
- "{}{:?} '{}': {}\n",
- macro_prefix,
- range,
- ellipsize(text, 15),
- ty.display(&db)
- );
- }
- if include_mismatches {
- mismatches.sort_by_key(|(node, _)| {
- let range = node.value.text_range();
- (range.start(), range.end())
- });
- for (src_ptr, mismatch) in &mismatches {
- let range = src_ptr.value.text_range();
- let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" };
- format_to!(
- buf,
- "{}{:?}: expected {}, got {}\n",
- macro_prefix,
- range,
- mismatch.expected.display(&db),
- mismatch.actual.display(&db),
- );
- }
- }
- };
-
- let module = db.module_for_file(file_id);
- let crate_def_map = db.crate_def_map(module.krate);
-
- let mut defs: Vec<DefWithBodyId> = Vec::new();
- visit_module(&db, &crate_def_map, module.local_id, &mut |it| defs.push(it));
- defs.sort_by_key(|def| match def {
- DefWithBodyId::FunctionId(it) => {
- let loc = it.lookup(&db);
- let tree = db.item_tree(loc.id.file_id);
- tree.source(&db, loc.id).syntax().text_range().start()
- }
- DefWithBodyId::ConstId(it) => {
- let loc = it.lookup(&db);
- let tree = db.item_tree(loc.id.file_id);
- tree.source(&db, loc.id).syntax().text_range().start()
- }
- DefWithBodyId::StaticId(it) => {
- let loc = it.lookup(&db);
- let tree = db.item_tree(loc.id.file_id);
- tree.source(&db, loc.id).syntax().text_range().start()
- }
- });
- for def in defs {
- let (_body, source_map) = db.body_with_source_map(def);
- let infer = db.infer(def);
- infer_def(infer, source_map);
- }
-
- buf.truncate(buf.trim_end().len());
- buf
-}
-
-fn visit_module(
- db: &TestDB,
- crate_def_map: &CrateDefMap,
- module_id: LocalModuleId,
- cb: &mut dyn FnMut(DefWithBodyId),
-) {
- visit_scope(db, crate_def_map, &crate_def_map[module_id].scope, cb);
- for impl_id in crate_def_map[module_id].scope.impls() {
- let impl_data = db.impl_data(impl_id);
- for &item in impl_data.items.iter() {
- match item {
- AssocItemId::FunctionId(it) => {
- let def = it.into();
- cb(def);
- let body = db.body(def);
- visit_scope(db, crate_def_map, &body.item_scope, cb);
- }
- AssocItemId::ConstId(it) => {
- let def = it.into();
- cb(def);
- let body = db.body(def);
- visit_scope(db, crate_def_map, &body.item_scope, cb);
- }
- AssocItemId::TypeAliasId(_) => (),
- }
- }
- }
-
- fn visit_scope(
- db: &TestDB,
- crate_def_map: &CrateDefMap,
- scope: &ItemScope,
- cb: &mut dyn FnMut(DefWithBodyId),
- ) {
- for decl in scope.declarations() {
- match decl {
- ModuleDefId::FunctionId(it) => {
- let def = it.into();
- cb(def);
- let body = db.body(def);
- visit_scope(db, crate_def_map, &body.item_scope, cb);
- }
- ModuleDefId::ConstId(it) => {
- let def = it.into();
- cb(def);
- let body = db.body(def);
- visit_scope(db, crate_def_map, &body.item_scope, cb);
- }
- ModuleDefId::StaticId(it) => {
- let def = it.into();
- cb(def);
- let body = db.body(def);
- visit_scope(db, crate_def_map, &body.item_scope, cb);
- }
- ModuleDefId::TraitId(it) => {
- let trait_data = db.trait_data(it);
- for &(_, item) in trait_data.items.iter() {
- match item {
- AssocItemId::FunctionId(it) => cb(it.into()),
- AssocItemId::ConstId(it) => cb(it.into()),
- AssocItemId::TypeAliasId(_) => (),
- }
- }
- }
- ModuleDefId::ModuleId(it) => visit_module(db, crate_def_map, it.local_id, cb),
- _ => (),
- }
- }
- }
-}
-
-fn ellipsize(mut text: String, max_len: usize) -> String {
- if text.len() <= max_len {
- return text;
- }
- let ellipsis = "...";
- let e_len = ellipsis.len();
- let mut prefix_len = (max_len - e_len) / 2;
- while !text.is_char_boundary(prefix_len) {
- prefix_len += 1;
- }
- let mut suffix_len = max_len - e_len - prefix_len;
- while !text.is_char_boundary(text.len() - suffix_len) {
- suffix_len += 1;
- }
- text.replace_range(prefix_len..text.len() - suffix_len, ellipsis);
- text
-}
-
-#[test]
-fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
- let (mut db, pos) = TestDB::with_position(
- "
- //- /lib.rs
- fn foo() -> i32 {
- <|>1 + 1
- }
- ",
- );
- {
- let events = db.log_executed(|| {
- let module = db.module_for_file(pos.file_id);
- let crate_def_map = db.crate_def_map(module.krate);
- visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
- db.infer(def);
- });
- });
- assert!(format!("{:?}", events).contains("infer"))
- }
-
- let new_text = "
- fn foo() -> i32 {
- 1
- +
- 1
- }
- "
- .to_string();
-
- db.set_file_text(pos.file_id, Arc::new(new_text));
-
- {
- let events = db.log_executed(|| {
- let module = db.module_for_file(pos.file_id);
- let crate_def_map = db.crate_def_map(module.krate);
- visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
- db.infer(def);
- });
- });
- assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
- }
-}
-
-fn check_infer(ra_fixture: &str, expect: Expect) {
- let mut actual = infer(ra_fixture);
- actual.push('\n');
- expect.assert_eq(&actual);
-}
-
-fn check_infer_with_mismatches(ra_fixture: &str, expect: Expect) {
- let mut actual = infer_with_mismatches(ra_fixture, true);
- actual.push('\n');
- expect.assert_eq(&actual);
-}
+++ /dev/null
-use expect::expect;
-use test_utils::mark;
-
-use super::{check_infer, check_infer_with_mismatches};
-
-#[test]
-fn infer_block_expr_type_mismatch() {
- check_infer(
- r"
- fn test() {
- let a: i32 = { 1i64 };
- }
- ",
- expect![[r"
- 10..40 '{ ...4 }; }': ()
- 20..21 'a': i32
- 29..37 '{ 1i64 }': i64
- 31..35 '1i64': i64
- "]],
- );
-}
-
-#[test]
-fn coerce_places() {
- check_infer(
- r#"
- struct S<T> { a: T }
-
- fn f<T>(_: &[T]) -> T { loop {} }
- fn g<T>(_: S<&[T]>) -> T { loop {} }
-
- fn gen<T>() -> *mut [T; 2] { loop {} }
- fn test1<U>() -> *mut [U] {
- gen()
- }
-
- fn test2() {
- let arr: &[u8; 1] = &[1];
-
- let a: &[_] = arr;
- let b = f(arr);
- let c: &[_] = { arr };
- let d = g(S { a: arr });
- let e: [&[_]; 1] = [arr];
- let f: [&[_]; 2] = [arr; 2];
- let g: (&[_], &[_]) = (arr, arr);
- }
-
- #[lang = "sized"]
- pub trait Sized {}
- #[lang = "unsize"]
- pub trait Unsize<T: ?Sized> {}
- #[lang = "coerce_unsized"]
- pub trait CoerceUnsized<T> {}
-
- impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
- impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
- "#,
- expect![[r"
- 30..31 '_': &[T]
- 44..55 '{ loop {} }': T
- 46..53 'loop {}': !
- 51..53 '{}': ()
- 64..65 '_': S<&[T]>
- 81..92 '{ loop {} }': T
- 83..90 'loop {}': !
- 88..90 '{}': ()
- 121..132 '{ loop {} }': *mut [T; _]
- 123..130 'loop {}': !
- 128..130 '{}': ()
- 159..172 '{ gen() }': *mut [U]
- 165..168 'gen': fn gen<U>() -> *mut [U; _]
- 165..170 'gen()': *mut [U; _]
- 185..419 '{ ...rr); }': ()
- 195..198 'arr': &[u8; _]
- 211..215 '&[1]': &[u8; _]
- 212..215 '[1]': [u8; _]
- 213..214 '1': u8
- 226..227 'a': &[u8]
- 236..239 'arr': &[u8; _]
- 249..250 'b': u8
- 253..254 'f': fn f<u8>(&[u8]) -> u8
- 253..259 'f(arr)': u8
- 255..258 'arr': &[u8; _]
- 269..270 'c': &[u8]
- 279..286 '{ arr }': &[u8]
- 281..284 'arr': &[u8; _]
- 296..297 'd': u8
- 300..301 'g': fn g<u8>(S<&[u8]>) -> u8
- 300..315 'g(S { a: arr })': u8
- 302..314 'S { a: arr }': S<&[u8]>
- 309..312 'arr': &[u8; _]
- 325..326 'e': [&[u8]; _]
- 340..345 '[arr]': [&[u8]; _]
- 341..344 'arr': &[u8; _]
- 355..356 'f': [&[u8]; _]
- 370..378 '[arr; 2]': [&[u8]; _]
- 371..374 'arr': &[u8; _]
- 376..377 '2': usize
- 388..389 'g': (&[u8], &[u8])
- 406..416 '(arr, arr)': (&[u8], &[u8])
- 407..410 'arr': &[u8; _]
- 412..415 'arr': &[u8; _]
- "]],
- );
-}
-
-#[test]
-fn infer_let_stmt_coerce() {
- check_infer(
- r"
- fn test() {
- let x: &[isize] = &[1];
- let x: *const [isize] = &[1];
- }
- ",
- expect![[r"
- 10..75 '{ ...[1]; }': ()
- 20..21 'x': &[isize]
- 34..38 '&[1]': &[isize; _]
- 35..38 '[1]': [isize; _]
- 36..37 '1': isize
- 48..49 'x': *const [isize]
- 68..72 '&[1]': &[isize; _]
- 69..72 '[1]': [isize; _]
- 70..71 '1': isize
- "]],
- );
-}
-
-#[test]
-fn infer_custom_coerce_unsized() {
- check_infer(
- r#"
- struct A<T: ?Sized>(*const T);
- struct B<T: ?Sized>(*const T);
- struct C<T: ?Sized> { inner: *const T }
-
- impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<B<U>> for B<T> {}
- impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<C<U>> for C<T> {}
-
- fn foo1<T>(x: A<[T]>) -> A<[T]> { x }
- fn foo2<T>(x: B<[T]>) -> B<[T]> { x }
- fn foo3<T>(x: C<[T]>) -> C<[T]> { x }
-
- fn test(a: A<[u8; 2]>, b: B<[u8; 2]>, c: C<[u8; 2]>) {
- let d = foo1(a);
- let e = foo2(b);
- let f = foo3(c);
- }
-
-
- #[lang = "sized"]
- pub trait Sized {}
- #[lang = "unsize"]
- pub trait Unsize<T: ?Sized> {}
- #[lang = "coerce_unsized"]
- pub trait CoerceUnsized<T> {}
-
- impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
- impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
- "#,
- expect![[r"
- 257..258 'x': A<[T]>
- 278..283 '{ x }': A<[T]>
- 280..281 'x': A<[T]>
- 295..296 'x': B<[T]>
- 316..321 '{ x }': B<[T]>
- 318..319 'x': B<[T]>
- 333..334 'x': C<[T]>
- 354..359 '{ x }': C<[T]>
- 356..357 'x': C<[T]>
- 369..370 'a': A<[u8; _]>
- 384..385 'b': B<[u8; _]>
- 399..400 'c': C<[u8; _]>
- 414..480 '{ ...(c); }': ()
- 424..425 'd': A<[{unknown}]>
- 428..432 'foo1': fn foo1<{unknown}>(A<[{unknown}]>) -> A<[{unknown}]>
- 428..435 'foo1(a)': A<[{unknown}]>
- 433..434 'a': A<[u8; _]>
- 445..446 'e': B<[u8]>
- 449..453 'foo2': fn foo2<u8>(B<[u8]>) -> B<[u8]>
- 449..456 'foo2(b)': B<[u8]>
- 454..455 'b': B<[u8; _]>
- 466..467 'f': C<[u8]>
- 470..474 'foo3': fn foo3<u8>(C<[u8]>) -> C<[u8]>
- 470..477 'foo3(c)': C<[u8]>
- 475..476 'c': C<[u8; _]>
- "]],
- );
-}
-
-#[test]
-fn infer_if_coerce() {
- check_infer(
- r#"
- fn foo<T>(x: &[T]) -> &[T] { loop {} }
- fn test() {
- let x = if true {
- foo(&[1])
- } else {
- &[1]
- };
- }
-
-
- #[lang = "sized"]
- pub trait Sized {}
- #[lang = "unsize"]
- pub trait Unsize<T: ?Sized> {}
- "#,
- expect![[r"
- 10..11 'x': &[T]
- 27..38 '{ loop {} }': &[T]
- 29..36 'loop {}': !
- 34..36 '{}': ()
- 49..125 '{ ... }; }': ()
- 59..60 'x': &[i32]
- 63..122 'if tru... }': &[i32]
- 66..70 'true': bool
- 71..96 '{ ... }': &[i32]
- 81..84 'foo': fn foo<i32>(&[i32]) -> &[i32]
- 81..90 'foo(&[1])': &[i32]
- 85..89 '&[1]': &[i32; _]
- 86..89 '[1]': [i32; _]
- 87..88 '1': i32
- 102..122 '{ ... }': &[i32; _]
- 112..116 '&[1]': &[i32; _]
- 113..116 '[1]': [i32; _]
- 114..115 '1': i32
- "]],
- );
-}
-
-#[test]
-fn infer_if_else_coerce() {
- check_infer(
- r#"
- fn foo<T>(x: &[T]) -> &[T] { loop {} }
- fn test() {
- let x = if true {
- &[1]
- } else {
- foo(&[1])
- };
- }
-
- #[lang = "sized"]
- pub trait Sized {}
- #[lang = "unsize"]
- pub trait Unsize<T: ?Sized> {}
- #[lang = "coerce_unsized"]
- pub trait CoerceUnsized<T> {}
-
- impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
- impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
- "#,
- expect![[r"
- 10..11 'x': &[T]
- 27..38 '{ loop {} }': &[T]
- 29..36 'loop {}': !
- 34..36 '{}': ()
- 49..125 '{ ... }; }': ()
- 59..60 'x': &[i32]
- 63..122 'if tru... }': &[i32]
- 66..70 'true': bool
- 71..91 '{ ... }': &[i32; _]
- 81..85 '&[1]': &[i32; _]
- 82..85 '[1]': [i32; _]
- 83..84 '1': i32
- 97..122 '{ ... }': &[i32]
- 107..110 'foo': fn foo<i32>(&[i32]) -> &[i32]
- 107..116 'foo(&[1])': &[i32]
- 111..115 '&[1]': &[i32; _]
- 112..115 '[1]': [i32; _]
- 113..114 '1': i32
- "]],
- )
-}
-
-#[test]
-fn infer_match_first_coerce() {
- check_infer(
- r#"
- fn foo<T>(x: &[T]) -> &[T] { loop {} }
- fn test(i: i32) {
- let x = match i {
- 2 => foo(&[2]),
- 1 => &[1],
- _ => &[3],
- };
- }
-
- #[lang = "sized"]
- pub trait Sized {}
- #[lang = "unsize"]
- pub trait Unsize<T: ?Sized> {}
- "#,
- expect![[r"
- 10..11 'x': &[T]
- 27..38 '{ loop {} }': &[T]
- 29..36 'loop {}': !
- 34..36 '{}': ()
- 47..48 'i': i32
- 55..149 '{ ... }; }': ()
- 65..66 'x': &[i32]
- 69..146 'match ... }': &[i32]
- 75..76 'i': i32
- 87..88 '2': i32
- 87..88 '2': i32
- 92..95 'foo': fn foo<i32>(&[i32]) -> &[i32]
- 92..101 'foo(&[2])': &[i32]
- 96..100 '&[2]': &[i32; _]
- 97..100 '[2]': [i32; _]
- 98..99 '2': i32
- 111..112 '1': i32
- 111..112 '1': i32
- 116..120 '&[1]': &[i32; _]
- 117..120 '[1]': [i32; _]
- 118..119 '1': i32
- 130..131 '_': i32
- 135..139 '&[3]': &[i32; _]
- 136..139 '[3]': [i32; _]
- 137..138 '3': i32
- "]],
- );
-}
-
-#[test]
-fn infer_match_second_coerce() {
- check_infer(
- r#"
- fn foo<T>(x: &[T]) -> &[T] { loop {} }
- fn test(i: i32) {
- let x = match i {
- 1 => &[1],
- 2 => foo(&[2]),
- _ => &[3],
- };
- }
-
- #[lang = "sized"]
- pub trait Sized {}
- #[lang = "unsize"]
- pub trait Unsize<T: ?Sized> {}
- #[lang = "coerce_unsized"]
- pub trait CoerceUnsized<T> {}
-
- impl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}
- impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}
- "#,
- expect![[r"
- 10..11 'x': &[T]
- 27..38 '{ loop {} }': &[T]
- 29..36 'loop {}': !
- 34..36 '{}': ()
- 47..48 'i': i32
- 55..149 '{ ... }; }': ()
- 65..66 'x': &[i32]
- 69..146 'match ... }': &[i32]
- 75..76 'i': i32
- 87..88 '1': i32
- 87..88 '1': i32
- 92..96 '&[1]': &[i32; _]
- 93..96 '[1]': [i32; _]
- 94..95 '1': i32
- 106..107 '2': i32
- 106..107 '2': i32
- 111..114 'foo': fn foo<i32>(&[i32]) -> &[i32]
- 111..120 'foo(&[2])': &[i32]
- 115..119 '&[2]': &[i32; _]
- 116..119 '[2]': [i32; _]
- 117..118 '2': i32
- 130..131 '_': i32
- 135..139 '&[3]': &[i32; _]
- 136..139 '[3]': [i32; _]
- 137..138 '3': i32
- "]],
- );
-}
-
-#[test]
-fn coerce_merge_one_by_one1() {
- mark::check!(coerce_merge_fail_fallback);
-
- check_infer(
- r"
- fn test() {
- let t = &mut 1;
- let x = match 1 {
- 1 => t as *mut i32,
- 2 => t as &i32,
- _ => t as *const i32,
- };
- }
- ",
- expect![[r"
- 10..144 '{ ... }; }': ()
- 20..21 't': &mut i32
- 24..30 '&mut 1': &mut i32
- 29..30 '1': i32
- 40..41 'x': *const i32
- 44..141 'match ... }': *const i32
- 50..51 '1': i32
- 62..63 '1': i32
- 62..63 '1': i32
- 67..68 't': &mut i32
- 67..80 't as *mut i32': *mut i32
- 90..91 '2': i32
- 90..91 '2': i32
- 95..96 't': &mut i32
- 95..104 't as &i32': &i32
- 114..115 '_': i32
- 119..120 't': &mut i32
- 119..134 't as *const i32': *const i32
- "]],
- );
-}
-
-#[test]
-fn return_coerce_unknown() {
- check_infer_with_mismatches(
- r"
- fn foo() -> u32 {
- return unknown;
- }
- ",
- expect![[r"
- 16..39 '{ ...own; }': u32
- 22..36 'return unknown': !
- 29..36 'unknown': u32
- "]],
- );
-}
-
-#[test]
-fn coerce_autoderef() {
- check_infer_with_mismatches(
- r"
- struct Foo;
- fn takes_ref_foo(x: &Foo) {}
- fn test() {
- takes_ref_foo(&Foo);
- takes_ref_foo(&&Foo);
- takes_ref_foo(&&&Foo);
- }
- ",
- expect![[r"
- 29..30 'x': &Foo
- 38..40 '{}': ()
- 51..132 '{ ...oo); }': ()
- 57..70 'takes_ref_foo': fn takes_ref_foo(&Foo)
- 57..76 'takes_...(&Foo)': ()
- 71..75 '&Foo': &Foo
- 72..75 'Foo': Foo
- 82..95 'takes_ref_foo': fn takes_ref_foo(&Foo)
- 82..102 'takes_...&&Foo)': ()
- 96..101 '&&Foo': &&Foo
- 97..101 '&Foo': &Foo
- 98..101 'Foo': Foo
- 108..121 'takes_ref_foo': fn takes_ref_foo(&Foo)
- 108..129 'takes_...&&Foo)': ()
- 122..128 '&&&Foo': &&&Foo
- 123..128 '&&Foo': &&Foo
- 124..128 '&Foo': &Foo
- 125..128 'Foo': Foo
- "]],
- );
-}
-
-#[test]
-fn coerce_autoderef_generic() {
- check_infer_with_mismatches(
- r"
- struct Foo;
- fn takes_ref<T>(x: &T) -> T { *x }
- fn test() {
- takes_ref(&Foo);
- takes_ref(&&Foo);
- takes_ref(&&&Foo);
- }
- ",
- expect![[r"
- 28..29 'x': &T
- 40..46 '{ *x }': T
- 42..44 '*x': T
- 43..44 'x': &T
- 57..126 '{ ...oo); }': ()
- 63..72 'takes_ref': fn takes_ref<Foo>(&Foo) -> Foo
- 63..78 'takes_ref(&Foo)': Foo
- 73..77 '&Foo': &Foo
- 74..77 'Foo': Foo
- 84..93 'takes_ref': fn takes_ref<&Foo>(&&Foo) -> &Foo
- 84..100 'takes_...&&Foo)': &Foo
- 94..99 '&&Foo': &&Foo
- 95..99 '&Foo': &Foo
- 96..99 'Foo': Foo
- 106..115 'takes_ref': fn takes_ref<&&Foo>(&&&Foo) -> &&Foo
- 106..123 'takes_...&&Foo)': &&Foo
- 116..122 '&&&Foo': &&&Foo
- 117..122 '&&Foo': &&Foo
- 118..122 '&Foo': &Foo
- 119..122 'Foo': Foo
- "]],
- );
-}
-
-#[test]
-fn coerce_autoderef_block() {
- check_infer_with_mismatches(
- r#"
- struct String {}
- #[lang = "deref"]
- trait Deref { type Target; }
- impl Deref for String { type Target = str; }
- fn takes_ref_str(x: &str) {}
- fn returns_string() -> String { loop {} }
- fn test() {
- takes_ref_str(&{ returns_string() });
- }
- "#,
- expect![[r"
- 126..127 'x': &str
- 135..137 '{}': ()
- 168..179 '{ loop {} }': String
- 170..177 'loop {}': !
- 175..177 '{}': ()
- 190..235 '{ ... }); }': ()
- 196..209 'takes_ref_str': fn takes_ref_str(&str)
- 196..232 'takes_...g() })': ()
- 210..231 '&{ ret...ng() }': &String
- 211..231 '{ retu...ng() }': String
- 213..227 'returns_string': fn returns_string() -> String
- 213..229 'return...ring()': String
- "]],
- );
-}
-
-#[test]
-fn closure_return_coerce() {
- check_infer_with_mismatches(
- r"
- fn foo() {
- let x = || {
- if true {
- return &1u32;
- }
- &&1u32
- };
- }
- ",
- expect![[r"
- 9..105 '{ ... }; }': ()
- 19..20 'x': || -> &u32
- 23..102 '|| { ... }': || -> &u32
- 26..102 '{ ... }': &u32
- 36..81 'if tru... }': ()
- 39..43 'true': bool
- 44..81 '{ ... }': ()
- 58..70 'return &1u32': !
- 65..70 '&1u32': &u32
- 66..70 '1u32': u32
- 90..96 '&&1u32': &&u32
- 91..96 '&1u32': &u32
- 92..96 '1u32': u32
- "]],
- );
-}
-
-#[test]
-fn coerce_fn_item_to_fn_ptr() {
- check_infer_with_mismatches(
- r"
- fn foo(x: u32) -> isize { 1 }
- fn test() {
- let f: fn(u32) -> isize = foo;
- }
- ",
- expect![[r"
- 7..8 'x': u32
- 24..29 '{ 1 }': isize
- 26..27 '1': isize
- 40..78 '{ ...foo; }': ()
- 50..51 'f': fn(u32) -> isize
- 72..75 'foo': fn foo(u32) -> isize
- "]],
- );
-}
-
-#[test]
-fn coerce_fn_items_in_match_arms() {
- mark::check!(coerce_fn_reification);
-
- check_infer_with_mismatches(
- r"
- fn foo1(x: u32) -> isize { 1 }
- fn foo2(x: u32) -> isize { 2 }
- fn foo3(x: u32) -> isize { 3 }
- fn test() {
- let x = match 1 {
- 1 => foo1,
- 2 => foo2,
- _ => foo3,
- };
- }
- ",
- expect![[r"
- 8..9 'x': u32
- 25..30 '{ 1 }': isize
- 27..28 '1': isize
- 39..40 'x': u32
- 56..61 '{ 2 }': isize
- 58..59 '2': isize
- 70..71 'x': u32
- 87..92 '{ 3 }': isize
- 89..90 '3': isize
- 103..192 '{ ... }; }': ()
- 113..114 'x': fn(u32) -> isize
- 117..189 'match ... }': fn(u32) -> isize
- 123..124 '1': i32
- 135..136 '1': i32
- 135..136 '1': i32
- 140..144 'foo1': fn foo1(u32) -> isize
- 154..155 '2': i32
- 154..155 '2': i32
- 159..163 'foo2': fn foo2(u32) -> isize
- 173..174 '_': i32
- 178..182 'foo3': fn foo3(u32) -> isize
- "]],
- );
-}
-
-#[test]
-fn coerce_closure_to_fn_ptr() {
- check_infer_with_mismatches(
- r"
- fn test() {
- let f: fn(u32) -> isize = |x| { 1 };
- }
- ",
- expect![[r"
- 10..54 '{ ...1 }; }': ()
- 20..21 'f': fn(u32) -> isize
- 42..51 '|x| { 1 }': |u32| -> isize
- 43..44 'x': u32
- 46..51 '{ 1 }': isize
- 48..49 '1': isize
- "]],
- );
-}
-
-#[test]
-fn coerce_placeholder_ref() {
- // placeholders should unify, even behind references
- check_infer_with_mismatches(
- r"
- struct S<T> { t: T }
- impl<TT> S<TT> {
- fn get(&self) -> &TT {
- &self.t
- }
- }
- ",
- expect![[r"
- 50..54 'self': &S<TT>
- 63..86 '{ ... }': &TT
- 73..80 '&self.t': &TT
- 74..78 'self': &S<TT>
- 74..80 'self.t': TT
- "]],
- );
-}
-
-#[test]
-fn coerce_unsize_array() {
- check_infer_with_mismatches(
- r#"
- #[lang = "unsize"]
- pub trait Unsize<T> {}
- #[lang = "coerce_unsized"]
- pub trait CoerceUnsized<T> {}
-
- impl<T: Unsize<U>, U> CoerceUnsized<&U> for &T {}
-
- fn test() {
- let f: &[usize] = &[1, 2, 3];
- }
- "#,
- expect![[r"
- 161..198 '{ ... 3]; }': ()
- 171..172 'f': &[usize]
- 185..195 '&[1, 2, 3]': &[usize; _]
- 186..195 '[1, 2, 3]': [usize; _]
- 187..188 '1': usize
- 190..191 '2': usize
- 193..194 '3': usize
- "]],
- );
-}
-
-#[test]
-fn coerce_unsize_trait_object_simple() {
- check_infer_with_mismatches(
- r#"
- #[lang = "sized"]
- pub trait Sized {}
- #[lang = "unsize"]
- pub trait Unsize<T> {}
- #[lang = "coerce_unsized"]
- pub trait CoerceUnsized<T> {}
-
- impl<T: Unsize<U>, U> CoerceUnsized<&U> for &T {}
-
- trait Foo<T, U> {}
- trait Bar<U, T, X>: Foo<T, U> {}
- trait Baz<T, X>: Bar<usize, T, X> {}
-
- struct S<T, X>;
- impl<T, X> Foo<T, usize> for S<T, X> {}
- impl<T, X> Bar<usize, T, X> for S<T, X> {}
- impl<T, X> Baz<T, X> for S<T, X> {}
-
- fn test() {
- let obj: &dyn Baz<i8, i16> = &S;
- let obj: &dyn Bar<_, i8, i16> = &S;
- let obj: &dyn Foo<i8, _> = &S;
- }
- "#,
- expect![[r"
- 424..539 '{ ... &S; }': ()
- 434..437 'obj': &dyn Baz<i8, i16>
- 459..461 '&S': &S<i8, i16>
- 460..461 'S': S<i8, i16>
- 471..474 'obj': &dyn Bar<usize, i8, i16>
- 499..501 '&S': &S<i8, i16>
- 500..501 'S': S<i8, i16>
- 511..514 'obj': &dyn Foo<i8, usize>
- 534..536 '&S': &S<i8, {unknown}>
- 535..536 'S': S<i8, {unknown}>
- "]],
- );
-}
-
-#[test]
-// The rust reference says this should be possible, but rustc doesn't implement
-// it. We used to support it, but Chalk doesn't.
-#[ignore]
-fn coerce_unsize_trait_object_to_trait_object() {
- check_infer_with_mismatches(
- r#"
- #[lang = "sized"]
- pub trait Sized {}
- #[lang = "unsize"]
- pub trait Unsize<T> {}
- #[lang = "coerce_unsized"]
- pub trait CoerceUnsized<T> {}
-
- impl<T: Unsize<U>, U> CoerceUnsized<&U> for &T {}
-
- trait Foo<T, U> {}
- trait Bar<U, T, X>: Foo<T, U> {}
- trait Baz<T, X>: Bar<usize, T, X> {}
-
- struct S<T, X>;
- impl<T, X> Foo<T, usize> for S<T, X> {}
- impl<T, X> Bar<usize, T, X> for S<T, X> {}
- impl<T, X> Baz<T, X> for S<T, X> {}
-
- fn test() {
- let obj: &dyn Baz<i8, i16> = &S;
- let obj: &dyn Bar<_, _, _> = obj;
- let obj: &dyn Foo<_, _> = obj;
- let obj2: &dyn Baz<i8, i16> = &S;
- let _: &dyn Foo<_, _> = obj2;
- }
- "#,
- expect![[r"
- 424..609 '{ ...bj2; }': ()
- 434..437 'obj': &dyn Baz<i8, i16>
- 459..461 '&S': &S<i8, i16>
- 460..461 'S': S<i8, i16>
- 471..474 'obj': &dyn Bar<usize, i8, i16>
- 496..499 'obj': &dyn Baz<i8, i16>
- 509..512 'obj': &dyn Foo<i8, usize>
- 531..534 'obj': &dyn Bar<usize, i8, i16>
- 544..548 'obj2': &dyn Baz<i8, i16>
- 570..572 '&S': &S<i8, i16>
- 571..572 'S': S<i8, i16>
- 582..583 '_': &dyn Foo<i8, usize>
- 602..606 'obj2': &dyn Baz<i8, i16>
- "]],
- );
-}
-
-#[test]
-fn coerce_unsize_super_trait_cycle() {
- check_infer_with_mismatches(
- r#"
- #[lang = "sized"]
- pub trait Sized {}
- #[lang = "unsize"]
- pub trait Unsize<T> {}
- #[lang = "coerce_unsized"]
- pub trait CoerceUnsized<T> {}
-
- impl<T: Unsize<U>, U> CoerceUnsized<&U> for &T {}
-
- trait A {}
- trait B: C + A {}
- trait C: B {}
- trait D: C
-
- struct S;
- impl A for S {}
- impl B for S {}
- impl C for S {}
- impl D for S {}
-
- fn test() {
- let obj: &dyn D = &S;
- let obj: &dyn A = &S;
- }
- "#,
- expect![[r"
- 328..383 '{ ... &S; }': ()
- 338..341 'obj': &dyn D
- 352..354 '&S': &S
- 353..354 'S': S
- 364..367 'obj': &dyn A
- 378..380 '&S': &S
- 379..380 'S': S
- "]],
- );
-}
-
-#[ignore]
-#[test]
-fn coerce_unsize_generic() {
- // FIXME: Implement this
- // https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
- check_infer_with_mismatches(
- r#"
- #[lang = "unsize"]
- pub trait Unsize<T> {}
- #[lang = "coerce_unsized"]
- pub trait CoerceUnsized<T> {}
-
- impl<T: Unsize<U>, U> CoerceUnsized<&U> for &T {}
-
- struct Foo<T> { t: T };
- struct Bar<T>(Foo<T>);
-
- fn test() {
- let _: &Foo<[usize]> = &Foo { t: [1, 2, 3] };
- let _: &Bar<[usize]> = &Bar(Foo { t: [1, 2, 3] });
- }
- "#,
- expect![[r"
- "]],
- );
-}
+++ /dev/null
-use super::check_types_source_code;
-
-#[test]
-fn qualify_path_to_submodule() {
- check_types_source_code(
- r#"
-mod foo {
- pub struct Foo;
-}
-
-fn bar() {
- let foo: foo::Foo = foo::Foo;
- foo
-} //^ foo::Foo
-
-"#,
- );
-}
-
-#[test]
-fn omit_default_type_parameters() {
- check_types_source_code(
- r#"
-struct Foo<T = u8> { t: T }
-fn main() {
- let foo = Foo { t: 5u8 };
- foo;
-} //^ Foo
-"#,
- );
-
- check_types_source_code(
- r#"
-struct Foo<K, T = u8> { k: K, t: T }
-fn main() {
- let foo = Foo { k: 400, t: 5u8 };
- foo;
-} //^ Foo<i32>
-"#,
- );
-}
+++ /dev/null
-use std::fs;
-
-use expect::expect;
-use test_utils::project_dir;
-
-use super::{check_infer, check_types};
-
-#[test]
-fn cfg_impl_def() {
- check_types(
- r#"
-//- /main.rs crate:main deps:foo cfg:test
-use foo::S as T;
-struct S;
-
-#[cfg(test)]
-impl S {
- fn foo1(&self) -> i32 { 0 }
-}
-
-#[cfg(not(test))]
-impl S {
- fn foo2(&self) -> i32 { 0 }
-}
-
-fn test() {
- let t = (S.foo1(), S.foo2(), T.foo3(), T.foo4());
- t;
-} //^ (i32, {unknown}, i32, {unknown})
-
-//- /foo.rs crate:foo
-struct S;
-
-#[cfg(not(test))]
-impl S {
- fn foo3(&self) -> i32 { 0 }
-}
-
-#[cfg(test)]
-impl S {
- fn foo4(&self) -> i32 { 0 }
-}
-"#,
- );
-}
-
-#[test]
-fn infer_macros_expanded() {
- check_infer(
- r#"
- struct Foo(Vec<i32>);
-
- macro_rules! foo {
- ($($item:expr),*) => {
- {
- Foo(vec![$($item,)*])
- }
- };
- }
-
- fn main() {
- let x = foo!(1,2);
- }
- "#,
- expect![[r#"
- !0..17 '{Foo(v...,2,])}': Foo
- !1..4 'Foo': Foo({unknown}) -> Foo
- !1..16 'Foo(vec![1,2,])': Foo
- !5..15 'vec![1,2,]': {unknown}
- 155..181 '{ ...,2); }': ()
- 165..166 'x': Foo
- "#]],
- );
-}
-
-#[test]
-fn infer_legacy_textual_scoped_macros_expanded() {
- check_infer(
- r#"
- struct Foo(Vec<i32>);
-
- #[macro_use]
- mod m {
- macro_rules! foo {
- ($($item:expr),*) => {
- {
- Foo(vec![$($item,)*])
- }
- };
- }
- }
-
- fn main() {
- let x = foo!(1,2);
- let y = crate::foo!(1,2);
- }
- "#,
- expect![[r#"
- !0..17 '{Foo(v...,2,])}': Foo
- !1..4 'Foo': Foo({unknown}) -> Foo
- !1..16 'Foo(vec![1,2,])': Foo
- !5..15 'vec![1,2,]': {unknown}
- 194..250 '{ ...,2); }': ()
- 204..205 'x': Foo
- 227..228 'y': {unknown}
- 231..247 'crate:...!(1,2)': {unknown}
- "#]],
- );
-}
-
-#[test]
-fn infer_path_qualified_macros_expanded() {
- check_infer(
- r#"
- #[macro_export]
- macro_rules! foo {
- () => { 42i32 }
- }
-
- mod m {
- pub use super::foo as bar;
- }
-
- fn main() {
- let x = crate::foo!();
- let y = m::bar!();
- }
- "#,
- expect![[r#"
- !0..5 '42i32': i32
- !0..5 '42i32': i32
- 110..163 '{ ...!(); }': ()
- 120..121 'x': i32
- 147..148 'y': i32
- "#]],
- );
-}
-
-#[test]
-fn expr_macro_expanded_in_various_places() {
- check_infer(
- r#"
- macro_rules! spam {
- () => (1isize);
- }
-
- fn spam() {
- spam!();
- (spam!());
- spam!().spam(spam!());
- for _ in spam!() {}
- || spam!();
- while spam!() {}
- break spam!();
- return spam!();
- match spam!() {
- _ if spam!() => spam!(),
- }
- spam!()(spam!());
- Spam { spam: spam!() };
- spam!()[spam!()];
- await spam!();
- spam!() as usize;
- &spam!();
- -spam!();
- spam!()..spam!();
- spam!() + spam!();
- }
- "#,
- expect![[r#"
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- !0..6 '1isize': isize
- 53..456 '{ ...!(); }': ()
- 87..108 'spam!(...am!())': {unknown}
- 114..133 'for _ ...!() {}': ()
- 118..119 '_': {unknown}
- 131..133 '{}': ()
- 138..148 '|| spam!()': || -> isize
- 154..170 'while ...!() {}': ()
- 168..170 '{}': ()
- 175..188 'break spam!()': !
- 194..208 'return spam!()': !
- 214..268 'match ... }': isize
- 238..239 '_': isize
- 273..289 'spam!(...am!())': {unknown}
- 295..317 'Spam {...m!() }': {unknown}
- 323..339 'spam!(...am!()]': {unknown}
- 364..380 'spam!(... usize': usize
- 386..394 '&spam!()': &isize
- 400..408 '-spam!()': isize
- 414..430 'spam!(...pam!()': {unknown}
- 436..453 'spam!(...pam!()': isize
- "#]],
- );
-}
-
-#[test]
-fn infer_type_value_macro_having_same_name() {
- check_infer(
- r#"
- #[macro_export]
- macro_rules! foo {
- () => {
- mod foo {
- pub use super::foo;
- }
- };
- ($x:tt) => {
- $x
- };
- }
-
- foo!();
-
- fn foo() {
- let foo = foo::foo!(42i32);
- }
- "#,
- expect![[r#"
- !0..5 '42i32': i32
- 170..205 '{ ...32); }': ()
- 180..183 'foo': i32
- "#]],
- );
-}
-
-#[test]
-fn processes_impls_generated_by_macros() {
- check_types(
- r#"
-macro_rules! m {
- ($ident:ident) => (impl Trait for $ident {})
-}
-trait Trait { fn foo(self) -> u128 {} }
-struct S;
-m!(S);
-fn test() { S.foo(); }
- //^ u128
-"#,
- );
-}
-
-#[test]
-fn infer_assoc_items_generated_by_macros() {
- check_types(
- r#"
-macro_rules! m {
- () => (fn foo(&self) -> u128 {0})
-}
-struct S;
-impl S {
- m!();
-}
-
-fn test() { S.foo(); }
- //^ u128
-"#,
- );
-}
-
-#[test]
-fn infer_assoc_items_generated_by_macros_chain() {
- check_types(
- r#"
-macro_rules! m_inner {
- () => {fn foo(&self) -> u128 {0}}
-}
-macro_rules! m {
- () => {m_inner!();}
-}
-
-struct S;
-impl S {
- m!();
-}
-
-fn test() { S.foo(); }
- //^ u128
-"#,
- );
-}
-
-#[test]
-fn infer_macro_with_dollar_crate_is_correct_in_expr() {
- check_types(
- r#"
-//- /main.rs crate:main deps:foo
-fn test() {
- let x = (foo::foo!(1), foo::foo!(2));
- x;
-} //^ (i32, usize)
-
-//- /lib.rs crate:foo
-#[macro_export]
-macro_rules! foo {
- (1) => { $crate::bar!() };
- (2) => { 1 + $crate::baz() };
-}
-
-#[macro_export]
-macro_rules! bar {
- () => { 42 }
-}
-
-pub fn baz() -> usize { 31usize }
-"#,
- );
-}
-
-#[test]
-fn infer_macro_with_dollar_crate_is_correct_in_trait_associate_type() {
- check_types(
- r#"
-//- /main.rs crate:main deps:foo
-use foo::Trait;
-
-fn test() {
- let msg = foo::Message(foo::MessageRef);
- let r = msg.deref();
- r;
- //^ &MessageRef
-}
-
-//- /lib.rs crate:foo
-pub struct MessageRef;
-pub struct Message(MessageRef);
-
-pub trait Trait {
- type Target;
- fn deref(&self) -> &Self::Target;
-}
-
-#[macro_export]
-macro_rules! expand {
- () => {
- impl Trait for Message {
- type Target = $crate::MessageRef;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
- }
- }
-}
-
-expand!();
-"#,
- );
-}
-
-#[test]
-fn infer_type_value_non_legacy_macro_use_as() {
- check_infer(
- r#"
- mod m {
- macro_rules! _foo {
- ($x:ident) => { type $x = u64; }
- }
- pub(crate) use _foo as foo;
- }
-
- m::foo!(foo);
- use foo as bar;
- fn f() -> bar { 0 }
- fn main() {
- let _a = f();
- }
- "#,
- expect![[r#"
- 158..163 '{ 0 }': u64
- 160..161 '0': u64
- 174..196 '{ ...f(); }': ()
- 184..186 '_a': u64
- 190..191 'f': fn f() -> u64
- 190..193 'f()': u64
- "#]],
- );
-}
-
-#[test]
-fn infer_local_macro() {
- check_infer(
- r#"
- fn main() {
- macro_rules! foo {
- () => { 1usize }
- }
- let _a = foo!();
- }
- "#,
- expect![[r#"
- !0..6 '1usize': usize
- 10..89 '{ ...!(); }': ()
- 16..65 'macro_... }': {unknown}
- 74..76 '_a': usize
- "#]],
- );
-}
-
-#[test]
-fn infer_local_inner_macros() {
- check_types(
- r#"
-//- /main.rs crate:main deps:foo
-fn test() {
- let x = foo::foo!(1);
- x;
-} //^ i32
-
-//- /lib.rs crate:foo
-#[macro_export(local_inner_macros)]
-macro_rules! foo {
- (1) => { bar!() };
-}
-
-#[macro_export]
-macro_rules! bar {
- () => { 42 }
-}
-
-"#,
- );
-}
-
-#[test]
-fn infer_builtin_macros_line() {
- check_infer(
- r#"
- #[rustc_builtin_macro]
- macro_rules! line {() => {}}
-
- fn main() {
- let x = line!();
- }
- "#,
- expect![[r#"
- !0..1 '0': i32
- 63..87 '{ ...!(); }': ()
- 73..74 'x': i32
- "#]],
- );
-}
-
-#[test]
-fn infer_builtin_macros_file() {
- check_infer(
- r#"
- #[rustc_builtin_macro]
- macro_rules! file {() => {}}
-
- fn main() {
- let x = file!();
- }
- "#,
- expect![[r#"
- !0..2 '""': &str
- 63..87 '{ ...!(); }': ()
- 73..74 'x': &str
- "#]],
- );
-}
-
-#[test]
-fn infer_builtin_macros_column() {
- check_infer(
- r#"
- #[rustc_builtin_macro]
- macro_rules! column {() => {}}
-
- fn main() {
- let x = column!();
- }
- "#,
- expect![[r#"
- !0..1 '0': i32
- 65..91 '{ ...!(); }': ()
- 75..76 'x': i32
- "#]],
- );
-}
-
-#[test]
-fn infer_builtin_macros_concat() {
- check_infer(
- r#"
- #[rustc_builtin_macro]
- macro_rules! concat {() => {}}
-
- fn main() {
- let x = concat!("hello", concat!("world", "!"));
- }
- "#,
- expect![[r#"
- !0..13 '"helloworld!"': &str
- 65..121 '{ ...")); }': ()
- 75..76 'x': &str
- "#]],
- );
-}
-
-#[test]
-fn infer_builtin_macros_include() {
- check_types(
- r#"
-//- /main.rs
-#[rustc_builtin_macro]
-macro_rules! include {() => {}}
-
-include!("foo.rs");
-
-fn main() {
- bar();
-} //^ u32
-
-//- /foo.rs
-fn bar() -> u32 {0}
-"#,
- );
-}
-
-#[test]
-#[ignore]
-fn include_accidentally_quadratic() {
- let file = project_dir().join("crates/syntax/test_data/accidentally_quadratic");
- let big_file = fs::read_to_string(file).unwrap();
- let big_file = vec![big_file; 10].join("\n");
-
- let fixture = r#"
-//- /main.rs
-#[rustc_builtin_macro]
-macro_rules! include {() => {}}
-
-include!("foo.rs");
-
-fn main() {
- RegisterBlock { };
- //^ RegisterBlock
-}
- "#;
- let fixture = format!("{}\n//- /foo.rs\n{}", fixture, big_file);
- check_types(&fixture);
-}
-
-#[test]
-fn infer_builtin_macros_include_concat() {
- check_types(
- r#"
-//- /main.rs
-#[rustc_builtin_macro]
-macro_rules! include {() => {}}
-
-#[rustc_builtin_macro]
-macro_rules! concat {() => {}}
-
-include!(concat!("f", "oo.rs"));
-
-fn main() {
- bar();
-} //^ u32
-
-//- /foo.rs
-fn bar() -> u32 {0}
-"#,
- );
-}
-
-#[test]
-fn infer_builtin_macros_include_concat_with_bad_env_should_failed() {
- check_types(
- r#"
-//- /main.rs
-#[rustc_builtin_macro]
-macro_rules! include {() => {}}
-
-#[rustc_builtin_macro]
-macro_rules! concat {() => {}}
-
-#[rustc_builtin_macro]
-macro_rules! env {() => {}}
-
-include!(concat!(env!("OUT_DIR"), "/foo.rs"));
-
-fn main() {
- bar();
-} //^ {unknown}
-
-//- /foo.rs
-fn bar() -> u32 {0}
-"#,
- );
-}
-
-#[test]
-fn infer_builtin_macros_include_itself_should_failed() {
- check_types(
- r#"
-#[rustc_builtin_macro]
-macro_rules! include {() => {}}
-
-include!("main.rs");
-
-fn main() {
- 0
-} //^ i32
-"#,
- );
-}
-
-#[test]
-fn infer_builtin_macros_concat_with_lazy() {
- check_infer(
- r#"
- macro_rules! hello {() => {"hello"}}
-
- #[rustc_builtin_macro]
- macro_rules! concat {() => {}}
-
- fn main() {
- let x = concat!(hello!(), concat!("world", "!"));
- }
- "#,
- expect![[r#"
- !0..13 '"helloworld!"': &str
- 103..160 '{ ...")); }': ()
- 113..114 'x': &str
- "#]],
- );
-}
-
-#[test]
-fn infer_builtin_macros_env() {
- check_infer(
- r#"
- //- /main.rs env:foo=bar
- #[rustc_builtin_macro]
- macro_rules! env {() => {}}
-
- fn main() {
- let x = env!("foo");
- }
- "#,
- expect![[r#"
- !0..22 '"__RA_...TED__"': &str
- 62..90 '{ ...o"); }': ()
- 72..73 'x': &str
- "#]],
- );
-}
-
-#[test]
-fn infer_derive_clone_simple() {
- check_types(
- r#"
-//- /main.rs crate:main deps:core
-#[derive(Clone)]
-struct S;
-fn test() {
- S.clone();
-} //^ S
-
-//- /lib.rs crate:core
-#[prelude_import]
-use clone::*;
-mod clone {
- trait Clone {
- fn clone(&self) -> Self;
- }
-}
-"#,
- );
-}
-
-#[test]
-fn infer_derive_clone_in_core() {
- check_types(
- r#"
-//- /lib.rs crate:core
-#[prelude_import]
-use clone::*;
-mod clone {
- trait Clone {
- fn clone(&self) -> Self;
- }
-}
-#[derive(Clone)]
-pub struct S;
-
-//- /main.rs crate:main deps:core
-use core::S;
-fn test() {
- S.clone();
-} //^ S
-"#,
- );
-}
-
-#[test]
-fn infer_derive_clone_with_params() {
- check_types(
- r#"
-//- /main.rs crate:main deps:core
-#[derive(Clone)]
-struct S;
-#[derive(Clone)]
-struct Wrapper<T>(T);
-struct NonClone;
-fn test() {
- (Wrapper(S).clone(), Wrapper(NonClone).clone());
- //^ (Wrapper<S>, {unknown})
-}
-
-//- /lib.rs crate:core
-#[prelude_import]
-use clone::*;
-mod clone {
- trait Clone {
- fn clone(&self) -> Self;
- }
-}
-"#,
- );
-}
-
-#[test]
-fn infer_custom_derive_simple() {
- // FIXME: this test current now do nothing
- check_types(
- r#"
-//- /main.rs crate:main
-use foo::Foo;
-
-#[derive(Foo)]
-struct S{}
-
-fn test() {
- S{};
-} //^ S
-"#,
- );
-}
-
-#[test]
-fn macro_in_arm() {
- check_infer(
- r#"
- macro_rules! unit {
- () => { () };
- }
-
- fn main() {
- let x = match () {
- unit!() => 92u32,
- };
- }
- "#,
- expect![[r#"
- 51..110 '{ ... }; }': ()
- 61..62 'x': u32
- 65..107 'match ... }': u32
- 71..73 '()': ()
- 84..91 'unit!()': ()
- 95..100 '92u32': u32
- "#]],
- );
-}
+++ /dev/null
-use expect::expect;
-
-use super::{check_infer, check_types};
-
-#[test]
-fn infer_slice_method() {
- check_infer(
- r#"
- #[lang = "slice"]
- impl<T> [T] {
- fn foo(&self) -> T {
- loop {}
- }
- }
-
- #[lang = "slice_alloc"]
- impl<T> [T] {}
-
- fn test(x: &[u8]) {
- <[_]>::foo(x);
- }
- "#,
- expect![[r#"
- 44..48 'self': &[T]
- 55..78 '{ ... }': T
- 65..72 'loop {}': !
- 70..72 '{}': ()
- 130..131 'x': &[u8]
- 140..162 '{ ...(x); }': ()
- 146..156 '<[_]>::foo': fn foo<u8>(&[u8]) -> u8
- 146..159 '<[_]>::foo(x)': u8
- 157..158 'x': &[u8]
- "#]],
- );
-}
-
-#[test]
-fn infer_associated_method_struct() {
- check_infer(
- r#"
- struct A { x: u32 }
-
- impl A {
- fn new() -> A {
- A { x: 0 }
- }
- }
- fn test() {
- let a = A::new();
- a.x;
- }
- "#,
- expect![[r#"
- 48..74 '{ ... }': A
- 58..68 'A { x: 0 }': A
- 65..66 '0': u32
- 87..121 '{ ...a.x; }': ()
- 97..98 'a': A
- 101..107 'A::new': fn new() -> A
- 101..109 'A::new()': A
- 115..116 'a': A
- 115..118 'a.x': u32
- "#]],
- );
-}
-
-#[test]
-fn infer_associated_method_enum() {
- check_infer(
- r#"
- enum A { B, C }
-
- impl A {
- pub fn b() -> A {
- A::B
- }
- pub fn c() -> A {
- A::C
- }
- }
- fn test() {
- let a = A::b();
- a;
- let c = A::c();
- c;
- }
- "#,
- expect![[r#"
- 46..66 '{ ... }': A
- 56..60 'A::B': A
- 87..107 '{ ... }': A
- 97..101 'A::C': A
- 120..177 '{ ... c; }': ()
- 130..131 'a': A
- 134..138 'A::b': fn b() -> A
- 134..140 'A::b()': A
- 146..147 'a': A
- 157..158 'c': A
- 161..165 'A::c': fn c() -> A
- 161..167 'A::c()': A
- 173..174 'c': A
- "#]],
- );
-}
-
-#[test]
-fn infer_associated_method_with_modules() {
- check_infer(
- r#"
- mod a {
- struct A;
- impl A { pub fn thing() -> A { A {} }}
- }
-
- mod b {
- struct B;
- impl B { pub fn thing() -> u32 { 99 }}
-
- mod c {
- struct C;
- impl C { pub fn thing() -> C { C {} }}
- }
- }
- use b::c;
-
- fn test() {
- let x = a::A::thing();
- let y = b::B::thing();
- let z = c::C::thing();
- }
- "#,
- expect![[r#"
- 55..63 '{ A {} }': A
- 57..61 'A {}': A
- 125..131 '{ 99 }': u32
- 127..129 '99': u32
- 201..209 '{ C {} }': C
- 203..207 'C {}': C
- 240..324 '{ ...g(); }': ()
- 250..251 'x': A
- 254..265 'a::A::thing': fn thing() -> A
- 254..267 'a::A::thing()': A
- 277..278 'y': u32
- 281..292 'b::B::thing': fn thing() -> u32
- 281..294 'b::B::thing()': u32
- 304..305 'z': C
- 308..319 'c::C::thing': fn thing() -> C
- 308..321 'c::C::thing()': C
- "#]],
- );
-}
-
-#[test]
-fn infer_associated_method_generics() {
- check_infer(
- r#"
- struct Gen<T> {
- val: T
- }
-
- impl<T> Gen<T> {
- pub fn make(val: T) -> Gen<T> {
- Gen { val }
- }
- }
-
- fn test() {
- let a = Gen::make(0u32);
- }
- "#,
- expect![[r#"
- 63..66 'val': T
- 81..108 '{ ... }': Gen<T>
- 91..102 'Gen { val }': Gen<T>
- 97..100 'val': T
- 122..154 '{ ...32); }': ()
- 132..133 'a': Gen<u32>
- 136..145 'Gen::make': fn make<u32>(u32) -> Gen<u32>
- 136..151 'Gen::make(0u32)': Gen<u32>
- 146..150 '0u32': u32
- "#]],
- );
-}
-
-#[test]
-fn infer_associated_method_generics_without_args() {
- check_infer(
- r#"
- struct Gen<T> {
- val: T
- }
-
- impl<T> Gen<T> {
- pub fn make() -> Gen<T> {
- loop { }
- }
- }
-
- fn test() {
- let a = Gen::<u32>::make();
- }
- "#,
- expect![[r#"
- 75..99 '{ ... }': Gen<T>
- 85..93 'loop { }': !
- 90..93 '{ }': ()
- 113..148 '{ ...e(); }': ()
- 123..124 'a': Gen<u32>
- 127..143 'Gen::<...::make': fn make<u32>() -> Gen<u32>
- 127..145 'Gen::<...make()': Gen<u32>
- "#]],
- );
-}
-
-#[test]
-fn infer_associated_method_generics_2_type_params_without_args() {
- check_infer(
- r#"
- struct Gen<T, U> {
- val: T,
- val2: U,
- }
-
- impl<T> Gen<u32, T> {
- pub fn make() -> Gen<u32,T> {
- loop { }
- }
- }
-
- fn test() {
- let a = Gen::<u32, u64>::make();
- }
- "#,
- expect![[r#"
- 101..125 '{ ... }': Gen<u32, T>
- 111..119 'loop { }': !
- 116..119 '{ }': ()
- 139..179 '{ ...e(); }': ()
- 149..150 'a': Gen<u32, u64>
- 153..174 'Gen::<...::make': fn make<u64>() -> Gen<u32, u64>
- 153..176 'Gen::<...make()': Gen<u32, u64>
- "#]],
- );
-}
-
-#[test]
-fn cross_crate_associated_method_call() {
- check_types(
- r#"
-//- /main.rs crate:main deps:other_crate
-fn test() {
- let x = other_crate::foo::S::thing();
- x;
-} //^ i128
-
-//- /lib.rs crate:other_crate
-mod foo {
- struct S;
- impl S {
- fn thing() -> i128 {}
- }
-}
-"#,
- );
-}
-
-#[test]
-fn infer_trait_method_simple() {
- // the trait implementation is intentionally incomplete -- it shouldn't matter
- check_infer(
- r#"
- trait Trait1 {
- fn method(&self) -> u32;
- }
- struct S1;
- impl Trait1 for S1 {}
- trait Trait2 {
- fn method(&self) -> i128;
- }
- struct S2;
- impl Trait2 for S2 {}
- fn test() {
- S1.method(); // -> u32
- S2.method(); // -> i128
- }
- "#,
- expect![[r#"
- 30..34 'self': &Self
- 109..113 'self': &Self
- 169..227 '{ ...i128 }': ()
- 175..177 'S1': S1
- 175..186 'S1.method()': u32
- 202..204 'S2': S2
- 202..213 'S2.method()': i128
- "#]],
- );
-}
-
-#[test]
-fn infer_trait_method_scoped() {
- // the trait implementation is intentionally incomplete -- it shouldn't matter
- check_infer(
- r#"
- struct S;
- mod foo {
- pub trait Trait1 {
- fn method(&self) -> u32;
- }
- impl Trait1 for super::S {}
- }
- mod bar {
- pub trait Trait2 {
- fn method(&self) -> i128;
- }
- impl Trait2 for super::S {}
- }
-
- mod foo_test {
- use super::S;
- use super::foo::Trait1;
- fn test() {
- S.method(); // -> u32
- }
- }
-
- mod bar_test {
- use super::S;
- use super::bar::Trait2;
- fn test() {
- S.method(); // -> i128
- }
- }
- "#,
- expect![[r#"
- 62..66 'self': &Self
- 168..172 'self': &Self
- 299..336 '{ ... }': ()
- 309..310 'S': S
- 309..319 'S.method()': u32
- 415..453 '{ ... }': ()
- 425..426 'S': S
- 425..435 'S.method()': i128
- "#]],
- );
-}
-
-#[test]
-fn infer_trait_method_generic_1() {
- // the trait implementation is intentionally incomplete -- it shouldn't matter
- check_infer(
- r#"
- trait Trait<T> {
- fn method(&self) -> T;
- }
- struct S;
- impl Trait<u32> for S {}
- fn test() {
- S.method();
- }
- "#,
- expect![[r#"
- 32..36 'self': &Self
- 91..110 '{ ...d(); }': ()
- 97..98 'S': S
- 97..107 'S.method()': u32
- "#]],
- );
-}
-
-#[test]
-fn infer_trait_method_generic_more_params() {
- // the trait implementation is intentionally incomplete -- it shouldn't matter
- check_infer(
- r#"
- trait Trait<T1, T2, T3> {
- fn method1(&self) -> (T1, T2, T3);
- fn method2(&self) -> (T3, T2, T1);
- }
- struct S1;
- impl Trait<u8, u16, u32> for S1 {}
- struct S2;
- impl<T> Trait<i8, i16, T> for S2 {}
- fn test() {
- S1.method1(); // u8, u16, u32
- S1.method2(); // u32, u16, u8
- S2.method1(); // i8, i16, {unknown}
- S2.method2(); // {unknown}, i16, i8
- }
- "#,
- expect![[r#"
- 42..46 'self': &Self
- 81..85 'self': &Self
- 209..360 '{ ..., i8 }': ()
- 215..217 'S1': S1
- 215..227 'S1.method1()': (u8, u16, u32)
- 249..251 'S1': S1
- 249..261 'S1.method2()': (u32, u16, u8)
- 283..285 'S2': S2
- 283..295 'S2.method1()': (i8, i16, {unknown})
- 323..325 'S2': S2
- 323..335 'S2.method2()': ({unknown}, i16, i8)
- "#]],
- );
-}
-
-#[test]
-fn infer_trait_method_generic_2() {
- // the trait implementation is intentionally incomplete -- it shouldn't matter
- check_infer(
- r#"
- trait Trait<T> {
- fn method(&self) -> T;
- }
- struct S<T>(T);
- impl<U> Trait<U> for S<U> {}
- fn test() {
- S(1u32).method();
- }
- "#,
- expect![[r#"
- 32..36 'self': &Self
- 101..126 '{ ...d(); }': ()
- 107..108 'S': S<u32>(u32) -> S<u32>
- 107..114 'S(1u32)': S<u32>
- 107..123 'S(1u32...thod()': u32
- 109..113 '1u32': u32
- "#]],
- );
-}
-
-#[test]
-fn infer_trait_assoc_method() {
- check_infer(
- r#"
- trait Default {
- fn default() -> Self;
- }
- struct S;
- impl Default for S {}
- fn test() {
- let s1: S = Default::default();
- let s2 = S::default();
- let s3 = <S as Default>::default();
- }
- "#,
- expect![[r#"
- 86..192 '{ ...t(); }': ()
- 96..98 's1': S
- 104..120 'Defaul...efault': fn default<S>() -> S
- 104..122 'Defaul...ault()': S
- 132..134 's2': S
- 137..147 'S::default': fn default<S>() -> S
- 137..149 'S::default()': S
- 159..161 's3': S
- 164..187 '<S as ...efault': fn default<S>() -> S
- 164..189 '<S as ...ault()': S
- "#]],
- );
-}
-
-#[test]
-fn infer_trait_assoc_method_generics_1() {
- check_infer(
- r#"
- trait Trait<T> {
- fn make() -> T;
- }
- struct S;
- impl Trait<u32> for S {}
- struct G<T>;
- impl<T> Trait<T> for G<T> {}
- fn test() {
- let a = S::make();
- let b = G::<u64>::make();
- let c: f64 = G::make();
- }
- "#,
- expect![[r#"
- 126..210 '{ ...e(); }': ()
- 136..137 'a': u32
- 140..147 'S::make': fn make<S, u32>() -> u32
- 140..149 'S::make()': u32
- 159..160 'b': u64
- 163..177 'G::<u64>::make': fn make<G<u64>, u64>() -> u64
- 163..179 'G::<u6...make()': u64
- 189..190 'c': f64
- 198..205 'G::make': fn make<G<f64>, f64>() -> f64
- 198..207 'G::make()': f64
- "#]],
- );
-}
-
-#[test]
-fn infer_trait_assoc_method_generics_2() {
- check_infer(
- r#"
- trait Trait<T> {
- fn make<U>() -> (T, U);
- }
- struct S;
- impl Trait<u32> for S {}
- struct G<T>;
- impl<T> Trait<T> for G<T> {}
- fn test() {
- let a = S::make::<i64>();
- let b: (_, i64) = S::make();
- let c = G::<u32>::make::<i64>();
- let d: (u32, _) = G::make::<i64>();
- let e: (u32, i64) = G::make();
- }
- "#,
- expect![[r#"
- 134..312 '{ ...e(); }': ()
- 144..145 'a': (u32, i64)
- 148..162 'S::make::<i64>': fn make<S, u32, i64>() -> (u32, i64)
- 148..164 'S::mak...i64>()': (u32, i64)
- 174..175 'b': (u32, i64)
- 188..195 'S::make': fn make<S, u32, i64>() -> (u32, i64)
- 188..197 'S::make()': (u32, i64)
- 207..208 'c': (u32, i64)
- 211..232 'G::<u3...:<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64)
- 211..234 'G::<u3...i64>()': (u32, i64)
- 244..245 'd': (u32, i64)
- 258..272 'G::make::<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64)
- 258..274 'G::mak...i64>()': (u32, i64)
- 284..285 'e': (u32, i64)
- 300..307 'G::make': fn make<G<u32>, u32, i64>() -> (u32, i64)
- 300..309 'G::make()': (u32, i64)
- "#]],
- );
-}
-
-#[test]
-fn infer_trait_assoc_method_generics_3() {
- check_infer(
- r#"
- trait Trait<T> {
- fn make() -> (Self, T);
- }
- struct S<T>;
- impl Trait<i64> for S<i32> {}
- fn test() {
- let a = S::make();
- }
- "#,
- expect![[r#"
- 100..126 '{ ...e(); }': ()
- 110..111 'a': (S<i32>, i64)
- 114..121 'S::make': fn make<S<i32>, i64>() -> (S<i32>, i64)
- 114..123 'S::make()': (S<i32>, i64)
- "#]],
- );
-}
-
-#[test]
-fn infer_trait_assoc_method_generics_4() {
- check_infer(
- r#"
- trait Trait<T> {
- fn make() -> (Self, T);
- }
- struct S<T>;
- impl Trait<i64> for S<u64> {}
- impl Trait<i32> for S<u32> {}
- fn test() {
- let a: (S<u64>, _) = S::make();
- let b: (_, i32) = S::make();
- }
- "#,
- expect![[r#"
- 130..202 '{ ...e(); }': ()
- 140..141 'a': (S<u64>, i64)
- 157..164 'S::make': fn make<S<u64>, i64>() -> (S<u64>, i64)
- 157..166 'S::make()': (S<u64>, i64)
- 176..177 'b': (S<u32>, i32)
- 190..197 'S::make': fn make<S<u32>, i32>() -> (S<u32>, i32)
- 190..199 'S::make()': (S<u32>, i32)
- "#]],
- );
-}
-
-#[test]
-fn infer_trait_assoc_method_generics_5() {
- check_infer(
- r#"
- trait Trait<T> {
- fn make<U>() -> (Self, T, U);
- }
- struct S<T>;
- impl Trait<i64> for S<u64> {}
- fn test() {
- let a = <S as Trait<i64>>::make::<u8>();
- let b: (S<u64>, _, _) = Trait::<i64>::make::<u8>();
- }
- "#,
- expect![[r#"
- 106..210 '{ ...>(); }': ()
- 116..117 'a': (S<u64>, i64, u8)
- 120..149 '<S as ...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
- 120..151 '<S as ...<u8>()': (S<u64>, i64, u8)
- 161..162 'b': (S<u64>, i64, u8)
- 181..205 'Trait:...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
- 181..207 'Trait:...<u8>()': (S<u64>, i64, u8)
- "#]],
- );
-}
-
-#[test]
-fn infer_call_trait_method_on_generic_param_1() {
- check_infer(
- r#"
- trait Trait {
- fn method(&self) -> u32;
- }
- fn test<T: Trait>(t: T) {
- t.method();
- }
- "#,
- expect![[r#"
- 29..33 'self': &Self
- 63..64 't': T
- 69..88 '{ ...d(); }': ()
- 75..76 't': T
- 75..85 't.method()': u32
- "#]],
- );
-}
-
-#[test]
-fn infer_call_trait_method_on_generic_param_2() {
- check_infer(
- r#"
- trait Trait<T> {
- fn method(&self) -> T;
- }
- fn test<U, T: Trait<U>>(t: T) {
- t.method();
- }
- "#,
- expect![[r#"
- 32..36 'self': &Self
- 70..71 't': T
- 76..95 '{ ...d(); }': ()
- 82..83 't': T
- 82..92 't.method()': U
- "#]],
- );
-}
-
-#[test]
-fn infer_with_multiple_trait_impls() {
- check_infer(
- r#"
- trait Into<T> {
- fn into(self) -> T;
- }
- struct S;
- impl Into<u32> for S {}
- impl Into<u64> for S {}
- fn test() {
- let x: u32 = S.into();
- let y: u64 = S.into();
- let z = Into::<u64>::into(S);
- }
- "#,
- expect![[r#"
- 28..32 'self': Self
- 110..201 '{ ...(S); }': ()
- 120..121 'x': u32
- 129..130 'S': S
- 129..137 'S.into()': u32
- 147..148 'y': u64
- 156..157 'S': S
- 156..164 'S.into()': u64
- 174..175 'z': u64
- 178..195 'Into::...::into': fn into<S, u64>(S) -> u64
- 178..198 'Into::...nto(S)': u64
- 196..197 'S': S
- "#]],
- );
-}
-
-#[test]
-fn method_resolution_unify_impl_self_type() {
- check_types(
- r#"
-struct S<T>;
-impl S<u32> { fn foo(&self) -> u8 {} }
-impl S<i32> { fn foo(&self) -> i8 {} }
-fn test() { (S::<u32>.foo(), S::<i32>.foo()); }
- //^ (u8, i8)
-"#,
- );
-}
-
-#[test]
-fn method_resolution_trait_before_autoref() {
- check_types(
- r#"
-trait Trait { fn foo(self) -> u128; }
-struct S;
-impl S { fn foo(&self) -> i8 { 0 } }
-impl Trait for S { fn foo(self) -> u128 { 0 } }
-fn test() { S.foo(); }
- //^ u128
-"#,
- );
-}
-
-#[test]
-fn method_resolution_by_value_before_autoref() {
- check_types(
- r#"
-trait Clone { fn clone(&self) -> Self; }
-struct S;
-impl Clone for S {}
-impl Clone for &S {}
-fn test() { (S.clone(), (&S).clone(), (&&S).clone()); }
- //^ (S, S, &S)
-"#,
- );
-}
-
-#[test]
-fn method_resolution_trait_before_autoderef() {
- check_types(
- r#"
-trait Trait { fn foo(self) -> u128; }
-struct S;
-impl S { fn foo(self) -> i8 { 0 } }
-impl Trait for &S { fn foo(self) -> u128 { 0 } }
-fn test() { (&S).foo(); }
- //^ u128
-"#,
- );
-}
-
-#[test]
-fn method_resolution_impl_before_trait() {
- check_types(
- r#"
-trait Trait { fn foo(self) -> u128; }
-struct S;
-impl S { fn foo(self) -> i8 { 0 } }
-impl Trait for S { fn foo(self) -> u128 { 0 } }
-fn test() { S.foo(); }
- //^ i8
-"#,
- );
-}
-
-#[test]
-fn method_resolution_impl_ref_before_trait() {
- check_types(
- r#"
-trait Trait { fn foo(self) -> u128; }
-struct S;
-impl S { fn foo(&self) -> i8 { 0 } }
-impl Trait for &S { fn foo(self) -> u128 { 0 } }
-fn test() { S.foo(); }
- //^ i8
-"#,
- );
-}
-
-#[test]
-fn method_resolution_trait_autoderef() {
- check_types(
- r#"
-trait Trait { fn foo(self) -> u128; }
-struct S;
-impl Trait for S { fn foo(self) -> u128 { 0 } }
-fn test() { (&S).foo(); }
- //^ u128
-"#,
- );
-}
-
-#[test]
-fn method_resolution_unsize_array() {
- check_types(
- r#"
-#[lang = "slice"]
-impl<T> [T] {
- fn len(&self) -> usize { loop {} }
-}
-fn test() {
- let a = [1, 2, 3];
- a.len();
-} //^ usize
-"#,
- );
-}
-
-#[test]
-fn method_resolution_trait_from_prelude() {
- check_types(
- r#"
-//- /main.rs crate:main deps:other_crate
-struct S;
-impl Clone for S {}
-
-fn test() {
- S.clone();
- //^ S
-}
-
-//- /lib.rs crate:other_crate
-#[prelude_import] use foo::*;
-
-mod foo {
- trait Clone {
- fn clone(&self) -> Self;
- }
-}
-"#,
- );
-}
-
-#[test]
-fn method_resolution_where_clause_for_unknown_trait() {
- // The blanket impl currently applies because we ignore the unresolved where clause
- check_types(
- r#"
-trait Trait { fn foo(self) -> u128; }
-struct S;
-impl<T> Trait for T where T: UnknownTrait {}
-fn test() { (&S).foo(); }
- //^ u128
-"#,
- );
-}
-
-#[test]
-fn method_resolution_where_clause_not_met() {
- // The blanket impl shouldn't apply because we can't prove S: Clone
- // This is also to make sure that we don't resolve to the foo method just
- // because that's the only method named foo we can find, which would make
- // the below tests not work
- check_types(
- r#"
-trait Clone {}
-trait Trait { fn foo(self) -> u128; }
-struct S;
-impl<T> Trait for T where T: Clone {}
-fn test() { (&S).foo(); }
- //^ {unknown}
-"#,
- );
-}
-
-#[test]
-fn method_resolution_where_clause_inline_not_met() {
- // The blanket impl shouldn't apply because we can't prove S: Clone
- check_types(
- r#"
-trait Clone {}
-trait Trait { fn foo(self) -> u128; }
-struct S;
-impl<T: Clone> Trait for T {}
-fn test() { (&S).foo(); }
- //^ {unknown}
-"#,
- );
-}
-
-#[test]
-fn method_resolution_where_clause_1() {
- check_types(
- r#"
-trait Clone {}
-trait Trait { fn foo(self) -> u128; }
-struct S;
-impl Clone for S {}
-impl<T> Trait for T where T: Clone {}
-fn test() { S.foo(); }
- //^ u128
-"#,
- );
-}
-
-#[test]
-fn method_resolution_where_clause_2() {
- check_types(
- r#"
-trait Into<T> { fn into(self) -> T; }
-trait From<T> { fn from(other: T) -> Self; }
-struct S1;
-struct S2;
-impl From<S2> for S1 {}
-impl<T, U> Into<U> for T where U: From<T> {}
-fn test() { S2.into(); }
- //^ {unknown}
-"#,
- );
-}
-
-#[test]
-fn method_resolution_where_clause_inline() {
- check_types(
- r#"
-trait Into<T> { fn into(self) -> T; }
-trait From<T> { fn from(other: T) -> Self; }
-struct S1;
-struct S2;
-impl From<S2> for S1 {}
-impl<T, U: From<T>> Into<U> for T {}
-fn test() { S2.into(); }
- //^ {unknown}
-"#,
- );
-}
-
-#[test]
-fn method_resolution_overloaded_method() {
- test_utils::mark::check!(impl_self_type_match_without_receiver);
- check_types(
- r#"
-struct Wrapper<T>(T);
-struct Foo<T>(T);
-struct Bar<T>(T);
-
-impl<T> Wrapper<Foo<T>> {
- pub fn new(foo_: T) -> Self {
- Wrapper(Foo(foo_))
- }
-}
-
-impl<T> Wrapper<Bar<T>> {
- pub fn new(bar_: T) -> Self {
- Wrapper(Bar(bar_))
- }
-}
-
-fn main() {
- let a = Wrapper::<Foo<f32>>::new(1.0);
- let b = Wrapper::<Bar<f32>>::new(1.0);
- (a, b);
- //^ (Wrapper<Foo<f32>>, Wrapper<Bar<f32>>)
-}
-"#,
- );
-}
-
-#[test]
-fn method_resolution_encountering_fn_type() {
- check_types(
- r#"
-//- /main.rs
-fn foo() {}
-trait FnOnce { fn call(self); }
-fn test() { foo.call(); }
- //^ {unknown}
-"#,
- );
-}
-
-#[test]
-fn method_resolution_non_parameter_type() {
- check_types(
- r#"
-mod a {
- pub trait Foo {
- fn foo(&self);
- }
-}
-
-struct Wrapper<T>(T);
-fn foo<T>(t: Wrapper<T>)
-where
- Wrapper<T>: a::Foo,
-{
- t.foo();
-} //^ {unknown}
-"#,
- );
-}
-
-#[test]
-fn method_resolution_3373() {
- check_types(
- r#"
-struct A<T>(T);
-
-impl A<i32> {
- fn from(v: i32) -> A<i32> { A(v) }
-}
-
-fn main() {
- A::from(3);
-} //^ A<i32>
-"#,
- );
-}
-
-#[test]
-fn method_resolution_slow() {
- // this can get quite slow if we set the solver size limit too high
- check_types(
- r#"
-trait SendX {}
-
-struct S1; impl SendX for S1 {}
-struct S2; impl SendX for S2 {}
-struct U1;
-
-trait Trait { fn method(self); }
-
-struct X1<A, B> {}
-impl<A, B> SendX for X1<A, B> where A: SendX, B: SendX {}
-
-struct S<B, C> {}
-
-trait FnX {}
-
-impl<B, C> Trait for S<B, C> where C: FnX, B: SendX {}
-
-fn test() { (S {}).method(); }
- //^ ()
-"#,
- );
-}
-
-#[test]
-fn dyn_trait_super_trait_not_in_scope() {
- check_infer(
- r#"
- mod m {
- pub trait SuperTrait {
- fn foo(&self) -> u32 { 0 }
- }
- }
- trait Trait: m::SuperTrait {}
-
- struct S;
- impl m::SuperTrait for S {}
- impl Trait for S {}
-
- fn test(d: &dyn Trait) {
- d.foo();
- }
- "#,
- expect![[r#"
- 51..55 'self': &Self
- 64..69 '{ 0 }': u32
- 66..67 '0': u32
- 176..177 'd': &dyn Trait
- 191..207 '{ ...o(); }': ()
- 197..198 'd': &dyn Trait
- 197..204 'd.foo()': u32
- "#]],
- );
-}
+++ /dev/null
-use expect::expect;
-
-use super::{check_infer_with_mismatches, check_types};
-
-#[test]
-fn infer_never1() {
- check_types(
- r#"
-fn test() {
- let t = return;
- t;
-} //^ !
-"#,
- );
-}
-
-#[test]
-fn infer_never2() {
- check_types(
- r#"
-fn gen<T>() -> T { loop {} }
-
-fn test() {
- let a = gen();
- if false { a } else { loop {} };
- a;
-} //^ !
-"#,
- );
-}
-
-#[test]
-fn infer_never3() {
- check_types(
- r#"
-fn gen<T>() -> T { loop {} }
-
-fn test() {
- let a = gen();
- if false { loop {} } else { a };
- a;
- //^ !
-}
-"#,
- );
-}
-
-#[test]
-fn never_type_in_generic_args() {
- check_types(
- r#"
-enum Option<T> { None, Some(T) }
-
-fn test() {
- let a = if true { Option::None } else { Option::Some(return) };
- a;
-} //^ Option<!>
-"#,
- );
-}
-
-#[test]
-fn never_type_can_be_reinferred1() {
- check_types(
- r#"
-fn gen<T>() -> T { loop {} }
-
-fn test() {
- let a = gen();
- if false { loop {} } else { a };
- a;
- //^ ()
- if false { a };
-}
-"#,
- );
-}
-
-#[test]
-fn never_type_can_be_reinferred2() {
- check_types(
- r#"
-enum Option<T> { None, Some(T) }
-
-fn test() {
- let a = if true { Option::None } else { Option::Some(return) };
- a;
- //^ Option<i32>
- match 42 {
- 42 => a,
- _ => Option::Some(42),
- };
-}
-"#,
- );
-}
-
-#[test]
-fn never_type_can_be_reinferred3() {
- check_types(
- r#"
-enum Option<T> { None, Some(T) }
-
-fn test() {
- let a = if true { Option::None } else { Option::Some(return) };
- a;
- //^ Option<&str>
- match 42 {
- 42 => a,
- _ => Option::Some("str"),
- };
-}
-"#,
- );
-}
-
-#[test]
-fn match_no_arm() {
- check_types(
- r#"
-enum Void {}
-
-fn test(a: Void) {
- let t = match a {};
- t;
-} //^ !
-"#,
- );
-}
-
-#[test]
-fn match_unknown_arm() {
- check_types(
- r#"
-fn test(a: Option) {
- let t = match 0 {
- _ => unknown,
- };
- t;
-} //^ {unknown}
-"#,
- );
-}
-
-#[test]
-fn if_never() {
- check_types(
- r#"
-fn test() {
- let i = if true {
- loop {}
- } else {
- 3.0
- };
- i;
-} //^ f64
-"#,
- );
-}
-
-#[test]
-fn if_else_never() {
- check_types(
- r#"
-fn test(input: bool) {
- let i = if input {
- 2.0
- } else {
- return
- };
- i;
-} //^ f64
-"#,
- );
-}
-
-#[test]
-fn match_first_arm_never() {
- check_types(
- r#"
-fn test(a: i32) {
- let i = match a {
- 1 => return,
- 2 => 2.0,
- 3 => loop {},
- _ => 3.0,
- };
- i;
-} //^ f64
-"#,
- );
-}
-
-#[test]
-fn match_second_arm_never() {
- check_types(
- r#"
-fn test(a: i32) {
- let i = match a {
- 1 => 3.0,
- 2 => loop {},
- 3 => 3.0,
- _ => return,
- };
- i;
-} //^ f64
-"#,
- );
-}
-
-#[test]
-fn match_all_arms_never() {
- check_types(
- r#"
-fn test(a: i32) {
- let i = match a {
- 2 => return,
- _ => loop {},
- };
- i;
-} //^ !
-"#,
- );
-}
-
-#[test]
-fn match_no_never_arms() {
- check_types(
- r#"
-fn test(a: i32) {
- let i = match a {
- 2 => 2.0,
- _ => 3.0,
- };
- i;
-} //^ f64
-"#,
- );
-}
-
-#[test]
-fn diverging_expression_1() {
- check_infer_with_mismatches(
- r"
- //- /main.rs
- fn test1() {
- let x: u32 = return;
- }
- fn test2() {
- let x: u32 = { return; };
- }
- fn test3() {
- let x: u32 = loop {};
- }
- fn test4() {
- let x: u32 = { loop {} };
- }
- fn test5() {
- let x: u32 = { if true { loop {}; } else { loop {}; } };
- }
- fn test6() {
- let x: u32 = { let y: u32 = { loop {}; }; };
- }
- ",
- expect![[r"
- 11..39 '{ ...urn; }': ()
- 21..22 'x': u32
- 30..36 'return': !
- 51..84 '{ ...; }; }': ()
- 61..62 'x': u32
- 70..81 '{ return; }': u32
- 72..78 'return': !
- 96..125 '{ ... {}; }': ()
- 106..107 'x': u32
- 115..122 'loop {}': !
- 120..122 '{}': ()
- 137..170 '{ ...} }; }': ()
- 147..148 'x': u32
- 156..167 '{ loop {} }': u32
- 158..165 'loop {}': !
- 163..165 '{}': ()
- 182..246 '{ ...} }; }': ()
- 192..193 'x': u32
- 201..243 '{ if t...}; } }': u32
- 203..241 'if tru... {}; }': u32
- 206..210 'true': bool
- 211..223 '{ loop {}; }': u32
- 213..220 'loop {}': !
- 218..220 '{}': ()
- 229..241 '{ loop {}; }': u32
- 231..238 'loop {}': !
- 236..238 '{}': ()
- 258..310 '{ ...; }; }': ()
- 268..269 'x': u32
- 277..307 '{ let ...; }; }': u32
- 283..284 'y': u32
- 292..304 '{ loop {}; }': u32
- 294..301 'loop {}': !
- 299..301 '{}': ()
- "]],
- );
-}
-
-#[test]
-fn diverging_expression_2() {
- check_infer_with_mismatches(
- r#"
- //- /main.rs
- fn test1() {
- // should give type mismatch
- let x: u32 = { loop {}; "foo" };
- }
- "#,
- expect![[r#"
- 11..84 '{ ..." }; }': ()
- 54..55 'x': u32
- 63..81 '{ loop...foo" }': &str
- 65..72 'loop {}': !
- 70..72 '{}': ()
- 74..79 '"foo"': &str
- 63..81: expected u32, got &str
- 74..79: expected u32, got &str
- "#]],
- );
-}
-
-#[test]
-fn diverging_expression_3_break() {
- check_infer_with_mismatches(
- r"
- //- /main.rs
- fn test1() {
- // should give type mismatch
- let x: u32 = { loop { break; } };
- }
- fn test2() {
- // should give type mismatch
- let x: u32 = { for a in b { break; }; };
- // should give type mismatch as well
- let x: u32 = { for a in b {}; };
- // should give type mismatch as well
- let x: u32 = { for a in b { return; }; };
- }
- fn test3() {
- // should give type mismatch
- let x: u32 = { while true { break; }; };
- // should give type mismatch as well -- there's an implicit break, even if it's never hit
- let x: u32 = { while true {}; };
- // should give type mismatch as well
- let x: u32 = { while true { return; }; };
- }
- ",
- expect![[r"
- 11..85 '{ ...} }; }': ()
- 54..55 'x': u32
- 63..82 '{ loop...k; } }': ()
- 65..80 'loop { break; }': ()
- 70..80 '{ break; }': ()
- 72..77 'break': !
- 63..82: expected u32, got ()
- 65..80: expected u32, got ()
- 97..343 '{ ...; }; }': ()
- 140..141 'x': u32
- 149..175 '{ for ...; }; }': ()
- 151..172 'for a ...eak; }': ()
- 155..156 'a': {unknown}
- 160..161 'b': {unknown}
- 162..172 '{ break; }': ()
- 164..169 'break': !
- 226..227 'x': u32
- 235..253 '{ for ... {}; }': ()
- 237..250 'for a in b {}': ()
- 241..242 'a': {unknown}
- 246..247 'b': {unknown}
- 248..250 '{}': ()
- 304..305 'x': u32
- 313..340 '{ for ...; }; }': ()
- 315..337 'for a ...urn; }': ()
- 319..320 'a': {unknown}
- 324..325 'b': {unknown}
- 326..337 '{ return; }': ()
- 328..334 'return': !
- 149..175: expected u32, got ()
- 235..253: expected u32, got ()
- 313..340: expected u32, got ()
- 355..654 '{ ...; }; }': ()
- 398..399 'x': u32
- 407..433 '{ whil...; }; }': ()
- 409..430 'while ...eak; }': ()
- 415..419 'true': bool
- 420..430 '{ break; }': ()
- 422..427 'break': !
- 537..538 'x': u32
- 546..564 '{ whil... {}; }': ()
- 548..561 'while true {}': ()
- 554..558 'true': bool
- 559..561 '{}': ()
- 615..616 'x': u32
- 624..651 '{ whil...; }; }': ()
- 626..648 'while ...urn; }': ()
- 632..636 'true': bool
- 637..648 '{ return; }': ()
- 639..645 'return': !
- 407..433: expected u32, got ()
- 546..564: expected u32, got ()
- 624..651: expected u32, got ()
- "]],
- );
-}
+++ /dev/null
-use expect::expect;
-use test_utils::mark;
-
-use super::{check_infer, check_infer_with_mismatches};
-
-#[test]
-fn infer_pattern() {
- check_infer(
- r#"
- fn test(x: &i32) {
- let y = x;
- let &z = x;
- let a = z;
- let (c, d) = (1, "hello");
-
- for (e, f) in some_iter {
- let g = e;
- }
-
- if let [val] = opt {
- let h = val;
- }
-
- let lambda = |a: u64, b, c: i32| { a + b; c };
-
- let ref ref_to_x = x;
- let mut mut_x = x;
- let ref mut mut_ref_to_x = x;
- let k = mut_ref_to_x;
- }
- "#,
- expect![[r#"
- 8..9 'x': &i32
- 17..368 '{ ...o_x; }': ()
- 27..28 'y': &i32
- 31..32 'x': &i32
- 42..44 '&z': &i32
- 43..44 'z': i32
- 47..48 'x': &i32
- 58..59 'a': i32
- 62..63 'z': i32
- 73..79 '(c, d)': (i32, &str)
- 74..75 'c': i32
- 77..78 'd': &str
- 82..94 '(1, "hello")': (i32, &str)
- 83..84 '1': i32
- 86..93 '"hello"': &str
- 101..151 'for (e... }': ()
- 105..111 '(e, f)': ({unknown}, {unknown})
- 106..107 'e': {unknown}
- 109..110 'f': {unknown}
- 115..124 'some_iter': {unknown}
- 125..151 '{ ... }': ()
- 139..140 'g': {unknown}
- 143..144 'e': {unknown}
- 157..204 'if let... }': ()
- 164..169 '[val]': [{unknown}]
- 165..168 'val': {unknown}
- 172..175 'opt': [{unknown}]
- 176..204 '{ ... }': ()
- 190..191 'h': {unknown}
- 194..197 'val': {unknown}
- 214..220 'lambda': |u64, u64, i32| -> i32
- 223..255 '|a: u6...b; c }': |u64, u64, i32| -> i32
- 224..225 'a': u64
- 232..233 'b': u64
- 235..236 'c': i32
- 243..255 '{ a + b; c }': i32
- 245..246 'a': u64
- 245..250 'a + b': u64
- 249..250 'b': u64
- 252..253 'c': i32
- 266..278 'ref ref_to_x': &&i32
- 281..282 'x': &i32
- 292..301 'mut mut_x': &i32
- 304..305 'x': &i32
- 315..335 'ref mu...f_to_x': &mut &i32
- 338..339 'x': &i32
- 349..350 'k': &mut &i32
- 353..365 'mut_ref_to_x': &mut &i32
- "#]],
- );
-}
-
-#[test]
-fn infer_literal_pattern() {
- check_infer_with_mismatches(
- r#"
- fn any<T>() -> T { loop {} }
- fn test(x: &i32) {
- if let "foo" = any() {}
- if let 1 = any() {}
- if let 1u32 = any() {}
- if let 1f32 = any() {}
- if let 1.0 = any() {}
- if let true = any() {}
- }
- "#,
- expect![[r#"
- 17..28 '{ loop {} }': T
- 19..26 'loop {}': !
- 24..26 '{}': ()
- 37..38 'x': &i32
- 46..208 '{ ...) {} }': ()
- 52..75 'if let...y() {}': ()
- 59..64 '"foo"': &str
- 59..64 '"foo"': &str
- 67..70 'any': fn any<&str>() -> &str
- 67..72 'any()': &str
- 73..75 '{}': ()
- 80..99 'if let...y() {}': ()
- 87..88 '1': i32
- 87..88 '1': i32
- 91..94 'any': fn any<i32>() -> i32
- 91..96 'any()': i32
- 97..99 '{}': ()
- 104..126 'if let...y() {}': ()
- 111..115 '1u32': u32
- 111..115 '1u32': u32
- 118..121 'any': fn any<u32>() -> u32
- 118..123 'any()': u32
- 124..126 '{}': ()
- 131..153 'if let...y() {}': ()
- 138..142 '1f32': f32
- 138..142 '1f32': f32
- 145..148 'any': fn any<f32>() -> f32
- 145..150 'any()': f32
- 151..153 '{}': ()
- 158..179 'if let...y() {}': ()
- 165..168 '1.0': f64
- 165..168 '1.0': f64
- 171..174 'any': fn any<f64>() -> f64
- 171..176 'any()': f64
- 177..179 '{}': ()
- 184..206 'if let...y() {}': ()
- 191..195 'true': bool
- 191..195 'true': bool
- 198..201 'any': fn any<bool>() -> bool
- 198..203 'any()': bool
- 204..206 '{}': ()
- "#]],
- );
-}
-
-#[test]
-fn infer_range_pattern() {
- check_infer_with_mismatches(
- r#"
- fn test(x: &i32) {
- if let 1..76 = 2u32 {}
- if let 1..=76 = 2u32 {}
- }
- "#,
- expect![[r#"
- 8..9 'x': &i32
- 17..75 '{ ...2 {} }': ()
- 23..45 'if let...u32 {}': ()
- 30..35 '1..76': u32
- 38..42 '2u32': u32
- 43..45 '{}': ()
- 50..73 'if let...u32 {}': ()
- 57..63 '1..=76': u32
- 66..70 '2u32': u32
- 71..73 '{}': ()
- "#]],
- );
-}
-
-#[test]
-fn infer_pattern_match_ergonomics() {
- check_infer(
- r#"
- struct A<T>(T);
-
- fn test() {
- let A(n) = &A(1);
- let A(n) = &mut A(1);
- }
- "#,
- expect![[r#"
- 27..78 '{ ...(1); }': ()
- 37..41 'A(n)': A<i32>
- 39..40 'n': &i32
- 44..49 '&A(1)': &A<i32>
- 45..46 'A': A<i32>(i32) -> A<i32>
- 45..49 'A(1)': A<i32>
- 47..48 '1': i32
- 59..63 'A(n)': A<i32>
- 61..62 'n': &mut i32
- 66..75 '&mut A(1)': &mut A<i32>
- 71..72 'A': A<i32>(i32) -> A<i32>
- 71..75 'A(1)': A<i32>
- 73..74 '1': i32
- "#]],
- );
-}
-
-#[test]
-fn infer_pattern_match_ergonomics_ref() {
- mark::check!(match_ergonomics_ref);
- check_infer(
- r#"
- fn test() {
- let v = &(1, &2);
- let (_, &w) = v;
- }
- "#,
- expect![[r#"
- 10..56 '{ ...= v; }': ()
- 20..21 'v': &(i32, &i32)
- 24..32 '&(1, &2)': &(i32, &i32)
- 25..32 '(1, &2)': (i32, &i32)
- 26..27 '1': i32
- 29..31 '&2': &i32
- 30..31 '2': i32
- 42..49 '(_, &w)': (i32, &i32)
- 43..44 '_': i32
- 46..48 '&w': &i32
- 47..48 'w': i32
- 52..53 'v': &(i32, &i32)
- "#]],
- );
-}
-
-#[test]
-fn infer_pattern_match_slice() {
- check_infer(
- r#"
- fn test() {
- let slice: &[f64] = &[0.0];
- match slice {
- &[] => {},
- &[a] => {
- a;
- },
- &[b, c] => {
- b;
- c;
- }
- _ => {}
- }
- }
- "#,
- expect![[r#"
- 10..209 '{ ... } }': ()
- 20..25 'slice': &[f64]
- 36..42 '&[0.0]': &[f64; _]
- 37..42 '[0.0]': [f64; _]
- 38..41 '0.0': f64
- 48..207 'match ... }': ()
- 54..59 'slice': &[f64]
- 70..73 '&[]': &[f64]
- 71..73 '[]': [f64]
- 77..79 '{}': ()
- 89..93 '&[a]': &[f64]
- 90..93 '[a]': [f64]
- 91..92 'a': f64
- 97..123 '{ ... }': ()
- 111..112 'a': f64
- 133..140 '&[b, c]': &[f64]
- 134..140 '[b, c]': [f64]
- 135..136 'b': f64
- 138..139 'c': f64
- 144..185 '{ ... }': ()
- 158..159 'b': f64
- 173..174 'c': f64
- 194..195 '_': &[f64]
- 199..201 '{}': ()
- "#]],
- );
-}
-
-#[test]
-fn infer_pattern_match_string_literal() {
- check_infer_with_mismatches(
- r#"
- fn test() {
- let s: &str = "hello";
- match s {
- "hello" => {}
- _ => {}
- }
- }
- "#,
- expect![[r#"
- 10..98 '{ ... } }': ()
- 20..21 's': &str
- 30..37 '"hello"': &str
- 43..96 'match ... }': ()
- 49..50 's': &str
- 61..68 '"hello"': &str
- 61..68 '"hello"': &str
- 72..74 '{}': ()
- 83..84 '_': &str
- 88..90 '{}': ()
- "#]],
- );
-}
-
-#[test]
-fn infer_pattern_match_or() {
- check_infer_with_mismatches(
- r#"
- fn test() {
- let s: &str = "hello";
- match s {
- "hello" | "world" => {}
- _ => {}
- }
- }
- "#,
- expect![[r#"
- 10..108 '{ ... } }': ()
- 20..21 's': &str
- 30..37 '"hello"': &str
- 43..106 'match ... }': ()
- 49..50 's': &str
- 61..68 '"hello"': &str
- 61..68 '"hello"': &str
- 61..78 '"hello...world"': &str
- 71..78 '"world"': &str
- 71..78 '"world"': &str
- 82..84 '{}': ()
- 93..94 '_': &str
- 98..100 '{}': ()
- "#]],
- );
-}
-
-#[test]
-fn infer_pattern_match_arr() {
- check_infer(
- r#"
- fn test() {
- let arr: [f64; 2] = [0.0, 1.0];
- match arr {
- [1.0, a] => {
- a;
- },
- [b, c] => {
- b;
- c;
- }
- }
- }
- "#,
- expect![[r#"
- 10..179 '{ ... } }': ()
- 20..23 'arr': [f64; _]
- 36..46 '[0.0, 1.0]': [f64; _]
- 37..40 '0.0': f64
- 42..45 '1.0': f64
- 52..177 'match ... }': ()
- 58..61 'arr': [f64; _]
- 72..80 '[1.0, a]': [f64; _]
- 73..76 '1.0': f64
- 73..76 '1.0': f64
- 78..79 'a': f64
- 84..110 '{ ... }': ()
- 98..99 'a': f64
- 120..126 '[b, c]': [f64; _]
- 121..122 'b': f64
- 124..125 'c': f64
- 130..171 '{ ... }': ()
- 144..145 'b': f64
- 159..160 'c': f64
- "#]],
- );
-}
-
-#[test]
-fn infer_adt_pattern() {
- check_infer(
- r#"
- enum E {
- A { x: usize },
- B
- }
-
- struct S(u32, E);
-
- fn test() {
- let e = E::A { x: 3 };
-
- let S(y, z) = foo;
- let E::A { x: new_var } = e;
-
- match e {
- E::A { x } => x,
- E::B if foo => 1,
- E::B => 10,
- };
-
- let ref d @ E::A { .. } = e;
- d;
- }
- "#,
- expect![[r#"
- 67..288 '{ ... d; }': ()
- 77..78 'e': E
- 81..94 'E::A { x: 3 }': E
- 91..92 '3': usize
- 105..112 'S(y, z)': S
- 107..108 'y': u32
- 110..111 'z': E
- 115..118 'foo': S
- 128..147 'E::A {..._var }': E
- 138..145 'new_var': usize
- 150..151 'e': E
- 158..244 'match ... }': usize
- 164..165 'e': E
- 176..186 'E::A { x }': E
- 183..184 'x': usize
- 190..191 'x': usize
- 201..205 'E::B': E
- 209..212 'foo': bool
- 216..217 '1': usize
- 227..231 'E::B': E
- 235..237 '10': usize
- 255..274 'ref d ...{ .. }': &E
- 263..274 'E::A { .. }': E
- 277..278 'e': E
- 284..285 'd': &E
- "#]],
- );
-}
-
-#[test]
-fn enum_variant_through_self_in_pattern() {
- check_infer(
- r#"
- enum E {
- A { x: usize },
- B(usize),
- C
- }
-
- impl E {
- fn test() {
- match (loop {}) {
- Self::A { x } => { x; },
- Self::B(x) => { x; },
- Self::C => {},
- };
- }
- }
- "#,
- expect![[r#"
- 75..217 '{ ... }': ()
- 85..210 'match ... }': ()
- 92..99 'loop {}': !
- 97..99 '{}': ()
- 115..128 'Self::A { x }': E
- 125..126 'x': usize
- 132..138 '{ x; }': ()
- 134..135 'x': usize
- 152..162 'Self::B(x)': E
- 160..161 'x': usize
- 166..172 '{ x; }': ()
- 168..169 'x': usize
- 186..193 'Self::C': E
- 197..199 '{}': ()
- "#]],
- );
-}
-
-#[test]
-fn infer_generics_in_patterns() {
- check_infer(
- r#"
- struct A<T> {
- x: T,
- }
-
- enum Option<T> {
- Some(T),
- None,
- }
-
- fn test(a1: A<u32>, o: Option<u64>) {
- let A { x: x2 } = a1;
- let A::<i64> { x: x3 } = A { x: 1 };
- match o {
- Option::Some(t) => t,
- _ => 1,
- };
- }
- "#,
- expect![[r#"
- 78..80 'a1': A<u32>
- 90..91 'o': Option<u64>
- 106..243 '{ ... }; }': ()
- 116..127 'A { x: x2 }': A<u32>
- 123..125 'x2': u32
- 130..132 'a1': A<u32>
- 142..160 'A::<i6...: x3 }': A<i64>
- 156..158 'x3': i64
- 163..173 'A { x: 1 }': A<i64>
- 170..171 '1': i64
- 179..240 'match ... }': u64
- 185..186 'o': Option<u64>
- 197..212 'Option::Some(t)': Option<u64>
- 210..211 't': u64
- 216..217 't': u64
- 227..228 '_': Option<u64>
- 232..233 '1': u64
- "#]],
- );
-}
-
-#[test]
-fn infer_const_pattern() {
- check_infer_with_mismatches(
- r#"
- enum Option<T> { None }
- use Option::None;
- struct Foo;
- const Bar: usize = 1;
-
- fn test() {
- let a: Option<u32> = None;
- let b: Option<i64> = match a {
- None => None,
- };
- let _: () = match () { Foo => Foo }; // Expected mismatch
- let _: () = match () { Bar => Bar }; // Expected mismatch
- }
- "#,
- expect![[r#"
- 73..74 '1': usize
- 87..309 '{ ...atch }': ()
- 97..98 'a': Option<u32>
- 114..118 'None': Option<u32>
- 128..129 'b': Option<i64>
- 145..182 'match ... }': Option<i64>
- 151..152 'a': Option<u32>
- 163..167 'None': Option<u32>
- 171..175 'None': Option<i64>
- 192..193 '_': ()
- 200..223 'match ... Foo }': Foo
- 206..208 '()': ()
- 211..214 'Foo': Foo
- 218..221 'Foo': Foo
- 254..255 '_': ()
- 262..285 'match ... Bar }': usize
- 268..270 '()': ()
- 273..276 'Bar': usize
- 280..283 'Bar': usize
- 200..223: expected (), got Foo
- 262..285: expected (), got usize
- "#]],
- );
-}
-
-#[test]
-fn infer_guard() {
- check_infer(
- r#"
-struct S;
-impl S { fn foo(&self) -> bool { false } }
-
-fn main() {
- match S {
- s if s.foo() => (),
- }
-}
- "#,
- expect![[r#"
- 27..31 'self': &S
- 41..50 '{ false }': bool
- 43..48 'false': bool
- 64..115 '{ ... } }': ()
- 70..113 'match ... }': ()
- 76..77 'S': S
- 88..89 's': S
- 93..94 's': S
- 93..100 's.foo()': bool
- 104..106 '()': ()
- "#]],
- )
-}
-
-#[test]
-fn match_ergonomics_in_closure_params() {
- check_infer(
- r#"
- #[lang = "fn_once"]
- trait FnOnce<Args> {
- type Output;
- }
-
- fn foo<T, U, F: FnOnce(T) -> U>(t: T, f: F) -> U { loop {} }
-
- fn test() {
- foo(&(1, "a"), |&(x, y)| x); // normal, no match ergonomics
- foo(&(1, "a"), |(x, y)| x);
- }
- "#,
- expect![[r#"
- 93..94 't': T
- 99..100 'f': F
- 110..121 '{ loop {} }': U
- 112..119 'loop {}': !
- 117..119 '{}': ()
- 133..232 '{ ... x); }': ()
- 139..142 'foo': fn foo<&(i32, &str), i32, |&(i32, &str)| -> i32>(&(i32, &str), |&(i32, &str)| -> i32) -> i32
- 139..166 'foo(&(...y)| x)': i32
- 143..152 '&(1, "a")': &(i32, &str)
- 144..152 '(1, "a")': (i32, &str)
- 145..146 '1': i32
- 148..151 '"a"': &str
- 154..165 '|&(x, y)| x': |&(i32, &str)| -> i32
- 155..162 '&(x, y)': &(i32, &str)
- 156..162 '(x, y)': (i32, &str)
- 157..158 'x': i32
- 160..161 'y': &str
- 164..165 'x': i32
- 203..206 'foo': fn foo<&(i32, &str), &i32, |&(i32, &str)| -> &i32>(&(i32, &str), |&(i32, &str)| -> &i32) -> &i32
- 203..229 'foo(&(...y)| x)': &i32
- 207..216 '&(1, "a")': &(i32, &str)
- 208..216 '(1, "a")': (i32, &str)
- 209..210 '1': i32
- 212..215 '"a"': &str
- 218..228 '|(x, y)| x': |&(i32, &str)| -> &i32
- 219..225 '(x, y)': (i32, &str)
- 220..221 'x': &i32
- 223..224 'y': &&str
- 227..228 'x': &i32
- "#]],
- );
-}
-
-#[test]
-fn slice_tail_pattern() {
- check_infer(
- r#"
- fn foo(params: &[i32]) {
- match params {
- [head, tail @ ..] => {
- }
- }
- }
- "#,
- expect![[r#"
- 7..13 'params': &[i32]
- 23..92 '{ ... } }': ()
- 29..90 'match ... }': ()
- 35..41 'params': &[i32]
- 52..69 '[head,... @ ..]': [i32]
- 53..57 'head': &i32
- 59..68 'tail @ ..': &[i32]
- 66..68 '..': [i32]
- 73..84 '{ }': ()
- "#]],
- );
-}
+++ /dev/null
-use expect::expect;
-use test_utils::mark;
-
-use super::{check_infer, check_types};
-
-#[test]
-fn bug_484() {
- check_infer(
- r#"
- fn test() {
- let x = if true {};
- }
- "#,
- expect![[r#"
- 10..37 '{ ... {}; }': ()
- 20..21 'x': ()
- 24..34 'if true {}': ()
- 27..31 'true': bool
- 32..34 '{}': ()
- "#]],
- );
-}
-
-#[test]
-fn no_panic_on_field_of_enum() {
- check_infer(
- r#"
- enum X {}
-
- fn test(x: X) {
- x.some_field;
- }
- "#,
- expect![[r#"
- 19..20 'x': X
- 25..46 '{ ...eld; }': ()
- 31..32 'x': X
- 31..43 'x.some_field': {unknown}
- "#]],
- );
-}
-
-#[test]
-fn bug_585() {
- check_infer(
- r#"
- fn test() {
- X {};
- match x {
- A::B {} => (),
- A::Y() => (),
- }
- }
- "#,
- expect![[r#"
- 10..88 '{ ... } }': ()
- 16..20 'X {}': {unknown}
- 26..86 'match ... }': ()
- 32..33 'x': {unknown}
- 44..51 'A::B {}': {unknown}
- 55..57 '()': ()
- 67..73 'A::Y()': {unknown}
- 77..79 '()': ()
- "#]],
- );
-}
-
-#[test]
-fn bug_651() {
- check_infer(
- r#"
- fn quux() {
- let y = 92;
- 1 + y;
- }
- "#,
- expect![[r#"
- 10..40 '{ ...+ y; }': ()
- 20..21 'y': i32
- 24..26 '92': i32
- 32..33 '1': i32
- 32..37 '1 + y': i32
- 36..37 'y': i32
- "#]],
- );
-}
-
-#[test]
-fn recursive_vars() {
- mark::check!(type_var_cycles_resolve_completely);
- mark::check!(type_var_cycles_resolve_as_possible);
- check_infer(
- r#"
- fn test() {
- let y = unknown;
- [y, &y];
- }
- "#,
- expect![[r#"
- 10..47 '{ ...&y]; }': ()
- 20..21 'y': &{unknown}
- 24..31 'unknown': &{unknown}
- 37..44 '[y, &y]': [&&{unknown}; _]
- 38..39 'y': &{unknown}
- 41..43 '&y': &&{unknown}
- 42..43 'y': &{unknown}
- "#]],
- );
-}
-
-#[test]
-fn recursive_vars_2() {
- check_infer(
- r#"
- fn test() {
- let x = unknown;
- let y = unknown;
- [(x, y), (&y, &x)];
- }
- "#,
- expect![[r#"
- 10..79 '{ ...x)]; }': ()
- 20..21 'x': &&{unknown}
- 24..31 'unknown': &&{unknown}
- 41..42 'y': &&{unknown}
- 45..52 'unknown': &&{unknown}
- 58..76 '[(x, y..., &x)]': [(&&&{unknown}, &&&{unknown}); _]
- 59..65 '(x, y)': (&&&{unknown}, &&&{unknown})
- 60..61 'x': &&{unknown}
- 63..64 'y': &&{unknown}
- 67..75 '(&y, &x)': (&&&{unknown}, &&&{unknown})
- 68..70 '&y': &&&{unknown}
- 69..70 'y': &&{unknown}
- 72..74 '&x': &&&{unknown}
- 73..74 'x': &&{unknown}
- "#]],
- );
-}
-
-#[test]
-fn infer_std_crash_1() {
- // caused stack overflow, taken from std
- check_infer(
- r#"
- enum Maybe<T> {
- Real(T),
- Fake,
- }
-
- fn write() {
- match something_unknown {
- Maybe::Real(ref mut something) => (),
- }
- }
- "#,
- expect![[r#"
- 53..138 '{ ... } }': ()
- 59..136 'match ... }': ()
- 65..82 'someth...nknown': Maybe<{unknown}>
- 93..123 'Maybe:...thing)': Maybe<{unknown}>
- 105..122 'ref mu...ething': &mut {unknown}
- 127..129 '()': ()
- "#]],
- );
-}
-
-#[test]
-fn infer_std_crash_2() {
- mark::check!(type_var_resolves_to_int_var);
- // caused "equating two type variables, ...", taken from std
- check_infer(
- r#"
- fn test_line_buffer() {
- &[0, b'\n', 1, b'\n'];
- }
- "#,
- expect![[r#"
- 22..52 '{ ...n']; }': ()
- 28..49 '&[0, b...b'\n']': &[u8; _]
- 29..49 '[0, b'...b'\n']': [u8; _]
- 30..31 '0': u8
- 33..38 'b'\n'': u8
- 40..41 '1': u8
- 43..48 'b'\n'': u8
- "#]],
- );
-}
-
-#[test]
-fn infer_std_crash_3() {
- // taken from rustc
- check_infer(
- r#"
- pub fn compute() {
- match nope!() {
- SizeSkeleton::Pointer { non_zero: true, tail } => {}
- }
- }
- "#,
- expect![[r#"
- 17..107 '{ ... } }': ()
- 23..105 'match ... }': ()
- 29..36 'nope!()': {unknown}
- 47..93 'SizeSk...tail }': {unknown}
- 81..85 'true': bool
- 81..85 'true': bool
- 87..91 'tail': {unknown}
- 97..99 '{}': ()
- "#]],
- );
-}
-
-#[test]
-fn infer_std_crash_4() {
- // taken from rustc
- check_infer(
- r#"
- pub fn primitive_type() {
- match *self {
- BorrowedRef { type_: Primitive(p), ..} => {},
- }
- }
- "#,
- expect![[r#"
- 24..105 '{ ... } }': ()
- 30..103 'match ... }': ()
- 36..41 '*self': {unknown}
- 37..41 'self': {unknown}
- 52..90 'Borrow...), ..}': {unknown}
- 73..85 'Primitive(p)': {unknown}
- 83..84 'p': {unknown}
- 94..96 '{}': ()
- "#]],
- );
-}
-
-#[test]
-fn infer_std_crash_5() {
- // taken from rustc
- check_infer(
- r#"
- fn extra_compiler_flags() {
- for content in doesnt_matter {
- let name = if doesnt_matter {
- first
- } else {
- &content
- };
-
- let content = if ICE_REPORT_COMPILER_FLAGS_STRIP_VALUE.contains(&name) {
- name
- } else {
- content
- };
- }
- }
- "#,
- expect![[r#"
- 26..322 '{ ... } }': ()
- 32..320 'for co... }': ()
- 36..43 'content': &{unknown}
- 47..60 'doesnt_matter': {unknown}
- 61..320 '{ ... }': ()
- 75..79 'name': &&{unknown}
- 82..166 'if doe... }': &&{unknown}
- 85..98 'doesnt_matter': bool
- 99..128 '{ ... }': &&{unknown}
- 113..118 'first': &&{unknown}
- 134..166 '{ ... }': &&{unknown}
- 148..156 '&content': &&{unknown}
- 149..156 'content': &{unknown}
- 181..188 'content': &{unknown}
- 191..313 'if ICE... }': &{unknown}
- 194..231 'ICE_RE..._VALUE': {unknown}
- 194..247 'ICE_RE...&name)': bool
- 241..246 '&name': &&&{unknown}
- 242..246 'name': &&{unknown}
- 248..276 '{ ... }': &&{unknown}
- 262..266 'name': &&{unknown}
- 282..313 '{ ... }': &{unknown}
- 296..303 'content': &{unknown}
- "#]],
- );
-}
-
-#[test]
-fn infer_nested_generics_crash() {
- // another crash found typechecking rustc
- check_infer(
- r#"
- struct Canonical<V> {
- value: V,
- }
- struct QueryResponse<V> {
- value: V,
- }
- fn test<R>(query_response: Canonical<QueryResponse<R>>) {
- &query_response.value;
- }
- "#,
- expect![[r#"
- 91..105 'query_response': Canonical<QueryResponse<R>>
- 136..166 '{ ...lue; }': ()
- 142..163 '&query....value': &QueryResponse<R>
- 143..157 'query_response': Canonical<QueryResponse<R>>
- 143..163 'query_....value': QueryResponse<R>
- "#]],
- );
-}
-
-#[test]
-fn infer_paren_macro_call() {
- check_infer(
- r#"
- macro_rules! bar { () => {0u32} }
- fn test() {
- let a = (bar!());
- }
- "#,
- expect![[r#"
- !0..4 '0u32': u32
- 44..69 '{ ...()); }': ()
- 54..55 'a': u32
- "#]],
- );
-}
-
-#[test]
-fn bug_1030() {
- check_infer(
- r#"
- struct HashSet<T, H>;
- struct FxHasher;
- type FxHashSet<T> = HashSet<T, FxHasher>;
-
- impl<T, H> HashSet<T, H> {
- fn default() -> HashSet<T, H> {}
- }
-
- pub fn main_loop() {
- FxHashSet::default();
- }
- "#,
- expect![[r#"
- 143..145 '{}': ()
- 168..197 '{ ...t(); }': ()
- 174..192 'FxHash...efault': fn default<{unknown}, FxHasher>() -> HashSet<{unknown}, FxHasher>
- 174..194 'FxHash...ault()': HashSet<{unknown}, FxHasher>
- "#]],
- );
-}
-
-#[test]
-fn issue_2669() {
- check_infer(
- r#"
- trait A {}
- trait Write {}
- struct Response<T> {}
-
- trait D {
- fn foo();
- }
-
- impl<T:A> D for Response<T> {
- fn foo() {
- end();
- fn end<W: Write>() {
- let _x: T = loop {};
- }
- }
- }
- "#,
- expect![[r#"
- 119..214 '{ ... }': ()
- 129..132 'end': fn end<{unknown}>()
- 129..134 'end()': ()
- 163..208 '{ ... }': ()
- 181..183 '_x': !
- 190..197 'loop {}': !
- 195..197 '{}': ()
- "#]],
- )
-}
-
-#[test]
-fn issue_2705() {
- check_infer(
- r#"
- trait Trait {}
- fn test() {
- <Trait<u32>>::foo()
- }
- "#,
- expect![[r#"
- 25..52 '{ ...oo() }': ()
- 31..48 '<Trait...>::foo': {unknown}
- 31..50 '<Trait...:foo()': ()
- "#]],
- );
-}
-
-#[test]
-fn issue_2683_chars_impl() {
- check_types(
- r#"
-//- /main.rs crate:main deps:std
-fn test() {
- let chars: std::str::Chars<'_>;
- (chars.next(), chars.nth(1));
-} //^ (Option<char>, Option<char>)
-
-//- /std.rs crate:std
-#[prelude_import]
-use prelude::*;
-
-pub mod prelude {
- pub use crate::iter::Iterator;
- pub use crate::option::Option;
-}
-
-pub mod iter {
- pub use self::traits::Iterator;
- pub mod traits {
- pub use self::iterator::Iterator;
-
- pub mod iterator {
- pub trait Iterator {
- type Item;
- fn next(&mut self) -> Option<Self::Item>;
- fn nth(&mut self, n: usize) -> Option<Self::Item> {}
- }
- }
- }
-}
-
-pub mod option {
- pub enum Option<T> {}
-}
-
-pub mod str {
- pub struct Chars<'a> {}
- impl<'a> Iterator for Chars<'a> {
- type Item = char;
- fn next(&mut self) -> Option<char> {}
- }
-}
-"#,
- );
-}
-
-#[test]
-fn issue_3642_bad_macro_stackover() {
- check_types(
- r#"
-#[macro_export]
-macro_rules! match_ast {
- (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
-
- (match ($node:expr) {
- $( ast::$ast:ident($it:ident) => $res:expr, )*
- _ => $catch_all:expr $(,)?
- }) => {{
- $( if let Some($it) = ast::$ast::cast($node.clone()) { $res } else )*
- { $catch_all }
- }};
-}
-
-fn main() {
- let anchor = match_ast! {
- //^ ()
- match parent {
- as => {},
- _ => return None
- }
- };
-}"#,
- );
-}
-
-#[test]
-fn issue_3999_slice() {
- check_infer(
- r#"
- fn foo(params: &[usize]) {
- match params {
- [ps @ .., _] => {}
- }
- }
- "#,
- expect![[r#"
- 7..13 'params': &[usize]
- 25..80 '{ ... } }': ()
- 31..78 'match ... }': ()
- 37..43 'params': &[usize]
- 54..66 '[ps @ .., _]': [usize]
- 55..62 'ps @ ..': &[usize]
- 60..62 '..': [usize]
- 64..65 '_': usize
- 70..72 '{}': ()
- "#]],
- );
-}
-
-#[test]
-fn issue_3999_struct() {
- // rust-analyzer should not panic on seeing this malformed
- // record pattern.
- check_infer(
- r#"
- struct Bar {
- a: bool,
- }
- fn foo(b: Bar) {
- match b {
- Bar { a: .. } => {},
- }
- }
- "#,
- expect![[r#"
- 35..36 'b': Bar
- 43..95 '{ ... } }': ()
- 49..93 'match ... }': ()
- 55..56 'b': Bar
- 67..80 'Bar { a: .. }': Bar
- 76..78 '..': bool
- 84..86 '{}': ()
- "#]],
- );
-}
-
-#[test]
-fn issue_4235_name_conflicts() {
- check_infer(
- r#"
- struct FOO {}
- static FOO:FOO = FOO {};
-
- impl FOO {
- fn foo(&self) {}
- }
-
- fn main() {
- let a = &FOO;
- a.foo();
- }
- "#,
- expect![[r#"
- 31..37 'FOO {}': FOO
- 63..67 'self': &FOO
- 69..71 '{}': ()
- 85..119 '{ ...o(); }': ()
- 95..96 'a': &FOO
- 99..103 '&FOO': &FOO
- 100..103 'FOO': FOO
- 109..110 'a': &FOO
- 109..116 'a.foo()': ()
- "#]],
- );
-}
-
-#[test]
-fn issue_4465_dollar_crate_at_type() {
- check_infer(
- r#"
- pub struct Foo {}
- pub fn anything<T>() -> T {
- loop {}
- }
- macro_rules! foo {
- () => {{
- let r: $crate::Foo = anything();
- r
- }};
- }
- fn main() {
- let _a = foo!();
- }
- "#,
- expect![[r#"
- 44..59 '{ loop {} }': T
- 50..57 'loop {}': !
- 55..57 '{}': ()
- !0..31 '{letr:...g();r}': Foo
- !4..5 'r': Foo
- !18..26 'anything': fn anything<Foo>() -> Foo
- !18..28 'anything()': Foo
- !29..30 'r': Foo
- 163..187 '{ ...!(); }': ()
- 173..175 '_a': Foo
- "#]],
- );
-}
-
-#[test]
-fn issue_4053_diesel_where_clauses() {
- check_infer(
- r#"
- trait BoxedDsl<DB> {
- type Output;
- fn internal_into_boxed(self) -> Self::Output;
- }
-
- struct SelectStatement<From, Select, Distinct, Where, Order, LimitOffset, GroupBy, Locking> {
- order: Order,
- }
-
- trait QueryFragment<DB: Backend> {}
-
- trait Into<T> { fn into(self) -> T; }
-
- impl<F, S, D, W, O, LOf, DB> BoxedDsl<DB>
- for SelectStatement<F, S, D, W, O, LOf, G>
- where
- O: Into<dyn QueryFragment<DB>>,
- {
- type Output = XXX;
-
- fn internal_into_boxed(self) -> Self::Output {
- self.order.into();
- }
- }
- "#,
- expect![[r#"
- 65..69 'self': Self
- 267..271 'self': Self
- 466..470 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
- 488..522 '{ ... }': ()
- 498..502 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
- 498..508 'self.order': O
- 498..515 'self.o...into()': dyn QueryFragment<DB>
- "#]],
- );
-}
-
-#[test]
-fn issue_4953() {
- check_infer(
- r#"
- pub struct Foo(pub i64);
- impl Foo {
- fn test() -> Self { Self(0i64) }
- }
- "#,
- expect![[r#"
- 58..72 '{ Self(0i64) }': Foo
- 60..64 'Self': Foo(i64) -> Foo
- 60..70 'Self(0i64)': Foo
- 65..69 '0i64': i64
- "#]],
- );
- check_infer(
- r#"
- pub struct Foo<T>(pub T);
- impl Foo<i64> {
- fn test() -> Self { Self(0i64) }
- }
- "#,
- expect![[r#"
- 64..78 '{ Self(0i64) }': Foo<i64>
- 66..70 'Self': Foo<i64>(i64) -> Foo<i64>
- 66..76 'Self(0i64)': Foo<i64>
- 71..75 '0i64': i64
- "#]],
- );
-}
-
-#[test]
-fn issue_4931() {
- check_infer(
- r#"
- trait Div<T> {
- type Output;
- }
-
- trait CheckedDiv: Div<()> {}
-
- trait PrimInt: CheckedDiv<Output = ()> {
- fn pow(self);
- }
-
- fn check<T: PrimInt>(i: T) {
- i.pow();
- }
- "#,
- expect![[r#"
- 117..121 'self': Self
- 148..149 'i': T
- 154..170 '{ ...w(); }': ()
- 160..161 'i': T
- 160..167 'i.pow()': ()
- "#]],
- );
-}
-
-#[test]
-fn issue_4885() {
- check_infer(
- r#"
- #[lang = "coerce_unsized"]
- pub trait CoerceUnsized<T> {}
-
- trait Future {
- type Output;
- }
- trait Foo<R> {
- type Bar;
- }
- fn foo<R, K>(key: &K) -> impl Future<Output = K::Bar>
- where
- K: Foo<R>,
- {
- bar(key)
- }
- fn bar<R, K>(key: &K) -> impl Future<Output = K::Bar>
- where
- K: Foo<R>,
- {
- }
- "#,
- expect![[r#"
- 136..139 'key': &K
- 198..214 '{ ...key) }': impl Future<Output = <K as Foo<R>>::Bar>
- 204..207 'bar': fn bar<R, K>(&K) -> impl Future<Output = <K as Foo<R>>::Bar>
- 204..212 'bar(key)': impl Future<Output = <K as Foo<R>>::Bar>
- 208..211 'key': &K
- 228..231 'key': &K
- 290..293 '{ }': ()
- "#]],
- );
-}
-
-#[test]
-fn issue_4800() {
- check_infer(
- r#"
- trait Debug {}
-
- struct Foo<T>;
-
- type E1<T> = (T, T, T);
- type E2<T> = E1<E1<E1<(T, T, T)>>>;
-
- impl Debug for Foo<E2<()>> {}
-
- struct Request;
-
- pub trait Future {
- type Output;
- }
-
- pub struct PeerSet<D>;
-
- impl<D> Service<Request> for PeerSet<D>
- where
- D: Discover,
- D::Key: Debug,
- {
- type Error = ();
- type Future = dyn Future<Output = Self::Error>;
-
- fn call(&mut self) -> Self::Future {
- loop {}
- }
- }
-
- pub trait Discover {
- type Key;
- }
-
- pub trait Service<Request> {
- type Error;
- type Future: Future<Output = Self::Error>;
- fn call(&mut self) -> Self::Future;
- }
- "#,
- expect![[r#"
- 379..383 'self': &mut PeerSet<D>
- 401..424 '{ ... }': dyn Future<Output = ()>
- 411..418 'loop {}': !
- 416..418 '{}': ()
- 575..579 'self': &mut Self
- "#]],
- );
-}
-
-#[test]
-fn issue_4966() {
- check_infer(
- r#"
- pub trait IntoIterator {
- type Item;
- }
-
- struct Repeat<A> { element: A }
-
- struct Map<F> { f: F }
-
- struct Vec<T> {}
-
- #[lang = "deref"]
- pub trait Deref {
- type Target;
- }
-
- impl<T> Deref for Vec<T> {
- type Target = [T];
- }
-
- fn from_iter<A, T: IntoIterator<Item = A>>(iter: T) -> Vec<A> {}
-
- fn main() {
- let inner = Map { f: |_: &f64| 0.0 };
-
- let repeat = Repeat { element: inner };
-
- let vec = from_iter(repeat);
-
- vec.foo_bar();
- }
- "#,
- expect![[r#"
- 270..274 'iter': T
- 289..291 '{}': ()
- 303..447 '{ ...r(); }': ()
- 313..318 'inner': Map<|&f64| -> f64>
- 321..345 'Map { ... 0.0 }': Map<|&f64| -> f64>
- 330..343 '|_: &f64| 0.0': |&f64| -> f64
- 331..332 '_': &f64
- 340..343 '0.0': f64
- 356..362 'repeat': Repeat<Map<|&f64| -> f64>>
- 365..390 'Repeat...nner }': Repeat<Map<|&f64| -> f64>>
- 383..388 'inner': Map<|&f64| -> f64>
- 401..404 'vec': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
- 407..416 'from_iter': fn from_iter<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>, Repeat<Map<|&f64| -> f64>>>(Repeat<Map<|&f64| -> f64>>) -> Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
- 407..424 'from_i...epeat)': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
- 417..423 'repeat': Repeat<Map<|&f64| -> f64>>
- 431..434 'vec': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
- 431..444 'vec.foo_bar()': {unknown}
- "#]],
- );
-}
+++ /dev/null
-use expect::expect;
-
-use super::{check_infer, check_types};
-
-#[test]
-fn infer_box() {
- check_types(
- r#"
-//- /main.rs crate:main deps:std
-fn test() {
- let x = box 1;
- let t = (x, box x, box &1, box [1]);
- t;
-} //^ (Box<i32>, Box<Box<i32>>, Box<&i32>, Box<[i32; _]>)
-
-//- /std.rs crate:std
-#[prelude_import] use prelude::*;
-mod prelude {}
-
-mod boxed {
- #[lang = "owned_box"]
- pub struct Box<T: ?Sized> {
- inner: *mut T,
- }
-}
-"#,
- );
-}
-
-#[test]
-fn infer_adt_self() {
- check_types(
- r#"
-enum Nat { Succ(Self), Demo(Nat), Zero }
-
-fn test() {
- let foo: Nat = Nat::Zero;
- if let Nat::Succ(x) = foo {
- x
- } //^ Nat
-}
-"#,
- );
-}
-
-#[test]
-fn self_in_struct_lit() {
- check_infer(
- r#"
- //- /main.rs
- struct S<T> { x: T }
-
- impl S<u32> {
- fn foo() {
- Self { x: 1 };
- }
- }
- "#,
- expect![[r#"
- 49..79 '{ ... }': ()
- 59..72 'Self { x: 1 }': S<u32>
- 69..70 '1': u32
- "#]],
- );
-}
-
-#[test]
-fn type_alias_in_struct_lit() {
- check_infer(
- r#"
- //- /main.rs
- struct S<T> { x: T }
-
- type SS = S<u32>;
-
- fn foo() {
- SS { x: 1 };
- }
- "#,
- expect![[r#"
- 50..70 '{ ...1 }; }': ()
- 56..67 'SS { x: 1 }': S<u32>
- 64..65 '1': u32
- "#]],
- );
-}
-
-#[test]
-fn infer_ranges() {
- check_types(
- r#"
-//- /main.rs crate:main deps:core
-fn test() {
- let a = ..;
- let b = 1..;
- let c = ..2u32;
- let d = 1..2usize;
- let e = ..=10;
- let f = 'a'..='z';
-
- let t = (a, b, c, d, e, f);
- t;
-} //^ (RangeFull, RangeFrom<i32>, RangeTo<u32>, Range<usize>, RangeToInclusive<i32>, RangeInclusive<char>)
-
-//- /core.rs crate:core
-#[prelude_import] use prelude::*;
-mod prelude {}
-
-pub mod ops {
- pub struct Range<Idx> {
- pub start: Idx,
- pub end: Idx,
- }
- pub struct RangeFrom<Idx> {
- pub start: Idx,
- }
- struct RangeFull;
- pub struct RangeInclusive<Idx> {
- start: Idx,
- end: Idx,
- is_empty: u8,
- }
- pub struct RangeTo<Idx> {
- pub end: Idx,
- }
- pub struct RangeToInclusive<Idx> {
- pub end: Idx,
- }
-}
-"#,
- );
-}
-
-#[test]
-fn infer_while_let() {
- check_types(
- r#"
-enum Option<T> { Some(T), None }
-
-fn test() {
- let foo: Option<f32> = None;
- while let Option::Some(x) = foo {
- x
- } //^ f32
-}
-"#,
- );
-}
-
-#[test]
-fn infer_basics() {
- check_infer(
- r#"
- fn test(a: u32, b: isize, c: !, d: &str) {
- a;
- b;
- c;
- d;
- 1usize;
- 1isize;
- "test";
- 1.0f32;
- }"#,
- expect![[r#"
- 8..9 'a': u32
- 16..17 'b': isize
- 26..27 'c': !
- 32..33 'd': &str
- 41..120 '{ ...f32; }': ()
- 47..48 'a': u32
- 54..55 'b': isize
- 61..62 'c': !
- 68..69 'd': &str
- 75..81 '1usize': usize
- 87..93 '1isize': isize
- 99..105 '"test"': &str
- 111..117 '1.0f32': f32
- "#]],
- );
-}
-
-#[test]
-fn infer_let() {
- check_infer(
- r#"
- fn test() {
- let a = 1isize;
- let b: usize = 1;
- let c = b;
- let d: u32;
- let e;
- let f: i32 = e;
- }
- "#,
- expect![[r#"
- 10..117 '{ ...= e; }': ()
- 20..21 'a': isize
- 24..30 '1isize': isize
- 40..41 'b': usize
- 51..52 '1': usize
- 62..63 'c': usize
- 66..67 'b': usize
- 77..78 'd': u32
- 93..94 'e': i32
- 104..105 'f': i32
- 113..114 'e': i32
- "#]],
- );
-}
-
-#[test]
-fn infer_paths() {
- check_infer(
- r#"
- fn a() -> u32 { 1 }
-
- mod b {
- fn c() -> u32 { 1 }
- }
-
- fn test() {
- a();
- b::c();
- }
- "#,
- expect![[r#"
- 14..19 '{ 1 }': u32
- 16..17 '1': u32
- 47..52 '{ 1 }': u32
- 49..50 '1': u32
- 66..90 '{ ...c(); }': ()
- 72..73 'a': fn a() -> u32
- 72..75 'a()': u32
- 81..85 'b::c': fn c() -> u32
- 81..87 'b::c()': u32
- "#]],
- );
-}
-
-#[test]
-fn infer_path_type() {
- check_infer(
- r#"
- struct S;
-
- impl S {
- fn foo() -> i32 { 1 }
- }
-
- fn test() {
- S::foo();
- <S>::foo();
- }
- "#,
- expect![[r#"
- 40..45 '{ 1 }': i32
- 42..43 '1': i32
- 59..92 '{ ...o(); }': ()
- 65..71 'S::foo': fn foo() -> i32
- 65..73 'S::foo()': i32
- 79..87 '<S>::foo': fn foo() -> i32
- 79..89 '<S>::foo()': i32
- "#]],
- );
-}
-
-#[test]
-fn infer_struct() {
- check_infer(
- r#"
- struct A {
- b: B,
- c: C,
- }
- struct B;
- struct C(usize);
-
- fn test() {
- let c = C(1);
- B;
- let a: A = A { b: B, c: C(1) };
- a.b;
- a.c;
- }
- "#,
- expect![[r#"
- 71..153 '{ ...a.c; }': ()
- 81..82 'c': C
- 85..86 'C': C(usize) -> C
- 85..89 'C(1)': C
- 87..88 '1': usize
- 95..96 'B': B
- 106..107 'a': A
- 113..132 'A { b:...C(1) }': A
- 120..121 'B': B
- 126..127 'C': C(usize) -> C
- 126..130 'C(1)': C
- 128..129 '1': usize
- 138..139 'a': A
- 138..141 'a.b': B
- 147..148 'a': A
- 147..150 'a.c': C
- "#]],
- );
-}
-
-#[test]
-fn infer_enum() {
- check_infer(
- r#"
- enum E {
- V1 { field: u32 },
- V2
- }
- fn test() {
- E::V1 { field: 1 };
- E::V2;
- }"#,
- expect![[r#"
- 51..89 '{ ...:V2; }': ()
- 57..75 'E::V1 ...d: 1 }': E
- 72..73 '1': u32
- 81..86 'E::V2': E
- "#]],
- );
-}
-
-#[test]
-fn infer_union() {
- check_infer(
- r#"
- union MyUnion {
- foo: u32,
- bar: f32,
- }
-
- fn test() {
- let u = MyUnion { foo: 0 };
- unsafe { baz(u); }
- let u = MyUnion { bar: 0.0 };
- unsafe { baz(u); }
- }
-
- unsafe fn baz(u: MyUnion) {
- let inner = u.foo;
- let inner = u.bar;
- }
- "#,
- expect![[r#"
- 57..172 '{ ...); } }': ()
- 67..68 'u': MyUnion
- 71..89 'MyUnio...o: 0 }': MyUnion
- 86..87 '0': u32
- 95..113 'unsafe...(u); }': ()
- 102..113 '{ baz(u); }': ()
- 104..107 'baz': fn baz(MyUnion)
- 104..110 'baz(u)': ()
- 108..109 'u': MyUnion
- 122..123 'u': MyUnion
- 126..146 'MyUnio... 0.0 }': MyUnion
- 141..144 '0.0': f32
- 152..170 'unsafe...(u); }': ()
- 159..170 '{ baz(u); }': ()
- 161..164 'baz': fn baz(MyUnion)
- 161..167 'baz(u)': ()
- 165..166 'u': MyUnion
- 188..189 'u': MyUnion
- 200..249 '{ ...bar; }': ()
- 210..215 'inner': u32
- 218..219 'u': MyUnion
- 218..223 'u.foo': u32
- 233..238 'inner': f32
- 241..242 'u': MyUnion
- 241..246 'u.bar': f32
- "#]],
- );
-}
-
-#[test]
-fn infer_refs() {
- check_infer(
- r#"
- fn test(a: &u32, b: &mut u32, c: *const u32, d: *mut u32) {
- a;
- *a;
- &a;
- &mut a;
- b;
- *b;
- &b;
- c;
- *c;
- d;
- *d;
- }
- "#,
- expect![[r#"
- 8..9 'a': &u32
- 17..18 'b': &mut u32
- 30..31 'c': *const u32
- 45..46 'd': *mut u32
- 58..149 '{ ... *d; }': ()
- 64..65 'a': &u32
- 71..73 '*a': u32
- 72..73 'a': &u32
- 79..81 '&a': &&u32
- 80..81 'a': &u32
- 87..93 '&mut a': &mut &u32
- 92..93 'a': &u32
- 99..100 'b': &mut u32
- 106..108 '*b': u32
- 107..108 'b': &mut u32
- 114..116 '&b': &&mut u32
- 115..116 'b': &mut u32
- 122..123 'c': *const u32
- 129..131 '*c': u32
- 130..131 'c': *const u32
- 137..138 'd': *mut u32
- 144..146 '*d': u32
- 145..146 'd': *mut u32
- "#]],
- );
-}
-
-#[test]
-fn infer_raw_ref() {
- check_infer(
- r#"
- fn test(a: i32) {
- &raw mut a;
- &raw const a;
- }
- "#,
- expect![[r#"
- 8..9 'a': i32
- 16..53 '{ ...t a; }': ()
- 22..32 '&raw mut a': *mut i32
- 31..32 'a': i32
- 38..50 '&raw const a': *const i32
- 49..50 'a': i32
- "#]],
- );
-}
-
-#[test]
-fn infer_literals() {
- check_infer(
- r##"
- fn test() {
- 5i32;
- 5f32;
- 5f64;
- "hello";
- b"bytes";
- 'c';
- b'b';
- 3.14;
- 5000;
- false;
- true;
- r#"
- //! doc
- // non-doc
- mod foo {}
- "#;
- br#"yolo"#;
- }
- "##,
- expect![[r##"
- 10..216 '{ ...o"#; }': ()
- 16..20 '5i32': i32
- 26..30 '5f32': f32
- 36..40 '5f64': f64
- 46..53 '"hello"': &str
- 59..67 'b"bytes"': &[u8; _]
- 73..76 ''c'': char
- 82..86 'b'b'': u8
- 92..96 '3.14': f64
- 102..106 '5000': i32
- 112..117 'false': bool
- 123..127 'true': bool
- 133..197 'r#" ... "#': &str
- 203..213 'br#"yolo"#': &[u8; _]
- "##]],
- );
-}
-
-#[test]
-fn infer_unary_op() {
- check_infer(
- r#"
- enum SomeType {}
-
- fn test(x: SomeType) {
- let b = false;
- let c = !b;
- let a = 100;
- let d: i128 = -a;
- let e = -100;
- let f = !!!true;
- let g = !42;
- let h = !10u32;
- let j = !a;
- -3.14;
- !3;
- -x;
- !x;
- -"hello";
- !"hello";
- }
- "#,
- expect![[r#"
- 26..27 'x': SomeType
- 39..271 '{ ...lo"; }': ()
- 49..50 'b': bool
- 53..58 'false': bool
- 68..69 'c': bool
- 72..74 '!b': bool
- 73..74 'b': bool
- 84..85 'a': i128
- 88..91 '100': i128
- 101..102 'd': i128
- 111..113 '-a': i128
- 112..113 'a': i128
- 123..124 'e': i32
- 127..131 '-100': i32
- 128..131 '100': i32
- 141..142 'f': bool
- 145..152 '!!!true': bool
- 146..152 '!!true': bool
- 147..152 '!true': bool
- 148..152 'true': bool
- 162..163 'g': i32
- 166..169 '!42': i32
- 167..169 '42': i32
- 179..180 'h': u32
- 183..189 '!10u32': u32
- 184..189 '10u32': u32
- 199..200 'j': i128
- 203..205 '!a': i128
- 204..205 'a': i128
- 211..216 '-3.14': f64
- 212..216 '3.14': f64
- 222..224 '!3': i32
- 223..224 '3': i32
- 230..232 '-x': {unknown}
- 231..232 'x': SomeType
- 238..240 '!x': {unknown}
- 239..240 'x': SomeType
- 246..254 '-"hello"': {unknown}
- 247..254 '"hello"': &str
- 260..268 '!"hello"': {unknown}
- 261..268 '"hello"': &str
- "#]],
- );
-}
-
-#[test]
-fn infer_backwards() {
- check_infer(
- r#"
- fn takes_u32(x: u32) {}
-
- struct S { i32_field: i32 }
-
- fn test() -> &mut &f64 {
- let a = unknown_function();
- takes_u32(a);
- let b = unknown_function();
- S { i32_field: b };
- let c = unknown_function();
- &mut &c
- }
- "#,
- expect![[r#"
- 13..14 'x': u32
- 21..23 '{}': ()
- 77..230 '{ ...t &c }': &mut &f64
- 87..88 'a': u32
- 91..107 'unknow...nction': {unknown}
- 91..109 'unknow...tion()': u32
- 115..124 'takes_u32': fn takes_u32(u32)
- 115..127 'takes_u32(a)': ()
- 125..126 'a': u32
- 137..138 'b': i32
- 141..157 'unknow...nction': {unknown}
- 141..159 'unknow...tion()': i32
- 165..183 'S { i3...d: b }': S
- 180..181 'b': i32
- 193..194 'c': f64
- 197..213 'unknow...nction': {unknown}
- 197..215 'unknow...tion()': f64
- 221..228 '&mut &c': &mut &f64
- 226..228 '&c': &f64
- 227..228 'c': f64
- "#]],
- );
-}
-
-#[test]
-fn infer_self() {
- check_infer(
- r#"
- struct S;
-
- impl S {
- fn test(&self) {
- self;
- }
- fn test2(self: &Self) {
- self;
- }
- fn test3() -> Self {
- S {}
- }
- fn test4() -> Self {
- Self {}
- }
- }
- "#,
- expect![[r#"
- 33..37 'self': &S
- 39..60 '{ ... }': ()
- 49..53 'self': &S
- 74..78 'self': &S
- 87..108 '{ ... }': ()
- 97..101 'self': &S
- 132..152 '{ ... }': S
- 142..146 'S {}': S
- 176..199 '{ ... }': S
- 186..193 'Self {}': S
- "#]],
- );
-}
-
-#[test]
-fn infer_self_as_path() {
- check_infer(
- r#"
- struct S1;
- struct S2(isize);
- enum E {
- V1,
- V2(u32),
- }
-
- impl S1 {
- fn test() {
- Self;
- }
- }
- impl S2 {
- fn test() {
- Self(1);
- }
- }
- impl E {
- fn test() {
- Self::V1;
- Self::V2(1);
- }
- }
- "#,
- expect![[r#"
- 86..107 '{ ... }': ()
- 96..100 'Self': S1
- 134..158 '{ ... }': ()
- 144..148 'Self': S2(isize) -> S2
- 144..151 'Self(1)': S2
- 149..150 '1': isize
- 184..230 '{ ... }': ()
- 194..202 'Self::V1': E
- 212..220 'Self::V2': V2(u32) -> E
- 212..223 'Self::V2(1)': E
- 221..222 '1': u32
- "#]],
- );
-}
-
-#[test]
-fn infer_binary_op() {
- check_infer(
- r#"
- fn f(x: bool) -> i32 {
- 0i32
- }
-
- fn test() -> bool {
- let x = a && b;
- let y = true || false;
- let z = x == y;
- let t = x != y;
- let minus_forty: isize = -40isize;
- let h = minus_forty <= CONST_2;
- let c = f(z || y) + 5;
- let d = b;
- let g = minus_forty ^= i;
- let ten: usize = 10;
- let ten_is_eleven = ten == some_num;
-
- ten < 3
- }
- "#,
- expect![[r#"
- 5..6 'x': bool
- 21..33 '{ 0i32 }': i32
- 27..31 '0i32': i32
- 53..369 '{ ... < 3 }': bool
- 63..64 'x': bool
- 67..68 'a': bool
- 67..73 'a && b': bool
- 72..73 'b': bool
- 83..84 'y': bool
- 87..91 'true': bool
- 87..100 'true || false': bool
- 95..100 'false': bool
- 110..111 'z': bool
- 114..115 'x': bool
- 114..120 'x == y': bool
- 119..120 'y': bool
- 130..131 't': bool
- 134..135 'x': bool
- 134..140 'x != y': bool
- 139..140 'y': bool
- 150..161 'minus_forty': isize
- 171..179 '-40isize': isize
- 172..179 '40isize': isize
- 189..190 'h': bool
- 193..204 'minus_forty': isize
- 193..215 'minus_...ONST_2': bool
- 208..215 'CONST_2': isize
- 225..226 'c': i32
- 229..230 'f': fn f(bool) -> i32
- 229..238 'f(z || y)': i32
- 229..242 'f(z || y) + 5': i32
- 231..232 'z': bool
- 231..237 'z || y': bool
- 236..237 'y': bool
- 241..242 '5': i32
- 252..253 'd': {unknown}
- 256..257 'b': {unknown}
- 267..268 'g': ()
- 271..282 'minus_forty': isize
- 271..287 'minus_...y ^= i': ()
- 286..287 'i': isize
- 297..300 'ten': usize
- 310..312 '10': usize
- 322..335 'ten_is_eleven': bool
- 338..341 'ten': usize
- 338..353 'ten == some_num': bool
- 345..353 'some_num': usize
- 360..363 'ten': usize
- 360..367 'ten < 3': bool
- 366..367 '3': usize
- "#]],
- );
-}
-
-#[test]
-fn infer_shift_op() {
- check_infer(
- r#"
- fn test() {
- 1u32 << 5u8;
- 1u32 >> 5u8;
- }
- "#,
- expect![[r#"
- 10..47 '{ ...5u8; }': ()
- 16..20 '1u32': u32
- 16..27 '1u32 << 5u8': u32
- 24..27 '5u8': u8
- 33..37 '1u32': u32
- 33..44 '1u32 >> 5u8': u32
- 41..44 '5u8': u8
- "#]],
- );
-}
-
-#[test]
-fn infer_field_autoderef() {
- check_infer(
- r#"
- struct A {
- b: B,
- }
- struct B;
-
- fn test1(a: A) {
- let a1 = a;
- a1.b;
- let a2 = &a;
- a2.b;
- let a3 = &mut a;
- a3.b;
- let a4 = &&&&&&&a;
- a4.b;
- let a5 = &mut &&mut &&mut a;
- a5.b;
- }
-
- fn test2(a1: *const A, a2: *mut A) {
- a1.b;
- a2.b;
- }
- "#,
- expect![[r#"
- 43..44 'a': A
- 49..212 '{ ...5.b; }': ()
- 59..61 'a1': A
- 64..65 'a': A
- 71..73 'a1': A
- 71..75 'a1.b': B
- 85..87 'a2': &A
- 90..92 '&a': &A
- 91..92 'a': A
- 98..100 'a2': &A
- 98..102 'a2.b': B
- 112..114 'a3': &mut A
- 117..123 '&mut a': &mut A
- 122..123 'a': A
- 129..131 'a3': &mut A
- 129..133 'a3.b': B
- 143..145 'a4': &&&&&&&A
- 148..156 '&&&&&&&a': &&&&&&&A
- 149..156 '&&&&&&a': &&&&&&A
- 150..156 '&&&&&a': &&&&&A
- 151..156 '&&&&a': &&&&A
- 152..156 '&&&a': &&&A
- 153..156 '&&a': &&A
- 154..156 '&a': &A
- 155..156 'a': A
- 162..164 'a4': &&&&&&&A
- 162..166 'a4.b': B
- 176..178 'a5': &mut &&mut &&mut A
- 181..199 '&mut &...&mut a': &mut &&mut &&mut A
- 186..199 '&&mut &&mut a': &&mut &&mut A
- 187..199 '&mut &&mut a': &mut &&mut A
- 192..199 '&&mut a': &&mut A
- 193..199 '&mut a': &mut A
- 198..199 'a': A
- 205..207 'a5': &mut &&mut &&mut A
- 205..209 'a5.b': B
- 223..225 'a1': *const A
- 237..239 'a2': *mut A
- 249..272 '{ ...2.b; }': ()
- 255..257 'a1': *const A
- 255..259 'a1.b': B
- 265..267 'a2': *mut A
- 265..269 'a2.b': B
- "#]],
- );
-}
-
-#[test]
-fn infer_argument_autoderef() {
- check_infer(
- r#"
- #[lang = "deref"]
- pub trait Deref {
- type Target;
- fn deref(&self) -> &Self::Target;
- }
-
- struct A<T>(T);
-
- impl<T> A<T> {
- fn foo(&self) -> &T {
- &self.0
- }
- }
-
- struct B<T>(T);
-
- impl<T> Deref for B<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
- }
-
- fn test() {
- let t = A::foo(&&B(B(A(42))));
- }
- "#,
- expect![[r#"
- 67..71 'self': &Self
- 138..142 'self': &A<T>
- 150..173 '{ ... }': &T
- 160..167 '&self.0': &T
- 161..165 'self': &A<T>
- 161..167 'self.0': T
- 254..258 'self': &B<T>
- 277..300 '{ ... }': &T
- 287..294 '&self.0': &T
- 288..292 'self': &B<T>
- 288..294 'self.0': T
- 314..352 '{ ...))); }': ()
- 324..325 't': &i32
- 328..334 'A::foo': fn foo<i32>(&A<i32>) -> &i32
- 328..349 'A::foo...42))))': &i32
- 335..348 '&&B(B(A(42)))': &&B<B<A<i32>>>
- 336..348 '&B(B(A(42)))': &B<B<A<i32>>>
- 337..338 'B': B<B<A<i32>>>(B<A<i32>>) -> B<B<A<i32>>>
- 337..348 'B(B(A(42)))': B<B<A<i32>>>
- 339..340 'B': B<A<i32>>(A<i32>) -> B<A<i32>>
- 339..347 'B(A(42))': B<A<i32>>
- 341..342 'A': A<i32>(i32) -> A<i32>
- 341..346 'A(42)': A<i32>
- 343..345 '42': i32
- "#]],
- );
-}
-
-#[test]
-fn infer_method_argument_autoderef() {
- check_infer(
- r#"
- #[lang = "deref"]
- pub trait Deref {
- type Target;
- fn deref(&self) -> &Self::Target;
- }
-
- struct A<T>(*mut T);
-
- impl<T> A<T> {
- fn foo(&self, x: &A<T>) -> &T {
- &*x.0
- }
- }
-
- struct B<T>(T);
-
- impl<T> Deref for B<T> {
- type Target = T;
- fn deref(&self) -> &Self::Target {
- &self.0
- }
- }
-
- fn test(a: A<i32>) {
- let t = A(0 as *mut _).foo(&&B(B(a)));
- }
- "#,
- expect![[r#"
- 67..71 'self': &Self
- 143..147 'self': &A<T>
- 149..150 'x': &A<T>
- 165..186 '{ ... }': &T
- 175..180 '&*x.0': &T
- 176..180 '*x.0': T
- 177..178 'x': &A<T>
- 177..180 'x.0': *mut T
- 267..271 'self': &B<T>
- 290..313 '{ ... }': &T
- 300..307 '&self.0': &T
- 301..305 'self': &B<T>
- 301..307 'self.0': T
- 325..326 'a': A<i32>
- 336..382 '{ ...))); }': ()
- 346..347 't': &i32
- 350..351 'A': A<i32>(*mut i32) -> A<i32>
- 350..364 'A(0 as *mut _)': A<i32>
- 350..379 'A(0 as...B(a)))': &i32
- 352..353 '0': i32
- 352..363 '0 as *mut _': *mut i32
- 369..378 '&&B(B(a))': &&B<B<A<i32>>>
- 370..378 '&B(B(a))': &B<B<A<i32>>>
- 371..372 'B': B<B<A<i32>>>(B<A<i32>>) -> B<B<A<i32>>>
- 371..378 'B(B(a))': B<B<A<i32>>>
- 373..374 'B': B<A<i32>>(A<i32>) -> B<A<i32>>
- 373..377 'B(a)': B<A<i32>>
- 375..376 'a': A<i32>
- "#]],
- );
-}
-
-#[test]
-fn infer_in_elseif() {
- check_infer(
- r#"
- struct Foo { field: i32 }
- fn main(foo: Foo) {
- if true {
-
- } else if false {
- foo.field
- }
- }
- "#,
- expect![[r#"
- 34..37 'foo': Foo
- 44..108 '{ ... } }': ()
- 50..106 'if tru... }': ()
- 53..57 'true': bool
- 58..66 '{ }': ()
- 72..106 'if fal... }': i32
- 75..80 'false': bool
- 81..106 '{ ... }': i32
- 91..94 'foo': Foo
- 91..100 'foo.field': i32
- "#]],
- )
-}
-
-#[test]
-fn infer_if_match_with_return() {
- check_infer(
- r#"
- fn foo() {
- let _x1 = if true {
- 1
- } else {
- return;
- };
- let _x2 = if true {
- 2
- } else {
- return
- };
- let _x3 = match true {
- true => 3,
- _ => {
- return;
- }
- };
- let _x4 = match true {
- true => 4,
- _ => return
- };
- }"#,
- expect![[r#"
- 9..322 '{ ... }; }': ()
- 19..22 '_x1': i32
- 25..79 'if tru... }': i32
- 28..32 'true': bool
- 33..50 '{ ... }': i32
- 43..44 '1': i32
- 56..79 '{ ... }': i32
- 66..72 'return': !
- 89..92 '_x2': i32
- 95..148 'if tru... }': i32
- 98..102 'true': bool
- 103..120 '{ ... }': i32
- 113..114 '2': i32
- 126..148 '{ ... }': !
- 136..142 'return': !
- 158..161 '_x3': i32
- 164..246 'match ... }': i32
- 170..174 'true': bool
- 185..189 'true': bool
- 185..189 'true': bool
- 193..194 '3': i32
- 204..205 '_': bool
- 209..240 '{ ... }': i32
- 223..229 'return': !
- 256..259 '_x4': i32
- 262..319 'match ... }': i32
- 268..272 'true': bool
- 283..287 'true': bool
- 283..287 'true': bool
- 291..292 '4': i32
- 302..303 '_': bool
- 307..313 'return': !
- "#]],
- )
-}
-
-#[test]
-fn infer_inherent_method() {
- check_infer(
- r#"
- struct A;
-
- impl A {
- fn foo(self, x: u32) -> i32 {}
- }
-
- mod b {
- impl super::A {
- fn bar(&self, x: u64) -> i64 {}
- }
- }
-
- fn test(a: A) {
- a.foo(1);
- (&a).bar(1);
- a.bar(1);
- }
- "#,
- expect![[r#"
- 31..35 'self': A
- 37..38 'x': u32
- 52..54 '{}': ()
- 102..106 'self': &A
- 108..109 'x': u64
- 123..125 '{}': ()
- 143..144 'a': A
- 149..197 '{ ...(1); }': ()
- 155..156 'a': A
- 155..163 'a.foo(1)': i32
- 161..162 '1': u32
- 169..180 '(&a).bar(1)': i64
- 170..172 '&a': &A
- 171..172 'a': A
- 178..179 '1': u64
- 186..187 'a': A
- 186..194 'a.bar(1)': i64
- 192..193 '1': u64
- "#]],
- );
-}
-
-#[test]
-fn infer_inherent_method_str() {
- check_infer(
- r#"
- #[lang = "str"]
- impl str {
- fn foo(&self) -> i32 {}
- }
-
- fn test() {
- "foo".foo();
- }
- "#,
- expect![[r#"
- 39..43 'self': &str
- 52..54 '{}': ()
- 68..88 '{ ...o(); }': ()
- 74..79 '"foo"': &str
- 74..85 '"foo".foo()': i32
- "#]],
- );
-}
-
-#[test]
-fn infer_tuple() {
- check_infer(
- r#"
- fn test(x: &str, y: isize) {
- let a: (u32, &str) = (1, "a");
- let b = (a, x);
- let c = (y, x);
- let d = (c, x);
- let e = (1, "e");
- let f = (e, "d");
- }
- "#,
- expect![[r#"
- 8..9 'x': &str
- 17..18 'y': isize
- 27..169 '{ ...d"); }': ()
- 37..38 'a': (u32, &str)
- 54..62 '(1, "a")': (u32, &str)
- 55..56 '1': u32
- 58..61 '"a"': &str
- 72..73 'b': ((u32, &str), &str)
- 76..82 '(a, x)': ((u32, &str), &str)
- 77..78 'a': (u32, &str)
- 80..81 'x': &str
- 92..93 'c': (isize, &str)
- 96..102 '(y, x)': (isize, &str)
- 97..98 'y': isize
- 100..101 'x': &str
- 112..113 'd': ((isize, &str), &str)
- 116..122 '(c, x)': ((isize, &str), &str)
- 117..118 'c': (isize, &str)
- 120..121 'x': &str
- 132..133 'e': (i32, &str)
- 136..144 '(1, "e")': (i32, &str)
- 137..138 '1': i32
- 140..143 '"e"': &str
- 154..155 'f': ((i32, &str), &str)
- 158..166 '(e, "d")': ((i32, &str), &str)
- 159..160 'e': (i32, &str)
- 162..165 '"d"': &str
- "#]],
- );
-}
-
-#[test]
-fn infer_array() {
- check_infer(
- r#"
- fn test(x: &str, y: isize) {
- let a = [x];
- let b = [a, a];
- let c = [b, b];
-
- let d = [y, 1, 2, 3];
- let d = [1, y, 2, 3];
- let e = [y];
- let f = [d, d];
- let g = [e, e];
-
- let h = [1, 2];
- let i = ["a", "b"];
-
- let b = [a, ["b"]];
- let x: [u8; 0] = [];
- }
- "#,
- expect![[r#"
- 8..9 'x': &str
- 17..18 'y': isize
- 27..292 '{ ... []; }': ()
- 37..38 'a': [&str; _]
- 41..44 '[x]': [&str; _]
- 42..43 'x': &str
- 54..55 'b': [[&str; _]; _]
- 58..64 '[a, a]': [[&str; _]; _]
- 59..60 'a': [&str; _]
- 62..63 'a': [&str; _]
- 74..75 'c': [[[&str; _]; _]; _]
- 78..84 '[b, b]': [[[&str; _]; _]; _]
- 79..80 'b': [[&str; _]; _]
- 82..83 'b': [[&str; _]; _]
- 95..96 'd': [isize; _]
- 99..111 '[y, 1, 2, 3]': [isize; _]
- 100..101 'y': isize
- 103..104 '1': isize
- 106..107 '2': isize
- 109..110 '3': isize
- 121..122 'd': [isize; _]
- 125..137 '[1, y, 2, 3]': [isize; _]
- 126..127 '1': isize
- 129..130 'y': isize
- 132..133 '2': isize
- 135..136 '3': isize
- 147..148 'e': [isize; _]
- 151..154 '[y]': [isize; _]
- 152..153 'y': isize
- 164..165 'f': [[isize; _]; _]
- 168..174 '[d, d]': [[isize; _]; _]
- 169..170 'd': [isize; _]
- 172..173 'd': [isize; _]
- 184..185 'g': [[isize; _]; _]
- 188..194 '[e, e]': [[isize; _]; _]
- 189..190 'e': [isize; _]
- 192..193 'e': [isize; _]
- 205..206 'h': [i32; _]
- 209..215 '[1, 2]': [i32; _]
- 210..211 '1': i32
- 213..214 '2': i32
- 225..226 'i': [&str; _]
- 229..239 '["a", "b"]': [&str; _]
- 230..233 '"a"': &str
- 235..238 '"b"': &str
- 250..251 'b': [[&str; _]; _]
- 254..264 '[a, ["b"]]': [[&str; _]; _]
- 255..256 'a': [&str; _]
- 258..263 '["b"]': [&str; _]
- 259..262 '"b"': &str
- 274..275 'x': [u8; _]
- 287..289 '[]': [u8; _]
- "#]],
- );
-}
-
-#[test]
-fn infer_struct_generics() {
- check_infer(
- r#"
- struct A<T> {
- x: T,
- }
-
- fn test(a1: A<u32>, i: i32) {
- a1.x;
- let a2 = A { x: i };
- a2.x;
- let a3 = A::<i128> { x: 1 };
- a3.x;
- }
- "#,
- expect![[r#"
- 35..37 'a1': A<u32>
- 47..48 'i': i32
- 55..146 '{ ...3.x; }': ()
- 61..63 'a1': A<u32>
- 61..65 'a1.x': u32
- 75..77 'a2': A<i32>
- 80..90 'A { x: i }': A<i32>
- 87..88 'i': i32
- 96..98 'a2': A<i32>
- 96..100 'a2.x': i32
- 110..112 'a3': A<i128>
- 115..133 'A::<i1...x: 1 }': A<i128>
- 130..131 '1': i128
- 139..141 'a3': A<i128>
- 139..143 'a3.x': i128
- "#]],
- );
-}
-
-#[test]
-fn infer_tuple_struct_generics() {
- check_infer(
- r#"
- struct A<T>(T);
- enum Option<T> { Some(T), None }
- use Option::*;
-
- fn test() {
- A(42);
- A(42u128);
- Some("x");
- Option::Some("x");
- None;
- let x: Option<i64> = None;
- }
- "#,
- expect![[r#"
- 75..183 '{ ...one; }': ()
- 81..82 'A': A<i32>(i32) -> A<i32>
- 81..86 'A(42)': A<i32>
- 83..85 '42': i32
- 92..93 'A': A<u128>(u128) -> A<u128>
- 92..101 'A(42u128)': A<u128>
- 94..100 '42u128': u128
- 107..111 'Some': Some<&str>(&str) -> Option<&str>
- 107..116 'Some("x")': Option<&str>
- 112..115 '"x"': &str
- 122..134 'Option::Some': Some<&str>(&str) -> Option<&str>
- 122..139 'Option...e("x")': Option<&str>
- 135..138 '"x"': &str
- 145..149 'None': Option<{unknown}>
- 159..160 'x': Option<i64>
- 176..180 'None': Option<i64>
- "#]],
- );
-}
-
-#[test]
-fn infer_function_generics() {
- check_infer(
- r#"
- fn id<T>(t: T) -> T { t }
-
- fn test() {
- id(1u32);
- id::<i128>(1);
- let x: u64 = id(1);
- }
- "#,
- expect![[r#"
- 9..10 't': T
- 20..25 '{ t }': T
- 22..23 't': T
- 37..97 '{ ...(1); }': ()
- 43..45 'id': fn id<u32>(u32) -> u32
- 43..51 'id(1u32)': u32
- 46..50 '1u32': u32
- 57..67 'id::<i128>': fn id<i128>(i128) -> i128
- 57..70 'id::<i128>(1)': i128
- 68..69 '1': i128
- 80..81 'x': u64
- 89..91 'id': fn id<u64>(u64) -> u64
- 89..94 'id(1)': u64
- 92..93 '1': u64
- "#]],
- );
-}
-
-#[test]
-fn infer_impl_generics_basic() {
- check_infer(
- r#"
- struct A<T1, T2> {
- x: T1,
- y: T2,
- }
- impl<Y, X> A<X, Y> {
- fn x(self) -> X {
- self.x
- }
- fn y(self) -> Y {
- self.y
- }
- fn z<T>(self, t: T) -> (X, Y, T) {
- (self.x, self.y, t)
- }
- }
-
- fn test() -> i128 {
- let a = A { x: 1u64, y: 1i64 };
- a.x();
- a.y();
- a.z(1i128);
- a.z::<u128>(1);
- }
- "#,
- expect![[r#"
- 73..77 'self': A<X, Y>
- 84..106 '{ ... }': X
- 94..98 'self': A<X, Y>
- 94..100 'self.x': X
- 116..120 'self': A<X, Y>
- 127..149 '{ ... }': Y
- 137..141 'self': A<X, Y>
- 137..143 'self.y': Y
- 162..166 'self': A<X, Y>
- 168..169 't': T
- 187..222 '{ ... }': (X, Y, T)
- 197..216 '(self.....y, t)': (X, Y, T)
- 198..202 'self': A<X, Y>
- 198..204 'self.x': X
- 206..210 'self': A<X, Y>
- 206..212 'self.y': Y
- 214..215 't': T
- 244..341 '{ ...(1); }': ()
- 254..255 'a': A<u64, i64>
- 258..280 'A { x:...1i64 }': A<u64, i64>
- 265..269 '1u64': u64
- 274..278 '1i64': i64
- 286..287 'a': A<u64, i64>
- 286..291 'a.x()': u64
- 297..298 'a': A<u64, i64>
- 297..302 'a.y()': i64
- 308..309 'a': A<u64, i64>
- 308..318 'a.z(1i128)': (u64, i64, i128)
- 312..317 '1i128': i128
- 324..325 'a': A<u64, i64>
- 324..338 'a.z::<u128>(1)': (u64, i64, u128)
- 336..337 '1': u128
- "#]],
- );
-}
-
-#[test]
-fn infer_impl_generics_with_autoderef() {
- check_infer(
- r#"
- enum Option<T> {
- Some(T),
- None,
- }
- impl<T> Option<T> {
- fn as_ref(&self) -> Option<&T> {}
- }
- fn test(o: Option<u32>) {
- (&o).as_ref();
- o.as_ref();
- }
- "#,
- expect![[r#"
- 77..81 'self': &Option<T>
- 97..99 '{}': ()
- 110..111 'o': Option<u32>
- 126..164 '{ ...f(); }': ()
- 132..145 '(&o).as_ref()': Option<&u32>
- 133..135 '&o': &Option<u32>
- 134..135 'o': Option<u32>
- 151..152 'o': Option<u32>
- 151..161 'o.as_ref()': Option<&u32>
- "#]],
- );
-}
-
-#[test]
-fn infer_generic_chain() {
- check_infer(
- r#"
- struct A<T> {
- x: T,
- }
- impl<T2> A<T2> {
- fn x(self) -> T2 {
- self.x
- }
- }
- fn id<T>(t: T) -> T { t }
-
- fn test() -> i128 {
- let x = 1;
- let y = id(x);
- let a = A { x: id(y) };
- let z = id(a.x);
- let b = A { x: z };
- b.x()
- }
- "#,
- expect![[r#"
- 52..56 'self': A<T2>
- 64..86 '{ ... }': T2
- 74..78 'self': A<T2>
- 74..80 'self.x': T2
- 98..99 't': T
- 109..114 '{ t }': T
- 111..112 't': T
- 134..254 '{ ....x() }': i128
- 144..145 'x': i128
- 148..149 '1': i128
- 159..160 'y': i128
- 163..165 'id': fn id<i128>(i128) -> i128
- 163..168 'id(x)': i128
- 166..167 'x': i128
- 178..179 'a': A<i128>
- 182..196 'A { x: id(y) }': A<i128>
- 189..191 'id': fn id<i128>(i128) -> i128
- 189..194 'id(y)': i128
- 192..193 'y': i128
- 206..207 'z': i128
- 210..212 'id': fn id<i128>(i128) -> i128
- 210..217 'id(a.x)': i128
- 213..214 'a': A<i128>
- 213..216 'a.x': i128
- 227..228 'b': A<i128>
- 231..241 'A { x: z }': A<i128>
- 238..239 'z': i128
- 247..248 'b': A<i128>
- 247..252 'b.x()': i128
- "#]],
- );
-}
-
-#[test]
-fn infer_associated_const() {
- check_infer(
- r#"
- struct Struct;
-
- impl Struct {
- const FOO: u32 = 1;
- }
-
- enum Enum {}
-
- impl Enum {
- const BAR: u32 = 2;
- }
-
- trait Trait {
- const ID: u32;
- }
-
- struct TraitTest;
-
- impl Trait for TraitTest {
- const ID: u32 = 5;
- }
-
- fn test() {
- let x = Struct::FOO;
- let y = Enum::BAR;
- let z = TraitTest::ID;
- }
- "#,
- expect![[r#"
- 51..52 '1': u32
- 104..105 '2': u32
- 212..213 '5': u32
- 228..306 '{ ...:ID; }': ()
- 238..239 'x': u32
- 242..253 'Struct::FOO': u32
- 263..264 'y': u32
- 267..276 'Enum::BAR': u32
- 286..287 'z': u32
- 290..303 'TraitTest::ID': u32
- "#]],
- );
-}
-
-#[test]
-fn infer_type_alias() {
- check_infer(
- r#"
- struct A<X, Y> { x: X, y: Y }
- type Foo = A<u32, i128>;
- type Bar<T> = A<T, u128>;
- type Baz<U, V> = A<V, U>;
- fn test(x: Foo, y: Bar<&str>, z: Baz<i8, u8>) {
- x.x;
- x.y;
- y.x;
- y.y;
- z.x;
- z.y;
- }
- "#,
- expect![[r#"
- 115..116 'x': A<u32, i128>
- 123..124 'y': A<&str, u128>
- 137..138 'z': A<u8, i8>
- 153..210 '{ ...z.y; }': ()
- 159..160 'x': A<u32, i128>
- 159..162 'x.x': u32
- 168..169 'x': A<u32, i128>
- 168..171 'x.y': i128
- 177..178 'y': A<&str, u128>
- 177..180 'y.x': &str
- 186..187 'y': A<&str, u128>
- 186..189 'y.y': u128
- 195..196 'z': A<u8, i8>
- 195..198 'z.x': u8
- 204..205 'z': A<u8, i8>
- 204..207 'z.y': i8
- "#]],
- )
-}
-
-#[test]
-fn recursive_type_alias() {
- check_infer(
- r#"
- struct A<X> {}
- type Foo = Foo;
- type Bar = A<Bar>;
- fn test(x: Foo) {}
- "#,
- expect![[r#"
- 58..59 'x': {unknown}
- 66..68 '{}': ()
- "#]],
- )
-}
-
-#[test]
-fn infer_type_param() {
- check_infer(
- r#"
- fn id<T>(x: T) -> T {
- x
- }
-
- fn clone<T>(x: &T) -> T {
- *x
- }
-
- fn test() {
- let y = 10u32;
- id(y);
- let x: bool = clone(z);
- id::<i128>(1);
- }
- "#,
- expect![[r#"
- 9..10 'x': T
- 20..29 '{ x }': T
- 26..27 'x': T
- 43..44 'x': &T
- 55..65 '{ *x }': T
- 61..63 '*x': T
- 62..63 'x': &T
- 77..157 '{ ...(1); }': ()
- 87..88 'y': u32
- 91..96 '10u32': u32
- 102..104 'id': fn id<u32>(u32) -> u32
- 102..107 'id(y)': u32
- 105..106 'y': u32
- 117..118 'x': bool
- 127..132 'clone': fn clone<bool>(&bool) -> bool
- 127..135 'clone(z)': bool
- 133..134 'z': &bool
- 141..151 'id::<i128>': fn id<i128>(i128) -> i128
- 141..154 'id::<i128>(1)': i128
- 152..153 '1': i128
- "#]],
- );
-}
-
-#[test]
-fn infer_const() {
- check_infer(
- r#"
- struct Foo;
- impl Foo { const ASSOC_CONST: u32 = 0; }
- const GLOBAL_CONST: u32 = 101;
- fn test() {
- const LOCAL_CONST: u32 = 99;
- let x = LOCAL_CONST;
- let z = GLOBAL_CONST;
- let id = Foo::ASSOC_CONST;
- }
- "#,
- expect![[r#"
- 48..49 '0': u32
- 79..82 '101': u32
- 94..212 '{ ...NST; }': ()
- 137..138 'x': u32
- 141..152 'LOCAL_CONST': u32
- 162..163 'z': u32
- 166..178 'GLOBAL_CONST': u32
- 188..190 'id': u32
- 193..209 'Foo::A..._CONST': u32
- 125..127 '99': u32
- "#]],
- );
-}
-
-#[test]
-fn infer_static() {
- check_infer(
- r#"
- static GLOBAL_STATIC: u32 = 101;
- static mut GLOBAL_STATIC_MUT: u32 = 101;
- fn test() {
- static LOCAL_STATIC: u32 = 99;
- static mut LOCAL_STATIC_MUT: u32 = 99;
- let x = LOCAL_STATIC;
- let y = LOCAL_STATIC_MUT;
- let z = GLOBAL_STATIC;
- let w = GLOBAL_STATIC_MUT;
- }
- "#,
- expect![[r#"
- 28..31 '101': u32
- 69..72 '101': u32
- 84..279 '{ ...MUT; }': ()
- 172..173 'x': u32
- 176..188 'LOCAL_STATIC': u32
- 198..199 'y': u32
- 202..218 'LOCAL_...IC_MUT': u32
- 228..229 'z': u32
- 232..245 'GLOBAL_STATIC': u32
- 255..256 'w': u32
- 259..276 'GLOBAL...IC_MUT': u32
- 117..119 '99': u32
- 160..162 '99': u32
- "#]],
- );
-}
-
-#[test]
-fn shadowing_primitive() {
- check_types(
- r#"
-struct i32;
-struct Foo;
-
-impl i32 { fn foo(&self) -> Foo { Foo } }
-
-fn main() {
- let x: i32 = i32;
- x.foo();
- //^ Foo
-}"#,
- );
-}
-
-#[test]
-fn not_shadowing_primitive_by_module() {
- check_types(
- r#"
-//- /str.rs
-fn foo() {}
-
-//- /main.rs
-mod str;
-fn foo() -> &'static str { "" }
-
-fn main() {
- foo();
- //^ &str
-}"#,
- );
-}
-
-#[test]
-fn not_shadowing_module_by_primitive() {
- check_types(
- r#"
-//- /str.rs
-fn foo() -> u32 {0}
-
-//- /main.rs
-mod str;
-fn foo() -> &'static str { "" }
-
-fn main() {
- str::foo();
- //^ u32
-}"#,
- );
-}
-
-// This test is actually testing the shadowing behavior within hir_def. It
-// lives here because the testing infrastructure in hir_def isn't currently
-// capable of asserting the necessary conditions.
-#[test]
-fn should_be_shadowing_imports() {
- check_types(
- r#"
-mod a {
- pub fn foo() -> i8 {0}
- pub struct foo { a: i8 }
-}
-mod b { pub fn foo () -> u8 {0} }
-mod c { pub struct foo { a: u8 } }
-mod d {
- pub use super::a::*;
- pub use super::c::foo;
- pub use super::b::foo;
-}
-
-fn main() {
- d::foo();
- //^ u8
- d::foo{a:0};
- //^ u8
-}"#,
- );
-}
-
-#[test]
-fn closure_return() {
- check_infer(
- r#"
- fn foo() -> u32 {
- let x = || -> usize { return 1; };
- }
- "#,
- expect![[r#"
- 16..58 '{ ...; }; }': ()
- 26..27 'x': || -> usize
- 30..55 '|| -> ...n 1; }': || -> usize
- 42..55 '{ return 1; }': usize
- 44..52 'return 1': !
- 51..52 '1': usize
- "#]],
- );
-}
-
-#[test]
-fn closure_return_unit() {
- check_infer(
- r#"
- fn foo() -> u32 {
- let x = || { return; };
- }
- "#,
- expect![[r#"
- 16..47 '{ ...; }; }': ()
- 26..27 'x': || -> ()
- 30..44 '|| { return; }': || -> ()
- 33..44 '{ return; }': ()
- 35..41 'return': !
- "#]],
- );
-}
-
-#[test]
-fn closure_return_inferred() {
- check_infer(
- r#"
- fn foo() -> u32 {
- let x = || { "test" };
- }
- "#,
- expect![[r#"
- 16..46 '{ ..." }; }': ()
- 26..27 'x': || -> &str
- 30..43 '|| { "test" }': || -> &str
- 33..43 '{ "test" }': &str
- 35..41 '"test"': &str
- "#]],
- );
-}
-
-#[test]
-fn fn_pointer_return() {
- check_infer(
- r#"
- struct Vtable {
- method: fn(),
- }
-
- fn main() {
- let vtable = Vtable { method: || {} };
- let m = vtable.method;
- }
- "#,
- expect![[r#"
- 47..120 '{ ...hod; }': ()
- 57..63 'vtable': Vtable
- 66..90 'Vtable...| {} }': Vtable
- 83..88 '|| {}': || -> ()
- 86..88 '{}': ()
- 100..101 'm': fn()
- 104..110 'vtable': Vtable
- 104..117 'vtable.method': fn()
- "#]],
- );
-}
-
-#[test]
-fn effects_smoke_test() {
- check_infer(
- r#"
- fn main() {
- let x = unsafe { 92 };
- let y = async { async { () }.await };
- let z = try { () };
- let t = 'a: { 92 };
- }
- "#,
- expect![[r#"
- 10..130 '{ ...2 }; }': ()
- 20..21 'x': i32
- 24..37 'unsafe { 92 }': i32
- 31..37 '{ 92 }': i32
- 33..35 '92': i32
- 47..48 'y': {unknown}
- 57..79 '{ asyn...wait }': {unknown}
- 59..77 'async ....await': {unknown}
- 65..71 '{ () }': ()
- 67..69 '()': ()
- 89..90 'z': {unknown}
- 93..103 'try { () }': {unknown}
- 97..103 '{ () }': ()
- 99..101 '()': ()
- 113..114 't': i32
- 121..127 '{ 92 }': i32
- 123..125 '92': i32
- "#]],
- )
-}
-
-#[test]
-fn infer_generic_from_later_assignment() {
- check_infer(
- r#"
- enum Option<T> { Some(T), None }
- use Option::*;
-
- fn test() {
- let mut end = None;
- loop {
- end = Some(true);
- }
- }
- "#,
- expect![[r#"
- 59..129 '{ ... } }': ()
- 69..76 'mut end': Option<bool>
- 79..83 'None': Option<bool>
- 89..127 'loop {... }': !
- 94..127 '{ ... }': ()
- 104..107 'end': Option<bool>
- 104..120 'end = ...(true)': ()
- 110..114 'Some': Some<bool>(bool) -> Option<bool>
- 110..120 'Some(true)': Option<bool>
- 115..119 'true': bool
- "#]],
- );
-}
-
-#[test]
-fn infer_loop_break_with_val() {
- check_infer(
- r#"
- enum Option<T> { Some(T), None }
- use Option::*;
-
- fn test() {
- let x = loop {
- if false {
- break None;
- }
-
- break Some(true);
- };
- }
- "#,
- expect![[r#"
- 59..168 '{ ... }; }': ()
- 69..70 'x': Option<bool>
- 73..165 'loop {... }': Option<bool>
- 78..165 '{ ... }': ()
- 88..132 'if fal... }': ()
- 91..96 'false': bool
- 97..132 '{ ... }': ()
- 111..121 'break None': !
- 117..121 'None': Option<bool>
- 142..158 'break ...(true)': !
- 148..152 'Some': Some<bool>(bool) -> Option<bool>
- 148..158 'Some(true)': Option<bool>
- 153..157 'true': bool
- "#]],
- );
-}
-
-#[test]
-fn infer_loop_break_without_val() {
- check_infer(
- r#"
- enum Option<T> { Some(T), None }
- use Option::*;
-
- fn test() {
- let x = loop {
- if false {
- break;
- }
- };
- }
- "#,
- expect![[r#"
- 59..136 '{ ... }; }': ()
- 69..70 'x': ()
- 73..133 'loop {... }': ()
- 78..133 '{ ... }': ()
- 88..127 'if fal... }': ()
- 91..96 'false': bool
- 97..127 '{ ... }': ()
- 111..116 'break': !
- "#]],
- );
-}
-
-#[test]
-fn infer_labelled_break_with_val() {
- check_infer(
- r#"
- fn foo() {
- let _x = || 'outer: loop {
- let inner = 'inner: loop {
- let i = Default::default();
- if (break 'outer i) {
- loop { break 'inner 5i8; };
- } else if true {
- break 'inner 6;
- }
- break 7;
- };
- break inner < 8;
- };
- }
- "#,
- expect![[r#"
- 9..335 '{ ... }; }': ()
- 19..21 '_x': || -> bool
- 24..332 '|| 'ou... }': || -> bool
- 27..332 ''outer... }': bool
- 40..332 '{ ... }': ()
- 54..59 'inner': i8
- 62..300 ''inner... }': i8
- 75..300 '{ ... }': ()
- 93..94 'i': bool
- 97..113 'Defaul...efault': {unknown}
- 97..115 'Defaul...ault()': bool
- 129..269 'if (br... }': ()
- 133..147 'break 'outer i': !
- 146..147 'i': bool
- 149..208 '{ ... }': ()
- 167..193 'loop {...5i8; }': !
- 172..193 '{ brea...5i8; }': ()
- 174..190 'break ...er 5i8': !
- 187..190 '5i8': i8
- 214..269 'if tru... }': ()
- 217..221 'true': bool
- 222..269 '{ ... }': ()
- 240..254 'break 'inner 6': !
- 253..254 '6': i8
- 282..289 'break 7': !
- 288..289 '7': i8
- 310..325 'break inner < 8': !
- 316..321 'inner': i8
- 316..325 'inner < 8': bool
- 324..325 '8': i8
- "#]],
- );
-}
-
-#[test]
-fn generic_default() {
- check_infer(
- r#"
- struct Thing<T = ()> { t: T }
- enum OtherThing<T = ()> {
- One { t: T },
- Two(T),
- }
-
- fn test(t1: Thing, t2: OtherThing, t3: Thing<i32>, t4: OtherThing<i32>) {
- t1.t;
- t3.t;
- match t2 {
- OtherThing::One { t } => { t; },
- OtherThing::Two(t) => { t; },
- }
- match t4 {
- OtherThing::One { t } => { t; },
- OtherThing::Two(t) => { t; },
- }
- }
- "#,
- expect![[r#"
- 97..99 't1': Thing<()>
- 108..110 't2': OtherThing<()>
- 124..126 't3': Thing<i32>
- 140..142 't4': OtherThing<i32>
- 161..384 '{ ... } }': ()
- 167..169 't1': Thing<()>
- 167..171 't1.t': ()
- 177..179 't3': Thing<i32>
- 177..181 't3.t': i32
- 187..282 'match ... }': ()
- 193..195 't2': OtherThing<()>
- 206..227 'OtherT... { t }': OtherThing<()>
- 224..225 't': ()
- 231..237 '{ t; }': ()
- 233..234 't': ()
- 247..265 'OtherT...Two(t)': OtherThing<()>
- 263..264 't': ()
- 269..275 '{ t; }': ()
- 271..272 't': ()
- 287..382 'match ... }': ()
- 293..295 't4': OtherThing<i32>
- 306..327 'OtherT... { t }': OtherThing<i32>
- 324..325 't': i32
- 331..337 '{ t; }': ()
- 333..334 't': i32
- 347..365 'OtherT...Two(t)': OtherThing<i32>
- 363..364 't': i32
- 369..375 '{ t; }': ()
- 371..372 't': i32
- "#]],
- );
-}
-
-#[test]
-fn generic_default_in_struct_literal() {
- check_infer(
- r#"
- struct Thing<T = ()> { t: T }
- enum OtherThing<T = ()> {
- One { t: T },
- Two(T),
- }
-
- fn test() {
- let x = Thing { t: loop {} };
- let y = Thing { t: () };
- let z = Thing { t: 1i32 };
- if let Thing { t } = z {
- t;
- }
-
- let a = OtherThing::One { t: 1i32 };
- let b = OtherThing::Two(1i32);
- }
- "#,
- expect![[r#"
- 99..319 '{ ...32); }': ()
- 109..110 'x': Thing<!>
- 113..133 'Thing ...p {} }': Thing<!>
- 124..131 'loop {}': !
- 129..131 '{}': ()
- 143..144 'y': Thing<()>
- 147..162 'Thing { t: () }': Thing<()>
- 158..160 '()': ()
- 172..173 'z': Thing<i32>
- 176..193 'Thing ...1i32 }': Thing<i32>
- 187..191 '1i32': i32
- 199..240 'if let... }': ()
- 206..217 'Thing { t }': Thing<i32>
- 214..215 't': i32
- 220..221 'z': Thing<i32>
- 222..240 '{ ... }': ()
- 232..233 't': i32
- 250..251 'a': OtherThing<i32>
- 254..281 'OtherT...1i32 }': OtherThing<i32>
- 275..279 '1i32': i32
- 291..292 'b': OtherThing<i32>
- 295..310 'OtherThing::Two': Two<i32>(i32) -> OtherThing<i32>
- 295..316 'OtherT...(1i32)': OtherThing<i32>
- 311..315 '1i32': i32
- "#]],
- );
-}
-
-#[test]
-fn generic_default_depending_on_other_type_arg() {
- // FIXME: the {unknown} is a bug
- check_infer(
- r#"
- struct Thing<T = u128, F = fn() -> T> { t: T }
-
- fn test(t1: Thing<u32>, t2: Thing) {
- t1;
- t2;
- Thing::<_> { t: 1u32 };
- }
- "#,
- expect![[r#"
- 56..58 't1': Thing<u32, fn() -> u32>
- 72..74 't2': Thing<u128, fn() -> u128>
- 83..130 '{ ...2 }; }': ()
- 89..91 't1': Thing<u32, fn() -> u32>
- 97..99 't2': Thing<u128, fn() -> u128>
- 105..127 'Thing:...1u32 }': Thing<u32, fn() -> {unknown}>
- 121..125 '1u32': u32
- "#]],
- );
-}
-
-#[test]
-fn generic_default_depending_on_other_type_arg_forward() {
- // the {unknown} here is intentional, as defaults are not allowed to
- // refer to type parameters coming later
- check_infer(
- r#"
- struct Thing<F = fn() -> T, T = u128> { t: T }
-
- fn test(t1: Thing) {
- t1;
- }
- "#,
- expect![[r#"
- 56..58 't1': Thing<fn() -> {unknown}, u128>
- 67..78 '{ t1; }': ()
- 73..75 't1': Thing<fn() -> {unknown}, u128>
- "#]],
- );
-}
+++ /dev/null
-use expect::expect;
-use test_utils::mark;
-
-use super::{check_infer, check_infer_with_mismatches, check_types};
-
-#[test]
-fn infer_await() {
- check_types(
- r#"
-//- /main.rs crate:main deps:core
-struct IntFuture;
-
-impl Future for IntFuture {
- type Output = u64;
-}
-
-fn test() {
- let r = IntFuture;
- let v = r.await;
- v;
-} //^ u64
-
-//- /core.rs crate:core
-#[prelude_import] use future::*;
-mod future {
- #[lang = "future_trait"]
- trait Future {
- type Output;
- }
-}
-"#,
- );
-}
-
-#[test]
-fn infer_async() {
- check_types(
- r#"
-//- /main.rs crate:main deps:core
-async fn foo() -> u64 {
- 128
-}
-
-fn test() {
- let r = foo();
- let v = r.await;
- v;
-} //^ u64
-
-//- /core.rs crate:core
-#[prelude_import] use future::*;
-mod future {
- #[lang = "future_trait"]
- trait Future {
- type Output;
- }
-}
-"#,
- );
-}
-
-#[test]
-fn infer_desugar_async() {
- check_types(
- r#"
-//- /main.rs crate:main deps:core
-async fn foo() -> u64 {
- 128
-}
-
-fn test() {
- let r = foo();
- r;
-} //^ impl Future<Output = u64>
-
-//- /core.rs crate:core
-#[prelude_import] use future::*;
-mod future {
- trait Future {
- type Output;
- }
-}
-
-"#,
- );
-}
-
-#[test]
-fn infer_try() {
- check_types(
- r#"
-//- /main.rs crate:main deps:core
-fn test() {
- let r: Result<i32, u64> = Result::Ok(1);
- let v = r?;
- v;
-} //^ i32
-
-//- /core.rs crate:core
-#[prelude_import] use ops::*;
-mod ops {
- trait Try {
- type Ok;
- type Error;
- }
-}
-
-#[prelude_import] use result::*;
-mod result {
- enum Result<O, E> {
- Ok(O),
- Err(E)
- }
-
- impl<O, E> crate::ops::Try for Result<O, E> {
- type Ok = O;
- type Error = E;
- }
-}
-"#,
- );
-}
-
-#[test]
-fn infer_for_loop() {
- check_types(
- r#"
-//- /main.rs crate:main deps:core,alloc
-use alloc::collections::Vec;
-
-fn test() {
- let v = Vec::new();
- v.push("foo");
- for x in v {
- x;
- } //^ &str
-}
-
-//- /core.rs crate:core
-#[prelude_import] use iter::*;
-mod iter {
- trait IntoIterator {
- type Item;
- }
-}
-
-//- /alloc.rs crate:alloc deps:core
-mod collections {
- struct Vec<T> {}
- impl<T> Vec<T> {
- fn new() -> Self { Vec {} }
- fn push(&mut self, t: T) { }
- }
-
- impl<T> IntoIterator for Vec<T> {
- type Item=T;
- }
-}
-"#,
- );
-}
-
-#[test]
-fn infer_ops_neg() {
- check_types(
- r#"
-//- /main.rs crate:main deps:std
-struct Bar;
-struct Foo;
-
-impl std::ops::Neg for Bar {
- type Output = Foo;
-}
-
-fn test() {
- let a = Bar;
- let b = -a;
- b;
-} //^ Foo
-
-//- /std.rs crate:std
-#[prelude_import] use ops::*;
-mod ops {
- #[lang = "neg"]
- pub trait Neg {
- type Output;
- }
-}
-"#,
- );
-}
-
-#[test]
-fn infer_ops_not() {
- check_types(
- r#"
-//- /main.rs crate:main deps:std
-struct Bar;
-struct Foo;
-
-impl std::ops::Not for Bar {
- type Output = Foo;
-}
-
-fn test() {
- let a = Bar;
- let b = !a;
- b;
-} //^ Foo
-
-//- /std.rs crate:std
-#[prelude_import] use ops::*;
-mod ops {
- #[lang = "not"]
- pub trait Not {
- type Output;
- }
-}
-"#,
- );
-}
-
-#[test]
-fn infer_from_bound_1() {
- check_infer(
- r#"
- trait Trait<T> {}
- struct S<T>(T);
- impl<U> Trait<U> for S<U> {}
- fn foo<T: Trait<u32>>(t: T) {}
- fn test() {
- let s = S(unknown);
- foo(s);
- }
- "#,
- expect![[r#"
- 85..86 't': T
- 91..93 '{}': ()
- 104..143 '{ ...(s); }': ()
- 114..115 's': S<u32>
- 118..119 'S': S<u32>(u32) -> S<u32>
- 118..128 'S(unknown)': S<u32>
- 120..127 'unknown': u32
- 134..137 'foo': fn foo<S<u32>>(S<u32>)
- 134..140 'foo(s)': ()
- 138..139 's': S<u32>
- "#]],
- );
-}
-
-#[test]
-fn infer_from_bound_2() {
- check_infer(
- r#"
- trait Trait<T> {}
- struct S<T>(T);
- impl<U> Trait<U> for S<U> {}
- fn foo<U, T: Trait<U>>(t: T) -> U {}
- fn test() {
- let s = S(unknown);
- let x: u32 = foo(s);
- }
- "#,
- expect![[r#"
- 86..87 't': T
- 97..99 '{}': ()
- 110..162 '{ ...(s); }': ()
- 120..121 's': S<u32>
- 124..125 'S': S<u32>(u32) -> S<u32>
- 124..134 'S(unknown)': S<u32>
- 126..133 'unknown': u32
- 144..145 'x': u32
- 153..156 'foo': fn foo<u32, S<u32>>(S<u32>) -> u32
- 153..159 'foo(s)': u32
- 157..158 's': S<u32>
- "#]],
- );
-}
-
-#[test]
-fn trait_default_method_self_bound_implements_trait() {
- mark::check!(trait_self_implements_self);
- check_infer(
- r#"
- trait Trait {
- fn foo(&self) -> i64;
- fn bar(&self) -> {
- let x = self.foo();
- }
- }
- "#,
- expect![[r#"
- 26..30 'self': &Self
- 52..56 'self': &Self
- 61..96 '{ ... }': ()
- 75..76 'x': i64
- 79..83 'self': &Self
- 79..89 'self.foo()': i64
- "#]],
- );
-}
-
-#[test]
-fn trait_default_method_self_bound_implements_super_trait() {
- check_infer(
- r#"
- trait SuperTrait {
- fn foo(&self) -> i64;
- }
- trait Trait: SuperTrait {
- fn bar(&self) -> {
- let x = self.foo();
- }
- }
- "#,
- expect![[r#"
- 31..35 'self': &Self
- 85..89 'self': &Self
- 94..129 '{ ... }': ()
- 108..109 'x': i64
- 112..116 'self': &Self
- 112..122 'self.foo()': i64
- "#]],
- );
-}
-
-#[test]
-fn infer_project_associated_type() {
- check_infer(
- r#"
- trait Iterable {
- type Item;
- }
- struct S;
- impl Iterable for S { type Item = u32; }
- fn test<T: Iterable>() {
- let x: <S as Iterable>::Item = 1;
- let y: <T as Iterable>::Item = no_matter;
- let z: T::Item = no_matter;
- let a: <T>::Item = no_matter;
- }
- "#,
- expect![[r#"
- 108..261 '{ ...ter; }': ()
- 118..119 'x': u32
- 145..146 '1': u32
- 156..157 'y': Iterable::Item<T>
- 183..192 'no_matter': Iterable::Item<T>
- 202..203 'z': Iterable::Item<T>
- 215..224 'no_matter': Iterable::Item<T>
- 234..235 'a': Iterable::Item<T>
- 249..258 'no_matter': Iterable::Item<T>
- "#]],
- );
-}
-
-#[test]
-fn infer_return_associated_type() {
- check_infer(
- r#"
- trait Iterable {
- type Item;
- }
- struct S;
- impl Iterable for S { type Item = u32; }
- fn foo1<T: Iterable>(t: T) -> T::Item {}
- fn foo2<T: Iterable>(t: T) -> <T as Iterable>::Item {}
- fn foo3<T: Iterable>(t: T) -> <T>::Item {}
- fn test() {
- let x = foo1(S);
- let y = foo2(S);
- let z = foo3(S);
- }
- "#,
- expect![[r#"
- 106..107 't': T
- 123..125 '{}': ()
- 147..148 't': T
- 178..180 '{}': ()
- 202..203 't': T
- 221..223 '{}': ()
- 234..300 '{ ...(S); }': ()
- 244..245 'x': u32
- 248..252 'foo1': fn foo1<S>(S) -> <S as Iterable>::Item
- 248..255 'foo1(S)': u32
- 253..254 'S': S
- 265..266 'y': u32
- 269..273 'foo2': fn foo2<S>(S) -> <S as Iterable>::Item
- 269..276 'foo2(S)': u32
- 274..275 'S': S
- 286..287 'z': u32
- 290..294 'foo3': fn foo3<S>(S) -> <S as Iterable>::Item
- 290..297 'foo3(S)': u32
- 295..296 'S': S
- "#]],
- );
-}
-
-#[test]
-fn infer_associated_type_bound() {
- check_infer(
- r#"
- trait Iterable {
- type Item;
- }
- fn test<T: Iterable<Item=u32>>() {
- let y: T::Item = unknown;
- }
- "#,
- expect![[r#"
- 67..100 '{ ...own; }': ()
- 77..78 'y': u32
- 90..97 'unknown': u32
- "#]],
- );
-}
-
-#[test]
-fn infer_const_body() {
- check_infer(
- r#"
- const A: u32 = 1 + 1;
- static B: u64 = { let x = 1; x };
- "#,
- expect![[r#"
- 15..16 '1': u32
- 15..20 '1 + 1': u32
- 19..20 '1': u32
- 38..54 '{ let ...1; x }': u64
- 44..45 'x': u64
- 48..49 '1': u64
- 51..52 'x': u64
- "#]],
- );
-}
-
-#[test]
-fn tuple_struct_fields() {
- check_infer(
- r#"
- struct S(i32, u64);
- fn test() -> u64 {
- let a = S(4, 6);
- let b = a.0;
- a.1
- }
- "#,
- expect![[r#"
- 37..86 '{ ... a.1 }': u64
- 47..48 'a': S
- 51..52 'S': S(i32, u64) -> S
- 51..58 'S(4, 6)': S
- 53..54 '4': i32
- 56..57 '6': u64
- 68..69 'b': i32
- 72..73 'a': S
- 72..75 'a.0': i32
- 81..82 'a': S
- 81..84 'a.1': u64
- "#]],
- );
-}
-
-#[test]
-fn tuple_struct_with_fn() {
- check_infer(
- r#"
- struct S(fn(u32) -> u64);
- fn test() -> u64 {
- let a = S(|i| 2*i);
- let b = a.0(4);
- a.0(2)
- }
- "#,
- expect![[r#"
- 43..101 '{ ...0(2) }': u64
- 53..54 'a': S
- 57..58 'S': S(fn(u32) -> u64) -> S
- 57..67 'S(|i| 2*i)': S
- 59..66 '|i| 2*i': |u32| -> u64
- 60..61 'i': u32
- 63..64 '2': u32
- 63..66 '2*i': u32
- 65..66 'i': u32
- 77..78 'b': u64
- 81..82 'a': S
- 81..84 'a.0': fn(u32) -> u64
- 81..87 'a.0(4)': u64
- 85..86 '4': u32
- 93..94 'a': S
- 93..96 'a.0': fn(u32) -> u64
- 93..99 'a.0(2)': u64
- 97..98 '2': u32
- "#]],
- );
-}
-
-#[test]
-fn indexing_arrays() {
- check_infer(
- "fn main() { &mut [9][2]; }",
- expect![[r#"
- 10..26 '{ &mut...[2]; }': ()
- 12..23 '&mut [9][2]': &mut {unknown}
- 17..20 '[9]': [i32; _]
- 17..23 '[9][2]': {unknown}
- 18..19 '9': i32
- 21..22 '2': i32
- "#]],
- )
-}
-
-#[test]
-fn infer_ops_index() {
- check_types(
- r#"
-//- /main.rs crate:main deps:std
-struct Bar;
-struct Foo;
-
-impl std::ops::Index<u32> for Bar {
- type Output = Foo;
-}
-
-fn test() {
- let a = Bar;
- let b = a[1u32];
- b;
-} //^ Foo
-
-//- /std.rs crate:std
-#[prelude_import] use ops::*;
-mod ops {
- #[lang = "index"]
- pub trait Index<Idx> {
- type Output;
- }
-}
-"#,
- );
-}
-
-#[test]
-fn infer_ops_index_int() {
- check_types(
- r#"
-//- /main.rs crate:main deps:std
-struct Bar;
-struct Foo;
-
-impl std::ops::Index<u32> for Bar {
- type Output = Foo;
-}
-
-struct Range;
-impl std::ops::Index<Range> for Bar {
- type Output = Bar;
-}
-
-fn test() {
- let a = Bar;
- let b = a[1];
- b;
- //^ Foo
-}
-
-//- /std.rs crate:std
-#[prelude_import] use ops::*;
-mod ops {
- #[lang = "index"]
- pub trait Index<Idx> {
- type Output;
- }
-}
-"#,
- );
-}
-
-#[test]
-fn infer_ops_index_autoderef() {
- check_types(
- r#"
-//- /main.rs crate:main deps:std
-fn test() {
- let a = &[1u32, 2, 3];
- let b = a[1u32];
- b;
-} //^ u32
-
-//- /std.rs crate:std
-impl<T> ops::Index<u32> for [T] {
- type Output = T;
-}
-
-#[prelude_import] use ops::*;
-mod ops {
- #[lang = "index"]
- pub trait Index<Idx> {
- type Output;
- }
-}
-"#,
- );
-}
-
-#[test]
-fn deref_trait() {
- check_types(
- r#"
-#[lang = "deref"]
-trait Deref {
- type Target;
- fn deref(&self) -> &Self::Target;
-}
-
-struct Arc<T>;
-impl<T> Deref for Arc<T> {
- type Target = T;
-}
-
-struct S;
-impl S {
- fn foo(&self) -> u128 {}
-}
-
-fn test(s: Arc<S>) {
- (*s, s.foo());
-} //^ (S, u128)
-"#,
- );
-}
-
-#[test]
-fn deref_trait_with_inference_var() {
- check_types(
- r#"
-//- /main.rs
-#[lang = "deref"]
-trait Deref {
- type Target;
- fn deref(&self) -> &Self::Target;
-}
-
-struct Arc<T>;
-fn new_arc<T>() -> Arc<T> {}
-impl<T> Deref for Arc<T> {
- type Target = T;
-}
-
-struct S;
-fn foo(a: Arc<S>) {}
-
-fn test() {
- let a = new_arc();
- let b = (*a);
- //^ S
- foo(a);
-}
-"#,
- );
-}
-
-#[test]
-fn deref_trait_infinite_recursion() {
- check_types(
- r#"
-#[lang = "deref"]
-trait Deref {
- type Target;
- fn deref(&self) -> &Self::Target;
-}
-
-struct S;
-
-impl Deref for S {
- type Target = S;
-}
-
-fn test(s: S) {
- s.foo();
-} //^ {unknown}
-"#,
- );
-}
-
-#[test]
-fn deref_trait_with_question_mark_size() {
- check_types(
- r#"
-#[lang = "deref"]
-trait Deref {
- type Target;
- fn deref(&self) -> &Self::Target;
-}
-
-struct Arc<T>;
-impl<T> Deref for Arc<T> {
- type Target = T;
-}
-
-struct S;
-impl S {
- fn foo(&self) -> u128 {}
-}
-
-fn test(s: Arc<S>) {
- (*s, s.foo());
-} //^ (S, u128)
-"#,
- );
-}
-
-#[test]
-fn obligation_from_function_clause() {
- check_types(
- r#"
-struct S;
-
-trait Trait<T> {}
-impl Trait<u32> for S {}
-
-fn foo<T: Trait<U>, U>(t: T) -> U {}
-
-fn test(s: S) {
- (foo(s));
-} //^ u32
-"#,
- );
-}
-
-#[test]
-fn obligation_from_method_clause() {
- check_types(
- r#"
-//- /main.rs
-struct S;
-
-trait Trait<T> {}
-impl Trait<isize> for S {}
-
-struct O;
-impl O {
- fn foo<T: Trait<U>, U>(&self, t: T) -> U {}
-}
-
-fn test() {
- O.foo(S);
-} //^ isize
-"#,
- );
-}
-
-#[test]
-fn obligation_from_self_method_clause() {
- check_types(
- r#"
-struct S;
-
-trait Trait<T> {}
-impl Trait<i64> for S {}
-
-impl S {
- fn foo<U>(&self) -> U where Self: Trait<U> {}
-}
-
-fn test() {
- S.foo();
-} //^ i64
-"#,
- );
-}
-
-#[test]
-fn obligation_from_impl_clause() {
- check_types(
- r#"
-struct S;
-
-trait Trait<T> {}
-impl Trait<&str> for S {}
-
-struct O<T>;
-impl<U, T: Trait<U>> O<T> {
- fn foo(&self) -> U {}
-}
-
-fn test(o: O<S>) {
- o.foo();
-} //^ &str
-"#,
- );
-}
-
-#[test]
-fn generic_param_env_1() {
- check_types(
- r#"
-trait Clone {}
-trait Trait { fn foo(self) -> u128; }
-struct S;
-impl Clone for S {}
-impl<T> Trait for T where T: Clone {}
-fn test<T: Clone>(t: T) { t.foo(); }
- //^ u128
-"#,
- );
-}
-
-#[test]
-fn generic_param_env_1_not_met() {
- check_types(
- r#"
-//- /main.rs
-trait Clone {}
-trait Trait { fn foo(self) -> u128; }
-struct S;
-impl Clone for S {}
-impl<T> Trait for T where T: Clone {}
-fn test<T>(t: T) { t.foo(); }
- //^ {unknown}
-"#,
- );
-}
-
-#[test]
-fn generic_param_env_2() {
- check_types(
- r#"
-trait Trait { fn foo(self) -> u128; }
-struct S;
-impl Trait for S {}
-fn test<T: Trait>(t: T) { t.foo(); }
- //^ u128
-"#,
- );
-}
-
-#[test]
-fn generic_param_env_2_not_met() {
- check_types(
- r#"
-trait Trait { fn foo(self) -> u128; }
-struct S;
-impl Trait for S {}
-fn test<T>(t: T) { t.foo(); }
- //^ {unknown}
-"#,
- );
-}
-
-#[test]
-fn generic_param_env_deref() {
- check_types(
- r#"
-#[lang = "deref"]
-trait Deref {
- type Target;
-}
-trait Trait {}
-impl<T> Deref for T where T: Trait {
- type Target = i128;
-}
-fn test<T: Trait>(t: T) { (*t); }
- //^ i128
-"#,
- );
-}
-
-#[test]
-fn associated_type_placeholder() {
- // inside the generic function, the associated type gets normalized to a placeholder `ApplL::Out<T>` [https://rust-lang.github.io/rustc-guide/traits/associated-types.html#placeholder-associated-types].
- check_types(
- r#"
-pub trait ApplyL {
- type Out;
-}
-
-pub struct RefMutL<T>;
-
-impl<T> ApplyL for RefMutL<T> {
- type Out = <T as ApplyL>::Out;
-}
-
-fn test<T: ApplyL>() {
- let y: <RefMutL<T> as ApplyL>::Out = no_matter;
- y;
-} //^ ApplyL::Out<T>
-"#,
- );
-}
-
-#[test]
-fn associated_type_placeholder_2() {
- check_types(
- r#"
-pub trait ApplyL {
- type Out;
-}
-fn foo<T: ApplyL>(t: T) -> <T as ApplyL>::Out;
-
-fn test<T: ApplyL>(t: T) {
- let y = foo(t);
- y;
-} //^ ApplyL::Out<T>
-"#,
- );
-}
-
-#[test]
-fn argument_impl_trait() {
- check_infer_with_mismatches(
- r#"
- trait Trait<T> {
- fn foo(&self) -> T;
- fn foo2(&self) -> i64;
- }
- fn bar(x: impl Trait<u16>) {}
- struct S<T>(T);
- impl<T> Trait<T> for S<T> {}
-
- fn test(x: impl Trait<u64>, y: &impl Trait<u32>) {
- x;
- y;
- let z = S(1);
- bar(z);
- x.foo();
- y.foo();
- z.foo();
- x.foo2();
- y.foo2();
- z.foo2();
- }
- "#,
- expect![[r#"
- 29..33 'self': &Self
- 54..58 'self': &Self
- 77..78 'x': impl Trait<u16>
- 97..99 '{}': ()
- 154..155 'x': impl Trait<u64>
- 174..175 'y': &impl Trait<u32>
- 195..323 '{ ...2(); }': ()
- 201..202 'x': impl Trait<u64>
- 208..209 'y': &impl Trait<u32>
- 219..220 'z': S<u16>
- 223..224 'S': S<u16>(u16) -> S<u16>
- 223..227 'S(1)': S<u16>
- 225..226 '1': u16
- 233..236 'bar': fn bar(S<u16>)
- 233..239 'bar(z)': ()
- 237..238 'z': S<u16>
- 245..246 'x': impl Trait<u64>
- 245..252 'x.foo()': u64
- 258..259 'y': &impl Trait<u32>
- 258..265 'y.foo()': u32
- 271..272 'z': S<u16>
- 271..278 'z.foo()': u16
- 284..285 'x': impl Trait<u64>
- 284..292 'x.foo2()': i64
- 298..299 'y': &impl Trait<u32>
- 298..306 'y.foo2()': i64
- 312..313 'z': S<u16>
- 312..320 'z.foo2()': i64
- "#]],
- );
-}
-
-#[test]
-fn argument_impl_trait_type_args_1() {
- check_infer_with_mismatches(
- r#"
- trait Trait {}
- trait Foo {
- // this function has an implicit Self param, an explicit type param,
- // and an implicit impl Trait param!
- fn bar<T>(x: impl Trait) -> T { loop {} }
- }
- fn foo<T>(x: impl Trait) -> T { loop {} }
- struct S;
- impl Trait for S {}
- struct F;
- impl Foo for F {}
-
- fn test() {
- Foo::bar(S);
- <F as Foo>::bar(S);
- F::bar(S);
- Foo::bar::<u32>(S);
- <F as Foo>::bar::<u32>(S);
-
- foo(S);
- foo::<u32>(S);
- foo::<u32, i32>(S); // we should ignore the extraneous i32
- }
- "#,
- expect![[r#"
- 155..156 'x': impl Trait
- 175..186 '{ loop {} }': T
- 177..184 'loop {}': !
- 182..184 '{}': ()
- 199..200 'x': impl Trait
- 219..230 '{ loop {} }': T
- 221..228 'loop {}': !
- 226..228 '{}': ()
- 300..509 '{ ... i32 }': ()
- 306..314 'Foo::bar': fn bar<{unknown}, {unknown}>(S) -> {unknown}
- 306..317 'Foo::bar(S)': {unknown}
- 315..316 'S': S
- 323..338 '<F as Foo>::bar': fn bar<F, {unknown}>(S) -> {unknown}
- 323..341 '<F as ...bar(S)': {unknown}
- 339..340 'S': S
- 347..353 'F::bar': fn bar<F, {unknown}>(S) -> {unknown}
- 347..356 'F::bar(S)': {unknown}
- 354..355 'S': S
- 362..377 'Foo::bar::<u32>': fn bar<{unknown}, u32>(S) -> u32
- 362..380 'Foo::b...32>(S)': u32
- 378..379 'S': S
- 386..408 '<F as ...:<u32>': fn bar<F, u32>(S) -> u32
- 386..411 '<F as ...32>(S)': u32
- 409..410 'S': S
- 418..421 'foo': fn foo<{unknown}>(S) -> {unknown}
- 418..424 'foo(S)': {unknown}
- 422..423 'S': S
- 430..440 'foo::<u32>': fn foo<u32>(S) -> u32
- 430..443 'foo::<u32>(S)': u32
- 441..442 'S': S
- 449..464 'foo::<u32, i32>': fn foo<u32>(S) -> u32
- 449..467 'foo::<...32>(S)': u32
- 465..466 'S': S
- "#]],
- );
-}
-
-#[test]
-fn argument_impl_trait_type_args_2() {
- check_infer_with_mismatches(
- r#"
- trait Trait {}
- struct S;
- impl Trait for S {}
- struct F<T>;
- impl<T> F<T> {
- fn foo<U>(self, x: impl Trait) -> (T, U) { loop {} }
- }
-
- fn test() {
- F.foo(S);
- F::<u32>.foo(S);
- F::<u32>.foo::<i32>(S);
- F::<u32>.foo::<i32, u32>(S); // extraneous argument should be ignored
- }
- "#,
- expect![[r#"
- 87..91 'self': F<T>
- 93..94 'x': impl Trait
- 118..129 '{ loop {} }': (T, U)
- 120..127 'loop {}': !
- 125..127 '{}': ()
- 143..283 '{ ...ored }': ()
- 149..150 'F': F<{unknown}>
- 149..157 'F.foo(S)': ({unknown}, {unknown})
- 155..156 'S': S
- 163..171 'F::<u32>': F<u32>
- 163..178 'F::<u32>.foo(S)': (u32, {unknown})
- 176..177 'S': S
- 184..192 'F::<u32>': F<u32>
- 184..206 'F::<u3...32>(S)': (u32, i32)
- 204..205 'S': S
- 212..220 'F::<u32>': F<u32>
- 212..239 'F::<u3...32>(S)': (u32, i32)
- 237..238 'S': S
- "#]],
- );
-}
-
-#[test]
-fn argument_impl_trait_to_fn_pointer() {
- check_infer_with_mismatches(
- r#"
- trait Trait {}
- fn foo(x: impl Trait) { loop {} }
- struct S;
- impl Trait for S {}
-
- fn test() {
- let f: fn(S) -> () = foo;
- }
- "#,
- expect![[r#"
- 22..23 'x': impl Trait
- 37..48 '{ loop {} }': ()
- 39..46 'loop {}': !
- 44..46 '{}': ()
- 90..123 '{ ...foo; }': ()
- 100..101 'f': fn(S)
- 117..120 'foo': fn foo(S)
- "#]],
- );
-}
-
-#[test]
-fn impl_trait() {
- check_infer(
- r#"
- trait Trait<T> {
- fn foo(&self) -> T;
- fn foo2(&self) -> i64;
- }
- fn bar() -> impl Trait<u64> {}
-
- fn test(x: impl Trait<u64>, y: &impl Trait<u64>) {
- x;
- y;
- let z = bar();
- x.foo();
- y.foo();
- z.foo();
- x.foo2();
- y.foo2();
- z.foo2();
- }
- "#,
- expect![[r#"
- 29..33 'self': &Self
- 54..58 'self': &Self
- 98..100 '{}': ()
- 110..111 'x': impl Trait<u64>
- 130..131 'y': &impl Trait<u64>
- 151..268 '{ ...2(); }': ()
- 157..158 'x': impl Trait<u64>
- 164..165 'y': &impl Trait<u64>
- 175..176 'z': impl Trait<u64>
- 179..182 'bar': fn bar() -> impl Trait<u64>
- 179..184 'bar()': impl Trait<u64>
- 190..191 'x': impl Trait<u64>
- 190..197 'x.foo()': u64
- 203..204 'y': &impl Trait<u64>
- 203..210 'y.foo()': u64
- 216..217 'z': impl Trait<u64>
- 216..223 'z.foo()': u64
- 229..230 'x': impl Trait<u64>
- 229..237 'x.foo2()': i64
- 243..244 'y': &impl Trait<u64>
- 243..251 'y.foo2()': i64
- 257..258 'z': impl Trait<u64>
- 257..265 'z.foo2()': i64
- "#]],
- );
-}
-
-#[test]
-fn simple_return_pos_impl_trait() {
- mark::check!(lower_rpit);
- check_infer(
- r#"
- trait Trait<T> {
- fn foo(&self) -> T;
- }
- fn bar() -> impl Trait<u64> { loop {} }
-
- fn test() {
- let a = bar();
- a.foo();
- }
- "#,
- expect![[r#"
- 29..33 'self': &Self
- 71..82 '{ loop {} }': !
- 73..80 'loop {}': !
- 78..80 '{}': ()
- 94..129 '{ ...o(); }': ()
- 104..105 'a': impl Trait<u64>
- 108..111 'bar': fn bar() -> impl Trait<u64>
- 108..113 'bar()': impl Trait<u64>
- 119..120 'a': impl Trait<u64>
- 119..126 'a.foo()': u64
- "#]],
- );
-}
-
-#[test]
-fn more_return_pos_impl_trait() {
- check_infer(
- r#"
- trait Iterator {
- type Item;
- fn next(&mut self) -> Self::Item;
- }
- trait Trait<T> {
- fn foo(&self) -> T;
- }
- fn bar() -> (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>) { loop {} }
- fn baz<T>(t: T) -> (impl Iterator<Item = impl Trait<T>>, impl Trait<T>) { loop {} }
-
- fn test() {
- let (a, b) = bar();
- a.next().foo();
- b.foo();
- let (c, d) = baz(1u128);
- c.next().foo();
- d.foo();
- }
- "#,
- expect![[r#"
- 49..53 'self': &mut Self
- 101..105 'self': &Self
- 184..195 '{ loop {} }': ({unknown}, {unknown})
- 186..193 'loop {}': !
- 191..193 '{}': ()
- 206..207 't': T
- 268..279 '{ loop {} }': ({unknown}, {unknown})
- 270..277 'loop {}': !
- 275..277 '{}': ()
- 291..413 '{ ...o(); }': ()
- 301..307 '(a, b)': (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>)
- 302..303 'a': impl Iterator<Item = impl Trait<u32>>
- 305..306 'b': impl Trait<u64>
- 310..313 'bar': fn bar() -> (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>)
- 310..315 'bar()': (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>)
- 321..322 'a': impl Iterator<Item = impl Trait<u32>>
- 321..329 'a.next()': impl Trait<u32>
- 321..335 'a.next().foo()': u32
- 341..342 'b': impl Trait<u64>
- 341..348 'b.foo()': u64
- 358..364 '(c, d)': (impl Iterator<Item = impl Trait<u128>>, impl Trait<u128>)
- 359..360 'c': impl Iterator<Item = impl Trait<u128>>
- 362..363 'd': impl Trait<u128>
- 367..370 'baz': fn baz<u128>(u128) -> (impl Iterator<Item = impl Trait<u128>>, impl Trait<u128>)
- 367..377 'baz(1u128)': (impl Iterator<Item = impl Trait<u128>>, impl Trait<u128>)
- 371..376 '1u128': u128
- 383..384 'c': impl Iterator<Item = impl Trait<u128>>
- 383..391 'c.next()': impl Trait<u128>
- 383..397 'c.next().foo()': u128
- 403..404 'd': impl Trait<u128>
- 403..410 'd.foo()': u128
- "#]],
- );
-}
-
-#[test]
-fn dyn_trait() {
- check_infer(
- r#"
- trait Trait<T> {
- fn foo(&self) -> T;
- fn foo2(&self) -> i64;
- }
- fn bar() -> dyn Trait<u64> {}
-
- fn test(x: dyn Trait<u64>, y: &dyn Trait<u64>) {
- x;
- y;
- let z = bar();
- x.foo();
- y.foo();
- z.foo();
- x.foo2();
- y.foo2();
- z.foo2();
- }
- "#,
- expect![[r#"
- 29..33 'self': &Self
- 54..58 'self': &Self
- 97..99 '{}': ()
- 109..110 'x': dyn Trait<u64>
- 128..129 'y': &dyn Trait<u64>
- 148..265 '{ ...2(); }': ()
- 154..155 'x': dyn Trait<u64>
- 161..162 'y': &dyn Trait<u64>
- 172..173 'z': dyn Trait<u64>
- 176..179 'bar': fn bar() -> dyn Trait<u64>
- 176..181 'bar()': dyn Trait<u64>
- 187..188 'x': dyn Trait<u64>
- 187..194 'x.foo()': u64
- 200..201 'y': &dyn Trait<u64>
- 200..207 'y.foo()': u64
- 213..214 'z': dyn Trait<u64>
- 213..220 'z.foo()': u64
- 226..227 'x': dyn Trait<u64>
- 226..234 'x.foo2()': i64
- 240..241 'y': &dyn Trait<u64>
- 240..248 'y.foo2()': i64
- 254..255 'z': dyn Trait<u64>
- 254..262 'z.foo2()': i64
- "#]],
- );
-}
-
-#[test]
-fn dyn_trait_in_impl() {
- check_infer(
- r#"
- trait Trait<T, U> {
- fn foo(&self) -> (T, U);
- }
- struct S<T, U> {}
- impl<T, U> S<T, U> {
- fn bar(&self) -> &dyn Trait<T, U> { loop {} }
- }
- trait Trait2<T, U> {
- fn baz(&self) -> (T, U);
- }
- impl<T, U> Trait2<T, U> for dyn Trait<T, U> { }
-
- fn test(s: S<u32, i32>) {
- s.bar().baz();
- }
- "#,
- expect![[r#"
- 32..36 'self': &Self
- 102..106 'self': &S<T, U>
- 128..139 '{ loop {} }': &dyn Trait<T, U>
- 130..137 'loop {}': !
- 135..137 '{}': ()
- 175..179 'self': &Self
- 251..252 's': S<u32, i32>
- 267..289 '{ ...z(); }': ()
- 273..274 's': S<u32, i32>
- 273..280 's.bar()': &dyn Trait<u32, i32>
- 273..286 's.bar().baz()': (u32, i32)
- "#]],
- );
-}
-
-#[test]
-fn dyn_trait_bare() {
- check_infer(
- r#"
- trait Trait {
- fn foo(&self) -> u64;
- }
- fn bar() -> Trait {}
-
- fn test(x: Trait, y: &Trait) -> u64 {
- x;
- y;
- let z = bar();
- x.foo();
- y.foo();
- z.foo();
- }
- "#,
- expect![[r#"
- 26..30 'self': &Self
- 60..62 '{}': ()
- 72..73 'x': dyn Trait
- 82..83 'y': &dyn Trait
- 100..175 '{ ...o(); }': ()
- 106..107 'x': dyn Trait
- 113..114 'y': &dyn Trait
- 124..125 'z': dyn Trait
- 128..131 'bar': fn bar() -> dyn Trait
- 128..133 'bar()': dyn Trait
- 139..140 'x': dyn Trait
- 139..146 'x.foo()': u64
- 152..153 'y': &dyn Trait
- 152..159 'y.foo()': u64
- 165..166 'z': dyn Trait
- 165..172 'z.foo()': u64
- "#]],
- );
-}
-
-#[test]
-fn weird_bounds() {
- check_infer(
- r#"
- trait Trait {}
- fn test(a: impl Trait + 'lifetime, b: impl 'lifetime, c: impl (Trait), d: impl ('lifetime), e: impl ?Sized, f: impl Trait + ?Sized) {}
- "#,
- expect![[r#"
- 23..24 'a': impl Trait + {error}
- 50..51 'b': impl {error}
- 69..70 'c': impl Trait
- 86..87 'd': impl {error}
- 107..108 'e': impl {error}
- 123..124 'f': impl Trait + {error}
- 147..149 '{}': ()
- "#]],
- );
-}
-
-#[test]
-#[ignore]
-fn error_bound_chalk() {
- check_types(
- r#"
-trait Trait {
- fn foo(&self) -> u32 {}
-}
-
-fn test(x: (impl Trait + UnknownTrait)) {
- x.foo();
-} //^ u32
-"#,
- );
-}
-
-#[test]
-fn assoc_type_bindings() {
- check_infer(
- r#"
- trait Trait {
- type Type;
- }
-
- fn get<T: Trait>(t: T) -> <T as Trait>::Type {}
- fn get2<U, T: Trait<Type = U>>(t: T) -> U {}
- fn set<T: Trait<Type = u64>>(t: T) -> T {t}
-
- struct S<T>;
- impl<T> Trait for S<T> { type Type = T; }
-
- fn test<T: Trait<Type = u32>>(x: T, y: impl Trait<Type = i64>) {
- get(x);
- get2(x);
- get(y);
- get2(y);
- get(set(S));
- get2(set(S));
- get2(S::<str>);
- }
- "#,
- expect![[r#"
- 49..50 't': T
- 77..79 '{}': ()
- 111..112 't': T
- 122..124 '{}': ()
- 154..155 't': T
- 165..168 '{t}': T
- 166..167 't': T
- 256..257 'x': T
- 262..263 'y': impl Trait<Type = i64>
- 289..397 '{ ...r>); }': ()
- 295..298 'get': fn get<T>(T) -> <T as Trait>::Type
- 295..301 'get(x)': u32
- 299..300 'x': T
- 307..311 'get2': fn get2<u32, T>(T) -> u32
- 307..314 'get2(x)': u32
- 312..313 'x': T
- 320..323 'get': fn get<impl Trait<Type = i64>>(impl Trait<Type = i64>) -> <impl Trait<Type = i64> as Trait>::Type
- 320..326 'get(y)': i64
- 324..325 'y': impl Trait<Type = i64>
- 332..336 'get2': fn get2<i64, impl Trait<Type = i64>>(impl Trait<Type = i64>) -> i64
- 332..339 'get2(y)': i64
- 337..338 'y': impl Trait<Type = i64>
- 345..348 'get': fn get<S<u64>>(S<u64>) -> <S<u64> as Trait>::Type
- 345..356 'get(set(S))': u64
- 349..352 'set': fn set<S<u64>>(S<u64>) -> S<u64>
- 349..355 'set(S)': S<u64>
- 353..354 'S': S<u64>
- 362..366 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64
- 362..374 'get2(set(S))': u64
- 367..370 'set': fn set<S<u64>>(S<u64>) -> S<u64>
- 367..373 'set(S)': S<u64>
- 371..372 'S': S<u64>
- 380..384 'get2': fn get2<str, S<str>>(S<str>) -> str
- 380..394 'get2(S::<str>)': str
- 385..393 'S::<str>': S<str>
- "#]],
- );
-}
-
-#[test]
-fn impl_trait_assoc_binding_projection_bug() {
- check_types(
- r#"
-//- /main.rs crate:main deps:std
-pub trait Language {
- type Kind;
-}
-pub enum RustLanguage {}
-impl Language for RustLanguage {
- type Kind = SyntaxKind;
-}
-struct SyntaxNode<L> {}
-fn foo() -> impl Iterator<Item = SyntaxNode<RustLanguage>> {}
-
-trait Clone {
- fn clone(&self) -> Self;
-}
-
-fn api_walkthrough() {
- for node in foo() {
- node.clone();
- } //^ {unknown}
-}
-
-//- /std.rs crate:std
-#[prelude_import] use iter::*;
-mod iter {
- trait IntoIterator {
- type Item;
- }
- trait Iterator {
- type Item;
- }
- impl<T: Iterator> IntoIterator for T {
- type Item = <T as Iterator>::Item;
- }
-}
-"#,
- );
-}
-
-#[test]
-fn projection_eq_within_chalk() {
- check_infer(
- r#"
- trait Trait1 {
- type Type;
- }
- trait Trait2<T> {
- fn foo(self) -> T;
- }
- impl<T, U> Trait2<T> for U where U: Trait1<Type = T> {}
-
- fn test<T: Trait1<Type = u32>>(x: T) {
- x.foo();
- }
- "#,
- expect![[r#"
- 61..65 'self': Self
- 163..164 'x': T
- 169..185 '{ ...o(); }': ()
- 175..176 'x': T
- 175..182 'x.foo()': u32
- "#]],
- );
-}
-
-#[test]
-fn where_clause_trait_in_scope_for_method_resolution() {
- check_types(
- r#"
-mod foo {
- trait Trait {
- fn foo(&self) -> u32 {}
- }
-}
-
-fn test<T: foo::Trait>(x: T) {
- x.foo();
-} //^ u32
-"#,
- );
-}
-
-#[test]
-fn super_trait_method_resolution() {
- check_infer(
- r#"
- mod foo {
- trait SuperTrait {
- fn foo(&self) -> u32 {}
- }
- }
- trait Trait1: foo::SuperTrait {}
- trait Trait2 where Self: foo::SuperTrait {}
-
- fn test<T: Trait1, U: Trait2>(x: T, y: U) {
- x.foo();
- y.foo();
- }
- "#,
- expect![[r#"
- 49..53 'self': &Self
- 62..64 '{}': ()
- 181..182 'x': T
- 187..188 'y': U
- 193..222 '{ ...o(); }': ()
- 199..200 'x': T
- 199..206 'x.foo()': u32
- 212..213 'y': U
- 212..219 'y.foo()': u32
- "#]],
- );
-}
-
-#[test]
-fn super_trait_impl_trait_method_resolution() {
- check_infer(
- r#"
- mod foo {
- trait SuperTrait {
- fn foo(&self) -> u32 {}
- }
- }
- trait Trait1: foo::SuperTrait {}
-
- fn test(x: &impl Trait1) {
- x.foo();
- }
- "#,
- expect![[r#"
- 49..53 'self': &Self
- 62..64 '{}': ()
- 115..116 'x': &impl Trait1
- 132..148 '{ ...o(); }': ()
- 138..139 'x': &impl Trait1
- 138..145 'x.foo()': u32
- "#]],
- );
-}
-
-#[test]
-fn super_trait_cycle() {
- // This just needs to not crash
- check_infer(
- r#"
- trait A: B {}
- trait B: A {}
-
- fn test<T: A>(x: T) {
- x.foo();
- }
- "#,
- expect![[r#"
- 43..44 'x': T
- 49..65 '{ ...o(); }': ()
- 55..56 'x': T
- 55..62 'x.foo()': {unknown}
- "#]],
- );
-}
-
-#[test]
-fn super_trait_assoc_type_bounds() {
- check_infer(
- r#"
- trait SuperTrait { type Type; }
- trait Trait where Self: SuperTrait {}
-
- fn get2<U, T: Trait<Type = U>>(t: T) -> U {}
- fn set<T: Trait<Type = u64>>(t: T) -> T {t}
-
- struct S<T>;
- impl<T> SuperTrait for S<T> { type Type = T; }
- impl<T> Trait for S<T> {}
-
- fn test() {
- get2(set(S));
- }
- "#,
- expect![[r#"
- 102..103 't': T
- 113..115 '{}': ()
- 145..146 't': T
- 156..159 '{t}': T
- 157..158 't': T
- 258..279 '{ ...S)); }': ()
- 264..268 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64
- 264..276 'get2(set(S))': u64
- 269..272 'set': fn set<S<u64>>(S<u64>) -> S<u64>
- 269..275 'set(S)': S<u64>
- 273..274 'S': S<u64>
- "#]],
- );
-}
-
-#[test]
-fn fn_trait() {
- check_infer(
- r#"
- trait FnOnce<Args> {
- type Output;
-
- fn call_once(self, args: Args) -> <Self as FnOnce<Args>>::Output;
- }
-
- fn test<F: FnOnce(u32, u64) -> u128>(f: F) {
- f.call_once((1, 2));
- }
- "#,
- expect![[r#"
- 56..60 'self': Self
- 62..66 'args': Args
- 149..150 'f': F
- 155..183 '{ ...2)); }': ()
- 161..162 'f': F
- 161..180 'f.call...1, 2))': u128
- 173..179 '(1, 2)': (u32, u64)
- 174..175 '1': u32
- 177..178 '2': u64
- "#]],
- );
-}
-
-#[test]
-fn fn_ptr_and_item() {
- check_infer(
- r#"
- #[lang="fn_once"]
- trait FnOnce<Args> {
- type Output;
-
- fn call_once(self, args: Args) -> Self::Output;
- }
-
- trait Foo<T> {
- fn foo(&self) -> T;
- }
-
- struct Bar<T>(T);
-
- impl<A1, R, F: FnOnce(A1) -> R> Foo<(A1, R)> for Bar<F> {
- fn foo(&self) -> (A1, R) {}
- }
-
- enum Opt<T> { None, Some(T) }
- impl<T> Opt<T> {
- fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Opt<U> {}
- }
-
- fn test() {
- let bar: Bar<fn(u8) -> u32>;
- bar.foo();
-
- let opt: Opt<u8>;
- let f: fn(u8) -> u32;
- opt.map(f);
- }
- "#,
- expect![[r#"
- 74..78 'self': Self
- 80..84 'args': Args
- 139..143 'self': &Self
- 243..247 'self': &Bar<F>
- 260..262 '{}': ()
- 346..350 'self': Opt<T>
- 352..353 'f': F
- 368..370 '{}': ()
- 384..500 '{ ...(f); }': ()
- 394..397 'bar': Bar<fn(u8) -> u32>
- 423..426 'bar': Bar<fn(u8) -> u32>
- 423..432 'bar.foo()': (u8, u32)
- 443..446 'opt': Opt<u8>
- 465..466 'f': fn(u8) -> u32
- 487..490 'opt': Opt<u8>
- 487..497 'opt.map(f)': Opt<u32>
- 495..496 'f': fn(u8) -> u32
- "#]],
- );
-}
-
-#[test]
-fn fn_trait_deref_with_ty_default() {
- check_infer(
- r#"
- #[lang = "deref"]
- trait Deref {
- type Target;
-
- fn deref(&self) -> &Self::Target;
- }
-
- #[lang="fn_once"]
- trait FnOnce<Args> {
- type Output;
-
- fn call_once(self, args: Args) -> Self::Output;
- }
-
- struct Foo;
-
- impl Foo {
- fn foo(&self) -> usize {}
- }
-
- struct Lazy<T, F = fn() -> T>(F);
-
- impl<T, F> Lazy<T, F> {
- pub fn new(f: F) -> Lazy<T, F> {}
- }
-
- impl<T, F: FnOnce() -> T> Deref for Lazy<T, F> {
- type Target = T;
- }
-
- fn test() {
- let lazy1: Lazy<Foo, _> = Lazy::new(|| Foo);
- let r1 = lazy1.foo();
-
- fn make_foo_fn() -> Foo {}
- let make_foo_fn_ptr: fn() -> Foo = make_foo_fn;
- let lazy2: Lazy<Foo, _> = Lazy::new(make_foo_fn_ptr);
- let r2 = lazy2.foo();
- }
- "#,
- expect![[r#"
- 64..68 'self': &Self
- 165..169 'self': Self
- 171..175 'args': Args
- 239..243 'self': &Foo
- 254..256 '{}': ()
- 334..335 'f': F
- 354..356 '{}': ()
- 443..689 '{ ...o(); }': ()
- 453..458 'lazy1': Lazy<Foo, || -> Foo>
- 475..484 'Lazy::new': fn new<Foo, || -> Foo>(|| -> Foo) -> Lazy<Foo, || -> Foo>
- 475..492 'Lazy::...| Foo)': Lazy<Foo, || -> Foo>
- 485..491 '|| Foo': || -> Foo
- 488..491 'Foo': Foo
- 502..504 'r1': usize
- 507..512 'lazy1': Lazy<Foo, || -> Foo>
- 507..518 'lazy1.foo()': usize
- 560..575 'make_foo_fn_ptr': fn() -> Foo
- 591..602 'make_foo_fn': fn make_foo_fn() -> Foo
- 612..617 'lazy2': Lazy<Foo, fn() -> Foo>
- 634..643 'Lazy::new': fn new<Foo, fn() -> Foo>(fn() -> Foo) -> Lazy<Foo, fn() -> Foo>
- 634..660 'Lazy::...n_ptr)': Lazy<Foo, fn() -> Foo>
- 644..659 'make_foo_fn_ptr': fn() -> Foo
- 670..672 'r2': usize
- 675..680 'lazy2': Lazy<Foo, fn() -> Foo>
- 675..686 'lazy2.foo()': usize
- 549..551 '{}': ()
- "#]],
- );
-}
-
-#[test]
-fn closure_1() {
- check_infer(
- r#"
- #[lang = "fn_once"]
- trait FnOnce<Args> {
- type Output;
- }
-
- enum Option<T> { Some(T), None }
- impl<T> Option<T> {
- fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Option<U> {}
- }
-
- fn test() {
- let x = Option::Some(1u32);
- x.map(|v| v + 1);
- x.map(|_v| 1u64);
- let y: Option<i64> = x.map(|_v| 1);
- }
- "#,
- expect![[r#"
- 147..151 'self': Option<T>
- 153..154 'f': F
- 172..174 '{}': ()
- 188..307 '{ ... 1); }': ()
- 198..199 'x': Option<u32>
- 202..214 'Option::Some': Some<u32>(u32) -> Option<u32>
- 202..220 'Option...(1u32)': Option<u32>
- 215..219 '1u32': u32
- 226..227 'x': Option<u32>
- 226..242 'x.map(...v + 1)': Option<u32>
- 232..241 '|v| v + 1': |u32| -> u32
- 233..234 'v': u32
- 236..237 'v': u32
- 236..241 'v + 1': u32
- 240..241 '1': u32
- 248..249 'x': Option<u32>
- 248..264 'x.map(... 1u64)': Option<u64>
- 254..263 '|_v| 1u64': |u32| -> u64
- 255..257 '_v': u32
- 259..263 '1u64': u64
- 274..275 'y': Option<i64>
- 291..292 'x': Option<u32>
- 291..304 'x.map(|_v| 1)': Option<i64>
- 297..303 '|_v| 1': |u32| -> i64
- 298..300 '_v': u32
- 302..303 '1': i64
- "#]],
- );
-}
-
-#[test]
-fn closure_2() {
- check_infer(
- r#"
- trait FnOnce<Args> {
- type Output;
- }
-
- fn test<F: FnOnce(u32) -> u64>(f: F) {
- f(1);
- let g = |v| v + 1;
- g(1u64);
- let h = |v| 1u128 + v;
- }
- "#,
- expect![[r#"
- 72..73 'f': F
- 78..154 '{ ...+ v; }': ()
- 84..85 'f': F
- 84..88 'f(1)': {unknown}
- 86..87 '1': i32
- 98..99 'g': |u64| -> i32
- 102..111 '|v| v + 1': |u64| -> i32
- 103..104 'v': u64
- 106..107 'v': u64
- 106..111 'v + 1': i32
- 110..111 '1': i32
- 117..118 'g': |u64| -> i32
- 117..124 'g(1u64)': i32
- 119..123 '1u64': u64
- 134..135 'h': |u128| -> u128
- 138..151 '|v| 1u128 + v': |u128| -> u128
- 139..140 'v': u128
- 142..147 '1u128': u128
- 142..151 '1u128 + v': u128
- 150..151 'v': u128
- "#]],
- );
-}
-
-#[test]
-fn closure_as_argument_inference_order() {
- check_infer(
- r#"
- #[lang = "fn_once"]
- trait FnOnce<Args> {
- type Output;
- }
-
- fn foo1<T, U, F: FnOnce(T) -> U>(x: T, f: F) -> U {}
- fn foo2<T, U, F: FnOnce(T) -> U>(f: F, x: T) -> U {}
-
- struct S;
- impl S {
- fn method(self) -> u64;
-
- fn foo1<T, U, F: FnOnce(T) -> U>(self, x: T, f: F) -> U {}
- fn foo2<T, U, F: FnOnce(T) -> U>(self, f: F, x: T) -> U {}
- }
-
- fn test() {
- let x1 = foo1(S, |s| s.method());
- let x2 = foo2(|s| s.method(), S);
- let x3 = S.foo1(S, |s| s.method());
- let x4 = S.foo2(|s| s.method(), S);
- }
- "#,
- expect![[r#"
- 94..95 'x': T
- 100..101 'f': F
- 111..113 '{}': ()
- 147..148 'f': F
- 153..154 'x': T
- 164..166 '{}': ()
- 201..205 'self': S
- 253..257 'self': S
- 259..260 'x': T
- 265..266 'f': F
- 276..278 '{}': ()
- 316..320 'self': S
- 322..323 'f': F
- 328..329 'x': T
- 339..341 '{}': ()
- 355..514 '{ ... S); }': ()
- 365..367 'x1': u64
- 370..374 'foo1': fn foo1<S, u64, |S| -> u64>(S, |S| -> u64) -> u64
- 370..393 'foo1(S...hod())': u64
- 375..376 'S': S
- 378..392 '|s| s.method()': |S| -> u64
- 379..380 's': S
- 382..383 's': S
- 382..392 's.method()': u64
- 403..405 'x2': u64
- 408..412 'foo2': fn foo2<S, u64, |S| -> u64>(|S| -> u64, S) -> u64
- 408..431 'foo2(|...(), S)': u64
- 413..427 '|s| s.method()': |S| -> u64
- 414..415 's': S
- 417..418 's': S
- 417..427 's.method()': u64
- 429..430 'S': S
- 441..443 'x3': u64
- 446..447 'S': S
- 446..471 'S.foo1...hod())': u64
- 453..454 'S': S
- 456..470 '|s| s.method()': |S| -> u64
- 457..458 's': S
- 460..461 's': S
- 460..470 's.method()': u64
- 481..483 'x4': u64
- 486..487 'S': S
- 486..511 'S.foo2...(), S)': u64
- 493..507 '|s| s.method()': |S| -> u64
- 494..495 's': S
- 497..498 's': S
- 497..507 's.method()': u64
- 509..510 'S': S
- "#]],
- );
-}
-
-#[test]
-fn fn_item_fn_trait() {
- check_types(
- r#"
-#[lang = "fn_once"]
-trait FnOnce<Args> {
- type Output;
-}
-
-struct S;
-
-fn foo() -> S {}
-
-fn takes_closure<U, F: FnOnce() -> U>(f: F) -> U { f() }
-
-fn test() {
- takes_closure(foo);
-} //^^^^^^^^^^^^^^^^^^ S
-"#,
- );
-}
-
-#[test]
-fn unselected_projection_in_trait_env_1() {
- check_types(
- r#"
-//- /main.rs
-trait Trait {
- type Item;
-}
-
-trait Trait2 {
- fn foo(&self) -> u32;
-}
-
-fn test<T: Trait>() where T::Item: Trait2 {
- let x: T::Item = no_matter;
- x.foo();
-} //^ u32
-"#,
- );
-}
-
-#[test]
-fn unselected_projection_in_trait_env_2() {
- check_types(
- r#"
-trait Trait<T> {
- type Item;
-}
-
-trait Trait2 {
- fn foo(&self) -> u32;
-}
-
-fn test<T, U>() where T::Item: Trait2, T: Trait<U::Item>, U: Trait<()> {
- let x: T::Item = no_matter;
- x.foo();
-} //^ u32
-"#,
- );
-}
-
-#[test]
-fn unselected_projection_on_impl_self() {
- check_infer(
- r#"
- //- /main.rs
- trait Trait {
- type Item;
-
- fn f(&self, x: Self::Item);
- }
-
- struct S;
-
- impl Trait for S {
- type Item = u32;
- fn f(&self, x: Self::Item) { let y = x; }
- }
-
- struct S2;
-
- impl Trait for S2 {
- type Item = i32;
- fn f(&self, x: <Self>::Item) { let y = x; }
- }
- "#,
- expect![[r#"
- 40..44 'self': &Self
- 46..47 'x': Trait::Item<Self>
- 126..130 'self': &S
- 132..133 'x': u32
- 147..161 '{ let y = x; }': ()
- 153..154 'y': u32
- 157..158 'x': u32
- 228..232 'self': &S2
- 234..235 'x': i32
- 251..265 '{ let y = x; }': ()
- 257..258 'y': i32
- 261..262 'x': i32
- "#]],
- );
-}
-
-#[test]
-fn unselected_projection_on_trait_self() {
- check_types(
- r#"
-trait Trait {
- type Item;
-
- fn f(&self) -> Self::Item { loop {} }
-}
-
-struct S;
-impl Trait for S {
- type Item = u32;
-}
-
-fn test() {
- S.f();
-} //^ u32
-"#,
- );
-}
-
-#[test]
-fn unselected_projection_chalk_fold() {
- check_types(
- r#"
-trait Interner {}
-trait Fold<I: Interner, TI = I> {
- type Result;
-}
-
-struct Ty<I: Interner> {}
-impl<I: Interner, TI: Interner> Fold<I, TI> for Ty<I> {
- type Result = Ty<TI>;
-}
-
-fn fold<I: Interner, T>(interner: &I, t: T) -> T::Result
-where
- T: Fold<I, I>,
-{
- loop {}
-}
-
-fn foo<I: Interner>(interner: &I, t: Ty<I>) {
- fold(interner, t);
-} //^ Ty<I>
-"#,
- );
-}
-
-#[test]
-fn trait_impl_self_ty() {
- check_types(
- r#"
-trait Trait<T> {
- fn foo(&self);
-}
-
-struct S;
-
-impl Trait<Self> for S {}
-
-fn test() {
- S.foo();
-} //^ ()
-"#,
- );
-}
-
-#[test]
-fn trait_impl_self_ty_cycle() {
- check_types(
- r#"
-trait Trait {
- fn foo(&self);
-}
-
-struct S<T>;
-
-impl Trait for S<Self> {}
-
-fn test() {
- S.foo();
-} //^ {unknown}
-"#,
- );
-}
-
-#[test]
-fn unselected_projection_in_trait_env_cycle_1() {
- // this is a legitimate cycle
- check_types(
- r#"
-trait Trait {
- type Item;
-}
-
-trait Trait2<T> {}
-
-fn test<T: Trait>() where T: Trait2<T::Item> {
- let x: T::Item = no_matter;
-} //^ {unknown}
-"#,
- );
-}
-
-#[test]
-fn unselected_projection_in_trait_env_cycle_2() {
- // this is a legitimate cycle
- check_types(
- r#"
-//- /main.rs
-trait Trait<T> {
- type Item;
-}
-
-fn test<T, U>() where T: Trait<U::Item>, U: Trait<T::Item> {
- let x: T::Item = no_matter;
-} //^ {unknown}
-"#,
- );
-}
-
-#[test]
-fn inline_assoc_type_bounds_1() {
- check_types(
- r#"
-trait Iterator {
- type Item;
-}
-trait OtherTrait<T> {
- fn foo(&self) -> T;
-}
-
-// workaround for Chalk assoc type normalization problems
-pub struct S<T>;
-impl<T: Iterator> Iterator for S<T> {
- type Item = <T as Iterator>::Item;
-}
-
-fn test<I: Iterator<Item: OtherTrait<u32>>>() {
- let x: <S<I> as Iterator>::Item;
- x.foo();
-} //^ u32
-"#,
- );
-}
-
-#[test]
-fn inline_assoc_type_bounds_2() {
- check_types(
- r#"
-trait Iterator {
- type Item;
-}
-
-fn test<I: Iterator<Item: Iterator<Item = u32>>>() {
- let x: <<I as Iterator>::Item as Iterator>::Item;
- x;
-} //^ u32
-"#,
- );
-}
-
-#[test]
-fn proc_macro_server_types() {
- check_infer(
- r#"
- macro_rules! with_api {
- ($S:ident, $self:ident, $m:ident) => {
- $m! {
- TokenStream {
- fn new() -> $S::TokenStream;
- },
- Group {
- },
- }
- };
- }
- macro_rules! associated_item {
- (type TokenStream) =>
- (type TokenStream: 'static;);
- (type Group) =>
- (type Group: 'static;);
- ($($item:tt)*) => ($($item)*;)
- }
- macro_rules! declare_server_traits {
- ($($name:ident {
- $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
- }),* $(,)?) => {
- pub trait Types {
- $(associated_item!(type $name);)*
- }
-
- $(pub trait $name: Types {
- $(associated_item!(fn $method($($arg: $arg_ty),*) $(-> $ret_ty)?);)*
- })*
-
- pub trait Server: Types $(+ $name)* {}
- impl<S: Types $(+ $name)*> Server for S {}
- }
- }
-
- with_api!(Self, self_, declare_server_traits);
- struct G {}
- struct T {}
- struct Rustc;
- impl Types for Rustc {
- type TokenStream = T;
- type Group = G;
- }
-
- fn make<T>() -> T { loop {} }
- impl TokenStream for Rustc {
- fn new() -> Self::TokenStream {
- let group: Self::Group = make();
- make()
- }
- }
- "#,
- expect![[r#"
- 1061..1072 '{ loop {} }': T
- 1063..1070 'loop {}': !
- 1068..1070 '{}': ()
- 1136..1199 '{ ... }': T
- 1150..1155 'group': G
- 1171..1175 'make': fn make<G>() -> G
- 1171..1177 'make()': G
- 1187..1191 'make': fn make<T>() -> T
- 1187..1193 'make()': T
- "#]],
- );
-}
-
-#[test]
-fn unify_impl_trait() {
- check_infer_with_mismatches(
- r#"
- trait Trait<T> {}
-
- fn foo(x: impl Trait<u32>) { loop {} }
- fn bar<T>(x: impl Trait<T>) -> T { loop {} }
-
- struct S<T>(T);
- impl<T> Trait<T> for S<T> {}
-
- fn default<T>() -> T { loop {} }
-
- fn test() -> impl Trait<i32> {
- let s1 = S(default());
- foo(s1);
- let x: i32 = bar(S(default()));
- S(default())
- }
- "#,
- expect![[r#"
- 26..27 'x': impl Trait<u32>
- 46..57 '{ loop {} }': ()
- 48..55 'loop {}': !
- 53..55 '{}': ()
- 68..69 'x': impl Trait<T>
- 91..102 '{ loop {} }': T
- 93..100 'loop {}': !
- 98..100 '{}': ()
- 171..182 '{ loop {} }': T
- 173..180 'loop {}': !
- 178..180 '{}': ()
- 213..309 '{ ...t()) }': S<{unknown}>
- 223..225 's1': S<u32>
- 228..229 'S': S<u32>(u32) -> S<u32>
- 228..240 'S(default())': S<u32>
- 230..237 'default': fn default<u32>() -> u32
- 230..239 'default()': u32
- 246..249 'foo': fn foo(S<u32>)
- 246..253 'foo(s1)': ()
- 250..252 's1': S<u32>
- 263..264 'x': i32
- 272..275 'bar': fn bar<i32>(S<i32>) -> i32
- 272..289 'bar(S(...lt()))': i32
- 276..277 'S': S<i32>(i32) -> S<i32>
- 276..288 'S(default())': S<i32>
- 278..285 'default': fn default<i32>() -> i32
- 278..287 'default()': i32
- 295..296 'S': S<{unknown}>({unknown}) -> S<{unknown}>
- 295..307 'S(default())': S<{unknown}>
- 297..304 'default': fn default<{unknown}>() -> {unknown}
- 297..306 'default()': {unknown}
- "#]],
- );
-}
-
-#[test]
-fn assoc_types_from_bounds() {
- check_infer(
- r#"
- //- /main.rs
- #[lang = "fn_once"]
- trait FnOnce<Args> {
- type Output;
- }
-
- trait T {
- type O;
- }
-
- impl T for () {
- type O = ();
- }
-
- fn f<X, F>(_v: F)
- where
- X: T,
- F: FnOnce(&X::O),
- { }
-
- fn main() {
- f::<(), _>(|z| { z; });
- }
- "#,
- expect![[r#"
- 133..135 '_v': F
- 178..181 '{ }': ()
- 193..224 '{ ... }); }': ()
- 199..209 'f::<(), _>': fn f<(), |&()| -> ()>(|&()| -> ())
- 199..221 'f::<()... z; })': ()
- 210..220 '|z| { z; }': |&()| -> ()
- 211..212 'z': &()
- 214..220 '{ z; }': ()
- 216..217 'z': &()
- "#]],
- );
-}
-
-#[test]
-fn associated_type_bound() {
- check_types(
- r#"
-pub trait Trait {
- type Item: OtherTrait<u32>;
-}
-pub trait OtherTrait<T> {
- fn foo(&self) -> T;
-}
-
-// this is just a workaround for chalk#234
-pub struct S<T>;
-impl<T: Trait> Trait for S<T> {
- type Item = <T as Trait>::Item;
-}
-
-fn test<T: Trait>() {
- let y: <S<T> as Trait>::Item = no_matter;
- y.foo();
-} //^ u32
-"#,
- );
-}
-
-#[test]
-fn dyn_trait_through_chalk() {
- check_types(
- r#"
-struct Box<T> {}
-#[lang = "deref"]
-trait Deref {
- type Target;
-}
-impl<T> Deref for Box<T> {
- type Target = T;
-}
-trait Trait {
- fn foo(&self);
-}
-
-fn test(x: Box<dyn Trait>) {
- x.foo();
-} //^ ()
-"#,
- );
-}
-
-#[test]
-fn string_to_owned() {
- check_types(
- r#"
-struct String {}
-pub trait ToOwned {
- type Owned;
- fn to_owned(&self) -> Self::Owned;
-}
-impl ToOwned for str {
- type Owned = String;
-}
-fn test() {
- "foo".to_owned();
-} //^ String
-"#,
- );
-}
-
-#[test]
-fn iterator_chain() {
- check_infer(
- r#"
- //- /main.rs
- #[lang = "fn_once"]
- trait FnOnce<Args> {
- type Output;
- }
- #[lang = "fn_mut"]
- trait FnMut<Args>: FnOnce<Args> { }
-
- enum Option<T> { Some(T), None }
- use Option::*;
-
- pub trait Iterator {
- type Item;
-
- fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
- where
- F: FnMut(Self::Item) -> Option<B>,
- { loop {} }
-
- fn for_each<F>(self, f: F)
- where
- F: FnMut(Self::Item),
- { loop {} }
- }
-
- pub trait IntoIterator {
- type Item;
- type IntoIter: Iterator<Item = Self::Item>;
- fn into_iter(self) -> Self::IntoIter;
- }
-
- pub struct FilterMap<I, F> { }
- impl<B, I: Iterator, F> Iterator for FilterMap<I, F>
- where
- F: FnMut(I::Item) -> Option<B>,
- {
- type Item = B;
- }
-
- #[stable(feature = "rust1", since = "1.0.0")]
- impl<I: Iterator> IntoIterator for I {
- type Item = I::Item;
- type IntoIter = I;
-
- fn into_iter(self) -> I {
- self
- }
- }
-
- struct Vec<T> {}
- impl<T> Vec<T> {
- fn new() -> Self { loop {} }
- }
-
- impl<T> IntoIterator for Vec<T> {
- type Item = T;
- type IntoIter = IntoIter<T>;
- }
-
- pub struct IntoIter<T> { }
- impl<T> Iterator for IntoIter<T> {
- type Item = T;
- }
-
- fn main() {
- Vec::<i32>::new().into_iter()
- .filter_map(|x| if x > 0 { Some(x as u32) } else { None })
- .for_each(|y| { y; });
- }
- "#,
- expect![[r#"
- 226..230 'self': Self
- 232..233 'f': F
- 317..328 '{ loop {} }': FilterMap<Self, F>
- 319..326 'loop {}': !
- 324..326 '{}': ()
- 349..353 'self': Self
- 355..356 'f': F
- 405..416 '{ loop {} }': ()
- 407..414 'loop {}': !
- 412..414 '{}': ()
- 525..529 'self': Self
- 854..858 'self': I
- 865..885 '{ ... }': I
- 875..879 'self': I
- 944..955 '{ loop {} }': Vec<T>
- 946..953 'loop {}': !
- 951..953 '{}': ()
- 1142..1269 '{ ... }); }': ()
- 1148..1163 'Vec::<i32>::new': fn new<i32>() -> Vec<i32>
- 1148..1165 'Vec::<...:new()': Vec<i32>
- 1148..1177 'Vec::<...iter()': IntoIter<i32>
- 1148..1240 'Vec::<...one })': FilterMap<IntoIter<i32>, |i32| -> Option<u32>>
- 1148..1266 'Vec::<... y; })': ()
- 1194..1239 '|x| if...None }': |i32| -> Option<u32>
- 1195..1196 'x': i32
- 1198..1239 'if x >...None }': Option<u32>
- 1201..1202 'x': i32
- 1201..1206 'x > 0': bool
- 1205..1206 '0': i32
- 1207..1225 '{ Some...u32) }': Option<u32>
- 1209..1213 'Some': Some<u32>(u32) -> Option<u32>
- 1209..1223 'Some(x as u32)': Option<u32>
- 1214..1215 'x': i32
- 1214..1222 'x as u32': u32
- 1231..1239 '{ None }': Option<u32>
- 1233..1237 'None': Option<u32>
- 1255..1265 '|y| { y; }': |u32| -> ()
- 1256..1257 'y': u32
- 1259..1265 '{ y; }': ()
- 1261..1262 'y': u32
- "#]],
- );
-}
-
-#[test]
-fn nested_assoc() {
- check_types(
- r#"
-struct Bar;
-struct Foo;
-
-trait A {
- type OutputA;
-}
-
-impl A for Bar {
- type OutputA = Foo;
-}
-
-trait B {
- type Output;
- fn foo() -> Self::Output;
-}
-
-impl<T:A> B for T {
- type Output = T::OutputA;
- fn foo() -> Self::Output { loop {} }
-}
-
-fn main() {
- Bar::foo();
-} //^ Foo
-"#,
- );
-}
-
-#[test]
-fn trait_object_no_coercion() {
- check_infer_with_mismatches(
- r#"
- trait Foo {}
-
- fn foo(x: &dyn Foo) {}
-
- fn test(x: &dyn Foo) {
- foo(x);
- }
- "#,
- expect![[r#"
- 21..22 'x': &dyn Foo
- 34..36 '{}': ()
- 46..47 'x': &dyn Foo
- 59..74 '{ foo(x); }': ()
- 65..68 'foo': fn foo(&dyn Foo)
- 65..71 'foo(x)': ()
- 69..70 'x': &dyn Foo
- "#]],
- );
-}
-
-#[test]
-fn builtin_copy() {
- check_infer_with_mismatches(
- r#"
- #[lang = "copy"]
- trait Copy {}
-
- struct IsCopy;
- impl Copy for IsCopy {}
- struct NotCopy;
-
- trait Test { fn test(&self) -> bool; }
- impl<T: Copy> Test for T {}
-
- fn test() {
- IsCopy.test();
- NotCopy.test();
- (IsCopy, IsCopy).test();
- (IsCopy, NotCopy).test();
- }
- "#,
- expect![[r#"
- 110..114 'self': &Self
- 166..267 '{ ...t(); }': ()
- 172..178 'IsCopy': IsCopy
- 172..185 'IsCopy.test()': bool
- 191..198 'NotCopy': NotCopy
- 191..205 'NotCopy.test()': {unknown}
- 211..227 '(IsCop...sCopy)': (IsCopy, IsCopy)
- 211..234 '(IsCop...test()': bool
- 212..218 'IsCopy': IsCopy
- 220..226 'IsCopy': IsCopy
- 240..257 '(IsCop...tCopy)': (IsCopy, NotCopy)
- 240..264 '(IsCop...test()': {unknown}
- 241..247 'IsCopy': IsCopy
- 249..256 'NotCopy': NotCopy
- "#]],
- );
-}
-
-#[test]
-fn builtin_fn_def_copy() {
- check_infer_with_mismatches(
- r#"
- #[lang = "copy"]
- trait Copy {}
-
- fn foo() {}
- fn bar<T: Copy>(T) -> T {}
- struct Struct(usize);
- enum Enum { Variant(usize) }
-
- trait Test { fn test(&self) -> bool; }
- impl<T: Copy> Test for T {}
-
- fn test() {
- foo.test();
- bar.test();
- Struct.test();
- Enum::Variant.test();
- }
- "#,
- expect![[r#"
- 41..43 '{}': ()
- 60..61 'T': {unknown}
- 68..70 '{}': ()
- 68..70: expected T, got ()
- 145..149 'self': &Self
- 201..281 '{ ...t(); }': ()
- 207..210 'foo': fn foo()
- 207..217 'foo.test()': bool
- 223..226 'bar': fn bar<{unknown}>({unknown}) -> {unknown}
- 223..233 'bar.test()': bool
- 239..245 'Struct': Struct(usize) -> Struct
- 239..252 'Struct.test()': bool
- 258..271 'Enum::Variant': Variant(usize) -> Enum
- 258..278 'Enum::...test()': bool
- "#]],
- );
-}
-
-#[test]
-fn builtin_fn_ptr_copy() {
- check_infer_with_mismatches(
- r#"
- #[lang = "copy"]
- trait Copy {}
-
- trait Test { fn test(&self) -> bool; }
- impl<T: Copy> Test for T {}
-
- fn test(f1: fn(), f2: fn(usize) -> u8, f3: fn(u8, u8) -> &u8) {
- f1.test();
- f2.test();
- f3.test();
- }
- "#,
- expect![[r#"
- 54..58 'self': &Self
- 108..110 'f1': fn()
- 118..120 'f2': fn(usize) -> u8
- 139..141 'f3': fn(u8, u8) -> &u8
- 162..210 '{ ...t(); }': ()
- 168..170 'f1': fn()
- 168..177 'f1.test()': bool
- 183..185 'f2': fn(usize) -> u8
- 183..192 'f2.test()': bool
- 198..200 'f3': fn(u8, u8) -> &u8
- 198..207 'f3.test()': bool
- "#]],
- );
-}
-
-#[test]
-fn builtin_sized() {
- check_infer_with_mismatches(
- r#"
- #[lang = "sized"]
- trait Sized {}
-
- trait Test { fn test(&self) -> bool; }
- impl<T: Sized> Test for T {}
-
- fn test() {
- 1u8.test();
- (*"foo").test(); // not Sized
- (1u8, 1u8).test();
- (1u8, *"foo").test(); // not Sized
- }
- "#,
- expect![[r#"
- 56..60 'self': &Self
- 113..228 '{ ...ized }': ()
- 119..122 '1u8': u8
- 119..129 '1u8.test()': bool
- 135..150 '(*"foo").test()': {unknown}
- 136..142 '*"foo"': str
- 137..142 '"foo"': &str
- 169..179 '(1u8, 1u8)': (u8, u8)
- 169..186 '(1u8, ...test()': bool
- 170..173 '1u8': u8
- 175..178 '1u8': u8
- 192..205 '(1u8, *"foo")': (u8, str)
- 192..212 '(1u8, ...test()': {unknown}
- 193..196 '1u8': u8
- 198..204 '*"foo"': str
- 199..204 '"foo"': &str
- "#]],
- );
-}
-
-#[test]
-fn integer_range_iterate() {
- check_types(
- r#"
-//- /main.rs crate:main deps:core
-fn test() {
- for x in 0..100 { x; }
-} //^ i32
-
-//- /core.rs crate:core
-pub mod ops {
- pub struct Range<Idx> {
- pub start: Idx,
- pub end: Idx,
- }
-}
-
-pub mod iter {
- pub trait Iterator {
- type Item;
- }
-
- pub trait IntoIterator {
- type Item;
- type IntoIter: Iterator<Item = Self::Item>;
- }
-
- impl<T> IntoIterator for T where T: Iterator {
- type Item = <T as Iterator>::Item;
- type IntoIter = Self;
- }
-}
-
-trait Step {}
-impl Step for i32 {}
-impl Step for i64 {}
-
-impl<A: Step> iter::Iterator for ops::Range<A> {
- type Item = A;
-}
-"#,
- );
-}
-
-#[test]
-fn infer_closure_arg() {
- check_infer(
- r#"
- //- /lib.rs
-
- enum Option<T> {
- None,
- Some(T)
- }
-
- fn foo() {
- let s = Option::None;
- let f = |x: Option<i32>| {};
- (&f)(s)
- }
- "#,
- expect![[r#"
- 52..126 '{ ...)(s) }': ()
- 62..63 's': Option<i32>
- 66..78 'Option::None': Option<i32>
- 88..89 'f': |Option<i32>| -> ()
- 92..111 '|x: Op...2>| {}': |Option<i32>| -> ()
- 93..94 'x': Option<i32>
- 109..111 '{}': ()
- 117..124 '(&f)(s)': ()
- 118..120 '&f': &|Option<i32>| -> ()
- 119..120 'f': |Option<i32>| -> ()
- 122..123 's': Option<i32>
- "#]],
- );
-}
-
-#[test]
-fn infer_fn_trait_arg() {
- check_infer(
- r#"
- //- /lib.rs deps:std
-
- #[lang = "fn_once"]
- pub trait FnOnce<Args> {
- type Output;
-
- extern "rust-call" fn call_once(&self, args: Args) -> Self::Output;
- }
-
- #[lang = "fn"]
- pub trait Fn<Args>:FnOnce<Args> {
- extern "rust-call" fn call(&self, args: Args) -> Self::Output;
- }
-
- enum Option<T> {
- None,
- Some(T)
- }
-
- fn foo<F, T>(f: F) -> T
- where
- F: Fn(Option<i32>) -> T,
- {
- let s = None;
- f(s)
- }
- "#,
- expect![[r#"
- 101..105 'self': &Self
- 107..111 'args': Args
- 220..224 'self': &Self
- 226..230 'args': Args
- 313..314 'f': F
- 359..389 '{ ...f(s) }': T
- 369..370 's': Option<i32>
- 373..377 'None': Option<i32>
- 383..384 'f': F
- 383..387 'f(s)': T
- 385..386 's': Option<i32>
- "#]],
- );
-}
-
-#[test]
-fn infer_box_fn_arg() {
- check_infer(
- r#"
- //- /lib.rs deps:std
-
- #[lang = "fn_once"]
- pub trait FnOnce<Args> {
- type Output;
-
- extern "rust-call" fn call_once(self, args: Args) -> Self::Output;
- }
-
- #[lang = "deref"]
- pub trait Deref {
- type Target: ?Sized;
-
- fn deref(&self) -> &Self::Target;
- }
-
- #[lang = "owned_box"]
- pub struct Box<T: ?Sized> {
- inner: *mut T,
- }
-
- impl<T: ?Sized> Deref for Box<T> {
- type Target = T;
-
- fn deref(&self) -> &T {
- &self.inner
- }
- }
-
- enum Option<T> {
- None,
- Some(T)
- }
-
- fn foo() {
- let s = Option::None;
- let f: Box<dyn FnOnce(&Option<i32>)> = box (|ps| {});
- f(&s)
- }
- "#,
- expect![[r#"
- 100..104 'self': Self
- 106..110 'args': Args
- 214..218 'self': &Self
- 384..388 'self': &Box<T>
- 396..423 '{ ... }': &T
- 406..417 '&self.inner': &*mut T
- 407..411 'self': &Box<T>
- 407..417 'self.inner': *mut T
- 478..575 '{ ...(&s) }': FnOnce::Output<dyn FnOnce<(&Option<i32>,)>, (&Option<i32>,)>
- 488..489 's': Option<i32>
- 492..504 'Option::None': Option<i32>
- 514..515 'f': Box<dyn FnOnce<(&Option<i32>,)>>
- 549..562 'box (|ps| {})': Box<|{unknown}| -> ()>
- 554..561 '|ps| {}': |{unknown}| -> ()
- 555..557 'ps': {unknown}
- 559..561 '{}': ()
- 568..569 'f': Box<dyn FnOnce<(&Option<i32>,)>>
- 568..573 'f(&s)': FnOnce::Output<dyn FnOnce<(&Option<i32>,)>, (&Option<i32>,)>
- 570..572 '&s': &Option<i32>
- 571..572 's': Option<i32>
- "#]],
- );
-}
-
-#[test]
-fn infer_dyn_fn_output() {
- check_types(
- r#"
-#[lang = "fn_once"]
-pub trait FnOnce<Args> {
- type Output;
- extern "rust-call" fn call_once(self, args: Args) -> Self::Output;
-}
-
-#[lang = "fn"]
-pub trait Fn<Args>: FnOnce<Args> {
- extern "rust-call" fn call(&self, args: Args) -> Self::Output;
-}
-
-fn foo() {
- let f: &dyn Fn() -> i32;
- f();
- //^^^ i32
-}"#,
- );
-}
-
-#[test]
-fn infer_dyn_fn_once_output() {
- check_types(
- r#"
-#[lang = "fn_once"]
-pub trait FnOnce<Args> {
- type Output;
- extern "rust-call" fn call_once(self, args: Args) -> Self::Output;
-}
-
-fn foo() {
- let f: dyn FnOnce() -> i32;
- f();
- //^^^ i32
-}"#,
- );
-}
-
-#[test]
-fn variable_kinds_1() {
- check_types(
- r#"
-trait Trait<T> { fn get(self, t: T) -> T; }
-struct S;
-impl Trait<u128> for S {}
-impl Trait<f32> for S {}
-fn test() {
- S.get(1);
- //^^^^^^^^ u128
- S.get(1.);
- //^^^^^^^^ f32
-}
- "#,
- );
-}
-
-#[test]
-fn variable_kinds_2() {
- check_types(
- r#"
-trait Trait { fn get(self) -> Self; }
-impl Trait for u128 {}
-impl Trait for f32 {}
-fn test() {
- 1.get();
- //^^^^^^^ u128
- (1.).get();
- //^^^^^^^^^^ f32
-}
- "#,
- );
-}
-
-#[test]
-fn underscore_import() {
- check_types(
- r#"
-mod tr {
- pub trait Tr {
- fn method(&self) -> u8 { 0 }
- }
-}
-
-struct Tr;
-impl crate::tr::Tr for Tr {}
-
-use crate::tr::Tr as _;
-fn test() {
- Tr.method();
- //^^^^^^^^^^^ u8
-}
- "#,
- );
-}
+++ /dev/null
-//! Trait solving using Chalk.
-use std::sync::Arc;
-
-use base_db::CrateId;
-use chalk_ir::cast::Cast;
-use chalk_solve::Solver;
-use hir_def::{lang_item::LangItemTarget, TraitId};
-
-use crate::{db::HirDatabase, DebruijnIndex, Substs};
-
-use super::{Canonical, GenericPredicate, HirDisplay, ProjectionTy, TraitRef, Ty, TypeWalk};
-
-use self::chalk::{from_chalk, Interner, ToChalk};
-
-pub(crate) mod chalk;
-
-// This controls the maximum size of types Chalk considers. If we set this too
-// high, we can run into slow edge cases; if we set it too low, Chalk won't
-// find some solutions.
-// FIXME this is currently hardcoded in the recursive solver
-// const CHALK_SOLVER_MAX_SIZE: usize = 10;
-
-/// This controls how much 'time' we give the Chalk solver before giving up.
-const CHALK_SOLVER_FUEL: i32 = 100;
-
-#[derive(Debug, Copy, Clone)]
-struct ChalkContext<'a> {
- db: &'a dyn HirDatabase,
- krate: CrateId,
-}
-
-fn create_chalk_solver() -> chalk_recursive::RecursiveSolver<Interner> {
- let overflow_depth = 100;
- let caching_enabled = true;
- chalk_recursive::RecursiveSolver::new(overflow_depth, caching_enabled)
-}
-
-/// A set of clauses that we assume to be true. E.g. if we are inside this function:
-/// ```rust
-/// fn foo<T: Default>(t: T) {}
-/// ```
-/// we assume that `T: Default`.
-#[derive(Clone, Debug, PartialEq, Eq, Hash)]
-pub struct TraitEnvironment {
- pub predicates: Vec<GenericPredicate>,
-}
-
-impl TraitEnvironment {
- /// Returns trait refs with the given self type which are supposed to hold
- /// in this trait env. E.g. if we are in `foo<T: SomeTrait>()`, this will
- /// find that `T: SomeTrait` if we call it for `T`.
- pub(crate) fn trait_predicates_for_self_ty<'a>(
- &'a self,
- ty: &'a Ty,
- ) -> impl Iterator<Item = &'a TraitRef> + 'a {
- self.predicates.iter().filter_map(move |pred| match pred {
- GenericPredicate::Implemented(tr) if tr.self_ty() == ty => Some(tr),
- _ => None,
- })
- }
-}
-
-/// Something (usually a goal), along with an environment.
-#[derive(Clone, Debug, PartialEq, Eq, Hash)]
-pub struct InEnvironment<T> {
- pub environment: Arc<TraitEnvironment>,
- pub value: T,
-}
-
-impl<T> InEnvironment<T> {
- pub fn new(environment: Arc<TraitEnvironment>, value: T) -> InEnvironment<T> {
- InEnvironment { environment, value }
- }
-}
-
-/// Something that needs to be proven (by Chalk) during type checking, e.g. that
-/// a certain type implements a certain trait. Proving the Obligation might
-/// result in additional information about inference variables.
-#[derive(Clone, Debug, PartialEq, Eq, Hash)]
-pub enum Obligation {
- /// Prove that a certain type implements a trait (the type is the `Self` type
- /// parameter to the `TraitRef`).
- Trait(TraitRef),
- Projection(ProjectionPredicate),
-}
-
-impl Obligation {
- pub fn from_predicate(predicate: GenericPredicate) -> Option<Obligation> {
- match predicate {
- GenericPredicate::Implemented(trait_ref) => Some(Obligation::Trait(trait_ref)),
- GenericPredicate::Projection(projection_pred) => {
- Some(Obligation::Projection(projection_pred))
- }
- GenericPredicate::Error => None,
- }
- }
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Hash)]
-pub struct ProjectionPredicate {
- pub projection_ty: ProjectionTy,
- pub ty: Ty,
-}
-
-impl TypeWalk for ProjectionPredicate {
- fn walk(&self, f: &mut impl FnMut(&Ty)) {
- self.projection_ty.walk(f);
- self.ty.walk(f);
- }
-
- fn walk_mut_binders(
- &mut self,
- f: &mut impl FnMut(&mut Ty, DebruijnIndex),
- binders: DebruijnIndex,
- ) {
- self.projection_ty.walk_mut_binders(f, binders);
- self.ty.walk_mut_binders(f, binders);
- }
-}
-
-/// Solve a trait goal using Chalk.
-pub(crate) fn trait_solve_query(
- db: &dyn HirDatabase,
- krate: CrateId,
- goal: Canonical<InEnvironment<Obligation>>,
-) -> Option<Solution> {
- let _p = profile::span("trait_solve_query").detail(|| match &goal.value.value {
- Obligation::Trait(it) => db.trait_data(it.trait_).name.to_string(),
- Obligation::Projection(_) => "projection".to_string(),
- });
- log::info!("trait_solve_query({})", goal.value.value.display(db));
-
- if let Obligation::Projection(pred) = &goal.value.value {
- if let Ty::Bound(_) = &pred.projection_ty.parameters[0] {
- // Hack: don't ask Chalk to normalize with an unknown self type, it'll say that's impossible
- return Some(Solution::Ambig(Guidance::Unknown));
- }
- }
-
- let canonical = goal.to_chalk(db).cast(&Interner);
-
- // We currently don't deal with universes (I think / hope they're not yet
- // relevant for our use cases?)
- let u_canonical = chalk_ir::UCanonical { canonical, universes: 1 };
- let solution = solve(db, krate, &u_canonical);
- solution.map(|solution| solution_from_chalk(db, solution))
-}
-
-fn solve(
- db: &dyn HirDatabase,
- krate: CrateId,
- goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal<Interner>>>,
-) -> Option<chalk_solve::Solution<Interner>> {
- let context = ChalkContext { db, krate };
- log::debug!("solve goal: {:?}", goal);
- let mut solver = create_chalk_solver();
-
- let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL);
-
- let should_continue = || {
- context.db.check_canceled();
- let remaining = fuel.get();
- fuel.set(remaining - 1);
- if remaining == 0 {
- log::debug!("fuel exhausted");
- }
- remaining > 0
- };
- let mut solve = || {
- let solution = solver.solve_limited(&context, goal, should_continue);
- log::debug!("solve({:?}) => {:?}", goal, solution);
- solution
- };
- // don't set the TLS for Chalk unless Chalk debugging is active, to make
- // extra sure we only use it for debugging
- let solution =
- if is_chalk_debug() { chalk::tls::set_current_program(db, solve) } else { solve() };
-
- solution
-}
-
-fn is_chalk_debug() -> bool {
- std::env::var("CHALK_DEBUG").is_ok()
-}
-
-fn solution_from_chalk(
- db: &dyn HirDatabase,
- solution: chalk_solve::Solution<Interner>,
-) -> Solution {
- let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<Interner>>| {
- let result = from_chalk(db, subst);
- SolutionVariables(result)
- };
- match solution {
- chalk_solve::Solution::Unique(constr_subst) => {
- let subst = chalk_ir::Canonical {
- value: constr_subst.value.subst,
- binders: constr_subst.binders,
- };
- Solution::Unique(convert_subst(subst))
- }
- chalk_solve::Solution::Ambig(chalk_solve::Guidance::Definite(subst)) => {
- Solution::Ambig(Guidance::Definite(convert_subst(subst)))
- }
- chalk_solve::Solution::Ambig(chalk_solve::Guidance::Suggested(subst)) => {
- Solution::Ambig(Guidance::Suggested(convert_subst(subst)))
- }
- chalk_solve::Solution::Ambig(chalk_solve::Guidance::Unknown) => {
- Solution::Ambig(Guidance::Unknown)
- }
- }
-}
-
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct SolutionVariables(pub Canonical<Substs>);
-
-#[derive(Clone, Debug, PartialEq, Eq)]
-/// A (possible) solution for a proposed goal.
-pub enum Solution {
- /// The goal indeed holds, and there is a unique value for all existential
- /// variables.
- Unique(SolutionVariables),
-
- /// The goal may be provable in multiple ways, but regardless we may have some guidance
- /// for type inference. In this case, we don't return any lifetime
- /// constraints, since we have not "committed" to any particular solution
- /// yet.
- Ambig(Guidance),
-}
-
-#[derive(Clone, Debug, PartialEq, Eq)]
-/// When a goal holds ambiguously (e.g., because there are multiple possible
-/// solutions), we issue a set of *guidance* back to type inference.
-pub enum Guidance {
- /// The existential variables *must* have the given values if the goal is
- /// ever to hold, but that alone isn't enough to guarantee the goal will
- /// actually hold.
- Definite(SolutionVariables),
-
- /// There are multiple plausible values for the existentials, but the ones
- /// here are suggested as the preferred choice heuristically. These should
- /// be used for inference fallback only.
- Suggested(SolutionVariables),
-
- /// There's no useful information to feed back to type inference
- Unknown,
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub enum FnTrait {
- FnOnce,
- FnMut,
- Fn,
-}
-
-impl FnTrait {
- fn lang_item_name(self) -> &'static str {
- match self {
- FnTrait::FnOnce => "fn_once",
- FnTrait::FnMut => "fn_mut",
- FnTrait::Fn => "fn",
- }
- }
-
- pub fn get_id(&self, db: &dyn HirDatabase, krate: CrateId) -> Option<TraitId> {
- let target = db.lang_item(krate, self.lang_item_name().into())?;
- match target {
- LangItemTarget::TraitId(t) => Some(t),
- _ => None,
- }
- }
-}
+++ /dev/null
-//! Conversion code from/to Chalk.
-use std::sync::Arc;
-
-use log::debug;
-
-use chalk_ir::{fold::shift::Shift, CanonicalVarKinds, GenericArg, TypeName};
-use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait};
-
-use base_db::{salsa::InternKey, CrateId};
-use hir_def::{
- lang_item::{lang_attr, LangItemTarget},
- AssocContainerId, AssocItemId, HasModule, Lookup, TypeAliasId,
-};
-
-use super::ChalkContext;
-use crate::{
- db::HirDatabase,
- display::HirDisplay,
- method_resolution::{TyFingerprint, ALL_FLOAT_FPS, ALL_INT_FPS},
- utils::generics,
- CallableDefId, DebruijnIndex, FnSig, GenericPredicate, Substs, Ty, TypeCtor,
-};
-use mapping::{
- convert_where_clauses, generic_predicate_to_inline_bound, make_binders, TypeAliasAsValue,
-};
-
-pub use self::interner::*;
-
-pub(super) mod tls;
-mod interner;
-mod mapping;
-
-pub(super) trait ToChalk {
- type Chalk;
- fn to_chalk(self, db: &dyn HirDatabase) -> Self::Chalk;
- fn from_chalk(db: &dyn HirDatabase, chalk: Self::Chalk) -> Self;
-}
-
-pub(super) fn from_chalk<T, ChalkT>(db: &dyn HirDatabase, chalk: ChalkT) -> T
-where
- T: ToChalk<Chalk = ChalkT>,
-{
- T::from_chalk(db, chalk)
-}
-
-impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
- fn associated_ty_data(&self, id: AssocTypeId) -> Arc<AssociatedTyDatum> {
- self.db.associated_ty_data(id)
- }
- fn trait_datum(&self, trait_id: TraitId) -> Arc<TraitDatum> {
- self.db.trait_datum(self.krate, trait_id)
- }
- fn adt_datum(&self, struct_id: AdtId) -> Arc<StructDatum> {
- self.db.struct_datum(self.krate, struct_id)
- }
- fn adt_repr(&self, _struct_id: AdtId) -> rust_ir::AdtRepr {
- rust_ir::AdtRepr { repr_c: false, repr_packed: false }
- }
- fn impl_datum(&self, impl_id: ImplId) -> Arc<ImplDatum> {
- self.db.impl_datum(self.krate, impl_id)
- }
-
- fn fn_def_datum(
- &self,
- fn_def_id: chalk_ir::FnDefId<Interner>,
- ) -> Arc<rust_ir::FnDefDatum<Interner>> {
- self.db.fn_def_datum(self.krate, fn_def_id)
- }
-
- fn impls_for_trait(
- &self,
- trait_id: TraitId,
- parameters: &[GenericArg<Interner>],
- binders: &CanonicalVarKinds<Interner>,
- ) -> Vec<ImplId> {
- debug!("impls_for_trait {:?}", trait_id);
- let trait_: hir_def::TraitId = from_chalk(self.db, trait_id);
-
- let ty: Ty = from_chalk(self.db, parameters[0].assert_ty_ref(&Interner).clone());
-
- fn binder_kind(ty: &Ty, binders: &CanonicalVarKinds<Interner>) -> Option<chalk_ir::TyKind> {
- if let Ty::Bound(bv) = ty {
- let binders = binders.as_slice(&Interner);
- if bv.debruijn == DebruijnIndex::INNERMOST {
- if let chalk_ir::VariableKind::Ty(tk) = binders[bv.index].kind {
- return Some(tk);
- }
- }
- }
- None
- }
-
- let self_ty_fp = TyFingerprint::for_impl(&ty);
- let fps: &[TyFingerprint] = match binder_kind(&ty, binders) {
- Some(chalk_ir::TyKind::Integer) => &ALL_INT_FPS,
- Some(chalk_ir::TyKind::Float) => &ALL_FLOAT_FPS,
- _ => self_ty_fp.as_ref().map(std::slice::from_ref).unwrap_or(&[]),
- };
-
- // Note: Since we're using impls_for_trait, only impls where the trait
- // can be resolved should ever reach Chalk. `impl_datum` relies on that
- // and will panic if the trait can't be resolved.
- let in_deps = self.db.trait_impls_in_deps(self.krate);
- let in_self = self.db.trait_impls_in_crate(self.krate);
- let impl_maps = [in_deps, in_self];
-
- let id_to_chalk = |id: hir_def::ImplId| id.to_chalk(self.db);
-
- let result: Vec<_> = if fps.is_empty() {
- debug!("Unrestricted search for {:?} impls...", trait_);
- impl_maps
- .iter()
- .flat_map(|crate_impl_defs| crate_impl_defs.for_trait(trait_).map(id_to_chalk))
- .collect()
- } else {
- impl_maps
- .iter()
- .flat_map(|crate_impl_defs| {
- fps.iter().flat_map(move |fp| {
- crate_impl_defs.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk)
- })
- })
- .collect()
- };
-
- debug!("impls_for_trait returned {} impls", result.len());
- result
- }
- fn impl_provided_for(&self, auto_trait_id: TraitId, struct_id: AdtId) -> bool {
- debug!("impl_provided_for {:?}, {:?}", auto_trait_id, struct_id);
- false // FIXME
- }
- fn associated_ty_value(&self, id: AssociatedTyValueId) -> Arc<AssociatedTyValue> {
- self.db.associated_ty_value(self.krate, id)
- }
-
- fn custom_clauses(&self) -> Vec<chalk_ir::ProgramClause<Interner>> {
- vec![]
- }
- fn local_impls_to_coherence_check(&self, _trait_id: TraitId) -> Vec<ImplId> {
- // We don't do coherence checking (yet)
- unimplemented!()
- }
- fn interner(&self) -> &Interner {
- &Interner
- }
- fn well_known_trait_id(
- &self,
- well_known_trait: rust_ir::WellKnownTrait,
- ) -> Option<chalk_ir::TraitId<Interner>> {
- let lang_attr = lang_attr_from_well_known_trait(well_known_trait);
- let trait_ = match self.db.lang_item(self.krate, lang_attr.into()) {
- Some(LangItemTarget::TraitId(trait_)) => trait_,
- _ => return None,
- };
- Some(trait_.to_chalk(self.db))
- }
-
- fn program_clauses_for_env(
- &self,
- environment: &chalk_ir::Environment<Interner>,
- ) -> chalk_ir::ProgramClauses<Interner> {
- self.db.program_clauses_for_chalk_env(self.krate, environment.clone())
- }
-
- fn opaque_ty_data(&self, id: chalk_ir::OpaqueTyId<Interner>) -> Arc<OpaqueTyDatum> {
- let interned_id = crate::db::InternedOpaqueTyId::from(id);
- let full_id = self.db.lookup_intern_impl_trait_id(interned_id);
- let (func, idx) = match full_id {
- crate::OpaqueTyId::ReturnTypeImplTrait(func, idx) => (func, idx),
- };
- let datas =
- self.db.return_type_impl_traits(func).expect("impl trait id without impl traits");
- let data = &datas.value.impl_traits[idx as usize];
- let bound = OpaqueTyDatumBound {
- bounds: make_binders(
- data.bounds
- .value
- .iter()
- .cloned()
- .filter(|b| !b.is_error())
- .map(|b| b.to_chalk(self.db))
- .collect(),
- 1,
- ),
- where_clauses: make_binders(vec![], 0),
- };
- let num_vars = datas.num_binders;
- Arc::new(OpaqueTyDatum { opaque_ty_id: id, bound: make_binders(bound, num_vars) })
- }
-
- fn hidden_opaque_type(&self, _id: chalk_ir::OpaqueTyId<Interner>) -> chalk_ir::Ty<Interner> {
- // FIXME: actually provide the hidden type; it is relevant for auto traits
- Ty::Unknown.to_chalk(self.db)
- }
-
- fn is_object_safe(&self, _trait_id: chalk_ir::TraitId<Interner>) -> bool {
- // FIXME: implement actual object safety
- true
- }
-
- fn closure_kind(
- &self,
- _closure_id: chalk_ir::ClosureId<Interner>,
- _substs: &chalk_ir::Substitution<Interner>,
- ) -> rust_ir::ClosureKind {
- // Fn is the closure kind that implements all three traits
- rust_ir::ClosureKind::Fn
- }
- fn closure_inputs_and_output(
- &self,
- _closure_id: chalk_ir::ClosureId<Interner>,
- substs: &chalk_ir::Substitution<Interner>,
- ) -> chalk_ir::Binders<rust_ir::FnDefInputsAndOutputDatum<Interner>> {
- let sig_ty: Ty =
- from_chalk(self.db, substs.at(&Interner, 0).assert_ty_ref(&Interner).clone());
- let sig = FnSig::from_fn_ptr_substs(
- &sig_ty.substs().expect("first closure param should be fn ptr"),
- false,
- );
- let io = rust_ir::FnDefInputsAndOutputDatum {
- argument_types: sig.params().iter().map(|ty| ty.clone().to_chalk(self.db)).collect(),
- return_type: sig.ret().clone().to_chalk(self.db),
- };
- make_binders(io.shifted_in(&Interner), 0)
- }
- fn closure_upvars(
- &self,
- _closure_id: chalk_ir::ClosureId<Interner>,
- _substs: &chalk_ir::Substitution<Interner>,
- ) -> chalk_ir::Binders<chalk_ir::Ty<Interner>> {
- let ty = Ty::unit().to_chalk(self.db);
- make_binders(ty, 0)
- }
- fn closure_fn_substitution(
- &self,
- _closure_id: chalk_ir::ClosureId<Interner>,
- _substs: &chalk_ir::Substitution<Interner>,
- ) -> chalk_ir::Substitution<Interner> {
- Substs::empty().to_chalk(self.db)
- }
-
- fn trait_name(&self, _trait_id: chalk_ir::TraitId<Interner>) -> String {
- unimplemented!()
- }
- fn adt_name(&self, _struct_id: chalk_ir::AdtId<Interner>) -> String {
- unimplemented!()
- }
- fn assoc_type_name(&self, _assoc_ty_id: chalk_ir::AssocTypeId<Interner>) -> String {
- unimplemented!()
- }
- fn opaque_type_name(&self, _opaque_ty_id: chalk_ir::OpaqueTyId<Interner>) -> String {
- unimplemented!()
- }
- fn fn_def_name(&self, _fn_def_id: chalk_ir::FnDefId<Interner>) -> String {
- unimplemented!()
- }
-}
-
-pub(crate) fn program_clauses_for_chalk_env_query(
- db: &dyn HirDatabase,
- krate: CrateId,
- environment: chalk_ir::Environment<Interner>,
-) -> chalk_ir::ProgramClauses<Interner> {
- chalk_solve::program_clauses_for_env(&ChalkContext { db, krate }, &environment)
-}
-
-pub(crate) fn associated_ty_data_query(
- db: &dyn HirDatabase,
- id: AssocTypeId,
-) -> Arc<AssociatedTyDatum> {
- debug!("associated_ty_data {:?}", id);
- let type_alias: TypeAliasId = from_chalk(db, id);
- let trait_ = match type_alias.lookup(db.upcast()).container {
- AssocContainerId::TraitId(t) => t,
- _ => panic!("associated type not in trait"),
- };
-
- // Lower bounds -- we could/should maybe move this to a separate query in `lower`
- let type_alias_data = db.type_alias_data(type_alias);
- let generic_params = generics(db.upcast(), type_alias.into());
- let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST);
- let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast());
- let ctx = crate::TyLoweringContext::new(db, &resolver)
- .with_type_param_mode(crate::lower::TypeParamLoweringMode::Variable);
- let self_ty = Ty::Bound(crate::BoundVar::new(crate::DebruijnIndex::INNERMOST, 0));
- let bounds = type_alias_data
- .bounds
- .iter()
- .flat_map(|bound| GenericPredicate::from_type_bound(&ctx, bound, self_ty.clone()))
- .filter_map(|pred| generic_predicate_to_inline_bound(db, &pred, &self_ty))
- .map(|bound| make_binders(bound.shifted_in(&Interner), 0))
- .collect();
-
- let where_clauses = convert_where_clauses(db, type_alias.into(), &bound_vars);
- let bound_data = rust_ir::AssociatedTyDatumBound { bounds, where_clauses };
- let datum = AssociatedTyDatum {
- trait_id: trait_.to_chalk(db),
- id,
- name: type_alias,
- binders: make_binders(bound_data, generic_params.len()),
- };
- Arc::new(datum)
-}
-
-pub(crate) fn trait_datum_query(
- db: &dyn HirDatabase,
- krate: CrateId,
- trait_id: TraitId,
-) -> Arc<TraitDatum> {
- debug!("trait_datum {:?}", trait_id);
- let trait_: hir_def::TraitId = from_chalk(db, trait_id);
- let trait_data = db.trait_data(trait_);
- debug!("trait {:?} = {:?}", trait_id, trait_data.name);
- let generic_params = generics(db.upcast(), trait_.into());
- let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST);
- let flags = rust_ir::TraitFlags {
- auto: trait_data.auto,
- upstream: trait_.lookup(db.upcast()).container.module(db.upcast()).krate != krate,
- non_enumerable: true,
- coinductive: false, // only relevant for Chalk testing
- // FIXME: set these flags correctly
- marker: false,
- fundamental: false,
- };
- let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
- let associated_ty_ids =
- trait_data.associated_types().map(|type_alias| type_alias.to_chalk(db)).collect();
- let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses };
- let well_known =
- lang_attr(db.upcast(), trait_).and_then(|name| well_known_trait_from_lang_attr(&name));
- let trait_datum = TraitDatum {
- id: trait_id,
- binders: make_binders(trait_datum_bound, bound_vars.len()),
- flags,
- associated_ty_ids,
- well_known,
- };
- Arc::new(trait_datum)
-}
-
-fn well_known_trait_from_lang_attr(name: &str) -> Option<WellKnownTrait> {
- Some(match name {
- "sized" => WellKnownTrait::Sized,
- "copy" => WellKnownTrait::Copy,
- "clone" => WellKnownTrait::Clone,
- "drop" => WellKnownTrait::Drop,
- "fn_once" => WellKnownTrait::FnOnce,
- "fn_mut" => WellKnownTrait::FnMut,
- "fn" => WellKnownTrait::Fn,
- "unsize" => WellKnownTrait::Unsize,
- _ => return None,
- })
-}
-
-fn lang_attr_from_well_known_trait(attr: WellKnownTrait) -> &'static str {
- match attr {
- WellKnownTrait::Sized => "sized",
- WellKnownTrait::Copy => "copy",
- WellKnownTrait::Clone => "clone",
- WellKnownTrait::Drop => "drop",
- WellKnownTrait::FnOnce => "fn_once",
- WellKnownTrait::FnMut => "fn_mut",
- WellKnownTrait::Fn => "fn",
- WellKnownTrait::Unsize => "unsize",
- }
-}
-
-pub(crate) fn struct_datum_query(
- db: &dyn HirDatabase,
- krate: CrateId,
- struct_id: AdtId,
-) -> Arc<StructDatum> {
- debug!("struct_datum {:?}", struct_id);
- let type_ctor: TypeCtor = from_chalk(db, TypeName::Adt(struct_id));
- debug!("struct {:?} = {:?}", struct_id, type_ctor);
- let num_params = type_ctor.num_ty_params(db);
- let upstream = type_ctor.krate(db) != Some(krate);
- let where_clauses = type_ctor
- .as_generic_def()
- .map(|generic_def| {
- let generic_params = generics(db.upcast(), generic_def);
- let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST);
- convert_where_clauses(db, generic_def, &bound_vars)
- })
- .unwrap_or_else(Vec::new);
- let flags = rust_ir::AdtFlags {
- upstream,
- // FIXME set fundamental and phantom_data flags correctly
- fundamental: false,
- phantom_data: false,
- };
- // FIXME provide enum variants properly (for auto traits)
- let variant = rust_ir::AdtVariantDatum {
- fields: Vec::new(), // FIXME add fields (only relevant for auto traits),
- };
- let struct_datum_bound = rust_ir::AdtDatumBound { variants: vec![variant], where_clauses };
- let struct_datum = StructDatum {
- // FIXME set ADT kind
- kind: rust_ir::AdtKind::Struct,
- id: struct_id,
- binders: make_binders(struct_datum_bound, num_params),
- flags,
- };
- Arc::new(struct_datum)
-}
-
-pub(crate) fn impl_datum_query(
- db: &dyn HirDatabase,
- krate: CrateId,
- impl_id: ImplId,
-) -> Arc<ImplDatum> {
- let _p = profile::span("impl_datum");
- debug!("impl_datum {:?}", impl_id);
- let impl_: hir_def::ImplId = from_chalk(db, impl_id);
- impl_def_datum(db, krate, impl_id, impl_)
-}
-
-fn impl_def_datum(
- db: &dyn HirDatabase,
- krate: CrateId,
- chalk_id: ImplId,
- impl_id: hir_def::ImplId,
-) -> Arc<ImplDatum> {
- let trait_ref = db
- .impl_trait(impl_id)
- // ImplIds for impls where the trait ref can't be resolved should never reach Chalk
- .expect("invalid impl passed to Chalk")
- .value;
- let impl_data = db.impl_data(impl_id);
-
- let generic_params = generics(db.upcast(), impl_id.into());
- let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST);
- let trait_ = trait_ref.trait_;
- let impl_type = if impl_id.lookup(db.upcast()).container.module(db.upcast()).krate == krate {
- rust_ir::ImplType::Local
- } else {
- rust_ir::ImplType::External
- };
- let where_clauses = convert_where_clauses(db, impl_id.into(), &bound_vars);
- let negative = impl_data.is_negative;
- debug!(
- "impl {:?}: {}{} where {:?}",
- chalk_id,
- if negative { "!" } else { "" },
- trait_ref.display(db),
- where_clauses
- );
- let trait_ref = trait_ref.to_chalk(db);
-
- let polarity = if negative { rust_ir::Polarity::Negative } else { rust_ir::Polarity::Positive };
-
- let impl_datum_bound = rust_ir::ImplDatumBound { trait_ref, where_clauses };
- let trait_data = db.trait_data(trait_);
- let associated_ty_value_ids = impl_data
- .items
- .iter()
- .filter_map(|item| match item {
- AssocItemId::TypeAliasId(type_alias) => Some(*type_alias),
- _ => None,
- })
- .filter(|&type_alias| {
- // don't include associated types that don't exist in the trait
- let name = &db.type_alias_data(type_alias).name;
- trait_data.associated_type_by_name(name).is_some()
- })
- .map(|type_alias| TypeAliasAsValue(type_alias).to_chalk(db))
- .collect();
- debug!("impl_datum: {:?}", impl_datum_bound);
- let impl_datum = ImplDatum {
- binders: make_binders(impl_datum_bound, bound_vars.len()),
- impl_type,
- polarity,
- associated_ty_value_ids,
- };
- Arc::new(impl_datum)
-}
-
-pub(crate) fn associated_ty_value_query(
- db: &dyn HirDatabase,
- krate: CrateId,
- id: AssociatedTyValueId,
-) -> Arc<AssociatedTyValue> {
- let type_alias: TypeAliasAsValue = from_chalk(db, id);
- type_alias_associated_ty_value(db, krate, type_alias.0)
-}
-
-fn type_alias_associated_ty_value(
- db: &dyn HirDatabase,
- _krate: CrateId,
- type_alias: TypeAliasId,
-) -> Arc<AssociatedTyValue> {
- let type_alias_data = db.type_alias_data(type_alias);
- let impl_id = match type_alias.lookup(db.upcast()).container {
- AssocContainerId::ImplId(it) => it,
- _ => panic!("assoc ty value should be in impl"),
- };
-
- let trait_ref = db.impl_trait(impl_id).expect("assoc ty value should not exist").value; // we don't return any assoc ty values if the impl'd trait can't be resolved
-
- let assoc_ty = db
- .trait_data(trait_ref.trait_)
- .associated_type_by_name(&type_alias_data.name)
- .expect("assoc ty value should not exist"); // validated when building the impl data as well
- let ty = db.ty(type_alias.into());
- let value_bound = rust_ir::AssociatedTyValueBound { ty: ty.value.to_chalk(db) };
- let value = rust_ir::AssociatedTyValue {
- impl_id: impl_id.to_chalk(db),
- associated_ty_id: assoc_ty.to_chalk(db),
- value: make_binders(value_bound, ty.num_binders),
- };
- Arc::new(value)
-}
-
-pub(crate) fn fn_def_datum_query(
- db: &dyn HirDatabase,
- _krate: CrateId,
- fn_def_id: FnDefId,
-) -> Arc<FnDefDatum> {
- let callable_def: CallableDefId = from_chalk(db, fn_def_id);
- let generic_params = generics(db.upcast(), callable_def.into());
- let sig = db.callable_item_signature(callable_def);
- let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST);
- let where_clauses = convert_where_clauses(db, callable_def.into(), &bound_vars);
- let bound = rust_ir::FnDefDatumBound {
- // Note: Chalk doesn't actually use this information yet as far as I am aware, but we provide it anyway
- inputs_and_output: make_binders(
- rust_ir::FnDefInputsAndOutputDatum {
- argument_types: sig
- .value
- .params()
- .iter()
- .map(|ty| ty.clone().to_chalk(db))
- .collect(),
- return_type: sig.value.ret().clone().to_chalk(db),
- }
- .shifted_in(&Interner),
- 0,
- ),
- where_clauses,
- };
- let datum = FnDefDatum {
- id: fn_def_id,
- abi: (),
- safety: chalk_ir::Safety::Safe,
- variadic: sig.value.is_varargs,
- binders: make_binders(bound, sig.num_binders),
- };
- Arc::new(datum)
-}
-
-impl From<FnDefId> for crate::db::InternedCallableDefId {
- fn from(fn_def_id: FnDefId) -> Self {
- InternKey::from_intern_id(fn_def_id.0)
- }
-}
-
-impl From<crate::db::InternedCallableDefId> for FnDefId {
- fn from(callable_def_id: crate::db::InternedCallableDefId) -> Self {
- chalk_ir::FnDefId(callable_def_id.as_intern_id())
- }
-}
-
-impl From<OpaqueTyId> for crate::db::InternedOpaqueTyId {
- fn from(id: OpaqueTyId) -> Self {
- InternKey::from_intern_id(id.0)
- }
-}
-
-impl From<crate::db::InternedOpaqueTyId> for OpaqueTyId {
- fn from(id: crate::db::InternedOpaqueTyId) -> Self {
- chalk_ir::OpaqueTyId(id.as_intern_id())
- }
-}
-
-impl From<chalk_ir::ClosureId<Interner>> for crate::db::ClosureId {
- fn from(id: chalk_ir::ClosureId<Interner>) -> Self {
- Self::from_intern_id(id.0)
- }
-}
-
-impl From<crate::db::ClosureId> for chalk_ir::ClosureId<Interner> {
- fn from(id: crate::db::ClosureId) -> Self {
- chalk_ir::ClosureId(id.as_intern_id())
- }
-}
+++ /dev/null
-//! Implementation of the Chalk `Interner` trait, which allows customizing the
-//! representation of the various objects Chalk deals with (types, goals etc.).
-
-use super::tls;
-use base_db::salsa::InternId;
-use chalk_ir::{GenericArg, Goal, GoalData};
-use hir_def::TypeAliasId;
-use std::{fmt, sync::Arc};
-
-#[derive(Debug, Copy, Clone, Hash, PartialOrd, Ord, PartialEq, Eq)]
-pub struct Interner;
-
-pub type AssocTypeId = chalk_ir::AssocTypeId<Interner>;
-pub type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum<Interner>;
-pub type TraitId = chalk_ir::TraitId<Interner>;
-pub type TraitDatum = chalk_solve::rust_ir::TraitDatum<Interner>;
-pub type AdtId = chalk_ir::AdtId<Interner>;
-pub type StructDatum = chalk_solve::rust_ir::AdtDatum<Interner>;
-pub type ImplId = chalk_ir::ImplId<Interner>;
-pub type ImplDatum = chalk_solve::rust_ir::ImplDatum<Interner>;
-pub type AssociatedTyValueId = chalk_solve::rust_ir::AssociatedTyValueId<Interner>;
-pub type AssociatedTyValue = chalk_solve::rust_ir::AssociatedTyValue<Interner>;
-pub type FnDefId = chalk_ir::FnDefId<Interner>;
-pub type FnDefDatum = chalk_solve::rust_ir::FnDefDatum<Interner>;
-pub type OpaqueTyId = chalk_ir::OpaqueTyId<Interner>;
-pub type OpaqueTyDatum = chalk_solve::rust_ir::OpaqueTyDatum<Interner>;
-
-impl chalk_ir::interner::Interner for Interner {
- type InternedType = Box<chalk_ir::TyData<Self>>; // FIXME use Arc?
- type InternedLifetime = chalk_ir::LifetimeData<Self>;
- type InternedConst = Arc<chalk_ir::ConstData<Self>>;
- type InternedConcreteConst = ();
- type InternedGenericArg = chalk_ir::GenericArgData<Self>;
- type InternedGoal = Arc<GoalData<Self>>;
- type InternedGoals = Vec<Goal<Self>>;
- type InternedSubstitution = Vec<GenericArg<Self>>;
- type InternedProgramClause = chalk_ir::ProgramClauseData<Self>;
- type InternedProgramClauses = Arc<[chalk_ir::ProgramClause<Self>]>;
- type InternedQuantifiedWhereClauses = Vec<chalk_ir::QuantifiedWhereClause<Self>>;
- type InternedVariableKinds = Vec<chalk_ir::VariableKind<Self>>;
- type InternedCanonicalVarKinds = Vec<chalk_ir::CanonicalVarKind<Self>>;
- type InternedConstraints = Vec<chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>>;
- type DefId = InternId;
- type InternedAdtId = hir_def::AdtId;
- type Identifier = TypeAliasId;
- type FnAbi = ();
-
- fn debug_adt_id(type_kind_id: AdtId, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_struct_id(type_kind_id, fmt)))
- }
-
- fn debug_trait_id(type_kind_id: TraitId, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_trait_id(type_kind_id, fmt)))
- }
-
- fn debug_assoc_type_id(id: AssocTypeId, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_assoc_type_id(id, fmt)))
- }
-
- fn debug_alias(
- alias: &chalk_ir::AliasTy<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_alias(alias, fmt)))
- }
-
- fn debug_projection_ty(
- proj: &chalk_ir::ProjectionTy<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_projection_ty(proj, fmt)))
- }
-
- fn debug_opaque_ty(
- opaque_ty: &chalk_ir::OpaqueTy<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_opaque_ty(opaque_ty, fmt)))
- }
-
- fn debug_opaque_ty_id(
- opaque_ty_id: chalk_ir::OpaqueTyId<Self>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_opaque_ty_id(opaque_ty_id, fmt)))
- }
-
- fn debug_ty(ty: &chalk_ir::Ty<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_ty(ty, fmt)))
- }
-
- fn debug_lifetime(
- lifetime: &chalk_ir::Lifetime<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_lifetime(lifetime, fmt)))
- }
-
- fn debug_generic_arg(
- parameter: &GenericArg<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_generic_arg(parameter, fmt)))
- }
-
- fn debug_goal(goal: &Goal<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_goal(goal, fmt)))
- }
-
- fn debug_goals(
- goals: &chalk_ir::Goals<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_goals(goals, fmt)))
- }
-
- fn debug_program_clause_implication(
- pci: &chalk_ir::ProgramClauseImplication<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_program_clause_implication(pci, fmt)))
- }
-
- fn debug_application_ty(
- application_ty: &chalk_ir::ApplicationTy<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_application_ty(application_ty, fmt)))
- }
-
- fn debug_substitution(
- substitution: &chalk_ir::Substitution<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_substitution(substitution, fmt)))
- }
-
- fn debug_separator_trait_ref(
- separator_trait_ref: &chalk_ir::SeparatorTraitRef<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- tls::with_current_program(|prog| {
- Some(prog?.debug_separator_trait_ref(separator_trait_ref, fmt))
- })
- }
-
- fn debug_fn_def_id(
- fn_def_id: chalk_ir::FnDefId<Self>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_fn_def_id(fn_def_id, fmt)))
- }
- fn debug_const(
- constant: &chalk_ir::Const<Self>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_const(constant, fmt)))
- }
- fn debug_variable_kinds(
- variable_kinds: &chalk_ir::VariableKinds<Self>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_variable_kinds(variable_kinds, fmt)))
- }
- fn debug_variable_kinds_with_angles(
- variable_kinds: &chalk_ir::VariableKinds<Self>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- tls::with_current_program(|prog| {
- Some(prog?.debug_variable_kinds_with_angles(variable_kinds, fmt))
- })
- }
- fn debug_canonical_var_kinds(
- canonical_var_kinds: &chalk_ir::CanonicalVarKinds<Self>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- tls::with_current_program(|prog| {
- Some(prog?.debug_canonical_var_kinds(canonical_var_kinds, fmt))
- })
- }
- fn debug_program_clause(
- clause: &chalk_ir::ProgramClause<Self>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_program_clause(clause, fmt)))
- }
- fn debug_program_clauses(
- clauses: &chalk_ir::ProgramClauses<Self>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_program_clauses(clauses, fmt)))
- }
- fn debug_quantified_where_clauses(
- clauses: &chalk_ir::QuantifiedWhereClauses<Self>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- tls::with_current_program(|prog| Some(prog?.debug_quantified_where_clauses(clauses, fmt)))
- }
-
- fn intern_ty(&self, ty: chalk_ir::TyData<Self>) -> Box<chalk_ir::TyData<Self>> {
- Box::new(ty)
- }
-
- fn ty_data<'a>(&self, ty: &'a Box<chalk_ir::TyData<Self>>) -> &'a chalk_ir::TyData<Self> {
- ty
- }
-
- fn intern_lifetime(
- &self,
- lifetime: chalk_ir::LifetimeData<Self>,
- ) -> chalk_ir::LifetimeData<Self> {
- lifetime
- }
-
- fn lifetime_data<'a>(
- &self,
- lifetime: &'a chalk_ir::LifetimeData<Self>,
- ) -> &'a chalk_ir::LifetimeData<Self> {
- lifetime
- }
-
- fn intern_const(&self, constant: chalk_ir::ConstData<Self>) -> Arc<chalk_ir::ConstData<Self>> {
- Arc::new(constant)
- }
-
- fn const_data<'a>(
- &self,
- constant: &'a Arc<chalk_ir::ConstData<Self>>,
- ) -> &'a chalk_ir::ConstData<Self> {
- constant
- }
-
- fn const_eq(&self, _ty: &Box<chalk_ir::TyData<Self>>, _c1: &(), _c2: &()) -> bool {
- true
- }
-
- fn intern_generic_arg(
- &self,
- parameter: chalk_ir::GenericArgData<Self>,
- ) -> chalk_ir::GenericArgData<Self> {
- parameter
- }
-
- fn generic_arg_data<'a>(
- &self,
- parameter: &'a chalk_ir::GenericArgData<Self>,
- ) -> &'a chalk_ir::GenericArgData<Self> {
- parameter
- }
-
- fn intern_goal(&self, goal: GoalData<Self>) -> Arc<GoalData<Self>> {
- Arc::new(goal)
- }
-
- fn intern_goals<E>(
- &self,
- data: impl IntoIterator<Item = Result<Goal<Self>, E>>,
- ) -> Result<Self::InternedGoals, E> {
- data.into_iter().collect()
- }
-
- fn goal_data<'a>(&self, goal: &'a Arc<GoalData<Self>>) -> &'a GoalData<Self> {
- goal
- }
-
- fn goals_data<'a>(&self, goals: &'a Vec<Goal<Interner>>) -> &'a [Goal<Interner>] {
- goals
- }
-
- fn intern_substitution<E>(
- &self,
- data: impl IntoIterator<Item = Result<GenericArg<Self>, E>>,
- ) -> Result<Vec<GenericArg<Self>>, E> {
- data.into_iter().collect()
- }
-
- fn substitution_data<'a>(
- &self,
- substitution: &'a Vec<GenericArg<Self>>,
- ) -> &'a [GenericArg<Self>] {
- substitution
- }
-
- fn intern_program_clause(
- &self,
- data: chalk_ir::ProgramClauseData<Self>,
- ) -> chalk_ir::ProgramClauseData<Self> {
- data
- }
-
- fn program_clause_data<'a>(
- &self,
- clause: &'a chalk_ir::ProgramClauseData<Self>,
- ) -> &'a chalk_ir::ProgramClauseData<Self> {
- clause
- }
-
- fn intern_program_clauses<E>(
- &self,
- data: impl IntoIterator<Item = Result<chalk_ir::ProgramClause<Self>, E>>,
- ) -> Result<Arc<[chalk_ir::ProgramClause<Self>]>, E> {
- data.into_iter().collect()
- }
-
- fn program_clauses_data<'a>(
- &self,
- clauses: &'a Arc<[chalk_ir::ProgramClause<Self>]>,
- ) -> &'a [chalk_ir::ProgramClause<Self>] {
- &clauses
- }
-
- fn intern_quantified_where_clauses<E>(
- &self,
- data: impl IntoIterator<Item = Result<chalk_ir::QuantifiedWhereClause<Self>, E>>,
- ) -> Result<Self::InternedQuantifiedWhereClauses, E> {
- data.into_iter().collect()
- }
-
- fn quantified_where_clauses_data<'a>(
- &self,
- clauses: &'a Self::InternedQuantifiedWhereClauses,
- ) -> &'a [chalk_ir::QuantifiedWhereClause<Self>] {
- clauses
- }
-
- fn intern_generic_arg_kinds<E>(
- &self,
- data: impl IntoIterator<Item = Result<chalk_ir::VariableKind<Self>, E>>,
- ) -> Result<Self::InternedVariableKinds, E> {
- data.into_iter().collect()
- }
-
- fn variable_kinds_data<'a>(
- &self,
- parameter_kinds: &'a Self::InternedVariableKinds,
- ) -> &'a [chalk_ir::VariableKind<Self>] {
- ¶meter_kinds
- }
-
- fn intern_canonical_var_kinds<E>(
- &self,
- data: impl IntoIterator<Item = Result<chalk_ir::CanonicalVarKind<Self>, E>>,
- ) -> Result<Self::InternedCanonicalVarKinds, E> {
- data.into_iter().collect()
- }
-
- fn canonical_var_kinds_data<'a>(
- &self,
- canonical_var_kinds: &'a Self::InternedCanonicalVarKinds,
- ) -> &'a [chalk_ir::CanonicalVarKind<Self>] {
- &canonical_var_kinds
- }
-
- fn intern_constraints<E>(
- &self,
- data: impl IntoIterator<Item = Result<chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>, E>>,
- ) -> Result<Self::InternedConstraints, E> {
- data.into_iter().collect()
- }
-
- fn constraints_data<'a>(
- &self,
- constraints: &'a Self::InternedConstraints,
- ) -> &'a [chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>] {
- constraints
- }
- fn debug_closure_id(
- _fn_def_id: chalk_ir::ClosureId<Self>,
- _fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- None
- }
- fn debug_constraints(
- _clauses: &chalk_ir::Constraints<Self>,
- _fmt: &mut fmt::Formatter<'_>,
- ) -> Option<fmt::Result> {
- None
- }
-}
-
-impl chalk_ir::interner::HasInterner for Interner {
- type Interner = Self;
-}
+++ /dev/null
-//! This module contains the implementations of the `ToChalk` trait, which
-//! handles conversion between our data types and their corresponding types in
-//! Chalk (in both directions); plus some helper functions for more specialized
-//! conversions.
-
-use chalk_ir::{
- cast::Cast, fold::shift::Shift, interner::HasInterner, PlaceholderIndex, Scalar, TypeName,
- UniverseIndex,
-};
-use chalk_solve::rust_ir;
-
-use base_db::salsa::InternKey;
-use hir_def::{type_ref::Mutability, AssocContainerId, GenericDefId, Lookup, TypeAliasId};
-
-use crate::{
- db::HirDatabase,
- primitive::{FloatBitness, FloatTy, IntBitness, IntTy, Signedness},
- traits::{Canonical, Obligation},
- ApplicationTy, CallableDefId, GenericPredicate, InEnvironment, OpaqueTy, OpaqueTyId,
- ProjectionPredicate, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TyKind, TypeCtor,
-};
-
-use super::interner::*;
-use super::*;
-
-impl ToChalk for Ty {
- type Chalk = chalk_ir::Ty<Interner>;
- fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Ty<Interner> {
- match self {
- Ty::Apply(apply_ty) => match apply_ty.ctor {
- TypeCtor::Ref(m) => ref_to_chalk(db, m, apply_ty.parameters),
- TypeCtor::Array => array_to_chalk(db, apply_ty.parameters),
- TypeCtor::FnPtr { num_args: _, is_varargs } => {
- let substitution = apply_ty.parameters.to_chalk(db).shifted_in(&Interner);
- chalk_ir::TyData::Function(chalk_ir::FnPointer {
- num_binders: 0,
- abi: (),
- safety: chalk_ir::Safety::Safe,
- variadic: is_varargs,
- substitution,
- })
- .intern(&Interner)
- }
- _ => {
- let name = apply_ty.ctor.to_chalk(db);
- let substitution = apply_ty.parameters.to_chalk(db);
- chalk_ir::ApplicationTy { name, substitution }.cast(&Interner).intern(&Interner)
- }
- },
- Ty::Projection(proj_ty) => {
- let associated_ty_id = proj_ty.associated_ty.to_chalk(db);
- let substitution = proj_ty.parameters.to_chalk(db);
- chalk_ir::AliasTy::Projection(chalk_ir::ProjectionTy {
- associated_ty_id,
- substitution,
- })
- .cast(&Interner)
- .intern(&Interner)
- }
- Ty::Placeholder(id) => {
- let interned_id = db.intern_type_param_id(id);
- PlaceholderIndex {
- ui: UniverseIndex::ROOT,
- idx: interned_id.as_intern_id().as_usize(),
- }
- .to_ty::<Interner>(&Interner)
- }
- Ty::Bound(idx) => chalk_ir::TyData::BoundVar(idx).intern(&Interner),
- Ty::Infer(_infer_ty) => panic!("uncanonicalized infer ty"),
- Ty::Dyn(predicates) => {
- let where_clauses = chalk_ir::QuantifiedWhereClauses::from_iter(
- &Interner,
- predicates.iter().filter(|p| !p.is_error()).cloned().map(|p| p.to_chalk(db)),
- );
- let bounded_ty = chalk_ir::DynTy {
- bounds: make_binders(where_clauses, 1),
- lifetime: FAKE_PLACEHOLDER.to_lifetime(&Interner),
- };
- chalk_ir::TyData::Dyn(bounded_ty).intern(&Interner)
- }
- Ty::Opaque(opaque_ty) => {
- let opaque_ty_id = opaque_ty.opaque_ty_id.to_chalk(db);
- let substitution = opaque_ty.parameters.to_chalk(db);
- chalk_ir::TyData::Alias(chalk_ir::AliasTy::Opaque(chalk_ir::OpaqueTy {
- opaque_ty_id,
- substitution,
- }))
- .intern(&Interner)
- }
- Ty::Unknown => {
- let substitution = chalk_ir::Substitution::empty(&Interner);
- let name = TypeName::Error;
- chalk_ir::ApplicationTy { name, substitution }.cast(&Interner).intern(&Interner)
- }
- }
- }
- fn from_chalk(db: &dyn HirDatabase, chalk: chalk_ir::Ty<Interner>) -> Self {
- match chalk.data(&Interner).clone() {
- chalk_ir::TyData::Apply(apply_ty) => match apply_ty.name {
- TypeName::Error => Ty::Unknown,
- TypeName::Ref(m) => ref_from_chalk(db, m, apply_ty.substitution),
- TypeName::Array => array_from_chalk(db, apply_ty.substitution),
- _ => {
- let ctor = from_chalk(db, apply_ty.name);
- let parameters = from_chalk(db, apply_ty.substitution);
- Ty::Apply(ApplicationTy { ctor, parameters })
- }
- },
- chalk_ir::TyData::Placeholder(idx) => {
- assert_eq!(idx.ui, UniverseIndex::ROOT);
- let interned_id = crate::db::GlobalTypeParamId::from_intern_id(
- crate::salsa::InternId::from(idx.idx),
- );
- Ty::Placeholder(db.lookup_intern_type_param_id(interned_id))
- }
- chalk_ir::TyData::Alias(chalk_ir::AliasTy::Projection(proj)) => {
- let associated_ty = from_chalk(db, proj.associated_ty_id);
- let parameters = from_chalk(db, proj.substitution);
- Ty::Projection(ProjectionTy { associated_ty, parameters })
- }
- chalk_ir::TyData::Alias(chalk_ir::AliasTy::Opaque(opaque_ty)) => {
- let impl_trait_id = from_chalk(db, opaque_ty.opaque_ty_id);
- let parameters = from_chalk(db, opaque_ty.substitution);
- Ty::Opaque(OpaqueTy { opaque_ty_id: impl_trait_id, parameters })
- }
- chalk_ir::TyData::Function(chalk_ir::FnPointer {
- num_binders,
- variadic,
- substitution,
- ..
- }) => {
- assert_eq!(num_binders, 0);
- let parameters: Substs = from_chalk(
- db,
- substitution.shifted_out(&Interner).expect("fn ptr should have no binders"),
- );
- Ty::Apply(ApplicationTy {
- ctor: TypeCtor::FnPtr {
- num_args: (parameters.len() - 1) as u16,
- is_varargs: variadic,
- },
- parameters,
- })
- }
- chalk_ir::TyData::BoundVar(idx) => Ty::Bound(idx),
- chalk_ir::TyData::InferenceVar(_iv, _kind) => Ty::Unknown,
- chalk_ir::TyData::Dyn(where_clauses) => {
- assert_eq!(where_clauses.bounds.binders.len(&Interner), 1);
- let predicates = where_clauses
- .bounds
- .skip_binders()
- .iter(&Interner)
- .map(|c| from_chalk(db, c.clone()))
- .collect();
- Ty::Dyn(predicates)
- }
- }
- }
-}
-
-const FAKE_PLACEHOLDER: PlaceholderIndex =
- PlaceholderIndex { ui: UniverseIndex::ROOT, idx: usize::MAX };
-
-/// We currently don't model lifetimes, but Chalk does. So, we have to insert a
-/// fake lifetime here, because Chalks built-in logic may expect it to be there.
-fn ref_to_chalk(
- db: &dyn HirDatabase,
- mutability: Mutability,
- subst: Substs,
-) -> chalk_ir::Ty<Interner> {
- let arg = subst[0].clone().to_chalk(db);
- let lifetime = FAKE_PLACEHOLDER.to_lifetime(&Interner);
- chalk_ir::ApplicationTy {
- name: TypeName::Ref(mutability.to_chalk(db)),
- substitution: chalk_ir::Substitution::from_iter(
- &Interner,
- vec![lifetime.cast(&Interner), arg.cast(&Interner)],
- ),
- }
- .intern(&Interner)
-}
-
-/// Here we remove the lifetime from the type we got from Chalk.
-fn ref_from_chalk(
- db: &dyn HirDatabase,
- mutability: chalk_ir::Mutability,
- subst: chalk_ir::Substitution<Interner>,
-) -> Ty {
- let tys = subst
- .iter(&Interner)
- .filter_map(|p| Some(from_chalk(db, p.ty(&Interner)?.clone())))
- .collect();
- Ty::apply(TypeCtor::Ref(from_chalk(db, mutability)), Substs(tys))
-}
-
-/// We currently don't model constants, but Chalk does. So, we have to insert a
-/// fake constant here, because Chalks built-in logic may expect it to be there.
-fn array_to_chalk(db: &dyn HirDatabase, subst: Substs) -> chalk_ir::Ty<Interner> {
- let arg = subst[0].clone().to_chalk(db);
- let usize_ty = chalk_ir::ApplicationTy {
- name: TypeName::Scalar(Scalar::Uint(chalk_ir::UintTy::Usize)),
- substitution: chalk_ir::Substitution::empty(&Interner),
- }
- .intern(&Interner);
- let const_ = FAKE_PLACEHOLDER.to_const(&Interner, usize_ty);
- chalk_ir::ApplicationTy {
- name: TypeName::Array,
- substitution: chalk_ir::Substitution::from_iter(
- &Interner,
- vec![arg.cast(&Interner), const_.cast(&Interner)],
- ),
- }
- .intern(&Interner)
-}
-
-/// Here we remove the const from the type we got from Chalk.
-fn array_from_chalk(db: &dyn HirDatabase, subst: chalk_ir::Substitution<Interner>) -> Ty {
- let tys = subst
- .iter(&Interner)
- .filter_map(|p| Some(from_chalk(db, p.ty(&Interner)?.clone())))
- .collect();
- Ty::apply(TypeCtor::Array, Substs(tys))
-}
-
-impl ToChalk for Substs {
- type Chalk = chalk_ir::Substitution<Interner>;
-
- fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Substitution<Interner> {
- chalk_ir::Substitution::from_iter(&Interner, self.iter().map(|ty| ty.clone().to_chalk(db)))
- }
-
- fn from_chalk(db: &dyn HirDatabase, parameters: chalk_ir::Substitution<Interner>) -> Substs {
- let tys = parameters
- .iter(&Interner)
- .map(|p| match p.ty(&Interner) {
- Some(ty) => from_chalk(db, ty.clone()),
- None => unimplemented!(),
- })
- .collect();
- Substs(tys)
- }
-}
-
-impl ToChalk for TraitRef {
- type Chalk = chalk_ir::TraitRef<Interner>;
-
- fn to_chalk(self: TraitRef, db: &dyn HirDatabase) -> chalk_ir::TraitRef<Interner> {
- let trait_id = self.trait_.to_chalk(db);
- let substitution = self.substs.to_chalk(db);
- chalk_ir::TraitRef { trait_id, substitution }
- }
-
- fn from_chalk(db: &dyn HirDatabase, trait_ref: chalk_ir::TraitRef<Interner>) -> Self {
- let trait_ = from_chalk(db, trait_ref.trait_id);
- let substs = from_chalk(db, trait_ref.substitution);
- TraitRef { trait_, substs }
- }
-}
-
-impl ToChalk for hir_def::TraitId {
- type Chalk = TraitId;
-
- fn to_chalk(self, _db: &dyn HirDatabase) -> TraitId {
- chalk_ir::TraitId(self.as_intern_id())
- }
-
- fn from_chalk(_db: &dyn HirDatabase, trait_id: TraitId) -> hir_def::TraitId {
- InternKey::from_intern_id(trait_id.0)
- }
-}
-
-impl ToChalk for OpaqueTyId {
- type Chalk = chalk_ir::OpaqueTyId<Interner>;
-
- fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::OpaqueTyId<Interner> {
- db.intern_impl_trait_id(self).into()
- }
-
- fn from_chalk(
- db: &dyn HirDatabase,
- opaque_ty_id: chalk_ir::OpaqueTyId<Interner>,
- ) -> OpaqueTyId {
- db.lookup_intern_impl_trait_id(opaque_ty_id.into())
- }
-}
-
-impl ToChalk for TypeCtor {
- type Chalk = TypeName<Interner>;
-
- fn to_chalk(self, db: &dyn HirDatabase) -> TypeName<Interner> {
- match self {
- TypeCtor::AssociatedType(type_alias) => {
- let type_id = type_alias.to_chalk(db);
- TypeName::AssociatedType(type_id)
- }
-
- TypeCtor::OpaqueType(impl_trait_id) => {
- let id = impl_trait_id.to_chalk(db);
- TypeName::OpaqueType(id)
- }
-
- TypeCtor::Bool => TypeName::Scalar(Scalar::Bool),
- TypeCtor::Char => TypeName::Scalar(Scalar::Char),
- TypeCtor::Int(int_ty) => TypeName::Scalar(int_ty_to_chalk(int_ty)),
- TypeCtor::Float(FloatTy { bitness: FloatBitness::X32 }) => {
- TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F32))
- }
- TypeCtor::Float(FloatTy { bitness: FloatBitness::X64 }) => {
- TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F64))
- }
-
- TypeCtor::Tuple { cardinality } => TypeName::Tuple(cardinality.into()),
- TypeCtor::RawPtr(mutability) => TypeName::Raw(mutability.to_chalk(db)),
- TypeCtor::Slice => TypeName::Slice,
- TypeCtor::Array => TypeName::Array,
- TypeCtor::Ref(mutability) => TypeName::Ref(mutability.to_chalk(db)),
- TypeCtor::Str => TypeName::Str,
- TypeCtor::FnDef(callable_def) => {
- let id = callable_def.to_chalk(db);
- TypeName::FnDef(id)
- }
- TypeCtor::Never => TypeName::Never,
-
- TypeCtor::Closure { def, expr } => {
- let closure_id = db.intern_closure((def, expr));
- TypeName::Closure(closure_id.into())
- }
-
- TypeCtor::Adt(adt_id) => TypeName::Adt(chalk_ir::AdtId(adt_id)),
-
- TypeCtor::FnPtr { .. } => {
- // This should not be reached, since Chalk doesn't represent
- // function pointers with TypeName
- unreachable!()
- }
- }
- }
-
- fn from_chalk(db: &dyn HirDatabase, type_name: TypeName<Interner>) -> TypeCtor {
- match type_name {
- TypeName::Adt(struct_id) => TypeCtor::Adt(struct_id.0),
- TypeName::AssociatedType(type_id) => TypeCtor::AssociatedType(from_chalk(db, type_id)),
- TypeName::OpaqueType(opaque_type_id) => {
- TypeCtor::OpaqueType(from_chalk(db, opaque_type_id))
- }
-
- TypeName::Scalar(Scalar::Bool) => TypeCtor::Bool,
- TypeName::Scalar(Scalar::Char) => TypeCtor::Char,
- TypeName::Scalar(Scalar::Int(int_ty)) => TypeCtor::Int(IntTy {
- signedness: Signedness::Signed,
- bitness: bitness_from_chalk_int(int_ty),
- }),
- TypeName::Scalar(Scalar::Uint(uint_ty)) => TypeCtor::Int(IntTy {
- signedness: Signedness::Unsigned,
- bitness: bitness_from_chalk_uint(uint_ty),
- }),
- TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F32)) => {
- TypeCtor::Float(FloatTy { bitness: FloatBitness::X32 })
- }
- TypeName::Scalar(Scalar::Float(chalk_ir::FloatTy::F64)) => {
- TypeCtor::Float(FloatTy { bitness: FloatBitness::X64 })
- }
- TypeName::Tuple(cardinality) => TypeCtor::Tuple { cardinality: cardinality as u16 },
- TypeName::Raw(mutability) => TypeCtor::RawPtr(from_chalk(db, mutability)),
- TypeName::Slice => TypeCtor::Slice,
- TypeName::Ref(mutability) => TypeCtor::Ref(from_chalk(db, mutability)),
- TypeName::Str => TypeCtor::Str,
- TypeName::Never => TypeCtor::Never,
-
- TypeName::FnDef(fn_def_id) => {
- let callable_def = from_chalk(db, fn_def_id);
- TypeCtor::FnDef(callable_def)
- }
- TypeName::Array => TypeCtor::Array,
-
- TypeName::Closure(id) => {
- let id: crate::db::ClosureId = id.into();
- let (def, expr) = db.lookup_intern_closure(id);
- TypeCtor::Closure { def, expr }
- }
-
- TypeName::Error => {
- // this should not be reached, since we don't represent TypeName::Error with TypeCtor
- unreachable!()
- }
- }
- }
-}
-
-fn bitness_from_chalk_uint(uint_ty: chalk_ir::UintTy) -> IntBitness {
- use chalk_ir::UintTy;
-
- match uint_ty {
- UintTy::Usize => IntBitness::Xsize,
- UintTy::U8 => IntBitness::X8,
- UintTy::U16 => IntBitness::X16,
- UintTy::U32 => IntBitness::X32,
- UintTy::U64 => IntBitness::X64,
- UintTy::U128 => IntBitness::X128,
- }
-}
-
-fn bitness_from_chalk_int(int_ty: chalk_ir::IntTy) -> IntBitness {
- use chalk_ir::IntTy;
-
- match int_ty {
- IntTy::Isize => IntBitness::Xsize,
- IntTy::I8 => IntBitness::X8,
- IntTy::I16 => IntBitness::X16,
- IntTy::I32 => IntBitness::X32,
- IntTy::I64 => IntBitness::X64,
- IntTy::I128 => IntBitness::X128,
- }
-}
-
-fn int_ty_to_chalk(int_ty: IntTy) -> Scalar {
- use chalk_ir::{IntTy, UintTy};
-
- match int_ty.signedness {
- Signedness::Signed => Scalar::Int(match int_ty.bitness {
- IntBitness::Xsize => IntTy::Isize,
- IntBitness::X8 => IntTy::I8,
- IntBitness::X16 => IntTy::I16,
- IntBitness::X32 => IntTy::I32,
- IntBitness::X64 => IntTy::I64,
- IntBitness::X128 => IntTy::I128,
- }),
- Signedness::Unsigned => Scalar::Uint(match int_ty.bitness {
- IntBitness::Xsize => UintTy::Usize,
- IntBitness::X8 => UintTy::U8,
- IntBitness::X16 => UintTy::U16,
- IntBitness::X32 => UintTy::U32,
- IntBitness::X64 => UintTy::U64,
- IntBitness::X128 => UintTy::U128,
- }),
- }
-}
-
-impl ToChalk for Mutability {
- type Chalk = chalk_ir::Mutability;
- fn to_chalk(self, _db: &dyn HirDatabase) -> Self::Chalk {
- match self {
- Mutability::Shared => chalk_ir::Mutability::Not,
- Mutability::Mut => chalk_ir::Mutability::Mut,
- }
- }
- fn from_chalk(_db: &dyn HirDatabase, chalk: Self::Chalk) -> Self {
- match chalk {
- chalk_ir::Mutability::Mut => Mutability::Mut,
- chalk_ir::Mutability::Not => Mutability::Shared,
- }
- }
-}
-
-impl ToChalk for hir_def::ImplId {
- type Chalk = ImplId;
-
- fn to_chalk(self, _db: &dyn HirDatabase) -> ImplId {
- chalk_ir::ImplId(self.as_intern_id())
- }
-
- fn from_chalk(_db: &dyn HirDatabase, impl_id: ImplId) -> hir_def::ImplId {
- InternKey::from_intern_id(impl_id.0)
- }
-}
-
-impl ToChalk for CallableDefId {
- type Chalk = FnDefId;
-
- fn to_chalk(self, db: &dyn HirDatabase) -> FnDefId {
- db.intern_callable_def(self).into()
- }
-
- fn from_chalk(db: &dyn HirDatabase, fn_def_id: FnDefId) -> CallableDefId {
- db.lookup_intern_callable_def(fn_def_id.into())
- }
-}
-
-impl ToChalk for TypeAliasId {
- type Chalk = AssocTypeId;
-
- fn to_chalk(self, _db: &dyn HirDatabase) -> AssocTypeId {
- chalk_ir::AssocTypeId(self.as_intern_id())
- }
-
- fn from_chalk(_db: &dyn HirDatabase, type_alias_id: AssocTypeId) -> TypeAliasId {
- InternKey::from_intern_id(type_alias_id.0)
- }
-}
-
-pub struct TypeAliasAsValue(pub TypeAliasId);
-
-impl ToChalk for TypeAliasAsValue {
- type Chalk = AssociatedTyValueId;
-
- fn to_chalk(self, _db: &dyn HirDatabase) -> AssociatedTyValueId {
- rust_ir::AssociatedTyValueId(self.0.as_intern_id())
- }
-
- fn from_chalk(
- _db: &dyn HirDatabase,
- assoc_ty_value_id: AssociatedTyValueId,
- ) -> TypeAliasAsValue {
- TypeAliasAsValue(TypeAliasId::from_intern_id(assoc_ty_value_id.0))
- }
-}
-
-impl ToChalk for GenericPredicate {
- type Chalk = chalk_ir::QuantifiedWhereClause<Interner>;
-
- fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::QuantifiedWhereClause<Interner> {
- match self {
- GenericPredicate::Implemented(trait_ref) => {
- let chalk_trait_ref = trait_ref.to_chalk(db);
- let chalk_trait_ref = chalk_trait_ref.shifted_in(&Interner);
- make_binders(chalk_ir::WhereClause::Implemented(chalk_trait_ref), 0)
- }
- GenericPredicate::Projection(projection_pred) => {
- let ty = projection_pred.ty.to_chalk(db).shifted_in(&Interner);
- let projection = projection_pred.projection_ty.to_chalk(db).shifted_in(&Interner);
- let alias = chalk_ir::AliasTy::Projection(projection);
- make_binders(chalk_ir::WhereClause::AliasEq(chalk_ir::AliasEq { alias, ty }), 0)
- }
- GenericPredicate::Error => panic!("tried passing GenericPredicate::Error to Chalk"),
- }
- }
-
- fn from_chalk(
- db: &dyn HirDatabase,
- where_clause: chalk_ir::QuantifiedWhereClause<Interner>,
- ) -> GenericPredicate {
- // we don't produce any where clauses with binders and can't currently deal with them
- match where_clause
- .skip_binders()
- .shifted_out(&Interner)
- .expect("unexpected bound vars in where clause")
- {
- chalk_ir::WhereClause::Implemented(tr) => {
- GenericPredicate::Implemented(from_chalk(db, tr))
- }
- chalk_ir::WhereClause::AliasEq(projection_eq) => {
- let projection_ty = from_chalk(
- db,
- match projection_eq.alias {
- chalk_ir::AliasTy::Projection(p) => p,
- _ => unimplemented!(),
- },
- );
- let ty = from_chalk(db, projection_eq.ty);
- GenericPredicate::Projection(ProjectionPredicate { projection_ty, ty })
- }
-
- chalk_ir::WhereClause::LifetimeOutlives(_) => {
- // we shouldn't get these from Chalk
- panic!("encountered LifetimeOutlives from Chalk")
- }
-
- chalk_ir::WhereClause::TypeOutlives(_) => {
- // we shouldn't get these from Chalk
- panic!("encountered TypeOutlives from Chalk")
- }
- }
- }
-}
-
-impl ToChalk for ProjectionTy {
- type Chalk = chalk_ir::ProjectionTy<Interner>;
-
- fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::ProjectionTy<Interner> {
- chalk_ir::ProjectionTy {
- associated_ty_id: self.associated_ty.to_chalk(db),
- substitution: self.parameters.to_chalk(db),
- }
- }
-
- fn from_chalk(
- db: &dyn HirDatabase,
- projection_ty: chalk_ir::ProjectionTy<Interner>,
- ) -> ProjectionTy {
- ProjectionTy {
- associated_ty: from_chalk(db, projection_ty.associated_ty_id),
- parameters: from_chalk(db, projection_ty.substitution),
- }
- }
-}
-
-impl ToChalk for ProjectionPredicate {
- type Chalk = chalk_ir::AliasEq<Interner>;
-
- fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::AliasEq<Interner> {
- chalk_ir::AliasEq {
- alias: chalk_ir::AliasTy::Projection(self.projection_ty.to_chalk(db)),
- ty: self.ty.to_chalk(db),
- }
- }
-
- fn from_chalk(_db: &dyn HirDatabase, _normalize: chalk_ir::AliasEq<Interner>) -> Self {
- unimplemented!()
- }
-}
-
-impl ToChalk for Obligation {
- type Chalk = chalk_ir::DomainGoal<Interner>;
-
- fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::DomainGoal<Interner> {
- match self {
- Obligation::Trait(tr) => tr.to_chalk(db).cast(&Interner),
- Obligation::Projection(pr) => pr.to_chalk(db).cast(&Interner),
- }
- }
-
- fn from_chalk(_db: &dyn HirDatabase, _goal: chalk_ir::DomainGoal<Interner>) -> Self {
- unimplemented!()
- }
-}
-
-impl<T> ToChalk for Canonical<T>
-where
- T: ToChalk,
- T::Chalk: HasInterner<Interner = Interner>,
-{
- type Chalk = chalk_ir::Canonical<T::Chalk>;
-
- fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Canonical<T::Chalk> {
- let kinds = self
- .kinds
- .iter()
- .map(|k| match k {
- TyKind::General => chalk_ir::TyKind::General,
- TyKind::Integer => chalk_ir::TyKind::Integer,
- TyKind::Float => chalk_ir::TyKind::Float,
- })
- .map(|tk| {
- chalk_ir::CanonicalVarKind::new(
- chalk_ir::VariableKind::Ty(tk),
- chalk_ir::UniverseIndex::ROOT,
- )
- });
- let value = self.value.to_chalk(db);
- chalk_ir::Canonical {
- value,
- binders: chalk_ir::CanonicalVarKinds::from_iter(&Interner, kinds),
- }
- }
-
- fn from_chalk(db: &dyn HirDatabase, canonical: chalk_ir::Canonical<T::Chalk>) -> Canonical<T> {
- let kinds = canonical
- .binders
- .iter(&Interner)
- .map(|k| match k.kind {
- chalk_ir::VariableKind::Ty(tk) => match tk {
- chalk_ir::TyKind::General => TyKind::General,
- chalk_ir::TyKind::Integer => TyKind::Integer,
- chalk_ir::TyKind::Float => TyKind::Float,
- },
- chalk_ir::VariableKind::Lifetime => panic!("unexpected lifetime from Chalk"),
- chalk_ir::VariableKind::Const(_) => panic!("unexpected const from Chalk"),
- })
- .collect();
- Canonical { kinds, value: from_chalk(db, canonical.value) }
- }
-}
-
-impl ToChalk for Arc<TraitEnvironment> {
- type Chalk = chalk_ir::Environment<Interner>;
-
- fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Environment<Interner> {
- let mut clauses = Vec::new();
- for pred in &self.predicates {
- if pred.is_error() {
- // for env, we just ignore errors
- continue;
- }
- let program_clause: chalk_ir::ProgramClause<Interner> =
- pred.clone().to_chalk(db).cast(&Interner);
- clauses.push(program_clause.into_from_env_clause(&Interner));
- }
- chalk_ir::Environment::new(&Interner).add_clauses(&Interner, clauses)
- }
-
- fn from_chalk(
- _db: &dyn HirDatabase,
- _env: chalk_ir::Environment<Interner>,
- ) -> Arc<TraitEnvironment> {
- unimplemented!()
- }
-}
-
-impl<T: ToChalk> ToChalk for InEnvironment<T>
-where
- T::Chalk: chalk_ir::interner::HasInterner<Interner = Interner>,
-{
- type Chalk = chalk_ir::InEnvironment<T::Chalk>;
-
- fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::InEnvironment<T::Chalk> {
- chalk_ir::InEnvironment {
- environment: self.environment.to_chalk(db),
- goal: self.value.to_chalk(db),
- }
- }
-
- fn from_chalk(
- db: &dyn HirDatabase,
- in_env: chalk_ir::InEnvironment<T::Chalk>,
- ) -> InEnvironment<T> {
- InEnvironment {
- environment: from_chalk(db, in_env.environment),
- value: from_chalk(db, in_env.goal),
- }
- }
-}
-
-pub(super) fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T>
-where
- T: HasInterner<Interner = Interner>,
-{
- chalk_ir::Binders::new(
- chalk_ir::VariableKinds::from_iter(
- &Interner,
- std::iter::repeat(chalk_ir::VariableKind::Ty(chalk_ir::TyKind::General)).take(num_vars),
- ),
- value,
- )
-}
-
-pub(super) fn convert_where_clauses(
- db: &dyn HirDatabase,
- def: GenericDefId,
- substs: &Substs,
-) -> Vec<chalk_ir::QuantifiedWhereClause<Interner>> {
- let generic_predicates = db.generic_predicates(def);
- let mut result = Vec::with_capacity(generic_predicates.len());
- for pred in generic_predicates.iter() {
- if pred.value.is_error() {
- // skip errored predicates completely
- continue;
- }
- result.push(pred.clone().subst(substs).to_chalk(db));
- }
- result
-}
-
-pub(super) fn generic_predicate_to_inline_bound(
- db: &dyn HirDatabase,
- pred: &GenericPredicate,
- self_ty: &Ty,
-) -> Option<rust_ir::InlineBound<Interner>> {
- // An InlineBound is like a GenericPredicate, except the self type is left out.
- // We don't have a special type for this, but Chalk does.
- match pred {
- GenericPredicate::Implemented(trait_ref) => {
- if &trait_ref.substs[0] != self_ty {
- // we can only convert predicates back to type bounds if they
- // have the expected self type
- return None;
- }
- let args_no_self = trait_ref.substs[1..]
- .iter()
- .map(|ty| ty.clone().to_chalk(db).cast(&Interner))
- .collect();
- let trait_bound =
- rust_ir::TraitBound { trait_id: trait_ref.trait_.to_chalk(db), args_no_self };
- Some(rust_ir::InlineBound::TraitBound(trait_bound))
- }
- GenericPredicate::Projection(proj) => {
- if &proj.projection_ty.parameters[0] != self_ty {
- return None;
- }
- let trait_ = match proj.projection_ty.associated_ty.lookup(db.upcast()).container {
- AssocContainerId::TraitId(t) => t,
- _ => panic!("associated type not in trait"),
- };
- let args_no_self = proj.projection_ty.parameters[1..]
- .iter()
- .map(|ty| ty.clone().to_chalk(db).cast(&Interner))
- .collect();
- let alias_eq_bound = rust_ir::AliasEqBound {
- value: proj.ty.clone().to_chalk(db),
- trait_bound: rust_ir::TraitBound { trait_id: trait_.to_chalk(db), args_no_self },
- associated_ty_id: proj.projection_ty.associated_ty.to_chalk(db),
- parameters: Vec::new(), // FIXME we don't support generic associated types yet
- };
- Some(rust_ir::InlineBound::AliasEqBound(alias_eq_bound))
- }
- GenericPredicate::Error => None,
- }
-}
+++ /dev/null
-//! Implementation of Chalk debug helper functions using TLS.
-use std::fmt;
-
-use chalk_ir::{AliasTy, GenericArg, Goal, Goals, Lifetime, ProgramClauseImplication, TypeName};
-use itertools::Itertools;
-
-use super::{from_chalk, Interner};
-use crate::{db::HirDatabase, CallableDefId, TypeCtor};
-use hir_def::{AdtId, AssocContainerId, DefWithBodyId, Lookup, TypeAliasId};
-
-pub use unsafe_tls::{set_current_program, with_current_program};
-
-pub struct DebugContext<'a>(&'a dyn HirDatabase);
-
-impl DebugContext<'_> {
- pub fn debug_struct_id(
- &self,
- id: super::AdtId,
- f: &mut fmt::Formatter<'_>,
- ) -> Result<(), fmt::Error> {
- let type_ctor: TypeCtor = from_chalk(self.0, TypeName::Adt(id));
- match type_ctor {
- TypeCtor::Bool => write!(f, "bool")?,
- TypeCtor::Char => write!(f, "char")?,
- TypeCtor::Int(t) => write!(f, "{}", t)?,
- TypeCtor::Float(t) => write!(f, "{}", t)?,
- TypeCtor::Str => write!(f, "str")?,
- TypeCtor::Slice => write!(f, "slice")?,
- TypeCtor::Array => write!(f, "array")?,
- TypeCtor::RawPtr(m) => write!(f, "*{}", m.as_keyword_for_ptr())?,
- TypeCtor::Ref(m) => write!(f, "&{}", m.as_keyword_for_ref())?,
- TypeCtor::Never => write!(f, "!")?,
- TypeCtor::Tuple { .. } => {
- write!(f, "()")?;
- }
- TypeCtor::FnPtr { .. } => {
- write!(f, "fn")?;
- }
- TypeCtor::FnDef(def) => {
- let name = match def {
- CallableDefId::FunctionId(ff) => self.0.function_data(ff).name.clone(),
- CallableDefId::StructId(s) => self.0.struct_data(s).name.clone(),
- CallableDefId::EnumVariantId(e) => {
- let enum_data = self.0.enum_data(e.parent);
- enum_data.variants[e.local_id].name.clone()
- }
- };
- match def {
- CallableDefId::FunctionId(_) => write!(f, "{{fn {}}}", name)?,
- CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {
- write!(f, "{{ctor {}}}", name)?
- }
- }
- }
- TypeCtor::Adt(def_id) => {
- let name = match def_id {
- AdtId::StructId(it) => self.0.struct_data(it).name.clone(),
- AdtId::UnionId(it) => self.0.union_data(it).name.clone(),
- AdtId::EnumId(it) => self.0.enum_data(it).name.clone(),
- };
- write!(f, "{}", name)?;
- }
- TypeCtor::AssociatedType(type_alias) => {
- let trait_ = match type_alias.lookup(self.0.upcast()).container {
- AssocContainerId::TraitId(it) => it,
- _ => panic!("not an associated type"),
- };
- let trait_name = self.0.trait_data(trait_).name.clone();
- let name = self.0.type_alias_data(type_alias).name.clone();
- write!(f, "{}::{}", trait_name, name)?;
- }
- TypeCtor::OpaqueType(opaque_ty_id) => match opaque_ty_id {
- crate::OpaqueTyId::ReturnTypeImplTrait(func, idx) => {
- write!(f, "{{impl trait {} of {:?}}}", idx, func)?;
- }
- },
- TypeCtor::Closure { def, expr } => {
- write!(f, "{{closure {:?} in ", expr.into_raw())?;
- match def {
- DefWithBodyId::FunctionId(func) => {
- write!(f, "fn {}", self.0.function_data(func).name)?
- }
- DefWithBodyId::StaticId(s) => {
- if let Some(name) = self.0.static_data(s).name.as_ref() {
- write!(f, "body of static {}", name)?;
- } else {
- write!(f, "body of unnamed static {:?}", s)?;
- }
- }
- DefWithBodyId::ConstId(c) => {
- if let Some(name) = self.0.const_data(c).name.as_ref() {
- write!(f, "body of const {}", name)?;
- } else {
- write!(f, "body of unnamed const {:?}", c)?;
- }
- }
- };
- write!(f, "}}")?;
- }
- }
- Ok(())
- }
-
- pub fn debug_trait_id(
- &self,
- id: super::TraitId,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Result<(), fmt::Error> {
- let trait_: hir_def::TraitId = from_chalk(self.0, id);
- let trait_data = self.0.trait_data(trait_);
- write!(fmt, "{}", trait_data.name)
- }
-
- pub fn debug_assoc_type_id(
- &self,
- id: super::AssocTypeId,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Result<(), fmt::Error> {
- let type_alias: TypeAliasId = from_chalk(self.0, id);
- let type_alias_data = self.0.type_alias_data(type_alias);
- let trait_ = match type_alias.lookup(self.0.upcast()).container {
- AssocContainerId::TraitId(t) => t,
- _ => panic!("associated type not in trait"),
- };
- let trait_data = self.0.trait_data(trait_);
- write!(fmt, "{}::{}", trait_data.name, type_alias_data.name)
- }
-
- pub fn debug_opaque_ty_id(
- &self,
- opaque_ty_id: chalk_ir::OpaqueTyId<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Result<(), fmt::Error> {
- fmt.debug_struct("OpaqueTyId").field("index", &opaque_ty_id.0).finish()
- }
-
- pub fn debug_alias(
- &self,
- alias_ty: &AliasTy<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Result<(), fmt::Error> {
- match alias_ty {
- AliasTy::Projection(projection_ty) => self.debug_projection_ty(projection_ty, fmt),
- AliasTy::Opaque(opaque_ty) => self.debug_opaque_ty(opaque_ty, fmt),
- }
- }
-
- pub fn debug_projection_ty(
- &self,
- projection_ty: &chalk_ir::ProjectionTy<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Result<(), fmt::Error> {
- let type_alias: TypeAliasId = from_chalk(self.0, projection_ty.associated_ty_id);
- let type_alias_data = self.0.type_alias_data(type_alias);
- let trait_ = match type_alias.lookup(self.0.upcast()).container {
- AssocContainerId::TraitId(t) => t,
- _ => panic!("associated type not in trait"),
- };
- let trait_data = self.0.trait_data(trait_);
- let params = projection_ty.substitution.as_slice(&Interner);
- write!(fmt, "<{:?} as {}", ¶ms[0], trait_data.name,)?;
- if params.len() > 1 {
- write!(
- fmt,
- "<{}>",
- ¶ms[1..].iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))),
- )?;
- }
- write!(fmt, ">::{}", type_alias_data.name)
- }
-
- pub fn debug_opaque_ty(
- &self,
- opaque_ty: &chalk_ir::OpaqueTy<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Result<(), fmt::Error> {
- write!(fmt, "{:?}", opaque_ty.opaque_ty_id)
- }
-
- pub fn debug_ty(
- &self,
- ty: &chalk_ir::Ty<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Result<(), fmt::Error> {
- write!(fmt, "{:?}", ty.data(&Interner))
- }
-
- pub fn debug_lifetime(
- &self,
- lifetime: &Lifetime<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Result<(), fmt::Error> {
- write!(fmt, "{:?}", lifetime.data(&Interner))
- }
-
- pub fn debug_generic_arg(
- &self,
- parameter: &GenericArg<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Result<(), fmt::Error> {
- write!(fmt, "{:?}", parameter.data(&Interner).inner_debug())
- }
-
- pub fn debug_goal(
- &self,
- goal: &Goal<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Result<(), fmt::Error> {
- let goal_data = goal.data(&Interner);
- write!(fmt, "{:?}", goal_data)
- }
-
- pub fn debug_goals(
- &self,
- goals: &Goals<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Result<(), fmt::Error> {
- write!(fmt, "{:?}", goals.debug(&Interner))
- }
-
- pub fn debug_program_clause_implication(
- &self,
- pci: &ProgramClauseImplication<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Result<(), fmt::Error> {
- write!(fmt, "{:?}", pci.debug(&Interner))
- }
-
- pub fn debug_application_ty(
- &self,
- application_ty: &chalk_ir::ApplicationTy<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Result<(), fmt::Error> {
- write!(fmt, "{:?}", application_ty.debug(&Interner))
- }
-
- pub fn debug_substitution(
- &self,
- substitution: &chalk_ir::Substitution<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Result<(), fmt::Error> {
- write!(fmt, "{:?}", substitution.debug(&Interner))
- }
-
- pub fn debug_separator_trait_ref(
- &self,
- separator_trait_ref: &chalk_ir::SeparatorTraitRef<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Result<(), fmt::Error> {
- write!(fmt, "{:?}", separator_trait_ref.debug(&Interner))
- }
-
- pub fn debug_fn_def_id(
- &self,
- fn_def_id: chalk_ir::FnDefId<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> Result<(), fmt::Error> {
- let def: CallableDefId = from_chalk(self.0, fn_def_id);
- let name = match def {
- CallableDefId::FunctionId(ff) => self.0.function_data(ff).name.clone(),
- CallableDefId::StructId(s) => self.0.struct_data(s).name.clone(),
- CallableDefId::EnumVariantId(e) => {
- let enum_data = self.0.enum_data(e.parent);
- enum_data.variants[e.local_id].name.clone()
- }
- };
- match def {
- CallableDefId::FunctionId(_) => write!(fmt, "{{fn {}}}", name),
- CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {
- write!(fmt, "{{ctor {}}}", name)
- }
- }
- }
-
- pub fn debug_const(
- &self,
- _constant: &chalk_ir::Const<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> fmt::Result {
- write!(fmt, "const")
- }
-
- pub fn debug_variable_kinds(
- &self,
- variable_kinds: &chalk_ir::VariableKinds<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> fmt::Result {
- write!(fmt, "{:?}", variable_kinds.as_slice(&Interner))
- }
- pub fn debug_variable_kinds_with_angles(
- &self,
- variable_kinds: &chalk_ir::VariableKinds<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> fmt::Result {
- write!(fmt, "{:?}", variable_kinds.inner_debug(&Interner))
- }
- pub fn debug_canonical_var_kinds(
- &self,
- canonical_var_kinds: &chalk_ir::CanonicalVarKinds<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> fmt::Result {
- write!(fmt, "{:?}", canonical_var_kinds.as_slice(&Interner))
- }
- pub fn debug_program_clause(
- &self,
- clause: &chalk_ir::ProgramClause<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> fmt::Result {
- write!(fmt, "{:?}", clause.data(&Interner))
- }
- pub fn debug_program_clauses(
- &self,
- clauses: &chalk_ir::ProgramClauses<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> fmt::Result {
- write!(fmt, "{:?}", clauses.as_slice(&Interner))
- }
- pub fn debug_quantified_where_clauses(
- &self,
- clauses: &chalk_ir::QuantifiedWhereClauses<Interner>,
- fmt: &mut fmt::Formatter<'_>,
- ) -> fmt::Result {
- write!(fmt, "{:?}", clauses.as_slice(&Interner))
- }
-}
-
-mod unsafe_tls {
- use super::DebugContext;
- use crate::db::HirDatabase;
- use scoped_tls::scoped_thread_local;
-
- scoped_thread_local!(static PROGRAM: DebugContext);
-
- pub fn with_current_program<R>(
- op: impl for<'a> FnOnce(Option<&'a DebugContext<'a>>) -> R,
- ) -> R {
- if PROGRAM.is_set() {
- PROGRAM.with(|prog| op(Some(prog)))
- } else {
- op(None)
- }
- }
-
- pub fn set_current_program<OP, R>(p: &dyn HirDatabase, op: OP) -> R
- where
- OP: FnOnce() -> R,
- {
- let ctx = DebugContext(p);
- // we're transmuting the lifetime in the DebugContext to static. This is
- // fine because we only keep the reference for the lifetime of this
- // function, *and* the only way to access the context is through
- // `with_current_program`, which hides the lifetime through the `for`
- // type.
- let static_p: &DebugContext<'static> =
- unsafe { std::mem::transmute::<&DebugContext, &DebugContext<'static>>(&ctx) };
- PROGRAM.set(static_p, || op())
- }
-}
+++ /dev/null
-//! Helper functions for working with def, which don't need to be a separate
-//! query, but can't be computed directly from `*Data` (ie, which need a `db`).
-use std::sync::Arc;
-
-use hir_def::generics::WherePredicateTarget;
-use hir_def::{
- adt::VariantData,
- db::DefDatabase,
- generics::{GenericParams, TypeParamData, TypeParamProvenance},
- path::Path,
- resolver::{HasResolver, TypeNs},
- type_ref::TypeRef,
- AssocContainerId, GenericDefId, Lookup, TraitId, TypeAliasId, TypeParamId, VariantId,
-};
-use hir_expand::name::{name, Name};
-
-use crate::{db::HirDatabase, GenericPredicate, TraitRef};
-
-fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> Vec<TraitId> {
- let resolver = trait_.resolver(db);
- // returning the iterator directly doesn't easily work because of
- // lifetime problems, but since there usually shouldn't be more than a
- // few direct traits this should be fine (we could even use some kind of
- // SmallVec if performance is a concern)
- let generic_params = db.generic_params(trait_.into());
- let trait_self = generic_params.find_trait_self_param();
- generic_params
- .where_predicates
- .iter()
- .filter_map(|pred| match &pred.target {
- WherePredicateTarget::TypeRef(TypeRef::Path(p)) if p == &Path::from(name![Self]) => {
- pred.bound.as_path()
- }
- WherePredicateTarget::TypeParam(local_id) if Some(*local_id) == trait_self => {
- pred.bound.as_path()
- }
- _ => None,
- })
- .filter_map(|path| match resolver.resolve_path_in_type_ns_fully(db, path.mod_path()) {
- Some(TypeNs::TraitId(t)) => Some(t),
- _ => None,
- })
- .collect()
-}
-
-fn direct_super_trait_refs(db: &dyn HirDatabase, trait_ref: &TraitRef) -> Vec<TraitRef> {
- // returning the iterator directly doesn't easily work because of
- // lifetime problems, but since there usually shouldn't be more than a
- // few direct traits this should be fine (we could even use some kind of
- // SmallVec if performance is a concern)
- let generic_params = db.generic_params(trait_ref.trait_.into());
- let trait_self = match generic_params.find_trait_self_param() {
- Some(p) => TypeParamId { parent: trait_ref.trait_.into(), local_id: p },
- None => return Vec::new(),
- };
- db.generic_predicates_for_param(trait_self)
- .iter()
- .filter_map(|pred| {
- pred.as_ref().filter_map(|pred| match pred {
- GenericPredicate::Implemented(tr) => Some(tr.clone()),
- _ => None,
- })
- })
- .map(|pred| pred.subst(&trait_ref.substs))
- .collect()
-}
-
-/// Returns an iterator over the whole super trait hierarchy (including the
-/// trait itself).
-pub(super) fn all_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> Vec<TraitId> {
- // we need to take care a bit here to avoid infinite loops in case of cycles
- // (i.e. if we have `trait A: B; trait B: A;`)
- let mut result = vec![trait_];
- let mut i = 0;
- while i < result.len() {
- let t = result[i];
- // yeah this is quadratic, but trait hierarchies should be flat
- // enough that this doesn't matter
- for tt in direct_super_traits(db, t) {
- if !result.contains(&tt) {
- result.push(tt);
- }
- }
- i += 1;
- }
- result
-}
-
-/// Given a trait ref (`Self: Trait`), builds all the implied trait refs for
-/// super traits. The original trait ref will be included. So the difference to
-/// `all_super_traits` is that we keep track of type parameters; for example if
-/// we have `Self: Trait<u32, i32>` and `Trait<T, U>: OtherTrait<U>` we'll get
-/// `Self: OtherTrait<i32>`.
-pub(super) fn all_super_trait_refs(db: &dyn HirDatabase, trait_ref: TraitRef) -> Vec<TraitRef> {
- // we need to take care a bit here to avoid infinite loops in case of cycles
- // (i.e. if we have `trait A: B; trait B: A;`)
- let mut result = vec![trait_ref];
- let mut i = 0;
- while i < result.len() {
- let t = &result[i];
- // yeah this is quadratic, but trait hierarchies should be flat
- // enough that this doesn't matter
- for tt in direct_super_trait_refs(db, t) {
- if !result.iter().any(|tr| tr.trait_ == tt.trait_) {
- result.push(tt);
- }
- }
- i += 1;
- }
- result
-}
-
-pub(super) fn associated_type_by_name_including_super_traits(
- db: &dyn HirDatabase,
- trait_ref: TraitRef,
- name: &Name,
-) -> Option<(TraitRef, TypeAliasId)> {
- all_super_trait_refs(db, trait_ref).into_iter().find_map(|t| {
- let assoc_type = db.trait_data(t.trait_).associated_type_by_name(name)?;
- Some((t, assoc_type))
- })
-}
-
-pub(super) fn variant_data(db: &dyn DefDatabase, var: VariantId) -> Arc<VariantData> {
- match var {
- VariantId::StructId(it) => db.struct_data(it).variant_data.clone(),
- VariantId::UnionId(it) => db.union_data(it).variant_data.clone(),
- VariantId::EnumVariantId(it) => {
- db.enum_data(it.parent).variants[it.local_id].variant_data.clone()
- }
- }
-}
-
-/// Helper for mutating `Arc<[T]>` (i.e. `Arc::make_mut` for Arc slices).
-/// The underlying values are cloned if there are other strong references.
-pub(crate) fn make_mut_slice<T: Clone>(a: &mut Arc<[T]>) -> &mut [T] {
- if Arc::get_mut(a).is_none() {
- *a = a.iter().cloned().collect();
- }
- Arc::get_mut(a).unwrap()
-}
-
-pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
- let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def)));
- Generics { def, params: db.generic_params(def), parent_generics }
-}
-
-#[derive(Debug)]
-pub(crate) struct Generics {
- def: GenericDefId,
- pub(crate) params: Arc<GenericParams>,
- parent_generics: Option<Box<Generics>>,
-}
-
-impl Generics {
- pub(crate) fn iter<'a>(
- &'a self,
- ) -> impl Iterator<Item = (TypeParamId, &'a TypeParamData)> + 'a {
- self.parent_generics
- .as_ref()
- .into_iter()
- .flat_map(|it| {
- it.params
- .types
- .iter()
- .map(move |(local_id, p)| (TypeParamId { parent: it.def, local_id }, p))
- })
- .chain(
- self.params
- .types
- .iter()
- .map(move |(local_id, p)| (TypeParamId { parent: self.def, local_id }, p)),
- )
- }
-
- pub(crate) fn iter_parent<'a>(
- &'a self,
- ) -> impl Iterator<Item = (TypeParamId, &'a TypeParamData)> + 'a {
- self.parent_generics.as_ref().into_iter().flat_map(|it| {
- it.params
- .types
- .iter()
- .map(move |(local_id, p)| (TypeParamId { parent: it.def, local_id }, p))
- })
- }
-
- pub(crate) fn len(&self) -> usize {
- self.len_split().0
- }
-
- /// (total, parents, child)
- pub(crate) fn len_split(&self) -> (usize, usize, usize) {
- let parent = self.parent_generics.as_ref().map_or(0, |p| p.len());
- let child = self.params.types.len();
- (parent + child, parent, child)
- }
-
- /// (parent total, self param, type param list, impl trait)
- pub(crate) fn provenance_split(&self) -> (usize, usize, usize, usize) {
- let parent = self.parent_generics.as_ref().map_or(0, |p| p.len());
- let self_params = self
- .params
- .types
- .iter()
- .filter(|(_, p)| p.provenance == TypeParamProvenance::TraitSelf)
- .count();
- let list_params = self
- .params
- .types
- .iter()
- .filter(|(_, p)| p.provenance == TypeParamProvenance::TypeParamList)
- .count();
- let impl_trait_params = self
- .params
- .types
- .iter()
- .filter(|(_, p)| p.provenance == TypeParamProvenance::ArgumentImplTrait)
- .count();
- (parent, self_params, list_params, impl_trait_params)
- }
-
- pub(crate) fn param_idx(&self, param: TypeParamId) -> Option<usize> {
- Some(self.find_param(param)?.0)
- }
-
- fn find_param(&self, param: TypeParamId) -> Option<(usize, &TypeParamData)> {
- if param.parent == self.def {
- let (idx, (_local_id, data)) = self
- .params
- .types
- .iter()
- .enumerate()
- .find(|(_, (idx, _))| *idx == param.local_id)
- .unwrap();
- let (_total, parent_len, _child) = self.len_split();
- Some((parent_len + idx, data))
- } else {
- self.parent_generics.as_ref().and_then(|g| g.find_param(param))
- }
- }
-}
-
-fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<GenericDefId> {
- let container = match def {
- GenericDefId::FunctionId(it) => it.lookup(db).container,
- GenericDefId::TypeAliasId(it) => it.lookup(db).container,
- GenericDefId::ConstId(it) => it.lookup(db).container,
- GenericDefId::EnumVariantId(it) => return Some(it.parent.into()),
- GenericDefId::AdtId(_) | GenericDefId::TraitId(_) | GenericDefId::ImplId(_) => return None,
- };
-
- match container {
- AssocContainerId::ImplId(it) => Some(it.into()),
- AssocContainerId::TraitId(it) => Some(it.into()),
- AssocContainerId::ContainerId(_) => None,
- }
-}
ra_ssr = { path = "../ra_ssr" }
hir = { path = "../ra_hir", package = "ra_hir" }
hir_def = { path = "../hir_def" }
-hir_ty = { path = "../ra_hir_ty", package = "ra_hir_ty" }
+hir_ty = { path = "../hir_ty" }
proc_macro_srv = { path = "../proc_macro_srv" }
[target.'cfg(windows)'.dependencies]
"project_model",
"syntax",
"tt",
- "ra_hir_ty",
+ "hir_ty",
];
let mut has_fixmes =