db::HirDatabase,
traits::{InEnvironment, Solution},
utils::generics,
- BoundVar, Canonical, DebruijnIndex, Substs, Ty,
+ BoundVar, Canonical, DebruijnIndex, Obligation, Substs, TraitRef, Ty,
};
/// Upper bound on how many `Deref` steps autoderef will follow before giving
/// up, so a (possibly cyclic) chain of `Deref` impls cannot recurse forever.
const AUTODEREF_RECURSION_LIMIT: usize = 10;
let parameters =
Substs::build_for_generics(&generic_params).push(ty.value.value.clone()).build();
+ // Check that the type implements Deref at all
+ let trait_ref = TraitRef { trait_: deref_trait, substs: parameters.clone() };
+ let implements_goal = super::Canonical {
+ num_vars: ty.value.num_vars,
+ value: InEnvironment {
+ value: Obligation::Trait(trait_ref),
+ environment: ty.environment.clone(),
+ },
+ };
+ if db.trait_solve(krate, implements_goal).is_none() {
+ return None;
+ }
+
+ // Now do the assoc type projection
let projection = super::traits::ProjectionPredicate {
ty: Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, ty.value.num_vars)),
projection_ty: super::ProjectionTy { associated_ty: target, parameters },
// they're just being 'passed through'. In the 'standard' case where
// we have `impl<T> Deref for Foo<T> { Target = T }`, that should be
// the case.
+
+ // FIXME: if the trait solver decides to truncate the type, these
+ // assumptions will be broken. We would need to properly introduce
+ // new variables in that case
+
for i in 1..vars.0.num_vars {
if vars.0.value[i - 1] != Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i - 1))
{
#[test]
fn infer_project_associated_type() {
- // y, z, a don't yet work because of https://github.com/rust-lang/chalk/issues/234
assert_snapshot!(
infer(r#"
trait Iterable {
[108; 261) '{ ...ter; }': ()
[118; 119) 'x': u32
[145; 146) '1': u32
- [156; 157) 'y': {unknown}
- [183; 192) 'no_matter': {unknown}
- [202; 203) 'z': {unknown}
- [215; 224) 'no_matter': {unknown}
- [234; 235) 'a': {unknown}
- [249; 258) 'no_matter': {unknown}
+ [156; 157) 'y': Iterable::Item<T>
+ [183; 192) 'no_matter': Iterable::Item<T>
+ [202; 203) 'z': Iterable::Item<T>
+ [215; 224) 'no_matter': Iterable::Item<T>
+ [234; 235) 'a': Iterable::Item<T>
+ [249; 258) 'no_matter': Iterable::Item<T>
"###
);
}
"#),
@r###"
[67; 100) '{ ...own; }': ()
- [77; 78) 'y': {unknown}
- [90; 97) 'unknown': {unknown}
+ [77; 78) 'y': u32
+ [90; 97) 'unknown': u32
"###
);
}
fn test() {
let a = Bar;
- let b = a[1];
+ let b = a[1u32];
b<|>;
}
//- /main.rs crate:main deps:std
fn test() {
let a = &[1u32, 2, 3];
- let b = a[1];
+ let b = a[1u32];
b<|>;
}
}
"#,
);
- // FIXME here Chalk doesn't normalize the type to a placeholder. I think we
- // need to add a rule like Normalize(<T as ApplyL>::Out -> ApplyL::Out<T>)
- // to the trait env ourselves here; probably Chalk can't do this by itself.
- // assert_eq!(t, "ApplyL::Out<[missing name]>");
- assert_eq!(t, "{unknown}");
+ assert_eq!(t, "ApplyL::Out<T>");
}
#[test]
[263; 264) 'y': impl Trait<Type = i64>
[290; 398) '{ ...r>); }': ()
[296; 299) 'get': fn get<T>(T) -> <T as Trait>::Type
- [296; 302) 'get(x)': {unknown}
+ [296; 302) 'get(x)': u32
[300; 301) 'x': T
- [308; 312) 'get2': fn get2<{unknown}, T>(T) -> {unknown}
- [308; 315) 'get2(x)': {unknown}
+ [308; 312) 'get2': fn get2<u32, T>(T) -> u32
+ [308; 315) 'get2(x)': u32
[313; 314) 'x': T
[321; 324) 'get': fn get<impl Trait<Type = i64>>(impl Trait<Type = i64>) -> <impl Trait<Type = i64> as Trait>::Type
- [321; 327) 'get(y)': {unknown}
+ [321; 327) 'get(y)': i64
[325; 326) 'y': impl Trait<Type = i64>
- [333; 337) 'get2': fn get2<{unknown}, impl Trait<Type = i64>>(impl Trait<Type = i64>) -> {unknown}
- [333; 340) 'get2(y)': {unknown}
+ [333; 337) 'get2': fn get2<i64, impl Trait<Type = i64>>(impl Trait<Type = i64>) -> i64
+ [333; 340) 'get2(y)': i64
[338; 339) 'y': impl Trait<Type = i64>
[346; 349) 'get': fn get<S<u64>>(S<u64>) -> <S<u64> as Trait>::Type
[346; 357) 'get(set(S))': u64
#[test]
fn projection_eq_within_chalk() {
- // std::env::set_var("CHALK_DEBUG", "1");
assert_snapshot!(
infer(r#"
trait Trait1 {
[164; 165) 'x': T
[170; 186) '{ ...o(); }': ()
[176; 177) 'x': T
- [176; 183) 'x.foo()': {unknown}
+ [176; 183) 'x.foo()': u32
"###
);
}
[150; 151) 'f': F
[156; 184) '{ ...2)); }': ()
[162; 163) 'f': F
- [162; 181) 'f.call...1, 2))': {unknown}
+ [162; 181) 'f.call...1, 2))': u128
[174; 180) '(1, 2)': (u32, u64)
[175; 176) '1': u32
[178; 179) '2': u64
"#,
), @r###"
[54; 58) 'self': &Self
- [60; 61) 'x': {unknown}
+ [60; 61) 'x': Trait::Item<Self>
[140; 144) 'self': &S
[146; 147) 'x': u32
[161; 175) '{ let y = x; }': ()
}
"#,
);
- // assert_eq!(t, "u32");
- // doesn't currently work, Chalk #234
- assert_eq!(t, "{unknown}");
+ assert_eq!(t, "u32");
}
#[test]
pub(crate) mod chalk;
mod builtin;
// This controls the maximum size of types Chalk considers. If we set this too
// high, we can run into slow edge cases; if we set it too low, Chalk won't
// find some solutions.
// FIXME this is currently hardcoded in the recursive solver
// const CHALK_SOLVER_MAX_SIZE: usize = 10;

/// This controls how much 'time' we give the Chalk solver before giving up.
const CHALK_SOLVER_FUEL: i32 = 100;
}
/// Constructs the Chalk solver used to answer trait queries.
///
/// Uses Chalk's recursive solver strategy; the `SolverChoice` is converted
/// into a concrete `Solver` for our `Interner`.
fn create_chalk_solver() -> chalk_solve::Solver<Interner> {
    let solver_choice = chalk_solve::SolverChoice::recursive();
    solver_choice.into_solver()
}