}
}
+impl<I> StepBy<I> where I: ExactSizeIterator {
+ // The zero-based index starting from the end of the iterator of the
+ // last element. Used in the `DoubleEndedIterator` implementation.
+ //
+ // The effective stride is `self.step + 1` (see the modulus below), so
+ // `rem` is the distance from the back to the nearest stride boundary.
+ fn next_back_index(&self) -> usize {
+ let rem = self.iter.len() % (self.step + 1);
+ if self.first_take {
+ // The front element has not been yielded yet, so element 0 is still
+ // reachable: when `rem == 0` the last yielded element sits a full
+ // `self.step` from the back, otherwise `rem - 1` from the back.
+ if rem == 0 { self.step } else { rem - 1 }
+ } else {
+ // The front element was already consumed; the last stride-aligned
+ // element is exactly `rem` positions from the back.
+ rem
+ }
+ }
+}
+
+#[stable(feature = "double_ended_step_by_iterator", since = "1.38.0")]
+impl<I> DoubleEndedIterator for StepBy<I> where I: DoubleEndedIterator + ExactSizeIterator {
+ #[inline]
+ fn next_back(&mut self) -> Option<Self::Item> {
+ // Skip the trailing elements that `StepBy` would never yield, landing
+ // on the last stride-aligned element (`nth_back` is zero-indexed).
+ self.iter.nth_back(self.next_back_index())
+ }
+
+ #[inline]
+ fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
+ // `self.iter.nth_back(usize::MAX)` does the right thing here when `n`
+ // is out of bounds because the length of `self.iter` does not exceed
+ // `usize::MAX` (because `I: ExactSizeIterator`) and `nth_back` is
+ // zero-indexed
+ let n = n
+ .saturating_mul(self.step + 1)
+ .saturating_add(self.next_back_index());
+ self.iter.nth_back(n)
+ }
+}
+
// StepBy can only make the iterator shorter, so the len will still fit.
#[stable(feature = "iterator_step_by", since = "1.28.0")]
impl<I> ExactSizeIterator for StepBy<I> where I: ExactSizeIterator {}
}
}
+#[stable(feature = "double_ended_peek_iterator", since = "1.38.0")]
+impl<I> DoubleEndedIterator for Peekable<I> where I: DoubleEndedIterator {
+ #[inline]
+ fn next_back(&mut self) -> Option<Self::Item> {
+ // The peeked value (if any) is the *front* element, so from the back
+ // it is only yielded once the underlying iterator is exhausted.
+ // `Some(None)` in `self.peeked` means the iterator is known empty.
+ self.iter.next_back().or_else(|| self.peeked.take().and_then(|x| x))
+ }
+
+ #[inline]
+ fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R where
+ Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try<Ok=B>
+ {
+ match self.peeked.take() {
+ // Already known to be exhausted: nothing to fold.
+ Some(None) => return Try::from_ok(init),
+ // A peeked front element exists: fold the rest from the back
+ // first, then apply `f` to the peeked element last.
+ Some(Some(v)) => match self.iter.try_rfold(init, &mut f).into_result() {
+ Ok(acc) => f(acc, v),
+ Err(e) => {
+ // Short-circuited before reaching the front: restore the
+ // peeked element so it is not lost.
+ self.peeked = Some(Some(v));
+ Try::from_error(e)
+ }
+ },
+ None => self.iter.try_rfold(init, f),
+ }
+ }
+
+ #[inline]
+ fn rfold<Acc, Fold>(self, init: Acc, mut fold: Fold) -> Acc
+ where Fold: FnMut(Acc, Self::Item) -> Acc,
+ {
+ match self.peeked {
+ // Known empty: nothing to fold.
+ Some(None) => return init,
+ // Fold everything behind the peeked front element first, then
+ // fold the peeked element itself.
+ Some(Some(v)) => {
+ let acc = self.iter.rfold(init, &mut fold);
+ fold(acc, v)
+ }
+ None => self.iter.rfold(init, fold),
+ }
+ }
+}
+
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: ExactSizeIterator> ExactSizeIterator for Peekable<I> {}
}
}
+#[stable(feature = "double_ended_take_iterator", since = "1.38.0")]
+impl<I> DoubleEndedIterator for Take<I> where I: DoubleEndedIterator + ExactSizeIterator {
+ #[inline]
+ fn next_back(&mut self) -> Option<Self::Item> {
+ if self.n == 0 {
+ None
+ } else {
+ let n = self.n;
+ self.n -= 1;
+ // The last element inside the `Take` window is `len - n` positions
+ // from the back of the underlying iterator; `saturating_sub` covers
+ // the case where fewer than `n` elements actually remain.
+ self.iter.nth_back(self.iter.len().saturating_sub(n))
+ }
+ }
+
+ #[inline]
+ fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
+ let len = self.iter.len();
+ if self.n > n {
+ // `len - self.n` underlying elements lie behind the take window;
+ // skipping those plus `n` more lands on the requested element.
+ let m = len.saturating_sub(self.n) + n;
+ self.n -= n + 1;
+ self.iter.nth_back(m)
+ } else {
+ // `n` is out of bounds for the window: drain what is left of the
+ // underlying iterator (see `test_iterator_take_nth_back`) and
+ // report exhaustion.
+ if len > 0 {
+ self.iter.nth_back(len - 1);
+ }
+ None
+ }
+ }
+
+ #[inline]
+ fn try_rfold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where
+ Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok = Acc>
+ {
+ if self.n == 0 {
+ Try::from_ok(init)
+ } else {
+ let len = self.iter.len();
+ // First skip the trailing elements outside the take window;
+ // `nth_back(len - self.n - 1)` lands on the window's last element.
+ if len > self.n && self.iter.nth_back(len - self.n - 1).is_none() {
+ Try::from_ok(init)
+ } else {
+ self.iter.try_rfold(init, fold)
+ }
+ }
+ }
+}
+
#[stable(feature = "rust1", since = "1.0.0")]
impl<I> ExactSizeIterator for Take<I> where I: ExactSizeIterator {}
assert_eq!(it.next(), Some(6));
assert_eq!(it.next(), Some(9));
assert_eq!(it.next(), None);
+
+ let mut it = (0..3).step_by(1);
+ assert_eq!(it.next_back(), Some(2));
+ assert_eq!(it.next_back(), Some(1));
+ assert_eq!(it.next_back(), Some(0));
+ assert_eq!(it.next_back(), None);
+
+ let mut it = (0..11).step_by(3);
+ assert_eq!(it.next_back(), Some(9));
+ assert_eq!(it.next_back(), Some(6));
+ assert_eq!(it.next_back(), Some(3));
+ assert_eq!(it.next_back(), Some(0));
+ assert_eq!(it.next_back(), None);
}
#[test]
assert_eq!(it.0, (usize::MAX as Bigger) * 1);
}
+#[test]
+fn test_iterator_step_by_nth_back() {
+ // Repeated `nth_back(0)` behaves like `next_back`, walking the
+ // stride-aligned elements from the back down to element 0.
+ let mut it = (0..16).step_by(5);
+ assert_eq!(it.nth_back(0), Some(15));
+ assert_eq!(it.nth_back(0), Some(10));
+ assert_eq!(it.nth_back(0), Some(5));
+ assert_eq!(it.nth_back(0), Some(0));
+ assert_eq!(it.nth_back(0), None);
+
+ // Once the front element has been consumed, 0 is no longer yielded
+ // from the back either.
+ let mut it = (0..16).step_by(5);
+ assert_eq!(it.next(), Some(0)); // to set `first_take` to `false`
+ assert_eq!(it.nth_back(0), Some(15));
+ assert_eq!(it.nth_back(0), Some(10));
+ assert_eq!(it.nth_back(0), Some(5));
+ assert_eq!(it.nth_back(0), None);
+
+ // `nth_back(n)` on a fresh iterator skips `n` stride-aligned elements;
+ // out-of-range `n` (including very large values) yields `None`.
+ let it = || (0..18).step_by(5);
+ assert_eq!(it().nth_back(0), Some(15));
+ assert_eq!(it().nth_back(1), Some(10));
+ assert_eq!(it().nth_back(2), Some(5));
+ assert_eq!(it().nth_back(3), Some(0));
+ assert_eq!(it().nth_back(4), None);
+ assert_eq!(it().nth_back(42), None);
+}
+
#[test]
#[should_panic]
fn test_iterator_step_by_zero() {
#[test]
fn test_iterator_peekable() {
let xs = vec![0, 1, 2, 3, 4, 5];
- let mut it = xs.iter().cloned().peekable();
+ let mut it = xs.iter().cloned().peekable();
assert_eq!(it.len(), 6);
assert_eq!(it.peek().unwrap(), &0);
assert_eq!(it.len(), 6);
assert_eq!(it.len(), 0);
assert!(it.next().is_none());
assert_eq!(it.len(), 0);
+
+ let mut it = xs.iter().cloned().peekable();
+ assert_eq!(it.len(), 6);
+ assert_eq!(it.peek().unwrap(), &0);
+ assert_eq!(it.len(), 6);
+ assert_eq!(it.next_back().unwrap(), 5);
+ assert_eq!(it.len(), 5);
+ assert_eq!(it.next_back().unwrap(), 4);
+ assert_eq!(it.len(), 4);
+ assert_eq!(it.next_back().unwrap(), 3);
+ assert_eq!(it.len(), 3);
+ assert_eq!(it.peek().unwrap(), &0);
+ assert_eq!(it.len(), 3);
+ assert_eq!(it.peek().unwrap(), &0);
+ assert_eq!(it.len(), 3);
+ assert_eq!(it.next_back().unwrap(), 2);
+ assert_eq!(it.len(), 2);
+ assert_eq!(it.next_back().unwrap(), 1);
+ assert_eq!(it.len(), 1);
+ assert_eq!(it.peek().unwrap(), &0);
+ assert_eq!(it.len(), 1);
+ assert_eq!(it.next_back().unwrap(), 0);
+ assert_eq!(it.len(), 0);
+ assert!(it.peek().is_none());
+ assert_eq!(it.len(), 0);
+ assert!(it.next_back().is_none());
+ assert_eq!(it.len(), 0);
}
#[test]
assert_eq!(i, xs.len());
}
+#[test]
+fn test_iterator_peekable_rfold() {
+ // `rfold` must still include the peeked (front) element, folded last.
+ let xs = [0, 1, 2, 3, 4, 5];
+ let mut it = xs.iter().peekable();
+ assert_eq!(it.peek(), Some(&&0));
+ let i = it.rfold(0, |i, &x| {
+ // Elements arrive back-to-front.
+ assert_eq!(x, xs[xs.len() - 1 - i]);
+ i + 1
+ });
+ assert_eq!(i, xs.len());
+}
+
/// This is an iterator that follows the Iterator contract,
/// but it is not fused. After having returned None once, it will start
/// producing elements if .next() is called again.
fn test_iterator_take() {
let xs = [0, 1, 2, 3, 5, 13, 15, 16, 17, 19];
let ys = [0, 1, 2, 3, 5];
- let mut it = xs.iter().take(5);
+
+ // Forward traversal: `take` yields the first `ys.len()` elements and
+ // its reported length shrinks accordingly.
+ let mut it = xs.iter().take(ys.len());
let mut i = 0;
- assert_eq!(it.len(), 5);
+ assert_eq!(it.len(), ys.len());
while let Some(&x) = it.next() {
assert_eq!(x, ys[i]);
i += 1;
- assert_eq!(it.len(), 5-i);
+ assert_eq!(it.len(), ys.len() - i);
+ }
+ assert_eq!(i, ys.len());
+ assert_eq!(it.len(), 0);
+
+ // Backward traversal: the same window is yielded in reverse order with
+ // the same length accounting.
+ let mut it = xs.iter().take(ys.len());
+ let mut i = 0;
+ assert_eq!(it.len(), ys.len());
+ while let Some(&x) = it.next_back() {
+ i += 1;
+ assert_eq!(x, ys[ys.len() - i]);
+ assert_eq!(it.len(), ys.len() - i);
}
assert_eq!(i, ys.len());
assert_eq!(it.len(), 0);
}
}
+#[test]
+fn test_iterator_take_nth_back() {
+ // When `Take` borrows the iterator via `by_ref`, stepping from the back
+ // consumes the out-of-window tail, so the parent sees an empty iterator
+ // afterwards.
+ let xs = [0, 1, 2, 4, 5];
+ let mut it = xs.iter();
+ {
+ let mut take = it.by_ref().take(3);
+ let mut i = 0;
+ while let Some(&x) = take.nth_back(0) {
+ i += 1;
+ assert_eq!(x, 3 - i);
+ }
+ }
+ assert_eq!(it.nth_back(0), None);
+
+ // `nth_back` skipping within a window larger than the source: each call
+ // skips one element, and running past the front yields `None`.
+ let xs = [0, 1, 2, 3, 4];
+ let mut it = xs.iter().take(7);
+ assert_eq!(it.nth_back(1), Some(&3));
+ assert_eq!(it.nth_back(1), Some(&1));
+ assert_eq!(it.nth_back(1), None);
+}
+
#[test]
fn test_iterator_take_short() {
let xs = [0, 1, 2, 3];
- let ys = [0, 1, 2, 3];
+
+ // `take(5)` on a 4-element source is capped at the source length.
let mut it = xs.iter().take(5);
let mut i = 0;
- assert_eq!(it.len(), 4);
+ assert_eq!(it.len(), xs.len());
while let Some(&x) = it.next() {
- assert_eq!(x, ys[i]);
+ assert_eq!(x, xs[i]);
i += 1;
- assert_eq!(it.len(), 4-i);
+ assert_eq!(it.len(), xs.len() - i);
}
- assert_eq!(i, ys.len());
+ assert_eq!(i, xs.len());
+ assert_eq!(it.len(), 0);
+
+ // Same capped window when draining from the back.
+ let mut it = xs.iter().take(5);
+ let mut i = 0;
+ assert_eq!(it.len(), xs.len());
+ while let Some(&x) = it.next_back() {
+ i += 1;
+ assert_eq!(x, xs[xs.len() - i]);
+ assert_eq!(it.len(), xs.len() - i);
+ }
+ assert_eq!(i, xs.len());
+ assert_eq!(it.len(), 0);
}
}
#[test]
-fn test_peek_try_fold() {
+fn test_peek_try_folds() {
let f = &|acc, x| i32::checked_add(2*acc, x);
+
+ // `try_fold`/`try_rfold` through `Peekable` must match the bare range.
assert_eq!((1..20).peekable().try_fold(7, f), (1..20).try_fold(7, f));
+ assert_eq!((1..20).peekable().try_rfold(7, f), (1..20).try_rfold(7, f));
+
let mut iter = (1..20).peekable();
assert_eq!(iter.peek(), Some(&1));
assert_eq!(iter.try_fold(7, f), (1..20).try_fold(7, f));
+ let mut iter = (1..20).peekable();
+ assert_eq!(iter.peek(), Some(&1));
+ assert_eq!(iter.try_rfold(7, f), (1..20).try_rfold(7, f));
+
let mut iter = [100, 20, 30, 40, 50, 60, 70].iter().cloned().peekable();
assert_eq!(iter.peek(), Some(&100));
assert_eq!(iter.try_fold(0, i8::checked_add), None);
assert_eq!(iter.peek(), Some(&40));
+
+ // A short-circuiting `try_rfold` must leave the peeked front element in
+ // place while the back end has advanced past the failing element.
+ let mut iter = [100, 20, 30, 40, 50, 60, 70].iter().cloned().peekable();
+ assert_eq!(iter.peek(), Some(&100));
+ assert_eq!(iter.try_rfold(0, i8::checked_add), None);
+ assert_eq!(iter.peek(), Some(&100));
+ assert_eq!(iter.next_back(), Some(50));
+
+ let mut iter = (2..5).peekable();
+ assert_eq!(iter.peek(), Some(&2));
+ assert_eq!(iter.try_for_each(Err), Err(2));
+ assert_eq!(iter.peek(), Some(&3));
+ assert_eq!(iter.try_for_each(Err), Err(3));
+ assert_eq!(iter.peek(), Some(&4));
+ assert_eq!(iter.try_for_each(Err), Err(4));
+ assert_eq!(iter.peek(), None);
+ assert_eq!(iter.try_for_each(Err), Ok(()));
+
+ // From the back, errors drain back-to-front while `peek` keeps showing
+ // the untouched front element until the iterator is empty.
+ let mut iter = (2..5).peekable();
+ assert_eq!(iter.peek(), Some(&2));
+ assert_eq!(iter.try_rfold((), |(), x| Err(x)), Err(4));
+ assert_eq!(iter.peek(), Some(&2));
+ assert_eq!(iter.try_rfold((), |(), x| Err(x)), Err(3));
+ assert_eq!(iter.peek(), Some(&2));
+ assert_eq!(iter.try_rfold((), |(), x| Err(x)), Err(2));
+ assert_eq!(iter.peek(), None);
+ assert_eq!(iter.try_rfold((), |(), x| Err(x)), Ok(()));
}
#[test]
fn test_take_try_folds() {
let f = &|acc, x| i32::checked_add(2*acc, x);
assert_eq!((10..30).take(10).try_fold(7, f), (10..20).try_fold(7, f));
- //assert_eq!((10..30).take(10).try_rfold(7, f), (10..20).try_rfold(7, f));
+ assert_eq!((10..30).take(10).try_rfold(7, f), (10..20).try_rfold(7, f));
let mut iter = (10..30).take(20);
assert_eq!(iter.try_fold(0, i8::checked_add), None);
assert_eq!(iter.next(), Some(20));
+ assert_eq!(iter.try_rfold(0, i8::checked_add), None);
+ assert_eq!(iter.next_back(), Some(24));
+
+ // Error propagation drains the window front-to-back...
+ let mut iter = (2..20).take(3);
+ assert_eq!(iter.try_for_each(Err), Err(2));
+ assert_eq!(iter.try_for_each(Err), Err(3));
+ assert_eq!(iter.try_for_each(Err), Err(4));
+ assert_eq!(iter.try_for_each(Err), Ok(()));
+
+ // ...and back-to-front when reversed.
+ let mut iter = (2..20).take(3).rev();
+ assert_eq!(iter.try_for_each(Err), Err(4));
+ assert_eq!(iter.try_for_each(Err), Err(3));
+ assert_eq!(iter.try_for_each(Err), Err(2));
+ assert_eq!(iter.try_for_each(Err), Ok(()));
}
#[test]
}
#[test]
-#[cfg(not(miri))] // This test is UB according to Stacked Borrows
fn test_as_mut() {
unsafe {
let p: *mut isize = null_mut();
// Pointers to unsized types -- slices
let s: &mut [u8] = &mut [1, 2, 3];
let ms: *mut [u8] = s;
- assert_eq!(ms.as_mut(), Some(s));
+ assert_eq!(ms.as_mut(), Some(&mut [1, 2, 3][..]));
let mz: *mut [u8] = &mut [];
assert_eq!(mz.as_mut(), Some(&mut [][..]));
let attr = {
// `allow(unreachable_code)`
let allow = {
- let allow_ident = Ident::with_empty_ctxt(sym::allow).with_span_pos(e.span);
- let uc_ident = Ident::with_empty_ctxt(sym::unreachable_code)
- .with_span_pos(e.span);
+ let allow_ident = Ident::new(sym::allow, e.span);
+ let uc_ident = Ident::new(sym::unreachable_code, e.span);
let uc_nested = attr::mk_nested_word_item(uc_ident);
- attr::mk_list_item(e.span, allow_ident, vec![uc_nested])
+ attr::mk_list_item(allow_ident, vec![uc_nested])
};
attr::mk_attr_outer(allow)
};
}
token::DocComment(val) |
- token::Shebang(val) => val.hash_stable(hcx, hasher),
+ token::Shebang(val) |
+ token::Unknown(val) => val.hash_stable(hcx, hasher),
}
}
}
ambient_variance,
needs_wf: false,
root_ty: ty,
+ param_env: self.param_env,
};
let ty = match generalize.relate(&ty, &ty) {
/// The root type that we are generalizing. Used when reporting cycles.
root_ty: Ty<'tcx>,
+
+ param_env: ty::ParamEnv<'tcx>,
}
/// Result from a generalization operation. This includes
fn tcx(&self) -> TyCtxt<'tcx> {
self.infcx.tcx
}
+ fn param_env(&self) -> ty::ParamEnv<'tcx> { self.param_env }
fn tag(&self) -> &'static str {
"Generalizer"
fn tcx(&self) -> TyCtxt<'tcx> { self.fields.tcx() }
+ fn param_env(&self) -> ty::ParamEnv<'tcx> { self.fields.param_env }
+
fn a_is_expected(&self) -> bool { self.a_is_expected }
fn relate_item_substs(&mut self,
fn tcx(&self) -> TyCtxt<'tcx> { self.fields.tcx() }
+ fn param_env(&self) -> ty::ParamEnv<'tcx> { self.fields.param_env }
+
fn a_is_expected(&self) -> bool { self.a_is_expected }
fn relate_with_variance<T: Relate<'tcx>>(&mut self,
fn tcx(&self) -> TyCtxt<'tcx> { self.fields.tcx() }
+ fn param_env(&self) -> ty::ParamEnv<'tcx> { self.fields.param_env }
+
fn a_is_expected(&self) -> bool { self.a_is_expected }
fn relate_with_variance<T: Relate<'tcx>>(&mut self,
self.infcx.tcx
}
+ // FIXME(oli-obk): not sure how to get the correct ParamEnv
+ fn param_env(&self) -> ty::ParamEnv<'tcx> { ty::ParamEnv::empty() }
+
fn tag(&self) -> &'static str {
"nll::subtype"
}
self.infcx.tcx
}
+ // FIXME(oli-obk): not sure how to get the correct ParamEnv
+ fn param_env(&self) -> ty::ParamEnv<'tcx> { ty::ParamEnv::empty() }
+
fn tag(&self) -> &'static str {
"nll::generalizer"
}
/// interested in the `OutlivesEnvironment`. -nmatsakis
#[derive(Clone)]
pub struct OutlivesEnvironment<'tcx> {
- param_env: ty::ParamEnv<'tcx>,
+ pub param_env: ty::ParamEnv<'tcx>,
free_region_map: FreeRegionMap<'tcx>,
// Contains, for each body B that we are checking (that is, the fn
impl TypeRelation<'tcx> for Sub<'combine, 'infcx, 'tcx> {
fn tag(&self) -> &'static str { "Sub" }
fn tcx(&self) -> TyCtxt<'tcx> { self.fields.infcx.tcx }
+
+ fn param_env(&self) -> ty::ParamEnv<'tcx> { self.fields.param_env }
+
fn a_is_expected(&self) -> bool { self.a_is_expected }
fn with_cause<F,R>(&mut self, cause: Cause, f: F) -> R
) -> Self {
ExprUseVisitor {
mc: mc::MemCategorizationContext::new(tcx,
+ param_env,
body_owner,
region_scope_tree,
tables,
ExprUseVisitor {
mc: mc::MemCategorizationContext::with_infer(
infcx,
+ param_env,
body_owner,
region_scope_tree,
tables,
#[derive(Clone)]
pub struct MemCategorizationContext<'a, 'tcx> {
pub tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
pub body_owner: DefId,
pub upvars: Option<&'tcx FxIndexMap<hir::HirId, hir::Upvar>>,
pub region_scope_tree: &'a region::ScopeTree,
impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> {
pub fn new(
tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
body_owner: DefId,
region_scope_tree: &'a region::ScopeTree,
tables: &'a ty::TypeckTables<'tcx>,
region_scope_tree,
tables,
rvalue_promotable_map,
- infcx: None
+ infcx: None,
+ param_env,
}
}
}
/// known, the results around upvar accesses may be incorrect.
pub fn with_infer(
infcx: &'a InferCtxt<'a, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
body_owner: DefId,
region_scope_tree: &'a region::ScopeTree,
tables: &'a ty::TypeckTables<'tcx>,
tables,
rvalue_promotable_map,
infcx: Some(infcx),
+ param_env,
}
}
// Always promote `[T; 0]` (even when e.g., borrowed mutably).
let promotable = match expr_ty.sty {
- ty::Array(_, len) if len.assert_usize(self.tcx) == Some(0) => true,
+ ty::Array(_, len) if len.try_eval_usize(self.tcx, self.param_env) == Some(0) => true,
_ => promotable,
};
/// `PlaceElem`, where we can just use the `Ty` that is already
/// stored inline on field projection elems.
pub fn projection_ty(self, tcx: TyCtxt<'tcx>, elem: &PlaceElem<'tcx>) -> PlaceTy<'tcx> {
- self.projection_ty_core(tcx, elem, |_, _, ty| ty)
+ self.projection_ty_core(tcx, ty::ParamEnv::empty(), elem, |_, _, ty| ty)
}
/// `place_ty.projection_ty_core(tcx, elem, |...| { ... })`
pub fn projection_ty_core<V, T>(
self,
tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
elem: &ProjectionElem<V, T>,
mut handle_field: impl FnMut(&Self, &Field, &T) -> Ty<'tcx>,
) -> PlaceTy<'tcx>
ProjectionElem::Subslice { from, to } => {
PlaceTy::from_ty(match self.ty.sty {
ty::Array(inner, size) => {
- let size = size.unwrap_usize(tcx);
+ let size = size.eval_usize(tcx, param_env);
let len = size - (from as u64) - (to as u64);
tcx.mk_array(inner, len)
}
Some(format!("[{}]", self.tcx.type_of(def.did).to_string())),
));
let tcx = self.tcx;
- if let Some(len) = len.assert_usize(tcx) {
+ if let Some(len) = len.try_eval_usize(tcx, ty::ParamEnv::empty()) {
flags.push((
sym::_Self,
Some(format!("[{}; {}]", self.tcx.type_of(def.did).to_string(), len)),
}
if unbound_input_types && stack.iter().skip(1).any(|prev| {
stack.obligation.param_env == prev.obligation.param_env
- && self.match_fresh_trait_refs(&stack.fresh_trait_ref, &prev.fresh_trait_ref)
+ && self.match_fresh_trait_refs(
+ &stack.fresh_trait_ref, &prev.fresh_trait_ref, prev.obligation.param_env)
}) {
debug!(
"evaluate_stack({:?}) --> unbound argument, recursive --> giving up",
&self,
previous: &ty::PolyTraitRef<'tcx>,
current: &ty::PolyTraitRef<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
) -> bool {
- let mut matcher = ty::_match::Match::new(self.tcx());
+ let mut matcher = ty::_match::Match::new(self.tcx(), param_env);
matcher.relate(previous, current).is_ok()
}
}
}
- let access_levels = tcx.privacy_access_levels(impl_def_id.krate);
- if let Some(id) = tcx.hir().as_local_hir_id(impl_def_id) {
- if access_levels.is_exported(id) || access_levels.is_public(id) {
- for cause in &overlap.intercrate_ambiguity_causes {
- cause.add_intercrate_ambiguity_hint(&mut err);
- }
- }
+ for cause in &overlap.intercrate_ambiguity_causes {
+ cause.add_intercrate_ambiguity_hint(&mut err);
}
if overlap.involves_placeholder {
/// affects any type variables or unification state.
pub struct Match<'tcx> {
tcx: TyCtxt<'tcx>,
+ // Needed by `TypeRelation::param_env`, e.g. when relating array types
+ // whose length constants must be evaluated.
+ param_env: ty::ParamEnv<'tcx>,
}
impl Match<'tcx> {
- pub fn new(tcx: TyCtxt<'tcx>) -> Match<'tcx> {
- Match { tcx }
+ pub fn new(tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> Match<'tcx> {
+ Match { tcx, param_env }
}
}
impl TypeRelation<'tcx> for Match<'tcx> {
fn tag(&self) -> &'static str { "Match" }
fn tcx(&self) -> TyCtxt<'tcx> { self.tcx }
+ fn param_env(&self) -> ty::ParamEnv<'tcx> { self.param_env }
fn a_is_expected(&self) -> bool { true } // irrelevant
fn relate_with_variance<T: Relate<'tcx>>(&mut self,
ty::Foreign(def_id) => format!("extern type `{}`", tcx.def_path_str(def_id)).into(),
ty::Array(_, n) => {
let n = tcx.lift_to_global(&n).unwrap();
- match n.assert_usize(tcx) {
+ match n.try_eval_usize(tcx, ty::ParamEnv::empty()) {
Some(n) => format!("array of {} elements", n).into(),
None => "array".into(),
}
use crate::ty::{DefId, SubstsRef};
use crate::ty::{AdtKind, Visibility};
use crate::ty::TyKind::*;
+use crate::ty;
pub use self::def_id_forest::DefIdForest;
}))
}
- Array(ty, len) => match len.assert_usize(tcx) {
+ Array(ty, len) => match len.try_eval_usize(tcx, ty::ParamEnv::empty()) {
// If the array is definitely non-empty, it's uninhabited if
// the type of its elements is uninhabited.
Some(n) if n != 0 => ty.uninhabited_from(tcx),
}
}
- let count = count.assert_usize(tcx).ok_or(LayoutError::Unknown(ty))?;
+ let count = count.try_eval_usize(tcx, param_env).ok_or(LayoutError::Unknown(ty))?;
let element = self.layout_of(element)?;
let size = element.size.checked_mul(count, dl)
.ok_or(LayoutError::SizeOverflow(ty))?;
#[inline]
pub fn eval_explicit_discr(&self, tcx: TyCtxt<'tcx>, expr_did: DefId) -> Option<Discr<'tcx>> {
- let param_env = ParamEnv::empty();
+ let param_env = tcx.param_env(expr_did);
let repr_type = self.repr.discr_type();
let substs = InternalSubsts::identity_for_item(tcx.global_tcx(), expr_did);
let instance = ty::Instance::new(expr_did, substs);
match tcx.const_eval(param_env.and(cid)) {
Ok(val) => {
// FIXME: Find the right type and use it instead of `val.ty` here
- if let Some(b) = val.assert_bits(tcx.global_tcx(), param_env.and(val.ty)) {
+ if let Some(b) = val.try_eval_bits(tcx.global_tcx(), param_env, val.ty) {
trace!("discriminants: {} ({:?})", b, repr_type);
Some(Discr {
val: b,
ty::Array(inner_type, len) => {
output.push('[');
self.push_type_name(inner_type, output, debug);
- write!(output, "; {}", len.unwrap_usize(self.tcx)).unwrap();
+ let len = len.eval_usize(self.tcx, ty::ParamEnv::reveal_all());
+ write!(output, "; {}", len).unwrap();
output.push(']');
}
ty::Slice(inner_type) => {
},
ty::Array(ty, sz) => {
p!(write("["), print(ty), write("; "));
- if let Some(n) = sz.assert_usize(self.tcx()) {
+ if let ConstValue::Unevaluated(..) = sz.val {
+ // do not try to evalute unevaluated constants. If we are const evaluating an
+ // array length anon const, rustc will (with debug assertions) print the
+ // constant's path. Which will end up here again.
+ p!(write("_"));
+ } else if let Some(n) = sz.try_eval_usize(self.tcx(), ty::ParamEnv::empty()) {
p!(write("{}", n));
} else {
p!(write("_"));
if let ty::Ref(_, ref_ty, _) = ct.ty.sty {
let byte_str = match (ct.val, &ref_ty.sty) {
(ConstValue::Scalar(Scalar::Ptr(ptr)), ty::Array(t, n)) if *t == u8 => {
- let n = n.unwrap_usize(self.tcx());
+ let n = n.eval_usize(self.tcx(), ty::ParamEnv::empty());
Some(self.tcx()
.alloc_map.lock()
.unwrap_memory(ptr.alloc_id)
pub trait TypeRelation<'tcx>: Sized {
fn tcx(&self) -> TyCtxt<'tcx>;
+ fn param_env(&self) -> ty::ParamEnv<'tcx>;
+
/// Returns a static string we can use for printouts.
fn tag(&self) -> &'static str;
Err(err) => {
// Check whether the lengths are both concrete/known values,
// but are unequal, for better diagnostics.
- match (sz_a.assert_usize(tcx), sz_b.assert_usize(tcx)) {
+ let sz_a = sz_a.try_eval_usize(tcx, relation.param_env());
+ let sz_b = sz_b.try_eval_usize(tcx, relation.param_env());
+ match (sz_a, sz_b) {
(Some(sz_a_val), Some(sz_b_val)) => {
Err(TypeError::FixedArraySize(
expected_found(relation, &sz_a_val, &sz_b_val)
use crate::ty::{List, TyS, ParamEnvAnd, ParamEnv};
use crate::ty::layout::VariantIdx;
use crate::util::captures::Captures;
-use crate::mir::interpret::{Scalar, Pointer};
+use crate::mir::interpret::{Scalar, GlobalId};
use smallvec::SmallVec;
use std::borrow::Cow;
ty.expect_ty().conservative_is_privately_uninhabited(tcx)
}),
ty::Array(ty, len) => {
- match len.assert_usize(tcx) {
+ match len.try_eval_usize(tcx, ParamEnv::empty()) {
// If the array is definitely non-empty, it's uninhabited if
// the type of its elements is uninhabited.
Some(n) if n != 0 => ty.conservative_is_privately_uninhabited(tcx),
}
#[inline]
+ /// Tries to evaluate this constant to its raw bit representation, in the
+ /// given `param_env`, returning `None` if it cannot be (fully) evaluated
+ /// or its layout is unknown. Panics if `ty` differs from `self.ty`.
- pub fn to_bits(&self, tcx: TyCtxt<'tcx>, ty: ParamEnvAnd<'tcx, Ty<'tcx>>) -> Option<u128> {
- if self.ty != ty.value {
- return None;
+ pub fn try_eval_bits(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ param_env: ParamEnv<'tcx>,
+ ty: Ty<'tcx>,
+ ) -> Option<u128> {
+ assert_eq!(self.ty, ty);
+ // if `ty` does not depend on generic parameters, use an empty param_env
+ let size = tcx.layout_of(param_env.with_reveal_all().and(ty)).ok()?.size;
+ match self.val {
+ // FIXME(const_generics): this doesn't work right now,
+ // because it tries to relate an `Infer` to a `Param`.
+ ConstValue::Unevaluated(did, substs) => {
+ // if `substs` has no unresolved components, use an empty param_env
+ let (param_env, substs) = param_env.with_reveal_all().and(substs).into_parts();
+ // try to resolve e.g. associated constants to their definition on an impl
+ let instance = ty::Instance::resolve(tcx, param_env, did, substs)?;
+ let gid = GlobalId {
+ instance,
+ promoted: None,
+ };
+ let evaluated = tcx.const_eval(param_env.and(gid)).ok()?;
+ evaluated.val.try_to_bits(size)
+ },
+ // otherwise just extract a `ConstValue`'s bits if possible
+ _ => self.val.try_to_bits(size),
}
- let size = tcx.layout_of(ty).ok()?.size;
- self.val.try_to_bits(size)
- }
-
- #[inline]
- pub fn to_ptr(&self) -> Option<Pointer> {
- self.val.try_to_ptr()
- }
-
- #[inline]
- pub fn assert_bits(&self, tcx: TyCtxt<'tcx>, ty: ParamEnvAnd<'tcx, Ty<'tcx>>) -> Option<u128> {
- assert_eq!(self.ty, ty.value);
- let size = tcx.layout_of(ty).ok()?.size;
- self.val.try_to_bits(size)
}
#[inline]
- pub fn assert_bool(&self, tcx: TyCtxt<'tcx>) -> Option<bool> {
- self.assert_bits(tcx, ParamEnv::empty().and(tcx.types.bool)).and_then(|v| match v {
+ pub fn try_eval_bool(&self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>) -> Option<bool> {
+ self.try_eval_bits(tcx, param_env, tcx.types.bool).and_then(|v| match v {
0 => Some(false),
1 => Some(true),
_ => None,
}
#[inline]
+ /// Tries to evaluate this `usize` constant, returning `None` if it
+ /// cannot be (fully) evaluated.
- pub fn assert_usize(&self, tcx: TyCtxt<'tcx>) -> Option<u64> {
- self.assert_bits(ParamEnv::empty().and(tcx.types.usize)).map(|v| v as u64)
+ pub fn try_eval_usize(&self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>) -> Option<u64> {
+ self.try_eval_bits(tcx, param_env, tcx.types.usize).map(|v| v as u64)
}
#[inline]
+ /// Evaluates this constant's bits, ICEing (via `bug!`) if evaluation
+ /// fails — use `try_eval_bits` for a fallible variant.
- pub fn unwrap_bits(&self, tcx: TyCtxt<'tcx>, ty: ParamEnvAnd<'tcx, Ty<'tcx>>) -> u128 {
- self.assert_bits(tcx, ty).unwrap_or_else(||
- bug!("expected bits of {}, got {:#?}", ty.value, self))
+ pub fn eval_bits(&self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>, ty: Ty<'tcx>) -> u128 {
+ self.try_eval_bits(tcx, param_env, ty).unwrap_or_else(||
+ bug!("expected bits of {:#?}, got {:#?}", ty, self))
}
#[inline]
+ /// Evaluates this `usize` constant, ICEing (via `bug!` inside
+ /// `eval_bits`) if evaluation fails.
- pub fn unwrap_usize(&self, tcx: TyCtxt<'tcx>) -> u64 {
- self.assert_usize(tcx).unwrap_or_else(||
- bug!("expected constant usize, got {:#?}", self))
+ pub fn eval_usize(&self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>) -> u64 {
+ self.eval_bits(tcx, param_env, tcx.types.usize) as u64
}
}
let (size, align) = cx.size_and_align_of(array_or_slice_type);
let upper_bound = match array_or_slice_type.sty {
- ty::Array(_, len) => len.unwrap_usize(cx.tcx) as c_longlong,
+ ty::Array(_, len) => len.eval_usize(cx.tcx, ty::ParamEnv::reveal_all()) as c_longlong,
_ => -1
};
cx.tcx().struct_lockstep_tails_erasing_lifetimes(source, target, cx.param_env());
match (&source.sty, &target.sty) {
(&ty::Array(_, len), &ty::Slice(_)) => {
- cx.const_usize(len.unwrap_usize(cx.tcx()))
+ cx.const_usize(len.eval_usize(cx.tcx(), ty::ParamEnv::reveal_all()))
}
(&ty::Dynamic(..), &ty::Dynamic(..)) => {
// For now, upcasts are limited to changes in marker
ty::Array(inner_type, len) => {
output.push('[');
push_debuginfo_type_name(tcx, inner_type, true, output, visited);
- output.push_str(&format!("; {}", len.unwrap_usize(tcx)));
+ output.push_str(&format!("; {}", len.eval_usize(tcx, ty::ParamEnv::reveal_all())));
output.push(']');
},
ty::Slice(inner_type) => {
.map(|c| {
let field_ty = c.ty.builtin_index().unwrap();
let fields = match c.ty.sty {
- ty::Array(_, n) => n.unwrap_usize(bx.tcx()),
+ ty::Array(_, n) => n.eval_usize(bx.tcx(), ty::ParamEnv::reveal_all()),
_ => bug!("invalid simd shuffle type: {}", c.ty),
};
let values: Vec<_> = (0..fields).map(|field| {
} = *place {
if let LocalRef::Operand(Some(op)) = self.locals[index] {
if let ty::Array(_, n) = op.layout.ty.sty {
- let n = n.unwrap_usize(bx.cx().tcx());
+ let n = n.eval_usize(bx.cx().tcx(), ty::ParamEnv::reveal_all());
return bx.cx().const_usize(n);
}
}
}
self = ct.ty.print(self)?;
- if let Some(bits) = ct.assert_bits(self.tcx, ty::ParamEnv::empty().and(ct.ty)) {
+ if let Some(bits) = ct.try_eval_bits(self.tcx, ty::ParamEnv::reveal_all(), ct.ty) {
let _ = write!(self.out, "{:x}_", bits);
} else {
// NOTE(eddyb) despite having the path, we need to
}
has_emitted
}
- ty::Array(ty, len) => match len.assert_usize(cx.tcx) {
+ ty::Array(ty, len) => match len.try_eval_usize(cx.tcx, cx.param_env) {
// If the array is definitely non-empty, we can do `#[must_use]` checking.
Some(n) if n != 0 => {
let descr_pre = &format!(
format!("{} occurs due to use{}", desired_action.as_noun(), use_spans.describe()),
);
+ // This error should not be downgraded to a warning,
+ // even in migrate mode.
+ self.disable_error_downgrading();
err.buffer(&mut self.errors_buffer);
} else {
if let Some((reported_place, _)) = self.move_error_reported.get(&move_out_indices) {
def_id,
&attributes,
&dead_unwinds,
- Borrows::new(tcx, body, regioncx.clone(), &borrow_set),
+ Borrows::new(tcx, body, param_env, regioncx.clone(), &borrow_set),
|rs, i| DebugFormatted::new(&rs.location(i)),
));
let flow_uninits = FlowAtLocation::new(do_dataflow(
infcx,
body,
mir_def_id: def_id,
+ param_env,
move_data: &mdpe.move_data,
location_table,
movable_generator,
move_error_reported: BTreeMap::new(),
uninitialized_error_reported: Default::default(),
errors_buffer,
+ disable_error_downgrading: false,
nonlexical_regioncx: regioncx,
used_mut: Default::default(),
used_mut_upvars: SmallVec::new(),
if !mbcx.errors_buffer.is_empty() {
mbcx.errors_buffer.sort_by_key(|diag| diag.span.primary_span());
- if tcx.migrate_borrowck() {
+ if !mbcx.disable_error_downgrading && tcx.migrate_borrowck() {
// When borrowck=migrate, check if AST-borrowck would
// error on the given code.
crate infcx: &'cx InferCtxt<'cx, 'tcx>,
body: &'cx Body<'tcx>,
mir_def_id: DefId,
+ param_env: ty::ParamEnv<'tcx>,
move_data: &'cx MoveData<'tcx>,
/// Map from MIR `Location` to `LocationIndex`; created
uninitialized_error_reported: FxHashSet<PlaceRef<'cx, 'tcx>>,
/// Errors to be reported buffer
errors_buffer: Vec<Diagnostic>,
+ /// If there are no errors reported by the HIR borrow checker, we downgrade
+ /// all NLL errors to warnings. Setting this flag disables downgrading.
+ disable_error_downgrading: bool,
/// This field keeps track of all the local variables that are declared mut and are mutated.
/// Used for the warning issued by an unused mutable local variable.
used_mut: FxHashSet<Local>,
}
impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
+ /// If there are no errors reported by the HIR borrow checker, we downgrade
+ /// all NLL errors to warnings. Calling this disables downgrading.
+ crate fn disable_error_downgrading(&mut self) {
+ self.disable_error_downgrading = true;
+ }
+
/// Checks an access to the given place to see if it is allowed. Examines the set of borrows
/// that are in scope, as well as which paths have been initialized, to ensure that (a) the
/// place is initialized and (b) it is not borrowed in some way that would prevent this
let mut error_reported = false;
let tcx = self.infcx.tcx;
let body = self.body;
+ let param_env = self.param_env;
let location_table = self.location_table.start_index(location);
let borrow_set = self.borrow_set.clone();
each_borrow_involving_path(
self,
tcx,
+ param_env,
body,
location,
(sd, place_span.0),
if places_conflict::borrow_conflicts_with_place(
self.infcx.tcx,
+ self.param_env,
self.body,
place,
borrow.kind,
pub(super) fn generate_constraints<'cx, 'tcx>(
infcx: &InferCtxt<'cx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
liveness_constraints: &mut LivenessValues<RegionVid>,
all_facts: &mut Option<AllFacts>,
location_table: &LocationTable,
location_table,
all_facts,
body,
+ param_env,
};
for (bb, data) in body.basic_blocks().iter_enumerated() {
/// 'cg = the duration of the constraint generation process itself.
struct ConstraintGeneration<'cg, 'cx, 'tcx> {
infcx: &'cg InferCtxt<'cx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
all_facts: &'cg mut Option<AllFacts>,
location_table: &'cg LocationTable,
liveness_constraints: &'cg mut LivenessValues<RegionVid>,
for &borrow_index in borrow_indices {
let places_conflict = places_conflict::places_conflict(
self.infcx.tcx,
+ self.param_env,
self.body,
&self.borrow_set.borrows[borrow_index].borrowed_place,
place,
use crate::borrow_check::nll::facts::AllFacts;
use crate::borrow_check::path_utils::*;
use crate::dataflow::indexes::BorrowIndex;
-use rustc::ty::TyCtxt;
+use rustc::ty::{self, TyCtxt};
use rustc::mir::visit::Visitor;
use rustc::mir::{BasicBlock, Location, Body, Place, Rvalue};
use rustc::mir::{Statement, StatementKind};
pub(super) fn generate_invalidates<'tcx>(
tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
all_facts: &mut Option<AllFacts>,
location_table: &LocationTable,
body: &Body<'tcx>,
let mut ig = InvalidationGenerator {
all_facts,
borrow_set,
+ param_env,
tcx,
location_table,
body,
struct InvalidationGenerator<'cx, 'tcx> {
tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
all_facts: &'cx mut AllFacts,
location_table: &'cx LocationTable,
body: &'cx Body<'tcx>,
);
let tcx = self.tcx;
let body = self.body;
+ let param_env = self.param_env;
let borrow_set = self.borrow_set.clone();
let indices = self.borrow_set.borrows.indices();
each_borrow_involving_path(
self,
tcx,
+ param_env,
body,
location,
(sd, place),
constraint_generation::generate_constraints(
infcx,
+ param_env,
&mut liveness_constraints,
&mut all_facts,
location_table,
// Generate various additional constraints.
invalidation::generate_invalidates(
infcx.tcx,
+ param_env,
&mut all_facts,
location_table,
&body,
ProjectionElem::Subslice { from, to } => PlaceTy::from_ty(
match base_ty.sty {
ty::Array(inner, size) => {
- let size = size.unwrap_usize(tcx);
+ let size = size.eval_usize(tcx, self.cx.param_env);
let min_size = (from as u64) + (to as u64);
if let Some(rest_size) = size.checked_sub(min_size) {
tcx.mk_array(inner, rest_size)
let tcx = self.infcx.tcx;
for proj in &user_ty.projs {
- let projected_ty = curr_projected_ty.projection_ty_core(tcx, proj, |this, field, &()| {
- let ty = this.field_ty(tcx, field);
- self.normalize(ty, locations)
- });
+ let projected_ty = curr_projected_ty.projection_ty_core(
+ tcx,
+ self.param_env,
+ proj,
+ |this, field, &()| {
+ let ty = this.field_ty(tcx, field);
+ self.normalize(ty, locations)
+ },
+ );
curr_projected_ty = projected_ty;
}
debug!("user_ty base: {:?} freshened: {:?} projs: {:?} yields: {:?}",
use crate::dataflow::indexes::BorrowIndex;
use rustc::mir::{BasicBlock, Location, Body, Place, PlaceBase};
use rustc::mir::{ProjectionElem, BorrowKind};
-use rustc::ty::TyCtxt;
+use rustc::ty::{self, TyCtxt};
use rustc_data_structures::graph::dominators::Dominators;
/// Returns `true` if the borrow represented by `kind` is
pub(super) fn each_borrow_involving_path<'tcx, F, I, S>(
s: &mut S,
tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
body: &Body<'tcx>,
_location: Location,
access_place: (AccessDepth, &Place<'tcx>),
if places_conflict::borrow_conflicts_with_place(
tcx,
+ param_env,
body,
&borrowed.borrowed_place,
borrowed.kind,
/// dataflow).
crate fn places_conflict<'tcx>(
tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
body: &Body<'tcx>,
borrow_place: &Place<'tcx>,
access_place: &Place<'tcx>,
) -> bool {
borrow_conflicts_with_place(
tcx,
+ param_env,
body,
borrow_place,
BorrowKind::Mut { allow_two_phase_borrow: true },
/// order to make the conservative choice and preserve soundness.
pub(super) fn borrow_conflicts_with_place<'tcx>(
tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
body: &Body<'tcx>,
borrow_place: &Place<'tcx>,
borrow_kind: BorrowKind,
access_place.iterate(|access_base, access_projections| {
place_components_conflict(
tcx,
+ param_env,
body,
(borrow_base, borrow_projections),
borrow_kind,
fn place_components_conflict<'tcx>(
tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
body: &Body<'tcx>,
borrow_projections: (&PlaceBase<'tcx>, ProjectionsIter<'_, 'tcx>),
borrow_kind: BorrowKind,
let borrow_base = borrow_projections.0;
let access_base = access_projections.0;
- match place_base_conflict(tcx, borrow_base, access_base) {
+ match place_base_conflict(tcx, param_env, borrow_base, access_base) {
Overlap::Arbitrary => {
bug!("Two base can't return Arbitrary");
}
// between `elem1` and `elem2`.
fn place_base_conflict<'tcx>(
tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
elem1: &PlaceBase<'tcx>,
elem2: &PlaceBase<'tcx>,
) -> Overlap {
(StaticKind::Promoted(promoted_1), StaticKind::Promoted(promoted_2)) => {
if promoted_1 == promoted_2 {
if let ty::Array(_, len) = s1.ty.sty {
- if let Some(0) = len.assert_usize(tcx) {
+ if let Some(0) = len.try_eval_usize(tcx, param_env) {
// Ignore conflicts with promoted [T; 0].
debug!("place_element_conflict: IGNORE-LEN-0-PROMOTED");
return Overlap::Disjoint;
match *match_pair.pattern.kind {
PatternKind::Constant { value } => {
- let switch_ty = ty::ParamEnv::empty().and(switch_ty);
indices.entry(value)
.or_insert_with(|| {
- options.push(value.unwrap_bits(self.hir.tcx(), switch_ty));
+ options.push(value.eval_bits(
+ self.hir.tcx(), self.hir.param_env, switch_ty,
+ ));
options.len() - 1
});
true
use std::cmp::Ordering::*;
use rustc::hir::RangeEnd::*;
- let param_env = ty::ParamEnv::empty().and(test.ty);
let tcx = self.hir.tcx();
- let lo = compare_const_vals(tcx, test.lo, pat.hi, param_env)?;
- let hi = compare_const_vals(tcx, test.hi, pat.lo, param_env)?;
+ let lo = compare_const_vals(tcx, test.lo, pat.hi, self.hir.param_env, test.ty)?;
+ let hi = compare_const_vals(tcx, test.hi, pat.lo, self.hir.param_env, test.ty)?;
match (test.end, pat.end, lo, hi) {
// pat < test
) -> Option<bool> {
use std::cmp::Ordering::*;
- let param_env = ty::ParamEnv::empty().and(range.ty);
let tcx = self.hir.tcx();
- let a = compare_const_vals(tcx, range.lo, value, param_env)?;
- let b = compare_const_vals(tcx, value, range.hi, param_env)?;
+ let a = compare_const_vals(tcx, range.lo, value, self.hir.param_env, range.ty)?;
+ let b = compare_const_vals(tcx, value, range.hi, self.hir.param_env, range.ty)?;
match (b, range.end) {
(Less, _) |
use crate::borrow_check::place_ext::PlaceExt;
use rustc::mir::{self, Location, Place, PlaceBase, Body};
-use rustc::ty::TyCtxt;
+use rustc::ty::{self, TyCtxt};
use rustc::ty::RegionVid;
use rustc_data_structures::bit_set::BitSet;
pub struct Borrows<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
borrow_set: Rc<BorrowSet<'tcx>>,
borrows_out_of_scope_at_location: FxHashMap<Location, Vec<BorrowIndex>>,
crate fn new(
tcx: TyCtxt<'tcx>,
body: &'a Body<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>,
borrow_set: &Rc<BorrowSet<'tcx>>,
) -> Self {
Borrows {
tcx: tcx,
body: body,
+ param_env,
borrow_set: borrow_set.clone(),
borrows_out_of_scope_at_location,
_nonlexical_regioncx: nonlexical_regioncx,
.filter(|&&i| {
places_conflict::places_conflict(
self.tcx,
+ self.param_env,
self.body,
&self.borrow_set.borrows[i].borrowed_place,
place,
};
let span = cx.tcx.def_span(def_id);
let count = match cx.tcx.at(span).const_eval(cx.param_env.and(global_id)) {
- Ok(cv) => cv.unwrap_usize(cx.tcx),
+ Ok(cv) => cv.eval_usize(cx.tcx, cx.param_env),
Err(ErrorHandled::Reported) => 0,
Err(ErrorHandled::TooGeneric) => {
cx.tcx.sess.span_err(span, "array lengths can't depend on generic parameters");
ConstValue::Slice {
data: self.tcx.alloc_map.lock().unwrap_memory(p.alloc_id),
start: p.offset.bytes().try_into().unwrap(),
- end: n.unwrap_usize(self.tcx).try_into().unwrap(),
+ end: n.eval_usize(self.tcx, ty::ParamEnv::empty()).try_into().unwrap(),
}
},
// fat pointers stay the same
ConstantValue(ty::Const::from_bool(cx.tcx, b))
}).collect()
}
- ty::Array(ref sub_ty, len) if len.assert_usize(cx.tcx).is_some() => {
- let len = len.unwrap_usize(cx.tcx);
+ ty::Array(ref sub_ty, len) if len.try_eval_usize(cx.tcx, cx.param_env).is_some() => {
+ let len = len.eval_usize(cx.tcx, cx.param_env);
if len != 0 && cx.is_uninhabited(sub_ty) {
vec![]
} else {
match (value.val, &value.ty.sty) {
(_, ty::Array(_, n)) => max_fixed_len = cmp::max(
max_fixed_len,
- n.unwrap_usize(cx.tcx),
+ n.eval_usize(cx.tcx, cx.param_env),
),
(ConstValue::Slice{ start, end, .. }, ty::Slice(_)) => max_fixed_len = cmp::max(
max_fixed_len,
}
impl<'tcx> IntRange<'tcx> {
- fn from_ctor(tcx: TyCtxt<'tcx>, ctor: &Constructor<'tcx>) -> Option<IntRange<'tcx>> {
+ fn from_ctor(
+ tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ ctor: &Constructor<'tcx>,
+ ) -> Option<IntRange<'tcx>> {
// Floating-point ranges are permitted and we don't want
// to consider them when constructing integer ranges.
fn is_integral(ty: Ty<'_>) -> bool {
}
ConstantValue(val) if is_integral(val.ty) => {
let ty = val.ty;
- if let Some(val) = val.assert_bits(tcx, ty::ParamEnv::empty().and(ty)) {
+ if let Some(val) = val.try_eval_bits(tcx, param_env, ty) {
let bias = IntRange::signed_bias(tcx, ty);
let val = val ^ bias;
Some(IntRange { range: val..=val, ty })
}
}
- fn from_pat(tcx: TyCtxt<'tcx>, mut pat: &Pattern<'tcx>) -> Option<IntRange<'tcx>> {
+ fn from_pat(
+ tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ mut pat: &Pattern<'tcx>,
+ ) -> Option<IntRange<'tcx>> {
let range = loop {
match pat.kind {
box PatternKind::Constant { value } => break ConstantValue(value),
box PatternKind::Range(PatternRange { lo, hi, ty, end }) => break ConstantRange(
- lo.to_bits(tcx, ty::ParamEnv::empty().and(ty)).unwrap(),
- hi.to_bits(tcx, ty::ParamEnv::empty().and(ty)).unwrap(),
+ lo.eval_bits(tcx, param_env, ty),
+ hi.eval_bits(tcx, param_env, ty),
ty,
end,
),
_ => return None,
}
};
- Self::from_ctor(tcx, &range)
+ Self::from_ctor(tcx, param_env, &range)
}
// The return value of `signed_bias` should be XORed with an endpoint to encode/decode it.
fn subtract_from(
self,
tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
ranges: Vec<Constructor<'tcx>>,
) -> Vec<Constructor<'tcx>> {
let ranges = ranges.into_iter().filter_map(|r| {
- IntRange::from_ctor(tcx, &r).map(|i| i.range)
+ IntRange::from_ctor(tcx, param_env, &r).map(|i| i.range)
});
let mut remaining_ranges = vec![];
let ty = self.ty;
fn compute_missing_ctors<'tcx>(
info: MissingCtorsInfo,
tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
all_ctors: &Vec<Constructor<'tcx>>,
used_ctors: &Vec<Constructor<'tcx>>,
) -> MissingCtors<'tcx> {
// If a constructor appears in a `match` arm, we can
// eliminate it straight away.
refined_ctors = vec![]
- } else if let Some(interval) = IntRange::from_ctor(tcx, used_ctor) {
+ } else if let Some(interval) = IntRange::from_ctor(tcx, param_env, used_ctor) {
// Refine the required constructors for the type by subtracting
// the range defined by the current constructor pattern.
- refined_ctors = interval.subtract_from(tcx, refined_ctors);
+ refined_ctors = interval.subtract_from(tcx, param_env, refined_ctors);
}
// If the constructor patterns that have been considered so far
if is_declared_nonexhaustive {
Useful
} else {
- split_grouped_constructors(cx.tcx, constructors, matrix, pcx.ty).into_iter().map(|c|
+ split_grouped_constructors(
+ cx.tcx, cx.param_env, constructors, matrix, pcx.ty,
+ ).into_iter().map(|c|
is_useful_specialized(cx, matrix, v, c, pcx.ty, witness)
).find(|result| result.is_useful()).unwrap_or(NotUseful)
}
// non-wildcard patterns in the current column. We always determine if
// the set is empty, but we only fully construct them on-demand,
// because they're rarely used and can be big.
- let cheap_missing_ctors =
- compute_missing_ctors(MissingCtorsInfo::Emptiness, cx.tcx, &all_ctors, &used_ctors);
+ let cheap_missing_ctors = compute_missing_ctors(
+ MissingCtorsInfo::Emptiness, cx.tcx, cx.param_env, &all_ctors, &used_ctors,
+ );
let is_privately_empty = all_ctors.is_empty() && !cx.is_uninhabited(pcx.ty);
let is_declared_nonexhaustive = cx.is_non_exhaustive_enum(pcx.ty) && !cx.is_local(pcx.ty);
(pcx.ty.is_ptr_sized_integral() && !cx.tcx.features().precise_pointer_size_matching);
if cheap_missing_ctors == MissingCtors::Empty && !is_non_exhaustive {
- split_grouped_constructors(cx.tcx, all_ctors, matrix, pcx.ty).into_iter().map(|c| {
- is_useful_specialized(cx, matrix, v, c, pcx.ty, witness)
- }).find(|result| result.is_useful()).unwrap_or(NotUseful)
+ split_grouped_constructors(cx.tcx, cx.param_env, all_ctors, matrix, pcx.ty)
+ .into_iter().map(|c| is_useful_specialized(cx, matrix, v, c, pcx.ty, witness))
+ .find(|result| result.is_useful())
+ .unwrap_or(NotUseful)
} else {
let matrix = rows.iter().filter_map(|r| {
if r[0].is_wildcard() {
witness
}).collect()
} else {
- let expensive_missing_ctors =
- compute_missing_ctors(MissingCtorsInfo::Ctors, cx.tcx, &all_ctors,
- &used_ctors);
+ let expensive_missing_ctors = compute_missing_ctors(
+ MissingCtorsInfo::Ctors, cx.tcx, cx.param_env, &all_ctors, &used_ctors,
+ );
if let MissingCtors::Ctors(missing_ctors) = expensive_missing_ctors {
pats.into_iter().flat_map(|witness| {
missing_ctors.iter().map(move |ctor| {
PatternKind::Constant { value } => Some(vec![ConstantValue(value)]),
PatternKind::Range(PatternRange { lo, hi, ty, end }) =>
Some(vec![ConstantRange(
- lo.to_bits(cx.tcx, ty::ParamEnv::empty().and(ty)).unwrap(),
- hi.to_bits(cx.tcx, ty::ParamEnv::empty().and(ty)).unwrap(),
+ lo.eval_bits(cx.tcx, cx.param_env, ty),
+ hi.eval_bits(cx.tcx, cx.param_env, ty),
ty,
end,
)]),
PatternKind::Array { .. } => match pcx.ty.sty {
ty::Array(_, length) => Some(vec![
- Slice(length.unwrap_usize(cx.tcx))
+ Slice(length.eval_usize(cx.tcx, cx.param_env))
]),
_ => span_bug!(pat.span, "bad ty {:?} for array pattern", pcx.ty)
},
match ty.sty {
// If the field type returned is an array of an unknown
// size return an TyErr.
- ty::Array(_, len) if len.assert_usize(cx.tcx).is_none() =>
+ ty::Array(_, len)
+ if len.try_eval_usize(cx.tcx, cx.param_env).is_none() =>
cx.tcx.types.err,
_ => ty,
}
prefix: &[Pattern<'tcx>],
slice: &Option<Pattern<'tcx>>,
suffix: &[Pattern<'tcx>],
+ param_env: ty::ParamEnv<'tcx>,
) -> Result<bool, ErrorReported> {
let data: &[u8] = match (const_val.val, &const_val.ty.sty) {
(ConstValue::ByRef { offset, alloc, .. }, ty::Array(t, n)) => {
assert_eq!(*t, tcx.types.u8);
- let n = n.assert_usize(tcx).unwrap();
+ let n = n.eval_usize(tcx, param_env);
let ptr = Pointer::new(AllocId(0), offset);
alloc.get_bytes(&tcx, ptr, Size::from_bytes(n)).unwrap()
},
{
match pat.kind {
box PatternKind::Constant { value } => {
- let b = value.unwrap_bits(tcx, ty::ParamEnv::empty().and(pat.ty));
+ let b = value.eval_bits(tcx, param_env, pat.ty);
assert_eq!(b as u8 as u128, b);
if b as u8 != *ch {
return Ok(false);
/// merging operation depicted above.)
fn split_grouped_constructors<'p, 'tcx>(
tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
ctors: Vec<Constructor<'tcx>>,
&Matrix(ref m): &Matrix<'p, 'tcx>,
ty: Ty<'tcx>,
// We only care about finding all the subranges within the range of the constructor
// range. Anything else is irrelevant, because it is guaranteed to result in
// `NotUseful`, which is the default case anyway, and can be ignored.
- let ctor_range = IntRange::from_ctor(tcx, &ctor).unwrap();
+ let ctor_range = IntRange::from_ctor(tcx, param_env, &ctor).unwrap();
/// Represents a border between 2 integers. Because the intervals spanning borders
/// must be able to cover every integer, we need to be able to represent
// `borders` is the set of borders between equivalence classes: each equivalence
// class lies between 2 borders.
let row_borders = m.iter()
- .flat_map(|row| IntRange::from_pat(tcx, row[0]))
+ .flat_map(|row| IntRange::from_pat(tcx, param_env, row[0]))
.flat_map(|range| ctor_range.intersection(&range))
.flat_map(|range| range_borders(range));
let ctor_borders = range_borders(ctor_range.clone());
/// Checks whether there exists any shared value in either `ctor` or `pat` by intersecting them.
fn constructor_intersects_pattern<'p, 'tcx>(
tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
ctor: &Constructor<'tcx>,
pat: &'p Pattern<'tcx>,
) -> Option<SmallVec<[&'p Pattern<'tcx>; 2]>> {
if should_treat_range_exhaustively(tcx, ctor) {
- match (IntRange::from_ctor(tcx, ctor), IntRange::from_pat(tcx, pat)) {
+ match (IntRange::from_ctor(tcx, param_env, ctor), IntRange::from_pat(tcx, param_env, pat)) {
(Some(ctor), Some(pat)) => {
ctor.intersection(&pat).map(|_| {
let (pat_lo, pat_hi) = pat.range.into_inner();
// Fallback for non-ranges and ranges that involve floating-point numbers, which are not
// conveniently handled by `IntRange`. For these cases, the constructor may not be a range
// so intersection actually devolves into being covered by the pattern.
- match constructor_covered_by_range(tcx, ctor, pat) {
+ match constructor_covered_by_range(tcx, param_env, ctor, pat) {
Ok(true) => Some(smallvec![]),
Ok(false) | Err(ErrorReported) => None,
}
fn constructor_covered_by_range<'tcx>(
tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
ctor: &Constructor<'tcx>,
pat: &Pattern<'tcx>,
) -> Result<bool, ErrorReported> {
_ => bug!("`constructor_covered_by_range` called with {:?}", pat),
};
trace!("constructor_covered_by_range {:#?}, {:#?}, {:#?}, {}", ctor, from, to, ty);
- let cmp_from = |c_from| compare_const_vals(tcx, c_from, from, ty::ParamEnv::empty().and(ty))
+ let cmp_from = |c_from| compare_const_vals(tcx, c_from, from, param_env, ty)
.map(|res| res != Ordering::Less);
- let cmp_to = |c_to| compare_const_vals(tcx, c_to, to, ty::ParamEnv::empty().and(ty));
+ let cmp_to = |c_to| compare_const_vals(tcx, c_to, to, param_env, ty);
macro_rules! some_or_ok {
($e:expr) => {
match $e {
ConstValue::ByRef { offset, alloc, .. } => (
alloc,
offset,
- n.unwrap_usize(cx.tcx),
+ n.eval_usize(cx.tcx, cx.param_env),
t,
),
_ => span_bug!(
// If the constructor is a:
// Single value: add a row if the constructor equals the pattern.
// Range: add a row if the constructor contains the pattern.
- constructor_intersects_pattern(cx.tcx, constructor, pat)
+ constructor_intersects_pattern(cx.tcx, cx.param_env, constructor, pat)
}
}
}
// If the constructor is a:
// Single value: add a row if the pattern contains the constructor.
// Range: add a row if the constructor intersects the pattern.
- constructor_intersects_pattern(cx.tcx, constructor, pat)
+ constructor_intersects_pattern(cx.tcx, cx.param_env, constructor, pat)
}
PatternKind::Array { ref prefix, ref slice, ref suffix } |
}
}
ConstantValue(cv) => {
- match slice_pat_covered_by_const(cx.tcx, pat.span, cv, prefix, slice, suffix) {
+ match slice_pat_covered_by_const(
+ cx.tcx, pat.span, cv, prefix, slice, suffix, cx.param_env,
+ ) {
Ok(true) => Some(smallvec![]),
Ok(false) => None,
Err(ErrorReported) => None
self.tcx,
lo,
hi,
- self.param_env.and(ty),
+ self.param_env,
+ ty,
);
match (end, cmp) {
(RangeEnd::Excluded, Some(Ordering::Less)) =>
ty::Array(_, len) => {
// fixed-length array
- let len = len.unwrap_usize(self.tcx);
+ let len = len.eval_usize(self.tcx, self.param_env);
assert!(len >= prefix.len() as u64 + suffix.len() as u64);
PatternKind::Array { prefix: prefix, slice: slice, suffix: suffix }
}
}
ty::Array(_, n) => {
PatternKind::Array {
- prefix: (0..n.unwrap_usize(self.tcx))
+ prefix: (0..n.eval_usize(self.tcx, self.param_env))
.map(|i| adt_subpattern(i as usize, None))
.collect(),
slice: None,
// (But still tell caller to continue search.)
return false;
}
- ty::Array(_, n) if n.assert_usize(self.tcx) == Some(0) => {
+ ty::Array(_, n) if n.try_eval_usize(self.tcx, ty::ParamEnv::reveal_all()) == Some(0)
+ => {
// rust-lang/rust#62336: ignore type of contents
// for empty array.
return false;
tcx: TyCtxt<'tcx>,
a: &'tcx ty::Const<'tcx>,
b: &'tcx ty::Const<'tcx>,
- ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>,
+ param_env: ty::ParamEnv<'tcx>,
+ ty: Ty<'tcx>,
) -> Option<Ordering> {
trace!("compare_const_vals: {:?}, {:?}", a, b);
let fallback = || from_bool(a == b);
// Use the fallback if any type differs
- if a.ty != b.ty || a.ty != ty.value {
+ if a.ty != b.ty || a.ty != ty {
return fallback();
}
- // FIXME: This should use assert_bits(ty) instead of use_bits
- // but triggers possibly bugs due to mismatching of arrays and slices
- if let (Some(a), Some(b)) = (a.to_bits(tcx, ty), b.to_bits(tcx, ty)) {
+ let a_bits = a.try_eval_bits(tcx, param_env, ty);
+ let b_bits = b.try_eval_bits(tcx, param_env, ty);
+
+ if let (Some(a), Some(b)) = (a_bits, b_bits) {
use ::rustc_apfloat::Float;
- return match ty.value.sty {
+ return match ty.sty {
ty::Float(ast::FloatTy::F32) => {
let l = ::rustc_apfloat::ieee::Single::from_bits(a);
let r = ::rustc_apfloat::ieee::Single::from_bits(b);
}
}
- if let ty::Str = ty.value.sty {
+ if let ty::Str = ty.sty {
match (a.val, b.val) {
(
ConstValue::Slice { data: alloc_a, start: offset_a, end: end_a },
// u64 cast is from usize to u64, which is always good
let val = Immediate::new_slice(
ptr,
- length.unwrap_usize(self.tcx.tcx),
+ length.eval_usize(self.tcx.tcx, self.param_env),
self,
);
self.write_immediate(val, dest)
(InternMode::ConstBase, hir::Mutability::MutMutable) |
(InternMode::Const, hir::Mutability::MutMutable) => {
match referenced_ty.sty {
- ty::Array(_, n) if n.unwrap_usize(self.ecx.tcx.tcx) == 0 => {}
+ ty::Array(_, n)
+ if n.eval_usize(self.ecx.tcx.tcx, self.param_env) == 0 => {}
ty::Slice(_)
if value.to_meta().unwrap().unwrap().to_usize(self.ecx)? == 0 => {}
_ => bug!("const qualif failed to prevent mutable references"),
fn build_clone_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Ty<'tcx>) -> Body<'tcx> {
debug!("build_clone_shim(def_id={:?})", def_id);
+ let param_env = tcx.param_env(def_id);
+
let mut builder = CloneShimBuilder::new(tcx, def_id, self_ty);
- let is_copy = self_ty.is_copy_modulo_regions(tcx, tcx.param_env(def_id), builder.span);
+ let is_copy = self_ty.is_copy_modulo_regions(tcx, param_env, builder.span);
let dest = Place::RETURN_PLACE;
let src = Place::from(Local::new(1+0)).deref();
match self_ty.sty {
_ if is_copy => builder.copy_shim(),
ty::Array(ty, len) => {
- let len = len.unwrap_usize(tcx);
+ let len = len.eval_usize(tcx, param_env);
builder.array_shim(dest, src, ty, len)
}
ty::Closure(def_id, substs) => {
} else if let ty::Array(_, len) = ty.sty {
// FIXME(eddyb) the `cx.mode == Mode::NonConstFn` condition
// seems unnecessary, given that this is merely a ZST.
- match len.assert_usize(cx.tcx) {
+ match len.try_eval_usize(cx.tcx, cx.param_env) {
Some(0) if cx.mode == Mode::NonConstFn => {},
_ => return true,
}
//! A pass that simplifies branches when their condition is known.
-use rustc::ty::{TyCtxt, ParamEnv};
+use rustc::ty::TyCtxt;
use rustc::mir::*;
use crate::transform::{MirPass, MirSource};
Cow::Borrowed(&self.label)
}
- fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, _src: MirSource<'tcx>, body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, body: &mut Body<'tcx>) {
+ let param_env = tcx.param_env(src.def_id());
for block in body.basic_blocks_mut() {
let terminator = block.terminator_mut();
terminator.kind = match terminator.kind {
TerminatorKind::SwitchInt {
discr: Operand::Constant(ref c), switch_ty, ref values, ref targets, ..
} => {
- let switch_ty = ParamEnv::empty().and(switch_ty);
- let constant = c.literal.assert_bits(tcx, switch_ty);
+ let constant = c.literal.try_eval_bits(tcx, param_env, switch_ty);
if let Some(constant) = constant {
let (otherwise, targets) = targets.split_last().unwrap();
let mut ret = TerminatorKind::Goto { target: *otherwise };
},
TerminatorKind::Assert {
target, cond: Operand::Constant(ref c), expected, ..
- } if (c.literal.assert_bool(tcx) == Some(true)) == expected =>
+ } if (c.literal.try_eval_bool(tcx, param_env) == Some(true)) == expected =>
TerminatorKind::Goto { target },
TerminatorKind::FalseEdges { real_target, .. } => {
TerminatorKind::Goto { target: real_target }
pub struct UniformArrayMoveOut;
impl MirPass for UniformArrayMoveOut {
- fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, _src: MirSource<'tcx>, body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, body: &mut Body<'tcx>) {
let mut patch = MirPatch::new(body);
+ let param_env = tcx.param_env(src.def_id());
{
- let mut visitor = UniformArrayMoveOutVisitor{body, patch: &mut patch, tcx};
+ let mut visitor = UniformArrayMoveOutVisitor{body, patch: &mut patch, tcx, param_env};
visitor.visit_body(body);
}
patch.apply(body);
body: &'a Body<'tcx>,
patch: &'a mut MirPatch<'tcx>,
tcx: TyCtxt<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
}
impl<'a, 'tcx> Visitor<'tcx> for UniformArrayMoveOutVisitor<'a, 'tcx> {
let place_ty =
Place::ty_from(&src_place.base, &proj.base, self.body, self.tcx).ty;
if let ty::Array(item_ty, const_size) = place_ty.sty {
- if let Some(size) = const_size.assert_usize(self.tcx) {
+ if let Some(size) = const_size.try_eval_usize(self.tcx, self.param_env) {
assert!(size <= u32::max_value() as u64,
"uniform array move out doesn't supported
for array bigger then u32");
pub struct RestoreSubsliceArrayMoveOut;
impl MirPass for RestoreSubsliceArrayMoveOut {
- fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, _src: MirSource<'tcx>, body: &mut Body<'tcx>) {
+ fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, body: &mut Body<'tcx>) {
let mut patch = MirPatch::new(body);
+ let param_env = tcx.param_env(src.def_id());
{
let mut visitor = RestoreDataCollector {
locals_use: IndexVec::from_elem(LocalUse::new(), &body.local_decls),
let src_ty =
Place::ty_from(src_place.base, src_place.projection, body, tcx).ty;
if let ty::Array(_, ref size_o) = src_ty.sty {
- size_o.assert_usize(tcx)
+ size_o.try_eval_usize(tcx, param_env)
} else {
None
}
self.complete_drop(Some(DropFlagMode::Deep), succ, unwind)
}
ty::Array(ety, size) => {
- let size = size.assert_usize(self.tcx());
+ let size = size.try_eval_usize(self.tcx(), self.elaborator.param_env());
self.open_drop_for_array(ety, size)
},
ty::Slice(ety) => self.open_drop_for_array(ety, None),
self.infcx.tcx
}
+ fn param_env(&self) -> ty::ParamEnv<'tcx> {
+ // FIXME(oli-obk): learn chalk and create param envs
+ ty::ParamEnv::empty()
+ }
+
fn tag(&self) -> &'static str {
"chalk_context::answer_substitutor"
}
let expected_ty = self.structurally_resolved_type(pat.span, expected);
let (inner_ty, slice_ty) = match expected_ty.sty {
ty::Array(inner_ty, size) => {
- if let Some(size) = size.assert_usize(tcx) {
+ if let Some(size) = size.try_eval_usize(tcx, self.param_env) {
let min_len = before.len() as u64 + after.len() as u64;
if slice.is_none() {
if min_len != size {
}
ty::Array(_, len) => {
if let (Some(len), Ok(user_index)) = (
- len.assert_usize(self.tcx),
+ len.try_eval_usize(self.tcx, self.param_env),
field.as_str().parse::<u64>()
) {
let base = self.tcx.sess.source_map()
{
f(mc::MemCategorizationContext::with_infer(
&self.infcx,
+ self.outlives_environment.param_env,
self.body_owner,
&self.region_scope_tree,
&self.tables.borrow(),
fn test_parse_err() {
with_default_globals(|| {
let mi = attr::mk_name_value_item(
- DUMMY_SP,
Ident::from_str("foo"),
LitKind::Bool(false),
DUMMY_SP,
let mut highlighted_source = vec![];
if classifier.write_source(&mut highlighted_source).is_err() {
- Err(classifier.lexer.buffer_fatal_errors())
+ Err(())
} else {
Ok(String::from_utf8_lossy(&highlighted_source).into_owned())
}
}
write_footer(&mut out).unwrap();
}
- Err(errors) => {
- // If errors are encountered while trying to highlight, cancel the errors and just emit
- // the unhighlighted source. The errors will have already been reported in the
- // `check-code-block-syntax` pass.
- for mut error in errors {
- error.cancel();
- }
-
+ Err(()) => {
+ // If errors are encountered while trying to highlight, just emit
+ // the unhighlighted source.
write!(out, "<pre><code>{}</code></pre>", src).unwrap();
}
}
if let Some(token) = self.peek_token.take() {
return Ok(token);
}
- self.lexer.try_next_token().map_err(|()| HighlightError::LexError)
+ let token = self.lexer.next_token();
+ if let token::Unknown(..) = &token.kind {
+ return Err(HighlightError::LexError);
+ }
+ Ok(token)
}
fn peek(&mut self) -> Result<&Token, HighlightError> {
if self.peek_token.is_none() {
- self.peek_token = Some(
- self.lexer.try_next_token().map_err(|()| HighlightError::LexError)?
- );
+ let token = self.lexer.next_token();
+ if let token::Unknown(..) = &token.kind {
+ return Err(HighlightError::LexError);
+ }
+ self.peek_token = Some(token);
}
Ok(self.peek_token.as_ref().unwrap())
}
return Ok(());
},
- token::Whitespace => Class::None,
+ token::Whitespace | token::Unknown(..) => Class::None,
token::Comment => Class::Comment,
token::DocComment(..) => Class::DocComment,
dox[code_block.code].to_owned(),
);
- let errors = {
+ let has_errors = {
+ let mut has_errors = false;
let mut lexer = Lexer::new(&sess, source_file, None);
- while let Ok(token::Token { kind, .. }) = lexer.try_next_token() {
- if kind == token::Eof {
- break;
+ loop {
+ match lexer.next_token().kind {
+ token::Eof => break,
+ token::Unknown(..) => has_errors = true,
+ _ => (),
}
}
-
- let errors = lexer.buffer_fatal_errors();
-
- if !errors.is_empty() {
- Err(errors)
- } else {
- Ok(())
- }
+ has_errors
};
- if let Err(errors) = errors {
+ if has_errors {
let mut diag = if let Some(sp) =
super::source_span_for_markdown_range(self.cx, &dox, &code_block.range, &item.attrs)
{
.sess()
.struct_span_warn(sp, "could not parse code block as Rust code");
- for mut err in errors {
- diag.note(&format!("error from rustc: {}", err.message()));
- err.cancel();
- }
-
if code_block.syntax.is_none() && code_block.is_fenced {
let sp = sp.from_inner(InnerSpan::new(0, 3));
diag.span_suggestion(
"doc comment contains an invalid Rust code block",
);
- for mut err in errors {
- // Don't bother reporting the error, because we can't show where it happened.
- err.cancel();
- }
-
if code_block.syntax.is_none() && code_block.is_fenced {
diag.help("mark blocks that do not contain Rust code as text: ```text");
}
/// Yields a [`&str`] slice if the `Path` is valid unicode.
///
/// This conversion may entail doing a check for UTF-8 validity.
+ /// Note that validation is performed because non-UTF-8 strings are
+ /// perfectly valid for some OS.
///
/// [`&str`]: ../primitive.str.html
///
#![unstable(feature = "thread_local_internals", issue = "0")]
+use crate::error::Error;
use crate::fmt;
/// A thread local storage key which owns its contents.
/// An error returned by [`LocalKey::try_with`](struct.LocalKey.html#method.try_with).
#[stable(feature = "thread_local_try_with", since = "1.26.0")]
+#[derive(Clone, Copy, Eq, PartialEq)]
pub struct AccessError {
_private: (),
}
}
}
+#[stable(feature = "thread_local_try_with", since = "1.26.0")]
+impl Error for AccessError {}
+
impl<T: 'static> LocalKey<T> {
#[doc(hidden)]
#[unstable(feature = "thread_local_internals",
pub fn mk_name_value_item_str(ident: Ident, value: Spanned<Symbol>) -> MetaItem {
let lit_kind = LitKind::Str(value.node, ast::StrStyle::Cooked);
- mk_name_value_item(ident.span.to(value.span), ident, lit_kind, value.span)
+ mk_name_value_item(ident, lit_kind, value.span)
}
-pub fn mk_name_value_item(span: Span, ident: Ident, lit_kind: LitKind, lit_span: Span) -> MetaItem {
+pub fn mk_name_value_item(ident: Ident, lit_kind: LitKind, lit_span: Span) -> MetaItem {
let lit = Lit::from_lit_kind(lit_kind, lit_span);
+ let span = ident.span.to(lit_span);
MetaItem { path: Path::from_ident(ident), span, node: MetaItemKind::NameValue(lit) }
}
-pub fn mk_list_item(span: Span, ident: Ident, items: Vec<NestedMetaItem>) -> MetaItem {
- MetaItem { path: Path::from_ident(ident), span, node: MetaItemKind::List(items) }
+pub fn mk_list_item(ident: Ident, items: Vec<NestedMetaItem>) -> MetaItem {
+ MetaItem { path: Path::from_ident(ident), span: ident.span, node: MetaItemKind::List(items) }
}
pub fn mk_word_item(ident: Ident) -> MetaItem {
NestedMetaItem::MetaItem(mk_word_item(ident))
}
-pub fn mk_attr_id() -> AttrId {
+crate fn mk_attr_id() -> AttrId {
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
pub fn meta_list(&self, sp: Span, name: ast::Name, mis: Vec<ast::NestedMetaItem>)
-> ast::MetaItem {
- attr::mk_list_item(sp, Ident::new(name, sp), mis)
+ attr::mk_list_item(Ident::new(name, sp), mis)
}
pub fn meta_name_value(&self, span: Span, name: ast::Name, lit_kind: ast::LitKind)
-> ast::MetaItem {
- attr::mk_name_value_item(span, Ident::new(name, span),
- lit_kind, span)
+ attr::mk_name_value_item(Ident::new(name, span), lit_kind, span)
}
pub fn item_use(&self, sp: Span,
];
let include_ident = Ident::with_empty_ctxt(sym::include);
- let item = attr::mk_list_item(DUMMY_SP, include_ident, include_info);
+ let item = attr::mk_list_item(include_ident, include_info);
items.push(ast::NestedMetaItem::MetaItem(item));
}
Err(e) => {
}
}
- let meta = attr::mk_list_item(DUMMY_SP, Ident::with_empty_ctxt(sym::doc), items);
+ let meta = attr::mk_list_item(Ident::with_empty_ctxt(sym::doc), items);
*at = attr::Attribute {
span: at.span,
id: at.id,
}
OpenDelim(..) | CloseDelim(..) => unreachable!(),
- Whitespace | Comment | Shebang(..) | Eof => unreachable!(),
+ Whitespace | Comment | Shebang(..) | Unknown(..) | Eof => unreachable!(),
}
}
}
use crate::symbol::{sym, Symbol};
use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char};
-use errors::{FatalError, Diagnostic, DiagnosticBuilder};
+use errors::{FatalError, DiagnosticBuilder};
use syntax_pos::{BytePos, Pos, Span, NO_EXPANSION};
use rustc_lexer::Base;
use rustc_lexer::unescape;
pos: BytePos,
/// Stop reading src at this index.
end_src_index: usize,
- fatal_errs: Vec<DiagnosticBuilder<'a>>,
/// Source text to tokenize.
src: Lrc<String>,
override_span: Option<Span>,
pos: source_file.start_pos,
end_src_index: src.len(),
src,
- fatal_errs: Vec::new(),
override_span,
}
}
self.override_span.unwrap_or_else(|| Span::new(lo, hi, NO_EXPANSION))
}
- fn unwrap_or_abort(&mut self, res: Result<Token, ()>) -> Token {
- match res {
- Ok(tok) => tok,
- Err(_) => {
- self.emit_fatal_errors();
- FatalError.raise();
- }
- }
- }
-
- /// Returns the next token, including trivia like whitespace or comments.
- ///
- /// `Err(())` means that some errors were encountered, which can be
- /// retrieved using `buffer_fatal_errors`.
+ /// Returns the next token, including trivia like whitespace or comments.
- pub fn try_next_token(&mut self) -> Result<Token, ()> {
- assert!(self.fatal_errs.is_empty());
-
+ pub fn next_token(&mut self) -> Token {
let start_src_index = self.src_index(self.pos);
let text: &str = &self.src[start_src_index..self.end_src_index];
if text.is_empty() {
let span = self.mk_sp(self.pos, self.pos);
- return Ok(Token::new(token::Eof, span));
+ return Token::new(token::Eof, span);
}
{
let kind = token::Shebang(sym);
let span = self.mk_sp(start, self.pos);
- return Ok(Token::new(kind, span));
+ return Token::new(kind, span);
}
}
}
- // This could use `?`, but that makes code significantly (10-20%) slower.
- // https://github.com/rust-lang/rust/issues/37939
- let kind = match self.cook_lexer_token(token.kind, start) {
- Ok(it) => it,
- Err(err) => return Err(self.fatal_errs.push(err)),
- };
+ let kind = self.cook_lexer_token(token.kind, start);
let span = self.mk_sp(start, self.pos);
- Ok(Token::new(kind, span))
- }
-
- /// Returns the next token, including trivia like whitespace or comments.
- ///
- /// Aborts in case of an error.
- pub fn next_token(&mut self) -> Token {
- let res = self.try_next_token();
- self.unwrap_or_abort(res)
- }
-
- fn emit_fatal_errors(&mut self) {
- for err in &mut self.fatal_errs {
- err.emit();
- }
-
- self.fatal_errs.clear();
- }
-
- pub fn buffer_fatal_errors(&mut self) -> Vec<Diagnostic> {
- let mut buffer = Vec::new();
-
- for err in self.fatal_errs.drain(..) {
- err.buffer(&mut buffer);
- }
-
- buffer
+ Token::new(kind, span)
}
/// Report a fatal lexical error with a given span.
&self,
token: rustc_lexer::TokenKind,
start: BytePos,
- ) -> Result<TokenKind, DiagnosticBuilder<'a>> {
- let kind = match token {
+ ) -> TokenKind {
+ match token {
rustc_lexer::TokenKind::LineComment => {
let string = self.str_from(start);
// comments with only more "/"s are not doc comments
// this should be inside `rustc_lexer`. However, we should first remove compound
// tokens like `<<` from `rustc_lexer`, and then add fancier error recovery to it,
// as there will be less overall work to do this way.
- return match unicode_chars::check_for_substitution(self, start, c, &mut err) {
- Some(token) => {
- err.emit();
- Ok(token)
- }
- None => Err(err),
- }
+ let token = unicode_chars::check_for_substitution(self, start, c, &mut err)
+ .unwrap_or_else(|| token::Unknown(self.symbol_from(start)));
+ err.emit();
+ token
}
- };
- Ok(kind)
+ }
}
fn cook_lexer_literal(
loop {
let token = self.string_reader.next_token();
match token.kind {
- token::Whitespace | token::Comment | token::Shebang(_) => {
+ token::Whitespace | token::Comment | token::Shebang(_) | token::Unknown(_) => {
self.joint_to_prev = NonJoint;
}
_ => {
/// A comment.
Comment,
Shebang(ast::Name),
+ /// A completely invalid token which should be skipped.
+ Unknown(ast::Name),
Eof,
}
DotDotEq | Comma | Semi | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar |
Question | OpenDelim(..) | CloseDelim(..) |
Literal(..) | Ident(..) | Lifetime(..) | Interpolated(..) | DocComment(..) |
- Whitespace | Comment | Shebang(..) | Eof => return None,
+ Whitespace | Comment | Shebang(..) | Unknown(..) | Eof => return None,
};
Some(Token::new(kind, self.span.to(joint.span)))
use rustc_target::spec::abi::{self, Abi};
use syntax_pos::{self, BytePos};
-use syntax_pos::{DUMMY_SP, FileName, Span};
+use syntax_pos::{FileName, Span};
use std::borrow::Cow;
// #![feature(prelude_import)]
let pi_nested = attr::mk_nested_word_item(ast::Ident::with_empty_ctxt(sym::prelude_import));
- let list = attr::mk_list_item(
- DUMMY_SP, ast::Ident::with_empty_ctxt(sym::feature), vec![pi_nested]);
+ let list = attr::mk_list_item(ast::Ident::with_empty_ctxt(sym::feature), vec![pi_nested]);
let fake_attr = attr::mk_attr_inner(list);
s.print_attribute(&fake_attr);
token::Whitespace => " ".to_string(),
token::Comment => "/* */".to_string(),
token::Shebang(s) => format!("/* shebang: {}*/", s),
+ token::Unknown(s) => s.to_string(),
token::Interpolated(ref nt) => nonterminal_to_string(nt),
}
item.map(|ast::Item {id, ident, attrs, node, vis, span, tokens}| {
let allow_ident = Ident::with_empty_ctxt(sym::allow);
let dc_nested = attr::mk_nested_word_item(Ident::from_str("dead_code"));
- let allow_dead_code_item = attr::mk_list_item(DUMMY_SP, allow_ident,
- vec![dc_nested]);
+ let allow_dead_code_item = attr::mk_list_item(allow_ident, vec![dc_nested]);
let allow_dead_code = attr::mk_attr_outer(allow_dead_code_item);
ast::Item {
+error: unknown start of token: \
+ --> <doctest>:1:1
+ |
+1 | \__________pkt->size___________/ \_result->size_/ \__pkt->size__/
+ | ^
+
+error: unknown start of token: \
+ --> <doctest>:1:43
+ |
+1 | \__________pkt->size___________/ \_result->size_/ \__pkt->size__/
+ | ^
+
+error: unknown start of token: \
+ --> <doctest>:1:60
+ |
+1 | \__________pkt->size___________/ \_result->size_/ \__pkt->size__/
+ | ^
+
warning: could not parse code block as Rust code
--> $DIR/invalid-syntax.rs:3:5
|
LL | | /// \__________pkt->size___________/ \_result->size_/ \__pkt->size__/
LL | | /// ```
| |_______^
- |
- = note: error from rustc: unknown start of token: \
help: mark blocks that do not contain Rust code as text
|
LL | /// ```text
| ^^^^^^^
+error: unknown start of token: `
+ --> <doctest>:3:30
+ |
+3 | | ^^^^^^ did you mean `baz::foobar`?
+ | ^
+help: Unicode character '`' (Grave Accent) looks like ''' (Single Quote), but it is not
+ |
+3 | | ^^^^^^ did you mean 'baz::foobar`?
+ | ^
+
+error: unknown start of token: `
+ --> <doctest>:3:42
+ |
+3 | | ^^^^^^ did you mean `baz::foobar`?
+ | ^
+help: Unicode character '`' (Grave Accent) looks like ''' (Single Quote), but it is not
+ |
+3 | | ^^^^^^ did you mean `baz::foobar'?
+ | ^
+
warning: could not parse code block as Rust code
--> $DIR/invalid-syntax.rs:8:5
|
LL | | /// | ^^^^^^ did you mean `baz::foobar`?
LL | | /// ```
| |_______^
- |
- = note: error from rustc: unknown start of token: `
help: mark blocks that do not contain Rust code as text
|
LL | /// ```text
| ^^^^^^^
+error: unknown start of token: \
+ --> <doctest>:1:1
+ |
+1 | \_
+ | ^
+
warning: could not parse code block as Rust code
--> $DIR/invalid-syntax.rs:19:5
|
LL | | /// \_
LL | | /// ```
| |_______^
- |
- = note: error from rustc: unknown start of token: \
help: mark blocks that do not contain Rust code as text
|
LL | /// ```text
| ^^^^^^^
+error: unknown start of token: \
+ --> <doctest>:1:1
+ |
+1 | \_
+ | ^
+
warning: could not parse code block as Rust code
--> $DIR/invalid-syntax.rs:32:5
|
LL | | /// \_
LL | | /// ```
| |_______^
- |
- = note: error from rustc: unknown start of token: \
+
+error: unknown start of token: \
+ --> <doctest>:2:5
+ |
+2 | \_
+ | ^
warning: could not parse code block as Rust code
--> $DIR/invalid-syntax.rs:41:9
| _________^
LL | | /// \_
| |__________^
- |
- = note: error from rustc: unknown start of token: \
+
+error: unknown start of token: `
+ --> <doctest>:1:1
+ |
+1 | ```
+ | ^
+help: Unicode character '`' (Grave Accent) looks like ''' (Single Quote), but it is not
+ |
+1 | '``
+ | ^
+
+error: unknown start of token: `
+ --> <doctest>:1:2
+ |
+1 | ```
+ | ^
+help: Unicode character '`' (Grave Accent) looks like ''' (Single Quote), but it is not
+ |
+1 | `'`
+ | ^
+
+error: unknown start of token: `
+ --> <doctest>:1:3
+ |
+1 | ```
+ | ^
+help: Unicode character '`' (Grave Accent) looks like ''' (Single Quote), but it is not
+ |
+1 | ``'
+ | ^
warning: could not parse code block as Rust code
--> $DIR/invalid-syntax.rs:55:9
|
LL | /// ```
| ^^^
- |
- = note: error from rustc: unknown start of token: `
+
+error: unknown start of token: \
+ --> <doctest>:1:1
+ |
+1 | \_
+ | ^
warning: could not parse code block as Rust code
--> $DIR/invalid-syntax.rs:58:5
LL | | /// \_
LL | | /// ```
| |_______^
- |
- = note: error from rustc: unknown start of token: \
+
+error: unknown start of token: \
+ --> <doctest>:1:1
+ |
+1 | \_
+ | ^
warning: doc comment contains an invalid Rust code block
--> $DIR/invalid-syntax.rs:63:1
|
= help: mark blocks that do not contain Rust code as text: ```text
+error: unknown start of token: \
+ --> <rustdoc-highlighting>:1:1
+ |
+1 | \_
+ | ^
+
+error: unknown start of token: \
+ --> <rustdoc-highlighting>:1:1
+ |
+1 | \_
+ | ^
+
+error: unknown start of token: `
+ --> <rustdoc-highlighting>:1:1
+ |
+1 | ```
+ | ^
+help: Unicode character '`' (Grave Accent) looks like ''' (Single Quote), but it is not
+ |
+1 | '``
+ | ^
+
+error: unknown start of token: \
+ --> <rustdoc-highlighting>:2:1
+ |
+2 | \_
+ | ^
+
+error: unknown start of token: \
+ --> <rustdoc-highlighting>:1:1
+ |
+1 | \_
+ | ^
+
+error: unknown start of token: \
+ --> <rustdoc-highlighting>:1:1
+ |
+1 | \_
+ | ^
+
+error: unknown start of token: `
+ --> <rustdoc-highlighting>:3:30
+ |
+3 | | ^^^^^^ did you mean `baz::foobar`?
+ | ^
+help: Unicode character '`' (Grave Accent) looks like ''' (Single Quote), but it is not
+ |
+3 | | ^^^^^^ did you mean 'baz::foobar`?
+ | ^
+
+error: unknown start of token: \
+ --> <rustdoc-highlighting>:1:1
+ |
+1 | \__________pkt->size___________/ \_result->size_/ \__pkt->size__/
+ | ^
+
--- /dev/null
+// Test that we don't allow awaiting from an async fn while a local is partially
+// initialized.
+
+// edition:2018
+
+#![feature(async_await)]
+
+struct S { x: i32, y: i32 }
+struct T(i32, i32);
+
+async fn noop() {}
+
+async fn test_tuple() {
+ let mut t: (i32, i32);
+ t.0 = 42;
+ //~^ ERROR assign to part of possibly uninitialized variable: `t` [E0381]
+ noop().await;
+ t.1 = 88;
+ let _ = t;
+}
+
+async fn test_tuple_struct() {
+ let mut t: T;
+ t.0 = 42;
+ //~^ ERROR assign to part of possibly uninitialized variable: `t` [E0381]
+ noop().await;
+ t.1 = 88;
+ let _ = t;
+}
+
+async fn test_struct() {
+ let mut t: S;
+ t.x = 42;
+ //~^ ERROR assign to part of possibly uninitialized variable: `t` [E0381]
+ noop().await;
+ t.y = 88;
+ let _ = t;
+}
+
+fn main() {
+ let _ = test_tuple();
+ let _ = test_tuple_struct();
+ let _ = test_struct();
+}
--- /dev/null
+error[E0381]: assign to part of possibly uninitialized variable: `t`
+ --> $DIR/partial-initialization-across-await.rs:15:5
+ |
+LL | t.0 = 42;
+ | ^^^^^^^^ use of possibly uninitialized `t`
+
+error[E0381]: assign to part of possibly uninitialized variable: `t`
+ --> $DIR/partial-initialization-across-await.rs:24:5
+ |
+LL | t.0 = 42;
+ | ^^^^^^^^ use of possibly uninitialized `t`
+
+error[E0381]: assign to part of possibly uninitialized variable: `t`
+ --> $DIR/partial-initialization-across-await.rs:33:5
+ |
+LL | t.x = 42;
+ | ^^^^^^^^ use of possibly uninitialized `t`
+
+error: aborting due to 3 previous errors
+
+For more information about this error, try `rustc --explain E0381`.
--- /dev/null
+// Test that we don't allow partial initialization.
+// This may be relaxed in the future (see #54987).
+
+fn main() {
+ let mut t: (u64, u64);
+ t.0 = 1;
+ //~^ ERROR assign to part of possibly uninitialized variable: `t` [E0381]
+ t.1 = 1;
+
+ let mut t: (u64, u64);
+ t.1 = 1;
+ //~^ ERROR assign to part of possibly uninitialized variable: `t` [E0381]
+ t.0 = 1;
+
+ let mut t: (u64, u64);
+ t.0 = 1;
+ //~^ ERROR assign to part of possibly uninitialized variable: `t` [E0381]
+
+ let mut t: (u64,);
+ t.0 = 1;
+ //~^ ERROR assign to part of possibly uninitialized variable: `t` [E0381]
+}
--- /dev/null
+error[E0381]: assign to part of possibly uninitialized variable: `t`
+ --> $DIR/disallow-possibly-uninitialized.rs:6:5
+ |
+LL | t.0 = 1;
+ | ^^^^^^^ use of possibly uninitialized `t`
+
+error[E0381]: assign to part of possibly uninitialized variable: `t`
+ --> $DIR/disallow-possibly-uninitialized.rs:11:5
+ |
+LL | t.1 = 1;
+ | ^^^^^^^ use of possibly uninitialized `t`
+
+error[E0381]: assign to part of possibly uninitialized variable: `t`
+ --> $DIR/disallow-possibly-uninitialized.rs:16:5
+ |
+LL | t.0 = 1;
+ | ^^^^^^^ use of possibly uninitialized `t`
+
+error[E0381]: assign to part of possibly uninitialized variable: `t`
+ --> $DIR/disallow-possibly-uninitialized.rs:20:5
+ |
+LL | t.0 = 1;
+ | ^^^^^^^ use of possibly uninitialized `t`
+
+error: aborting due to 4 previous errors
+
+For more information about this error, try `rustc --explain E0381`.
| --------------------------------- first implementation here
LL | impl Foo for i16 {}
| ^^^^^^^^^^^^^^^^ conflicting implementation for `i16`
+ |
+ = note: upstream crates may add new impl of trait `coherence_lib::Remote` for type `i16` in future versions
error: aborting due to previous error
| --------------------------------- first implementation here
LL | impl Foo for i16 {}
| ^^^^^^^^^^^^^^^^ conflicting implementation for `i16`
+ |
+ = note: upstream crates may add new impl of trait `coherence_lib::Remote` for type `i16` in future versions
error: aborting due to previous error
...
LL | impl MyTrait for lib::MyFundamentalStruct<(MyType,)> { }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation for `lib::MyFundamentalStruct<(MyType,)>`
+ |
+ = note: upstream crates may add new impl of trait `lib::MyCopy` for type `lib::MyFundamentalStruct<(MyType,)>` in future versions
error: aborting due to previous error
...
LL | impl MyTrait for lib::MyFundamentalStruct<(MyType,)> { }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation for `lib::MyFundamentalStruct<(MyType,)>`
+ |
+ = note: upstream crates may add new impl of trait `lib::MyCopy` for type `lib::MyFundamentalStruct<(MyType,)>` in future versions
error: aborting due to previous error
...
LL | impl MyTrait for lib::MyStruct<MyType> { }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation for `lib::MyStruct<MyType>`
+ |
+ = note: upstream crates may add new impl of trait `lib::MyCopy` for type `lib::MyStruct<MyType>` in future versions
error: aborting due to previous error
...
LL | impl MyTrait for lib::MyStruct<MyType> { }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation for `lib::MyStruct<MyType>`
+ |
+ = note: upstream crates may add new impl of trait `lib::MyCopy` for type `lib::MyStruct<MyType>` in future versions
error: aborting due to previous error
...
LL | impl MyTrait for (MyType,) { }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation for `(MyType,)`
+ |
+ = note: upstream crates may add new impl of trait `lib::MyCopy` for type `(MyType,)` in future versions
error: aborting due to previous error
...
LL | impl MyTrait for (MyType,) { }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation for `(MyType,)`
+ |
+ = note: upstream crates may add new impl of trait `lib::MyCopy` for type `(MyType,)` in future versions
error: aborting due to previous error
+++ /dev/null
-error[E0005]: refutable pattern in function argument: `&[]` not covered
- --> $DIR/const_let_refutable.rs:3:16
- |
-LL | const fn slice([a, b]: &[i32]) -> i32 {
- | ^^^^^^ pattern `&[]` not covered
-
-error[E0723]: can only call other `const fn` within a `const fn`, but `const <&i32 as std::ops::Add>::add` is not stable as `const fn`
- --> $DIR/const_let_refutable.rs:4:5
- |
-LL | a + b
- | ^^^^^
- |
- = note: for more information, see issue https://github.com/rust-lang/rust/issues/57563
- = help: add `#![feature(const_fn)]` to the crate attributes to enable
-
-error[E0381]: use of possibly uninitialized variable: `a`
- --> $DIR/const_let_refutable.rs:4:5
- |
-LL | a + b
- | ^ use of possibly uninitialized `a`
-
-error[E0381]: use of possibly uninitialized variable: `b`
- --> $DIR/const_let_refutable.rs:4:9
- |
-LL | a + b
- | ^ use of possibly uninitialized `b`
-
-error: aborting due to 4 previous errors
-
-Some errors have detailed explanations: E0005, E0381, E0723.
-For more information about an error, try `rustc --explain E0005`.
const fn slice([a, b]: &[i32]) -> i32 { //~ ERROR refutable pattern in function argument
a + b //~ ERROR can only call other `const fn` within a `const fn`
- //~^ WARN use of possibly uninitialized variable: `a`
- //~| WARN this error has been downgraded to a warning for backwards compatibility
- //~| WARN this represents potential undefined behavior in your code and this warning will
- //~| WARN use of possibly uninitialized variable: `b`
- //~| WARN this error has been downgraded to a warning for backwards compatibility
- //~| WARN this represents potential undefined behavior in your code and this warning will
+ //~^ ERROR use of possibly uninitialized variable: `a`
+ //~| ERROR use of possibly uninitialized variable: `b`
}
= note: for more information, see issue https://github.com/rust-lang/rust/issues/57563
= help: add `#![feature(const_fn)]` to the crate attributes to enable
-warning[E0381]: use of possibly uninitialized variable: `a`
+error[E0381]: use of possibly uninitialized variable: `a`
--> $DIR/const_let_refutable.rs:4:5
|
LL | a + b
| ^ use of possibly uninitialized `a`
- |
- = warning: this error has been downgraded to a warning for backwards compatibility with previous releases
- = warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
- = note: for more information, try `rustc --explain E0729`
-warning[E0381]: use of possibly uninitialized variable: `b`
+error[E0381]: use of possibly uninitialized variable: `b`
--> $DIR/const_let_refutable.rs:4:9
|
LL | a + b
| ^ use of possibly uninitialized `b`
- |
- = warning: this error has been downgraded to a warning for backwards compatibility with previous releases
- = warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
- = note: for more information, try `rustc --explain E0729`
-error: aborting due to 2 previous errors
+error: aborting due to 4 previous errors
Some errors have detailed explanations: E0005, E0381, E0723.
For more information about an error, try `rustc --explain E0005`.
--- /dev/null
+// check-pass
+
+fn testfn(_arr: &mut [(); 0]) {}
+
+trait TestTrait {
+ fn method();
+}
+
+impl TestTrait for [(); 0] {
+ fn method() {
+ let mut arr: Self = [(); 0];
+ testfn(&mut arr);
+ }
+}
+
+fn main() {}
--- /dev/null
+// check-pass
+
+trait Gen<T> {
+ fn gen(x: Self) -> T;
+}
+
+struct A;
+
+impl Gen<[(); 0]> for A {
+ fn gen(x: Self) -> [(); 0] {
+ []
+ }
+}
+
+fn array() -> impl Gen<[(); 0]> {
+ A
+}
+
+fn main() {
+ let [] = Gen::gen(array());
+}
+++ /dev/null
-error[E0005]: refutable pattern in local binding: `T(_, _)` not covered
- --> $DIR/empty-never-array.rs:10:9
- |
-LL | / enum Helper<T, U> {
-LL | | T(T, [!; 0]),
-LL | | #[allow(dead_code)]
-LL | | U(U),
-LL | | }
- | |_- `Helper<T, U>` defined here
-...
-LL | let Helper::U(u) = Helper::T(t, []);
- | ^^^^^^^^^^^^ pattern `T(_, _)` not covered
-
-error[E0381]: use of possibly uninitialized variable: `u`
- --> $DIR/empty-never-array.rs:12:5
- |
-LL | u
- | ^ use of possibly uninitialized `u`
-
-error: aborting due to 2 previous errors
-
-Some errors have detailed explanations: E0005, E0381.
-For more information about an error, try `rustc --explain E0005`.
let Helper::U(u) = Helper::T(t, []);
//~^ ERROR refutable pattern in local binding: `T(_, _)` not covered
u
- //~^ WARN use of possibly uninitialized variable: `u`
- //~| WARN this error has been downgraded to a warning for backwards compatibility
- //~| WARN this represents potential undefined behavior in your code and this warning will
+ //~^ ERROR use of possibly uninitialized variable: `u`
}
fn main() {
LL | let Helper::U(u) = Helper::T(t, []);
| ^^^^^^^^^^^^ pattern `T(_, _)` not covered
-warning[E0381]: use of possibly uninitialized variable: `u`
+error[E0381]: use of possibly uninitialized variable: `u`
--> $DIR/empty-never-array.rs:12:5
|
LL | u
| ^ use of possibly uninitialized `u`
- |
- = warning: this error has been downgraded to a warning for backwards compatibility with previous releases
- = warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
- = note: for more information, try `rustc --explain E0729`
-error: aborting due to previous error
+error: aborting due to 2 previous errors
Some errors have detailed explanations: E0005, E0381.
For more information about an error, try `rustc --explain E0005`.
--- /dev/null
+// Test that we don't allow yielding from a generator while a local is partially
+// initialized.
+
+#![feature(generators)]
+
+struct S { x: i32, y: i32 }
+struct T(i32, i32);
+
+fn test_tuple() {
+ let _ = || {
+ let mut t: (i32, i32);
+ t.0 = 42;
+ //~^ ERROR assign to part of possibly uninitialized variable: `t` [E0381]
+ yield;
+ t.1 = 88;
+ let _ = t;
+ };
+}
+
+fn test_tuple_struct() {
+ let _ = || {
+ let mut t: T;
+ t.0 = 42;
+ //~^ ERROR assign to part of possibly uninitialized variable: `t` [E0381]
+ yield;
+ t.1 = 88;
+ let _ = t;
+ };
+}
+
+fn test_struct() {
+ let _ = || {
+ let mut t: S;
+ t.x = 42;
+ //~^ ERROR assign to part of possibly uninitialized variable: `t` [E0381]
+ yield;
+ t.y = 88;
+ let _ = t;
+ };
+}
+
+fn main() {
+ test_tuple();
+ test_tuple_struct();
+ test_struct();
+}
--- /dev/null
+error[E0381]: assign to part of possibly uninitialized variable: `t`
+ --> $DIR/partial-initialization-across-yield.rs:12:9
+ |
+LL | t.0 = 42;
+ | ^^^^^^^^ use of possibly uninitialized `t`
+
+error[E0381]: assign to part of possibly uninitialized variable: `t`
+ --> $DIR/partial-initialization-across-yield.rs:23:9
+ |
+LL | t.0 = 42;
+ | ^^^^^^^^ use of possibly uninitialized `t`
+
+error[E0381]: assign to part of possibly uninitialized variable: `t`
+ --> $DIR/partial-initialization-across-yield.rs:34:9
+ |
+LL | t.x = 42;
+ | ^^^^^^^^ use of possibly uninitialized `t`
+
+error: aborting due to 3 previous errors
+
+For more information about this error, try `rustc --explain E0381`.
+++ /dev/null
-error[E0005]: refutable pattern in `for` loop binding: `&[]` not covered
- --> $DIR/issue-15381.rs:4:9
- |
-LL | for &[x,y,z] in values.chunks(3).filter(|&xs| xs.len() == 3) {
- | ^^^^^^^^ pattern `&[]` not covered
-
-error[E0381]: borrow of possibly uninitialized variable: `y`
- --> $DIR/issue-15381.rs:6:26
- |
-LL | println!("y={}", y);
- | ^ use of possibly uninitialized `y`
-
-error: aborting due to 2 previous errors
-
-Some errors have detailed explanations: E0005, E0381.
-For more information about an error, try `rustc --explain E0005`.
for &[x,y,z] in values.chunks(3).filter(|&xs| xs.len() == 3) {
//~^ ERROR refutable pattern in `for` loop binding: `&[]` not covered
println!("y={}", y);
- //~^ WARN borrow of possibly uninitialized variable: `y`
- //~| WARN this error has been downgraded to a warning for backwards compatibility
- //~| WARN this represents potential undefined behavior in your code and this warning will
+ //~^ ERROR borrow of possibly uninitialized variable: `y`
}
}
LL | for &[x,y,z] in values.chunks(3).filter(|&xs| xs.len() == 3) {
| ^^^^^^^^ pattern `&[]` not covered
-warning[E0381]: borrow of possibly uninitialized variable: `y`
+error[E0381]: borrow of possibly uninitialized variable: `y`
--> $DIR/issue-15381.rs:6:26
|
LL | println!("y={}", y);
| ^ use of possibly uninitialized `y`
- |
- = warning: this error has been downgraded to a warning for backwards compatibility with previous releases
- = warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
- = note: for more information, try `rustc --explain E0729`
-error: aborting due to previous error
+error: aborting due to 2 previous errors
Some errors have detailed explanations: E0005, E0381.
For more information about an error, try `rustc --explain E0005`.
...
LL | impl<T: Clone + ?Sized> Clone for Node<[T]> {
| ------------------------------------------- first implementation here
+ |
+ = note: upstream crates may add new impl of trait `std::clone::Clone` for type `[_]` in future versions
error: aborting due to previous error
● //~ ERROR: unknown start of token
+
+fn main() {}
\ //~ ERROR: unknown start of token: \
+
+fn main() {}
println!(“hello world”);
//~^ ERROR unknown start of token: \u{201c}
//~^^ HELP Unicode characters '“' (Left Double Quotation Mark) and '”' (Right Double Quotation Mark) look like '"' (Quotation Mark), but are not
+ //~^^^ ERROR unknown start of token: \u{201d}
+ //~^^^^ HELP Unicode character '”' (Right Double Quotation Mark) looks like '"' (Quotation Mark), but it is not
+ //~^^^^^ ERROR expected token: `,`
}
LL | println!("hello world");
| ^^^^^^^^^^^^^
-error: aborting due to previous error
+error: unknown start of token: \u{201d}
+ --> $DIR/unicode-quote-chars.rs:4:26
+ |
+LL | println!(“hello world”);
+ | ^
+help: Unicode character '”' (Right Double Quotation Mark) looks like '"' (Quotation Mark), but it is not
+ |
+LL | println!(“hello world");
+ | ^
+
+error: expected token: `,`
+ --> $DIR/unicode-quote-chars.rs:4:21
+ |
+LL | println!(“hello world”);
+ | ^^^^^ expected `,`
+
+error: aborting due to 3 previous errors
+++ /dev/null
-error[E0005]: refutable pattern in local binding: `Err(_)` not covered
- --> $DIR/recursive-types-are-not-uninhabited.rs:6:9
- |
-LL | let Ok(x) = res;
- | ^^^^^ pattern `Err(_)` not covered
-
-error[E0381]: use of possibly uninitialized variable: `x`
- --> $DIR/recursive-types-are-not-uninhabited.rs:8:5
- |
-LL | x
- | ^ use of possibly uninitialized `x`
-
-error: aborting due to 2 previous errors
-
-Some errors have detailed explanations: E0005, E0381.
-For more information about an error, try `rustc --explain E0005`.
let Ok(x) = res;
//~^ ERROR refutable pattern
x
- //~^ WARN use of possibly uninitialized variable: `x`
- //~| WARN this error has been downgraded to a warning for backwards compatibility
- //~| WARN this represents potential undefined behavior in your code and this warning will
+ //~^ ERROR use of possibly uninitialized variable: `x`
}
fn main() {
LL | let Ok(x) = res;
| ^^^^^ pattern `Err(_)` not covered
-warning[E0381]: use of possibly uninitialized variable: `x`
+error[E0381]: use of possibly uninitialized variable: `x`
--> $DIR/recursive-types-are-not-uninhabited.rs:8:5
|
LL | x
| ^ use of possibly uninitialized `x`
- |
- = warning: this error has been downgraded to a warning for backwards compatibility with previous releases
- = warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
- = note: for more information, try `rustc --explain E0729`
-error: aborting due to previous error
+error: aborting due to 2 previous errors
Some errors have detailed explanations: E0005, E0381.
For more information about an error, try `rustc --explain E0005`.
LL |
LL | impl IntoPyDictPointer for ()
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation for `()`
+ |
+ = note: upstream crates may add new impl of trait `std::iter::Iterator` for type `()` in future versions
error: aborting due to previous error