let span = cx.with_def_site_ctxt(span);
let name = cx.expr_lit(span, ast::LitKind::Str(ident.name, ast::StrStyle::Cooked));
let builder = cx.ident_of("debug_trait_builder", span);
- let builder_expr = cx.expr_ident(span, builder.clone());
+ let builder_expr = cx.expr_ident(span, builder);
let fmt = substr.nonself_args[0].clone();
let _buffer_lock = lock::acquire_global_lock("rustc_errors");
for (pos, line) in rendered_buffer.iter().enumerate() {
for part in line {
- dst.apply_style(lvl.clone(), part.style)?;
+ dst.apply_style(*lvl, part.style)?;
write!(dst, "{}", part.text)?;
dst.reset()?;
}
return Some(());
}
if let &ty::Adt(def, _) = &ta.kind {
- let path_ = self.tcx.def_path_str(def.did.clone());
+ let path_ = self.tcx.def_path_str(def.did);
if path_ == other_path {
self.highlight_outer(&mut t1_out, &mut t2_out, path, sub, i, &other_ty);
return Some(());
let sub_no_defaults_1 = self.strip_generic_default_params(def1.did, sub1);
let sub_no_defaults_2 = self.strip_generic_default_params(def2.did, sub2);
let mut values = (DiagnosticStyledString::new(), DiagnosticStyledString::new());
- let path1 = self.tcx.def_path_str(def1.did.clone());
- let path2 = self.tcx.def_path_str(def2.did.clone());
+ let path1 = self.tcx.def_path_str(def1.did);
+ let path2 = self.tcx.def_path_str(def2.did);
if def1.did == def2.did {
// Easy case. Replace same types with `_` to shorten the output and highlight
// the differing ones.
// even though a satisfactory solution exists.
let generic = GenericKind::Projection(projection_ty);
let verify_bound = self.verify_bound.generic_bound(generic);
- self.delegate.push_verify(origin, generic.clone(), region, verify_bound);
+ self.delegate.push_verify(origin, generic, region, verify_bound);
}
}
place: {:?}",
place_span.0
);
- this.reservation_error_reported.insert(place_span.0.clone());
+ this.reservation_error_reported.insert(place_span.0);
}
Activation(_, activating) => {
debug!(
// Box starts out uninitialized - need to create a separate
// move-path for the interior so it will be separate from
// the exterior.
- self.create_move_path(self.builder.tcx.mk_place_deref(place.clone()));
+ self.create_move_path(self.builder.tcx.mk_place_deref(*place));
self.gather_init(place.as_ref(), InitKind::Shallow);
} else {
self.gather_init(place.as_ref(), InitKind::Deep);
for offset in from..to {
let elem =
ProjectionElem::ConstantIndex { offset, min_length: len, from_end: false };
- let path = self.add_move_path(base_path, &elem, |tcx| {
- tcx.mk_place_elem(base_place.clone(), elem)
- });
+ let path =
+ self.add_move_path(base_path, &elem, |tcx| tcx.mk_place_elem(base_place, elem));
self.record_move(place, path);
}
} else {
recursion_depths: &mut DefIdMap<usize>,
inlining_map: MTRef<'_, MTLock<InliningMap<'tcx>>>,
) {
- if !visited.lock_mut().insert(starting_point.clone()) {
+ if !visited.lock_mut().insert(starting_point) {
// We've been here already, no need to search again.
return;
}
// BB #2
// `dest[i] = Clone::clone(src[beg])`;
// Goto #3 if ok, #5 if unwinding happens.
- let dest_field = self.tcx.mk_place_index(dest.clone(), beg);
+ let dest_field = self.tcx.mk_place_index(dest, beg);
let src_field = self.tcx.mk_place_index(src, beg);
self.make_clone_call(dest_field, src_field, ty, BasicBlock::new(3), BasicBlock::new(5));
let mut previous_field = None;
for (i, ity) in tys.enumerate() {
let field = Field::new(i);
- let src_field = self.tcx.mk_place_field(src.clone(), field, ity);
+ let src_field = self.tcx.mk_place_field(src, field, ity);
- let dest_field = self.tcx.mk_place_field(dest.clone(), field, ity);
+ let dest_field = self.tcx.mk_place_field(dest, field, ity);
// #(2i + 1) is the cleanup block for the previous clone operation
let cleanup_block = self.block_index_offset(1);
// BB #(2i)
// `dest.i = Clone::clone(&src.i);`
// Goto #(2i + 2) if ok, #(2i + 1) if unwinding happens.
- self.make_clone_call(dest_field.clone(), src_field, ity, next_block, cleanup_block);
+ self.make_clone_call(dest_field, src_field, ity, next_block, cleanup_block);
// BB #(2i + 1) (cleanup)
if let Some((previous_field, previous_cleanup)) = previous_field.take() {
let tuple_tmp_args = tuple_tys.iter().enumerate().map(|(i, ty)| {
// This is e.g., `tuple_tmp.0` in our example above.
let tuple_field =
- Operand::Move(tcx.mk_place_field(tuple.clone(), Field::new(i), ty.expect_ty()));
+ Operand::Move(tcx.mk_place_field(tuple, Field::new(i), ty.expect_ty()));
// Spill to a local to make e.g., `tmp0`.
self.create_temp_if_necessary(tuple_field, callsite, caller_body)
let offset = i as u32;
assert_eq!(offset as usize, i);
tcx.mk_place_elem(
- lhs.clone(),
+ lhs,
ProjectionElem::ConstantIndex {
offset,
// FIXME(eddyb) `min_length` doesn't appear to be used.
)
} else {
let field = Field::new(active_field_index.unwrap_or(i));
- tcx.mk_place_field(lhs.clone(), field, ty)
+ tcx.mk_place_field(lhs, field, ty)
};
Statement { source_info, kind: StatementKind::Assign(box (lhs_field, Rvalue::Use(op))) }
})
assert_eq!(self.elaborator.param_env().reveal, Reveal::All);
let field_ty =
tcx.normalize_erasing_regions(self.elaborator.param_env(), f.ty(tcx, substs));
- (tcx.mk_place_field(base_place.clone(), field, field_ty), subpath)
+ (tcx.mk_place_field(base_place, field, field_ty), subpath)
})
.collect()
}
.enumerate()
.map(|(i, &ty)| {
(
- self.tcx().mk_place_field(self.place.clone(), Field::new(i), ty),
+ self.tcx().mk_place_field(self.place, Field::new(i), ty),
self.elaborator.field_subpath(self.path, Field::new(i)),
)
})
fn open_drop_for_box(&mut self, adt: &'tcx ty::AdtDef, substs: SubstsRef<'tcx>) -> BasicBlock {
debug!("open_drop_for_box({:?}, {:?}, {:?})", self, adt, substs);
- let interior = self.tcx().mk_place_deref(self.place.clone());
+ let interior = self.tcx().mk_place_deref(self.place);
let interior_path = self.elaborator.deref_subpath(self.path);
let succ = self.succ; // FIXME(#43234)
if let Some(variant_path) = subpath {
let base_place = tcx.mk_place_elem(
- self.place.clone(),
+ self.place,
ProjectionElem::Downcast(Some(variant.ident.name), variant_index),
);
let fields = self.move_paths_for_fields(base_place, variant_path, &variant, substs);
(Rvalue::Use(copy(cur.into())), Rvalue::BinaryOp(BinOp::Offset, move_(cur.into()), one))
} else {
(
- Rvalue::AddressOf(Mutability::Mut, tcx.mk_place_index(self.place.clone(), cur)),
+ Rvalue::AddressOf(Mutability::Mut, tcx.mk_place_index(self.place, cur)),
Rvalue::BinaryOp(BinOp::Add, move_(cur.into()), one),
)
};
self.elaborator.patch().patch_terminator(
drop_block,
TerminatorKind::Drop {
- location: tcx.mk_place_deref(ptr.clone()),
+ location: tcx.mk_place_deref(ptr),
target: loop_block,
unwind: unwind.into_option(),
},
.map(|i| {
(
tcx.mk_place_elem(
- self.place.clone(),
+ self.place,
ProjectionElem::ConstantIndex {
offset: i,
min_length: size,
switch_ty: tcx.types.usize,
values: From::from(USIZE_SWITCH_ZERO),
targets: vec![
- self.drop_loop_pair(ety, false, len.clone()),
- self.drop_loop_pair(ety, true, len.clone()),
+ self.drop_loop_pair(ety, false, len),
+ self.drop_loop_pair(ety, true, len),
],
},
}),
.map(|(i, f)| {
let field = Field::new(i);
let field_ty = f.ty(tcx, substs);
- Operand::Move(tcx.mk_place_field(self.place.clone(), field, field_ty))
+ Operand::Move(tcx.mk_place_field(self.place, field, field_ty))
})
.collect();
use crate::build::ForGuard::OutsideGuard;
use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder};
use crate::hair::*;
-use rustc_middle::mir::*;
use rustc_hir as hir;
+use rustc_middle::mir::*;
use rustc_span::Span;
impl<'a, 'tcx> Builder<'a, 'tcx> {
// This is a `break`-able block
let exit_block = this.cfg.start_new_block();
let block_exit =
- this.in_breakable_scope(None, exit_block, destination.clone(), |this| {
+ this.in_breakable_scope(None, exit_block, destination, |this| {
this.ast_block_stmts(destination, block, span, stmts, expr, safety_mode)
});
this.cfg.goto(unpack!(block_exit), source_info, exit_block);
use crate::build::expr::category::{Category, RvalueFunc};
use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder};
use crate::hair::*;
-use rustc_middle::mir::*;
-use rustc_middle::ty::{self, CanonicalUserTypeAnnotation};
use rustc_data_structures::fx::FxHashMap;
use rustc_hir as hir;
+use rustc_middle::mir::*;
+use rustc_middle::ty::{self, CanonicalUserTypeAnnotation};
use rustc_span::symbol::sym;
use rustc_target::spec::abi::Abi;
// Start the loop.
this.cfg.goto(block, source_info, loop_block);
- this.in_breakable_scope(
- Some(loop_block),
- exit_block,
- destination.clone(),
- move |this| {
- // conduct the test, if necessary
- let body_block = this.cfg.start_new_block();
- let diverge_cleanup = this.diverge_cleanup();
- this.cfg.terminate(
- loop_block,
- source_info,
- TerminatorKind::FalseUnwind {
- real_target: body_block,
- unwind: Some(diverge_cleanup),
- },
- );
-
- // The “return” value of the loop body must always be an unit. We therefore
- // introduce a unit temporary as the destination for the loop body.
- let tmp = this.get_unit_temp();
- // Execute the body, branching back to the test.
- let body_block_end = unpack!(this.into(tmp, body_block, body));
- this.cfg.goto(body_block_end, source_info, loop_block);
- },
- );
+ this.in_breakable_scope(Some(loop_block), exit_block, destination, move |this| {
+ // conduct the test, if necessary
+ let body_block = this.cfg.start_new_block();
+ let diverge_cleanup = this.diverge_cleanup();
+ this.cfg.terminate(
+ loop_block,
+ source_info,
+ TerminatorKind::FalseUnwind {
+ real_target: body_block,
+ unwind: Some(diverge_cleanup),
+ },
+ );
+
+        // The “return” value of the loop body must always be a unit. We therefore
+ // introduce a unit temporary as the destination for the loop body.
+ let tmp = this.get_unit_temp();
+ // Execute the body, branching back to the test.
+ let body_block_end = unpack!(this.into(tmp, body_block, body));
+ this.cfg.goto(body_block_end, source_info, loop_block);
+ });
exit_block.unit()
}
ExprKind::Call { ty, fun, args, from_hir_call } => {
let field_names = this.hir.all_fields(adt_def, variant_index);
- let fields =
- if let Some(FruInfo { base, field_types }) = base {
- let base = unpack!(block = this.as_place(block, base));
-
- // MIR does not natively support FRU, so for each
- // base-supplied field, generate an operand that
- // reads it from the base.
- field_names
- .into_iter()
- .zip(field_types.into_iter())
- .map(|(n, ty)| match fields_map.get(&n) {
- Some(v) => v.clone(),
- None => this.consume_by_copy_or_move(
- this.hir.tcx().mk_place_field(base.clone(), n, ty),
- ),
- })
- .collect()
- } else {
- field_names.iter().filter_map(|n| fields_map.get(n).cloned()).collect()
- };
+ let fields = if let Some(FruInfo { base, field_types }) = base {
+ let base = unpack!(block = this.as_place(block, base));
+
+ // MIR does not natively support FRU, so for each
+ // base-supplied field, generate an operand that
+ // reads it from the base.
+ field_names
+ .into_iter()
+ .zip(field_types.into_iter())
+ .map(|(n, ty)| match fields_map.get(&n) {
+ Some(v) => v.clone(),
+ None => this.consume_by_copy_or_move(
+ this.hir.tcx().mk_place_field(base, n, ty),
+ ),
+ })
+ .collect()
+ } else {
+ field_names.iter().filter_map(|n| fields_map.get(n).cloned()).collect()
+ };
let inferred_ty = expr.ty;
let user_ty = user_ty.map(|ty| {
self.schedule_drop_for_binding(binding.var_id, binding.span, OutsideGuard);
}
let rvalue = match binding.binding_mode {
- BindingMode::ByValue => {
- Rvalue::Use(self.consume_by_copy_or_move(binding.source.clone()))
- }
+ BindingMode::ByValue => Rvalue::Use(self.consume_by_copy_or_move(binding.source)),
BindingMode::ByRef(borrow_kind) => {
Rvalue::Ref(re_erased, borrow_kind, binding.source)
}
subpatterns
.iter()
.map(|fieldpat| {
- let place = self.hir.tcx().mk_place_field(
- place.clone(),
- fieldpat.field,
- fieldpat.pattern.ty,
- );
+ let place =
+ self.hir.tcx().mk_place_field(place, fieldpat.field, fieldpat.pattern.ty);
MatchPair::new(place, &fieldpat.pattern)
})
.collect()
match_pairs.extend(prefix.iter().enumerate().map(|(idx, subpattern)| {
let elem =
ProjectionElem::ConstantIndex { offset: idx as u32, min_length, from_end: false };
- let place = tcx.mk_place_elem(place.clone(), elem);
+ let place = tcx.mk_place_elem(*place, elem);
MatchPair::new(place, subpattern)
}));
if let Some(subslice_pat) = opt_slice {
let suffix_len = suffix.len() as u32;
let subslice = tcx.mk_place_elem(
- place.clone(),
+ *place,
ProjectionElem::Subslice {
from: prefix.len() as u32,
to: if exact_size { min_length - suffix_len } else { suffix_len },
min_length,
from_end: !exact_size,
};
- let place = tcx.mk_place_elem(place.clone(), elem);
+ let place = tcx.mk_place_elem(*place, elem);
MatchPair::new(place, subpattern)
}));
}
// Warn if the user enables a lib feature multiple times.
duplicate_feature_err(tcx.sess, *span, *feature);
}
- remaining_lib_features.insert(feature, span.clone());
+ remaining_lib_features.insert(feature, *span);
}
// `stdbuild` has special handling for `libc`, so we need to
// recognise the feature when building std.
let mut graph = Graph::with_capacity(nodes.len(), edges.len());
let mut indices = FxHashMap::default();
for node in nodes {
- indices.insert(node.clone(), graph.add_node(node.clone()));
+ indices.insert(*node, graph.add_node(*node));
}
for &(ref source, ref target) in edges {
if param_name.name == kw::UnderscoreLifetime {
// Pick the elided lifetime "definition" if one exists
// and use it to make an elision scope.
- self.lifetime_uses.insert(def_id.clone(), LifetimeUseSet::Many);
+ self.lifetime_uses.insert(def_id, LifetimeUseSet::Many);
elision = Some(reg);
} else {
lifetimes.insert(name, reg);
}
} else {
- self.lifetime_uses.insert(def_id.clone(), LifetimeUseSet::Many);
+ self.lifetime_uses.insert(def_id, LifetimeUseSet::Many);
lifetimes.insert(name, reg);
}
}
.borrow_mut()
.entry(span)
.or_default()
- .push(error.obligation.predicate.clone());
+ .push(error.obligation.predicate);
}
// We do this in 2 passes because we want to display errors in order, though
match obligation.predicate {
ty::Predicate::Trait(ref data, _) => {
- let trait_obligation = obligation.with(data.clone());
+ let trait_obligation = obligation.with(*data);
if data.is_global() {
// no type variables present, can use evaluation for better caching.
}
ty::Predicate::Projection(ref data) => {
- let project_obligation = obligation.with(data.clone());
+ let project_obligation = obligation.with(*data);
match project::poly_project_and_unify_type(self.selcx, &project_obligation) {
Ok(None) => {
let tcx = self.selcx.tcx();
match obligation.predicate {
ty::Predicate::Trait(ref t, _) => {
debug_assert!(!t.has_escaping_bound_vars());
- let obligation = obligation.with(t.clone());
+ let obligation = obligation.with(*t);
self.evaluate_trait_predicate_recursively(previous_stack, obligation)
}
}
ty::Predicate::Projection(ref data) => {
- let project_obligation = obligation.with(data.clone());
+ let project_obligation = obligation.with(*data);
match project::poly_project_and_unify_type(self, &project_obligation) {
Ok(Some(mut subobligations)) => {
self.add_depth(subobligations.iter_mut(), obligation.recursion_depth);
// separately rather than using `stack.fresh_trait_ref` --
// this is because we want the unbound variables to be
// replaced with fresh types starting from index 0.
- let cache_fresh_trait_pred = self.infcx.freshen(stack.obligation.predicate.clone());
+ let cache_fresh_trait_pred = self.infcx.freshen(stack.obligation.predicate);
debug!(
"candidate_from_obligation(cache_fresh_trait_pred={:?}, obligation={:?})",
cache_fresh_trait_pred, stack
self.infcx.probe(|_| {
self.match_projection(
obligation,
- bound.clone(),
- placeholder_trait_predicate.trait_ref.clone(),
+ *bound,
+ placeholder_trait_predicate.trait_ref,
&placeholder_map,
snapshot,
)
let result = self.match_projection(
obligation,
bound,
- placeholder_trait_predicate.trait_ref.clone(),
+ placeholder_trait_predicate.trait_ref,
&placeholder_map,
snapshot,
);
// Keep only those bounds which may apply, and propagate overflow if it occurs.
let mut param_candidates = vec![];
for bound in matching_bounds {
- let wc = self.evaluate_where_clause(stack, bound.clone())?;
+ let wc = self.evaluate_where_clause(stack, bound)?;
if wc.may_apply() {
param_candidates.push(ParamCandidate(bound));
}
// where-clause trait-ref could be unified with the obligation
// trait-ref. Repeat that unification now without any
// transactional boundary; it should not fail.
- match self.match_where_clause_trait_ref(obligation, param.clone()) {
+ match self.match_where_clause_trait_ref(obligation, param) {
Ok(obligations) => obligations,
Err(()) => {
bug!(
);
};
- self.tables.user_provided_types_mut().insert(hir_id, c_ty.clone());
+ self.tables.user_provided_types_mut().insert(hir_id, *c_ty);
if let ty::UserType::TypeOf(_, user_substs) = c_ty.value {
if self.rustc_dump_user_substs {
);
};
- self.tables.user_provided_sigs.insert(def_id, c_sig.clone());
+ self.tables.user_provided_sigs.insert(def_id, *c_sig);
}
}
for (&local_id, fn_sig) in fcx_tables.liberated_fn_sigs().iter() {
let hir_id = hir::HirId { owner: common_hir_owner, local_id };
let fn_sig = self.resolve(fn_sig, &hir_id);
- self.tables.liberated_fn_sigs_mut().insert(hir_id, fn_sig.clone());
+ self.tables.liberated_fn_sigs_mut().insert(hir_id, fn_sig);
}
}
continue;
}
- let mut for_generics = self.extract_for_generics(tcx, orig_p.clone());
+ let mut for_generics = self.extract_for_generics(tcx, orig_p);
assert!(bounds.len() == 1);
let mut b = bounds.pop().expect("bounds were empty");
}
let mut ret = Vec::new();
- let attrs_clone = attrs.clone();
+ let attrs_clone = attrs;
let inner = match res {
Res::Def(DefKind::Trait, did) => {
let mut impls = Vec::new();
for &did in tcx.inherent_impls(did).iter() {
- build_impl(cx, did, attrs.clone(), &mut impls);
+ build_impl(cx, did, attrs, &mut impls);
}
impls
let name = self.generate_name(line, &filename);
let cratename = self.cratename.to_string();
let opts = self.opts.clone();
- let edition = config.edition.unwrap_or(self.options.edition.clone());
+ let edition = config.edition.unwrap_or(self.options.edition);
let options = self.options.clone();
let runtool = self.options.runtool.clone();
let runtool_args = self.options.runtool_args.clone();