impl<'a> ShouldRun<'a> {
fn new(builder: &'a Builder) -> ShouldRun<'a> {
ShouldRun {
- builder: builder,
+ builder,
paths: BTreeSet::new(),
is_really_default: true, // by default no additional conditions
}
};
let builder = Builder {
- build: build,
+ build,
top_stage: build.config.stage.unwrap_or(2),
- kind: kind,
+ kind,
cache: Cache::new(),
stack: RefCell::new(Vec::new()),
};
};
let builder = Builder {
- build: build,
+ build,
top_stage: build.config.stage.unwrap_or(2),
- kind: kind,
+ kind,
cache: Cache::new(),
stack: RefCell::new(Vec::new()),
};
builder.ensure(CrateLibrustc {
compiler,
target: run.target,
- test_kind: test_kind,
+ test_kind,
krate: name,
});
};
builder.ensure(Crate {
compiler,
target: run.target,
- mode: mode,
- test_kind: test_kind,
+ mode,
+ test_kind,
krate: name,
});
};
let from = builder.compiler(1, build.build);
builder.ensure(Std {
compiler: from,
- target: target,
+ target,
});
println!("Uplifting stage1 std ({} -> {})", from.host, target);
builder.ensure(StdLink {
compiler: from,
target_compiler: compiler,
- target: target,
+ target,
});
return;
}
builder.ensure(StdLink {
compiler: builder.compiler(compiler.stage, build.build),
target_compiler: compiler,
- target: target,
+ target,
});
}
}
builder.ensure(tool::CleanTools {
compiler: target_compiler,
- target: target,
+ target,
mode: Mode::Libstd,
});
}
if build.force_use_stage1(compiler, target) {
builder.ensure(Test {
compiler: builder.compiler(1, build.build),
- target: target,
+ target,
});
println!("Uplifting stage1 test ({} -> {})", &build.build, target);
builder.ensure(TestLink {
compiler: builder.compiler(1, build.build),
target_compiler: compiler,
- target: target,
+ target,
});
return;
}
builder.ensure(TestLink {
compiler: builder.compiler(compiler.stage, build.build),
target_compiler: compiler,
- target: target,
+ target,
});
}
}
&libtest_stamp(build, compiler, target));
builder.ensure(tool::CleanTools {
compiler: target_compiler,
- target: target,
+ target,
mode: Mode::Libtest,
});
}
if build.force_use_stage1(compiler, target) {
builder.ensure(Rustc {
compiler: builder.compiler(1, build.build),
- target: target,
+ target,
});
println!("Uplifting stage1 rustc ({} -> {})", &build.build, target);
builder.ensure(RustcLink {
&librustc_stamp(build, compiler, target));
builder.ensure(tool::CleanTools {
compiler: target_compiler,
- target: target,
+ target,
mode: Mode::Librustc,
});
}
let name = self.name;
// build book first edition
builder.ensure(Rustbook {
- target: target,
+ target,
name: INTERNER.intern_string(format!("{}/first-edition", name)),
});
// build book second edition
builder.ensure(Rustbook {
- target: target,
+ target,
name: INTERNER.intern_string(format!("{}/second-edition", name)),
});
}
"test" => {
Subcommand::Test {
- paths: paths,
+ paths,
test_args: matches.opt_strs("test-args"),
fail_fast: !matches.opt_present("no-fail-fast"),
}
}
"bench" => {
Subcommand::Bench {
- paths: paths,
+ paths,
test_args: matches.opt_strs("test-args"),
}
}
}
"dist" => {
Subcommand::Dist {
- paths: paths,
+ paths,
}
}
"install" => {
Subcommand::Install {
- paths: paths,
+ paths,
}
}
_ => {
Flags {
verbose: matches.opt_count("verbose"),
- stage: stage,
+ stage,
on_fail: matches.opt_str("on-fail"),
keep_stage: matches.opt_str("keep-stage").map(|j| j.parse().unwrap()),
build: matches.opt_str("build").map(|s| INTERNER.intern_string(s)),
target: split(matches.opt_strs("target"))
.into_iter().map(|x| INTERNER.intern_string(x)).collect::<Vec<_>>(),
config: cfg_file,
- src: src,
+ src,
jobs: matches.opt_str("jobs").map(|j| j.parse().unwrap()),
- cmd: cmd,
+ cmd,
incremental: matches.opt_present("incremental"),
}
}
hosts: config.hosts.clone(),
targets: config.targets.clone(),
- config: config,
- src: src,
- out: out,
+ config,
+ src,
+ out,
- rust_info: rust_info,
- cargo_info: cargo_info,
- rls_info: rls_info,
+ rust_info,
+ cargo_info,
+ rls_info,
cc: HashMap::new(),
cxx: HashMap::new(),
crates: HashMap::new(),
lldb_version: None,
lldb_python_dir: None,
- is_sudo: is_sudo,
+ is_sudo,
ci_env: CiEnv::current(),
delayed_failures: Cell::new(0),
}
doc_step: format!("doc-crate-{}", name),
test_step: format!("test-crate-{}", name),
bench_step: format!("bench-crate-{}", name),
- name: name,
+ name,
version: package.version,
deps: Vec::new(),
- path: path,
+ path,
});
}
}
let x: Box<_> = box ArcInner {
strong: atomic::AtomicUsize::new(1),
weak: atomic::AtomicUsize::new(1),
- data: data,
+ data,
};
Arc { ptr: Shared::from(Box::into_unique(x)) }
}
debug_assert!(pos < data.len());
let elt = ptr::read(&data[pos]);
Hole {
- data: data,
+ data,
elt: Some(elt),
- pos: pos,
+ pos,
}
}
let place = Placer::make_place(self.data.place_back());
BinaryHeapPlace {
heap: ptr,
- place: place,
+ place,
}
}
}
IntermediateBox {
ptr: p,
- layout: layout,
+ layout,
marker: marker::PhantomData,
}
}
match search::search_tree(self.root.as_mut(), key) {
Found(handle) => {
Some(OccupiedEntry {
- handle: handle,
+ handle,
length: &mut self.length,
_marker: PhantomData,
}
Found(handle) => Some(mem::replace(handle.into_kv_mut().0, key)),
GoDown(handle) => {
VacantEntry {
- key: key,
- handle: handle,
+ key,
+ handle,
length: &mut self.length,
_marker: PhantomData,
}
match search::search_tree(self.root.as_mut(), key) {
Found(handle) => {
Some(OccupiedEntry {
- handle: handle,
+ handle,
length: &mut self.length,
_marker: PhantomData,
}
match search::search_tree(self.root.as_mut(), &key) {
Found(handle) => {
Occupied(OccupiedEntry {
- handle: handle,
+ handle,
length: &mut self.length,
_marker: PhantomData,
})
}
GoDown(handle) => {
Vacant(VacantEntry {
- key: key,
- handle: handle,
+ key,
+ handle,
length: &mut self.length,
_marker: PhantomData,
})
debug_assert!(idx < node.len());
Handle {
- node: node,
- idx: idx,
+ node,
+ idx,
_marker: PhantomData
}
}
debug_assert!(idx <= node.len());
Handle {
- node: node,
- idx: idx,
+ node,
+ idx,
_marker: PhantomData
}
}
let mut new_root = Root {
node: BoxedNode::from_internal(new_node),
- height: height
+ height,
};
for i in 0..(new_len+1) {
> {
match self.node.force() {
ForceResult::Leaf(node) => ForceResult::Leaf(Handle {
- node: node,
+ node,
idx: self.idx,
_marker: PhantomData
}),
ForceResult::Internal(node) => ForceResult::Internal(Handle {
- node: node,
+ node,
idx: self.idx,
_marker: PhantomData
})
Node {
next: None,
prev: None,
- element: element,
+ element,
}
}
let node = Some(Shared::from(Box::into_unique(box Node {
next: Some(head),
prev: Some(prev),
- element: element,
+ element,
})));
prev.as_mut().next = node;
// Unique::empty() doubles as "unallocated" and "zero-sized allocation"
RawVec {
ptr: Unique::empty(),
- cap: cap,
- a: a,
+ cap,
+ a,
}
}
RawVec {
ptr: Unique::new_unchecked(ptr as *mut _),
- cap: cap,
- a: a,
+ cap,
+ a,
}
}
}
pub unsafe fn from_raw_parts_in(ptr: *mut T, cap: usize, a: A) -> Self {
RawVec {
ptr: Unique::new_unchecked(ptr),
- cap: cap,
- a: a,
+ cap,
+ a,
}
}
}
pub unsafe fn from_raw_parts(ptr: *mut T, cap: usize) -> Self {
RawVec {
ptr: Unique::new_unchecked(ptr),
- cap: cap,
+ cap,
a: Heap,
}
}
ptr: Shared::from(Box::into_unique(box RcBox {
strong: Cell::new(1),
weak: Cell::new(1),
- value: value,
+ value,
})),
}
}
// Push this run onto the stack.
runs.push(Run {
- start: start,
+ start,
len: end - start,
});
end = start;
let chars_iter = self[start..end].chars();
Drain {
- start: start,
- end: end,
+ start,
+ end,
iter: chars_iter,
string: self_ptr,
}
let chars_iter = self[start..end].chars();
Splice {
- start: start,
- end: end,
+ start,
+ end,
iter: chars_iter,
string: self_ptr,
- replace_with: replace_with
+ replace_with,
}
}
mem::forget(self);
IntoIter {
buf: Shared::new_unchecked(begin),
- cap: cap,
+ cap,
ptr: begin,
- end: end,
+ end,
}
}
}
VecDeque {
tail: 0,
head: len,
- buf: buf,
+ buf,
}
}
}
impl<'a, 'b: 'a> PadAdapter<'a, 'b> {
fn new(fmt: &'a mut fmt::Formatter<'b>) -> PadAdapter<'a, 'b> {
PadAdapter {
- fmt: fmt,
+ fmt,
on_newline: false,
}
}
-> DebugStruct<'a, 'b> {
let result = fmt.write_str(name);
DebugStruct {
- fmt: fmt,
- result: result,
+ fmt,
+ result,
has_fields: false,
}
}
pub fn debug_tuple_new<'a, 'b>(fmt: &'a mut fmt::Formatter<'b>, name: &str) -> DebugTuple<'a, 'b> {
let result = fmt.write_str(name);
DebugTuple {
- fmt: fmt,
- result: result,
+ fmt,
+ result,
fields: 0,
empty_name: name.is_empty(),
}
let result = write!(fmt, "{{");
DebugSet {
inner: DebugInner {
- fmt: fmt,
- result: result,
+ fmt,
+ result,
has_fields: false,
},
}
let result = write!(fmt, "[");
DebugList {
inner: DebugInner {
- fmt: fmt,
- result: result,
+ fmt,
+ result,
has_fields: false,
},
}
pub fn debug_map_new<'a, 'b>(fmt: &'a mut fmt::Formatter<'b>) -> DebugMap<'a, 'b> {
let result = write!(fmt, "{{");
DebugMap {
- fmt: fmt,
- result: result,
+ fmt,
+ result,
has_fields: false,
}
}
pub fn new_v1(pieces: &'a [&'a str],
args: &'a [ArgumentV1<'a>]) -> Arguments<'a> {
Arguments {
- pieces: pieces,
+ pieces,
fmt: None,
- args: args
+ args,
}
}
args: &'a [ArgumentV1<'a>],
fmt: &'a [rt::v1::Argument]) -> Arguments<'a> {
Arguments {
- pieces: pieces,
+ pieces,
fmt: Some(fmt),
- args: args
+ args,
}
}
type Item = (A::Item, B::Item);
default fn new(a: A, b: B) -> Self {
Zip {
- a: a,
- b: b,
+ a,
+ b,
index: 0, // unused
len: 0, // unused
}
fn new(a: A, b: B) -> Self {
let len = cmp::min(a.len(), b.len());
Zip {
- a: a,
- b: b,
+ a,
+ b,
index: 0,
- len: len,
+ len,
}
}
fn new(iter: I) -> Self {
ResultShunt {
- iter: iter,
+ iter,
error: None,
}
}
assert_eq!(self.f << edelta >> edelta, self.f);
Fp {
f: self.f << edelta,
- e: e,
+ e,
}
}
}
}
FullDecoded::Finite(ref decoded) => {
let (len, exp) = format_shortest(decoded, buf);
- Formatted { sign: sign,
+ Formatted { sign,
parts: digits_to_dec_str(&buf[..len], exp, frac_digits, parts) }
}
}
let trunc = if ndigits < maxlen { ndigits } else { maxlen };
let (len, exp) = format_exact(decoded, &mut buf[..trunc], i16::MIN);
- Formatted { sign: sign,
+ Formatted { sign,
parts: digits_to_exp_str(&buf[..len], exp, ndigits, upper, parts) }
}
}
Formatted { sign: sign, parts: &parts[..1] }
}
} else {
- Formatted { sign: sign,
+ Formatted { sign,
parts: digits_to_dec_str(&buf[..len], exp, frac_digits, parts) }
}
}
{
Split {
v: self,
- pred: pred,
+ pred,
finished: false
}
}
fn splitn<'a, P: Pattern<'a>>(&'a self, count: usize, pat: P) -> SplitN<'a, P> {
SplitN(SplitNInternal {
iter: self.split(pat).0,
- count: count,
+ count,
})
}
fn into_searcher(self, haystack: &'a str) -> CharEqSearcher<'a, C> {
CharEqSearcher {
ascii_only: self.0.only_ascii(),
- haystack: haystack,
+ haystack,
char_eq: self.0,
char_indices: haystack.char_indices(),
}
fn new(haystack: &'a str, needle: &'b str) -> StrSearcher<'a, 'b> {
if needle.is_empty() {
StrSearcher {
- haystack: haystack,
- needle: needle,
+ haystack,
+ needle,
searcher: StrSearcherImpl::Empty(EmptyNeedle {
position: 0,
end: haystack.len(),
}
} else {
StrSearcher {
- haystack: haystack,
- needle: needle,
+ haystack,
+ needle,
searcher: StrSearcherImpl::TwoWay(
TwoWaySearcher::new(needle.as_bytes(), haystack.len())
),
TwoWaySearcher::reverse_maximal_suffix(needle, period, true));
TwoWaySearcher {
- crit_pos: crit_pos,
- crit_pos_back: crit_pos_back,
- period: period,
+ crit_pos,
+ crit_pos_back,
+ period,
byteset: Self::byteset_create(&needle[..period]),
position: 0,
- end: end,
+ end,
memory: 0,
memory_back: needle.len(),
}
// reverse search.
TwoWaySearcher {
- crit_pos: crit_pos,
+ crit_pos,
crit_pos_back: crit_pos,
period: cmp::max(crit_pos, needle.len() - crit_pos) + 1,
byteset: Self::byteset_create(needle),
position: 0,
- end: end,
+ end,
memory: usize::MAX, // Dummy value to signify that the period is long
memory_back: usize::MAX,
}
pub fn cycle<T>(data: &[T]) -> CycleIter<T> {
CycleIter {
index: 0,
- data: data,
+ data,
}
}
fn r(i: Rc<RefCell<isize>>) -> R {
R {
- i: i
+ i,
}
}
Argument {
position: pos,
- format: format,
+ format,
}
}
(0, _) => {
Opt {
name: Long((long_name)),
- hasarg: hasarg,
- occur: occur,
+ hasarg,
+ occur,
aliases: Vec::new(),
}
}
(1, 0) => {
Opt {
name: Short(short_name.chars().next().unwrap()),
- hasarg: hasarg,
- occur: occur,
+ hasarg,
+ occur,
aliases: Vec::new(),
}
}
(1, _) => {
Opt {
name: Long((long_name)),
- hasarg: hasarg,
- occur: occur,
+ hasarg,
+ occur,
aliases: vec![Opt {
name: Short(short_name.chars().next().unwrap()),
- hasarg: hasarg,
- occur: occur,
+ hasarg,
+ occur,
aliases: Vec::new(),
}],
}
long_name: long_name.to_owned(),
hint: hint.to_owned(),
desc: desc.to_owned(),
- hasarg: hasarg,
- occur: occur,
+ hasarg,
+ occur,
}
}
}
}
Ok(Matches {
- opts: opts,
- vals: vals,
- free: free,
+ opts,
+ vals,
+ free,
})
}
fn edge(from: usize, to: usize, label: &'static str, style: Style) -> Edge {
Edge {
- from: from,
- to: to,
- label: label,
- style: style,
+ from,
+ to,
+ label,
+ style,
}
}
-> LabelledGraph {
let count = node_labels.len();
LabelledGraph {
- name: name,
+ name,
node_labels: node_labels.to_opt_strs(),
- edges: edges,
+ edges,
node_styles: match node_styles {
Some(nodes) => nodes,
None => vec![Style::None; count],
let exception = Box::new(Exception {
_uwe: uw::_Unwind_Exception {
exception_class: rust_exception_class(),
- exception_cleanup: exception_cleanup,
+ exception_cleanup,
private: [0; uw::unwinder_private_data_size],
},
cause: Some(data),
fn new_raw(shape: f64, scale: f64) -> GammaLargeShape {
let d = shape - 1. / 3.;
GammaLargeShape {
- scale: scale,
+ scale,
c: 1. / (9. * d).sqrt(),
- d: d,
+ d,
}
}
}
"WeightedChoice::new called with a total weight of 0");
WeightedChoice {
- items: items,
+ items,
// we're likely to be generating numbers in this range
// relatively often, so might as well cache it
weight_range: Range::new(0, running_total),
pub fn new(mean: f64, std_dev: f64) -> Normal {
assert!(std_dev >= 0.0, "Normal::new called with `std_dev` < 0");
Normal {
- mean: mean,
- std_dev: std_dev,
+ mean,
+ std_dev,
}
}
}
let zone = unsigned_max - unsigned_max % range;
Range {
- low: low,
+ low,
range: range as $ty,
accept_zone: zone as $ty
}
impl SampleRange for $ty {
fn construct_range(low: $ty, high: $ty) -> Range<$ty> {
Range {
- low: low,
+ low,
range: high - low,
accept_zone: 0.0 // unused
}
}
let (x, y, z, w) = tuple;
XorShiftRng {
- x: x,
- y: y,
- z: z,
- w: w,
+ x,
+ y,
+ z,
+ w,
}
}
}
/// * `reseeder`: the reseeding object to use.
pub fn new(rng: R, generation_threshold: usize, reseeder: Rsdr) -> ReseedingRng<R, Rsdr> {
ReseedingRng {
- rng: rng,
- generation_threshold: generation_threshold,
+ rng,
+ generation_threshold,
bytes_generated: 0,
- reseeder: reseeder,
+ reseeder,
}
}
let krate = tcx.hir.krate();
let live_symbols = find_live(tcx, access_levels, krate);
let mut visitor = DeadVisitor {
- tcx: tcx,
- live_symbols: live_symbols,
+ tcx,
+ live_symbols,
};
intravisit::walk_crate(&mut visitor, krate);
}
substs: &'tcx Substs<'tcx>)
-> Ty<'tcx> {
self.mk_ty(TyProjection(ProjectionTy {
- item_def_id: item_def_id,
- substs: substs,
+ item_def_id,
+ substs,
}))
}
let substs = relation.relate(&a.substs, &b.substs)?;
Ok(ty::ExistentialProjection {
item_def_id: a.item_def_id,
- substs: substs,
+ substs,
ty,
})
}
tcx.lift(&self.substs).map(|substs| {
ty::ProjectionTy {
item_def_id: self.item_def_id,
- substs: substs,
+ substs,
}
})
}
pub fn trait_ref(&self, tcx: TyCtxt) -> ty::TraitRef<'tcx> {
let def_id = tcx.associated_item(self.item_def_id).container.id();
ty::TraitRef {
- def_id: def_id,
+ def_id,
substs: self.substs,
}
}
pub fn trait_ref(&self, tcx: TyCtxt) -> ty::ExistentialTraitRef<'tcx> {
let def_id = tcx.associated_item(self.item_def_id).container.id();
ty::ExistentialTraitRef{
- def_id: def_id,
+ def_id,
substs: self.substs,
}
}
krate: Crate,
handler: &rustc_errors::Handler) -> ast::Crate {
ExpandAllocatorDirectives {
- handler: handler,
- sess: sess,
- resolver: resolver,
+ handler,
+ sess,
+ resolver,
found: false,
}.fold_crate(krate)
}
};
let ecfg = ExpansionConfig::default(name.to_string());
let mut f = AllocFnFactory {
- span: span,
+ span,
kind: AllocatorKind::Global,
global: item.ident,
alloc: Ident::from_str("alloc"),
cpu: target_cpu(arch),
dynamic_linking: false,
executables: true,
- pre_link_args: pre_link_args,
+ pre_link_args,
has_elf_tls: false,
.. super::apple_base::opts()
})
exe_allocation_crate: Some("alloc_system".to_string()),
panic_strategy: PanicStrategy::Abort,
linker: "ld".to_string(),
- pre_link_args: pre_link_args,
+ pre_link_args,
target_family: Some("unix".to_string()),
.. Default::default()
}
linker: "pnacl-clang".to_string(),
ar: "pnacl-ar".to_string(),
- pre_link_args: pre_link_args,
- post_link_args: post_link_args,
+ pre_link_args,
+ post_link_args,
dynamic_linking: false,
executables: true,
exe_suffix: ".pexe".to_string(),
obj_is_bitcode: true,
is_like_emscripten: true,
max_atomic_width: Some(32),
- post_link_args: post_link_args,
+ post_link_args,
target_family: Some("unix".to_string()),
.. Default::default()
};
obj_is_bitcode: true,
is_like_emscripten: true,
max_atomic_width: Some(32),
- post_link_args: post_link_args,
+ post_link_args,
target_family: Some("unix".to_string()),
.. Default::default()
};
target_family: Some("windows".to_string()),
is_like_windows: true,
allows_weak_linkage: false,
- pre_link_args: pre_link_args,
+ pre_link_args,
pre_link_objects_exe: vec![
"crt2.o".to_string(), // mingw C runtime initialization for executables
"rsbegin.o".to_string(), // Rust compiler runtime initialization, see rsbegin.rs
"dllcrt2.o".to_string(), // mingw C runtime initialization for dlls
"rsbegin.o".to_string(),
],
- late_link_args: late_link_args,
+ late_link_args,
post_link_objects: vec![
"rsend.o".to_string()
],
let def_id = bccx.tcx.hir.body_owner_def_id(body.id());
let param_env = bccx.tcx.param_env(def_id);
let mut clcx = CheckLoanCtxt {
- bccx: bccx,
- dfcx_loans: dfcx_loans,
- move_data: move_data,
- all_loans: all_loans,
+ bccx,
+ dfcx_loans,
+ move_data,
+ all_loans,
param_env,
};
euv::ExprUseVisitor::new(&mut clcx, bccx.tcx, param_env, &bccx.region_maps, bccx.tables)
};
let move_info = GatherMoveInfo {
id: move_expr_id,
- kind: kind,
- cmt: cmt,
+ kind,
+ cmt,
span_path_opt: None,
};
gather_move(bccx, move_data, move_error_collector, move_info);
let move_info = GatherMoveInfo {
id: move_pat.id,
kind: MovePat,
- cmt: cmt,
+ cmt,
span_path_opt: pat_span_path_opt,
};
debug!("guarantee_lifetime(cmt={:?}, loan_region={:?})",
cmt, loan_region);
let ctxt = GuaranteeLifetimeContext {bccx: bccx,
- item_scope: item_scope,
- span: span,
- cause: cause,
- loan_region: loan_region,
+ item_scope,
+ span,
+ cause,
+ loan_region,
cmt_original: cmt.clone()};
ctxt.check(&cmt, None)
}
let def_id = bccx.tcx.hir.body_owner_def_id(body);
let param_env = bccx.tcx.param_env(def_id);
let mut glcx = GatherLoanCtxt {
- bccx: bccx,
+ bccx,
all_loans: Vec::new(),
item_ub: region::CodeExtent::Misc(body.node_id),
move_data: MoveData::new(),
// Only mutable data can be lent as mutable.
if !cmt.mutbl.is_mutable() {
Err(bccx.report(BckError { span: borrow_span,
- cause: cause,
- cmt: cmt,
+ cause,
+ cmt,
code: err_mutbl }))
} else {
Ok(())
Loan {
index: self.all_loans.len(),
- loan_path: loan_path,
+ loan_path,
kind: req_kind,
- gen_scope: gen_scope,
- kill_scope: kill_scope,
+ gen_scope,
+ kill_scope,
span: borrow_span,
- restricted_paths: restricted_paths,
- cause: cause,
+ restricted_paths,
+ cause,
}
}
};
// let all_loans = &mut *self.all_loans; // FIXME(#5074)
// Loan {
// index: all_loans.len(),
- // loan_path: loan_path,
- // cmt: cmt,
+ // loan_path,
+ // cmt,
// mutbl: ConstMutability,
// gen_scope: borrow_id,
- // kill_scope: kill_scope,
+ // kill_scope,
// span: borrow_span,
- // restrictions: restrictions
+ // restrictions,
// }
// }
}
move_to: Option<MovePlace<'tcx>>)
-> MoveError<'tcx> {
MoveError {
- move_from: move_from,
- move_to: move_to,
+ move_from,
+ move_to,
}
}
}
loan_region: ty::Region<'tcx>)
-> RestrictionResult<'tcx> {
let ctxt = RestrictionsContext {
- bccx: bccx,
- span: span,
- cause: cause,
- loan_region: loan_region,
+ bccx,
+ span,
+ cause,
+ loan_region,
};
ctxt.restrict(cmt)
id_range,
body);
- Some(AnalysisData { all_loans: all_loans,
+ Some(AnalysisData { all_loans,
loans: loan_dfcx,
move_data:flowed_moves })
}
parent: parent_index,
first_move: InvalidMoveIndex,
first_child: InvalidMovePathIndex,
- next_sibling: next_sibling,
+ next_sibling,
});
index
self.moves.borrow_mut().push(Move {
path: path_index,
- id: id,
- kind: kind,
- next_move: next_move
+ id,
+ kind,
+ next_move,
});
}
let assignment = Assignment {
path: path_index,
id: assign_id,
- span: span,
- assignee_id: assignee_id,
+ span,
+ assignee_id,
};
if self.is_var_path(path_index) {
path: path_index,
base_path: base_path_index,
id: pattern_id,
- mode: mode,
+ mode,
};
self.variant_matches.borrow_mut().push(variant_match);
dfcx_assign.propagate(cfg, body);
FlowedMoveData {
- move_data: move_data,
- dfcx_moves: dfcx_moves,
- dfcx_assign: dfcx_assign,
+ move_data,
+ dfcx_moves,
+ dfcx_assign,
}
}
let pattern_arena = TypedArena::new();
f(MatchCheckCtxt {
- tcx: tcx,
- module: module,
+ tcx,
+ module,
pattern_arena: &pattern_arena,
byte_array_map: FxHashMap(),
})
let sub_pattern_tys = constructor_sub_pattern_tys(cx, ctor, ty);
self.0.extend(sub_pattern_tys.into_iter().map(|ty| {
Pattern {
- ty: ty,
+ ty,
span: DUMMY_SP,
kind: box PatternKind::Wild,
}
if adt.variants.len() > 1 {
PatternKind::Variant {
adt_def: adt,
- substs: substs,
+ substs,
variant_index: ctor.variant_index_for_adt(adt),
subpatterns: pats
}
};
self.0.push(Pattern {
- ty: ty,
+ ty,
span: DUMMY_SP,
kind: Box::new(pat),
});
let sub_pat_tys = constructor_sub_pattern_tys(cx, &ctor, lty);
let wild_patterns_owned: Vec<_> = sub_pat_tys.iter().map(|ty| {
Pattern {
- ty: ty,
+ ty,
span: DUMMY_SP,
kind: box PatternKind::Wild,
}
/// FIXME: this should be done by borrowck.
fn check_for_mutation_in_guard(cx: &MatchVisitor, guard: &hir::Expr) {
let mut checker = MutationChecker {
- cx: cx,
+ cx,
};
ExprUseVisitor::new(&mut checker, cx.tcx, cx.param_env, cx.region_maps, cx.tables)
.walk_expr(guard);
tcx,
param_env: cx.param_env,
tables: tcx.typeck_tables_of(def_id),
- substs: substs,
+ substs,
fn_args: Some(call_args)
};
callee_cx.eval(&body.value)?
}
PatternKind::Binding {
- mutability: mutability,
- mode: mode,
+ mutability,
+ mode,
name: ident.node,
var: id,
ty: var_ty,
Pattern {
span: pat.span,
- ty: ty,
+ ty,
kind: Box::new(kind),
}
}
_ => bug!("inappropriate type for def: {:?}", ty.sty),
};
PatternKind::Variant {
- adt_def: adt_def,
- substs: substs,
+ adt_def,
+ substs,
variant_index: adt_def.variant_index_with_id(variant_id),
- subpatterns: subpatterns,
+ subpatterns,
}
} else {
PatternKind::Leaf { subpatterns: subpatterns }
};
Pattern {
- span: span,
- ty: ty,
+ span,
+ ty,
kind: Box::new(kind),
}
}
};
Pattern {
- span: span,
+ span,
ty: pat_ty,
kind: Box::new(kind),
}
let indices = 0..self.count;
mem::forget(self);
Iter {
- indices: indices,
- store: store,
+ indices,
+ store,
}
}
}
// element. Round up to an even number of u64s.
let u64s_per_row = u64s(columns);
BitMatrix {
- columns: columns,
+ columns,
vector: vec![0; rows * u64s_per_row],
}
}
}
Dominators {
- post_order_rank: post_order_rank,
- immediate_dominators: immediate_dominators,
+ post_order_rank,
+ immediate_dominators,
}
}
}
DominatorTree {
root: root.unwrap(),
- children: children,
+ children,
}
}
}
pub fn new(start_node: usize, edges: &[(usize, usize)]) -> Self {
let mut graph = TestGraph {
num_nodes: start_node + 1,
- start_node: start_node,
+ start_node,
successors: HashMap::new(),
predecessors: HashMap::new(),
};
pub fn with_start(base_graph: G, start_node: G::Node) -> Self {
TransposedGraph {
- base_graph: base_graph,
- start_node: start_node,
+ base_graph,
+ start_node,
}
}
}
let idx = self.next_node_index();
self.nodes.push(Node {
first_edge: [INVALID_EDGE_INDEX, INVALID_EDGE_INDEX],
- data: data,
+ data,
});
idx
}
// as the next pointers
self.edges.push(Edge {
next_edge: [source_first, target_first],
- source: source,
- target: target,
- data: data,
+ source,
+ target,
+ data,
});
// adjust the firsts for each node target be the next object.
let first_edge = self.node(source).first_edge[direction.repr];
AdjacentEdges {
graph: self,
- direction: direction,
+ direction,
next: first_edge,
}
}
pub fn new(graph: &'g Graph<N, E>, direction: Direction) -> Self {
let visited = BitVector::new(graph.len_nodes());
DepthFirstTraversal {
- graph: graph,
+ graph,
stack: vec![],
- visited: visited,
- direction: direction,
+ visited,
+ direction,
}
}
let mut visited = BitVector::new(graph.len_nodes());
visited.insert(start_node.node_id());
DepthFirstTraversal {
- graph: graph,
+ graph,
stack: vec![start_node],
- visited: visited,
- direction: direction,
+ visited,
+ direction,
}
}
let backtrace = self.error_at(index);
errors.push(Error {
error: error.clone(),
- backtrace: backtrace,
+ backtrace,
});
}
}
let backtrace = self.error_at(index);
errors.push(Error {
error: err,
- backtrace: backtrace,
+ backtrace,
});
}
}
// changed.
return Outcome {
completed: vec![],
- errors: errors,
- stalled: stalled,
+ errors,
+ stalled,
};
}
Outcome {
completed: completed_obligations,
- errors: errors,
- stalled: stalled,
+ errors,
+ stalled,
}
}
impl<O> Node<O> {
fn new(parent: Option<NodeIndex>, obligation: O) -> Node<O> {
Node {
- obligation: obligation,
- parent: parent,
+ obligation,
+ parent,
state: Cell::new(NodeState::Pending),
dependents: vec![],
}
fn new(parent: K, value: K::Value, rank: u32) -> VarValue<K> {
VarValue {
parent: parent, // this is a root
- value: value,
- rank: rank,
+ value,
+ rank,
}
}
fn root(self, rank: u32, value: K::Value) -> VarValue<K> {
VarValue {
- rank: rank,
- value: value,
+ rank,
+ value,
..self
}
}
out_dir: &'a Option<PathBuf>)
-> Self {
CompileState {
- input: input,
- session: session,
+ input,
+ session,
out_dir: out_dir.as_ref().map(|s| &**s),
out_file: None,
arena: None,
trait_map: resolver.trait_map,
maybe_unused_trait_imports: resolver.maybe_unused_trait_imports,
},
- hir_forest: hir_forest,
+ hir_forest,
})
}
let mut cfgs = Vec::new();
for &(name, ref value) in sess.parse_sess.config.iter() {
let gated_cfg = GatedCfg::gate(&ast::MetaItem {
- name: name,
+ name,
node: ast::MetaItemKind::Word,
span: DUMMY_SP,
});
match *self {
PpmNormal | PpmEveryBodyLoops | PpmExpanded => {
let annotation = NoAnn {
- sess: sess,
+ sess,
hir_map: hir_map.map(|m| m.clone()),
};
f(&annotation)
PpmIdentified | PpmExpandedIdentified => {
let annotation = IdentifiedAnnotation {
- sess: sess,
+ sess,
hir_map: hir_map.map(|m| m.clone()),
};
f(&annotation)
}
PpmExpandedHygiene => {
let annotation = HygieneAnnotation {
- sess: sess,
+ sess,
};
f(&annotation)
}
match *self {
PpmNormal => {
let annotation = NoAnn {
- sess: sess,
+ sess,
hir_map: Some(hir_map.clone()),
};
f(&annotation, hir_map.forest.krate())
PpmIdentified => {
let annotation = IdentifiedAnnotation {
- sess: sess,
+ sess,
hir_map: Some(hir_map.clone()),
};
f(&annotation, hir_map.forest.krate())
|tcx, _, _, _| {
let empty_tables = ty::TypeckTables::empty(None);
let annotation = TypedAnnotation {
- tcx: tcx,
+ tcx,
tables: Cell::new(&empty_tables)
};
let _ignore = tcx.dep_graph.in_ignore();
})
.into_iter()
.collect(),
- rules: rules,
+ rules,
id: ast::DUMMY_NODE_ID,
span: syntax_pos::DUMMY_SP,
})
hir_map: &tcx.hir,
cfg: &cfg,
name: format!("node_{}", code.id()),
- labelled_edges: labelled_edges,
+ labelled_edges,
};
match code {
let lcfg = borrowck_dot::DataflowLabeller {
inner: lcfg,
- variants: variants,
+ variants,
borrowck_ctxt: &bccx,
analysis_data: &analysis_data,
};
pub fn new_with_code(level: Level, code: Option<String>, message: &str) -> Self {
Diagnostic {
- level: level,
+ level,
message: vec![(message.to_owned(), Style::NoStyle)],
- code: code,
+ code,
span: MultiSpan::new(),
children: vec![],
suggestions: vec![],
span: MultiSpan,
render_span: Option<RenderSpan>) {
let sub = SubDiagnostic {
- level: level,
+ level,
message: vec![(message.to_owned(), Style::NoStyle)],
- span: span,
- render_span: render_span,
+ span,
+ render_span,
};
self.children.push(sub);
}
span: MultiSpan,
render_span: Option<RenderSpan>) {
let sub = SubDiagnostic {
- level: level,
- message: message,
- span: span,
- render_span: render_span,
+ level,
+ message,
+ span,
+ render_span,
};
self.children.push(sub);
}
message: &str)
-> DiagnosticBuilder<'a> {
DiagnosticBuilder {
- handler: handler,
+ handler,
diagnostic: Diagnostic::new_with_code(level, code, message)
}
}
if color_config.use_color() {
let dst = Destination::from_stderr();
EmitterWriter {
- dst: dst,
+ dst,
cm: code_map,
}
} else {
}
// We don't have a line yet, create one
slot.lines.push(Line {
- line_index: line_index,
+ line_index,
annotations: vec![ann],
});
slot.lines.sort();
}
// This is the first time we're seeing the file
file_vec.push(FileWithAnnotatedLines {
- file: file,
+ file,
lines: vec![Line {
- line_index: line_index,
+ line_index,
annotations: vec![ann],
}],
multiline_depth: 0,
let lo = primary_spans.iter().map(|sp| sp.0.lo).min().unwrap();
let hi = primary_spans.iter().map(|sp| sp.0.hi).min().unwrap();
let bounding_span = Span {
- lo: lo,
- hi: hi,
+ lo,
+ hi,
ctxt: NO_EXPANSION,
};
let lines = cm.span_to_lines(bounding_span).unwrap();
Handler {
err_count: Cell::new(0),
emitter: RefCell::new(e),
- can_emit_warnings: can_emit_warnings,
- treat_err_as_bug: treat_err_as_bug,
+ can_emit_warnings,
+ treat_err_as_bug,
continue_after_error: Cell::new(true),
delayed_span_bug: RefCell::new(None),
}
// Find annotations supplied by user (if any).
let (if_this_changed, then_this_would_need) = {
- let mut visitor = IfThisChanged { tcx: tcx,
+ let mut visitor = IfThisChanged { tcx,
if_this_changed: vec![],
then_this_would_need: vec![] };
visitor.process_attrs(ast::CRATE_NODE_ID, &tcx.hir.krate().attrs);
debug!("query-nodes: {:?}", query.nodes());
let krate = tcx.hir.krate();
let mut dirty_clean_visitor = DirtyCleanVisitor {
- tcx: tcx,
+ tcx,
query: &query,
- dirty_inputs: dirty_inputs,
+ dirty_inputs,
checked_attrs: FxHashSet(),
};
krate.visit_all_item_likes(&mut dirty_clean_visitor);
let mut all_attrs = FindAllAttrs {
- tcx: tcx,
+ tcx,
attr_names: vec![ATTR_DIRTY, ATTR_CLEAN],
found_attrs: vec![],
};
tcx.dep_graph.with_ignore(||{
let krate = tcx.hir.krate();
let mut dirty_clean_visitor = DirtyCleanMetadataVisitor {
- tcx: tcx,
- prev_metadata_hashes: prev_metadata_hashes,
- current_metadata_hashes: current_metadata_hashes,
+ tcx,
+ prev_metadata_hashes,
+ current_metadata_hashes,
checked_attrs: FxHashSet(),
};
intravisit::walk_crate(&mut dirty_clean_visitor, krate);
let mut all_attrs = FindAllAttrs {
- tcx: tcx,
+ tcx,
attr_names: vec![ATTR_DIRTY_METADATA, ATTR_CLEAN_METADATA],
found_attrs: vec![],
};
incremental_hashes_map: &'a IncrementalHashesMap)
-> Self {
HashContext {
- tcx: tcx,
- incremental_hashes_map: incremental_hashes_map,
+ tcx,
+ incremental_hashes_map,
metadata_hashes: FxHashMap(),
crate_hashes: FxHashMap(),
}
{
pub(super) fn new(r: &'a mut GraphReduce<'g, N, I, O>) -> Self {
Classify {
- r: r,
+ r,
colors: vec![Color::White; r.in_graph.len_nodes()],
stack: vec![],
dag: Dag {
Predecessors {
reduced_graph: graph,
- bootstrap_outputs: bootstrap_outputs,
- hashes: hashes,
+ bootstrap_outputs,
+ hashes,
}
}
}
let work_product = WorkProduct {
cgu_name: cgu_name.to_string(),
input_hash: partition_hash,
- saved_files: saved_files,
+ saved_files,
};
sess.dep_graph.insert_work_product(&work_product_id, work_product);
::last_error().map(Err)
} else {
Some(Ok(Child {
- ptr: ptr,
+ ptr,
_data: marker::PhantomData,
}))
}
}
OptimizationDiagnostic {
- kind: kind,
+ kind,
pass_name: pass_name.expect("got a non-UTF8 pass name from LLVM"),
- function: function,
- line: line,
- column: column,
- filename: filename,
+ function,
+ line,
+ column,
+ filename,
message: message.expect("got a non-UTF8 OptimizationDiagnostic message from LLVM")
}
}
body: lazy_body,
tables: lazy_tables,
nested_bodies: lazy_nested_bodies,
- rvalue_promotable_to_static: rvalue_promotable_to_static
+ rvalue_promotable_to_static,
})
}
}
impl<'a> CrateLoader<'a> {
pub fn new(sess: &'a Session, cstore: &'a CStore, local_crate_name: &str) -> Self {
CrateLoader {
- sess: sess,
- cstore: cstore,
+ sess,
+ cstore,
next_crate_num: cstore.next_crate_num(),
local_crate_name: Symbol::intern(local_crate_name),
}
};
Some(ExternCrateInfo {
ident: i.ident.name,
- name: name,
+ name,
id: i.id,
dep_kind: if attr::contains_name(&i.attrs, "no_link") {
DepKind::UnexportedMacrosOnly
});
let mut cmeta = cstore::CrateMetadata {
- name: name,
+ name,
extern_crate: Cell::new(None),
def_path_table: Rc::new(def_path_table),
- exported_symbols: exported_symbols,
- trait_impls: trait_impls,
+ exported_symbols,
+ trait_impls,
proc_macros: crate_root.macro_derive_registrar.map(|_| {
self.load_derive_macros(&crate_root, dylib.clone().map(|p| p.0), span)
}),
root: crate_root,
blob: metadata,
cnum_map: RefCell::new(cnum_map),
- cnum: cnum,
+ cnum,
codemap_import_info: RefCell::new(vec![]),
attribute_cache: RefCell::new([Vec::new(), Vec::new()]),
dep_kind: Cell::new(dep_kind),
source: cstore::CrateSource {
- dylib: dylib,
- rlib: rlib,
- rmeta: rmeta,
+ dylib,
+ rlib,
+ rmeta,
},
// Initialize this with an empty set. The field is populated below
// after we were able to deserialize its contents.
info!("falling back to a load");
let mut locate_ctxt = locator::Context {
sess: self.sess,
- span: span,
- ident: ident,
+ span,
+ ident,
crate_name: name,
hash: hash.map(|a| &*a),
filesearch: self.sess.target_filesearch(path_kind),
target: &self.sess.target.target,
triple: &self.sess.opts.target_triple,
- root: root,
+ root,
rejected_via_hash: vec![],
rejected_via_triple: vec![],
rejected_via_kind: vec![],
let mut target_only = false;
let mut locate_ctxt = locator::Context {
sess: self.sess,
- span: span,
+ span,
ident: info.ident,
crate_name: info.name,
hash: None,
};
ExtensionCrate {
- metadata: metadata,
+ metadata,
dylib: dylib.map(|p| p.0),
- target_only: target_only,
+ target_only,
}
}
.collect();
let lib = NativeLibrary {
name: n,
- kind: kind,
- cfg: cfg,
- foreign_items: foreign_items,
+ kind,
+ cfg,
+ foreign_items,
};
register_native_lib(self.sess, self.cstore, Some(m.span), lib);
}
statically_included_foreign_items: RefCell::new(FxHashSet()),
dllimport_foreign_items: RefCell::new(FxHashSet()),
visible_parent_map: RefCell::new(FxHashMap()),
- metadata_loader: metadata_loader,
+ metadata_loader,
}
}
{
self.get_crate_data(cnum).root.plugin_registrar_fn.map(|index| DefId {
krate: cnum,
- index: index
+ index,
})
}
{
self.get_crate_data(cnum).root.macro_derive_registrar.map(|index| DefId {
krate: cnum,
- index: index
+ index,
})
}
opaque: opaque::Decoder::new(self.raw_bytes(), pos),
cdata: self.cdata(),
sess: self.sess().or(tcx.map(|tcx| tcx.sess)),
- tcx: tcx,
+ tcx,
last_filemap_index: 0,
lazy_state: LazyState::NoNode,
}
fn local_def_id(&self, index: DefIndex) -> DefId {
DefId {
krate: self.cnum,
- index: index,
+ index,
}
}
for child_index in child.children.decode((self, sess)) {
if let Some(def) = self.get_def(child_index) {
callback(def::Export {
- def: def,
+ def,
ident: Ident::with_empty_ctxt(self.item_name(child_index)),
span: self.entry(child_index).span.decode((self, sess)),
});
};
ty::AssociatedItem {
- name: name,
- kind: kind,
+ name,
+ kind,
vis: item.visibility.decode(self),
defaultness: container.defaultness(),
def_id: self.local_def_id(id),
None
},
- crate_deps: crate_deps,
- dylib_dependency_formats: dylib_dependency_formats,
- lang_items: lang_items,
- lang_items_missing: lang_items_missing,
- native_libraries: native_libraries,
- codemap: codemap,
- def_path_table: def_path_table,
- impls: impls,
- exported_symbols: exported_symbols,
- index: index,
+ crate_deps,
+ dylib_dependency_formats,
+ lang_items,
+ lang_items_missing,
+ native_libraries,
+ codemap,
+ def_path_table,
+ impls,
+ exported_symbols,
+ index,
});
let total_bytes = self.position();
};
FnData {
constness: hir::Constness::NotConst,
- arg_names: arg_names,
+ arg_names,
sig: self.lazy(&tcx.fn_sig(def_id)),
}
} else {
bug!()
};
EntryKind::Method(self.lazy(&MethodData {
- fn_data: fn_data,
- container: container,
+ fn_data,
+ container,
has_self: trait_item.method_has_self_argument,
}))
}
};
Entry {
- kind: kind,
+ kind,
visibility: self.lazy(&trait_item.vis),
span: self.lazy(&ast_item.span),
attributes: self.encode_attributes(&ast_item.attrs),
bug!()
};
EntryKind::Method(self.lazy(&MethodData {
- fn_data: fn_data,
- container: container,
+ fn_data,
+ container,
has_self: impl_item.method_has_self_argument,
}))
}
};
Entry {
- kind: kind,
+ kind,
visibility: self.lazy(&impl_item.vis),
span: self.lazy(&ast_item.span),
attributes: self.encode_attributes(&ast_item.attrs),
}
hir::ItemFn(_, _, constness, .., body) => {
let data = FnData {
- constness: constness,
+ constness,
arg_names: self.encode_fn_arg_names_for_body(body),
sig: self.lazy(&tcx.fn_sig(def_id)),
};
EntryKind::Struct(self.lazy(&VariantData {
ctor_kind: variant.ctor_kind,
discr: variant.discr,
- struct_ctor: struct_ctor,
+ struct_ctor,
ctor_sig: None,
}), repr_options)
}
});
let data = ImplData {
- polarity: polarity,
- defaultness: defaultness,
+ polarity,
+ defaultness,
parent_impl: parent,
- coerce_unsized_info: coerce_unsized_info,
+ coerce_unsized_info,
trait_ref: trait_ref.map(|trait_ref| self.lazy(&trait_ref)),
};
};
Entry {
- kind: kind,
+ kind,
visibility: self.lazy(&ty::Visibility::from_hir(&item.vis, item.id, tcx)),
span: self.lazy(&item.span),
attributes: self.encode_attributes(&item.attrs),
debug!("IsolatedEncoder::encode_impls()");
let tcx = self.tcx;
let mut visitor = ImplVisitor {
- tcx: tcx,
+ tcx,
impls: FxHashMap(),
};
tcx.hir.krate().visit_all_item_likes(&mut visitor);
};
Entry {
- kind: kind,
+ kind,
visibility: self.lazy(&ty::Visibility::from_hir(&nitem.vis, nitem.id, tcx)),
span: self.lazy(&nitem.span),
attributes: self.encode_attributes(&nitem.attrs),
let (root, metadata_hashes) = {
let mut ecx = EncodeContext {
opaque: opaque::Encoder::new(&mut cursor),
- tcx: tcx,
- link_meta: link_meta,
- exported_symbols: exported_symbols,
+ tcx,
+ link_meta,
+ exported_symbols,
lazy_state: LazyState::NoNode,
type_shorthands: Default::default(),
predicate_shorthands: Default::default(),
metadata_hashes: EncodedMetadataHashes::new(),
- compute_ich: compute_ich,
+ compute_ich,
};
// Encode the rustc version string in a predictable location.
pub fn new(ecx: &'a mut EncodeContext<'b, 'tcx>) -> Self {
IndexBuilder {
items: Index::new(ecx.tcx.hir.definitions().def_index_counts_lo_hi()),
- ecx: ecx,
+ ecx,
}
}
if let Some(hash) = fingerprint {
ecx.metadata_hashes.hashes.push(EncodedMetadataHash {
def_index: id.index,
- hash: hash,
+ hash,
});
}
let tcx = ecx.tcx;
let compute_ich = ecx.compute_ich;
IsolatedEncoder {
- tcx: tcx,
- ecx: ecx,
+ tcx,
+ ecx,
hcx: if compute_ich {
// We are always hashing spans for things in metadata because
// don't know if a downstream crate will use them or not.
if let Some((h, m)) = slot {
libraries.insert(h,
Library {
- dylib: dylib,
- rlib: rlib,
- rmeta: rmeta,
+ dylib,
+ rlib,
+ rmeta,
metadata: m,
});
}
match slot {
Some((_, metadata)) => {
Some(Library {
- dylib: dylib,
- rlib: rlib,
- rmeta: rmeta,
- metadata: metadata,
+ dylib,
+ rlib,
+ rmeta,
+ metadata,
})
}
None => None,
impl<T> Lazy<T> {
pub fn with_position(position: usize) -> Lazy<T> {
Lazy {
- position: position,
+ position,
_marker: PhantomData,
}
}
pub fn with_position_and_length(position: usize, len: usize) -> LazySeq<T> {
LazySeq {
- len: len,
- position: position,
+ len,
+ position,
_marker: PhantomData,
}
}
impl<T> Tracked<T> {
pub fn new(state: T) -> Tracked<T> {
Tracked {
- state: state,
+ state,
}
}
source_info: SourceInfo,
extent: CodeExtent) {
self.push(block, Statement {
- source_info: source_info,
+ source_info,
kind: StatementKind::EndRegion(extent),
});
}
lvalue: &Lvalue<'tcx>,
rvalue: Rvalue<'tcx>) {
self.push(block, Statement {
- source_info: source_info,
+ source_info,
kind: StatementKind::Assign(lvalue.clone(), rvalue)
});
}
block,
self.block_data(block));
self.block_data_mut(block).terminator = Some(Terminator {
- source_info: source_info,
- kind: kind,
+ source_info,
+ kind,
});
}
}
if let Some(scope) = scope {
// schedule a shallow free of that memory, lest we unwind:
this.cfg.push(block, Statement {
- source_info: source_info,
+ source_info,
kind: StatementKind::StorageLive(result.clone())
});
this.schedule_drop(expr_span, scope, &result, value.ty);
if !expr_ty.is_never() && temp_lifetime.is_some() {
this.cfg.push(block, Statement {
- source_info: source_info,
+ source_info,
kind: StatementKind::StorageLive(temp.clone())
});
}
let cleanup = this.diverge_cleanup();
this.cfg.terminate(block, source_info, TerminatorKind::Call {
func: fun,
- args: args,
- cleanup: cleanup,
+ args,
+ cleanup,
destination: if diverges {
None
} else {
unpack!(block = this.as_local_operand(block, input))
}).collect();
this.cfg.push(block, Statement {
- source_info: source_info,
+ source_info,
kind: StatementKind::InlineAsm {
asm: box asm.clone(),
- outputs: outputs,
- inputs: inputs
+ outputs,
+ inputs,
},
});
block.unit()
span: pattern.span,
match_pairs: vec![MatchPair::new(discriminant_lvalue.clone(), pattern)],
bindings: vec![],
- guard: guard,
- arm_index: arm_index,
+ guard,
+ arm_index,
}
})
.collect();
var_scope = Some(this.new_visibility_scope(scope_span));
}
let source_info = SourceInfo {
- span: span,
+ span,
scope: var_scope.unwrap()
};
this.declare_binding(source_info, mutability, name, var, ty);
let local_id = self.var_indices[&var];
let source_info = self.source_info(span);
self.cfg.push(block, Statement {
- source_info: source_info,
+ source_info,
kind: StatementKind::StorageLive(Lvalue::Local(local_id))
});
Lvalue::Local(local_id)
var_id, name, var_ty, source_info);
let var = self.local_decls.push(LocalDecl::<'tcx> {
- mutability: mutability,
+ mutability,
ty: var_ty.clone(),
name: Some(name),
- source_info: source_info,
+ source_info,
is_user_variable: true,
});
self.var_indices.insert(var_id, var);
PatternKind::Binding { name, mutability, mode, var, ty, ref subpattern } => {
candidate.bindings.push(Binding {
- name: name,
- mutability: mutability,
+ name,
+ mutability,
span: match_pair.pattern.span,
source: match_pair.lvalue.clone(),
var_id: var,
discr: Operand::Consume(discr),
switch_ty: discr_ty,
values: From::from(values),
- targets: targets
+ targets,
});
target_blocks
}
).collect();
(targets.clone(), TerminatorKind::SwitchInt {
discr: Operand::Consume(lvalue.clone()),
- switch_ty: switch_ty,
+ switch_ty,
values: From::from(values),
- targets: targets,
+ targets,
})
};
self.cfg.terminate(block, source_info, terminator);
}),
args: vec![val, expect],
destination: Some((eq_result.clone(), eq_block)),
- cleanup: cleanup,
+ cleanup,
});
// check the result
.map(|(idx, subpattern)| {
let elem = ProjectionElem::ConstantIndex {
offset: idx as u32,
- min_length: min_length,
+ min_length,
from_end: false,
};
let lvalue = lvalue.clone().elem(elem);
.map(|(idx, subpattern)| {
let elem = ProjectionElem::ConstantIndex {
offset: (idx+1) as u32,
- min_length: min_length,
+ min_length,
from_end: true,
};
let lvalue = lvalue.clone().elem(elem);
impl<'pat, 'tcx> MatchPair<'pat, 'tcx> {
pub fn new(lvalue: Lvalue<'tcx>, pattern: &'pat Pattern<'tcx>) -> MatchPair<'pat, 'tcx> {
MatchPair {
- lvalue: lvalue,
- pattern: pattern,
+ lvalue,
+ pattern,
slice_len_checked: false,
}
}
literal: Literal<'tcx>)
-> Operand<'tcx> {
let constant = box Constant {
- span: span,
- ty: ty,
- literal: literal,
+ span,
+ ty,
+ literal,
};
Operand::Constant(constant)
}
// Convert the Mir to global types.
let mut globalizer = GlobalizeMir {
- tcx: tcx,
+ tcx,
span: mir.span
};
globalizer.visit_mir(&mut mir);
// Convert the Mir to global types.
let tcx = infcx.tcx.global_tcx();
let mut globalizer = GlobalizeMir {
- tcx: tcx,
+ tcx,
span: mir.span
};
globalizer.visit_mir(&mut mir);
};
let mut decl = UpvarDecl {
debug_name: keywords::Invalid.name(),
- by_ref: by_ref
+ by_ref,
};
if let Some(hir::map::NodeLocal(pat)) = tcx.hir.find(var_node_id) {
if let hir::PatKind::Binding(_, _, ref ident, _) = pat.node {
return_ty: Ty<'tcx>)
-> Builder<'a, 'gcx, 'tcx> {
let mut builder = Builder {
- hir: hir,
+ hir,
cfg: CFG { basic_blocks: IndexVec::new() },
fn_span: span,
- arg_count: arg_count,
+ arg_count,
scopes: vec![],
visibility_scopes: IndexVec::new(),
visibility_scope: ARGUMENT_VISIBILITY_SCOPE,
self.local_decls.push(LocalDecl {
mutability: Mutability::Not,
- ty: ty,
+ ty,
source_info: SourceInfo {
scope: ARGUMENT_VISIBILITY_SCOPE,
span: pattern.map_or(self.fn_span, |pat| pat.span)
},
- name: name,
+ name,
is_user_variable: false,
});
}
/// Given a span and this scope's visibility scope, make a SourceInfo.
fn source_info(&self, span: Span) -> SourceInfo {
SourceInfo {
- span: span,
+ span,
scope: self.visibility_scope
}
}
{
let extent = self.topmost_scope();
let scope = BreakableScope {
- extent: extent,
+ extent,
continue_block: loop_block,
- break_block: break_block,
- break_destination: break_destination,
+ break_block,
+ break_destination,
};
self.breakable_scopes.push(scope);
let res = f(self);
let parent = self.visibility_scope;
let scope = VisibilityScope::new(self.visibility_scopes.len());
self.visibility_scopes.push(VisibilityScopeData {
- span: span,
+ span,
parent_scope: Some(parent),
});
scope
/// Given a span and the current visibility scope, make a SourceInfo.
pub fn source_info(&self, span: Span) -> SourceInfo {
SourceInfo {
- span: span,
+ span,
scope: self.visibility_scope
}
}
let diverge_target = self.diverge_cleanup();
self.cfg.terminate(block, source_info,
TerminatorKind::Drop {
- location: location,
+ location,
target: next_target,
unwind: diverge_target,
});
let diverge_target = self.diverge_cleanup();
self.cfg.terminate(block, source_info,
TerminatorKind::DropAndReplace {
- location: location,
- value: value,
+ location,
+ value,
target: next_target,
unwind: diverge_target,
});
self.cfg.terminate(block, source_info,
TerminatorKind::Assert {
- cond: cond,
- expected: expected,
- msg: msg,
+ cond,
+ expected,
+ msg,
target: success_block,
- cleanup: cleanup
+ cleanup,
});
success_block
match drop_data.location {
Lvalue::Local(index) if index.index() > arg_count => {
cfg.push(block, Statement {
- source_info: source_info,
+ source_info,
kind: StatementKind::StorageDead(drop_data.location.clone())
});
}
let visibility_scope = scope.visibility_scope;
let source_info = |span| SourceInfo {
- span: span,
+ span,
scope: visibility_scope
};
cfg.terminate(block, source_info(drop_data.span),
TerminatorKind::Drop {
location: drop_data.location.clone(),
- target: target,
+ target,
unwind: None
});
*cached_block = Some(block);
name_found(tcx.sess, attributes, "borrowck_graphviz_postflow");
let mut mbcx = DataflowBuilder {
- node_id: node_id,
- print_preflow_to: print_preflow_to,
- print_postflow_to: print_postflow_to,
+ node_id,
+ print_preflow_to,
+ print_postflow_to,
flow_state: DataflowAnalysis::new(tcx, mir, dead_unwinds, bd),
};
});
DataflowAnalysis {
- mir: mir,
- dead_unwinds: dead_unwinds,
+ mir,
+ dead_unwinds,
flow_state: DataflowState {
sets: AllSets {
- bits_per_block: bits_per_block,
- words_per_block: words_per_block,
+ bits_per_block,
+ words_per_block,
gen_sets: zeroes.clone(),
kill_sets: zeroes,
on_entry_sets: on_entry,
ProjectionElem::Subslice { from: from, to: to },
ProjectionElem::ConstantIndex {offset,min_length,from_end} =>
ProjectionElem::ConstantIndex {
- offset: offset,
- min_length: min_length,
- from_end: from_end
+ offset,
+ min_length,
+ from_end,
},
ProjectionElem::Downcast(a, u) =>
ProjectionElem::Downcast(a.clone(), u.clone()),
let mut path_map = IndexVec::new();
MoveDataBuilder {
- mir: mir,
- tcx: tcx,
- param_env: param_env,
+ mir,
+ tcx,
+ param_env,
data: MoveData {
moves: IndexVec::new(),
loc_map: LocationMap::new(mir),
}).collect(),
projections: FxHashMap(),
},
- move_paths: move_paths,
- path_map: path_map,
+ move_paths,
+ path_map,
}
}
}
let move_path = move_paths.push(MovePath {
next_sibling: None,
first_child: None,
- parent: parent,
- lvalue: lvalue
+ parent,
+ lvalue,
});
if let Some(parent) = parent {
Block {
targeted_by_break: self.targeted_by_break,
extent: CodeExtent::Misc(self.id),
- opt_destruction_extent: opt_destruction_extent,
+ opt_destruction_extent,
span: self.span,
- stmts: stmts,
+ stmts,
expr: self.expr.to_ref(),
}
}
kind: StmtKind::Let {
remainder_scope: remainder_extent,
init_scope: CodeExtent::Misc(id),
- pattern: pattern,
+ pattern,
initializer: local.init.to_ref(),
},
opt_destruction_extent: opt_dxn_ext,
let temp_lifetime = cx.region_maps.temporary_scope(block.id);
let expr = Expr {
ty: block_ty,
- temp_lifetime: temp_lifetime,
+ temp_lifetime,
span: block.span,
kind: ExprKind::Block { body: block },
};
// Next, wrap this up in the expr's scope.
expr = Expr {
- temp_lifetime: temp_lifetime,
+ temp_lifetime,
ty: expr.ty,
span: self.span,
kind: ExprKind::Scope {
// Finally, create a destruction scope, if any.
if let Some(extent) = cx.region_maps.opt_destruction_extent(self.id) {
expr = Expr {
- temp_lifetime: temp_lifetime,
+ temp_lifetime,
ty: expr.ty,
span: self.span,
kind: ExprKind::Scope {
- extent: extent,
+ extent,
value: expr.to_ref(),
},
};
}),
span,
kind: ExprKind::Borrow {
- region: region,
+ region,
borrow_kind: to_borrow_kind(m),
arg: expr.to_ref(),
},
ExprKind::Call {
ty: expr.ty,
fun: expr.to_ref(),
- args: args,
+ args,
}
}
let arg_tys = args.iter().map(|e| cx.tables().expr_ty_adjusted(e));
let tupled_args = Expr {
ty: cx.tcx.mk_tup(arg_tys, false),
- temp_lifetime: temp_lifetime,
+ temp_lifetime,
span: expr.span,
kind: ExprKind::Tuple { fields: args.iter().map(ToRef::to_ref).collect() },
};
})
.collect();
ExprKind::Adt {
- adt_def: adt_def,
- substs: substs,
+ adt_def,
+ substs,
variant_index: index,
fields: field_refs,
base: None,
_ => span_bug!(expr.span, "type of & not region"),
};
ExprKind::Borrow {
- region: region,
+ region,
borrow_kind: to_borrow_kind(mutbl),
arg: expr.to_ref(),
}
_ => {
let op = bin_op(op.node);
ExprKind::Binary {
- op: op,
+ op,
lhs: lhs.to_ref(),
rhs: rhs.to_ref(),
}
ExprKind::Adt {
adt_def: adt,
variant_index: 0,
- substs: substs,
+ substs,
fields: field_refs,
base: base.as_ref().map(|base| {
FruInfo {
ExprKind::Adt {
adt_def: adt,
variant_index: index,
- substs: substs,
+ substs,
fields: field_refs,
base: None,
}
});
ExprKind::Closure {
closure_id: def_id,
- substs: substs,
- upvars: upvars,
+ substs,
+ upvars,
}
}
hir::ExprInlineAsm(ref asm, ref outputs, ref inputs) => {
ExprKind::InlineAsm {
- asm: asm,
+ asm,
outputs: outputs.to_ref(),
inputs: inputs.to_ref(),
}
ExprKind::Repeat {
value: v.to_ref(),
- count: count,
+ count,
}
}
hir::ExprRet(ref v) => ExprKind::Return { value: v.to_ref() },
};
Expr {
- temp_lifetime: temp_lifetime,
+ temp_lifetime,
ty: expr_ty,
span: expr.span,
- kind: kind,
+ kind,
}
}
cx.tables().node_substs(expr.hir_id))
});
Expr {
- temp_lifetime: temp_lifetime,
+ temp_lifetime,
ty: cx.tcx().mk_fn_def(def_id, substs),
span: expr.span,
kind: ExprKind::Literal {
Def::Const(def_id) |
Def::AssociatedConst(def_id) => ExprKind::Literal {
literal: Literal::Item {
- def_id: def_id,
- substs: substs,
+ def_id,
+ substs,
},
},
// We return a completely different ExprKind here to account for this special case.
ty::TyAdt(adt_def, substs) => {
ExprKind::Adt {
- adt_def: adt_def,
+ adt_def,
variant_index: adt_def.variant_index_with_id(def_id),
- substs: substs,
+ substs,
fields: vec![],
base: None,
}
});
Expr {
ty: closure_ty,
- temp_lifetime: temp_lifetime,
+ temp_lifetime,
span: expr.span,
kind: ExprKind::Deref {
arg: Expr {
ty: ref_closure_ty,
- temp_lifetime: temp_lifetime,
+ temp_lifetime,
span: expr.span,
kind: ExprKind::SelfRef,
}
});
Expr {
ty: closure_ty,
- temp_lifetime: temp_lifetime,
+ temp_lifetime,
span: expr.span,
kind: ExprKind::Deref {
arg: Expr {
ty: ref_closure_ty,
- temp_lifetime: temp_lifetime,
+ temp_lifetime,
span: expr.span,
kind: ExprKind::SelfRef,
}.to_ref(),
ty::ClosureKind::FnOnce => {
Expr {
ty: closure_ty,
- temp_lifetime: temp_lifetime,
+ temp_lifetime,
span: expr.span,
kind: ExprKind::SelfRef,
}
ty::UpvarCapture::ByRef(borrow) => {
ExprKind::Deref {
arg: Expr {
- temp_lifetime: temp_lifetime,
+ temp_lifetime,
ty: cx.tcx.mk_ref(borrow.region,
ty::TypeAndMut {
ty: var_ty,
let temp_lifetime = cx.region_maps.temporary_scope(expr.id);
let fun = method_callee(cx, expr, custom_callee);
let ref_expr = Expr {
- temp_lifetime: temp_lifetime,
+ temp_lifetime,
ty: ref_ty,
span: expr.span,
kind: ExprKind::Call {
let var_ty = cx.tables()
.node_id_to_type(cx.tcx.hir.node_to_hir_id(var_node_id));
let captured_var = Expr {
- temp_lifetime: temp_lifetime,
+ temp_lifetime,
ty: var_ty,
span: closure_expr.span,
kind: convert_var(cx, closure_expr, freevar.def),
ty::BorrowKind::MutBorrow => BorrowKind::Mut,
};
Expr {
- temp_lifetime: temp_lifetime,
+ temp_lifetime,
ty: freevar_ty,
span: closure_expr.span,
kind: ExprKind::Borrow {
region: upvar_borrow.region,
- borrow_kind: borrow_kind,
+ borrow_kind,
arg: captured_var.to_ref(),
},
}.to_ref()
span
));
statements.push(Statement {
- source_info: source_info,
+ source_info,
kind: StatementKind::Assign(
Lvalue::Local(ref_rcvr),
Rvalue::Ref(tcx.types.re_erased, BorrowKind::Mut, rcvr_l)
CallKind::Indirect => (rcvr, vec![]),
CallKind::Direct(def_id) => (
Operand::Constant(box Constant {
- span: span,
+ span,
ty: tcx.type_of(def_id),
literal: Literal::Value {
value: ConstVal::Function(def_id,
// BB #0
block(&mut blocks, statements, TerminatorKind::Call {
func: callee,
- args: args,
+ args,
destination: Some((Lvalue::Local(RETURN_POINTER),
BasicBlock::new(1))),
cleanup: if let Adjustment::RefMut = rcvr_adjustment {
let local_decls = local_decls_for_sig(&sig, span);
let source_info = SourceInfo {
- span: span,
+ span,
scope: ARGUMENT_VISIBILITY_SCOPE
};
// return = ADT(arg0, arg1, ...); return
let start_block = BasicBlockData {
statements: vec![Statement {
- source_info: source_info,
+ source_info,
kind: StatementKind::Assign(
Lvalue::Local(RETURN_POINTER),
Rvalue::Aggregate(
)
}],
terminator: Some(Terminator {
- source_info: source_info,
+ source_info,
kind: TerminatorKind::Return,
}),
is_cleanup: false
statements: vec![],
is_cleanup: block.is_cleanup,
terminator: Some(Terminator {
- source_info: source_info,
+ source_info,
kind: TerminatorKind::Goto { target: *destination }
})
};
fn new(dest_local: Local, constant: Constant<'tcx>)
-> ConstantPropagationVisitor<'tcx> {
ConstantPropagationVisitor {
- dest_local: dest_local,
- constant: constant,
+ dest_local,
+ constant,
uses_replaced: 0,
}
}
let elaborate_patch = {
let mir = &*mir;
let env = MoveDataParamEnv {
- move_data: move_data,
- param_env: param_env
+ move_data,
+ param_env,
};
let dead_unwinds = find_dead_unwinds(tcx, mir, id, &env);
let flow_inits =
|bd, p| &bd.move_data().move_paths[p]);
ElaborateDropsCtxt {
- tcx: tcx,
- mir: mir,
+ tcx,
+ mir,
env: &env,
- flow_inits: flow_inits,
- flow_uninits: flow_uninits,
+ flow_inits,
+ flow_uninits,
drop_flags: FxHashMap(),
patch: MirPatch::new(mir),
}.elaborate()
debug!("elaborate_drop_and_replace({:?}) - untracked {:?}", terminator, parent);
self.patch.patch_terminator(bb, TerminatorKind::Drop {
location: location.clone(),
- target: target,
+ target,
unwind: Some(unwind)
});
}
fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
Rvalue::Use(Operand::Constant(Box::new(Constant {
- span: span,
+ span,
ty: self.tcx.types.bool,
literal: Literal::Value { value: ConstVal::Bool(val) }
})))
impl<'a, 'tcx> EraseRegionsVisitor<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
EraseRegionsVisitor {
- tcx: tcx,
+ tcx,
in_validation_statement: false,
}
}
if let ty::TyFnDef(callee_def_id, substs) = f.ty.sty {
callsites.push_back(CallSite {
callee: callee_def_id,
- substs: substs,
- bb: bb,
+ substs,
+ bb,
location: terminator.source_info
});
}
if callsite.callee != callee_def_id {
callsites.push_back(CallSite {
callee: callee_def_id,
- substs: substs,
- bb: bb,
+ substs,
+ bb,
location: terminator.source_info
});
}
let mut integrator = Integrator {
block_idx: bb_len,
args: &args,
- local_map: local_map,
- scope_map: scope_map,
- promoted_map: promoted_map,
+ local_map,
+ scope_map,
+ promoted_map,
_callsite: callsite,
destination: dest,
- return_block: return_block,
+ return_block,
cleanup_block: cleanup,
in_cleanup_block: false
};
kind => {
caller_mir[callsite.bb].terminator = Some(Terminator {
source_info: terminator.source_info,
- kind: kind
+ kind,
});
false
}
impl<'b, 'a, 'tcx:'b> OptimizationFinder<'b, 'a, 'tcx> {
fn new(mir: &'b Mir<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> OptimizationFinder<'b, 'a, 'tcx> {
OptimizationFinder {
- mir: mir,
- tcx: tcx,
+ mir,
+ tcx,
optimizations: OptimizationList::default(),
}
}
}
}
tcx.hir.krate().visit_all_item_likes(&mut GatherCtors {
- tcx: tcx,
+ tcx,
set: &mut set,
}.as_deep_visitor());
impl<'a, 'gcx, 'tcx> NLLVisitor<'a, 'gcx, 'tcx> {
pub fn new(infcx: InferCtxt<'a, 'gcx, 'tcx>) -> Self {
NLLVisitor {
- infcx: infcx,
+ infcx,
lookup_map: HashMap::new(),
}
}
LvalueContext::Store |
LvalueContext::Call => {
*temp = TempState::Defined {
- location: location,
+ location,
uses: 0
};
return;
let mut collector = TempCollector {
temps: IndexVec::from_elem(TempState::Undefined, &mir.local_decls),
span: mir.span,
- mir: mir,
+ mir,
};
for (bb, data) in rpo {
collector.visit_basic_block_data(bb, data);
statements: vec![],
terminator: Some(Terminator {
source_info: SourceInfo {
- span: span,
+ span,
scope: ARGUMENT_VISIBILITY_SCOPE
},
kind: TerminatorKind::Return
let data = &mut self.promoted[last];
data.statements.push(Statement {
source_info: SourceInfo {
- span: span,
+ span,
scope: ARGUMENT_VISIBILITY_SCOPE
},
kind: StatementKind::Assign(Lvalue::Local(dest), rvalue)
Terminator {
source_info: terminator.source_info,
kind: mem::replace(&mut terminator.kind, TerminatorKind::Goto {
- target: target
+ target,
})
}
};
*self.promoted[last].terminator_mut() = Terminator {
kind: TerminatorKind::Call {
- func: func,
- args: args,
+ func,
+ args,
cleanup: None,
destination: Some((Lvalue::Local(new_temp), new_target))
},
fn promote_candidate(mut self, candidate: Candidate) {
let span = self.promoted.span;
let new_operand = Operand::Constant(box Constant {
- span: span,
+ span,
ty: self.promoted.return_ty,
literal: Literal::Promoted {
index: Promoted::new(self.source.promoted.len())
promoted: Mir::new(
IndexVec::new(),
Some(VisibilityScopeData {
- span: span,
+ span,
parent_scope: None
}).into_iter().collect(),
IndexVec::new(),
TerminatorKind::Drop { location: Lvalue::Local(index), target, .. } => {
if promoted(index) {
terminator.kind = TerminatorKind::Goto {
- target: target
+ target,
};
}
}
let temps = promote_consts::collect_temps(mir, &mut rpo);
rpo.reset();
Qualifier {
- mode: mode,
+ mode,
span: mir.span,
- def_id: def_id,
- mir: mir,
- rpo: rpo,
- tcx: tcx,
- param_env: param_env,
+ def_id,
+ mir,
+ rpo,
+ tcx,
+ param_env,
temp_qualif: IndexVec::from_elem(None, &mir.local_decls),
return_qualif: None,
qualif: Qualif::empty(),
let basic_blocks = mir.basic_blocks_mut();
CfgSimplifier {
- basic_blocks: basic_blocks,
- pred_count: pred_count
+ basic_blocks,
+ pred_count,
}
}
impl<'a, 'b, 'gcx, 'tcx> TypeVerifier<'a, 'b, 'gcx, 'tcx> {
fn new(cx: &'a mut TypeChecker<'b, 'gcx, 'tcx>, mir: &'a Mir<'tcx>) -> Self {
TypeVerifier {
- cx: cx,
- mir: mir,
+ cx,
+ mir,
last_span: mir.span,
errors_reported: false
}
}
} else {
LvalueTy::Downcast {
- adt_def: adt_def,
- substs: substs,
+ adt_def,
+ substs,
variant_index: index
}
}
param_env: ty::ParamEnv<'gcx>)
-> Self {
TypeChecker {
- infcx: infcx,
+ infcx,
fulfillment_cx: traits::FulfillmentContext::new(),
last_span: DUMMY_SP,
body_id,
location: Location) {
if let Some(ref mut info) = self.lvalue_mut_info(lvalue) {
info.defs_and_uses.push(Use {
- context: context,
- location: location,
+ context,
+ location,
})
}
self.super_lvalue(lvalue, context, location)
-> MutateUseVisitor<'tcx, F>
where F: for<'a> FnMut(&'a mut Lvalue<'tcx>, LvalueContext<'tcx>, Location) {
MutateUseVisitor {
- query: query,
- callback: callback,
+ query,
+ callback,
phantom: PhantomData,
}
}
let substs = tcx.mk_substs(iter::once(Kind::from(ty)));
let ref_ty = tcx.mk_ref(tcx.types.re_erased, ty::TypeAndMut {
- ty: ty,
+ ty,
mutbl: hir::Mutability::MutMutable
});
let ref_lvalue = self.new_temp(ref_ty);
fn drop_block<'a>(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
let block = TerminatorKind::Drop {
location: self.lvalue.clone(),
- target: target,
+ target,
unwind: unwind.into_option()
};
self.new_block(unwind, block)
);
mir[loc.block].statements.insert(
loc.statement_index, Statement {
- source_info: source_info,
+ source_info,
kind: stmt
});
delta += 1;
pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
tcx.hir.krate().visit_all_item_likes(&mut CheckCrateVisitor {
- tcx: tcx,
+ tcx,
tables: &ty::TypeckTables::empty(None),
in_fn: false,
promotable: false,
pub fn check_crate(sess: &Session, map: &Map) {
let krate = map.krate();
krate.visit_all_item_likes(&mut CheckLoopVisitor {
- sess: sess,
+ sess,
hir_map: map,
cx: Normal,
}.as_deep_visitor());
-> Result<(), ErrorReported>
{
let mut visitor = CheckCrateVisitor {
- sess: sess,
- hir_map: hir_map,
+ sess,
+ hir_map,
discriminant_map: NodeMap(),
detected_recursive_ids: NodeSet(),
};
impl<'a> PluginLoader<'a> {
fn new(sess: &'a Session, cstore: &'a CStore, crate_name: &str) -> Self {
PluginLoader {
- sess: sess,
+ sess,
reader: CrateLoader::new(sess, cstore, crate_name),
plugins: vec![],
}
let symbol = self.sess.generate_plugin_registrar_symbol(disambiguator, index);
let fun = self.dylink_registrar(span, lib, symbol);
self.plugins.push(PluginRegistrar {
- fun: fun,
- args: args,
+ fun,
+ args,
});
}
}
#[doc(hidden)]
pub fn new(sess: &'a Session, krate_span: Span) -> Registry<'a> {
Registry {
- sess: sess,
+ sess,
args_hidden: None,
- krate_span: krate_span,
+ krate_span,
syntax_exts: vec![],
early_lint_passes: vec![],
late_lint_passes: vec![],
item_def_id: self.tcx.hir.local_def_id(item_id),
span: self.tcx.hir.span(item_id),
min_visibility: ty::Visibility::Public,
- required_visibility: required_visibility,
+ required_visibility,
has_pub_restricted: self.has_pub_restricted,
- has_old_errors: has_old_errors,
+ has_old_errors,
}
}
}
// Check privacy of names not checked in previous compilation stages.
let mut visitor = NamePrivacyVisitor {
- tcx: tcx,
+ tcx,
tables: &empty_tables,
current_item: CRATE_NODE_ID,
empty_tables: &empty_tables,
// Check privacy of explicitly written types and traits as well as
// inferred types of expressions and patterns.
let mut visitor = TypePrivacyVisitor {
- tcx: tcx,
+ tcx,
tables: &empty_tables,
current_item: DefId::local(CRATE_DEF_INDEX),
span: krate.span,
// Build up a set of all exported items in the AST. This is a set of all
// items which are reachable from external crates based on visibility.
let mut visitor = EmbargoVisitor {
- tcx: tcx,
+ tcx,
access_levels: Default::default(),
prev_level: Some(AccessLevel::Public),
changed: false,
{
let mut visitor = ObsoleteVisiblePrivateTypesVisitor {
- tcx: tcx,
+ tcx,
access_levels: &visitor.access_levels,
in_variant: false,
old_error_set: NodeSet(),
let has_pub_restricted = {
let mut pub_restricted_visitor = PubRestrictedVisitor {
- tcx: tcx,
+ tcx,
has_pub_restricted: false
};
intravisit::walk_crate(&mut pub_restricted_visitor, krate);
// Check for private types and traits in public interfaces
let mut visitor = PrivateItemsInPublicInterfacesVisitor {
- tcx: tcx,
- has_pub_restricted: has_pub_restricted,
+ tcx,
+ has_pub_restricted,
old_error_set: &visitor.old_error_set,
inner_visibility: ty::Visibility::Public,
};
let subclass = SingleImport {
target: binding,
- source: source,
+ source,
result: self.per_ns(|_, _| Cell::new(Err(Undetermined))),
type_ns_only: false,
};
target: rename,
source: ident.node,
result: self.per_ns(|_, _| Cell::new(Err(Undetermined))),
- type_ns_only: type_ns_only,
+ type_ns_only,
};
let id = source_item.node.id;
self.add_import_directive(
}
ViewPathGlob(_) => {
let subclass = GlobImport {
- is_prelude: is_prelude,
+ is_prelude,
max_vis: Cell::new(ty::Visibility::Invisible),
};
self.add_import_directive(
(module, ty::Visibility::Public, sp, expansion).to_name_binding(self.arenas);
let directive = self.arenas.alloc_import_directive(ImportDirective {
id: item.id,
- parent: parent,
+ parent,
imported_module: Cell::new(Some(module)),
subclass: ImportDirectiveSubclass::ExternCrate,
span: item.span,
module_path: Vec::new(),
vis: Cell::new(vis),
- expansion: expansion,
+ expansion,
used: Cell::new(used),
});
self.potentially_unused_imports.push(directive);
parent: graph_root,
imported_module: Cell::new(Some(module)),
subclass: ImportDirectiveSubclass::MacroUse,
- span: span,
+ span,
module_path: Vec::new(),
vis: Cell::new(ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX))),
- expansion: expansion,
+ expansion,
used: Cell::new(false),
});
}
let mut visitor = UnusedImportCheckVisitor {
- resolver: resolver,
+ resolver,
unused_imports: NodeMap(),
};
visit::walk_crate(&mut visitor, krate);
fn new(kind: RibKind<'a>) -> Rib<'a> {
Rib {
bindings: FxHashMap(),
- kind: kind,
+ kind,
}
}
}
expansion: Mark,
span: Span) -> Self {
ModuleData {
- parent: parent,
- kind: kind,
- normal_ancestor_id: normal_ancestor_id,
+ parent,
+ kind,
+ normal_ancestor_id,
resolutions: RefCell::new(FxHashMap()),
legacy_macro_resolutions: RefCell::new(Vec::new()),
macro_resolutions: RefCell::new(Vec::new()),
globs: RefCell::new((Vec::new())),
traits: RefCell::new(None),
populated: Cell::new(normal_ancestor_id.is_local()),
- span: span,
- expansion: expansion,
+ span,
+ expansion,
}
}
macro_defs.insert(Mark::root(), root_def_id);
Resolver {
- session: session,
+ session,
- definitions: definitions,
+ definitions,
// The outermost module has def ID 0; this is not reflected in the
// AST.
- graph_root: graph_root,
+ graph_root,
prelude: None,
has_self: FxHashSet(),
freevars_seen: NodeMap(),
export_map: NodeMap(),
trait_map: NodeMap(),
- module_map: module_map,
+ module_map,
block_map: NodeMap(),
extern_module_map: FxHashMap(),
gated_errors: FxHashSet(),
disallowed_shadowing: Vec::new(),
- arenas: arenas,
+ arenas,
dummy_binding: arenas.alloc_name_binding(NameBinding {
kind: NameBindingKind::Def(Def::Err),
expansion: Mark::root(),
use_extern_macros:
features.use_extern_macros || features.proc_macro || features.decl_macro,
- crate_loader: crate_loader,
+ crate_loader,
macro_names: FxHashSet(),
global_macros: FxHashMap(),
lexical_macro_resolutions: Vec::new(),
macro_map: FxHashMap(),
macro_exports: Vec::new(),
- invocations: invocations,
- macro_defs: macro_defs,
+ invocations,
+ macro_defs,
local_macro_def_scopes: FxHashMap(),
name_already_seen: FxHashMap(),
whitelisted_legacy_custom_derives: Vec::new(),
NameBindingKind::Import { .. } => false,
NameBindingKind::Ambiguity { b1, b2, legacy } => {
self.ambiguity_errors.push(AmbiguityError {
- span: span, name: ident.name, lexical: false, b1: b1, b2: b2, legacy: legacy,
+ span, name: ident.name, lexical: false, b1, b2, legacy,
});
if legacy {
self.record_use(ident, ns, b1, span);
if record_used {
vec.push(Freevar {
def: prev_def,
- span: span,
+ span,
});
seen.insert(node_id, depth);
}
id: attr::mk_attr_id(),
style: ast::AttrStyle::Outer,
is_sugared_doc: false,
- span: span,
+ span,
});
}
}
let name = ident.name;
self.ambiguity_errors.push(AmbiguityError {
span: path_span,
- name: name,
+ name,
b1: shadower,
b2: binding,
lexical: true,
binding.def() != shadowed_glob.def() {
self.ambiguity_errors.push(AmbiguityError {
span: path_span,
- name: name,
+ name,
lexical: false,
b1: binding,
b2: shadowed_glob,
let current_module = self.current_module;
let directive = self.arenas.alloc_import_directive(ImportDirective {
parent: current_module,
- module_path: module_path,
+ module_path,
imported_module: Cell::new(None),
- subclass: subclass,
- span: span,
- id: id,
+ subclass,
+ span,
+ id,
vis: Cell::new(vis),
- expansion: expansion,
+ expansion,
used: Cell::new(false),
});
self.arenas.alloc_name_binding(NameBinding {
kind: NameBindingKind::Import {
- binding: binding,
- directive: directive,
+ binding,
+ directive,
used: Cell::new(false),
legacy_self_import: false,
},
span: directive.span,
- vis: vis,
+ vis,
expansion: directive.expansion,
})
}
legacy_self_import = Some(directive);
let binding = this.arenas.alloc_name_binding(NameBinding {
kind: NameBindingKind::Import {
- binding: binding,
- directive: directive,
+ binding,
+ directive,
used: Cell::new(false),
legacy_self_import: true,
},
let span_utils = SpanUtils::new(&save_ctxt.tcx.sess);
DumpVisitor {
tcx: save_ctxt.tcx,
- save_ctxt: save_ctxt,
- dumper: dumper,
+ save_ctxt,
+ dumper,
span: span_utils.clone(),
cur_scope: CRATE_NODE_ID,
// mac_defs: HashSet::new(),
self.span_from_span(sub_span.expect("No span found for var ref"));
self.dumper.dump_ref(Ref {
kind: RefKind::Variable,
- span: span,
+ span,
ref_id: ::id_from_def_id(def.struct_variant().fields[idx.node].did),
});
}
impl<'a> DumpHandler<'a> {
pub fn new(odir: Option<&'a Path>, cratename: &str) -> DumpHandler<'a> {
DumpHandler {
- odir: odir,
+ odir,
cratename: cratename.to_owned()
}
}
info!("Dumping crate {}", cratename);
let save_ctxt = SaveContext {
- tcx: tcx,
+ tcx,
tables: &ty::TypeckTables::empty(None),
- analysis: analysis,
+ analysis,
span_utils: SpanUtils::new(&tcx.sess),
config: find_config(config),
};
let value = value[2..value.len()-1].to_string();
rls_data::Attribute {
- value: value,
+ value,
span: scx.span_from_span(attr.span),
}
}).collect()
fn text_sig(text: String) -> Signature {
Signature {
- text: text,
+ text,
defs: vec![],
refs: vec![],
}
text.push('}');
Ok(Signature {
text,
- defs: defs,
- refs: refs,
+ defs,
+ refs,
})
}
ast::VariantData::Tuple(ref fields, id) => {
text.push(')');
Ok(Signature {
text,
- defs: defs,
- refs: refs,
+ defs,
+ refs,
})
}
ast::VariantData::Unit(id) => {
impl<'a> SpanUtils<'a> {
pub fn new(sess: &'a Session) -> SpanUtils<'a> {
SpanUtils {
- sess: sess,
+ sess,
err_count: Cell::new(0),
}
}
fn new(layout: TyLayout<'tcx>) -> ArgType<'tcx> {
ArgType {
kind: ArgKind::Direct,
- layout: layout,
+ layout,
cast: None,
pad: None,
attrs: ArgAttributes::default()
}
FnType {
- args: args,
- ret: ret,
+ args,
+ ret,
variadic: sig.variadic,
- cconv: cconv
+ cconv,
}
}
/// by `config`.
pub fn new(config: ArchiveConfig<'a>) -> ArchiveBuilder<'a> {
ArchiveBuilder {
- config: config,
+ config,
removals: Vec::new(),
additions: Vec::new(),
should_update_symbols: false,
Err(e) => return Err(io::Error::new(io::ErrorKind::Other, e)),
};
self.additions.push(Addition::Archive {
- archive: archive,
+ archive,
skip: Box::new(skip),
});
Ok(())
output: &Path,
input: Option<&Path>) -> ArchiveConfig<'a> {
ArchiveConfig {
- sess: sess,
+ sess,
dst: output.to_path_buf(),
src: input.map(|p| p.to_path_buf()),
lib_search_paths: archive_search_paths(sess),
match sess.linker_flavor() {
LinkerFlavor::Msvc => {
Box::new(MsvcLinker {
- cmd: cmd,
- sess: sess,
+ cmd,
+ sess,
info: self
}) as Box<Linker>
}
LinkerFlavor::Em => {
Box::new(EmLinker {
- cmd: cmd,
- sess: sess,
+ cmd,
+ sess,
info: self
}) as Box<Linker>
}
LinkerFlavor::Gcc => {
Box::new(GccLinker {
- cmd: cmd,
- sess: sess,
+ cmd,
+ sess,
info: self,
hinted_static: false,
is_ld: false,
}
LinkerFlavor::Ld => {
Box::new(GccLinker {
- cmd: cmd,
- sess: sess,
+ cmd,
+ sess,
info: self,
hinted_static: false,
is_ld: true,
fn new(sess: &Session, passes: Vec<String>) -> ModuleConfig {
ModuleConfig {
tm: create_target_machine(sess),
- passes: passes,
+ passes,
opt_level: None,
opt_size: None,
let tm = config.tm;
let fv = HandlerFreeVars {
- cgcx: cgcx,
- diag_handler: diag_handler,
+ cgcx,
+ diag_handler,
};
let fv = &fv as *const HandlerFreeVars as *mut c_void;
-> WorkItem
{
WorkItem {
- mtrans: mtrans,
- config: config,
- output_names: output_names
+ mtrans,
+ config,
+ output_names,
}
}
let cgcx = CodegenContext {
crate_types: sess.crate_types.borrow().clone(),
- each_linked_rlib_for_lto: each_linked_rlib_for_lto,
+ each_linked_rlib_for_lto,
lto: sess.lto(),
no_landing_pads: sess.no_landing_pads(),
opts: Arc::new(sess.opts.clone()),
time_passes: sess.time_passes(),
- exported_symbols: exported_symbols,
+ exported_symbols,
plugin_passes: sess.plugin_llvm_passes.borrow().clone(),
remark: sess.opts.cg.remark.clone(),
worker: 0,
incr_comp_session_dir: sess.incr_comp_session_dir_opt().map(|r| r.clone()),
- coordinator_send: coordinator_send,
+ coordinator_send,
diag_emitter: shared_emitter.clone(),
time_graph,
};
pub fn new(ccx: &'a CrateContext<'a, 'tcx>, name: String) -> StatRecorder<'a, 'tcx> {
let istart = ccx.stats().n_llvm_insns.get();
StatRecorder {
- ccx: ccx,
+ ccx,
name: Some(name),
- istart: istart,
+ istart,
}
}
}
let (llcx, llmod) =
context::create_context_and_module(tcx.sess, "allocator");
let modules = ModuleLlvm {
- llmod: llmod,
- llcx: llcx,
+ llmod,
+ llcx,
};
time(tcx.sess.time_passes(), "write allocator module", || {
allocator::trans(tcx, &modules, kind)
llvm::LLVMCreateBuilderInContext(ccx.llcx())
};
Builder {
- llbuilder: llbuilder,
- ccx: ccx,
+ llbuilder,
+ ccx,
}
}
{
let mut visitor = RootCollector {
- scx: scx,
- mode: mode,
+ scx,
+ mode,
exported_symbols,
output: &mut roots,
};
let mir = scx.tcx().instance_mir(instance.def);
let mut visitor = MirNeighborCollector {
- scx: scx,
+ scx,
mir: &mir,
- output: output,
+ output,
param_substs: instance.substs,
const_context,
};
impl Funclet {
pub fn new(cleanuppad: ValueRef) -> Funclet {
Funclet {
- cleanuppad: cleanuppad,
+ cleanuppad,
operand: OperandBundleDef::new("funclet", &[cleanuppad]),
}
}
let use_dll_storage_attrs = tcx.sess.target.target.options.is_like_msvc;
SharedCrateContext {
- tcx: tcx,
- check_overflow: check_overflow,
- use_dll_storage_attrs: use_dll_storage_attrs,
- output_filenames: output_filenames,
+ tcx,
+ check_overflow,
+ use_dll_storage_attrs,
+ output_filenames,
}
}
};
let local_ccx = LocalCrateContext {
- llmod: llmod,
- llcx: llcx,
+ llmod,
+ llcx,
stats: Stats::default(),
- codegen_unit: codegen_unit,
+ codegen_unit,
crate_trans_items,
exported_symbols,
instances: RefCell::new(FxHashMap()),
int_type: Type::from_ref(ptr::null_mut()),
opaque_vec_type: Type::from_ref(ptr::null_mut()),
str_slice_type: Type::from_ref(ptr::null_mut()),
- dbg_cx: dbg_cx,
+ dbg_cx,
eh_personality: Cell::new(None),
eh_unwind_resume: Cell::new(None),
rust_try_fn: Cell::new(None),
-> CrateContext<'a, 'tcx> {
assert!(local_ccxs.len() == 1);
CrateContext {
- shared: shared,
+ shared,
local_ccx: &local_ccxs[0]
}
}
loc.col.to_usize() as c_uint)
};
scopes[scope] = MirDebugScope {
- scope_metadata: scope_metadata,
+ scope_metadata,
file_start_pos: loc.file.start_pos,
file_end_pos: loc.file.end_pos,
};
type_map.register_type_with_metadata(unfinished_type, metadata_stub);
UnfinishedMetadata {
- unfinished_type: unfinished_type,
- unique_type_id: unique_type_id,
- metadata_stub: metadata_stub,
- llvm_type: llvm_type,
- member_description_factory: member_description_factory,
+ unfinished_type,
+ unique_type_id,
+ metadata_stub,
+ llvm_type,
+ member_description_factory,
}
}
impl MetadataCreationResult {
fn new(metadata: DIType, already_stored_in_typemap: bool) -> MetadataCreationResult {
MetadataCreationResult {
- metadata: metadata,
- already_stored_in_typemap: already_stored_in_typemap
+ metadata,
+ already_stored_in_typemap,
}
}
}
let offset = FixedMemberOffset { bytes: offsets[i].bytes() as usize};
MemberDescription {
- name: name,
+ name,
llvm_type: type_of::in_memory_type_of(cx, fty),
type_metadata: type_metadata(cx, fty, self.span),
- offset: offset,
+ offset,
flags: DIFlags::FlagZero,
}
}).collect()
struct_llvm_type,
StructMDF(StructMemberDescriptionFactory {
ty: struct_type,
- variant: variant,
- substs: substs,
- span: span,
+ variant,
+ substs,
+ span,
})
)
}
TupleMDF(TupleMemberDescriptionFactory {
ty: tuple_type,
component_types: component_types.to_vec(),
- span: span,
+ span,
})
)
}
union_metadata_stub,
union_llvm_type,
UnionMDF(UnionMemberDescriptionFactory {
- variant: variant,
- substs: substs,
- span: span,
+ variant,
+ substs,
+ span,
})
)
}
let member_description_factory =
VariantMDF(VariantMemberDescriptionFactory {
offsets: &struct_def.offsets[..],
- args: args,
+ args,
discriminant_type_metadata: match discriminant_info {
RegularDiscriminant(discriminant_type_metadata) => {
Some(discriminant_type_metadata)
}
_ => None
},
- span: span,
+ span,
});
(metadata_stub, variant_llvm_type, member_description_factory)
enum_metadata,
enum_llvm_type,
EnumMDF(EnumMemberDescriptionFactory {
- enum_type: enum_type,
+ enum_type,
type_rep: type_rep.layout,
- discriminant_type_metadata: discriminant_type_metadata,
- containing_scope: containing_scope,
- file_metadata: file_metadata,
- span: span,
+ discriminant_type_metadata,
+ containing_scope,
+ file_metadata,
+ span,
}),
);
// DIBuilder inherits context from the module, so we'd better use the same one
let llcontext = unsafe { llvm::LLVMGetModuleContext(llmod) };
CrateDebugContext {
- llcontext: llcontext,
- llmod: llmod,
- builder: builder,
+ llcontext,
+ llmod,
+ builder,
created_files: RefCell::new(FxHashMap()),
created_enum_disr_types: RefCell::new(FxHashMap()),
type_map: RefCell::new(TypeMap::new()),
// Initialize fn debug context (including scope map and namespace map)
let fn_debug_context = FunctionDebugContextData {
- fn_metadata: fn_metadata,
+ fn_metadata,
source_locations_enabled: Cell::new(false),
defining_crate: def_id.krate,
};
impl InternalDebugLocation {
pub fn new(scope: DIScope, line: usize, col: usize) -> InternalDebugLocation {
KnownLocation {
- scope: scope,
- line: line,
- col: col,
+ scope,
+ line,
+ col,
}
}
}
Ref(ptr, align)
};
let op = OperandRef {
- val: val,
- ty: ty
+ val,
+ ty,
};
self.trans_argument(bcx, op, llargs, fn_ty, next_idx, llfn, def);
}
// If the tuple is immediate, the elements are as well
let op = OperandRef {
val: Immediate(elem),
- ty: ty
+ ty,
};
self.trans_argument(bcx, op, llargs, fn_ty, next_idx, llfn, def);
}
// Pair is always made up of immediates
let op = OperandRef {
val: Immediate(elem),
- ty: ty
+ ty,
};
self.trans_argument(bcx, op, llargs, fn_ty, next_idx, llfn, def);
}
impl<'tcx> Const<'tcx> {
pub fn new(llval: ValueRef, ty: Ty<'tcx>) -> Const<'tcx> {
Const {
- llval: llval,
- ty: ty
+ llval,
+ ty,
}
}
};
OperandRef {
- val: val,
+ val,
ty: self.ty
}
}
args: IndexVec<mir::Local, Result<Const<'tcx>, ConstEvalErr<'tcx>>>)
-> MirConstContext<'a, 'tcx> {
let mut context = MirConstContext {
- ccx: ccx,
- mir: mir,
- substs: substs,
+ ccx,
+ mir,
+ substs,
locals: (0..mir.local_decls.len()).map(|_| None).collect(),
};
for (i, arg) in args.into_iter().enumerate() {
};
ConstLvalue {
base: projected,
- llextra: llextra,
+ llextra,
ty: projected_ty
}
}
};
LvalueRef {
llval: llprojected,
- llextra: llextra,
+ llextra,
ty: projected_ty,
alignment: align,
}
let (landing_pads, funclets) = create_funclets(&bcx, &cleanup_kinds, &block_bcxs);
let mut mircx = MirContext {
- mir: mir,
- llfn: llfn,
- fn_ty: fn_ty,
- ccx: ccx,
+ mir,
+ llfn,
+ fn_ty,
+ ccx,
llpersonalityslot: None,
blocks: block_bcxs,
unreachable_block: None,
- cleanup_kinds: cleanup_kinds,
- landing_pads: landing_pads,
+ cleanup_kinds,
+ landing_pads,
funclets: &funclets,
- scopes: scopes,
+ scopes,
locals: IndexVec::new(),
- debug_context: debug_context,
+ debug_context,
param_substs: {
assert!(!instance.substs.needs_infer());
instance.substs
OperandValue::Immediate(llarg)
};
let operand = OperandRef {
- val: val,
+ val,
ty: arg_ty
};
return LocalRef::Operand(Some(operand.unpack_if_pair(bcx)));
OperandValue::Immediate(C_null(llty))
};
OperandRef {
- val: val,
- ty: ty
+ val,
+ ty,
}
}
};
LvalueRef {
llval: llptr,
- llextra: llextra,
+ llextra,
ty: LvalueTy::from_ty(projected_ty),
alignment: Alignment::AbiAligned,
}
dest.llval, dest.ty, dest.alignment);
let field_index = active_field_index.unwrap_or(i);
val.ty = LvalueTy::Downcast {
- adt_def: adt_def,
+ adt_def,
substs: self.monomorphize(&substs),
- variant_index: variant_index,
+ variant_index,
};
let (lldest_i, align) = val.trans_field_ptr(&bcx, field_index);
self.store_operand(&bcx, lldest_i, align.to_align(), op);
}
};
let operand = OperandRef {
- val: val,
+ val,
ty: cast_ty
};
(bcx, operand)
omit_local_crate_name: bool)
-> Self {
DefPathBasedNames {
- tcx: tcx,
- omit_disambiguators: omit_disambiguators,
- omit_local_crate_name: omit_local_crate_name,
+ tcx,
+ omit_disambiguators,
+ omit_local_crate_name,
}
}
let name = tcx.hir.name(id);
tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion {
def_id: tcx.hir.local_def_id(id),
- index: index,
- name: name
+ index,
+ name,
}))
}
cur_ty: self.resolve_type_vars_if_possible(&base_ty),
obligations: vec![],
at_start: true,
- span: span,
+ span,
}
}
span: Span)
-> Result<CastCheck<'tcx>, ErrorReported> {
let check = CastCheck {
- expr: expr,
- expr_ty: expr_ty,
- expr_diverges: expr_diverges,
- cast_ty: cast_ty,
- cast_span: cast_span,
- span: span,
+ expr,
+ expr_ty,
+ expr_diverges,
+ cast_ty,
+ cast_span,
+ span,
};
// For better error messages, check for some obviously unsized
impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> {
fn new(fcx: &'f FnCtxt<'f, 'gcx, 'tcx>, cause: ObligationCause<'tcx>) -> Self {
Coerce {
- fcx: fcx,
- cause: cause,
+ fcx,
+ cause,
use_lub: false,
}
}
call_expr: &'gcx hir::Expr)
-> ConfirmContext<'a, 'gcx, 'tcx> {
ConfirmContext {
- fcx: fcx,
- span: span,
- self_expr: self_expr,
- call_expr: call_expr,
+ fcx,
+ span,
+ self_expr,
+ call_expr,
}
}
mode: probe::Mode)
-> Self {
NoMatchData {
- static_candidates: static_candidates,
- unsatisfied_predicates: unsatisfied_predicates,
- out_of_scope_traits: out_of_scope_traits,
- mode: mode,
+ static_candidates,
+ unsatisfied_predicates,
+ out_of_scope_traits,
+ mode,
}
}
}
ty::Predicate::WellFormed(method_ty)));
let callee = MethodCallee {
- def_id: def_id,
+ def_id,
substs: trait_ref.substs,
sig: fn_sig,
};
}
} else {
vec![CandidateStep {
- self_ty: self_ty,
+ self_ty,
autoderefs: 0,
unsize: false,
}]
opt_simplified_steps: Option<Vec<ty::fast_reject::SimplifiedType>>)
-> ProbeContext<'a, 'gcx, 'tcx> {
ProbeContext {
- fcx: fcx,
- span: span,
- mode: mode,
- looking_for: looking_for,
+ fcx,
+ span,
+ mode,
+ looking_for,
inherent_candidates: Vec::new(),
extension_candidates: Vec::new(),
impl_dups: FxHashSet(),
steps: Rc::new(steps),
- opt_simplified_steps: opt_simplified_steps,
+ opt_simplified_steps,
static_candidates: Vec::new(),
private_candidate: None,
unsatisfied_predicates: Vec::new(),
let borrow = tcx.all_traits.borrow();
assert!(borrow.is_some());
AllTraits {
- borrow: borrow,
+ borrow,
idx: 0,
}
}
hir::DefaultBlock =>
(unsafety, self.def, self.unsafe_push_count),
};
- UnsafetyState{ def: def,
- unsafety: unsafety,
+ UnsafetyState{ def,
+ unsafety,
unsafe_push_count: count,
from_fn: false }
}
tables: MaybeInProgressTables {
maybe_tables: infcx.in_progress_tables,
},
- infcx: infcx,
+ infcx,
fulfillment_cx: RefCell::new(traits::FulfillmentContext::new()),
locals: RefCell::new(NodeMap()),
deferred_call_resolutions: RefCell::new(DefIdMap()),
body_id: ast::NodeId)
-> FnCtxt<'a, 'gcx, 'tcx> {
FnCtxt {
- body_id: body_id,
+ body_id,
param_env,
err_count_on_creation: inh.tcx.sess.err_count(),
ret_coercion: None,
stack: Vec::new(),
by_id: NodeMap(),
}),
- inh: inh,
+ inh,
}
}
};
let ctxt = BreakableCtxt {
- coerce: coerce,
+ coerce,
may_break: false, // will get updated if/when we find a `break`
};
Subject(subject): Subject) -> RegionCtxt<'a, 'gcx, 'tcx> {
let region_maps = fcx.tcx.region_maps(subject);
RegionCtxt {
- fcx: fcx,
- region_maps: region_maps,
+ fcx,
+ region_maps,
repeating_scope: initial_repeating_scope,
body_id: initial_body_id,
call_site_scope: None,
let fcx = FnCtxt::new(&inh, param_env, id);
let wf_tys = f(&fcx, &mut CheckTypeWellFormedVisitor {
tcx: fcx.tcx.global_tcx(),
- code: code
+ code,
});
fcx.select_all_obligations_or_error();
fcx.regionck_item(id, span, &wf_tys);
pub fn new(tcx: TyCtxt<'a, 'gcx, 'gcx>)
-> CheckTypeWellFormedVisitor<'a, 'gcx> {
CheckTypeWellFormedVisitor {
- tcx: tcx,
+ tcx,
code: ObligationCauseCode::MiscObligation
}
}
CheckWfFcxBuilder {
inherited: Inherited::build(self.tcx, def_id),
code: self.code.clone(),
- id: id,
- span: span,
+ id,
+ span,
param_env: self.tcx.param_env(def_id),
}
}
ExplicitSelf::ByReference(region, mutbl) => {
fcx.tcx.mk_ref(region, ty::TypeAndMut {
ty: self_ty,
- mutbl: mutbl
+ mutbl,
})
}
ExplicitSelf::ByBox => fcx.tcx.mk_box(self_ty)
let owner = fcx.tcx.hir.definitions().node_to_hir_id(body.id().node_id);
WritebackCx {
- fcx: fcx,
+ fcx,
tables: ty::TypeckTables::empty(Some(DefId::local(owner.owner))),
- body: body
+ body,
}
}
Resolver {
tcx: fcx.tcx,
infcx: fcx,
- span: span,
- body: body,
+ span,
+ body,
}
}
pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, item_def_id: DefId)
-> ItemCtxt<'a,'tcx> {
ItemCtxt {
- tcx: tcx,
- item_def_id: item_def_id,
+ tcx,
+ item_def_id,
}
}
}
}
}).collect();
ty::VariantDef {
- did: did,
- name: name,
- discr: discr,
- fields: fields,
+ did,
+ name,
+ discr,
+ fields,
ctor_kind: CtorKind::from_hir(def),
}
}
types.extend(fv.iter().enumerate().map(|(i, _)| ty::TypeParameterDef {
index: type_start + i as u32,
name: Symbol::intern("<upvar>"),
- def_id: def_id,
+ def_id,
has_default: false,
object_lifetime_default: rl::Set1::Empty,
pure_wrt_drop: false,
tcx.alloc_generics(ty::Generics {
parent: parent_def_id,
- parent_regions: parent_regions,
- parent_types: parent_types,
- regions: regions,
- types: types,
- type_param_to_index: type_param_to_index,
+ parent_regions,
+ parent_types,
+ regions,
+ types,
+ type_param_to_index,
has_self: has_self || parent_has_self,
has_late_bound_regions: has_late_bound_regions(tcx, node),
})
ItemTrait(_, ref generics, .., ref items) => {
is_trait = Some((ty::TraitRef {
- def_id: def_id,
+ def_id,
substs: Substs::identity_for_item(tcx, def_id)
}, items));
generics
for param in early_bound_lifetimes_from_generics(tcx, ast_generics) {
let region = tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion {
def_id: tcx.hir.local_def_id(param.lifetime.id),
- index: index,
+ index,
name: param.lifetime.name
}));
index += 1;
ty::GenericPredicates {
parent: generics.parent,
- predicates: predicates
+ predicates,
}
}
};
Bounds {
- region_bounds: region_bounds,
- implicitly_sized: implicitly_sized,
- trait_bounds: trait_bounds,
- projection_bounds: projection_bounds,
+ region_bounds,
+ implicitly_sized,
+ trait_bounds,
+ projection_bounds,
}
}
let mut collector = ParameterCollector {
parameters: vec![],
- include_nonconstraining: include_nonconstraining
+ include_nonconstraining,
};
t.visit_with(&mut collector);
collector.parameters
let invariant = terms_cx.arena.alloc(ConstantTerm(ty::Invariant));
let bivariant = terms_cx.arena.alloc(ConstantTerm(ty::Bivariant));
let mut constraint_cx = ConstraintContext {
- terms_cx: terms_cx,
- covariant: covariant,
- contravariant: contravariant,
- invariant: invariant,
- bivariant: bivariant,
+ terms_cx,
+ covariant,
+ contravariant,
+ invariant,
+ bivariant,
constraints: Vec::new(),
dependencies: TransitiveRelation::new(),
};
debug!("add_constraint(index={}, variance={:?})", index, variance);
self.constraints.push(Constraint {
inferred: InferredIndex(current.inferred_start.0 + index as usize),
- variance: variance,
+ variance,
});
}
arena: &'a mut TypedArena<VarianceTerm<'a>>)
-> TermsContext<'a, 'tcx> {
let mut terms_cx = TermsContext {
- tcx: tcx,
- arena: arena,
+ tcx,
+ arena,
inferred_starts: NodeMap(),
inferred_terms: vec![],
source: cx.tcx.def_span(did).clean(cx),
name: Some(name.clean(cx)),
attrs: load_attrs(cx, did),
- inner: inner,
+ inner,
visibility: Some(clean::Public),
stability: cx.tcx.lookup_stability(did).clean(cx),
deprecation: cx.tcx.lookup_deprecation(did).clean(cx),
let (generics, supertrait_bounds) = separate_supertrait_bounds(generics);
clean::Trait {
unsafety: cx.tcx.trait_def(did).unsafety,
- generics: generics,
+ generics,
items: trait_items,
bounds: supertrait_bounds,
}
decl: (did, sig).clean(cx),
generics: (cx.tcx.generics_of(did), &predicates).clean(cx),
unsafety: sig.unsafety(),
- constness: constness,
+ constness,
abi: sig.abi(),
}
}
}),
source: tcx.def_span(did).clean(cx),
name: None,
- attrs: attrs,
+ attrs,
visibility: Some(clean::Inherited),
stability: tcx.lookup_stability(did).clean(cx),
deprecation: tcx.lookup_deprecation(did).clean(cx),
};
clean::MethodItem(clean::Method {
- unsafety: unsafety,
- constness: constness,
- decl: decl,
- generics: generics,
- abi: abi
+ unsafety,
+ constness,
+ decl,
+ generics,
+ abi,
})
}
ref r => panic!("not a tymethod: {:?}", r),
inner: clean::ImplItem(clean::Impl {
unsafety: hir::Unsafety::Normal, // FIXME: this should be decoded
provided_trait_methods: provided,
- trait_: trait_,
- for_: for_,
+ trait_,
+ for_,
generics: (tcx.generics_of(did), &predicates).clean(cx),
items: trait_items,
polarity: Some(polarity.clean(cx)),
}),
source: tcx.def_span(did).clean(cx),
name: None,
- attrs: attrs,
+ attrs,
visibility: Some(clean::Inherited),
stability: tcx.lookup_stability(did).clean(cx),
deprecation: tcx.lookup_deprecation(did).clean(cx),
let mut items = Vec::new();
fill_in(cx, did, &mut items);
return clean::Module {
- items: items,
+ items,
is_crate: false,
};
visibility: Some(Public),
stability: get_stability(cx, def_id),
deprecation: get_deprecation(cx, def_id),
- def_id: def_id,
+ def_id,
inner: PrimitiveItem(prim),
}
}));
let mut external_traits = cx.external_traits.borrow_mut();
Crate {
- name: name,
- src: src,
+ name,
+ src,
module: Some(module),
- externs: externs,
- primitives: primitives,
+ externs,
+ primitives,
access_levels: Arc::new(mem::replace(&mut access_levels, Default::default())),
external_traits: mem::replace(&mut external_traits, Default::default()),
}
name: cx.tcx.crate_name(*self).to_string(),
src: PathBuf::from(krate_src),
attrs: cx.tcx.get_attrs(root).clean(cx),
- primitives: primitives,
+ primitives,
}
}
}
def_id: cx.tcx.hir.local_def_id(self.id),
inner: ModuleItem(Module {
is_crate: self.is_crate,
- items: items
+ items,
})
}
}
ListAttributesIter {
attrs: self.iter(),
current_list: Vec::new().into_iter(),
- name: name
+ name,
}
}
}
inline::record_extern_fqn(cx, did, TypeKind::Trait);
TraitBound(PolyTrait {
trait_: ResolvedPath {
- path: path,
+ path,
typarams: None,
- did: did,
+ did,
is_generic: false,
},
lifetimes: vec![]
ty::TyTuple(ref tys, _) => tys.iter().map(|t| t.clean(cx)).collect(),
_ => {
return PathParameters::AngleBracketed {
- lifetimes: lifetimes,
+ lifetimes,
types: types.clean(cx),
- bindings: bindings
+ bindings,
}
}
};
// _ => Some(types[1].clean(cx))
// };
PathParameters::Parenthesized {
- inputs: inputs,
- output: output
+ inputs,
+ output,
}
},
_ => {
PathParameters::AngleBracketed {
- lifetimes: lifetimes,
+ lifetimes,
types: types.clean(cx),
- bindings: bindings
+ bindings,
}
}
}
TraitBound(
PolyTrait {
trait_: ResolvedPath {
- path: path,
+ path,
typarams: None,
did: self.def_id,
is_generic: false,
name = "_".to_string();
}
Argument {
- name: name,
+ name,
type_: ty.clean(cx),
}
}).collect()
visibility: None,
stability: get_stability(cx, cx.tcx.hir.local_def_id(self.id)),
deprecation: get_deprecation(cx, cx.tcx.hir.local_def_id(self.id)),
- inner: inner
+ inner,
}
}
}
visibility: self.vis.clean(cx),
stability: get_stability(cx, cx.tcx.hir.local_def_id(self.id)),
deprecation: get_deprecation(cx, cx.tcx.hir.local_def_id(self.id)),
- inner: inner
+ inner,
}
}
}
if provided {
MethodItem(Method {
unsafety: sig.unsafety(),
- generics: generics,
- decl: decl,
+ generics,
+ decl,
abi: sig.abi(),
// trait methods cannot (currently, at least) be const
} else {
TyMethodItem(TyMethod {
unsafety: sig.unsafety(),
- generics: generics,
- decl: decl,
+ generics,
+ decl,
abi: sig.abi(),
})
}
def_id: self.def_id,
attrs: inline::load_attrs(cx, self.def_id),
source: cx.tcx.def_span(self.def_id).clean(cx),
- inner: inner,
+ inner,
}
}
}
}
let trait_path = hir::Path {
span: self.span,
- def: def,
+ def,
segments: vec![].into(),
};
Type::QPath {
bounds.push(RegionBound(lifetime.clean(cx)));
}
ResolvedPath {
- path: path,
+ path,
typarams: Some(bounds),
- did: did,
- is_generic: is_generic,
+ did,
+ is_generic,
}
}
_ => Infer // shouldn't happen
let path = external_path(cx, &cx.tcx.item_name(did).as_str(),
None, false, vec![], substs);
ResolvedPath {
- path: path,
+ path,
typarams: None,
- did: did,
+ did,
is_generic: false,
}
}
inline::record_extern_fqn(cx, did, TypeKind::Trait);
let bound = TraitBound(PolyTrait {
trait_: ResolvedPath {
- path: path,
+ path,
typarams: None,
- did: did,
+ did,
is_generic: false,
},
lifetimes: vec![]
let path = external_path(cx, &cx.tcx.item_name(did).as_str(), Some(did),
false, bindings, principal.0.substs);
ResolvedPath {
- path: path,
+ path,
typarams: Some(typarams),
- did: did,
+ did,
is_generic: false,
}
} else {
global: false,
def: Def::Err,
segments: vec![PathSegment {
- name: name,
+ name,
params: PathParameters::AngleBracketed {
lifetimes: Vec::new(),
types: Vec::new(),
unsafety: self.unsafety,
generics: self.generics.clean(cx),
provided_trait_methods: provided,
- trait_: trait_,
+ trait_,
for_: self.for_.clean(cx),
- items: items,
+ items,
polarity: Some(self.polarity.clean(cx)),
}),
});
visibility: self.vis.clean(cx),
stability: get_stability(cx, cx.tcx.hir.local_def_id(self.id)),
deprecation: get_deprecation(cx, cx.tcx.hir.local_def_id(self.id)),
- inner: inner,
+ inner,
}
}
}
} else {
Some(register_def(cx, path.def))
},
- path: path,
+ path,
}
}
let warning_lint = lint::builtin::WARNINGS.name_lower();
let sessopts = config::Options {
- maybe_sysroot: maybe_sysroot,
- search_paths: search_paths,
+ maybe_sysroot,
+ search_paths,
crate_types: vec![config::CrateTypeRlib],
lint_opts: if !allow_warnings { vec![(warning_lint, lint::Allow)] } else { vec![] },
lint_cap: Some(lint::Allow),
- externs: externs,
+ externs,
target_triple: triple.unwrap_or(config::host_triple().to_string()),
// Ensure that rustdoc works even if rustc is feature-staged
unstable_features: UnstableFeatures::Allow,
actually_rustdoc: true,
debugging_opts: config::DebuggingOptions {
- force_unstable_if_unmarked: force_unstable_if_unmarked,
+ force_unstable_if_unmarked,
..config::basic_debugging_options()
},
..config::basic_options().clone()
};
let ctxt = DocContext {
- tcx: tcx,
+ tcx,
populated_all_crate_impls: Cell::new(false),
access_levels: RefCell::new(access_levels),
external_traits: Default::default(),
_ => self.fold_inner_recur(inner),
};
- Some(Item { attrs: attrs, name: name, source: source, inner: inner,
- visibility: visibility, stability: stability, deprecation: deprecation,
- def_id: def_id })
+ Some(Item { attrs, name, source, inner, visibility,
+ stability, deprecation, def_id })
}
fn fold_mod(&mut self, m: Module) -> Module {
impl<'a> Classifier<'a> {
pub fn new(lexer: lexer::StringReader<'a>, codemap: &'a CodeMap) -> Classifier<'a> {
Classifier {
- lexer: lexer,
- codemap: codemap,
+ lexer,
+ codemap,
in_attribute: false,
in_macro: false,
in_macro_nonterminal: false,
fn new(iter: I, toc: Option<&'b mut TocBuilder>) -> Self {
HeadingLinks {
inner: iter,
- toc: toc,
+ toc,
buf: VecDeque::new(),
}
}
should_panic: bool, no_run: bool, ignore: bool, rust: bool, test_harness: bool,
compile_fail: bool, allow_fail: bool, error_codes: Vec<String>) {
assert_eq!(LangString::parse(s), LangString {
- should_panic: should_panic,
- no_run: no_run,
- ignore: ignore,
- rust: rust,
- test_harness: test_harness,
- compile_fail: compile_fail,
- error_codes: error_codes,
+ should_panic,
+ no_run,
+ ignore,
+ rust,
+ test_harness,
+ compile_fail,
+ error_codes,
original: s.to_owned(),
- allow_fail: allow_fail,
+ allow_fail,
})
}
None => PathBuf::new(),
};
let mut scx = SharedContext {
- src_root: src_root,
- passes: passes,
+ src_root,
+ passes,
include_sources: true,
local_sources: FxHashMap(),
issue_tracker_base_url: None,
krate = render_sources(&dst, &mut scx, krate)?;
let cx = Context {
current: Vec::new(),
- dst: dst,
+ dst,
render_redirect_pages: false,
shared: Arc::new(scx),
- render_type: render_type,
+ render_type,
};
// Crawl the crate to build various caches used for the output
let mut cache = Cache {
impls: FxHashMap(),
- external_paths: external_paths,
+ external_paths,
paths: FxHashMap(),
implementors: FxHashMap(),
stack: Vec::new(),
access_levels: krate.access_levels.clone(),
orphan_impl_items: Vec::new(),
traits: mem::replace(&mut krate.external_traits, FxHashMap()),
- deref_trait_did: deref_trait_did,
- deref_mut_trait_did: deref_mut_trait_did,
- owned_box_did: owned_box_did,
+ deref_trait_did,
+ deref_mut_trait_did,
+ owned_box_did,
typarams: external_typarams,
};
let dst = dst.join("src").join(&krate.name);
try_err!(fs::create_dir_all(&dst), &dst);
let mut folder = SourceCollector {
- dst: dst,
- scx: scx,
+ dst,
+ scx,
};
Ok(folder.fold_crate(krate))
}
name: s.to_string(),
path: path.join("::").to_string(),
desc: plain_summary_line(item.doc_value()),
- parent: parent,
+ parent,
parent_idx: None,
search_type: get_index_search_type(&item),
});
where_clause = WhereClause { gens: &f.generics, indent: 0, end_newline: true },
decl = Method {
decl: &f.decl,
- name_len: name_len,
+ name_len,
indent: 0,
})?;
document(w, cx, it)
decl = Method {
decl: d,
name_len: head_len,
- indent: indent,
+ indent,
},
where_clause = WhereClause {
gens: g,
- indent: indent,
- end_newline: end_newline,
+ indent,
+ end_newline,
})
}
match item.inner {
}
return {
- raw: raw,
- query: query,
- type: type,
+ raw,
+ query,
+ type,
id: query + type
};
}
searchWords.push(crate);
searchIndex.push({
- crate: crate,
+ crate,
ty: 1, // == ExternCrate
name: crate,
path: "",
}
self.chain.push(TocEntry {
- level: level,
- name: name,
- sec_number: sec_number,
- id: id,
+ level,
+ name,
+ sec_number,
+ id,
children: Toc { entries: Vec::new() }
});
PluginManager {
dylibs: Vec::new(),
callbacks: Vec::new(),
- prefix: prefix,
+ prefix,
}
}
search_paths: libs,
crate_types: vec![config::CrateTypeExecutable],
output_types: outputs,
- externs: externs,
+ externs,
cg: config::CodegenOptions {
prefer_dynamic: true,
.. config::basic_codegen_options()
tests: Vec::new(),
old_tests: HashMap::new(),
names: Vec::new(),
- cfgs: cfgs,
- libs: libs,
- externs: externs,
+ cfgs,
+ libs,
+ externs,
cnt: 0,
- use_headers: use_headers,
+ use_headers,
current_header: None,
- cratename: cratename,
- opts: opts,
- maybe_sysroot: maybe_sysroot,
+ cratename,
+ opts,
+ maybe_sysroot,
position: DUMMY_SP,
- codemap: codemap,
- filename: filename,
- render_type: render_type,
+ codemap,
+ filename,
+ render_type,
}
}
ignore: should_ignore,
// compiler failures are test failures
should_panic: testing::ShouldPanic::No,
- allow_fail: allow_fail,
+ allow_fail,
},
testfn: testing::DynTestFn(box move |()| {
let panic = io::set_panic(None);
RustdocVisitor {
module: Module::new(None),
attrs: hir::HirVec::new(),
- cx: cx,
+ cx,
view_item_stack: stack,
inlining: false,
inside_public_path: true,
let struct_type = struct_type_from_def(&*sd);
Struct {
id: item.id,
- struct_type: struct_type,
- name: name,
+ struct_type,
+ name,
vis: item.vis.clone(),
stab: self.stability(item.id),
depr: self.deprecation(item.id),
let struct_type = struct_type_from_def(&*sd);
Union {
id: item.id,
- struct_type: struct_type,
- name: name,
+ struct_type,
+ name,
vis: item.vis.clone(),
stab: self.stability(item.id),
depr: self.deprecation(item.id),
params: &hir::Generics) -> Enum {
debug!("Visiting enum");
Enum {
- name: name,
+ name,
variants: def.variants.iter().map(|v| Variant {
name: v.node.name,
attrs: v.node.attrs.clone(),
depr: self.deprecation(item.id),
attrs: item.attrs.clone(),
decl: fd.clone(),
- name: name,
+ name,
whence: item.span,
generics: gen.clone(),
unsafety: *unsafety,
- constness: constness,
+ constness,
abi: *abi,
- body: body,
+ body,
}
}
};
om.macros.push(Macro {
- def_id: def_id,
+ def_id,
attrs: def.attrs.clone().into(),
name: def.ident.name,
whence: def.span,
- matchers: matchers,
+ matchers,
stab: self.stability(def.id),
depr: self.deprecation(def.id),
imported_from: Some(imported_from),
om.extern_crates.push(ExternCrate {
cnum: cstore.extern_mod_stmt_cnum(item.id)
.unwrap_or(LOCAL_CRATE),
- name: name,
+ name,
path: p.map(|x|x.to_string()),
vis: item.vis.clone(),
attrs: item.attrs.clone(),
}
om.imports.push(Import {
- name: name,
+ name,
id: item.id,
vis: item.vis.clone(),
attrs: item.attrs.clone(),
let t = Typedef {
ty: ty.clone(),
gen: gen.clone(),
- name: name,
+ name,
id: item.id,
attrs: item.attrs.clone(),
whence: item.span,
mutability: mut_.clone(),
expr: exp.clone(),
id: item.id,
- name: name,
+ name,
attrs: item.attrs.clone(),
whence: item.span,
vis: item.vis.clone(),
type_: ty.clone(),
expr: exp.clone(),
id: item.id,
- name: name,
+ name,
attrs: item.attrs.clone(),
whence: item.span,
vis: item.vis.clone(),
.map(|ti| self.cx.tcx.hir.trait_item(ti.id).clone())
.collect();
let t = Trait {
- unsafety: unsafety,
- name: name,
- items: items,
+ unsafety,
+ name,
+ items,
generics: gen.clone(),
bounds: b.iter().cloned().collect(),
id: item.id,
.map(|ii| self.cx.tcx.hir.impl_item(ii.id).clone())
.collect();
let i = Impl {
- unsafety: unsafety,
- polarity: polarity,
- defaultness: defaultness,
+ unsafety,
+ polarity,
+ defaultness,
generics: gen.clone(),
trait_: tr.clone(),
for_: ty.clone(),
- items: items,
+ items,
attrs: item.attrs.clone(),
id: item.id,
whence: item.span,
// See comment above about ItemImpl.
if !self.inlining {
let i = DefaultImpl {
- unsafety: unsafety,
+ unsafety,
trait_: trait_ref.clone(),
id: item.id,
attrs: item.attrs.clone(),
attrs: def.attrs.clone(),
name: def.name,
whence: def.span,
- matchers: matchers,
+ matchers,
stab: self.stability(def.id),
depr: self.deprecation(def.id),
imported_from: None,
impl<'a, 'b, 'tcx> LibEmbargoVisitor<'a, 'b, 'tcx> {
pub fn new(cx: &'a ::core::DocContext<'b, 'tcx>) -> LibEmbargoVisitor<'a, 'b, 'tcx> {
LibEmbargoVisitor {
- cx: cx,
+ cx,
cstore: &*cx.sess().cstore,
access_levels: cx.access_levels.borrow_mut(),
prev_level: Some(AccessLevel::Public),
/// Creates a new encoder whose output will be written to the specified writer
pub fn new(writer: &'a mut fmt::Write) -> PrettyEncoder<'a> {
PrettyEncoder {
- writer: writer,
+ writer,
curr_indent: 0,
indent: 2,
is_emitting_map_key: false,
/// Creates the JSON parser.
pub fn new(rdr: T) -> Parser<T> {
let mut p = Parser {
- rdr: rdr,
+ rdr,
ch: Some('\x00'),
line: 1,
col: 0,
impl<'a> Decoder<'a> {
pub fn new(data: &'a [u8], position: usize) -> Decoder<'a> {
Decoder {
- data: data,
- position: position,
+ data,
+ position,
}
}
Empty(bucket) => {
// Found a hole!
return InternalEntry::Vacant {
- hash: hash,
+ hash,
elem: NoElem(bucket, displacement),
};
}
// We can finish the search early if we hit any bucket
// with a lower distance to initial bucket than we've probed.
return InternalEntry::Vacant {
- hash: hash,
+ hash,
elem: NeqElem(full, probe_displacement),
};
}
#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub fn with_hasher(hash_builder: S) -> HashMap<K, V, S> {
HashMap {
- hash_builder: hash_builder,
+ hash_builder,
resize_policy: DefaultResizePolicy::new(),
table: RawTable::new(0),
}
let resize_policy = DefaultResizePolicy::new();
let raw_cap = resize_policy.raw_capacity(capacity);
HashMap {
- hash_builder: hash_builder,
- resize_policy: resize_policy,
+ hash_builder,
+ resize_policy,
table: RawTable::new(raw_cap),
}
}
InternalEntry::Occupied { elem } => {
Some(Occupied(OccupiedEntry {
key: Some(key),
- elem: elem,
+ elem,
}))
}
InternalEntry::Vacant { hash, elem } => {
Some(Vacant(VacantEntry {
- hash: hash,
- key: key,
- elem: elem,
+ hash,
+ key,
+ elem,
}))
}
InternalEntry::TableIsEmpty => None,
pub fn difference<'a>(&'a self, other: &'a HashSet<T, S>) -> Difference<'a, T, S> {
Difference {
iter: self.iter(),
- other: other,
+ other,
}
}
pub fn intersection<'a>(&'a self, other: &'a HashSet<T, S>) -> Intersection<'a, T, S> {
Intersection {
iter: self.iter(),
- other: other,
+ other,
}
}
let ib_index = ib_index & table.capacity_mask;
Bucket {
raw: table.raw_bucket_at(ib_index),
- table: table,
+ table,
}
}
pub fn first(table: M) -> Bucket<K, V, M> {
Bucket {
raw: table.raw_bucket_at(0),
- table: table,
+ table,
}
}
match self.next().peek() {
Full(bucket) => {
Ok(GapThenFull {
- gap: gap,
+ gap,
full: bucket,
})
}
// Replace the marker regardless of lifetime bounds on parameters.
IntoIter {
iter: RawBuckets {
- raw: raw,
- elems_left: elems_left,
+ raw,
+ elems_left,
marker: marker::PhantomData,
},
table: self,
// Replace the marker regardless of lifetime bounds on parameters.
Drain {
iter: RawBuckets {
- raw: raw,
- elems_left: elems_left,
+ raw,
+ elems_left,
marker: marker::PhantomData,
},
table: Shared::from(self),
buffer.set_len(cap);
inner.initializer().initialize(&mut buffer);
BufReader {
- inner: inner,
+ inner,
buf: buffer.into_boxed_slice(),
pos: 0,
cap: 0,
fn _new(kind: ErrorKind, error: Box<error::Error+Send+Sync>) -> Error {
Error {
repr: Repr::Custom(Box::new(Custom {
- kind: kind,
- error: error,
+ kind,
+ error,
}))
}
}
Lazy {
lock: Mutex::new(),
ptr: Cell::new(ptr::null_mut()),
- init: init
+ init,
}
}
let mut any_data = 0;
let mut any_vtable = 0;
let mut data = Data {
- f: f,
+ f,
};
let r = __rust_maybe_catch_panic(do_call::<F, R>,
let prefix = parse_prefix(self.as_os_str());
Components {
path: self.as_u8_slice(),
- prefix: prefix,
+ prefix,
has_physical_root: has_physical_root(self.as_u8_slice(), prefix),
front: State::Prefix,
back: State::Body,
impl FromInner<(imp::Process, imp::StdioPipes)> for Child {
fn from_inner((handle, io): (imp::Process, imp::StdioPipes)) -> Child {
Child {
- handle: handle,
+ handle,
stdin: io.stdin.map(ChildStdin::from_inner),
stdout: io.stdout.map(ChildStdout::from_inner),
stderr: io.stderr.map(ChildStderr::from_inner),
let status = self.wait()?;
Ok(Output {
- status: status,
- stdout: stdout,
- stderr: stderr,
+ status,
+ stdout,
+ stderr,
})
}
}
inner: inner.clone(),
};
let signal_token = SignalToken {
- inner: inner
+ inner,
};
(wait_token, signal_token)
}
let id = self.next_id.get();
self.next_id.set(id + 1);
Handle {
- id: id,
+ id,
selector: self.inner.get(),
next: ptr::null_mut(),
prev: ptr::null_mut(),
added: false,
- rx: rx,
+ rx,
packet: rx,
}
}
lock: Mutex::new(State {
disconnected: false,
blocker: NoneBlocked,
- cap: cap,
+ cap,
canceled: None,
queue: Queue {
head: ptr::null_mut(),
}
Ok(Dns {
- transaction_id: transaction_id,
- flags: flags,
- queries: queries,
- answers: answers,
+ transaction_id,
+ flags,
+ queries,
+ answers,
})
}
}
{
let mut cx = Context {
idx: 0,
- frames: frames,
+ frames,
};
let result_unwind = unsafe {
uw::_Unwind_Backtrace(trace_fn,
}
Ok(SocketAddr {
- addr: addr,
- len: len,
+ addr,
+ len,
})
}
impl Handle {
pub fn new(raw: mx_handle_t) -> Handle {
Handle {
- raw: raw,
+ raw,
}
}
let program = os2c(program, &mut saw_nul);
Command {
argv: vec![program.as_ptr(), ptr::null()],
- program: program,
+ program,
args: Vec::new(),
env: None,
envp: None,
cwd: None,
uid: None,
gid: None,
- saw_nul: saw_nul,
+ saw_nul,
closures: Vec::new(),
stdin: None,
stdout: None,
impl<F> Weak<F> {
pub const fn new(name: &'static str) -> Weak<F> {
Weak {
- name: name,
+ name,
addr: AtomicUsize::new(1),
_marker: marker::PhantomData,
}
let backtrace_context = BacktraceContext {
handle: process,
- SymCleanup: SymCleanup,
- dbghelp: dbghelp,
+ SymCleanup,
+ dbghelp,
};
// Initialize this process's symbols
};
overlapped.hEvent = event.raw();
Ok(AsyncPipe {
- pipe: pipe,
- overlapped: overlapped,
- event: event,
- dst: dst,
+ pipe,
+ overlapped,
+ event,
+ dst,
state: State::NotReading,
})
}
unsafe fn register_dtor(key: Key, dtor: Dtor) {
let mut node = Box::new(Node {
- key: key,
- dtor: dtor,
+ key,
+ dtor,
next: ptr::null_mut(),
});
pub fn set(stack_guard: Option<usize>, thread: Thread) {
THREAD_INFO.with(|c| assert!(c.borrow().is_none()));
THREAD_INFO.with(move |c| *c.borrow_mut() = Some(ThreadInfo{
- stack_guard: stack_guard,
- thread: thread,
+ stack_guard,
+ thread,
}));
}
pub const fn new(dtor: Option<unsafe extern fn(*mut u8)>) -> StaticKey {
StaticKey {
key: atomic::AtomicUsize::new(0),
- dtor: dtor
+ dtor,
}
}
pub const unsafe fn new(inner: unsafe fn() -> Option<&'static UnsafeCell<Option<T>>>,
init: fn() -> T) -> LocalKey<T> {
LocalKey {
- inner: inner,
- init: init,
+ inner,
+ init,
}
}
}
debug_assert!(nanos < NANOS_PER_SEC);
Some(Duration {
- secs: secs,
- nanos: nanos,
+ secs,
+ nanos,
})
} else {
None
.and_then(|s| s.checked_add(extra_secs)) {
debug_assert!(nanos < NANOS_PER_SEC);
Some(Duration {
- secs: secs,
- nanos: nanos,
+ secs,
+ nanos,
})
} else {
None
where I: Iterator<Item = char>
{
Utf16Encoder {
- chars: chars,
+ chars,
extra: 0,
}
}
pub fn crate_root(span: Span) -> Self {
PathSegment {
identifier: Ident { ctxt: span.ctxt, ..keywords::CrateRoot.ident() },
- span: span,
+ span,
parameters: None,
}
}
let infer_ty = P(Ty {
id: DUMMY_NODE_ID,
node: TyKind::ImplicitSelf,
- span: span,
+ span,
});
let arg = |mutbl, ty| Arg {
pat: P(Pat {
id: DUMMY_NODE_ID,
node: PatKind::Ident(BindingMode::ByValue(mutbl), eself_ident, None),
- span: span,
+ span,
}),
- ty: ty,
+ ty,
id: DUMMY_NODE_ID,
};
match eself.node {
SelfKind::Region(lt, mutbl) => arg(Mutability::Immutable, P(Ty {
id: DUMMY_NODE_ID,
node: TyKind::Rptr(lt, MutTy { ty: infer_ty, mutbl: mutbl }),
- span: span,
+ span,
})),
}
}
PolyTraitRef {
bound_lifetimes: lifetimes,
trait_ref: TraitRef { path: path, ref_id: DUMMY_NODE_ID },
- span: span,
+ span,
}
}
}
/// Returns an inner attribute with the given value and span.
pub fn mk_spanned_attr_inner(sp: Span, id: AttrId, item: MetaItem) -> Attribute {
Attribute {
- id: id,
+ id,
style: ast::AttrStyle::Inner,
path: ast::Path::from_ident(item.span, ast::Ident::with_empty_ctxt(item.name)),
tokens: item.node.tokens(item.span),
/// Returns an outer attribute with the given value and span.
pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: MetaItem) -> Attribute {
Attribute {
- id: id,
+ id,
style: ast::AttrStyle::Outer,
path: ast::Path::from_ident(item.span, ast::Ident::with_empty_ctxt(item.name)),
tokens: item.node.tokens(item.span),
let style = doc_comment_style(&text.as_str());
let lit = respan(span, LitKind::Str(text, ast::StrStyle::Cooked));
Attribute {
- id: id,
- style: style,
+ id,
+ style,
path: ast::Path::from_ident(span, ast::Ident::from_str("doc")),
tokens: MetaItemKind::NameValue(lit).tokens(span),
is_sugared_doc: true,
- span: span,
+ span,
}
}
match (since, reason) {
(Some(since), Some(reason)) => {
rustc_depr = Some(RustcDeprecation {
- since: since,
- reason: reason,
+ since,
+ reason,
})
}
(None, _) => {
(Some(feature), reason, Some(issue)) => {
stab = Some(Stability {
level: Unstable {
- reason: reason,
+ reason,
issue: {
if let Ok(issue) = issue.as_str().parse() {
issue
}
}
},
- feature: feature,
+ feature,
rustc_depr: None,
})
}
(Some(feature), Some(since)) => {
stab = Some(Stability {
level: Stable {
- since: since,
+ since,
},
- feature: feature,
+ feature,
rustc_depr: None,
})
}
CodeMap {
files: RefCell::new(Vec::new()),
file_loader: Box::new(RealFileLoader),
- path_mapping: path_mapping,
+ path_mapping,
}
}
-> CodeMap {
CodeMap {
files: RefCell::new(Vec::new()),
- file_loader: file_loader,
- path_mapping: path_mapping,
+ file_loader,
+ path_mapping,
}
}
let filemap = Rc::new(FileMap {
name: filename,
- name_was_remapped: name_was_remapped,
- crate_of_origin: crate_of_origin,
+ name_was_remapped,
+ crate_of_origin,
src: None,
- src_hash: src_hash,
+ src_hash,
external_src: RefCell::new(ExternalSource::AbsentOk),
- start_pos: start_pos,
- end_pos: end_pos,
+ start_pos,
+ end_pos,
lines: RefCell::new(file_local_lines),
multibyte_chars: RefCell::new(file_local_multibyte_chars),
});
assert!(chpos >= linechpos);
Loc {
file: f,
- line: line,
+ line,
col: chpos - linechpos,
}
}
let line_len = lo.file.get_line(line_index)
.map(|s| s.chars().count())
.unwrap_or(0);
- lines.push(LineInfo { line_index: line_index,
- start_col: start_col,
+ lines.push(LineInfo { line_index,
+ start_col,
end_col: CharPos::from_usize(line_len) });
start_col = CharPos::from_usize(0);
}
// For the last line, it extends from `start_col` to `hi.col`:
lines.push(LineInfo { line_index: hi.line - 1,
- start_col: start_col,
+ start_col,
end_col: hi.col });
Ok(FileLines {file: lo.file, lines: lines})
return Err(SpanSnippetError::MalformedForCodemap(
MalformedCodemapPositions {
name: local_begin.fm.name.clone(),
- source_len: source_len,
+ source_len,
begin_pos: local_begin.pos,
end_pos: local_end.pos,
}));
pub fn new(mapping: Vec<(String, String)>) -> FilePathMapping {
FilePathMapping {
- mapping: mapping
+ mapping,
}
}
let features;
{
let mut strip_unconfigured = StripUnconfigured {
- should_test: should_test,
- sess: sess,
+ should_test,
+ sess,
features: None,
};
self.process_cfg_attr(ast::Attribute {
id: attr::mk_attr_id(),
style: attr.style,
- path: path,
- tokens: tokens,
+ path,
+ tokens,
is_sugared_doc: false,
- span: span,
+ span,
})
} else {
None
// Add the error to the map.
with_registered_diagnostics(|diagnostics| {
let info = ErrorInfo {
- description: description,
+ description,
use_site: None
};
if diagnostics.insert(code.name, info).is_some() {
expr,
),
vis: ast::Visibility::Public,
- span: span,
+ span,
tokens: None,
})
]))
resolver: &'a mut Resolver)
-> ExtCtxt<'a> {
ExtCtxt {
- parse_sess: parse_sess,
- ecfg: ecfg,
+ parse_sess,
+ ecfg,
crate_root: None,
- resolver: resolver,
+ resolver,
resolve_err_count: 0,
current_expansion: ExpansionData {
mark: Mark::root(),
path.segments.push(ast::PathSegment {
identifier: ident.node,
span: ident.span,
- parameters: parameters,
+ parameters,
});
(ast::QSelf {
fn ty_mt(&self, ty: P<ast::Ty>, mutbl: ast::Mutability) -> ast::MutTy {
ast::MutTy {
- ty: ty,
- mutbl: mutbl
+ ty,
+ mutbl,
}
}
fn ty(&self, span: Span, ty: ast::TyKind) -> P<ast::Ty> {
P(ast::Ty {
id: ast::DUMMY_NODE_ID,
- span: span,
+ span,
node: ty
})
}
ident: id,
id: ast::DUMMY_NODE_ID,
attrs: attrs.into(),
- bounds: bounds,
- default: default,
- span: span
+ bounds,
+ default,
+ span,
}
}
fn trait_ref(&self, path: ast::Path) -> ast::TraitRef {
ast::TraitRef {
- path: path,
+ path,
ref_id: ast::DUMMY_NODE_ID,
}
}
ast::PolyTraitRef {
bound_lifetimes: Vec::new(),
trait_ref: self.trait_ref(path),
- span: span,
+ span,
}
}
ast::LifetimeDef {
attrs: attrs.into(),
lifetime: self.lifetime(span, ident),
- bounds: bounds
+ bounds,
}
}
self.pat_ident(sp, ident)
};
let local = P(ast::Local {
- pat: pat,
+ pat,
ty: None,
init: Some(ex),
id: ast::DUMMY_NODE_ID,
self.pat_ident(sp, ident)
};
let local = P(ast::Local {
- pat: pat,
+ pat,
ty: Some(typ),
init: Some(ex),
id: ast::DUMMY_NODE_ID,
ty: Some(ty),
init: None,
id: ast::DUMMY_NODE_ID,
- span: span,
+ span,
attrs: ast::ThinVec::new(),
});
ast::Stmt {
id: ast::DUMMY_NODE_ID,
node: ast::StmtKind::Local(local),
- span: span,
+ span,
}
}
}
fn block(&self, span: Span, stmts: Vec<ast::Stmt>) -> P<ast::Block> {
P(ast::Block {
- stmts: stmts,
+ stmts,
id: ast::DUMMY_NODE_ID,
rules: BlockCheckMode::Default,
- span: span,
+ span,
})
}
fn expr(&self, span: Span, node: ast::ExprKind) -> P<ast::Expr> {
P(ast::Expr {
id: ast::DUMMY_NODE_ID,
- node: node,
- span: span,
+ node,
+ span,
attrs: ast::ThinVec::new(),
})
}
ast::Field {
ident: respan(span, name),
expr: e,
- span: span,
+ span,
is_shorthand: false,
attrs: ast::ThinVec::new(),
}
fn arm(&self, _span: Span, pats: Vec<P<ast::Pat>>, expr: P<ast::Expr>) -> ast::Arm {
ast::Arm {
attrs: vec![],
- pats: pats,
+ pats,
guard: None,
body: expr
}
fn arg(&self, span: Span, ident: ast::Ident, ty: P<ast::Ty>) -> ast::Arg {
let arg_pat = self.pat_ident(span, ident);
ast::Arg {
- ty: ty,
+ ty,
pat: arg_pat,
id: ast::DUMMY_NODE_ID
}
// FIXME unused self
fn fn_decl(&self, inputs: Vec<ast::Arg>, output: P<ast::Ty>) -> P<ast::FnDecl> {
P(ast::FnDecl {
- inputs: inputs,
+ inputs,
output: ast::FunctionRetTy::Ty(output),
variadic: false
})
// Rust coding conventions
P(ast::Item {
ident: name,
- attrs: attrs,
+ attrs,
id: ast::DUMMY_NODE_ID,
- node: node,
+ node,
vis: ast::Visibility::Inherited,
- span: span,
+ span,
tokens: None,
})
}
let fields: Vec<_> = tys.into_iter().map(|ty| {
ast::StructField {
span: ty.span,
- ty: ty,
+ ty,
ident: None,
vis: ast::Visibility::Inherited,
attrs: Vec::new(),
respan(span,
ast::Variant_ {
- name: name,
+ name,
attrs: Vec::new(),
data: vdata,
disr_expr: None,
attrs,
ast::ItemKind::Mod(ast::Mod {
inner: inner_span,
- items: items,
+ items,
})
)
}
ident: keywords::Invalid.ident(),
attrs: vec![],
node: ast::ItemKind::Use(vp),
- vis: vis,
+ vis,
span: sp,
tokens: None,
})
kind: InvocationKind::Derive { path: path.clone(), item: item },
expansion_kind: invoc.expansion_kind,
expansion_data: ExpansionData {
- mark: mark,
+ mark,
..invoc.expansion_data.clone()
},
});
let pretty_name = Symbol::intern(&format!("derive({})", path));
let span = path.span;
let attr = ast::Attribute {
- path: path, tokens: TokenStream::empty(), span: span,
+ path, span,
+ tokens: TokenStream::empty(),
// irrelevant:
id: ast::AttrId(0), style: ast::AttrStyle::Outer, is_sugared_doc: false,
};
fn collect(&mut self, expansion_kind: ExpansionKind, kind: InvocationKind) -> Expansion {
let mark = Mark::fresh(self.cx.current_expansion.mark);
self.invocations.push(Invocation {
- kind: kind,
- expansion_kind: expansion_kind,
+ kind,
+ expansion_kind,
expansion_data: ExpansionData {
- mark: mark,
+ mark,
depth: self.cx.current_expansion.depth + 1,
..self.cx.current_expansion.clone()
},
item.and_then(|item| match item.node {
ItemKind::Mac(mac) => {
self.collect(ExpansionKind::Items, InvocationKind::Bang {
- mac: mac,
+ mac,
ident: Some(item.ident),
span: item.span,
}).make_items()
impl<'feat> ExpansionConfig<'feat> {
pub fn default(crate_name: String) -> ExpansionConfig<'static> {
ExpansionConfig {
- crate_name: crate_name,
+ crate_name,
features: None,
recursion_limit: 1024,
trace_mac: false,
let vis = ast::Visibility::Inherited;
let span = DUMMY_SP;
let expr_placeholder = || P(ast::Expr {
- id: id, span: span,
+ id, span,
attrs: ast::ThinVec::new(),
node: ast::ExprKind::Mac(mac_placeholder()),
});
ExpansionKind::Expr => Expansion::Expr(expr_placeholder()),
ExpansionKind::OptExpr => Expansion::OptExpr(Some(expr_placeholder())),
ExpansionKind::Items => Expansion::Items(SmallVector::one(P(ast::Item {
- id: id, span: span, ident: ident, vis: vis, attrs: attrs,
+ id, span, ident, vis, attrs,
node: ast::ItemKind::Mac(mac_placeholder()),
tokens: None,
}))),
ExpansionKind::TraitItems => Expansion::TraitItems(SmallVector::one(ast::TraitItem {
- id: id, span: span, ident: ident, attrs: attrs,
+ id, span, ident, attrs,
node: ast::TraitItemKind::Macro(mac_placeholder()),
tokens: None,
})),
ExpansionKind::ImplItems => Expansion::ImplItems(SmallVector::one(ast::ImplItem {
- id: id, span: span, ident: ident, vis: vis, attrs: attrs,
+ id, span, ident, vis, attrs,
node: ast::ImplItemKind::Macro(mac_placeholder()),
defaultness: ast::Defaultness::Final,
tokens: None,
})),
ExpansionKind::Pat => Expansion::Pat(P(ast::Pat {
- id: id, span: span, node: ast::PatKind::Mac(mac_placeholder()),
+ id, span, node: ast::PatKind::Mac(mac_placeholder()),
})),
ExpansionKind::Ty => Expansion::Ty(P(ast::Ty {
- id: id, span: span, node: ast::TyKind::Mac(mac_placeholder()),
+ id, span, node: ast::TyKind::Mac(mac_placeholder()),
})),
ExpansionKind::Stmts => Expansion::Stmts(SmallVector::one({
let mac = P((mac_placeholder(), ast::MacStmtStyle::Braces, ast::ThinVec::new()));
- ast::Stmt { id: id, span: span, node: ast::StmtKind::Mac(mac) }
+ ast::Stmt { id, span, node: ast::StmtKind::Mac(mac) }
})),
}
}
impl<'a, 'b> PlaceholderExpander<'a, 'b> {
pub fn new(cx: &'a mut ExtCtxt<'b>, monotonic: bool) -> Self {
PlaceholderExpander {
- cx: cx,
+ cx,
expansions: HashMap::new(),
- monotonic: monotonic,
+ monotonic,
}
}
}
TokenTree::Token(span, token::CloseDelim(delim)) => {
let tree = TokenTree::Delimited(span, Delimited {
- delim: delim,
+ delim,
tts: result.into_iter().map(TokenStream::from).collect::<TokenStream>().into(),
});
result = results.pop().unwrap();
sep: None,
idx: 0,
up: None,
- matches: matches,
+ matches,
match_lo: 0,
match_cur: 0,
match_hi: match_idx_hi,
stack: vec![],
sep: seq.separator.clone(),
idx: 0,
- matches: matches,
+ matches,
match_lo: item.match_cur,
match_cur: item.match_cur,
match_hi: item.match_cur + seq.num_captures,
let idx = item.idx;
item.stack.push(MatcherTtFrame {
elts: lower_elts,
- idx: idx,
+ idx,
});
item.idx = 0;
cur_items.push(item);
let expander: Box<_> = Box::new(MacroRulesMacroExpander {
name: def.ident,
- lhses: lhses,
- rhses: rhses,
- valid: valid,
+ lhses,
+ rhses,
+ valid,
});
if body.legacy {
let name_captures = macro_parser::count_names(&sequence);
TokenTree::Sequence(span, Rc::new(SequenceRepetition {
tts: sequence,
- separator: separator,
- op: op,
+ separator,
+ op,
num_captures: name_captures,
}))
}
unstable: UnstableFeatures) {
maybe_stage_features(&sess.span_diagnostic, krate, unstable);
let ctx = Context {
- features: features,
+ features,
parse_sess: sess,
- plugin_attributes: plugin_attributes,
+ plugin_attributes,
};
visit::walk_crate(&mut PostExpansionVisitor { context: &ctx }, krate);
}
TyKind::BareFn(f) => {
TyKind::BareFn(f.map(|BareFnTy {lifetimes, unsafety, abi, decl}| BareFnTy {
lifetimes: fld.fold_lifetime_defs(lifetimes),
- unsafety: unsafety,
- abi: abi,
+ unsafety,
+ abi,
decl: fld.fold_fn_decl(decl)
}))
}
let qself = qself.map(|QSelf { ty, position }| {
QSelf {
ty: fld.fold_ty(ty),
- position: position
+ position,
}
});
TyKind::Path(qself, fld.fold_path(path))
pub fn noop_fold_foreign_mod<T: Folder>(ForeignMod {abi, items}: ForeignMod,
fld: &mut T) -> ForeignMod {
ForeignMod {
- abi: abi,
+ abi,
items: items.move_map(|x| fld.fold_foreign_item(x)),
}
}
FunctionRetTy::Ty(ty) => FunctionRetTy::Ty(fld.fold_ty(ty)),
FunctionRetTy::Default(span) => FunctionRetTy::Default(fld.new_span(span)),
},
- variadic: variadic
+ variadic,
})
}
predicates: predicates.move_map(|predicate| {
fld.fold_where_predicate(predicate)
}),
- span: span,
+ span,
}
}
pub fn noop_fold_mt<T: Folder>(MutTy {ty, mutbl}: MutTy, folder: &mut T) -> MutTy {
MutTy {
ty: folder.fold_ty(ty),
- mutbl: mutbl,
+ mutbl,
}
}
b.map(|Block {id, stmts, rules, span}| Block {
id: folder.new_id(id),
stmts: stmts.move_flat_map(|s| folder.fold_stmt(s).into_iter()),
- rules: rules,
+ rules,
span: folder.new_span(span),
})
}
folder: &mut T) -> Crate {
let mut items = folder.fold_item(P(ast::Item {
ident: keywords::Invalid.ident(),
- attrs: attrs,
+ attrs,
id: ast::DUMMY_NODE_ID,
vis: ast::Visibility::Public,
- span: span,
+ span,
node: ast::ItemKind::Mod(module),
tokens: None,
})).into_iter();
};
Crate {
- module: module,
- attrs: attrs,
- span: span,
+ module,
+ attrs,
+ span,
}
}
// FIXME: if this is replaced with a call to `folder.fold_tts` it causes
// an ICE during resolve... odd!
- tokens: tokens,
+ tokens,
}
}
let qself = qself.map(|QSelf { ty, position }| {
QSelf {
ty: folder.fold_ty(ty),
- position: position
+ position,
}
});
ExprKind::Path(qself, folder.fold_path(path))
code_map: Rc<CodeMap>) -> JsonEmitter {
JsonEmitter {
dst: Box::new(io::stderr()),
- registry: registry,
+ registry,
cm: code_map,
}
}
registry: Option<Registry>,
code_map: Rc<CodeMap>) -> JsonEmitter {
JsonEmitter {
- dst: dst,
- registry: registry,
+ dst,
+ registry,
cm: code_map,
}
}
Box::new(DiagnosticSpanMacroExpansion {
span: call_site,
macro_decl_name: bt.macro_decl_name,
- def_site_span: def_site_span,
+ def_site_span,
})
});
DiagnosticSpan {
line_end: end.line,
column_start: start.col.0 + 1,
column_end: end.col.0 + 1,
- is_primary: is_primary,
+ is_primary,
text: DiagnosticSpanLine::from_span(span, je),
suggested_replacement: suggestion.cloned(),
expansion: backtrace_step,
- label: label,
+ label,
}
}
DiagnosticCode {
code: s,
- explanation: explanation,
+ explanation,
}
})
}
Ok(ast::Attribute {
id: attr::mk_attr_id(),
- style: style,
- path: path,
- tokens: tokens,
+ style,
+ path,
+ tokens,
is_sugared_doc: false,
- span: span,
+ span,
})
}
if !lines.is_empty() {
comments.push(Comment {
style: if code_to_the_left { Trailing } else { Isolated },
- lines: lines,
+ lines,
pos: p,
});
}
}
debug!("<<< block comment");
comments.push(Comment {
- style: style,
- lines: lines,
+ style,
+ lines,
pos: p,
});
}
let source_text = (*filemap.src.as_ref().unwrap()).clone();
StringReader {
- sess: sess,
+ sess,
next_pos: filemap.start_pos,
pos: filemap.start_pos,
col: CharPos(0),
ch: Some('\n'),
- filemap: filemap,
+ filemap,
terminator: None,
save_new_lines_and_multibyte: true,
// dummy values; not read
peek_tok: token::Eof,
peek_span: syntax_pos::DUMMY_SP,
- source_text: source_text,
+ source_text,
fatal_errs: Vec::new(),
token: token::Eof,
span: syntax_pos::DUMMY_SP,
};
Some(TokenAndSpan {
- tok: tok,
+ tok,
sp: self.mk_sp(start_bpos, self.pos),
})
})
};
Some(TokenAndSpan {
- tok: tok,
+ tok,
sp: self.mk_sp(start_bpos, self.pos),
})
})
}
Ok(TokenTree::Delimited(span, Delimited {
- delim: delim,
+ delim,
tts: tts.into(),
}))
},
config: HashSet::new(),
missing_fragment_specifiers: RefCell::new(HashSet::new()),
included_mod_stack: RefCell::new(vec![]),
- code_map: code_map
+ code_map,
}
}
/// Create a placeholder argument.
fn dummy_arg(span: Span) -> Arg {
let spanned = Spanned {
- span: span,
+ span,
node: keywords::Invalid.ident()
};
let pat = P(Pat {
id: ast::DUMMY_NODE_ID,
node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), spanned, None),
- span: span
+ span,
});
let ty = Ty {
node: TyKind::Err,
- span: span,
+ span,
id: ast::DUMMY_NODE_ID
};
Arg { ty: P(ty), pat: pat, id: ast::DUMMY_NODE_ID }
desugar_doc_comments: bool)
-> Self {
let mut parser = Parser {
- sess: sess,
+ sess,
token: token::Underscore,
span: syntax_pos::DUMMY_SP,
prev_span: syntax_pos::DUMMY_SP,
prev_token_kind: PrevTokenKind::Other,
restrictions: Restrictions::empty(),
obsolete_set: HashSet::new(),
- recurse_into_file_modules: recurse_into_file_modules,
+ recurse_into_file_modules,
directory: Directory { path: PathBuf::new(), ownership: DirectoryOwnership::Owned },
root_module_name: None,
expected_tokens: Vec::new(),
}),
stack: Vec::new(),
},
- desugar_doc_comments: desugar_doc_comments,
+ desugar_doc_comments,
cfg_mods: true,
};
let (inputs, variadic) = self.parse_fn_args(false, true)?;
let ret_ty = self.parse_ret_ty()?;
let decl = P(FnDecl {
- inputs: inputs,
+ inputs,
output: ret_ty,
- variadic: variadic
+ variadic,
});
Ok(TyKind::BareFn(P(BareFnTy {
- abi: abi,
- unsafety: unsafety,
+ abi,
+ unsafety,
lifetimes: lifetime_defs,
- decl: decl
+ decl,
})))
}
generics.where_clause = self.parse_where_clause()?;
let sig = ast::MethodSig {
- unsafety: unsafety,
- constness: constness,
+ unsafety,
+ constness,
decl: d,
- generics: generics,
- abi: abi,
+ generics,
+ abi,
};
let body = match self.token {
Ok(TraitItem {
id: ast::DUMMY_NODE_ID,
ident: name,
- attrs: attrs,
- node: node,
+ attrs,
+ node,
span: lo.to(self.prev_span),
tokens: None,
})
Ok(Arg {
ty: t,
- pat: pat,
+ pat,
id: ast::DUMMY_NODE_ID,
})
}
};
Ok(Arg {
ty: t,
- pat: pat,
+ pat,
id: ast::DUMMY_NODE_ID
})
}
Ok(ast::Field {
ident: respan(lo.to(hi), fieldname),
span: lo.to(expr.span),
- expr: expr,
- is_shorthand: is_shorthand,
+ expr,
+ is_shorthand,
attrs: attrs.into(),
})
}
pub fn mk_expr(&mut self, span: Span, node: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> {
P(Expr {
id: ast::DUMMY_NODE_ID,
- node: node,
- span: span,
+ node,
+ span,
attrs: attrs.into(),
})
}
P(Expr {
id: ast::DUMMY_NODE_ID,
node: ExprKind::Mac(codemap::Spanned {node: m, span: span}),
- span: span,
- attrs: attrs,
+ span,
+ attrs,
})
}
id: ast::DUMMY_NODE_ID,
node: ExprKind::Lit(lv_lit),
span: *span,
- attrs: attrs,
+ attrs,
})
}
}
Ok(ast::Arm {
- attrs: attrs,
- pats: pats,
- guard: guard,
+ attrs,
+ pats,
+ guard,
body: expr,
})
}
node: ast::FieldPat {
ident: fieldname,
pat: subpat,
- is_shorthand: is_shorthand,
+ is_shorthand,
attrs: attrs.into(),
}
});
};
let init = self.parse_initializer()?;
Ok(P(ast::Local {
- ty: ty,
- pat: pat,
- init: init,
+ ty,
+ pat,
+ init,
id: ast::DUMMY_NODE_ID,
span: lo.to(self.prev_span),
- attrs: attrs,
+ attrs,
}))
}
Ok(StructField {
span: lo.to(self.prev_span),
ident: Some(name),
- vis: vis,
+ vis,
id: ast::DUMMY_NODE_ID,
- ty: ty,
- attrs: attrs,
+ ty,
+ attrs,
})
}
Stmt {
id: ast::DUMMY_NODE_ID,
span: lo.to(hi),
- node: node,
+ node,
}
} else {
// if it has a special ident, it's definitely an item
let span = lo.to(hi);
Stmt {
id: ast::DUMMY_NODE_ID,
- span: span,
+ span,
node: StmtKind::Item({
self.mk_item(
span, id /*id is good here*/,
}
Ok(P(ast::Block {
- stmts: stmts,
+ stmts,
id: ast::DUMMY_NODE_ID,
rules: s,
span: lo.to(self.prev_span),
Ok(TyParam {
attrs: preceding_attrs.into(),
- ident: ident,
+ ident,
id: ast::DUMMY_NODE_ID,
- bounds: bounds,
- default: default,
- span: span,
+ bounds,
+ default,
+ span,
})
}
};
lifetime_defs.push(LifetimeDef {
attrs: attrs.into(),
- lifetime: lifetime,
- bounds: bounds,
+ lifetime,
+ bounds,
});
if seen_ty_param {
self.span_err(self.prev_span,
self.expect_gt()?;
Ok(ast::Generics {
lifetimes: lifetime_defs,
- ty_params: ty_params,
+ ty_params,
where_clause: WhereClause {
id: ast::DUMMY_NODE_ID,
predicates: Vec::new(),
let ty = self.parse_ty()?;
bindings.push(TypeBinding {
id: ast::DUMMY_NODE_ID,
- ident: ident,
- ty: ty,
+ ident,
+ ty,
span: lo.to(self.prev_span),
});
seen_binding = true;
where_clause.predicates.push(ast::WherePredicate::RegionPredicate(
ast::WhereRegionPredicate {
span: lo.to(self.prev_span),
- lifetime: lifetime,
- bounds: bounds,
+ lifetime,
+ bounds,
}
));
} else if self.check_type() {
span: lo.to(self.prev_span),
bound_lifetimes: lifetime_defs,
bounded_ty: ty,
- bounds: bounds,
+ bounds,
}
));
// FIXME: Decide what should be used here, `=` or `==`.
ast::WhereEqPredicate {
span: lo.to(self.prev_span),
lhs_ty: ty,
- rhs_ty: rhs_ty,
+ rhs_ty,
id: ast::DUMMY_NODE_ID,
}
));
Ok(P(FnDecl {
inputs: args,
output: ret_ty,
- variadic: variadic
+ variadic,
}))
}
Ok(P(FnDecl {
inputs: inputs_captures,
- output: output,
+ output,
variadic: false
}))
}
fn mk_item(&mut self, span: Span, ident: Ident, node: ItemKind, vis: Visibility,
attrs: Vec<Attribute>) -> P<Item> {
P(Item {
- ident: ident,
- attrs: attrs,
+ ident,
+ attrs,
id: ast::DUMMY_NODE_ID,
- node: node,
- vis: vis,
- span: span,
+ node,
+ vis,
+ span,
tokens: None,
})
}
id: ast::DUMMY_NODE_ID,
span: lo.to(self.prev_span),
ident: name,
- vis: vis,
- defaultness: defaultness,
- attrs: attrs,
- node: node,
+ vis,
+ defaultness,
+ attrs,
+ node,
tokens: None,
})
}
*at_end = true;
let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
Ok((ident, inner_attrs, ast::ImplItemKind::Method(ast::MethodSig {
- generics: generics,
- abi: abi,
- unsafety: unsafety,
- constness: constness,
- decl: decl
+ generics,
+ abi,
+ unsafety,
+ constness,
+ decl,
}, body)))
}
}
let ty = p.parse_ty()?;
Ok(StructField {
span: lo.to(p.span),
- vis: vis,
+ vis,
ident: None,
id: ast::DUMMY_NODE_ID,
- ty: ty,
- attrs: attrs,
+ ty,
+ attrs,
})
})?;
Ok(ast::Mod {
inner: inner_lo.to(hi),
- items: items
+ items,
})
}
ModulePath {
name: mod_name,
path_exists: default_exists || secondary_exists,
- result: result,
+ result,
}
}
Some("mod.rs") => DirectoryOwnership::Owned,
_ => DirectoryOwnership::UnownedViaMod(true),
},
- path: path,
+ path,
warn: false,
});
}
let hi = self.span;
self.expect(&token::Semi)?;
Ok(ast::ForeignItem {
- ident: ident,
- attrs: attrs,
+ ident,
+ attrs,
node: ForeignItemKind::Fn(decl, generics),
id: ast::DUMMY_NODE_ID,
span: lo.to(hi),
- vis: vis
+ vis,
})
}
let hi = self.span;
self.expect(&token::Semi)?;
Ok(ForeignItem {
- ident: ident,
- attrs: attrs,
+ ident,
+ attrs,
node: ForeignItemKind::Static(ty, mutbl),
id: ast::DUMMY_NODE_ID,
span: lo.to(hi),
- vis: vis
+ vis,
})
}
let prev_span = self.prev_span;
let m = ast::ForeignMod {
- abi: abi,
+ abi,
items: foreign_items
};
let invalid = keywords::Invalid.ident();
name: ident,
attrs: variant_attrs,
data: struct_def,
- disr_expr: disr_expr,
+ disr_expr,
};
variants.push(respan(vlo.to(self.prev_span), vr));
let rename = this.parse_rename()?;
let node = ast::PathListItem_ {
name: ident,
- rename: rename,
+ rename,
id: ast::DUMMY_NODE_ID
};
Ok(respan(lo.to(this.prev_span), node))
let n: usize = 55 * linewidth;
debug!("mk_printer {}", linewidth);
Printer {
- out: out,
+ out,
buf_len: n,
margin: linewidth as isize,
space: linewidth as isize,
literals: vec![].into_iter().peekable(),
cur_cmnt: 0,
boxes: Vec::new(),
- ann: ann,
+ ann,
}
}
State {
s: pp::mk_printer(out, DEFAULT_COLUMNS),
cm: Some(cm),
- comments: comments,
+ comments,
literals: literals.unwrap_or_default().into_iter().peekable(),
cur_cmnt: 0,
boxes: Vec::new(),
- ann: ann,
+ ann,
}
}
}
None => return
};
let mut v = ShowSpanVisitor {
- span_diagnostic: span_diagnostic,
- mode: mode,
+ span_diagnostic,
+ mode,
};
visit::walk_crate(&mut v, krate);
}
tokens: TokenStream::empty(),
id: attr::mk_attr_id(),
is_sugared_doc: false,
- span: span,
+ span,
}],
vis: ast::Visibility::Inherited,
node: ast::ItemKind::Use(P(codemap::dummy_spanned(ast::ViewPathGlob(ast::Path {
segments: ["{{root}}", name, "prelude", "v1"].into_iter().map(|name| {
ast::PathSegment::from_ident(ast::Ident::from_str(name), DUMMY_SP)
}).collect(),
- span: span,
+ span,
})))),
id: ast::DUMMY_NODE_ID,
ident: keywords::Invalid.ident(),
- span: span,
+ span,
tokens: None,
}));
allow_dead_code_item);
ast::Item {
- id: id,
- ident: ident,
+ id,
+ ident,
attrs: attrs.into_iter()
.filter(|attr| {
!attr.check_name("main") && !attr.check_name("start")
})
.chain(iter::once(allow_dead_code))
.collect(),
- node: node,
- vis: vis,
- span: span,
- tokens: tokens,
+ node,
+ vis,
+ span,
+ tokens,
}
}),
EntryPointType::None |
let reexport_mod = ast::Mod {
inner: DUMMY_SP,
- items: items,
+ items,
};
let sym = Ident::with_empty_ctxt(Symbol::gensym("__test_reexports"));
let mark = Mark::fresh(Mark::root());
let mut cx: TestCtxt = TestCtxt {
- sess: sess,
+ sess,
span_diagnostic: sd,
ext_cx: ExtCtxt::new(sess, ExpansionConfig::default("test".to_string()), resolver),
path: Vec::new(),
testfns: Vec::new(),
- reexport_test_harness_main: reexport_test_harness_main,
+ reexport_test_harness_main,
is_test_crate: is_test_crate(&krate),
toplevel_reexport: None,
ctxt: SyntaxContext::empty().apply_mark(mark),
});
TestHarnessGenerator {
- cx: cx,
+ cx,
tests: Vec::new(),
tested_submods: Vec::new(),
}.fold_crate(krate)
};
P(ast::Item {
id: ast::DUMMY_NODE_ID,
- ident: ident,
+ ident,
node: vi,
attrs: vec![],
- vis: vis,
+ vis,
span: sp,
tokens: None,
})
outputs.push(ast::InlineAsmOutput {
constraint: output.unwrap_or(constraint),
expr: out,
- is_rw: is_rw,
- is_indirect: is_indirect,
+ is_rw,
+ is_indirect,
});
}
}
MacEager::expr(P(ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::InlineAsm(P(ast::InlineAsm {
- asm: asm,
+ asm,
asm_str_style: asm_str_style.unwrap(),
- outputs: outputs,
- inputs: inputs,
+ outputs,
+ inputs,
clobbers: clobs,
- volatile: volatile,
- alignstack: alignstack,
- dialect: dialect,
+ volatile,
+ alignstack,
+ dialect,
ctxt: cx.backtrace(),
})),
span: sp,
let path = Path::new(v);
let trait_def = TraitDef {
- span: span,
+ span,
attributes: Vec::new(),
- path: path,
+ path,
additional_bounds: Vec::new(),
generics: LifetimeBounds::empty(),
is_unsafe: false,
let inline = cx.meta_word(span, Symbol::intern("inline"));
let attrs = vec![cx.attribute(span, inline)];
let trait_def = TraitDef {
- span: span,
+ span,
attributes: Vec::new(),
path: path_std!(cx, core::clone::Clone),
additional_bounds: bounds,
let doc = cx.meta_list(span, Symbol::intern("doc"), vec![hidden]);
let attrs = vec![cx.attribute(span, inline), cx.attribute(span, doc)];
let trait_def = TraitDef {
- span: span,
+ span,
attributes: Vec::new(),
path: path_std!(cx, core::cmp::Eq),
additional_bounds: Vec::new(),
let inline = cx.meta_word(span, Symbol::intern("inline"));
let attrs = vec![cx.attribute(span, inline)];
let trait_def = TraitDef {
- span: span,
+ span,
attributes: Vec::new(),
path: path_std!(cx, core::cmp::Ord),
additional_bounds: Vec::new(),
}
let trait_def = TraitDef {
- span: span,
+ span,
attributes: Vec::new(),
path: path_std!(cx, core::cmp::PartialEq),
additional_bounds: Vec::new(),
generics: LifetimeBounds::empty(),
is_unsafe: false,
supports_unions: false,
- methods: methods,
+ methods,
associated_types: Vec::new(),
};
trait_def.expand(cx, mitem, item, push)
generics: LifetimeBounds::empty(),
explicit_self: borrowed_explicit_self(),
args: vec![borrowed_self()],
- ret_ty: ret_ty,
+ ret_ty,
attributes: attrs,
is_unsafe: false,
unify_fieldless_variants: true,
};
let trait_def = TraitDef {
- span: span,
+ span,
attributes: vec![],
path: path_std!(cx, core::cmp::PartialOrd),
additional_bounds: vec![],
generics: LifetimeBounds::empty(),
is_unsafe: false,
supports_unions: false,
- methods: methods,
+ methods,
associated_types: Vec::new(),
};
trait_def.expand(cx, mitem, item, push)
Borrowed(None, ast::Mutability::Mutable));
let trait_def = TraitDef {
- span: span,
+ span,
attributes: Vec::new(),
path: path_std!(cx, core::fmt::Debug),
additional_bounds: Vec::new(),
let typaram = &*deriving::hygienic_type_parameter(item, "__D");
let trait_def = TraitDef {
- span: span,
+ span,
attributes: Vec::new(),
path: Path::new_(vec![krate, "Decodable"], None, vec![], true),
additional_bounds: Vec::new(),
let inline = cx.meta_word(span, Symbol::intern("inline"));
let attrs = vec![cx.attribute(span, inline)];
let trait_def = TraitDef {
- span: span,
+ span,
attributes: Vec::new(),
path: path_std!(cx, core::default::Default),
additional_bounds: Vec::new(),
let typaram = &*deriving::hygienic_type_parameter(item, "__S");
let trait_def = TraitDef {
- span: span,
+ span,
attributes: Vec::new(),
path: Path::new_(vec![krate, "Encodable"], None, vec![], true),
additional_bounds: Vec::new(),
}
let mut visitor = Visitor {
- ty_param_names: ty_param_names,
+ ty_param_names,
types: Vec::new(),
- span: span,
- cx: cx,
+ span,
+ cx,
};
visit::Visitor::visit_ty(&mut visitor, ty);
ast::ImplItem {
id: ast::DUMMY_NODE_ID,
span: self.span,
- ident: ident,
+ ident,
vis: ast::Visibility::Inherited,
defaultness: ast::Defaultness::Final,
attrs: Vec::new(),
span: self.span,
bound_lifetimes: vec![],
bounded_ty: ty,
- bounds: bounds,
+ bounds,
};
let predicate = ast::WherePredicate::BoundPredicate(predicate);
}
let trait_generics = Generics {
- lifetimes: lifetimes,
- ty_params: ty_params,
- where_clause: where_clause,
- span: span,
+ lifetimes,
+ ty_params,
+ where_clause,
+ span,
};
// Create the reference to the trait.
fields: &SubstructureFields)
-> P<Expr> {
let substructure = Substructure {
- type_ident: type_ident,
+ type_ident,
method_ident: cx.ident_of(self.name),
- self_args: self_args,
- nonself_args: nonself_args,
- fields: fields,
+ self_args,
+ nonself_args,
+ fields,
};
let mut f = self.combine_substructure.borrow_mut();
let f: &mut CombineSubstructureFunc = &mut *f;
ident: method_ident,
node: ast::ImplItemKind::Method(ast::MethodSig {
generics: fn_generics,
- abi: abi,
- unsafety: unsafety,
+ abi,
+ unsafety,
constness:
dummy_spanned(ast::Constness::NotConst),
decl: fn_decl,
let mut other_fields: Vec<vec::IntoIter<_>> = raw_fields.collect();
first_field.map(|(span, opt_id, field, attrs)| {
FieldInfo {
- span: span,
+ span,
name: opt_id,
self_: field,
other: other_fields.iter_mut()
}
})
.collect(),
- attrs: attrs,
+ attrs,
}
})
.collect()
name: opt_ident,
self_: self_getter_expr,
other: others,
- attrs: attrs,
+ attrs,
}
}).collect::<Vec<FieldInfo>>();
span: Span { ctxt: self.span.ctxt, ..pat.span },
node: ast::FieldPat {
ident: ident.unwrap(),
- pat: pat,
+ pat,
is_shorthand: false,
attrs: ast::ThinVec::new(),
},
global: bool)
-> Path<'r> {
Path {
- path: path,
- lifetime: lifetime,
- params: params,
- global: global,
+ path,
+ lifetime,
+ params,
+ global,
}
}
fn mk_generics(lifetimes: Vec<ast::LifetimeDef>, ty_params: Vec<ast::TyParam>, span: Span)
-> Generics {
Generics {
- lifetimes: lifetimes,
- ty_params: ty_params,
+ lifetimes,
+ ty_params,
where_clause: ast::WhereClause {
id: ast::DUMMY_NODE_ID,
predicates: Vec::new(),
- span: span,
+ span,
},
- span: span,
+ span,
}
}
let arg = Path::new_local(typaram);
let hash_trait_def = TraitDef {
- span: span,
+ span,
attributes: Vec::new(),
- path: path,
+ path,
additional_bounds: Vec::new(),
generics: LifetimeBounds::empty(),
is_unsafe: false,
stmts: vec![cx.stmt_expr(call)],
id: ast::DUMMY_NODE_ID,
rules: ast::BlockCheckMode::Unsafe(ast::CompilerGenerated),
- span: span,
+ span,
}))
}
};
let mut cx = Context {
- ecx: ecx,
- args: args,
- arg_types: arg_types,
- arg_unique_types: arg_unique_types,
- names: names,
+ ecx,
+ args,
+ arg_types,
+ arg_unique_types,
+ names,
curarg: 0,
arg_index_map: Vec::new(),
count_args: Vec::new(),
pieces: Vec::new(),
str_pieces: Vec::new(),
all_pieces_simple: true,
- macsp: macsp,
+ macsp,
fmtsp: fmt.span,
};
/// Returns an iterator over all substitutions in a given string.
pub fn iter_subs(s: &str) -> Substitutions {
Substitutions {
- s: s,
+ s,
}
}
let f = Format {
span: start.slice_between(end).unwrap(),
- parameter: parameter,
- flags: flags,
- width: width,
- precision: precision,
- length: length,
- type_: type_,
+ parameter,
+ flags,
+ width,
+ precision,
+ length,
+ type_,
};
Some((Substitution::Format(f), end.slice_after()))
}
/// Returns an iterator over all substitutions in a given string.
pub fn iter_subs(s: &str) -> Substitutions {
Substitutions {
- s: s,
+ s,
}
}
impl<'a> StrCursor<'a> {
pub fn new_at_start(s: &'a str) -> StrCursor<'a> {
StrCursor {
- s: s,
+ s,
at: 0,
}
}
attrs: Vec::new(),
id: ast::DUMMY_NODE_ID,
node: ast::ItemKind::GlobalAsm(P(ast::GlobalAsm {
- asm: asm,
+ asm,
ctxt: cx.backtrace(),
})),
vis: ast::Visibility::Inherited,
attr_macros: Vec::new(),
bang_macros: Vec::new(),
in_root: true,
- handler: handler,
- is_proc_macro_crate: is_proc_macro_crate,
- is_test_crate: is_test_crate,
+ handler,
+ is_proc_macro_crate,
+ is_test_crate,
};
visit::walk_crate(&mut collect, &krate);
(collect.derives, collect.attr_macros, collect.bang_macros)
if self.in_root && item.vis == ast::Visibility::Public {
self.derives.push(ProcMacroDerive {
span: item.span,
- trait_name: trait_name,
+ trait_name,
function_name: item.ident,
attrs: proc_attrs,
});
syntax_contexts.push(SyntaxContextData {
outer_mark: mark,
prev_ctxt: self,
- modern: modern,
+ modern,
});
SyntaxContext(syntax_contexts.len() as u32 - 1)
})
if !info.call_site.source_equal(&prev_span) {
result.push(MacroBacktrace {
call_site: info.call_site,
- macro_decl_name: macro_decl_name,
- def_site_span: def_site_span,
+ macro_decl_name,
+ def_site_span,
});
}
for &(span, ref label) in &self.span_labels {
span_labels.push(SpanLabel {
- span: span,
+ span,
is_primary: is_primary(span),
label: Some(label.clone())
});
for &span in &self.primary_spans {
if !span_labels.iter().any(|sl| sl.span == span) {
span_labels.push(SpanLabel {
- span: span,
+ span,
is_primary: true,
label: None
});
let multibyte_chars: Vec<MultiByteChar> =
d.read_struct_field("multibyte_chars", 5, |d| Decodable::decode(d))?;
Ok(FileMap {
- name: name,
- name_was_remapped: name_was_remapped,
+ name,
+ name_was_remapped,
// `crate_of_origin` has to be set by the importer.
// This value matches up with rustc::hir::def_id::INVALID_CRATE.
// That constant is not available here unfortunately :(
crate_of_origin: ::std::u32::MAX - 1,
- start_pos: start_pos,
- end_pos: end_pos,
+ start_pos,
+ end_pos,
src: None,
- src_hash: src_hash,
+ src_hash,
external_src: RefCell::new(ExternalSource::AbsentOk),
lines: RefCell::new(lines),
multibyte_chars: RefCell::new(multibyte_chars)
let end_pos = start_pos.to_usize() + src.len();
FileMap {
- name: name,
- name_was_remapped: name_was_remapped,
+ name,
+ name_was_remapped,
crate_of_origin: 0,
src: Some(Rc::new(src)),
- src_hash: src_hash,
+ src_hash,
external_src: RefCell::new(ExternalSource::Unneeded),
- start_pos: start_pos,
+ start_pos,
end_pos: Pos::from_usize(end_pos),
lines: RefCell::new(Vec::new()),
multibyte_chars: RefCell::new(Vec::new()),
pub fn record_multibyte_char(&self, pos: BytePos, bytes: usize) {
assert!(bytes >=2 && bytes <= 4);
let mbc = MultiByteChar {
- pos: pos,
- bytes: bytes,
+ pos,
+ bytes,
};
self.multibyte_chars.borrow_mut().push(mbc);
}
};
TerminfoTerminal {
- out: out,
+ out,
ti: terminfo,
num_colors: nc,
}
TermInfo {
names: vec!["cygwin".to_string()], // msys is a fork of an older cygwin version
bools: HashMap::new(),
- numbers: numbers,
- strings: strings,
+ numbers,
+ strings,
}
}
impl Metric {
pub fn new(value: f64, noise: f64) -> Metric {
Metric {
- value: value,
- noise: noise,
+ value,
+ noise,
}
}
}
};
let test_opts = TestOpts {
- list: list,
- filter: filter,
+ list,
+ filter,
filter_exact: exact,
- run_ignored: run_ignored,
- run_tests: run_tests,
- bench_benchmarks: bench_benchmarks,
- logfile: logfile,
- nocapture: nocapture,
- color: color,
- quiet: quiet,
- test_threads: test_threads,
+ run_ignored,
+ run_tests,
+ bench_benchmarks,
+ logfile,
+ nocapture,
+ color,
+ quiet,
+ test_threads,
skip: matches.opt_strs("skip"),
options: Options::new(),
};
};
Ok(ConsoleTestState {
- out: out,
- log_out: log_out,
+ out,
+ log_out,
use_color: use_color(opts),
quiet: opts.quiet,
total: 0,
let TestDescAndFn {desc, testfn} = test;
Some(TestDescAndFn {
desc: TestDesc { ignore: false, ..desc },
- testfn: testfn,
+ testfn,
})
} else {
None
};
TestDescAndFn {
desc: x.desc,
- testfn: testfn,
+ testfn,
}
}).collect()
}
/// negative direction represents a regression.
pub fn insert_metric(&mut self, name: &str, value: f64, noise: f64) {
let m = Metric {
- value: value,
- noise: noise,
+ value,
+ noise,
};
let MetricMap(ref mut map) = *self;
map.insert(name.to_owned(), m);
let mb_s = bs.bytes * 1000 / ns_iter;
BenchSamples {
- ns_iter_summ: ns_iter_summ,
+ ns_iter_summ,
mb_s: mb_s as usize,
}
}
t!(io::stdin().read_to_string(&mut passphrase));
Builder {
- rust_release: rust_release,
- cargo_release: cargo_release,
- rls_release: rls_release,
- input: input,
- output: output,
+ rust_release,
+ cargo_release,
+ rls_release,
+ input,
+ output,
gpg_passphrase: passphrase,
digests: BTreeMap::new(),
- s3_address: s3_address,
- date: date,
+ s3_address,
+ date,
rust_version: String::new(),
cargo_version: String::new(),
rls_version: String::new(),
msg);
Some((which,
Error {
- line_num: line_num,
- kind: kind,
- msg: msg,
+ line_num,
+ kind,
+ msg,
}))
}
let kind = ErrorKind::from_str(&diagnostic.level).ok();
expected_errors.push(Error {
line_num: span.line_start,
- kind: kind,
- msg: msg,
+ kind,
+ msg,
});
}
}
target_rustcflags: matches.opt_str("target-rustcflags"),
target: opt_str2(matches.opt_str("target")),
host: opt_str2(matches.opt_str("host")),
- gdb: gdb,
- gdb_version: gdb_version,
- gdb_native_rust: gdb_native_rust,
+ gdb,
+ gdb_version,
+ gdb_native_rust,
lldb_version: extract_lldb_version(matches.opt_str("lldb-version")),
llvm_version: matches.opt_str("llvm-version"),
system_llvm: matches.opt_present("system-llvm"),
lldb_python_dir: matches.opt_str("lldb-python-dir"),
verbose: matches.opt_present("verbose"),
quiet: matches.opt_present("quiet"),
- color: color,
+ color,
remote_test_client: matches.opt_str("remote-test-client").map(PathBuf::from),
cc: matches.opt_str("cc").unwrap(),
test::TestDescAndFn {
desc: test::TestDesc {
name: make_test_name(config, testpaths),
- ignore: ignore,
- should_panic: should_panic,
+ ignore,
+ should_panic,
allow_fail: false,
},
testfn: make_test_closure(config, testpaths),
let Output { status, stdout, stderr } = process.wait_with_output().unwrap();
Ok(Result {
- status: status,
+ status,
out: String::from_utf8(stdout).unwrap(),
err: String::from_utf8(stderr).unwrap(),
})
let base_cx = TestCx { config: &config,
props: &base_props,
- testpaths: testpaths,
+ testpaths,
revision: None };
base_cx.init_all();
let rev_cx = TestCx {
config: &config,
props: &revision_props,
- testpaths: testpaths,
+ testpaths,
revision: Some(revision)
};
rev_cx.run_revision();
args.extend(self.props.compile_flags.iter().cloned());
ProcArgs {
prog: self.config.rustc_path.to_str().unwrap().to_owned(),
- args: args,
+ args,
}
}
// FIXME (#9639): This needs to handle non-utf8 paths
ProcArgs {
prog: self.config.rustc_path.to_str().unwrap().to_owned(),
- args: args,
+ args,
}
}
};
debugger_run_result = ProcRes {
- status: status,
+ status,
stdout: out,
stderr: err,
- cmdline: cmdline
+ cmdline,
};
if process.kill().is_err() {
println!("Adb process is already finished.");
self.dump_output(&out, &err);
ProcRes {
- status: status,
+ status,
stdout: out,
stderr: err,
cmdline: format!("{:?}", cmd)
}
DebuggerCommands {
- commands: commands,
- check_lines: check_lines,
- breakpoint_lines: breakpoint_lines,
+ commands,
+ check_lines,
+ breakpoint_lines,
}
}
let args = ProcArgs {
prog: self.config.rustdoc_path
.as_ref().expect("--rustdoc-path passed").to_str().unwrap().to_owned(),
- args: args,
+ args,
};
self.compose_and_run_compiler(args, None)
}
args.extend(self.props.compile_flags.iter().cloned());
ProcArgs {
prog: self.config.rustc_path.to_str().unwrap().to_owned(),
- args: args,
+ args,
}
}
let prog = args.remove(0);
ProcArgs {
- prog: prog,
- args: args,
+ prog,
+ args,
}
}
working_dir).expect(&format!("failed to exec `{}`", prog));
self.dump_output(&out, &err);
ProcRes {
- status: status,
+ status,
stdout: out,
stderr: err,
- cmdline: cmdline,
+ cmdline,
}
}