CFG_LDPATH_armv7-unknown-linux-gnueabihf :=
CFG_RUN_armv7-unknown-linux-gnueabihf=$(2)
CFG_RUN_TARG_armv7-unknown-linux-gnueabihf=$(call CFG_RUN_armv7-unknown-linux-gnueabihf,,$(2))
-RUSTC_FLAGS_armv7-unknown-linux-gnueabihf := -C target-feature=+v7,+vfp2,+neon
+RUSTC_FLAGS_armv7-unknown-linux-gnueabihf :=
RUSTC_CROSS_FLAGS_armv7-unknown-linux-gnueabihf :=
CFG_GNU_TRIPLE_armv7-unknown-linux-gnueabihf := armv7-unknown-linux-gnueabihf
target_vendor: "unknown".to_string(),
options: TargetOptions {
- features: "+v7,+vfp2,+neon".to_string(),
+ features: "+v7,+vfp3,+neon".to_string(),
cpu: "cortex-a8".to_string(),
.. base
}
let extra_words = self.data.len() - num_words;
self.data.extend((0..extra_words).map(|_| 0));
}
+
+ /// Returns an iterator yielding the indices of all set bits,
+ /// in ascending order.
+ pub fn iter<'a>(&'a self) -> BitVectorIter<'a> {
+ BitVectorIter { idx: 0, current: 0, iter: self.data.iter() }
+ }
+}
+
+pub struct BitVectorIter<'a> {
+ // Remaining words of the bit vector's backing storage.
+ iter: ::std::slice::Iter<'a, u64>,
+ // Unconsumed bits of the word currently being scanned, shifted so the
+ // next candidate bit is at (or near) the least-significant position.
+ current: u64,
+ // Bit index one past the most recently yielded position.
+ idx: usize
+}
+
+impl<'a> Iterator for BitVectorIter<'a> {
+ type Item = usize;
+ /// Yields the index of the next set bit, scanning words low-to-high.
+ fn next(&mut self) -> Option<usize> {
+ // Refill `current` from the word iterator whenever the remaining
+ // (already shifted out) bits of the current word are exhausted.
+ while self.current == 0 {
+ self.current = if let Some(&i) = self.iter.next() {
+ if i == 0 {
+ // All-zero word: skip its 64 positions wholesale.
+ self.idx += 64;
+ continue;
+ } else {
+ // Round `idx` up to this word's first bit position.
+ // (Presumably `u64s(n)` is the number of words needed
+ // for `n` bits, making this the next multiple of 64 —
+ // TODO confirm against `u64s`'s definition.)
+ self.idx = u64s(self.idx) * 64;
+ i
+ }
+ } else {
+ return None;
+ }
+ }
+ // Consume the lowest set bit of `current`.
+ let offset = self.current.trailing_zeros() as usize;
+ self.current >>= offset;
+ self.current >>= 1; // shift otherwise overflows for 0b1000_0000_…_0000
+ self.idx += offset + 1;
+ return Some(self.idx - 1);
+ }
}
/// A "bit matrix" is basically a square matrix of booleans
(word, mask)
}
+#[test]
+fn bitvec_iter_works() {
+ // Mix of positions: within one word, straddling the first word
+ // boundary (62/63/64/65), and near the vector's end.
+ let expected = [1, 10, 19, 62, 63, 64, 65, 66, 99];
+ let mut bitvec = BitVector::new(100);
+ for &bit in &expected {
+ bitvec.insert(bit);
+ }
+ assert_eq!(bitvec.iter().collect::<Vec<_>>(), expected);
+}
+
+#[test]
+fn bitvec_iter_works_2() {
+ // Spans several words, including a bit in the last word (299).
+ let expected = [1, 10, 19, 62, 66, 99, 299];
+ let mut bitvec = BitVector::new(300);
+ for &bit in &expected {
+ bitvec.insert(bit);
+ }
+ assert_eq!(bitvec.iter().collect::<Vec<_>>(), expected);
+}
+
+#[test]
+fn bitvec_iter_works_3() {
+ // Roughly one bit per word: exercises the skip-empty-word path.
+ let expected = [0, 127, 191, 255, 319];
+ let mut bitvec = BitVector::new(319);
+ for &bit in &expected {
+ bitvec.insert(bit);
+ }
+ assert_eq!(bitvec.iter().collect::<Vec<_>>(), expected);
+}
+
#[test]
fn union_two_vecs() {
let mut vec1 = BitVector::new(65);
pub mod erase_regions;
pub mod no_landing_pads;
pub mod type_check;
-mod util;
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use rustc_data_structures::bitvec::BitVector;
use rustc::middle::const_eval::ConstVal;
use rustc::middle::infer;
use rustc::mir::repr::*;
-use transform::util;
use rustc::mir::transform::MirPass;
pub struct SimplifyCfg;
}
fn remove_dead_blocks(&self, mir: &mut Mir) {
+ // Reachability scan: mark blocks reachable from the entry (plus the
+ // blocks that must always exist), then compact away the rest.
- let mut seen = vec![false; mir.basic_blocks.len()];
-
+ let mut seen = BitVector::new(mir.basic_blocks.len());
// These blocks are always required.
- seen[START_BLOCK.index()] = true;
- seen[END_BLOCK.index()] = true;
+ seen.insert(START_BLOCK.index());
+ seen.insert(END_BLOCK.index());
- let mut worklist = vec![START_BLOCK];
+ let mut worklist = Vec::with_capacity(4);
+ worklist.push(START_BLOCK);
while let Some(bb) = worklist.pop() {
for succ in mir.basic_block_data(bb).terminator().successors().iter() {
+ // NOTE(review): relies on `BitVector::insert` returning true only
+ // when the bit was not already set — confirm against its definition.
- if !seen[succ.index()] {
- seen[succ.index()] = true;
+ if seen.insert(succ.index()) {
worklist.push(*succ);
}
}
}
-
- util::retain_basic_blocks(mir, &seen);
+ retain_basic_blocks(mir, &seen);
}
fn remove_goto_chains(&self, mir: &mut Mir) -> bool {
for bb in mir.all_basic_blocks() {
let basic_block = mir.basic_block_data_mut(bb);
let mut terminator = basic_block.terminator_mut();
-
*terminator = match *terminator {
Terminator::If { ref targets, .. } if targets.0 == targets.1 => {
changed = true;
Terminator::Goto { target: targets.0 }
}
+
Terminator::If { ref targets, cond: Operand::Constant(Constant {
literal: Literal::Value {
value: ConstVal::Bool(cond)
Terminator::Goto { target: targets.1 }
}
}
+
Terminator::SwitchInt { ref targets, .. } if targets.len() == 1 => {
Terminator::Goto { target: targets[0] }
}
mir.basic_blocks.shrink_to_fit();
}
}
+
+/// Mass removal of basic blocks to keep the ID-remapping cheap: compacts
+/// `mir.basic_blocks` down to the blocks whose index is set in `keep`, then
+/// rewrites every surviving terminator's successor ids in a single pass.
+fn retain_basic_blocks(mir: &mut Mir, keep: &BitVector) {
+ let num_blocks = mir.basic_blocks.len();
+
+ // `replacements` maps old block ids to new ones. Entries for dead
+ // blocks keep their identity value, but they are never read afterwards
+ // because no kept terminator can point at a dead block.
+ let mut replacements: Vec<_> = (0..num_blocks).map(BasicBlock::new).collect();
+ let mut used_blocks = 0;
+ for alive_index in keep.iter() {
+ replacements[alive_index] = BasicBlock::new(used_blocks);
+ if alive_index != used_blocks {
+ // Swap the next alive block data with the current available slot. Since alive_index is
+ // non-decreasing this is a valid operation.
+ mir.basic_blocks.swap(alive_index, used_blocks);
+ }
+ used_blocks += 1;
+ }
+ mir.basic_blocks.truncate(used_blocks);
+
+ // Retarget every terminator at the blocks' new, compacted ids.
+ for bb in mir.all_basic_blocks() {
+ for target in mir.basic_block_data_mut(bb).terminator_mut().successors_mut() {
+ *target = replacements[target.index()];
+ }
+ }
+}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use rustc::mir::repr::*;
-
-/// Update basic block ids in all terminators using the given replacements,
-/// useful e.g. after removal of several basic blocks to update all terminators
-/// in a single pass
-pub fn update_basic_block_ids(mir: &mut Mir, replacements: &[BasicBlock]) {
- for bb in mir.all_basic_blocks() {
- for target in mir.basic_block_data_mut(bb).terminator_mut().successors_mut() {
- *target = replacements[target.index()];
- }
- }
-}
-
-/// Mass removal of basic blocks to keep the ID-remapping cheap.
-pub fn retain_basic_blocks(mir: &mut Mir, keep: &[bool]) {
- let num_blocks = mir.basic_blocks.len();
-
- // Check that we have a usage flag for every block
- assert_eq!(num_blocks, keep.len());
-
- let first_dead = match keep.iter().position(|&k| !k) {
- None => return,
- Some(first_dead) => first_dead,
- };
-
- // `replacements` maps the old block ids to the new ones
- let mut replacements: Vec<_> = (0..num_blocks).map(BasicBlock::new).collect();
-
- let mut dead = 0;
- for i in first_dead..num_blocks {
- if keep[i] {
- replacements[i] = BasicBlock::new(i - dead);
- mir.basic_blocks.swap(i, i - dead);
- } else {
- dead += 1;
- }
- }
- mir.basic_blocks.truncate(num_blocks - dead);
-
- update_basic_block_ids(mir, &replacements);
-}
use externalfiles::ExternalHtml;
-use serialize::json::{self, ToJson};
+use serialize::json::{ToJson, Json, as_json};
use syntax::{abi, ast};
use syntax::feature_gate::UnstableFeatures;
use rustc::middle::cstore::LOCAL_CRATE;
path: String,
desc: String,
parent: Option<DefId>,
+ parent_idx: Option<usize>,
search_type: Option<IndexItemFunctionType>,
}
+impl ToJson for IndexItem {
+ /// Serializes an index item as the compact flat array the search JS
+ /// expects: [ty, name, path, desc, parent_idx, search_type].
+ fn to_json(&self) -> Json {
+ // A parent id must be accompanied by its index, and vice versa.
+ assert_eq!(self.parent.is_some(), self.parent_idx.is_some());
+
+ let data = vec![(self.ty as usize).to_json(),
+ self.name.to_json(),
+ self.path.to_json(),
+ self.desc.to_json(),
+ self.parent_idx.to_json(),
+ self.search_type.to_json()];
+ Json::Array(data)
+ }
+}
+
/// A type used for the search index.
struct Type {
name: Option<String>,
}
-impl fmt::Display for Type {
- /// Formats type as {name: $name}.
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- // Wrapping struct fmt should never call us when self.name is None,
- // but just to be safe we write `null` in that case.
+impl ToJson for Type {
+ /// Serializes as `{"name": <name>}`, or `null` when the name is
+ /// unknown (mirrors the defensive behavior of the old Display impl).
+ fn to_json(&self) -> Json {
match self.name {
- Some(ref n) => write!(f, "{{\"name\":\"{}\"}}", n),
- None => write!(f, "null")
+ Some(ref name) => {
+ let mut data = BTreeMap::new();
+ data.insert("name".to_owned(), name.to_json());
+ Json::Object(data)
+ },
+ None => Json::Null
}
}
}
output: Option<Type>
}
-impl fmt::Display for IndexItemFunctionType {
- /// Formats a full fn type as a JSON {inputs: [Type], outputs: Type/null}.
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+impl ToJson for IndexItemFunctionType {
+ /// Serializes a full fn type as `{"inputs": [Type], "output": Type|null}`,
+ /// or `null` if any participating type is unnamed.
+ fn to_json(&self) -> Json {
// If we couldn't figure out a type, just write `null`.
- if self.inputs.iter().any(|ref i| i.name.is_none()) ||
- (self.output.is_some() && self.output.as_ref().unwrap().name.is_none()) {
- return write!(f, "null")
+ // `chain(self.output.iter())` folds the optional output into the same
+ // scan, so a single unnamed type anywhere voids the whole signature.
+ if self.inputs.iter().chain(self.output.iter()).any(|ref i| i.name.is_none()) {
+ Json::Null
+ } else {
+ let mut data = BTreeMap::new();
+ data.insert("inputs".to_owned(), self.inputs.to_json());
+ data.insert("output".to_owned(), self.output.to_json());
+ Json::Object(data)
}
-
- let inputs: Vec<String> = self.inputs.iter().map(|ref t| {
- format!("{}", t)
- }).collect();
- try!(write!(f, "{{\"inputs\":[{}],\"output\":", inputs.join(",")));
-
- match self.output {
- Some(ref t) => try!(write!(f, "{}", t)),
- None => try!(write!(f, "null"))
- };
-
- Ok(try!(write!(f, "}}")))
}
}
cx.krate(krate)
}
+/// Build the search index from the collected metadata
fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String {
- // Build the search index from the collected metadata
let mut nodeid_to_pathid = HashMap::new();
- let mut pathid_to_nodeid = Vec::new();
- {
- let Cache { ref mut search_index,
- ref orphan_methods,
- ref mut paths, .. } = *cache;
-
- // Attach all orphan methods to the type's definition if the type
- // has since been learned.
- for &(did, ref item) in orphan_methods {
- match paths.get(&did) {
- Some(&(ref fqp, _)) => {
- // Needed to determine `self` type.
- let parent_basename = Some(fqp[fqp.len() - 1].clone());
- search_index.push(IndexItem {
- ty: shortty(item),
- name: item.name.clone().unwrap(),
- path: fqp[..fqp.len() - 1].join("::"),
- desc: Escape(&shorter(item.doc_value())).to_string(),
- parent: Some(did),
- search_type: get_index_search_type(&item, parent_basename),
- });
- },
- None => {}
- }
- }
-
- // Reduce `NodeId` in paths into smaller sequential numbers,
- // and prune the paths that do not appear in the index.
- for item in search_index.iter() {
- match item.parent {
- Some(nodeid) => {
- if !nodeid_to_pathid.contains_key(&nodeid) {
- let pathid = pathid_to_nodeid.len();
- nodeid_to_pathid.insert(nodeid, pathid);
- pathid_to_nodeid.push(nodeid);
- }
- }
- None => {}
- }
+ let mut crate_items = Vec::with_capacity(cache.search_index.len());
+ let mut crate_paths = Vec::<Json>::new();
+
+ let Cache { ref mut search_index,
+ ref orphan_methods,
+ ref mut paths, .. } = *cache;
+
+ // Attach all orphan methods to the type's definition if the type
+ // has since been learned.
+ for &(did, ref item) in orphan_methods {
+ match paths.get(&did) {
+ Some(&(ref fqp, _)) => {
+ // Needed to determine `self` type.
+ let parent_basename = Some(fqp[fqp.len() - 1].clone());
+ search_index.push(IndexItem {
+ ty: shortty(item),
+ name: item.name.clone().unwrap(),
+ path: fqp[..fqp.len() - 1].join("::"),
+ desc: Escape(&shorter(item.doc_value())).to_string(),
+ parent: Some(did),
+ parent_idx: None,
+ search_type: get_index_search_type(&item, parent_basename),
+ });
+ },
+ None => {}
}
- assert_eq!(nodeid_to_pathid.len(), pathid_to_nodeid.len());
}
- // Collect the index into a string
- let mut w = io::Cursor::new(Vec::new());
- write!(&mut w, r#"searchIndex['{}'] = {{"items":["#, krate.name).unwrap();
+ // Reduce `NodeId` in paths into smaller sequential numbers,
+ // and prune the paths that do not appear in the index.
+ let mut lastpath = String::new();
+ let mut lastpathid = 0usize;
- let mut lastpath = "".to_string();
- for (i, item) in cache.search_index.iter().enumerate() {
- // Omit the path if it is same to that of the prior item.
- let path;
- if lastpath == item.path {
- path = "";
- } else {
- lastpath = item.path.to_string();
- path = &item.path;
- };
+ for item in search_index {
+ item.parent_idx = item.parent.map(|nodeid| {
+ if nodeid_to_pathid.contains_key(&nodeid) {
+ *nodeid_to_pathid.get(&nodeid).unwrap()
+ } else {
+ let pathid = lastpathid;
+ nodeid_to_pathid.insert(nodeid, pathid);
+ lastpathid += 1;
- if i > 0 {
- write!(&mut w, ",").unwrap();
- }
- write!(&mut w, r#"[{},"{}","{}",{}"#,
- item.ty as usize, item.name, path,
- item.desc.to_json().to_string()).unwrap();
- match item.parent {
- Some(nodeid) => {
- let pathid = *nodeid_to_pathid.get(&nodeid).unwrap();
- write!(&mut w, ",{}", pathid).unwrap();
+ let &(ref fqp, short) = paths.get(&nodeid).unwrap();
+ crate_paths.push(((short as usize), fqp.last().unwrap().clone()).to_json());
+ pathid
}
- None => write!(&mut w, ",null").unwrap()
- }
- match item.search_type {
- Some(ref t) => write!(&mut w, ",{}", t).unwrap(),
- None => write!(&mut w, ",null").unwrap()
- }
- write!(&mut w, "]").unwrap();
- }
-
- write!(&mut w, r#"],"paths":["#).unwrap();
+ });
- for (i, &did) in pathid_to_nodeid.iter().enumerate() {
- let &(ref fqp, short) = cache.paths.get(&did).unwrap();
- if i > 0 {
- write!(&mut w, ",").unwrap();
+ // Omit the parent path if it is same to that of the prior item.
+ if lastpath == item.path {
+ item.path.clear();
+ } else {
+ lastpath = item.path.clone();
}
- write!(&mut w, r#"[{},"{}"]"#,
- short as usize, *fqp.last().unwrap()).unwrap();
+ crate_items.push(item.to_json());
}
- write!(&mut w, "]}};").unwrap();
+ let crate_doc = krate.module.as_ref().map(|module| {
+ Escape(&shorter(module.doc_value())).to_string()
+ }).unwrap_or(String::new());
- String::from_utf8(w.into_inner()).unwrap()
+ let mut crate_data = BTreeMap::new();
+ crate_data.insert("doc".to_owned(), Json::String(crate_doc));
+ crate_data.insert("items".to_owned(), Json::Array(crate_items));
+ crate_data.insert("paths".to_owned(), Json::Array(crate_paths));
+
+ // Collect the index into a string
+ format!("searchIndex[{}] = {};",
+ as_json(&krate.name),
+ Json::Object(crate_data))
}
fn write_shared(cx: &Context,
if !line.starts_with(key) {
continue
}
- if line.starts_with(&format!("{}['{}']", key, krate)) {
+ if line.starts_with(&format!(r#"{}["{}"]"#, key, krate)) {
continue
}
ret.push(line.to_string());
path: path.join("::").to_string(),
desc: Escape(&shorter(item.doc_value())).to_string(),
parent: parent,
+ parent_idx: None,
search_type: get_index_search_type(&item, parent_basename),
});
}
let js_dst = this.dst.join("sidebar-items.js");
let mut js_out = BufWriter::new(try_err!(File::create(&js_dst), &js_dst));
try_err!(write!(&mut js_out, "initSidebarItems({});",
- json::as_json(&items)), &js_dst);
+ as_json(&items)), &js_dst);
}
for item in m.items {
displayPath = "";
href = rootPath + item.path.replace(/::/g, '/') +
'/' + type + '.' + name + '.html';
+ } else if (type === "externcrate") {
+ displayPath = "";
+ href = rootPath + name + '/index.html';
} else if (item.parent !== undefined) {
var myparent = item.parent;
var anchor = '#' + type + '.' + name;
for (var crate in rawSearchIndex) {
if (!rawSearchIndex.hasOwnProperty(crate)) { continue; }
+ searchWords.push(crate);
+ searchIndex.push({
+ crate: crate,
+ ty: 1, // == ExternCrate
+ name: crate,
+ path: "",
+ desc: rawSearchIndex[crate].doc,
+ type: null,
+ });
+
// an array of [(Number) item type,
// (String) name,
// (String) full path or empty string for previous path,
}
.content a.primitive { color: #39a7bf; }
-.content span.mod, .content a.mod, block a.current.mod { color: #4d76ae; }
+.content span.externcrate, span.mod, .content a.mod, block a.current.mod { color: #4d76ae; }
.content span.fn, .content a.fn, .block a.current.fn,
.content span.method, .content a.method, .block a.current.method,
.content span.tymethod, .content a.tymethod, .block a.current.tymethod,
use deriving::generic::*;
use deriving::generic::ty::*;
-use syntax::ast::{MetaItem, Expr};
+use syntax::ast::{MetaItem, Expr, VariantData};
use syntax::codemap::Span;
use syntax::ext::base::{ExtCtxt, Annotatable};
use syntax::ext::build::AstBuilder;
cx.expr_call_global(field.span, fn_path.clone(), args)
};
+ let vdata;
match *substr.fields {
- Struct(ref af) => {
+ Struct(vdata_, ref af) => {
ctor_path = cx.path(trait_span, vec![substr.type_ident]);
all_fields = af;
+ vdata = vdata_;
}
EnumMatching(_, variant, ref af) => {
ctor_path = cx.path(trait_span, vec![substr.type_ident, variant.node.name]);
all_fields = af;
+ vdata = &variant.node.data;
},
EnumNonMatchingCollapsed (..) => {
cx.span_bug(trait_span,
}
}
- if !all_fields.is_empty() && all_fields[0].name.is_none() {
- // enum-like
- let subcalls = all_fields.iter().map(subcall).collect();
- let path = cx.expr_path(ctor_path);
- cx.expr_call(trait_span, path, subcalls)
- } else {
- // struct-like
- let fields = all_fields.iter().map(|field| {
- let ident = match field.name {
- Some(i) => i,
- None => {
- cx.span_bug(trait_span,
- &format!("unnamed field in normal struct in \
- `derive({})`", name))
- }
- };
- cx.field_imm(field.span, ident, subcall(field))
- }).collect::<Vec<_>>();
+ match *vdata {
+ VariantData::Struct(..) => {
+ let fields = all_fields.iter().map(|field| {
+ let ident = match field.name {
+ Some(i) => i,
+ None => {
+ cx.span_bug(trait_span,
+ &format!("unnamed field in normal struct in \
+ `derive({})`", name))
+ }
+ };
+ cx.field_imm(field.span, ident, subcall(field))
+ }).collect::<Vec<_>>();
- if fields.is_empty() {
- // no fields, so construct like `None`
- cx.expr_path(ctor_path)
- } else {
cx.expr_struct(trait_span, ctor_path, fields)
}
+ VariantData::Tuple(..) => {
+ let subcalls = all_fields.iter().map(subcall).collect();
+ let path = cx.expr_path(ctor_path);
+ cx.expr_call(trait_span, path, subcalls)
+ }
+ VariantData::Unit(..) => {
+ cx.expr_path(ctor_path)
+ }
}
}
// build fmt.debug_struct(<name>).field(<fieldname>, &<fieldval>)....build()
// or fmt.debug_tuple(<name>).field(&<fieldval>)....build()
// based on the "shape".
- let ident = match *substr.fields {
- Struct(_) => substr.type_ident,
- EnumMatching(_, v, _) => v.node.name,
+ let (ident, is_struct) = match *substr.fields {
+ Struct(vdata, _) => (substr.type_ident, vdata.is_struct()),
+ EnumMatching(_, v, _) => (v.node.name, v.node.data.is_struct()),
EnumNonMatchingCollapsed(..) | StaticStruct(..) | StaticEnum(..) => {
cx.span_bug(span, "nonsensical .fields in `#[derive(Debug)]`")
}
let fmt = substr.nonself_args[0].clone();
let stmts = match *substr.fields {
- Struct(ref fields) | EnumMatching(_, _, ref fields) => {
+ Struct(_, ref fields) | EnumMatching(_, _, ref fields) => {
let mut stmts = vec![];
- if fields.is_empty() || fields[0].name.is_none() {
+ if !is_struct {
// tuple struct/"normal" variant
let expr = cx.expr_method_call(span,
fmt,
let encode = cx.ident_of("encode");
return match *substr.fields {
- Struct(ref fields) => {
+ Struct(_, ref fields) => {
let emit_struct_field = cx.ident_of("emit_struct_field");
let mut stmts = Vec::new();
for (i, &FieldInfo {
/// A summary of the possible sets of fields.
pub enum SubstructureFields<'a> {
- Struct(Vec<FieldInfo<'a>>),
+ Struct(&'a ast::VariantData, Vec<FieldInfo<'a>>),
/// Matching variants of the enum: variant index, ast::Variant,
/// fields: the field name is only non-`None` in the case of a struct
/// variant.
type_ident,
self_args,
nonself_args,
- &Struct(fields));
+ &Struct(struct_def, fields));
// make a series of nested matches, to destructure the
// structs. This is actually right-to-left, but it shouldn't
fields in generic `derive`"),
// named fields
(_, false) => Named(named_idents),
- // tuple structs (includes empty structs)
- (_, _) => Unnamed(just_spans)
+ // empty structs
+ _ if struct_def.is_struct() => Named(named_idents),
+ _ => Unnamed(just_spans),
}
}
P<Expr>,
&'a [ast::Attribute])>) {
if struct_def.fields().is_empty() {
- return (cx.pat_enum(self.span, struct_path, vec![]), vec![]);
+ if struct_def.is_struct() {
+ return (cx.pat_struct(self.span, struct_path, vec![]), vec![]);
+ } else {
+ return (cx.pat_enum(self.span, struct_path, vec![]), vec![]);
+ }
}
let mut paths = Vec::new();
// struct_type is definitely not Unknown, since struct_def.fields
// must be nonempty to reach here
- let pattern = if struct_type == Record {
+ let pattern = if struct_def.is_struct() {
let field_pats = subpats.into_iter().zip(&ident_expr)
.map(|(pat, &(_, id, _, _))| {
// id is guaranteed to be Some
F: FnMut(&mut ExtCtxt, Span, P<Expr>, P<Expr>, &[P<Expr>]) -> P<Expr>,
{
match *substructure.fields {
- EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => {
+ EnumMatching(_, _, ref all_fields) | Struct(_, ref all_fields) => {
if use_foldl {
all_fields.iter().fold(base, |old, field| {
f(cx,
F: FnOnce(&mut ExtCtxt, Span, Vec<P<Expr>>) -> P<Expr>,
{
match *substructure.fields {
- EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => {
+ EnumMatching(_, _, ref all_fields) | Struct(_, ref all_fields) => {
// call self_n.method(other_1_n, other_2_n, ...)
let called = all_fields.iter().map(|field| {
cx.expr_method_call(field.span,
let mut stmts = Vec::new();
let fields = match *substr.fields {
- Struct(ref fs) => fs,
+ Struct(_, ref fs) => fs,
EnumMatching(index, variant, ref fs) => {
// Determine the discriminant. We will feed this value to the byte
// iteration function.
fn totalsum_substructure(cx: &mut ExtCtxt, trait_span: Span,
substr: &Substructure) -> P<ast::Expr> {
let fields = match *substr.fields {
- Struct(ref fs) | EnumMatching(_, _, ref fs) => fs,
+ Struct(_, ref fs) | EnumMatching(_, _, ref fs) => fs,
_ => cx.span_bug(trait_span, "impossible substructure")
};
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// `#[derive(Trait)]` works for empty structs/variants with braces
+
+#![feature(braced_empty_structs)]
+#![feature(rustc_private)]
+
+extern crate serialize as rustc_serialize;
+
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash,
+ Default, Debug, RustcEncodable, RustcDecodable)]
+// Empty braced struct: every derivable trait must cope with zero fields.
+struct S {}
+
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash,
+ Debug, RustcEncodable, RustcDecodable)]
+enum E {
+ // Braced (struct-like) empty variant.
+ V {},
+ // Unit variant, for contrast.
+ U,
+}
+
+fn main() {
+ // Shared checks for an empty braced struct/variant: copying, cloning,
+ // comparing, and `Debug`-formatting must all behave as derived.
+ fn check<T>(v: T, debug: &str)
+ where T: Copy + Clone + PartialEq + PartialOrd + ::std::fmt::Debug
+ {
+ let copied = v;
+ let cloned = v.clone();
+ assert_eq!(v, copied);
+ assert_eq!(v, cloned);
+ assert!(!(v < copied));
+ assert_eq!(format!("{:?}", v), debug);
+ }
+
+ check(S {}, "S");
+ check(E::V {}, "V");
+}