1 //! Some code that abstracts away much of the boilerplate of writing
2 //! `derive` instances for traits. Among other things it manages getting
3 //! access to the fields of the 4 different sorts of structs and enum
4 //! variants, as well as creating the method and impl ast instances.
6 //! Supported features (fairly exhaustive):
8 //! - Methods taking any number of parameters of any type, and returning
9 //! any type, other than vectors, bottom and closures.
10 //! - Generating `impl`s for types with type parameters and lifetimes
11 //! (e.g., `Option<T>`), the parameters are automatically given the
12 //! current trait as a bound. (This includes separate type parameters
13 //! and lifetimes for methods.)
14 //! - Additional bounds on the type parameters (`TraitDef.additional_bounds`)
16 //! The most important thing for implementors is the `Substructure` and
17 //! `SubstructureFields` objects. The latter groups 5 possibilities of the
20 //! - `Struct`, when `Self` is a struct (including tuple structs, e.g.,
21 //! `struct T(i32, char)`).
22 //! - `EnumMatching`, when `Self` is an enum and all the arguments are the
23 //! same variant of the enum (e.g., `Some(1)`, `Some(3)` and `Some(4)`)
24 //! - `EnumNonMatchingCollapsed` when `Self` is an enum and the arguments
25 //! are not the same variant (e.g., `None`, `Some(1)` and `None`).
26 //! - `StaticEnum` and `StaticStruct` for static methods, where the type
27 //! being derived upon is either an enum or struct respectively. (Any
28 //! argument with type Self is just grouped among the non-self
31 //! In the first two cases, the values from the corresponding fields in
32 //! all the arguments are grouped together. For `EnumNonMatchingCollapsed`
33 //! this isn't possible (different variants have different fields), so the
34 //! fields are inaccessible. (Previous versions of the deriving infrastructure
35 //! had a way to expand into code that could access them, at the cost of
36 //! generating exponential amounts of code; see issue #15375). There are no
37 //! fields with values in the static cases, so these are treated entirely
40 //! The non-static cases have `Option<ident>` in several places associated
41 //! with field `expr`s. This represents the name of the field it is
42 //! associated with. It is only not `None` when the associated field has
43 //! an identifier in the source code. For example, the `x`s in the
47 //! # #![allow(dead_code)]
48 //! struct A { x : i32 }
58 //! The `i32`s in `B` and `C0` don't have an identifier, so the
59 //! `Option<ident>`s would be `None` for them.
61 //! In the static cases, the structure is summarized, either into the just
62 //! spans of the fields or a list of spans and the field idents (for tuple
63 //! structs and record structs, respectively), or a list of these, for
64 //! enums (one for each variant). For empty struct and empty enum
65 //! variants, it is represented as a count of 0.
67 //! # "`cs`" functions
69 //! The `cs_...` functions ("combine substructure") are designed to
70 //! make life easier by providing some pre-made recipes for common
71 //! threads; mostly calling the function being derived on all the
72 //! arguments and then combining them back together in some way (or
73 //! letting the user choose that). They are not meant to be the only
74 //! way to handle the structures that this code creates.
78 //! The following simplified `PartialEq` is used for in-code examples:
82 //! fn eq(&self, other: &Self) -> bool;
84 //! impl PartialEq for i32 {
85 //! fn eq(&self, other: &i32) -> bool {
91 //! Some examples of the values of `SubstructureFields` follow, using the
92 //! above `PartialEq`, `A`, `B` and `C`.
96 //! When generating the `expr` for the `A` impl, the `SubstructureFields` is
99 //! Struct(vec![FieldInfo {
100 //! span: <span of x>
101 //! name: Some(<ident of x>),
102 //! self_: <expr for &self.x>,
103 //! other: vec![<expr for &other.x>]
107 //! For the `B` impl, called with `B(a)` and `B(b)`,
110 //! Struct(vec![FieldInfo {
111 //! span: <span of `i32`>,
113 //! self_: <expr for &a>
114 //! other: vec![<expr for &b>]
120 //! When generating the `expr` for a call with `self == C0(a)` and `other
121 //! == C0(b)`, the SubstructureFields is
124 //! EnumMatching(0, <ast::Variant for C0>,
126 //! span: <span of i32>
128 //! self_: <expr for &a>,
129 //! other: vec![<expr for &b>]
133 //! For `C1 {x}` and `C1 {x}`,
136 //! EnumMatching(1, <ast::Variant for C1>,
138 //! span: <span of x>
139 //! name: Some(<ident of x>),
140 //! self_: <expr for &self.x>,
141 //! other: vec![<expr for &other.x>]
145 //! For `C0(a)` and `C1 {x}` ,
148 //! EnumNonMatchingCollapsed(
149 //! vec![<ident of self>, <ident of __arg_1>],
150 //! &[<ast::Variant for C0>, <ast::Variant for C1>],
151 //! &[<ident for self index value>, <ident of __arg_1 index value>])
154 //! It is the same for when the arguments are flipped to `C1 {x}` and
155 //! `C0(a)`; the only difference is what the values of the identifiers
156 //! <ident for self index value> and <ident of __arg_1 index value> will
157 //! be in the generated code.
159 //! `EnumNonMatchingCollapsed` deliberately provides far less information
160 //! than is generally available for a given pair of variants; see #15375
165 //! A static method on the types above would result in,
168 //! StaticStruct(<ast::VariantData of A>, Named(vec![(<ident of x>, <span of x>)]))
170 //! StaticStruct(<ast::VariantData of B>, Unnamed(vec![<span of x>]))
172 //! StaticEnum(<ast::EnumDef of C>,
173 //! vec![(<ident of C0>, <span of C0>, Unnamed(vec![<span of i32>])),
174 //! (<ident of C1>, <span of C1>, Named(vec![(<ident of x>, <span of x>)]))])
177 pub use StaticFields::*;
178 pub use SubstructureFields::*;
180 use std::cell::RefCell;
184 use rustc_data_structures::thin_vec::ThinVec;
185 use rustc_target::spec::abi::Abi;
186 use syntax::ast::{self, BinOpKind, EnumDef, Expr, Generics, Ident, PatKind};
187 use syntax::ast::{VariantData, GenericParamKind, GenericArg};
189 use syntax::ext::base::{Annotatable, ExtCtxt};
190 use syntax::ext::build::AstBuilder;
191 use syntax::source_map::{self, respan};
192 use syntax::util::map_in_place::MapInPlace;
194 use syntax::symbol::{Symbol, keywords};
195 use syntax::parse::ParseSess;
196 use syntax_pos::{DUMMY_SP, Span};
198 use ty::{LifetimeBounds, Path, Ptr, PtrTy, Self_, Ty};
204 pub struct TraitDef<'a> {
205 /// The span for the current #[derive(Foo)] header.
208 pub attributes: Vec<ast::Attribute>,
210 /// Path of the trait, including any type parameters
213 /// Additional bounds required of any type parameters of the type,
214 /// other than the current trait
215 pub additional_bounds: Vec<Ty<'a>>,
217 /// Any extra lifetimes and/or bounds, e.g., `D: serialize::Decoder`
218 pub generics: LifetimeBounds<'a>,
220 /// Is it an `unsafe` trait?
223 /// Can this trait be derived for unions?
224 pub supports_unions: bool,
226 pub methods: Vec<MethodDef<'a>>,
228 pub associated_types: Vec<(ast::Ident, Ty<'a>)>,
232 pub struct MethodDef<'a> {
233 /// name of the method
235 /// List of generics, e.g., `R: rand::Rng`
236 pub generics: LifetimeBounds<'a>,
238 /// Whether there is a self argument (outer Option) i.e., whether
239 /// this is a static function, and whether it is a pointer (inner
241 pub explicit_self: Option<Option<PtrTy<'a>>>,
243 /// Arguments other than the self argument
244 pub args: Vec<(Ty<'a>, &'a str)>,
249 pub attributes: Vec<ast::Attribute>,
251 /// Is it an `unsafe fn`?
254 /// Can we combine fieldless variants for enums into a single match arm?
255 pub unify_fieldless_variants: bool,
257 pub combine_substructure: RefCell<CombineSubstructureFunc<'a>>,
260 /// All the data about the data structure/method being derived upon.
261 pub struct Substructure<'a> {
263 pub type_ident: Ident,
264 /// ident of the method
265 pub method_ident: Ident,
266 /// dereferenced access to any `Self_` or `Ptr(Self_, _)` arguments
267 pub self_args: &'a [P<Expr>],
268 /// verbatim access to any other arguments
269 pub nonself_args: &'a [P<Expr>],
270 pub fields: &'a SubstructureFields<'a>,
273 /// Summary of the relevant parts of a struct/enum field.
274 pub struct FieldInfo<'a> {
276 /// None for tuple structs/normal enum variants, Some for normal
277 /// structs/struct enum variants.
278 pub name: Option<Ident>,
279 /// The expression corresponding to this field of `self`
280 /// (specifically, a reference to it).
282 /// The expressions corresponding to references to this field in
283 /// the other `Self` arguments.
284 pub other: Vec<P<Expr>>,
285 /// The attributes on the field
286 pub attrs: &'a [ast::Attribute],
289 /// Fields for a static method
290 pub enum StaticFields {
291 /// Tuple and unit structs/enum variants like this.
292 Unnamed(Vec<Span>, bool /*is tuple*/),
293 /// Normal structs/struct variants.
294 Named(Vec<(Ident, Span)>),
297 /// A summary of the possible sets of fields.
298 pub enum SubstructureFields<'a> {
299 Struct(&'a ast::VariantData, Vec<FieldInfo<'a>>),
300 /// Matching variants of the enum: variant index, variant count, ast::Variant,
301 /// fields: the field name is only non-`None` in the case of a struct
303 EnumMatching(usize, usize, &'a ast::Variant, Vec<FieldInfo<'a>>),
305 /// Non-matching variants of the enum, but with all state hidden from
306 /// the consequent code. The first component holds `Ident`s for all of
307 /// the `Self` arguments; the second component is a slice of all of the
308 /// variants for the enum itself, and the third component is a list of
309 /// `Ident`s bound to the variant index values for each of the actual
310 /// input `Self` arguments.
311 EnumNonMatchingCollapsed(Vec<Ident>, &'a [ast::Variant], &'a [Ident]),
313 /// A static method where `Self` is a struct.
314 StaticStruct(&'a ast::VariantData, StaticFields),
315 /// A static method where `Self` is an enum.
316 StaticEnum(&'a ast::EnumDef, Vec<(Ident, Span, StaticFields)>),
321 /// Combine the values of all the fields together. The last argument is
322 /// all the fields of all the structures.
/// Invoked with the expansion context, the span of the `derive`, and a
/// `Substructure` describing the fields being combined; returns the
/// expression that forms the body of the generated method.
323 pub type CombineSubstructureFunc<'a> =
324 Box<dyn FnMut(&mut ExtCtxt<'_>, Span, &Substructure<'_>) -> P<Expr> + 'a>;
326 /// Deal with non-matching enum variants. The tuple is a list of
327 /// identifiers (one for each `Self` argument, which could be any of the
328 /// variants since they have been collapsed together) and the identifiers
329 /// holding the variant index value for each of the `Self` arguments. The
330 /// last argument is all the non-`Self` args of the method being derived.
/// Returns the expression used as the catch-all match arm body when the
/// `Self` arguments are not all the same variant.
331 pub type EnumNonMatchCollapsedFunc<'a> =
332 Box<dyn FnMut(&mut ExtCtxt<'_>, Span, (&[Ident], &[Ident]), &[P<Expr>]) -> P<Expr> + 'a>;
334 pub fn combine_substructure<'a>(f: CombineSubstructureFunc<'a>)
335 -> RefCell<CombineSubstructureFunc<'a>> {
339 /// This method helps to extract all the type parameters referenced from a
340 /// type. For a type parameter `<T>`, it looks for either a `TyPath` that
341 /// is not global and starts with `T`, or a `TyQPath`.
342 fn find_type_parameters(ty: &ast::Ty,
343 ty_param_names: &[ast::Name],
349 struct Visitor<'a, 'b: 'a> {
352 ty_param_names: &'a [ast::Name],
353 types: Vec<P<ast::Ty>>,
356 impl<'a, 'b> visit::Visitor<'a> for Visitor<'a, 'b> {
357 fn visit_ty(&mut self, ty: &'a ast::Ty) {
358 if let ast::TyKind::Path(_, ref path) = ty.node {
359 if let Some(segment) = path.segments.first() {
360 if self.ty_param_names.contains(&segment.ident.name) {
361 self.types.push(P(ty.clone()));
366 visit::walk_ty(self, ty)
369 fn visit_mac(&mut self, mac: &ast::Mac) {
370 let span = mac.span.with_ctxt(self.span.ctxt());
371 self.cx.span_err(span, "`derive` cannot be used on items with type macros");
375 let mut visitor = Visitor {
382 visit::Visitor::visit_ty(&mut visitor, ty);
387 impl<'a> TraitDef<'a> {
389 cx: &mut ExtCtxt<'_>,
390 mitem: &ast::MetaItem,
391 item: &'a Annotatable,
392 push: &mut dyn FnMut(Annotatable)) {
393 self.expand_ext(cx, mitem, item, push, false);
396 pub fn expand_ext(self,
397 cx: &mut ExtCtxt<'_>,
398 mitem: &ast::MetaItem,
399 item: &'a Annotatable,
400 push: &mut dyn FnMut(Annotatable),
401 from_scratch: bool) {
403 Annotatable::Item(ref item) => {
404 let is_packed = item.attrs.iter().any(|attr| {
405 for r in attr::find_repr_attrs(&cx.parse_sess, attr) {
406 if let attr::ReprPacked(_) = r {
412 let has_no_type_params = match item.node {
413 ast::ItemKind::Struct(_, ref generics) |
414 ast::ItemKind::Enum(_, ref generics) |
415 ast::ItemKind::Union(_, ref generics) => {
416 !generics.params.iter().any(|param| match param.kind {
417 ast::GenericParamKind::Type { .. } => true,
422 // Non-ADT derive is an error, but it should have been
424 // libsyntax/ext/expand.rs:MacroExpander::expand()
429 attr::contains_name(&item.attrs, "rustc_copy_clone_marker") &&
431 let use_temporaries = is_packed && is_always_copy;
433 let newitem = match item.node {
434 ast::ItemKind::Struct(ref struct_def, ref generics) => {
435 self.expand_struct_def(cx, &struct_def, item.ident, generics, from_scratch,
438 ast::ItemKind::Enum(ref enum_def, ref generics) => {
439 // We ignore `use_temporaries` here, because
440 // `repr(packed)` enums cause an error later on.
442 // This can only cause further compilation errors
443 // downstream in blatantly illegal code, so it
445 self.expand_enum_def(cx, enum_def, &item.attrs,
446 item.ident, generics, from_scratch)
448 ast::ItemKind::Union(ref struct_def, ref generics) => {
449 if self.supports_unions {
450 self.expand_struct_def(cx, &struct_def, item.ident,
451 generics, from_scratch,
454 cx.span_err(mitem.span,
455 "this trait cannot be derived for unions");
461 // Keep the lint attributes of the previous item to control how the
462 // generated implementations are linted
463 let mut attrs = newitem.attrs.clone();
464 attrs.extend(item.attrs
466 .filter(|a| a.ident_str().map_or(false, |name| {
467 ["allow", "warn", "deny", "forbid", "stable", "unstable"].contains(&name)
470 push(Annotatable::Item(P(ast::Item { attrs: attrs, ..(*newitem).clone() })))
473 // Non-Item derive is an error, but it should have been
475 // libsyntax/ext/expand.rs:MacroExpander::expand()
481 /// Given that we are deriving a trait `DerivedTrait` for a type like:
483 /// ```ignore (only-for-syntax-highlight)
484 /// struct Struct<'a, ..., 'z, A, B: DeclaredTrait, C, ..., Z> where C: WhereTrait {
487 /// b1: <B as DeclaredTrait>::Item,
488 /// c1: <C as WhereTrait>::Item,
489 /// c2: Option<<C as WhereTrait>::Item>,
494 /// create an impl like:
496 /// ```ignore (only-for-syntax-highlight)
497 /// impl<'a, ..., 'z, A, B: DeclaredTrait, C, ... Z> where
499 /// A: DerivedTrait + B1 + ... + BN,
500 /// B: DerivedTrait + B1 + ... + BN,
501 /// C: DerivedTrait + B1 + ... + BN,
502 /// B::Item: DerivedTrait + B1 + ... + BN,
503 /// <C as WhereTrait>::Item: DerivedTrait + B1 + ... + BN,
510 /// where B1, ..., BN are the bounds given by `bounds_paths`.'. Z is a phantom type, and
511 /// therefore does not get bound by the derived trait.
512 fn create_derived_impl(&self,
513 cx: &mut ExtCtxt<'_>,
516 field_tys: Vec<P<ast::Ty>>,
517 methods: Vec<ast::ImplItem>)
519 let trait_path = self.path.to_path(cx, self.span, type_ident, generics);
521 // Transform associated types from `deriving::ty::Ty` into `ast::ImplItem`
522 let associated_types = self.associated_types.iter().map(|&(ident, ref type_def)| {
524 id: ast::DUMMY_NODE_ID,
527 vis: respan(self.span.shrink_to_lo(), ast::VisibilityKind::Inherited),
528 defaultness: ast::Defaultness::Final,
530 generics: Generics::default(),
531 node: ast::ImplItemKind::Type(type_def.to_ty(cx, self.span, type_ident, generics)),
536 let Generics { mut params, mut where_clause, span } = self.generics
537 .to_generics(cx, self.span, type_ident, generics);
539 // Create the generic parameters
540 params.extend(generics.params.iter().map(|param| match param.kind {
541 GenericParamKind::Lifetime { .. } => param.clone(),
542 GenericParamKind::Type { .. } => {
543 // I don't think this can be moved out of the loop, since
544 // a GenericBound requires an ast id
546 // extra restrictions on the generics parameters to the
547 // type being derived upon
548 self.additional_bounds.iter().map(|p| {
549 cx.trait_bound(p.to_path(cx, self.span, type_ident, generics))
551 // require the current trait
552 iter::once(cx.trait_bound(trait_path.clone()))
554 // also add in any bounds from the declaration
555 param.bounds.iter().cloned()
558 cx.typaram(self.span, param.ident, vec![], bounds, None)
560 GenericParamKind::Const { .. } => param.clone(),
563 // and similarly for where clauses
564 where_clause.predicates.extend(generics.where_clause.predicates.iter().map(|clause| {
566 ast::WherePredicate::BoundPredicate(ref wb) => {
567 ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate {
569 bound_generic_params: wb.bound_generic_params.clone(),
570 bounded_ty: wb.bounded_ty.clone(),
571 bounds: wb.bounds.iter().cloned().collect(),
574 ast::WherePredicate::RegionPredicate(ref rb) => {
575 ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate {
577 lifetime: rb.lifetime,
578 bounds: rb.bounds.iter().cloned().collect(),
581 ast::WherePredicate::EqPredicate(ref we) => {
582 ast::WherePredicate::EqPredicate(ast::WhereEqPredicate {
583 id: ast::DUMMY_NODE_ID,
585 lhs_ty: we.lhs_ty.clone(),
586 rhs_ty: we.rhs_ty.clone(),
593 // Extra scope required here so ty_params goes out of scope before params is moved
595 let mut ty_params = params.iter()
596 .filter_map(|param| match param.kind {
597 ast::GenericParamKind::Type { .. } => Some(param),
602 if ty_params.peek().is_some() {
603 let ty_param_names: Vec<ast::Name> = ty_params
604 .map(|ty_param| ty_param.ident.name)
607 for field_ty in field_tys {
608 let tys = find_type_parameters(&field_ty, &ty_param_names, self.span, cx);
611 // if we have already handled this type, skip it
612 if let ast::TyKind::Path(_, ref p) = ty.node {
613 if p.segments.len() == 1 &&
614 ty_param_names.contains(&p.segments[0].ident.name) {
618 let mut bounds: Vec<_> = self.additional_bounds
621 cx.trait_bound(p.to_path(cx, self.span, type_ident, generics))
625 // require the current trait
626 bounds.push(cx.trait_bound(trait_path.clone()));
628 let predicate = ast::WhereBoundPredicate {
630 bound_generic_params: Vec::new(),
635 let predicate = ast::WherePredicate::BoundPredicate(predicate);
636 where_clause.predicates.push(predicate);
642 let trait_generics = Generics {
648 // Create the reference to the trait.
649 let trait_ref = cx.trait_ref(trait_path);
651 let self_params: Vec<_> = generics.params.iter().map(|param| match param.kind {
652 GenericParamKind::Lifetime { .. } => {
653 GenericArg::Lifetime(cx.lifetime(self.span, param.ident))
655 GenericParamKind::Type { .. } => {
656 GenericArg::Type(cx.ty_ident(self.span, param.ident))
658 GenericParamKind::Const { .. } => {
659 GenericArg::Const(cx.const_ident(self.span, param.ident))
663 // Create the type of `self`.
664 let path = cx.path_all(self.span, false, vec![type_ident], self_params, vec![]);
665 let self_type = cx.ty_path(path);
667 let attr = cx.attribute(self.span,
668 cx.meta_word(self.span,
669 Symbol::intern("automatically_derived")));
670 // Just mark it now since we know that it'll end up used downstream
671 attr::mark_used(&attr);
672 let opt_trait_ref = Some(trait_ref);
674 let word = cx.meta_list_item_word(self.span, Symbol::intern("unused_qualifications"));
675 cx.attribute(self.span, cx.meta_list(self.span, Symbol::intern("allow"), vec![word]))
678 let mut a = vec![attr, unused_qual];
679 a.extend(self.attributes.iter().cloned());
681 let unsafety = if self.is_unsafe {
682 ast::Unsafety::Unsafe
684 ast::Unsafety::Normal
688 keywords::Invalid.ident(),
690 ast::ItemKind::Impl(unsafety,
691 ast::ImplPolarity::Positive,
692 ast::Defaultness::Final,
696 methods.into_iter().chain(associated_types).collect()))
699 fn expand_struct_def(&self,
700 cx: &mut ExtCtxt<'_>,
701 struct_def: &'a VariantData,
705 use_temporaries: bool)
707 let field_tys: Vec<P<ast::Ty>> = struct_def.fields()
709 .map(|field| field.ty.clone())
712 let methods = self.methods
715 let (explicit_self, self_args, nonself_args, tys) =
716 method_def.split_self_nonself_args(cx, self, type_ident, generics);
718 let body = if from_scratch || method_def.is_static() {
719 method_def.expand_static_struct_method_body(cx,
726 method_def.expand_struct_method_body(cx,
735 method_def.create_method(cx,
746 self.create_derived_impl(cx, type_ident, generics, field_tys, methods)
749 fn expand_enum_def(&self,
750 cx: &mut ExtCtxt<'_>,
751 enum_def: &'a EnumDef,
752 type_attrs: &[ast::Attribute],
757 let mut field_tys = Vec::new();
759 for variant in &enum_def.variants {
760 field_tys.extend(variant.node
764 .map(|field| field.ty.clone()));
767 let methods = self.methods
770 let (explicit_self, self_args, nonself_args, tys) =
771 method_def.split_self_nonself_args(cx, self, type_ident, generics);
773 let body = if from_scratch || method_def.is_static() {
774 method_def.expand_static_enum_method_body(cx,
781 method_def.expand_enum_method_body(cx,
790 method_def.create_method(cx,
801 self.create_derived_impl(cx, type_ident, generics, field_tys, methods)
805 fn find_repr_type_name(sess: &ParseSess, type_attrs: &[ast::Attribute]) -> &'static str {
806 let mut repr_type_name = "isize";
807 for a in type_attrs {
808 for r in &attr::find_repr_attrs(sess, a) {
809 repr_type_name = match *r {
810 attr::ReprPacked(_) | attr::ReprSimd | attr::ReprAlign(_) | attr::ReprTransparent =>
813 attr::ReprC => "i32",
815 attr::ReprInt(attr::SignedInt(ast::IntTy::Isize)) => "isize",
816 attr::ReprInt(attr::SignedInt(ast::IntTy::I8)) => "i8",
817 attr::ReprInt(attr::SignedInt(ast::IntTy::I16)) => "i16",
818 attr::ReprInt(attr::SignedInt(ast::IntTy::I32)) => "i32",
819 attr::ReprInt(attr::SignedInt(ast::IntTy::I64)) => "i64",
820 attr::ReprInt(attr::SignedInt(ast::IntTy::I128)) => "i128",
822 attr::ReprInt(attr::UnsignedInt(ast::UintTy::Usize)) => "usize",
823 attr::ReprInt(attr::UnsignedInt(ast::UintTy::U8)) => "u8",
824 attr::ReprInt(attr::UnsignedInt(ast::UintTy::U16)) => "u16",
825 attr::ReprInt(attr::UnsignedInt(ast::UintTy::U32)) => "u32",
826 attr::ReprInt(attr::UnsignedInt(ast::UintTy::U64)) => "u64",
827 attr::ReprInt(attr::UnsignedInt(ast::UintTy::U128)) => "u128",
834 impl<'a> MethodDef<'a> {
835 fn call_substructure_method(&self,
836 cx: &mut ExtCtxt<'_>,
837 trait_: &TraitDef<'_>,
839 self_args: &[P<Expr>],
840 nonself_args: &[P<Expr>],
841 fields: &SubstructureFields<'_>)
843 let substructure = Substructure {
845 method_ident: cx.ident_of(self.name),
850 let mut f = self.combine_substructure.borrow_mut();
851 let f: &mut CombineSubstructureFunc<'_> = &mut *f;
852 f(cx, trait_.span, &substructure)
856 cx: &mut ExtCtxt<'_>,
857 trait_: &TraitDef<'_>,
861 self.ret_ty.to_ty(cx, trait_.span, type_ident, generics)
864 fn is_static(&self) -> bool {
865 self.explicit_self.is_none()
868 fn split_self_nonself_args
870 cx: &mut ExtCtxt<'_>,
871 trait_: &TraitDef<'_>,
874 -> (Option<ast::ExplicitSelf>, Vec<P<Expr>>, Vec<P<Expr>>, Vec<(Ident, P<ast::Ty>)>) {
876 let mut self_args = Vec::new();
877 let mut nonself_args = Vec::new();
878 let mut arg_tys = Vec::new();
879 let mut nonstatic = false;
881 let ast_explicit_self = self.explicit_self.as_ref().map(|self_ptr| {
882 let (self_expr, explicit_self) = ty::get_explicit_self(cx, trait_.span, self_ptr);
884 self_args.push(self_expr);
890 for (ty, name) in self.args.iter() {
891 let ast_ty = ty.to_ty(cx, trait_.span, type_ident, generics);
892 let ident = cx.ident_of(name).gensym();
893 arg_tys.push((ident, ast_ty));
895 let arg_expr = cx.expr_ident(trait_.span, ident);
898 // for static methods, just treat any Self
899 // arguments as a normal arg
900 Self_ if nonstatic => {
901 self_args.push(arg_expr);
903 Ptr(ref ty, _) if (if let Self_ = **ty { true } else { false }) && nonstatic => {
904 self_args.push(cx.expr_deref(trait_.span, arg_expr))
907 nonself_args.push(arg_expr);
912 (ast_explicit_self, self_args, nonself_args, arg_tys)
915 fn create_method(&self,
916 cx: &mut ExtCtxt<'_>,
917 trait_: &TraitDef<'_>,
921 explicit_self: Option<ast::ExplicitSelf>,
922 arg_types: Vec<(Ident, P<ast::Ty>)>,
926 // create the generics that aren't for Self
927 let fn_generics = self.generics.to_generics(cx, trait_.span, type_ident, generics);
930 let self_args = explicit_self.map(|explicit_self| {
931 ast::Arg::from_self(explicit_self,
932 keywords::SelfLower.ident().with_span_pos(trait_.span))
934 let nonself_args = arg_types.into_iter()
935 .map(|(name, ty)| cx.arg(trait_.span, name, ty));
936 self_args.into_iter().chain(nonself_args).collect()
939 let ret_type = self.get_ret_ty(cx, trait_, generics, type_ident);
941 let method_ident = cx.ident_of(self.name);
942 let fn_decl = cx.fn_decl(args, ast::FunctionRetTy::Ty(ret_type));
943 let body_block = cx.block_expr(body);
945 let unsafety = if self.is_unsafe {
946 ast::Unsafety::Unsafe
948 ast::Unsafety::Normal
951 // Create the method.
953 id: ast::DUMMY_NODE_ID,
954 attrs: self.attributes.clone(),
955 generics: fn_generics,
957 vis: respan(trait_.span.shrink_to_lo(), ast::VisibilityKind::Inherited),
958 defaultness: ast::Defaultness::Final,
960 node: ast::ImplItemKind::Method(ast::MethodSig {
961 header: ast::FnHeader {
963 ..ast::FnHeader::default()
973 /// #[derive(PartialEq)]
975 /// struct A { x: i32, y: i32 }
977 /// // equivalent to:
978 /// impl PartialEq for A {
979 /// fn eq(&self, other: &A) -> bool {
981 /// A {x: ref __self_0_0, y: ref __self_0_1} => {
983 /// A {x: ref __self_1_0, y: ref __self_1_1} => {
984 /// __self_0_0.eq(__self_1_0) && __self_0_1.eq(__self_1_1)
992 /// // or if A is repr(packed) - note fields are matched by-value
993 /// // instead of by-reference.
994 /// impl PartialEq for A {
995 /// fn eq(&self, other: &A) -> bool {
997 /// A {x: __self_0_0, y: __self_0_1} => {
999 /// A {x: __self_1_0, y: __self_1_1} => {
1000 /// __self_0_0.eq(&__self_1_0) && __self_0_1.eq(&__self_1_1)
1008 fn expand_struct_method_body<'b>(&self,
1009 cx: &mut ExtCtxt<'_>,
1010 trait_: &TraitDef<'b>,
1011 struct_def: &'b VariantData,
1013 self_args: &[P<Expr>],
1014 nonself_args: &[P<Expr>],
1015 use_temporaries: bool)
1018 let mut raw_fields = Vec::new(); // Vec<[fields of self],
1019 // [fields of next Self arg], [etc]>
1020 let mut patterns = Vec::new();
1021 for i in 0..self_args.len() {
1022 let struct_path = cx.path(DUMMY_SP, vec![type_ident]);
1023 let (pat, ident_expr) = trait_.create_struct_pattern(cx,
1026 &format!("__self_{}", i),
1027 ast::Mutability::Immutable,
1030 raw_fields.push(ident_expr);
1033 // transpose raw_fields
1034 let fields = if !raw_fields.is_empty() {
1035 let mut raw_fields = raw_fields.into_iter().map(|v| v.into_iter());
1036 let first_field = raw_fields.next().unwrap();
1037 let mut other_fields: Vec<vec::IntoIter<_>> = raw_fields.collect();
1038 first_field.map(|(span, opt_id, field, attrs)| {
1043 other: other_fields.iter_mut()
1045 match l.next().unwrap() {
1055 cx.span_bug(trait_.span,
1056 "no self arguments to non-static method in generic \
1060 // body of the inner most destructuring match
1061 let mut body = self.call_substructure_method(cx,
1066 &Struct(struct_def, fields));
1068 // make a series of nested matches, to destructure the
1069 // structs. This is actually right-to-left, but it shouldn't
1071 for (arg_expr, pat) in self_args.iter().zip(patterns) {
1072 body = cx.expr_match(trait_.span,
1074 vec![cx.arm(trait_.span, vec![pat.clone()], body)])
1080 fn expand_static_struct_method_body(&self,
1081 cx: &mut ExtCtxt<'_>,
1082 trait_: &TraitDef<'_>,
1083 struct_def: &VariantData,
1085 self_args: &[P<Expr>],
1086 nonself_args: &[P<Expr>])
1088 let summary = trait_.summarise_struct(cx, struct_def);
1090 self.call_substructure_method(cx,
1095 &StaticStruct(struct_def, summary))
1099 /// #[derive(PartialEq)]
1106 /// // is equivalent to
1108 /// impl PartialEq for A {
1109 /// fn eq(&self, other: &A) -> ::bool {
1110 /// match (&*self, &*other) {
1111 /// (&A1, &A1) => true,
1112 /// (&A2(ref self_0),
1113 /// &A2(ref __arg_1_0)) => (*self_0).eq(&(*__arg_1_0)),
1115 /// let __self_vi = match *self { A1(..) => 0, A2(..) => 1 };
1116 /// let __arg_1_vi = match *other { A1(..) => 0, A2(..) => 1 };
1124 /// (Of course `__self_vi` and `__arg_1_vi` are unused for
1125 /// `PartialEq`, and those subcomputations will hopefully be removed
1126 /// as their results are unused. The point of `__self_vi` and
1127 /// `__arg_1_vi` is for `PartialOrd`; see #15503.)
1128 fn expand_enum_method_body<'b>(&self,
1129 cx: &mut ExtCtxt<'_>,
1130 trait_: &TraitDef<'b>,
1131 enum_def: &'b EnumDef,
1132 type_attrs: &[ast::Attribute],
1134 self_args: Vec<P<Expr>>,
1135 nonself_args: &[P<Expr>])
1137 self.build_enum_match_tuple(cx,
1147 /// Creates a match for a tuple of all `self_args`, where either all
1148 /// variants match, or it falls into a catch-all for when one variant
1151 /// There are N + 1 cases because there is a case for each of the N
1152 /// variants where all of the variants match, and one catch-all for
1153 /// when one does not match.
1155 /// As an optimization we generate code which checks whether all variants
1156 /// match first which makes llvm see that C-like enums can be compiled into
1157 /// a simple equality check (for PartialEq).
1159 /// The catch-all handler is provided access to the variant index values
1160 /// for each of the self-args, carried in precomputed variables.
1163 /// let __self0_vi = unsafe {
1164 /// std::intrinsics::discriminant_value(&self) } as i32;
1165 /// let __self1_vi = unsafe {
1166 /// std::intrinsics::discriminant_value(&arg1) } as i32;
1167 /// let __self2_vi = unsafe {
1168 /// std::intrinsics::discriminant_value(&arg2) } as i32;
1170 /// if __self0_vi == __self1_vi && __self0_vi == __self2_vi && ... {
1172 /// (Variant1, Variant1, ...) => Body1
1173 /// (Variant2, Variant2, ...) => Body2,
1175 /// _ => ::core::intrinsics::unreachable()
1179 /// ... // catch-all remainder can inspect above variant index values.
1182 fn build_enum_match_tuple<'b>(&self,
1183 cx: &mut ExtCtxt<'_>,
1184 trait_: &TraitDef<'b>,
1185 enum_def: &'b EnumDef,
1186 type_attrs: &[ast::Attribute],
1188 mut self_args: Vec<P<Expr>>,
1189 nonself_args: &[P<Expr>])
1191 let sp = trait_.span;
1192 let variants = &enum_def.variants;
// Names for the Self-typed arguments: "__self" for the receiver,
// then "__arg_0", "__arg_1", ... for each remaining Self arg.
1194 let self_arg_names = iter::once("__self".to_string()).chain(
1198 .map(|(arg_count, _self_arg)|
1199 format!("__arg_{}", arg_count)
1201 ).collect::<Vec<String>>();
1203 let self_arg_idents = self_arg_names.iter()
1204 .map(|name| cx.ident_of(&name[..]))
1205 .collect::<Vec<ast::Ident>>();
1207 // The `vi_idents` will be bound, solely in the catch-all, to
1208 // a series of let statements mapping each self_arg to an int
1209 // value corresponding to its discriminant.
// Each ident is "<arg name>_vi", gensym'd so it cannot collide
// with user code captured by the expansion.
1210 let vi_idents = self_arg_names.iter()
1212 let vi_suffix = format!("{}_vi", &name[..]);
1213 cx.ident_of(&vi_suffix[..]).gensym()
1215 .collect::<Vec<ast::Ident>>();
1217 // Builds, via callback to call_substructure_method, the
1218 // delegated expression that handles the catch-all case,
1219 // using `__variants_tuple` to drive logic if necessary.
1220 let catch_all_substructure =
1221 EnumNonMatchingCollapsed(self_arg_idents, &variants[..], &vi_idents[..]);
// First variant with no fields, if any; used below to build one
// unified arm covering all fieldless variants when
// `unify_fieldless_variants` is set.
1223 let first_fieldless = variants.iter().find(|v| v.node.data.fields().is_empty());
1225 // These arms are of the form:
1226 // (Variant1, Variant1, ...) => Body1
1227 // (Variant2, Variant2, ...) => Body2
1229 // where each tuple has length = self_args.len()
// Fieldless variants are skipped here when they will be handled by
// the unified default arm instead (see `first_fieldless` above).
1230 let mut match_arms: Vec<ast::Arm> = variants.iter()
1232 .filter(|&(_, v)| !(self.unify_fieldless_variants && v.node.data.fields().is_empty()))
1233 .map(|(index, variant)| {
// Builds the `&VariantK(..)` pattern for one Self arg, plus the
// per-field (span, ident, getter, attrs) info for that pattern.
1234 let mk_self_pat = |cx: &mut ExtCtxt<'_>, self_arg_name: &str| {
1235 let (p, idents) = trait_.create_enum_variant_pattern(cx,
1239 ast::Mutability::Immutable);
1240 (cx.pat(sp, PatKind::Ref(p, ast::Mutability::Immutable)), idents)
1243 // A single arm has form (&VariantK, &VariantK, ...) => BodyK
1244 // (see "Final wrinkle" note below for why.)
1245 let mut subpats = Vec::with_capacity(self_arg_names.len());
1246 let mut self_pats_idents = Vec::with_capacity(self_arg_names.len() - 1);
// The first Self arg's field info is kept separate: it drives the
// transposition below, while the rest go into `self_pats_idents`.
1247 let first_self_pat_idents = {
1248 let (p, idents) = mk_self_pat(cx, &self_arg_names[0]);
1252 for self_arg_name in &self_arg_names[1..] {
1253 let (p, idents) = mk_self_pat(cx, &self_arg_name[..]);
1255 self_pats_idents.push(idents);
1258 // Here is the pat = `(&VariantK, &VariantK, ...)`
1259 let single_pat = cx.pat_tuple(sp, subpats);
1261 // For the BodyK, we need to delegate to our caller,
1262 // passing it an EnumMatching to indicate which case
1265 // All of the Self args have the same variant in these
1266 // cases. So we transpose the info in self_pats_idents
1267 // to gather the getter expressions together, in the
1268 // form that EnumMatching expects.
1270 // The transposition is driven by walking across the
1271 // arg fields of the variant for the first self pat.
1272 let field_tuples = first_self_pat_idents.into_iter().enumerate()
1273 // For each arg field of self, pull out its getter expr ...
1274 .map(|(field_index, (sp, opt_ident, self_getter_expr, attrs))| {
1275 // ... but FieldInfo also wants getter expr
1276 // for matching other arguments of Self type;
1277 // so walk across the *other* self_pats_idents
1278 // and pull out getter for same field in each
1279 // of them (using `field_index` tracked above).
1280 // That is the heart of the transposition.
1281 let others = self_pats_idents.iter().map(|fields| {
1282 let (_, _opt_ident, ref other_getter_expr, _) =
1283 fields[field_index];
1285 // All Self args have same variant, so
1286 // opt_idents are the same. (Assert
1287 // here to make it self-evident that
1288 // it is okay to ignore `_opt_ident`.)
1289 assert!(opt_ident == _opt_ident);
1291 other_getter_expr.clone()
1292 }).collect::<Vec<P<Expr>>>();
1294 FieldInfo { span: sp,
1296 self_: self_getter_expr,
1300 }).collect::<Vec<FieldInfo<'_>>>();
1302 // Now, for some given VariantK, we have built up
1303 // expressions for referencing every field of every
1304 // Self arg, assuming all are instances of VariantK.
1305 // Build up code associated with such a case.
1306 let substructure = EnumMatching(index, variants.len(), variant, field_tuples);
1307 let arm_expr = self.call_substructure_method(cx,
1314 cx.arm(sp, vec![single_pat], arm_expr)
// Choose a default (`_`) arm body, if one is needed at all.
1318 let default = match first_fieldless {
1319 Some(v) if self.unify_fieldless_variants => {
1320 // We need a default case that handles the fieldless variants.
1321 // The index and actual variant aren't meaningful in this case,
1322 // so just use whatever
1323 let substructure = EnumMatching(0, variants.len(), v, Vec::new());
1324 Some(self.call_substructure_method(cx,
1331 _ if variants.len() > 1 && self_args.len() > 1 => {
1332 // Since we know that all the arguments will match if we reach
1333 // the match expression we add the unreachable intrinsics as the
1334 // result of the catch all which should help llvm in optimizing it
1335 Some(deriving::call_intrinsic(cx, sp, "unreachable", vec![]))
1339 if let Some(arm) = default {
1340 match_arms.push(cx.arm(sp, vec![cx.pat_wild(sp)], arm));
1343 // We will usually need the catch-all after matching the
1344 // tuples `(VariantK, VariantK, ...)` for each VariantK of the
1347 // * when there is only one Self arg, the arms above suffice
1348 // (and the deriving we call back into may not be prepared to
1349 // handle EnumNonMatchCollapsed), and,
1351 // * when the enum has only one variant, the single arm that
1352 // is already present always suffices.
1354 // * In either of the two cases above, if we *did* add a
1355 // catch-all `_` match, it would trigger the
1356 // unreachable-pattern error.
1358 if variants.len() > 1 && self_args.len() > 1 {
1359 // Build a series of let statements mapping each self_arg
1360 // to its discriminant value. If this is a C-style enum
1361 // with a specific repr type, then casts the values to
1362 // that type. Otherwise casts to `i32` (the default repr
1365 // i.e., for `enum E<T> { A, B(1), C(T, T) }`, and a deriving
1366 // with three Self args, builds three statements:
1369 // let __self0_vi = unsafe {
1370 // std::intrinsics::discriminant_value(&self) } as i32;
1371 // let __self1_vi = unsafe {
1372 // std::intrinsics::discriminant_value(&arg1) } as i32;
1373 // let __self2_vi = unsafe {
1374 // std::intrinsics::discriminant_value(&arg2) } as i32;
1376 let mut index_let_stmts: Vec<ast::Stmt> = Vec::with_capacity(vi_idents.len() + 1);
1378 // We also build an expression which checks whether all discriminants are equal
1379 // discriminant_test = __self0_vi == __self1_vi && __self0_vi == __self2_vi && ...
1380 let mut discriminant_test = cx.expr_bool(sp, true);
1382 let target_type_name = find_repr_type_name(&cx.parse_sess, type_attrs);
// `first_ident` remembers the first arg's discriminant binding so every
// later discriminant can be compared against it.
1384 let mut first_ident = None;
1385 for (&ident, self_arg) in vi_idents.iter().zip(&self_args) {
1386 let self_addr = cx.expr_addr_of(sp, self_arg.clone());
1388 deriving::call_intrinsic(cx, sp, "discriminant_value", vec![self_addr]);
1390 let target_ty = cx.ty_ident(sp, cx.ident_of(target_type_name));
1391 let variant_disr = cx.expr_cast(sp, variant_value, target_ty);
1392 let let_stmt = cx.stmt_let(sp, false, ident, variant_disr);
1393 index_let_stmts.push(let_stmt);
1397 let first_expr = cx.expr_ident(sp, first);
1398 let id = cx.expr_ident(sp, ident);
1399 let test = cx.expr_binary(sp, BinOpKind::Eq, first_expr, id);
1401 cx.expr_binary(sp, BinOpKind::And, discriminant_test, test)
1404 first_ident = Some(ident);
// The non-matching-variants body, delegated to the deriving impl
// via the catch-all substructure built earlier.
1409 let arm_expr = self.call_substructure_method(cx,
1414 &catch_all_substructure);
1416 // Final wrinkle: the self_args are expressions that deref
1417 // down to desired places, but we cannot actually deref
1418 // them when they are fed as r-values into a tuple
1419 // expression; here add a layer of borrowing, turning
1420 // `(*self, *__arg_0, ...)` into `(&*self, &*__arg_0, ...)`.
1421 self_args.map_in_place(|self_arg| cx.expr_addr_of(sp, self_arg));
1422 let match_arg = cx.expr(sp, ast::ExprKind::Tup(self_args));
1424 // Lastly we create an expression which branches on all discriminants being equal
1425 // if discriminant_test {
1427 // (Variant1, Variant1, ...) => Body1
1428 // (Variant2, Variant2, ...) => Body2,
1430 // _ => ::core::intrinsics::unreachable()
1434 // <delegated expression referring to __self0_vi, et al.>
1436 let all_match = cx.expr_match(sp, match_arg, match_arms);
1437 let arm_expr = cx.expr_if(sp, discriminant_test, all_match, Some(arm_expr));
1438 index_let_stmts.push(cx.stmt_expr(arm_expr));
1439 cx.expr_block(cx.block(sp, index_let_stmts))
1440 } else if variants.is_empty() {
1441 // As an additional wrinkle, for a zero-variant enum A,
1442 // currently the compiler
1443 // will accept `fn (a: &Self) { match *a { } }`
1444 // but rejects `fn (a: &Self) { match (&*a,) { } }`
1445 // as well as `fn (a: &Self) { match ( *a,) { } }`
1447 // This means that the strategy of building up a tuple of
1448 // all Self arguments fails when Self is a zero variant
1449 // enum: rustc rejects the expanded program, even though
1450 // the actual code tends to be impossible to execute (at
1451 // least safely), according to the type system.
1453 // The most expedient fix for this is to just let the
1454 // code fall through to the catch-all. But even this is
1455 // error-prone, since the catch-all as defined above would
1456 // generate code like this:
1458 // _ => { let __self0 = match *self { };
1459 // let __self1 = match *__arg_0 { };
1460 // <catch-all-expr> }
1462 // Which yields bindings for variables which type
1463 // inference cannot resolve to unique types.
1465 // One option to the above might be to add explicit type
1466 // annotations. But the *only* reason to go down that path
1467 // would be to try to make the expanded output consistent
1468 // with the case when the number of enum variants >= 1.
1470 // That just isn't worth it. In fact, trying to generate
1471 // sensible code for *any* deriving on a zero-variant enum
1472 // does not make sense. But at the same time, for now, we
1473 // do not want to cause a compile failure just because the
1474 // user happened to attach a deriving to their
1475 // zero-variant enum.
1477 // Instead, just generate a failing expression for the
1478 // zero variant case, skipping matches and also skipping
1479 // delegating back to the end user code entirely.
1481 // (See also #4499 and #12609; note that some of the
1482 // discussions there influence what choice we make here;
1483 // e.g., if we feature-gate `match x { ... }` when x refers
1484 // to an uninhabited type (e.g., a zero-variant enum or a
1485 // type holding such an enum), but do not feature-gate
1486 // zero-variant enums themselves, then attempting to
1487 // derive Debug on such a type could here generate code
1488 // that needs the feature gate enabled.)
1490 deriving::call_intrinsic(cx, sp, "unreachable", vec![])
// Simple case (one variant, or one Self arg): no discriminant
// bookkeeping is needed; just match on the borrowed tuple.
1493 // Final wrinkle: the self_args are expressions that deref
1494 // down to desired places, but we cannot actually deref
1495 // them when they are fed as r-values into a tuple
1496 // expression; here add a layer of borrowing, turning
1497 // `(*self, *__arg_0, ...)` into `(&*self, &*__arg_0, ...)`.
1498 self_args.map_in_place(|self_arg| cx.expr_addr_of(sp, self_arg));
1499 let match_arg = cx.expr(sp, ast::ExprKind::Tup(self_args));
1500 cx.expr_match(sp, match_arg, match_arms)
/// Expands the body of a static (no-`self`) method when deriving on an
/// enum: summarises each variant's fields (named vs. unnamed) via
/// `summarise_struct` and delegates body construction to the deriving
/// impl with a `StaticEnum` substructure.
1504 fn expand_static_enum_method_body(&self,
1505 cx: &mut ExtCtxt<'_>,
1506 trait_: &TraitDef<'_>,
1509 self_args: &[P<Expr>],
1510 nonself_args: &[P<Expr>])
// One (ident, span, field-summary) triple per variant; spans are
// re-contextualized to the trait's expansion context.
1512 let summary = enum_def.variants
1515 let sp = v.span.with_ctxt(trait_.span.ctxt());
1516 let summary = trait_.summarise_struct(cx, &v.node.data);
1517 (v.node.ident, sp, summary)
1520 self.call_substructure_method(cx,
1525 &StaticEnum(enum_def, summary))
1529 // general helper methods.
1530 impl<'a> TraitDef<'a> {
/// Classifies the fields of `struct_def` into a `StaticFields` summary:
/// `Named` when the fields carry identifiers, `Unnamed` (with their
/// spans) otherwise. A struct mixing named and unnamed fields is
/// impossible in valid input, so that case aborts via `span_bug`.
1531 fn summarise_struct(&self, cx: &mut ExtCtxt<'_>, struct_def: &VariantData) -> StaticFields {
1532 let mut named_idents = Vec::new();
1533 let mut just_spans = Vec::new();
// Partition each field by whether it has a name, re-contextualizing
// its span to this derive's expansion context.
1534 for field in struct_def.fields() {
1535 let sp = field.span.with_ctxt(self.span.ctxt());
1537 Some(ident) => named_idents.push((ident, sp)),
1538 _ => just_spans.push(sp),
1542 match (just_spans.is_empty(), named_idents.is_empty()) {
1544 cx.span_bug(self.span,
1545 "a struct with named and unnamed \
1546 fields in generic `derive`")
1549 (_, false) => Named(named_idents),
// No fields at all: braced structs report empty `Named`,
// everything else reports empty `Unnamed`.
1551 _ if struct_def.is_struct() => Named(named_idents),
1552 _ => Unnamed(just_spans, struct_def.is_tuple()),
/// Builds one identifier pattern per path in `field_paths`. Bindings are
/// by-value (immutable) when `use_temporaries` is set, otherwise
/// by-reference with the requested mutability.
/// NOTE(review): callers appear to use `use_temporaries` for the
/// `repr(packed)` case (cf. the "enums can't be repr(packed)" comment in
/// `create_enum_variant_pattern`) — confirm against the elided callers.
1556 fn create_subpatterns(&self,
1557 cx: &mut ExtCtxt<'_>,
1558 field_paths: Vec<ast::Ident>,
1559 mutbl: ast::Mutability,
1560 use_temporaries: bool)
1561 -> Vec<P<ast::Pat>> {
1564 let binding_mode = if use_temporaries {
1565 ast::BindingMode::ByValue(ast::Mutability::Immutable)
1567 ast::BindingMode::ByRef(mutbl)
1570 PatKind::Ident(binding_mode, (*path).clone(), None))
/// Builds a pattern that matches `struct_def` (braced, tuple, or unit) at
/// `struct_path`, binding every field to a fresh gensym'd identifier named
/// `{prefix}_{index}`. Also returns, per field, its span, optional field
/// name, an expression that reads the bound value, and its attributes —
/// the shape consumed by `FieldInfo` construction elsewhere.
1575 fn create_struct_pattern
1577 cx: &mut ExtCtxt<'_>,
1578 struct_path: ast::Path,
1579 struct_def: &'a VariantData,
1581 mutbl: ast::Mutability,
1582 use_temporaries: bool)
1583 -> (P<ast::Pat>, Vec<(Span, Option<Ident>, P<Expr>, &'a [ast::Attribute])>)
1585 let mut paths = Vec::new();
1586 let mut ident_exprs = Vec::new();
1587 for (i, struct_field) in struct_def.fields().iter().enumerate() {
1588 let sp = struct_field.span.with_ctxt(self.span.ctxt());
// Gensym the binding so it cannot capture or clash with user idents.
1589 let ident = cx.ident_of(&format!("{}_{}", prefix, i)).gensym();
1590 paths.push(ident.with_span_pos(sp));
1591 let val = cx.expr_path(cx.path_ident(sp, ident));
// By-value temporaries are read directly; by-ref bindings must be
// dereferenced to reach the field value.
1592 let val = if use_temporaries {
1595 cx.expr_deref(sp, val)
// Parenthesize so the getter splices safely into any context.
1597 let val = cx.expr(sp, ast::ExprKind::Paren(val));
1599 ident_exprs.push((sp, struct_field.ident, val, &struct_field.attrs[..]));
1602 let subpats = self.create_subpatterns(cx, paths, mutbl, use_temporaries);
// Assemble the overall pattern according to the struct flavor.
1603 let pattern = match *struct_def {
1604 VariantData::Struct(..) => {
1605 let field_pats = subpats.into_iter()
1607 .map(|(pat, &(sp, ident, ..))| {
1608 if ident.is_none() {
1609 cx.span_bug(sp, "a braced struct with unnamed fields in `derive`");
1611 source_map::Spanned {
1612 span: pat.span.with_ctxt(self.span.ctxt()),
1613 node: ast::FieldPat {
1614 ident: ident.unwrap(),
1616 is_shorthand: false,
1617 attrs: ThinVec::new(),
1622 cx.pat_struct(self.span, struct_path, field_pats)
1624 VariantData::Tuple(..) => {
1625 cx.pat_tuple_struct(self.span, struct_path, subpats)
1627 VariantData::Unit(..) => {
1628 cx.pat_path(self.span, struct_path)
1632 (pattern, ident_exprs)
/// Like `create_struct_pattern`, but for a single enum variant: the
/// pattern path is `EnumName::VariantName`, and temporaries are never
/// used because enums can't be `repr(packed)`.
1635 fn create_enum_variant_pattern
1637 cx: &mut ExtCtxt<'_>,
1638 enum_ident: ast::Ident,
1639 variant: &'a ast::Variant,
1641 mutbl: ast::Mutability)
1642 -> (P<ast::Pat>, Vec<(Span, Option<Ident>, P<Expr>, &'a [ast::Attribute])>) {
1643 let sp = variant.span.with_ctxt(self.span.ctxt());
1644 let variant_path = cx.path(sp, vec![enum_ident, variant.node.ident]);
1645 let use_temporaries = false; // enums can't be repr(packed)
1646 self.create_struct_pattern(cx, variant_path, &variant.node.data, prefix, mutbl,
1651 // helpful premade recipes
/// Folds `f` over `all_fields`, starting from a base expression:
/// left-to-right when `use_foldl` is true, right-to-left otherwise.
/// Each step receives the accumulated expression, the field's `self_`
/// getter, and its `other` getters.
1653 pub fn cs_fold_fields<'a, F>(use_foldl: bool,
1656 cx: &mut ExtCtxt<'_>,
1657 all_fields: &[FieldInfo<'a>])
1659 where F: FnMut(&mut ExtCtxt<'_>, Span, P<Expr>, P<Expr>, &[P<Expr>]) -> P<Expr>
1662 all_fields.iter().fold(base, |old, field| {
1663 f(cx, field.span, old, field.self_.clone(), &field.other)
// Right-to-left: same fold over the reversed field sequence.
1666 all_fields.iter().rev().fold(base, |old, field| {
1667 f(cx, field.span, old, field.self_.clone(), &field.other)
/// Dispatches the `EnumNonMatchingCollapsed` case to the caller-supplied
/// handler, forwarding the collapsed self-arg idents, the variant-index
/// ident tuple, and the non-self args. Reaching this with any other
/// substructure is an expansion bug.
1672 pub fn cs_fold_enumnonmatch(mut enum_nonmatch_f: EnumNonMatchCollapsedFunc<'_>,
1673 cx: &mut ExtCtxt<'_>,
1675 substructure: &Substructure<'_>)
1678 match *substructure.fields {
1679 EnumNonMatchingCollapsed(ref all_args, _, tuple) => {
1682 (&all_args[..], tuple),
1683 substructure.nonself_args)
1685 _ => cx.span_bug(trait_span, "cs_fold_enumnonmatch expected an EnumNonMatchingCollapsed")
/// Static methods have no fields to fold over; reaching a fold with a
/// `StaticEnum`/`StaticStruct` substructure is always an expansion bug.
1689 pub fn cs_fold_static(cx: &mut ExtCtxt<'_>,
1693 cx.span_bug(trait_span, "static function in `derive`")
1696 /// Fold the fields. `use_foldl` controls whether this is done
1697 /// left-to-right (`true`) or right-to-left (`false`).
1698 pub fn cs_fold<F>(use_foldl: bool,
1701 enum_nonmatch_f: EnumNonMatchCollapsedFunc<'_>,
1702 cx: &mut ExtCtxt<'_>,
1704 substructure: &Substructure<'_>)
1706 where F: FnMut(&mut ExtCtxt<'_>, Span, P<Expr>, P<Expr>, &[P<Expr>]) -> P<Expr>
1708 match *substructure.fields {
// Grouped fields (struct or single-variant enum arm): plain fold.
1709 EnumMatching(.., ref all_fields) |
1710 Struct(_, ref all_fields) => {
1711 cs_fold_fields(use_foldl, f, base, cx, all_fields)
// Mismatched enum variants: delegate to the non-match handler.
1713 EnumNonMatchingCollapsed(..) => {
1714 cs_fold_enumnonmatch(enum_nonmatch_f, cx, trait_span, substructure)
// Statics cannot be folded over (always a bug).
1716 StaticEnum(..) | StaticStruct(..) => {
1717 cs_fold_static(cx, trait_span)
1722 /// Function to fold over fields, with three cases, to generate more efficient and concise code.
1723 /// When the `substructure` has grouped fields, there are two cases:
1724 /// Zero fields: call the base case function with `None` (like the usual base case of `cs_fold`).
1725 /// One or more fields: call the base case function on the first value (which depends on
1726 /// `use_foldl`), and use that as the base case. Then perform `cs_fold` on the remainder of the
1728 /// When the `substructure` is a `EnumNonMatchingCollapsed`, the result of `enum_nonmatch_f`
1729 /// is returned. Statics may not be folded over.
1730 /// See `cs_op` in `partial_ord.rs` for a model example.
1731 pub fn cs_fold1<F, B>(use_foldl: bool,
1734 enum_nonmatch_f: EnumNonMatchCollapsedFunc<'_>,
1735 cx: &mut ExtCtxt<'_>,
1737 substructure: &Substructure<'_>)
1739 where F: FnMut(&mut ExtCtxt<'_>, Span, P<Expr>, P<Expr>, &[P<Expr>]) -> P<Expr>,
1740 B: FnMut(&mut ExtCtxt<'_>, Option<(Span, P<Expr>, &[P<Expr>])>) -> P<Expr>
1742 match *substructure.fields {
1743 EnumMatching(.., ref all_fields) |
1744 Struct(_, ref all_fields) => {
// Seed the fold: for left-to-right, `b` consumes the first
// field and the rest are folded; for right-to-left, `b`
// consumes the last field; with no fields, `b` gets `None`.
1745 let (base, all_fields) = match (all_fields.is_empty(), use_foldl) {
1747 let field = &all_fields[0];
1748 let args = (field.span, field.self_.clone(), &field.other[..]);
1749 (b(cx, Some(args)), &all_fields[1..])
1752 let idx = all_fields.len() - 1;
1753 let field = &all_fields[idx];
1754 let args = (field.span, field.self_.clone(), &field.other[..]);
1755 (b(cx, Some(args)), &all_fields[..idx])
1757 (true, _) => (b(cx, None), &all_fields[..])
1760 cs_fold_fields(use_foldl, f, base, cx, all_fields)
1762 EnumNonMatchingCollapsed(..) => {
1763 cs_fold_enumnonmatch(enum_nonmatch_f, cx, trait_span, substructure)
1765 StaticEnum(..) | StaticStruct(..) => {
1766 cs_fold_static(cx, trait_span)
1771 /// Call the method that is being derived on all the fields, and then
1772 /// process the collected results. i.e.
1774 /// ```ignore (only-for-syntax-highlight)
1775 /// f(cx, span, vec![self_1.method(__arg_1_1, __arg_2_1),
1776 /// self_2.method(__arg_1_2, __arg_2_2)])
1779 pub fn cs_same_method<F>(f: F,
1780 mut enum_nonmatch_f: EnumNonMatchCollapsedFunc<'_>,
1781 cx: &mut ExtCtxt<'_>,
1783 substructure: &Substructure<'_>)
1785 where F: FnOnce(&mut ExtCtxt<'_>, Span, Vec<P<Expr>>) -> P<Expr>
1787 match *substructure.fields {
1788 EnumMatching(.., ref all_fields) |
1789 Struct(_, ref all_fields) => {
1790 // call self_n.method(other_1_n, other_2_n, ...)
1791 let called = all_fields.iter()
1793 cx.expr_method_call(field.span,
1794 field.self_.clone(),
1795 substructure.method_ident,
// Non-self getters are passed by reference to the method.
1798 .map(|e| cx.expr_addr_of(field.span, e.clone()))
// Hand the per-field call expressions to the combiner.
1803 f(cx, trait_span, called)
// Mismatched enum variants: delegate to the non-match handler.
1805 EnumNonMatchingCollapsed(ref all_self_args, _, tuple) => {
1808 (&all_self_args[..], tuple),
1809 substructure.nonself_args)
1811 StaticEnum(..) | StaticStruct(..) => cx.span_bug(trait_span, "static function in `derive`"),
1815 /// Returns `true` if the type has no value fields
1816 /// (for an enum, no variant has any fields)
1817 pub fn is_type_without_fields(item: &Annotatable) -> bool {
1818 if let Annotatable::Item(ref item) = *item {
1820 ast::ItemKind::Enum(ref enum_def, _) => {
1821 enum_def.variants.iter().all(|v| v.node.data.fields().is_empty())
1823 ast::ItemKind::Struct(ref variant_data, _) => variant_data.fields().is_empty(),