Avoid unnecessary blocks in derive output.

By not committing to either block form or expression form until
necessary, we can avoid lots of unnecessary blocks.
Nicholas Nethercote 2022-06-28 13:10:36 +10:00
parent d4ecc4fb5c
commit 5762d2385e
12 changed files with 406 additions and 427 deletions
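For orientation, a minimal sketch of the idea, separate from the commit itself. It models AST fragments as plain strings rather than rustc's `ast::Stmt`/`P<Expr>` types, so the helper bodies are illustrative only: eagerly committing to expression form (the old `cx.expr_block(...)` pattern) forces an extra wrapper block once the result is used as a function body, while deferring the choice commits exactly once.

// Illustrative model only: statements plus an optional tail expression.
struct BlockOrExpr(Vec<String>, Option<String>);

impl BlockOrExpr {
    // Commit to block form: statements, then the optional tail expression.
    fn into_block(mut self) -> String {
        if let Some(expr) = self.1.take() {
            self.0.push(expr);
        }
        format!("{{ {} }}", self.0.join(" "))
    }
}

fn main() {
    // Old scheme: the combiner always returned a block expression, and the
    // function body wrapped it in braces again.
    let old_combined = format!("{{ {} }}", "*self"); // eager expr_block
    let old_body = format!("{{ {} }}", old_combined);
    assert_eq!(old_body, "{ { *self } }");

    // New scheme: defer; the function body converts the result into a
    // single block, with no inner one.
    let new_body = BlockOrExpr(vec![], Some("*self".into())).into_block();
    assert_eq!(new_body, "{ *self }");
}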

compiler/rustc_builtin_macros/src/deriving/clone.rs

@@ -2,8 +2,7 @@ use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
use crate::deriving::path_std;
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, Expr, Generics, ItemKind, MetaItem, VariantData};
use rustc_ast::{self as ast, Generics, ItemKind, MetaItem, VariantData};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::Span;
@@ -98,7 +97,7 @@ fn cs_clone_simple(
trait_span: Span,
substr: &Substructure<'_>,
is_union: bool,
) -> P<Expr> {
) -> BlockOrExpr {
let mut stmts = Vec::new();
let mut process_variant = |variant: &VariantData| {
for field in variant.fields() {
@@ -139,8 +138,7 @@ fn cs_clone_simple(
),
}
}
stmts.push(cx.stmt_expr(cx.expr_deref(trait_span, cx.expr_self(trait_span))));
cx.expr_block(cx.block(trait_span, stmts))
BlockOrExpr::new_mixed(stmts, cx.expr_deref(trait_span, cx.expr_self(trait_span)))
}
fn cs_clone(
@@ -148,7 +146,7 @@ fn cs_clone(
cx: &mut ExtCtxt<'_>,
trait_span: Span,
substr: &Substructure<'_>,
) -> P<Expr> {
) -> BlockOrExpr {
let ctor_path;
let all_fields;
let fn_path = cx.std_path(&[sym::clone, sym::Clone, sym::clone]);
@@ -177,7 +175,7 @@ fn cs_clone(
}
}
match *vdata {
let expr = match *vdata {
VariantData::Struct(..) => {
let fields = all_fields
.iter()
@@ -201,5 +199,6 @@ fn cs_clone(
cx.expr_call(trait_span, path, subcalls)
}
VariantData::Unit(..) => cx.expr_path(ctor_path),
}
};
BlockOrExpr::new_expr(expr)
}

compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs

@@ -2,8 +2,7 @@ use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
use crate::deriving::path_std;
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, Expr, MetaItem};
use rustc_ast::{self as ast, MetaItem};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{sym, Ident};
use rustc_span::Span;
@@ -52,7 +51,7 @@ fn cs_total_eq_assert(
cx: &mut ExtCtxt<'_>,
trait_span: Span,
substr: &Substructure<'_>,
) -> P<Expr> {
) -> BlockOrExpr {
let mut stmts = Vec::new();
let mut process_variant = |variant: &ast::VariantData| {
for field in variant.fields() {
@@ -78,5 +77,5 @@ fn cs_total_eq_assert(
}
_ => cx.span_bug(trait_span, "unexpected substructure in `derive(Eq)`"),
}
cx.expr_block(cx.block(trait_span, stmts))
BlockOrExpr::new_stmts(stmts)
}

compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs

@@ -3,7 +3,7 @@ use crate::deriving::generic::*;
use crate::deriving::path_std;
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, Expr, MetaItem};
use rustc_ast::{self as ast, MetaItem};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{sym, Ident};
use rustc_span::Span;
@@ -51,7 +51,7 @@ pub fn ordering_collapsed(
cx.expr_call_global(span, fn_cmp_path, vec![lft, rgt])
}
pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> {
pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr {
let test_id = Ident::new(sym::cmp, span);
let equals_path = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal]));
@@ -70,7 +70,7 @@ pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<
// cmp => cmp
// }
//
cs_fold(
let expr = cs_fold(
// foldr nests the if-elses correctly, leaving the first field
// as the outermost one, and the last as the innermost.
false,
@@ -107,5 +107,6 @@ pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<
cx,
span,
substr,
)
);
BlockOrExpr::new_expr(expr)
}
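A hand-written analogue (hypothetical `Pair` type, not rustc output) of the match nesting that `cs_fold` builds here: the first field becomes the outermost match, the last the innermost, and any non-`Equal` result propagates outward through the `cmp => cmp` arms, exactly as the comment in the hunk above sketches.

use std::cmp::Ordering;

struct Pair { a: u32, b: u32 }

fn cmp_pair(x: &Pair, y: &Pair) -> Ordering {
    // Outermost match on the first field...
    match Ord::cmp(&x.a, &y.a) {
        // ...recursing inward only while the fields compare Equal.
        Ordering::Equal => match Ord::cmp(&x.b, &y.b) {
            Ordering::Equal => Ordering::Equal,
            cmp => cmp,
        },
        cmp => cmp,
    }
}

fn main() {
    assert_eq!(cmp_pair(&Pair { a: 1, b: 2 }, &Pair { a: 1, b: 3 }), Ordering::Less);
    assert_eq!(cmp_pair(&Pair { a: 2, b: 0 }, &Pair { a: 1, b: 9 }), Ordering::Greater);
}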

compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs

@@ -15,8 +15,6 @@ pub fn expand_deriving_partial_eq(
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
) {
// structures are equal if all fields are equal, and non equal, if
// any fields are not equal or if the enum variants are different
fn cs_op(
cx: &mut ExtCtxt<'_>,
span: Span,
@@ -24,7 +22,7 @@ pub fn expand_deriving_partial_eq(
op: BinOpKind,
combiner: BinOpKind,
base: bool,
) -> P<Expr> {
) -> BlockOrExpr {
let op = |cx: &mut ExtCtxt<'_>, span: Span, self_f: P<Expr>, other_fs: &[P<Expr>]| {
let [other_f] = other_fs else {
cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`");
@@ -33,7 +31,7 @@ pub fn expand_deriving_partial_eq(
cx.expr_binary(span, op, self_f, other_f.clone())
};
cs_fold1(
let expr = cs_fold1(
true, // use foldl
|cx, span, subexpr, self_f, other_fs| {
let eq = op(cx, span, self_f, other_fs);
@@ -52,13 +50,14 @@ pub fn expand_deriving_partial_eq(
cx,
span,
substr,
)
);
BlockOrExpr::new_expr(expr)
}
fn cs_eq(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> {
fn cs_eq(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr {
cs_op(cx, span, substr, BinOpKind::Eq, BinOpKind::And, true)
}
fn cs_ne(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> {
fn cs_ne(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr {
cs_op(cx, span, substr, BinOpKind::Ne, BinOpKind::Or, false)
}
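A hand-written analogue (hypothetical `Pt` type, not from the commit) of what `cs_op` folds together: `eq` chains per-field `==` with `&&`, and `ne` chains `!=` with `||`, matching the chained comparisons visible in the test expectations later in this commit. For a fieldless type the fold bottoms out at the `base` literal, which is why `cs_eq` passes `true` and `cs_ne` passes `false`.

// Hypothetical type standing in for the derive's input.
struct Pt { x: u32, y: u32 }

fn eq(a: &Pt, b: &Pt) -> bool { a.x == b.x && a.y == b.y } // op `==`, combiner `&&`
fn ne(a: &Pt, b: &Pt) -> bool { a.x != b.x || a.y != b.y } // op `!=`, combiner `||`

fn main() {
    let (p, q) = (Pt { x: 1, y: 2 }, Pt { x: 1, y: 3 });
    assert!(!eq(&p, &q));
    assert!(ne(&p, &q));
}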

compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs

@@ -2,8 +2,7 @@ use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
use crate::deriving::{path_std, pathvec_std};
use rustc_ast::ptr::P;
use rustc_ast::{Expr, MetaItem};
use rustc_ast::MetaItem;
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{sym, Ident};
use rustc_span::Span;
@@ -48,7 +47,7 @@ pub fn expand_deriving_partial_ord(
trait_def.expand(cx, mitem, item, push)
}
pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> {
pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr {
let test_id = Ident::new(sym::cmp, span);
let ordering = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal]));
let ordering_expr = cx.expr_path(ordering.clone());
@@ -69,7 +68,7 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_
// cmp => cmp
// }
//
cs_fold(
let expr = cs_fold(
// foldr nests the if-elses correctly, leaving the first field
// as the outermost one, and the last as the innermost.
false,
@@ -110,5 +109,6 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_
cx,
span,
substr,
)
);
BlockOrExpr::new_expr(expr)
}

compiler/rustc_builtin_macros/src/deriving/debug.rs

@@ -2,8 +2,7 @@ use crate::deriving::generic::ty.*;
use crate::deriving::generic::*;
use crate::deriving::path_std;
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, Expr, MetaItem};
use rustc_ast::{self as ast, MetaItem};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::Span;
@@ -42,7 +41,7 @@ pub fn expand_deriving_debug(
trait_def.expand(cx, mitem, item, push)
}
fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> {
fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr {
let (ident, vdata, fields) = match substr.fields {
Struct(vdata, fields) => (substr.type_ident, *vdata, fields),
EnumMatching(_, _, v, fields) => (v.ident, &v.data, fields),
@@ -74,7 +73,8 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
if fields.is_empty() {
// Special case for no fields.
let fn_path_write_str = cx.std_path(&[sym::fmt, sym::Formatter, sym::write_str]);
cx.expr_call_global(span, fn_path_write_str, vec![fmt, name])
let expr = cx.expr_call_global(span, fn_path_write_str, vec![fmt, name]);
BlockOrExpr::new_expr(expr)
} else if fields.len() <= CUTOFF {
// Few enough fields that we can use a specific-length method.
let debug = if is_struct {
@@ -100,7 +100,8 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
let field = cx.expr_addr_of(field.span, field);
args.push(field);
}
cx.expr_call_global(span, fn_path_debug, args)
let expr = cx.expr_call_global(span, fn_path_debug, args);
BlockOrExpr::new_expr(expr)
} else {
// Enough fields that we must use the any-length method.
let mut name_exprs = Vec::with_capacity(fields.len());
@@ -176,8 +177,6 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
stmts.push(names_let.unwrap());
}
stmts.push(values_let);
stmts.push(cx.stmt_expr(expr));
cx.expr_block(cx.block(span, stmts))
BlockOrExpr::new_mixed(stmts, expr)
}
}
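The three strategies above (a `write_str` call for no fields, a fixed-arity debug helper up to `CUTOFF` fields, and `names`/`values` slices beyond that) all target internal `Formatter` entry points. A stable-API analogue of what the generated code accomplishes, with a trimmed-down stand-in for the test file's `Big`:

use std::fmt;

struct Big { b1: u32, b2: u32 }

impl fmt::Debug for Big {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The derive instead emits the internal
        // `Formatter::debug_struct_fields_finish(f, "Big", names, values)`,
        // as the test expectations later in this commit show.
        f.debug_struct("Big").field("b1", &self.b1).field("b2", &self.b2).finish()
    }
}

fn main() {
    println!("{:?}", Big { b1: 1, b2: 2 });
}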

compiler/rustc_builtin_macros/src/deriving/decodable.rs

@@ -62,7 +62,7 @@ fn decodable_substructure(
trait_span: Span,
substr: &Substructure<'_>,
krate: Symbol,
) -> P<Expr> {
) -> BlockOrExpr {
let decoder = substr.nonself_args[0].clone();
let recurse = vec![
Ident::new(krate, trait_span),
@@ -74,7 +74,7 @@ fn decodable_substructure(
let blkarg = Ident::new(sym::_d, trait_span);
let blkdecoder = cx.expr_ident(trait_span, blkarg);
match *substr.fields {
let expr = match *substr.fields {
StaticStruct(_, ref summary) => {
let nfields = match *summary {
Unnamed(ref fields, _) => fields.len(),
@@ -173,7 +173,8 @@ fn decodable_substructure(
)
}
_ => cx.bug("expected StaticEnum or StaticStruct in derive(Decodable)"),
}
};
BlockOrExpr::new_expr(expr)
}
/// Creates a decoder for a single enum variant/struct:

compiler/rustc_builtin_macros/src/deriving/default.rs

@@ -1,11 +1,10 @@
use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
use rustc_ast::ptr::P;
use rustc_ast as ast;
use rustc_ast::walk_list;
use rustc_ast::EnumDef;
use rustc_ast::VariantData;
use rustc_ast::{Expr, MetaItem};
use rustc_errors::Applicability;
use rustc_expand::base::{Annotatable, DummyResult, ExtCtxt};
use rustc_span::symbol::Ident;
@@ -16,7 +15,7 @@ use smallvec::SmallVec;
pub fn expand_deriving_default(
cx: &mut ExtCtxt<'_>,
span: Span,
mitem: &MetaItem,
mitem: &ast::MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
) {
@@ -59,12 +58,12 @@ fn default_struct_substructure(
trait_span: Span,
substr: &Substructure<'_>,
summary: &StaticFields,
) -> P<Expr> {
) -> BlockOrExpr {
// Note that `kw::Default` is "default" and `sym::Default` is "Default"!
let default_ident = cx.std_path(&[kw::Default, sym::Default, kw::Default]);
let default_call = |span| cx.expr_call_global(span, default_ident.clone(), Vec::new());
match summary {
let expr = match summary {
Unnamed(ref fields, is_tuple) => {
if !is_tuple {
cx.expr_ident(trait_span, substr.type_ident)
@@ -80,31 +79,27 @@ fn default_struct_substructure(
.collect();
cx.expr_struct_ident(trait_span, substr.type_ident, default_fields)
}
}
};
BlockOrExpr::new_expr(expr)
}
fn default_enum_substructure(
cx: &mut ExtCtxt<'_>,
trait_span: Span,
enum_def: &EnumDef,
) -> P<Expr> {
let Ok(default_variant) = extract_default_variant(cx, enum_def, trait_span) else {
return DummyResult::raw_expr(trait_span, true);
) -> BlockOrExpr {
let expr = if let Ok(default_variant) = extract_default_variant(cx, enum_def, trait_span)
&& let Ok(_) = validate_default_attribute(cx, default_variant)
{
// We now know there is exactly one unit variant with exactly one `#[default]` attribute.
cx.expr_path(cx.path(
default_variant.span,
vec![Ident::new(kw::SelfUpper, default_variant.span), default_variant.ident],
))
} else {
DummyResult::raw_expr(trait_span, true)
};
// At this point, we know that there is exactly one variant with a `#[default]` attribute. The
// attribute hasn't yet been validated.
if let Err(()) = validate_default_attribute(cx, default_variant) {
return DummyResult::raw_expr(trait_span, true);
}
// We now know there is exactly one unit variant with exactly one `#[default]` attribute.
cx.expr_path(cx.path(
default_variant.span,
vec![Ident::new(kw::SelfUpper, default_variant.span), default_variant.ident],
))
BlockOrExpr::new_expr(expr)
}
fn extract_default_variant<'a>(
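For reference, the input/output shape this code handles (an illustrative example, not from the commit): `extract_default_variant` and `validate_default_attribute` accept exactly one unit variant marked `#[default]`, and the generated body is just the path `Self::<Variant>`, hence `BlockOrExpr::new_expr`.

#[derive(Default)]
#[allow(dead_code)]
enum Flavor {
    #[default]
    Vanilla,
    Chocolate,
}

fn main() {
    // The derived body is essentially `Self::Vanilla`.
    assert!(matches!(Flavor::default(), Flavor::Vanilla));
}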

compiler/rustc_builtin_macros/src/deriving/encodable.rs

@@ -89,8 +89,7 @@ use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
use crate::deriving::pathvec_std;
use rustc_ast::ptr::P;
use rustc_ast::{Expr, ExprKind, MetaItem, Mutability};
use rustc_ast::{ExprKind, MetaItem, Mutability};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::Span;
@@ -147,7 +146,7 @@ fn encodable_substructure(
trait_span: Span,
substr: &Substructure<'_>,
krate: Symbol,
) -> P<Expr> {
) -> BlockOrExpr {
let encoder = substr.nonself_args[0].clone();
// throw an underscore in front to suppress unused variable warnings
let blkarg = Ident::new(sym::_e, trait_span);
@@ -208,7 +207,7 @@ fn encodable_substructure(
let fn_emit_struct_path =
cx.def_site_path(&[sym::rustc_serialize, sym::Encoder, sym::emit_struct]);
cx.expr_call_global(
let expr = cx.expr_call_global(
trait_span,
fn_emit_struct_path,
vec![
@@ -217,7 +216,8 @@ fn encodable_substructure(
cx.expr_usize(trait_span, fields.len()),
blk,
],
)
);
BlockOrExpr::new_expr(expr)
}
EnumMatching(idx, _, variant, ref fields) => {
@@ -279,12 +279,12 @@ fn encodable_substructure(
let blk = cx.lambda1(trait_span, call, blkarg);
let fn_emit_enum_path: Vec<_> =
cx.def_site_path(&[sym::rustc_serialize, sym::Encoder, sym::emit_enum]);
let ret = cx.expr_call_global(
let expr = cx.expr_call_global(
trait_span,
fn_emit_enum_path,
vec![encoder, cx.expr_str(trait_span, substr.type_ident.name), blk],
);
cx.expr_block(cx.block(trait_span, vec![me, cx.stmt_expr(ret)]))
BlockOrExpr::new_mixed(vec![me], expr)
}
_ => cx.bug("expected Struct or EnumMatching in derive(Encodable)"),

compiler/rustc_builtin_macros/src/deriving/generic/mod.rs

@@ -296,7 +296,7 @@ pub enum SubstructureFields<'a> {
/// Combine the values of all the fields together. The last argument is
/// all the fields of all the structures.
pub type CombineSubstructureFunc<'a> =
Box<dyn FnMut(&mut ExtCtxt<'_>, Span, &Substructure<'_>) -> P<Expr> + 'a>;
Box<dyn FnMut(&mut ExtCtxt<'_>, Span, &Substructure<'_>) -> BlockOrExpr + 'a>;
/// Deal with non-matching enum variants. The slice is the identifiers holding
/// the variant index value for each of the `Self` arguments.
@@ -314,6 +314,48 @@ struct TypeParameter {
ty: P<ast::Ty>,
}
// The code snippets built up for derived code are sometimes used as blocks
// (e.g. in a function body) and sometimes used as expressions (e.g. in a match
// arm). This structure avoids committing to either form until necessary,
// avoiding the insertion of any unnecessary blocks.
//
// The statements come before the expression.
pub struct BlockOrExpr(Vec<ast::Stmt>, Option<P<Expr>>);
impl BlockOrExpr {
pub fn new_stmts(stmts: Vec<ast::Stmt>) -> BlockOrExpr {
BlockOrExpr(stmts, None)
}
pub fn new_expr(expr: P<Expr>) -> BlockOrExpr {
BlockOrExpr(vec![], Some(expr))
}
pub fn new_mixed(stmts: Vec<ast::Stmt>, expr: P<Expr>) -> BlockOrExpr {
BlockOrExpr(stmts, Some(expr))
}
// Converts it into a block.
fn into_block(mut self, cx: &ExtCtxt<'_>, span: Span) -> P<ast::Block> {
if let Some(expr) = self.1 {
self.0.push(cx.stmt_expr(expr));
}
cx.block(span, self.0)
}
// Converts it into an expression.
fn into_expr(self, cx: &ExtCtxt<'_>, span: Span) -> P<Expr> {
if self.0.is_empty() {
match self.1 {
None => cx.expr_block(cx.block(span, vec![])),
Some(expr) => expr,
}
} else {
cx.expr_block(self.into_block(cx, span))
}
}
}
/// This method helps to extract all the type parameters referenced from a
/// type. For a type parameter `<T>`, it looks for either a `TyPath` that
/// is not global and starts with `T`, or a `TyQPath`.
@@ -827,7 +869,7 @@ impl<'a> MethodDef<'a> {
type_ident: Ident,
nonself_args: &[P<Expr>],
fields: &SubstructureFields<'_>,
) -> P<Expr> {
) -> BlockOrExpr {
let span = trait_.span;
let substructure = Substructure { type_ident, nonself_args, fields };
let mut f = self.combine_substructure.borrow_mut();
@@ -902,7 +944,7 @@ impl<'a> MethodDef<'a> {
generics: &Generics,
explicit_self: Option<ast::ExplicitSelf>,
arg_types: Vec<(Ident, P<ast::Ty>)>,
body: P<Expr>,
body: BlockOrExpr,
) -> P<ast::AssocItem> {
let span = trait_.span;
// Create the generics that aren't for `Self`.
@@ -921,7 +963,7 @@ impl<'a> MethodDef<'a> {
let method_ident = Ident::new(self.name, span);
let fn_decl = cx.fn_decl(args, ast::FnRetTy::Ty(ret_type));
let body_block = cx.block_expr(body);
let body_block = body.into_block(cx, span);
let trait_lo_sp = span.shrink_to_lo();
@@ -986,7 +1028,7 @@ impl<'a> MethodDef<'a> {
nonself_args: &[P<Expr>],
use_temporaries: bool,
is_packed: bool,
) -> P<Expr> {
) -> BlockOrExpr {
let mut raw_fields = Vec::new(); // Vec<[fields of self], [fields of next Self arg], [etc]>
let span = trait_.span;
let mut patterns = Vec::new();
@@ -1047,16 +1089,14 @@ impl<'a> MethodDef<'a> {
);
if !is_packed {
body.span = span;
body
} else {
// Do the let-destructuring.
let mut stmts: Vec<_> = iter::zip(self_args, patterns)
.map(|(arg_expr, pat)| cx.stmt_let_pat(span, pat, arg_expr.clone()))
.collect();
stmts.push(cx.stmt_expr(body));
cx.expr_block(cx.block(span, stmts))
stmts.extend(std::mem::take(&mut body.0));
BlockOrExpr(stmts, body.1)
}
}
@@ -1067,7 +1107,7 @@ impl<'a> MethodDef<'a> {
struct_def: &VariantData,
type_ident: Ident,
nonself_args: &[P<Expr>],
) -> P<Expr> {
) -> BlockOrExpr {
let summary = trait_.summarise_struct(cx, struct_def);
self.call_substructure_method(
@@ -1130,7 +1170,7 @@ impl<'a> MethodDef<'a> {
type_ident: Ident,
mut self_args: Vec<P<Expr>>,
nonself_args: &[P<Expr>],
) -> P<Expr> {
) -> BlockOrExpr {
let span = trait_.span;
let variants = &enum_def.variants;
@@ -1253,13 +1293,9 @@ impl<'a> MethodDef<'a> {
// Self arg, assuming all are instances of VariantK.
// Build up code associated with such a case.
let substructure = EnumMatching(index, variants.len(), variant, field_tuples);
let arm_expr = self.call_substructure_method(
cx,
trait_,
type_ident,
nonself_args,
&substructure,
);
let arm_expr = self
.call_substructure_method(cx, trait_, type_ident, nonself_args, &substructure)
.into_expr(cx, span);
cx.arm(span, single_pat, arm_expr)
})
@@ -1271,13 +1307,16 @@ impl<'a> MethodDef<'a> {
// The index and actual variant aren't meaningful in this case,
// so just use whatever
let substructure = EnumMatching(0, variants.len(), v, Vec::new());
Some(self.call_substructure_method(
cx,
trait_,
type_ident,
nonself_args,
&substructure,
))
Some(
self.call_substructure_method(
cx,
trait_,
type_ident,
nonself_args,
&substructure,
)
.into_expr(cx, span),
)
}
_ if variants.len() > 1 && self_args.len() > 1 => {
// Since we know that all the arguments will match if we reach
@@ -1341,13 +1380,15 @@ impl<'a> MethodDef<'a> {
}
}
let arm_expr = self.call_substructure_method(
cx,
trait_,
type_ident,
nonself_args,
&catch_all_substructure,
);
let arm_expr = self
.call_substructure_method(
cx,
trait_,
type_ident,
nonself_args,
&catch_all_substructure,
)
.into_expr(cx, span);
// Final wrinkle: the self_args are expressions that deref
// down to desired places, but we cannot actually deref
@@ -1371,8 +1412,7 @@ impl<'a> MethodDef<'a> {
// }
let all_match = cx.expr_match(span, match_arg, match_arms);
let arm_expr = cx.expr_if(span, discriminant_test, all_match, Some(arm_expr));
index_let_stmts.push(cx.stmt_expr(arm_expr));
cx.expr_block(cx.block(span, index_let_stmts))
BlockOrExpr(index_let_stmts, Some(arm_expr))
} else if variants.is_empty() {
// As an additional wrinkle, For a zero-variant enum A,
// currently the compiler
@@ -1423,7 +1463,7 @@ impl<'a> MethodDef<'a> {
// derive Debug on such a type could here generate code
// that needs the feature gate enabled.)
deriving::call_unreachable(cx, span)
BlockOrExpr(vec![], Some(deriving::call_unreachable(cx, span)))
} else {
// Final wrinkle: the self_args are expressions that deref
// down to desired places, but we cannot actually deref
@@ -1432,7 +1472,7 @@ impl<'a> MethodDef<'a> {
// `(*self, *__arg_0, ...)` into `(&*self, &*__arg_0, ...)`.
self_args.map_in_place(|self_arg| cx.expr_addr_of(span, self_arg));
let match_arg = cx.expr(span, ast::ExprKind::Tup(self_args));
cx.expr_match(span, match_arg, match_arms)
BlockOrExpr(vec![], Some(cx.expr_match(span, match_arg, match_arms)))
}
}
@@ -1443,7 +1483,7 @@ impl<'a> MethodDef<'a> {
enum_def: &EnumDef,
type_ident: Ident,
nonself_args: &[P<Expr>],
) -> P<Expr> {
) -> BlockOrExpr {
let summary = enum_def
.variants
.iter()
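To tie the three `BlockOrExpr` constructors to the updated expectations in the test file below (a summary, not text from the commit):

// new_stmts(stmts): no tail expression. An empty `stmts` now renders as the
//   body `{}` (e.g. `assert_receiver_is_total_eq` for `Empty`, formerly
//   `{ {} }`).
// new_expr(expr): a lone expression. The derived body becomes `{ expr }`
//   with no inner block (e.g. the single `match` in the derived `cmp`).
// new_mixed(stmts, expr): statements then a tail expression in one block
//   (e.g. `clone` for `Point`: two `AssertParamIsClone` lets, then `*self`).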

compiler/rustc_builtin_macros/src/deriving/hash.rs

@@ -2,8 +2,7 @@ use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
use crate::deriving::{self, path_std, pathvec_std};
use rustc_ast::ptr::P;
use rustc_ast::{Expr, MetaItem, Mutability};
use rustc_ast::{MetaItem, Mutability};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::sym;
use rustc_span::Span;
@@ -45,7 +44,11 @@ pub fn expand_deriving_hash(
hash_trait_def.expand(cx, mitem, item, push);
}
fn hash_substructure(cx: &mut ExtCtxt<'_>, trait_span: Span, substr: &Substructure<'_>) -> P<Expr> {
fn hash_substructure(
cx: &mut ExtCtxt<'_>,
trait_span: Span,
substr: &Substructure<'_>,
) -> BlockOrExpr {
let [state_expr] = substr.nonself_args else {
cx.span_bug(trait_span, "incorrect number of arguments in `derive(Hash)`");
};
@@ -81,6 +84,5 @@ fn hash_substructure(cx: &mut ExtCtxt<'_>, trait_span: Span, substr: &Substructu
stmts.extend(
fields.iter().map(|FieldInfo { ref self_, span, .. }| call_hash(*span, self_.clone())),
);
cx.expr_block(cx.block(trait_span, stmts))
BlockOrExpr::new_stmts(stmts)
}

src/test/ui/deriving/deriving-all-codegen.stdout

@@ -28,7 +28,7 @@ struct Empty;
#[allow(unused_qualifications)]
impl ::core::clone::Clone for Empty {
#[inline]
fn clone(&self) -> Empty { { *self } }
fn clone(&self) -> Empty { *self }
}
#[automatically_derived]
#[allow(unused_qualifications)]
@@ -49,7 +49,7 @@ impl ::core::default::Default for Empty {
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::hash::Hash for Empty {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () { {} }
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {}
}
impl ::core::marker::StructuralPartialEq for Empty {}
#[automatically_derived]
@@ -65,7 +65,7 @@ impl ::core::cmp::Eq for Empty {
#[inline]
#[doc(hidden)]
#[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () { {} }
fn assert_receiver_is_total_eq(&self) -> () {}
}
#[automatically_derived]
#[allow(unused_qualifications)]
@@ -95,11 +95,9 @@ struct Point {
impl ::core::clone::Clone for Point {
#[inline]
fn clone(&self) -> Point {
{
let _: ::core::clone::AssertParamIsClone<u32>;
let _: ::core::clone::AssertParamIsClone<u32>;
*self
}
let _: ::core::clone::AssertParamIsClone<u32>;
let _: ::core::clone::AssertParamIsClone<u32>;
*self
}
}
#[automatically_derived]
@@ -128,10 +126,8 @@ impl ::core::default::Default for Point {
#[allow(unused_qualifications)]
impl ::core::hash::Hash for Point {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
{
::core::hash::Hash::hash(&self.x, state);
::core::hash::Hash::hash(&self.y, state)
}
::core::hash::Hash::hash(&self.x, state);
::core::hash::Hash::hash(&self.y, state)
}
}
impl ::core::marker::StructuralPartialEq for Point {}
@@ -155,10 +151,8 @@ impl ::core::cmp::Eq for Point {
#[doc(hidden)]
#[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () {
{
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
}
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
}
}
#[automatically_derived]
@@ -229,15 +223,13 @@ impl ::core::clone::Clone for Big {
#[allow(unused_qualifications)]
impl ::core::fmt::Debug for Big {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
{
let names: &'static _ =
&["b1", "b2", "b3", "b4", "b5", "b6", "b7", "b8"];
let values: &[&dyn ::core::fmt::Debug] =
&[&&self.b1, &&self.b2, &&self.b3, &&self.b4, &&self.b5,
&&self.b6, &&self.b7, &&self.b8];
::core::fmt::Formatter::debug_struct_fields_finish(f, "Big",
names, values)
}
let names: &'static _ =
&["b1", "b2", "b3", "b4", "b5", "b6", "b7", "b8"];
let values: &[&dyn ::core::fmt::Debug] =
&[&&self.b1, &&self.b2, &&self.b3, &&self.b4, &&self.b5,
&&self.b6, &&self.b7, &&self.b8];
::core::fmt::Formatter::debug_struct_fields_finish(f, "Big", names,
values)
}
}
#[automatically_derived]
@@ -261,16 +253,14 @@ impl ::core::default::Default for Big {
#[allow(unused_qualifications)]
impl ::core::hash::Hash for Big {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
{
::core::hash::Hash::hash(&self.b1, state);
::core::hash::Hash::hash(&self.b2, state);
::core::hash::Hash::hash(&self.b3, state);
::core::hash::Hash::hash(&self.b4, state);
::core::hash::Hash::hash(&self.b5, state);
::core::hash::Hash::hash(&self.b6, state);
::core::hash::Hash::hash(&self.b7, state);
::core::hash::Hash::hash(&self.b8, state)
}
::core::hash::Hash::hash(&self.b1, state);
::core::hash::Hash::hash(&self.b2, state);
::core::hash::Hash::hash(&self.b3, state);
::core::hash::Hash::hash(&self.b4, state);
::core::hash::Hash::hash(&self.b5, state);
::core::hash::Hash::hash(&self.b6, state);
::core::hash::Hash::hash(&self.b7, state);
::core::hash::Hash::hash(&self.b8, state)
}
}
impl ::core::marker::StructuralPartialEq for Big {}
@@ -300,16 +290,14 @@ impl ::core::cmp::Eq for Big {
#[doc(hidden)]
#[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () {
{
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
}
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
}
}
#[automatically_derived]
@@ -416,7 +404,8 @@ struct Packed(u32);
impl ::core::clone::Clone for Packed {
#[inline]
fn clone(&self) -> Packed {
{ let _: ::core::clone::AssertParamIsClone<u32>; *self }
let _: ::core::clone::AssertParamIsClone<u32>;
*self
}
}
#[automatically_derived]
@@ -426,11 +415,9 @@ impl ::core::marker::Copy for Packed { }
#[allow(unused_qualifications)]
impl ::core::fmt::Debug for Packed {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
{
let Self(__self_0_0) = *self;
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Packed",
&&__self_0_0)
}
let Self(__self_0_0) = *self;
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Packed",
&&__self_0_0)
}
}
#[automatically_derived]
@@ -443,10 +430,8 @@ impl ::core::default::Default for Packed {
#[allow(unused_qualifications)]
impl ::core::hash::Hash for Packed {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
{
let Self(__self_0_0) = *self;
{ ::core::hash::Hash::hash(&__self_0_0, state) }
}
let Self(__self_0_0) = *self;
::core::hash::Hash::hash(&__self_0_0, state)
}
}
impl ::core::marker::StructuralPartialEq for Packed {}
@@ -455,19 +440,15 @@ impl ::core::marker::StructuralPartialEq for Packed {}
impl ::core::cmp::PartialEq for Packed {
#[inline]
fn eq(&self, other: &Packed) -> bool {
{
let Self(__self_0_0) = *self;
let Self(__self_1_0) = *other;
__self_0_0 == __self_1_0
}
let Self(__self_0_0) = *self;
let Self(__self_1_0) = *other;
__self_0_0 == __self_1_0
}
#[inline]
fn ne(&self, other: &Packed) -> bool {
{
let Self(__self_0_0) = *self;
let Self(__self_1_0) = *other;
__self_0_0 != __self_1_0
}
let Self(__self_0_0) = *self;
let Self(__self_1_0) = *other;
__self_0_0 != __self_1_0
}
}
impl ::core::marker::StructuralEq for Packed {}
@@ -478,7 +459,7 @@ impl ::core::cmp::Eq for Packed {
#[doc(hidden)]
#[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () {
{ let _: ::core::cmp::AssertParamIsEq<u32>; }
let _: ::core::cmp::AssertParamIsEq<u32>;
}
}
#[automatically_derived]
@@ -487,15 +468,12 @@ impl ::core::cmp::PartialOrd for Packed {
#[inline]
fn partial_cmp(&self, other: &Packed)
-> ::core::option::Option<::core::cmp::Ordering> {
{
let Self(__self_0_0) = *self;
let Self(__self_1_0) = *other;
match ::core::cmp::PartialOrd::partial_cmp(&__self_0_0,
&__self_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal) =>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
}
let Self(__self_0_0) = *self;
let Self(__self_1_0) = *other;
match ::core::cmp::PartialOrd::partial_cmp(&__self_0_0, &__self_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal) =>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
}
}
}
@@ -504,13 +482,11 @@ impl ::core::cmp::PartialOrd for Packed {
impl ::core::cmp::Ord for Packed {
#[inline]
fn cmp(&self, other: &Packed) -> ::core::cmp::Ordering {
{
let Self(__self_0_0) = *self;
let Self(__self_1_0) = *other;
match ::core::cmp::Ord::cmp(&__self_0_0, &__self_1_0) {
::core::cmp::Ordering::Equal => ::core::cmp::Ordering::Equal,
cmp => cmp,
}
let Self(__self_0_0) = *self;
let Self(__self_1_0) = *other;
match ::core::cmp::Ord::cmp(&__self_0_0, &__self_1_0) {
::core::cmp::Ordering::Equal => ::core::cmp::Ordering::Equal,
cmp => cmp,
}
}
}
@@ -521,7 +497,7 @@ enum Enum0 {}
#[allow(unused_qualifications)]
impl ::core::clone::Clone for Enum0 {
#[inline]
fn clone(&self) -> Enum0 { { *self } }
fn clone(&self) -> Enum0 { *self }
}
#[automatically_derived]
#[allow(unused_qualifications)]
@ -556,7 +532,7 @@ impl ::core::cmp::Eq for Enum0 {
#[inline]
#[doc(hidden)]
#[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () { {} }
fn assert_receiver_is_total_eq(&self) -> () {}
}
#[automatically_derived]
#[allow(unused_qualifications)]
@@ -642,7 +618,7 @@ impl ::core::cmp::Eq for Enum1 {
#[doc(hidden)]
#[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () {
{ let _: ::core::cmp::AssertParamIsEq<u32>; }
let _: ::core::cmp::AssertParamIsEq<u32>;
}
}
#[automatically_derived]
@@ -693,7 +669,7 @@ enum Fieldless {
#[allow(unused_qualifications)]
impl ::core::clone::Clone for Fieldless {
#[inline]
fn clone(&self) -> Fieldless { { *self } }
fn clone(&self) -> Fieldless { *self }
}
#[automatically_derived]
#[allow(unused_qualifications)]
@@ -733,13 +709,11 @@ impl ::core::marker::StructuralPartialEq for Fieldless {}
impl ::core::cmp::PartialEq for Fieldless {
#[inline]
fn eq(&self, other: &Fieldless) -> bool {
{
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) { _ => true, }
} else { false }
}
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) { _ => true, }
} else { false }
}
}
impl ::core::marker::StructuralEq for Fieldless {}
@@ -749,7 +723,7 @@ impl ::core::cmp::Eq for Fieldless {
#[inline]
#[doc(hidden)]
#[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () { {} }
fn assert_receiver_is_total_eq(&self) -> () {}
}
#[automatically_derived]
#[allow(unused_qualifications)]
@@ -757,19 +731,16 @@ impl ::core::cmp::PartialOrd for Fieldless {
#[inline]
fn partial_cmp(&self, other: &Fieldless)
-> ::core::option::Option<::core::cmp::Ordering> {
{
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
_ =>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
}
} else {
::core::cmp::PartialOrd::partial_cmp(&__self_vi,
&__arg_1_vi)
}
}
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
_ =>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
}
} else {
::core::cmp::PartialOrd::partial_cmp(&__self_vi, &__arg_1_vi)
}
}
}
#[automatically_derived]
@@ -777,15 +748,11 @@ impl ::core::cmp::PartialOrd for Fieldless {
impl ::core::cmp::Ord for Fieldless {
#[inline]
fn cmp(&self, other: &Fieldless) -> ::core::cmp::Ordering {
{
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
_ => ::core::cmp::Ordering::Equal,
}
} else { ::core::cmp::Ord::cmp(&__self_vi, &__arg_1_vi) }
}
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) { _ => ::core::cmp::Ordering::Equal, }
} else { ::core::cmp::Ord::cmp(&__self_vi, &__arg_1_vi) }
}
}
@@ -806,12 +773,10 @@ enum Mixed {
impl ::core::clone::Clone for Mixed {
#[inline]
fn clone(&self) -> Mixed {
{
let _: ::core::clone::AssertParamIsClone<u32>;
let _: ::core::clone::AssertParamIsClone<u32>;
let _: ::core::clone::AssertParamIsClone<u32>;
*self
}
let _: ::core::clone::AssertParamIsClone<u32>;
let _: ::core::clone::AssertParamIsClone<u32>;
let _: ::core::clone::AssertParamIsClone<u32>;
*self
}
}
#[automatically_derived]
@@ -868,37 +833,33 @@ impl ::core::marker::StructuralPartialEq for Mixed {}
impl ::core::cmp::PartialEq for Mixed {
#[inline]
fn eq(&self, other: &Mixed) -> bool {
{
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Mixed::R(ref __self_0), &Mixed::R(ref __arg_1_0)) =>
*__self_0 == *__arg_1_0,
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },
&Mixed::S { d1: ref __arg_1_0, d2: ref __arg_1_1 }) =>
*__self_0 == *__arg_1_0 && *__self_1 == *__arg_1_1,
_ => true,
}
} else { false }
}
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Mixed::R(ref __self_0), &Mixed::R(ref __arg_1_0)) =>
*__self_0 == *__arg_1_0,
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },
&Mixed::S { d1: ref __arg_1_0, d2: ref __arg_1_1 }) =>
*__self_0 == *__arg_1_0 && *__self_1 == *__arg_1_1,
_ => true,
}
} else { false }
}
#[inline]
fn ne(&self, other: &Mixed) -> bool {
{
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Mixed::R(ref __self_0), &Mixed::R(ref __arg_1_0)) =>
*__self_0 != *__arg_1_0,
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },
&Mixed::S { d1: ref __arg_1_0, d2: ref __arg_1_1 }) =>
*__self_0 != *__arg_1_0 || *__self_1 != *__arg_1_1,
_ => false,
}
} else { true }
}
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Mixed::R(ref __self_0), &Mixed::R(ref __arg_1_0)) =>
*__self_0 != *__arg_1_0,
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },
&Mixed::S { d1: ref __arg_1_0, d2: ref __arg_1_1 }) =>
*__self_0 != *__arg_1_0 || *__self_1 != *__arg_1_1,
_ => false,
}
} else { true }
}
}
impl ::core::marker::StructuralEq for Mixed {}
@@ -909,11 +870,9 @@ impl ::core::cmp::Eq for Mixed {
#[doc(hidden)]
#[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () {
{
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
}
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<u32>;
}
}
#[automatically_derived]
@@ -922,42 +881,39 @@ impl ::core::cmp::PartialOrd for Mixed {
#[inline]
fn partial_cmp(&self, other: &Mixed)
-> ::core::option::Option<::core::cmp::Ordering> {
{
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Mixed::R(ref __self_0), &Mixed::R(ref __arg_1_0)) =>
match ::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },
&Mixed::S { d1: ref __arg_1_0, d2: ref __arg_1_1 }) =>
match ::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&*__self_1,
&*__arg_1_1) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
cmp => cmp,
},
_ =>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
}
} else {
::core::cmp::PartialOrd::partial_cmp(&__self_vi,
&__arg_1_vi)
}
}
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Mixed::R(ref __self_0), &Mixed::R(ref __arg_1_0)) =>
match ::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },
&Mixed::S { d1: ref __arg_1_0, d2: ref __arg_1_1 }) =>
match ::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&*__self_1,
&*__arg_1_1) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
cmp => cmp,
},
_ =>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
}
} else {
::core::cmp::PartialOrd::partial_cmp(&__self_vi, &__arg_1_vi)
}
}
}
#[automatically_derived]
@@ -965,32 +921,30 @@ impl ::core::cmp::PartialOrd for Mixed {
impl ::core::cmp::Ord for Mixed {
#[inline]
fn cmp(&self, other: &Mixed) -> ::core::cmp::Ordering {
{
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Mixed::R(ref __self_0), &Mixed::R(ref __arg_1_0)) =>
match ::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },
&Mixed::S { d1: ref __arg_1_0, d2: ref __arg_1_1 }) =>
match ::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0) {
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&*__self_1, &*__arg_1_1) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
cmp => cmp,
},
_ => ::core::cmp::Ordering::Equal,
}
} else { ::core::cmp::Ord::cmp(&__self_vi, &__arg_1_vi) }
}
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Mixed::R(ref __self_0), &Mixed::R(ref __arg_1_0)) =>
match ::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },
&Mixed::S { d1: ref __arg_1_0, d2: ref __arg_1_1 }) =>
match ::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0) {
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&*__self_1, &*__arg_1_1) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
cmp => cmp,
},
_ => ::core::cmp::Ordering::Equal,
}
} else { ::core::cmp::Ord::cmp(&__self_vi, &__arg_1_vi) }
}
}
@@ -1058,39 +1012,35 @@ impl ::core::marker::StructuralPartialEq for Fielded {}
impl ::core::cmp::PartialEq for Fielded {
#[inline]
fn eq(&self, other: &Fielded) -> bool {
{
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Fielded::X(ref __self_0), &Fielded::X(ref __arg_1_0)) =>
*__self_0 == *__arg_1_0,
(&Fielded::Y(ref __self_0), &Fielded::Y(ref __arg_1_0)) =>
*__self_0 == *__arg_1_0,
(&Fielded::Z(ref __self_0), &Fielded::Z(ref __arg_1_0)) =>
*__self_0 == *__arg_1_0,
_ => unsafe { ::core::intrinsics::unreachable() }
}
} else { false }
}
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Fielded::X(ref __self_0), &Fielded::X(ref __arg_1_0)) =>
*__self_0 == *__arg_1_0,
(&Fielded::Y(ref __self_0), &Fielded::Y(ref __arg_1_0)) =>
*__self_0 == *__arg_1_0,
(&Fielded::Z(ref __self_0), &Fielded::Z(ref __arg_1_0)) =>
*__self_0 == *__arg_1_0,
_ => unsafe { ::core::intrinsics::unreachable() }
}
} else { false }
}
#[inline]
fn ne(&self, other: &Fielded) -> bool {
{
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Fielded::X(ref __self_0), &Fielded::X(ref __arg_1_0)) =>
*__self_0 != *__arg_1_0,
(&Fielded::Y(ref __self_0), &Fielded::Y(ref __arg_1_0)) =>
*__self_0 != *__arg_1_0,
(&Fielded::Z(ref __self_0), &Fielded::Z(ref __arg_1_0)) =>
*__self_0 != *__arg_1_0,
_ => unsafe { ::core::intrinsics::unreachable() }
}
} else { true }
}
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Fielded::X(ref __self_0), &Fielded::X(ref __arg_1_0)) =>
*__self_0 != *__arg_1_0,
(&Fielded::Y(ref __self_0), &Fielded::Y(ref __arg_1_0)) =>
*__self_0 != *__arg_1_0,
(&Fielded::Z(ref __self_0), &Fielded::Z(ref __arg_1_0)) =>
*__self_0 != *__arg_1_0,
_ => unsafe { ::core::intrinsics::unreachable() }
}
} else { true }
}
}
impl ::core::marker::StructuralEq for Fielded {}
@@ -1101,11 +1051,9 @@ impl ::core::cmp::Eq for Fielded {
#[doc(hidden)]
#[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () {
{
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<bool>;
let _: ::core::cmp::AssertParamIsEq<Option<i32>>;
}
let _: ::core::cmp::AssertParamIsEq<u32>;
let _: ::core::cmp::AssertParamIsEq<bool>;
let _: ::core::cmp::AssertParamIsEq<Option<i32>>;
}
}
#[automatically_derived]
@@ -1114,42 +1062,39 @@ impl ::core::cmp::PartialOrd for Fielded {
#[inline]
fn partial_cmp(&self, other: &Fielded)
-> ::core::option::Option<::core::cmp::Ordering> {
{
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Fielded::X(ref __self_0), &Fielded::X(ref __arg_1_0)) =>
match ::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
(&Fielded::Y(ref __self_0), &Fielded::Y(ref __arg_1_0)) =>
match ::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
(&Fielded::Z(ref __self_0), &Fielded::Z(ref __arg_1_0)) =>
match ::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
_ => unsafe { ::core::intrinsics::unreachable() }
}
} else {
::core::cmp::PartialOrd::partial_cmp(&__self_vi,
&__arg_1_vi)
}
}
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Fielded::X(ref __self_0), &Fielded::X(ref __arg_1_0)) =>
match ::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
(&Fielded::Y(ref __self_0), &Fielded::Y(ref __arg_1_0)) =>
match ::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
(&Fielded::Z(ref __self_0), &Fielded::Z(ref __arg_1_0)) =>
match ::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
_ => unsafe { ::core::intrinsics::unreachable() }
}
} else {
::core::cmp::PartialOrd::partial_cmp(&__self_vi, &__arg_1_vi)
}
}
}
#[automatically_derived]
@@ -1157,33 +1102,31 @@ impl ::core::cmp::PartialOrd for Fielded {
impl ::core::cmp::Ord for Fielded {
#[inline]
fn cmp(&self, other: &Fielded) -> ::core::cmp::Ordering {
{
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Fielded::X(ref __self_0), &Fielded::X(ref __arg_1_0)) =>
match ::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
(&Fielded::Y(ref __self_0), &Fielded::Y(ref __arg_1_0)) =>
match ::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
(&Fielded::Z(ref __self_0), &Fielded::Z(ref __arg_1_0)) =>
match ::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
_ => unsafe { ::core::intrinsics::unreachable() }
}
} else { ::core::cmp::Ord::cmp(&__self_vi, &__arg_1_vi) }
}
let __self_vi = ::core::intrinsics::discriminant_value(&*self);
let __arg_1_vi = ::core::intrinsics::discriminant_value(&*other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Fielded::X(ref __self_0), &Fielded::X(ref __arg_1_0)) =>
match ::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
(&Fielded::Y(ref __self_0), &Fielded::Y(ref __arg_1_0)) =>
match ::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
(&Fielded::Z(ref __self_0), &Fielded::Z(ref __arg_1_0)) =>
match ::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
_ => unsafe { ::core::intrinsics::unreachable() }
}
} else { ::core::cmp::Ord::cmp(&__self_vi, &__arg_1_vi) }
}
}
@@ -1198,7 +1141,8 @@ pub union Union {
impl ::core::clone::Clone for Union {
#[inline]
fn clone(&self) -> Union {
{ let _: ::core::clone::AssertParamIsCopy<Self>; *self }
let _: ::core::clone::AssertParamIsCopy<Self>;
*self
}
}
#[automatically_derived]