refactor: use shorthand fields

teresy 2018-11-06 15:05:44 -05:00
parent 1dceaddfbe
commit eca11b99a7
57 changed files with 68 additions and 68 deletions
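Every change in this commit applies Rust's struct field init shorthand (stable since Rust 1.17): when the local variable used to initialize a field has the same name as the field, `Field { name: name }` can be shortened to `Field { name }`. A minimal sketch for context — the `Point` type below is illustrative, not taken from the diff:

    struct Point { x: i32, y: i32 }

    fn make_point(x: i32, y: i32) -> Point {
        // Before the refactor this would read `Point { x: x, y: y }`;
        // the field init shorthand lets the binding name stand in for both.
        Point { x, y }
    }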

@@ -502,7 +502,7 @@ pub fn from_str(_: &str) -> String {
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn from_utf8(vec: Vec<u8>) -> Result<String, FromUtf8Error> {
         match str::from_utf8(&vec) {
-            Ok(..) => Ok(String { vec: vec }),
+            Ok(..) => Ok(String { vec }),
             Err(e) => {
                 Err(FromUtf8Error {
                     bytes: vec,

@@ -29,7 +29,7 @@ pub struct DwarfReader {
 impl DwarfReader {
     pub fn new(ptr: *const u8) -> DwarfReader {
-        DwarfReader { ptr: ptr }
+        DwarfReader { ptr }
     }
     // DWARF streams are packed, so e.g. a u32 would not necessarily be aligned

@@ -41,7 +41,7 @@ struct PanicData {
 }
 pub unsafe fn panic(data: Box<dyn Any + Send>) -> u32 {
-    let panic_ctx = Box::new(PanicData { data: data });
+    let panic_ctx = Box::new(PanicData { data });
     let params = [Box::into_raw(panic_ctx) as c::ULONG_PTR];
     c::RaiseException(RUST_PANIC,
                       c::EXCEPTION_NONCONTINUABLE,

@@ -26,7 +26,7 @@ pub struct OpportunisticTypeResolver<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
 impl<'a, 'gcx, 'tcx> OpportunisticTypeResolver<'a, 'gcx, 'tcx> {
     pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>) -> Self {
-        OpportunisticTypeResolver { infcx: infcx }
+        OpportunisticTypeResolver { infcx }
     }
 }
@@ -54,7 +54,7 @@ pub struct OpportunisticTypeAndRegionResolver<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
 impl<'a, 'gcx, 'tcx> OpportunisticTypeAndRegionResolver<'a, 'gcx, 'tcx> {
     pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>) -> Self {
-        OpportunisticTypeAndRegionResolver { infcx: infcx }
+        OpportunisticTypeAndRegionResolver { infcx }
     }
 }

@@ -169,7 +169,7 @@ pub fn instantiate(&mut self, vid: ty::TyVid, ty: Ty<'tcx>) {
         // Hack: we only need this so that `types_escaping_snapshot`
         // can see what has been unified; see the Delegate impl for
         // more details.
-        self.values.record(Instantiate { vid: vid });
+        self.values.record(Instantiate { vid });
     }
     /// Creates a new type variable.

@@ -43,7 +43,7 @@ fn unify_values(value1: &Self, value2: &Self) -> Result<Self, NoError> {
             value2.min_vid
         };
-        Ok(RegionVidKey { min_vid: min_vid })
+        Ok(RegionVidKey { min_vid })
     }
 }

@@ -469,7 +469,7 @@ pub fn local_deprecation_entry(&self, id: HirId) -> Option<DeprecationEntry> {
 /// Cross-references the feature names of unstable APIs with enabled
 /// features and possibly prints errors.
 pub fn check_unstable_api_usage<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
-    let mut checker = Checker { tcx: tcx };
+    let mut checker = Checker { tcx };
     tcx.hir.krate().visit_all_item_likes(&mut checker.as_deep_visitor());
 }

@@ -2871,7 +2871,7 @@ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F)
         use mir::TerminatorKind::*;
         let kind = match self.kind {
-            Goto { target } => Goto { target: target },
+            Goto { target } => Goto { target },
             SwitchInt {
                 ref discr,
                 switch_ty,

@@ -32,7 +32,7 @@ pub enum PlaceTy<'tcx> {
 impl<'a, 'gcx, 'tcx> PlaceTy<'tcx> {
     pub fn from_ty(ty: Ty<'tcx>) -> PlaceTy<'tcx> {
-        PlaceTy::Ty { ty: ty }
+        PlaceTy::Ty { ty }
     }
     pub fn to_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> {

@@ -269,7 +269,7 @@ fn project_and_unify_type<'cx, 'gcx, 'tcx>(
         },
         Err(err) => {
             debug!("project_and_unify_type: equating types encountered error {:?}", err);
-            Err(MismatchedProjectionTypes { err: err })
+            Err(MismatchedProjectionTypes { err })
         }
     }
 }

@@ -3434,7 +3434,7 @@ fn confirm_builtin_unsize_candidate(
             _ => bug!(),
         };
-        Ok(VtableBuiltinData { nested: nested })
+        Ok(VtableBuiltinData { nested })
     }
     ///////////////////////////////////////////////////////////////////////////

@@ -34,7 +34,7 @@ pub struct Match<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
 impl<'a, 'gcx, 'tcx> Match<'a, 'gcx, 'tcx> {
     pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Match<'a, 'gcx, 'tcx> {
-        Match { tcx: tcx }
+        Match { tcx }
     }
 }

@@ -82,7 +82,7 @@ fn has_escaping_regions(&self) -> bool {
     }
     fn has_type_flags(&self, flags: TypeFlags) -> bool {
-        self.visit_with(&mut HasTypeFlagsVisitor { flags: flags })
+        self.visit_with(&mut HasTypeFlagsVisitor { flags })
     }
     fn has_projections(&self) -> bool {
         self.has_type_flags(TypeFlags::HAS_PROJECTION)

@@ -190,7 +190,7 @@ impl ObjectFile {
     pub fn new(llmb: &'static mut MemoryBuffer) -> Option<ObjectFile> {
         unsafe {
             let llof = LLVMCreateObjectFile(llmb)?;
-            Some(ObjectFile { llof: llof })
+            Some(ObjectFile { llof })
         }
     }
 }

@@ -328,7 +328,7 @@ fn propagate<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
                funclet, succ, kind);
         match kind {
             CleanupKind::NotCleanup => {
-                result[succ] = CleanupKind::Internal { funclet: funclet };
+                result[succ] = CleanupKind::Internal { funclet };
             }
             CleanupKind::Funclet => {
                 if funclet != succ {

@@ -32,7 +32,7 @@ pub fn report_symbol_names<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
     }
     tcx.dep_graph.with_ignore(|| {
-        let mut visitor = SymbolNamesTest { tcx: tcx };
+        let mut visitor = SymbolNamesTest { tcx };
         tcx.hir.krate().visit_all_item_likes(&mut visitor);
     })
 }

@@ -214,7 +214,7 @@ pub fn new(p: &Path,
             unsafe { libc::close(fd); }
             Err(err)
         } else {
-            Ok(Lock { fd: fd })
+            Ok(Lock { fd })
         }
     }
 }

@@ -31,7 +31,7 @@ impl Svh {
     /// compute the SVH from some HIR, you want the `calculate_svh`
     /// function found in `librustc_incremental`.
     pub fn new(hash: u64) -> Svh {
-        Svh { hash: hash }
+        Svh { hash }
     }
     pub fn as_u64(&self) -> u64 {

@@ -794,7 +794,7 @@ fn get_lints(&self) -> LintArray {
 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for ImproperCTypes {
     fn check_foreign_item(&mut self, cx: &LateContext, it: &hir::ForeignItem) {
-        let mut vis = ImproperCTypesVisitor { cx: cx };
+        let mut vis = ImproperCTypesVisitor { cx };
         let abi = cx.tcx.hir.get_foreign_abi(it.id);
         if abi != Abi::RustIntrinsic && abi != Abi::PlatformIntrinsic {
             match it.node {

@@ -323,7 +323,7 @@ fn encode_info_for_items(&mut self) -> Index {
         index.record(DefId::local(CRATE_DEF_INDEX),
                      IsolatedEncoder::encode_info_for_mod,
                      FromId(CRATE_NODE_ID, (&krate.module, &krate.attrs, &vis)));
-        let mut visitor = EncodeVisitor { index: index };
+        let mut visitor = EncodeVisitor { index };
         krate.visit_all_item_likes(&mut visitor.as_deep_visitor());
         for macro_def in &krate.exported_macros {
             visitor.visit_macro_def(macro_def);

@@ -453,7 +453,7 @@ pub fn exit_scope(&mut self,
         }
         let scope = &self.scopes[len - scope_count];
         self.cfg.terminate(block, scope.source_info(span),
-                           TerminatorKind::Goto { target: target });
+                           TerminatorKind::Goto { target });
     }
     /// Creates a path that performs all required cleanup for dropping a generator.
@@ -1019,7 +1019,7 @@ fn build_diverge_scope<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
     } else {
         let block = cfg.start_new_cleanup_block();
         cfg.push_end_region(tcx, block, source_info(span), scope.region_scope);
-        cfg.terminate(block, source_info(span), TerminatorKind::Goto { target: target });
+        cfg.terminate(block, source_info(span), TerminatorKind::Goto { target });
         *cached_block = Some(block);
         block
     }

@@ -28,7 +28,7 @@ pub struct HaveBeenBorrowedLocals<'a, 'tcx: 'a> {
 impl<'a, 'tcx: 'a> HaveBeenBorrowedLocals<'a, 'tcx> {
     pub fn new(mir: &'a Mir<'tcx>)
                -> Self {
-        HaveBeenBorrowedLocals { mir: mir }
+        HaveBeenBorrowedLocals { mir }
     }
     pub fn mir(&self) -> &Mir<'tcx> {

@@ -21,7 +21,7 @@ pub struct MaybeStorageLive<'a, 'tcx: 'a> {
 impl<'a, 'tcx: 'a> MaybeStorageLive<'a, 'tcx> {
     pub fn new(mir: &'a Mir<'tcx>)
                -> Self {
-        MaybeStorageLive { mir: mir }
+        MaybeStorageLive { mir }
     }
     pub fn mir(&self) -> &Mir<'tcx> {

@@ -52,7 +52,7 @@ fn visit_body(&mut self, body: &'tcx hir::Body) {
 }
 pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
-    tcx.hir.krate().visit_all_item_likes(&mut OuterVisitor { tcx: tcx }.as_deep_visitor());
+    tcx.hir.krate().visit_all_item_likes(&mut OuterVisitor { tcx }.as_deep_visitor());
     tcx.sess.abort_if_errors();
 }

@@ -459,7 +459,7 @@ fn lower_pattern_unadjusted(&mut self, pat: &'tcx hir::Pat) -> Pattern<'tcx> {
                     })
                     .collect();
-                PatternKind::Leaf { subpatterns: subpatterns }
+                PatternKind::Leaf { subpatterns }
             }
             ty::Error => { // Avoid ICE (#50577)
                 return Pattern { span: pat.span, ty, kind: Box::new(PatternKind::Wild) };
@@ -666,13 +666,13 @@ fn lower_variant_or_leaf(
                         subpatterns,
                     }
                 } else {
-                    PatternKind::Leaf { subpatterns: subpatterns }
+                    PatternKind::Leaf { subpatterns }
                 }
             }
             Def::Struct(..) | Def::StructCtor(..) | Def::Union(..) |
             Def::TyAlias(..) | Def::AssociatedTy(..) | Def::SelfTy(..) | Def::SelfCtor(..) => {
-                PatternKind::Leaf { subpatterns: subpatterns }
+                PatternKind::Leaf { subpatterns }
             }
             _ => {

@@ -495,7 +495,7 @@ fn elaborate_replace(
         let target = self.patch.new_block(BasicBlockData {
             statements: vec![assign],
             terminator: Some(Terminator {
-                kind: TerminatorKind::Goto { target: target },
+                kind: TerminatorKind::Goto { target },
                 ..*terminator
             }),
             is_cleanup: false,

@@ -302,7 +302,7 @@ fn run_pass<'a, 'tcx>(&self,
         let map = make_local_map(&mut mir.local_decls, marker.locals);
         // Update references to all vars and tmps now
-        LocalUpdater { map: map }.visit_mir(mir);
+        LocalUpdater { map }.visit_mir(mir);
         mir.local_decls.shrink_to_fit();
     }
 }

@@ -57,7 +57,7 @@ fn run_pass<'a, 'tcx>(&self,
                 TerminatorKind::Assert {
                     target, cond: Operand::Constant(ref c), expected, ..
                 } if (c.literal.assert_bool(tcx) == Some(true)) == expected => {
-                    TerminatorKind::Goto { target: target }
+                    TerminatorKind::Goto { target }
                 },
                 TerminatorKind::FalseEdges { real_target, .. } => {
                     TerminatorKind::Goto { target: real_target }

@@ -691,5 +691,5 @@ pub fn check_crate(session: &Session, krate: &Crate) {
         is_banned: false,
     }, krate);
-    visit::walk_crate(&mut AstValidator { session: session }, krate)
+    visit::walk_crate(&mut AstValidator { session }, krate)
 }

@@ -4424,7 +4424,7 @@ fn lookup_import_candidates_from_module<FilterFn>(&mut self,
                     // declared as public (due to pruning, we don't explore
                     // outside crate private modules => no need to check this)
                     if !in_module_is_extern || name_binding.vis == ty::Visibility::Public {
-                        candidates.push(ImportSuggestion { path: path });
+                        candidates.push(ImportSuggestion { path });
                     }
                 }
             }
@@ -4533,7 +4533,7 @@ fn find_module(&mut self,
                         span: name_binding.span,
                         segments: path_segments,
                     };
-                    result = Some((module, ImportSuggestion { path: path }));
+                    result = Some((module, ImportSuggestion { path }));
                 } else {
                     // add the module to the lookup
                     if seen_modules.insert(module.def_id().unwrap()) {

@@ -71,7 +71,7 @@ pub fn with_callback(
         config: Config,
     ) -> JsonDumper<CallbackOutput<'b>> {
         JsonDumper {
-            output: CallbackOutput { callback: callback },
+            output: CallbackOutput { callback },
             config: config.clone(),
             result: Analysis::new(config),
         }

@@ -962,7 +962,7 @@ fn non_enum_variant(&self, struct_def: &hir::VariantData) -> AdtVariant<'tcx> {
                 AdtField { ty: field_ty, span: field.span }
             })
             .collect();
-        AdtVariant { fields: fields }
+        AdtVariant { fields }
     }
     fn enum_variants(&self, enum_def: &hir::EnumDef) -> Vec<AdtVariant<'tcx>> {

@@ -17,7 +17,7 @@
 use rustc::hir;
 pub fn check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
-    let mut orphan = OrphanChecker { tcx: tcx };
+    let mut orphan = OrphanChecker { tcx };
     tcx.hir.krate().visit_all_item_likes(&mut orphan);
 }

@@ -16,7 +16,7 @@
 use rustc::hir::{self, Unsafety};
 pub fn check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
-    let mut unsafety = UnsafetyChecker { tcx: tcx };
+    let mut unsafety = UnsafetyChecker { tcx };
     tcx.hir.krate().visit_all_item_likes(&mut unsafety);
 }

@@ -62,7 +62,7 @@
 // Main entry point
 pub fn collect_item_types<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
-    let mut visitor = CollectItemTypesVisitor { tcx: tcx };
+    let mut visitor = CollectItemTypesVisitor { tcx };
     tcx.hir
         .krate()
         .visit_all_item_likes(&mut visitor.as_deep_visitor());

@@ -62,7 +62,7 @@ pub fn impl_wf_check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
     // We will tag this as part of the WF check -- logically, it is,
     // but it's one that we must perform earlier than the rest of
     // WfCheck.
-    tcx.hir.krate().visit_all_item_likes(&mut ImplWfCheck { tcx: tcx });
+    tcx.hir.krate().visit_all_item_likes(&mut ImplWfCheck { tcx });
 }
 struct ImplWfCheck<'a, 'tcx: 'a> {

@@ -2959,7 +2959,7 @@ fn clean(&self, cx: &DocContext) -> Item {
             source: cx.tcx.def_span(self.did).clean(cx),
             visibility: Some(Inherited),
             def_id: self.did,
-            inner: VariantItem(Variant { kind: kind }),
+            inner: VariantItem(Variant { kind }),
             stability: get_stability(cx, self.did),
             deprecation: get_deprecation(cx, self.did),
         }

@@ -2854,7 +2854,7 @@ fn new(k: usize) -> Droppable {
             slot.borrow_mut()[k] += 1;
         });
-        Droppable { k: k }
+        Droppable { k }
     }
 }

@@ -877,7 +877,7 @@ pub fn open<P: AsRef<Path>>(&self, path: P) -> io::Result<File> {
     fn _open(&self, path: &Path) -> io::Result<File> {
         let inner = fs_imp::File::open(path, &self.0)?;
-        Ok(File { inner: inner })
+        Ok(File { inner })
     }
 }

@@ -150,7 +150,7 @@ pub struct Repeat { byte: u8 }
 /// assert_eq!(buffer, [0b101, 0b101, 0b101]);
 /// ```
 #[stable(feature = "rust1", since = "1.0.0")]
-pub fn repeat(byte: u8) -> Repeat { Repeat { byte: byte } }
+pub fn repeat(byte: u8) -> Repeat { Repeat { byte } }
 #[stable(feature = "rust1", since = "1.0.0")]
 impl Read for Repeat {

@@ -931,7 +931,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 impl<T> SyncSender<T> {
     fn new(inner: Arc<sync::Packet<T>>) -> SyncSender<T> {
-        SyncSender { inner: inner }
+        SyncSender { inner }
     }
     /// Sends a value on this synchronous channel.

@@ -32,7 +32,7 @@ pub fn now() -> Instant {
             let mut t = mem::uninitialized();
             let ret = abi::clock_time_get(abi::clockid::MONOTONIC, 0, &mut t);
             assert_eq!(ret, abi::errno::SUCCESS);
-            Instant { t: t }
+            Instant { t }
         }
     }
@@ -71,7 +71,7 @@ pub fn now() -> SystemTime {
             let mut t = mem::uninitialized();
             let ret = abi::clock_time_get(abi::clockid::REALTIME, 0, &mut t);
             assert_eq!(ret, abi::errno::SUCCESS);
-            SystemTime { t: t }
+            SystemTime { t }
         }
     }

@@ -21,7 +21,7 @@ pub struct FileDesc {
 impl FileDesc {
     pub fn new(fd: usize) -> FileDesc {
-        FileDesc { fd: fd }
+        FileDesc { fd }
     }
     pub fn raw(&self) -> usize { self.fd }

@@ -264,7 +264,7 @@ pub fn open(path: &Path, opts: &OpenOptions) -> io::Result<File> {
     pub fn file_attr(&self) -> io::Result<FileAttr> {
         let mut stat = syscall::Stat::default();
         cvt(syscall::fstat(self.0.raw(), &mut stat))?;
-        Ok(FileAttr { stat: stat })
+        Ok(FileAttr { stat })
     }
     pub fn fsync(&self) -> io::Result<()> {

@@ -19,7 +19,7 @@ pub struct Error {
 impl Error {
     pub fn new(errno: i32) -> Error {
-        Error { errno: errno }
+        Error { errno }
     }
     pub fn mux(result: Result<usize>) -> usize {

@@ -38,7 +38,7 @@ pub unsafe fn new<'a>(_stack: usize, p: Box<dyn FnBox() + 'a>) -> io::Result<Thr
             panic!("thread failed to exit");
         } else {
             mem::forget(p);
-            Ok(Thread { id: id })
+            Ok(Thread { id })
         }
     }

@@ -187,7 +187,7 @@ pub fn sub_duration(&self, other: &Duration) -> SystemTime {
 impl From<syscall::TimeSpec> for SystemTime {
     fn from(t: syscall::TimeSpec) -> SystemTime {
-        SystemTime { t: Timespec { t: t } }
+        SystemTime { t: Timespec { t } }
     }
 }

@@ -41,7 +41,7 @@ fn max_len() -> usize {
 impl FileDesc {
     pub fn new(fd: c_int) -> FileDesc {
-        FileDesc { fd: fd }
+        FileDesc { fd }
     }
     pub fn raw(&self) -> c_int { self.fd }

@@ -317,7 +317,7 @@ pub fn metadata(&self) -> io::Result<FileAttr> {
         cvt(unsafe {
             fstatat64(fd, self.entry.d_name.as_ptr(), &mut stat, libc::AT_SYMLINK_NOFOLLOW)
         })?;
-        Ok(FileAttr { stat: stat })
+        Ok(FileAttr { stat })
     }
     #[cfg(not(any(target_os = "linux", target_os = "emscripten", target_os = "android")))]
@@ -526,7 +526,7 @@ pub fn file_attr(&self) -> io::Result<FileAttr> {
         cvt(unsafe {
             fstat64(self.0.raw(), &mut stat)
         })?;
-        Ok(FileAttr { stat: stat })
+        Ok(FileAttr { stat })
     }
     pub fn fsync(&self) -> io::Result<()> {
@@ -807,7 +807,7 @@ pub fn stat(p: &Path) -> io::Result<FileAttr> {
     cvt(unsafe {
         stat64(p.as_ptr(), &mut stat)
    })?;
-    Ok(FileAttr { stat: stat })
+    Ok(FileAttr { stat })
 }
 pub fn lstat(p: &Path) -> io::Result<FileAttr> {
@@ -816,7 +816,7 @@ pub fn lstat(p: &Path) -> io::Result<FileAttr> {
     cvt(unsafe {
         lstat64(p.as_ptr(), &mut stat)
     })?;
-    Ok(FileAttr { stat: stat })
+    Ok(FileAttr { stat })
 }
 pub fn canonicalize(p: &Path) -> io::Result<PathBuf> {

@@ -217,7 +217,7 @@ fn from(t: libc::timeval) -> SystemTime {
 impl From<libc::timespec> for SystemTime {
     fn from(t: libc::timespec) -> SystemTime {
-        SystemTime { t: Timespec { t: t } }
+        SystemTime { t: Timespec { t } }
     }
 }
@@ -332,7 +332,7 @@ pub fn sub_duration(&self, other: &Duration) -> SystemTime {
 impl From<libc::timespec> for SystemTime {
     fn from(t: libc::timespec) -> SystemTime {
-        SystemTime { t: Timespec { t: t } }
+        SystemTime { t: Timespec { t } }
     }
 }

@@ -241,7 +241,7 @@ impl<'a> DropGuard<'a> {
     fn new(lock: &'a Mutex) -> DropGuard<'a> {
         unsafe {
             lock.lock();
-            DropGuard { lock: lock }
+            DropGuard { lock }
         }
     }
 }

@@ -170,7 +170,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 impl From<c::FILETIME> for SystemTime {
     fn from(t: c::FILETIME) -> SystemTime {
-        SystemTime { t: t }
+        SystemTime { t }
     }
 }

@@ -174,7 +174,7 @@ impl<T> PoisonError<T> {
     /// [`RwLock::read`]: ../../std/sync/struct.RwLock.html#method.read
     #[stable(feature = "sync_poison", since = "1.2.0")]
     pub fn new(guard: T) -> PoisonError<T> {
-        PoisonError { guard: guard }
+        PoisonError { guard }
     }
     /// Consumes this error indicating that a lock is poisoned, returning the

@@ -67,7 +67,7 @@ impl CodePoint {
     /// Only use when `value` is known to be less than or equal to 0x10FFFF.
     #[inline]
     pub unsafe fn from_u32_unchecked(value: u32) -> CodePoint {
-        CodePoint { value: value }
+        CodePoint { value }
     }
     /// Creates a new `CodePoint` if the value is a valid code point.
@@ -76,7 +76,7 @@ pub unsafe fn from_u32_unchecked(value: u32) -> CodePoint {
     #[inline]
     pub fn from_u32(value: u32) -> Option<CodePoint> {
         match value {
-            0 ..= 0x10FFFF => Some(CodePoint { value: value }),
+            0 ..= 0x10FFFF => Some(CodePoint { value }),
             _ => None
         }
     }

@@ -126,7 +126,7 @@ fn make_expr(mut self: Box<ExpandResult<'a>>) -> Option<P<ast::Expr>> {
             }
         }
-        Box::new(ExpandResult { p: p })
+        Box::new(ExpandResult { p })
     }
     // include_str! : read the given file, insert it as a literal string expr

@@ -944,7 +944,7 @@ pub fn noop_fold_item_kind<T: Folder>(i: ItemKind, folder: &mut T) -> ItemKind {
         ItemKind::Enum(enum_definition, generics) => {
             let generics = folder.fold_generics(generics);
             let variants = enum_definition.variants.move_map(|x| folder.fold_variant(x));
-            ItemKind::Enum(ast::EnumDef { variants: variants }, generics)
+            ItemKind::Enum(ast::EnumDef { variants }, generics)
         }
         ItemKind::Struct(struct_def, generics) => {
             let generics = folder.fold_generics(generics);

@@ -6871,7 +6871,7 @@ fn parse_enum_def(&mut self, _generics: &ast::Generics) -> PResult<'a, EnumDef>
             _ => ()
         }
-        Ok(ast::EnumDef { variants: variants })
+        Ok(ast::EnumDef { variants })
     }
     /// Parse an "enum" declaration