auto merge of #17813 : P1start/rust/lint-field-shorthand, r=huonw

Closes #17792.
This commit is contained in:
bors 2014-10-24 15:12:24 +00:00
commit 083578ddec
53 changed files with 266 additions and 203 deletions

View File

@ -3877,6 +3877,7 @@ match x {
If you have a struct, you can destructure it inside of a pattern:
```{rust}
# #![allow(non_shorthand_field_patterns)]
struct Point {
x: int,
y: int,
@ -3892,6 +3893,7 @@ match origin {
If we only care about some of the values, we don't have to give them all names:
```{rust}
# #![allow(non_shorthand_field_patterns)]
struct Point {
x: int,
y: int,
@ -3977,6 +3979,7 @@ You can also define methods that do not take a `self` parameter. Here's a
pattern that's very common in Rust code:
```{rust}
# #![allow(non_shorthand_field_patterns)]
struct Circle {
x: f64,
y: f64,

View File

@ -434,7 +434,7 @@ impl<K: Ord, V> TreeMap<K, V> {
/// assert_eq!(vec, vec![("a", 1), ("b", 2), ("c", 3)]);
/// ```
pub fn into_iter(self) -> MoveEntries<K, V> {
let TreeMap { root: root, length: length } = self;
let TreeMap { root, length } = self;
let stk = match root {
None => vec!(),
Some(box tn) => vec!(tn)
@ -898,11 +898,11 @@ impl<K, V> Iterator<(K, V)> for MoveEntries<K,V> {
fn next(&mut self) -> Option<(K, V)> {
while !self.stack.is_empty() {
let TreeNode {
key: key,
value: value,
left: left,
right: right,
level: level
key,
value,
left,
right,
level,
} = self.stack.pop().unwrap();
match left {

View File

@ -235,10 +235,10 @@ impl OptGroup {
/// (Both short and long names correspond to different Opts).
pub fn long_to_short(&self) -> Opt {
let OptGroup {
short_name: short_name,
long_name: long_name,
hasarg: hasarg,
occur: occur,
short_name,
long_name,
hasarg,
occur,
..
} = (*self).clone();
@ -671,11 +671,11 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
let desc_sep = format!("\n{}", " ".repeat(24));
let mut rows = opts.iter().map(|optref| {
let OptGroup{short_name: short_name,
long_name: long_name,
hint: hint,
desc: desc,
hasarg: hasarg,
let OptGroup{short_name,
long_name,
hint,
desc,
hasarg,
..} = (*optref).clone();
let mut row = " ".repeat(4);

View File

@ -636,7 +636,7 @@ impl Scheduler {
mem::transmute(&**next_task.sched.as_mut().unwrap());
let current_task: &mut GreenTask = match sched.cleanup_job {
Some(CleanupJob { task: ref mut task, .. }) => &mut **task,
Some(CleanupJob { ref mut task, .. }) => &mut **task,
None => rtabort!("no cleanup job")
};
@ -953,7 +953,7 @@ impl CleanupJob {
}
pub fn run(self, sched: &mut Scheduler) {
let CleanupJob { task: task, f: f } = self;
let CleanupJob { task, f } = self;
f.to_fn()(sched, task)
}
}

View File

@ -1143,6 +1143,40 @@ impl LintPass for UnusedImportBraces {
}
}
declare_lint!(NON_SHORTHAND_FIELD_PATTERNS, Warn,
"using `Struct { x: x }` instead of `Struct { x }`")
pub struct NonShorthandFieldPatterns;
impl LintPass for NonShorthandFieldPatterns {
fn get_lints(&self) -> LintArray {
lint_array!(NON_SHORTHAND_FIELD_PATTERNS)
}
fn check_pat(&mut self, cx: &Context, pat: &ast::Pat) {
let def_map = cx.tcx.def_map.borrow();
match pat.node {
ast::PatStruct(_, ref v, _) => {
for fieldpat in v.iter()
.filter(|fieldpat| !fieldpat.node.is_shorthand)
.filter(|fieldpat| def_map.find(&fieldpat.node.pat.id)
== Some(&def::DefLocal(fieldpat.node.pat.id))) {
match fieldpat.node.pat.node {
ast::PatIdent(_, ident, None) if ident.node.as_str()
== fieldpat.node.ident.as_str() => {
cx.span_lint(NON_SHORTHAND_FIELD_PATTERNS, fieldpat.span,
format!("the `{}:` in this pattern is redundant and can \
be removed", ident.node.as_str()).as_slice())
},
_ => {},
}
}
},
_ => {}
}
}
}
declare_lint!(pub UNUSED_UNSAFE, Warn,
"unnecessary use of an `unsafe` block")
@ -1523,12 +1557,12 @@ impl LintPass for Stability {
def_id
}
typeck::MethodTypeParam(typeck::MethodParam {
trait_ref: ref trait_ref,
ref trait_ref,
method_num: index,
..
}) |
typeck::MethodTraitObject(typeck::MethodObject {
trait_ref: ref trait_ref,
ref trait_ref,
method_num: index,
..
}) => {

View File

@ -202,6 +202,7 @@ impl LintStore {
NonUpperCaseGlobals,
UnusedParens,
UnusedImportBraces,
NonShorthandFieldPatterns,
UnusedUnsafe,
UnsafeBlocks,
UnusedMut,

View File

@ -1093,9 +1093,9 @@ impl<'a> rbml_writer_helpers for Encoder<'a> {
this.emit_enum_variant_arg(1, |this| idx.encode(this))
})
}
ty::UnsizeVtable(ty::TyTrait { def_id: def_id,
ty::UnsizeVtable(ty::TyTrait { def_id,
bounds: ref b,
substs: ref substs },
ref substs },
self_ty) => {
this.emit_enum_variant("UnsizeVtable", 2, 4, |this| {
this.emit_enum_variant_arg(

View File

@ -132,7 +132,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
ast::PatStruct(_, ref subpats, _) => {
let pats_exit =
self.pats_all(subpats.iter().map(|f| &f.pat), pred);
self.pats_all(subpats.iter().map(|f| &f.node.pat), pred);
self.add_node(pat.id, [pats_exit])
}

View File

@ -413,12 +413,16 @@ fn construct_witness(cx: &MatchCheckCtxt, ctor: &Constructor,
};
if is_structure {
let fields = ty::lookup_struct_fields(cx.tcx, vid);
let field_pats: Vec<FieldPat> = fields.into_iter()
let field_pats: Vec<Spanned<FieldPat>> = fields.into_iter()
.zip(pats)
.filter(|&(_, ref pat)| pat.node != PatWild(PatWildSingle))
.map(|(field, pat)| FieldPat {
ident: Ident::new(field.name),
pat: pat
.map(|(field, pat)| Spanned {
span: DUMMY_SP,
node: FieldPat {
ident: Ident::new(field.name),
pat: pat,
is_shorthand: true,
}
}).collect();
let has_more_fields = field_pats.len() < pats_len;
PatStruct(def_to_path(cx.tcx, vid), field_pats, has_more_fields)
@ -427,7 +431,7 @@ fn construct_witness(cx: &MatchCheckCtxt, ctor: &Constructor,
}
}
ty::ty_rptr(_, ty::mt { ty: ty, .. }) => {
ty::ty_rptr(_, ty::mt { ty, .. }) => {
match ty::get(ty).sty {
ty::ty_vec(_, Some(n)) => match ctor {
&Single => {
@ -495,7 +499,7 @@ fn all_constructors(cx: &MatchCheckCtxt, left_ty: ty::t,
ty::ty_nil =>
vec!(ConstantValue(const_nil)),
ty::ty_rptr(_, ty::mt { ty: ty, .. }) => match ty::get(ty).sty {
ty::ty_rptr(_, ty::mt { ty, .. }) => match ty::get(ty).sty {
ty::ty_vec(_, None) =>
range_inclusive(0, max_slice_length).map(|length| Slice(length)).collect(),
_ => vec!(Single)
@ -692,7 +696,7 @@ pub fn constructor_arity(cx: &MatchCheckCtxt, ctor: &Constructor, ty: ty::t) ->
match ty::get(ty).sty {
ty::ty_tup(ref fs) => fs.len(),
ty::ty_uniq(_) => 1u,
ty::ty_rptr(_, ty::mt { ty: ty, .. }) => match ty::get(ty).sty {
ty::ty_rptr(_, ty::mt { ty, .. }) => match ty::get(ty).sty {
ty::ty_vec(_, None) => match *ctor {
Slice(length) => length,
ConstantValue(_) => 0u,
@ -740,7 +744,7 @@ fn range_covered_by_constructor(ctor: &Constructor,
pub fn specialize<'a>(cx: &MatchCheckCtxt, r: &[&'a Pat],
constructor: &Constructor, col: uint, arity: uint) -> Option<Vec<&'a Pat>> {
let &Pat {
id: pat_id, node: ref node, span: pat_span
id: pat_id, ref node, span: pat_span
} = raw_pat(r[col]);
let head: Option<Vec<&Pat>> = match node {
@ -806,8 +810,8 @@ pub fn specialize<'a>(cx: &MatchCheckCtxt, r: &[&'a Pat],
class_id.map(|variant_id| {
let struct_fields = ty::lookup_struct_fields(cx.tcx, variant_id);
let args = struct_fields.iter().map(|sf| {
match pattern_fields.iter().find(|f| f.ident.name == sf.name) {
Some(ref f) => &*f.pat,
match pattern_fields.iter().find(|f| f.node.ident.name == sf.name) {
Some(ref f) => &*f.node.pat,
_ => DUMMY_WILD_PAT
}
}).collect();

View File

@ -25,7 +25,7 @@ use syntax::parse::token::InternedString;
use syntax::ptr::P;
use syntax::visit::Visitor;
use syntax::visit;
use syntax::{ast, ast_map, ast_util};
use syntax::{ast, ast_map, ast_util, codemap};
use std::rc::Rc;
use std::collections::hashmap::Vacant;
@ -115,7 +115,7 @@ fn lookup_variant_by_id<'a>(tcx: &'a ty::ctxt,
match tcx.map.find(enum_def.node) {
None => None,
Some(ast_map::NodeItem(it)) => match it.node {
ItemEnum(ast::EnumDef { variants: ref variants }, _) => {
ItemEnum(ast::EnumDef { ref variants }, _) => {
variant_expr(variants.as_slice(), variant_def.node)
}
_ => None
@ -133,7 +133,7 @@ fn lookup_variant_by_id<'a>(tcx: &'a ty::ctxt,
let expr_id = match csearch::maybe_get_item_ast(tcx, enum_def,
|a, b, c, d| astencode::decode_inlined_item(a, b, c, d)) {
csearch::found(&ast::IIItem(ref item)) => match item.node {
ItemEnum(ast::EnumDef { variants: ref variants }, _) => {
ItemEnum(ast::EnumDef { ref variants }, _) => {
// NOTE this doesn't do the right thing, it compares inlined
// NodeId's to the original variant_def's NodeId, but they
// come from different crates, so they will likely never match.
@ -336,9 +336,13 @@ pub fn const_expr_to_pat(tcx: &ty::ctxt, expr: &Expr) -> P<Pat> {
}
ExprStruct(ref path, ref fields, None) => {
let field_pats = fields.iter().map(|field| FieldPat {
ident: field.ident.node,
pat: const_expr_to_pat(tcx, &*field.expr)
let field_pats = fields.iter().map(|field| codemap::Spanned {
span: codemap::DUMMY_SP,
node: FieldPat {
ident: field.ident.node,
pat: const_expr_to_pat(tcx, &*field.expr),
is_shorthand: true,
},
}).collect();
PatStruct(path.clone(), field_pats, false)
}

View File

@ -102,12 +102,12 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> {
}
typeck::MethodStaticUnboxedClosure(_) => {}
typeck::MethodTypeParam(typeck::MethodParam {
trait_ref: ref trait_ref,
ref trait_ref,
method_num: index,
..
}) |
typeck::MethodTraitObject(typeck::MethodObject {
trait_ref: ref trait_ref,
ref trait_ref,
method_num: index,
..
}) => {
@ -156,7 +156,8 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> {
}
}
fn handle_field_pattern_match(&mut self, lhs: &ast::Pat, pats: &[ast::FieldPat]) {
fn handle_field_pattern_match(&mut self, lhs: &ast::Pat,
pats: &[codemap::Spanned<ast::FieldPat>]) {
let id = match (*self.tcx.def_map.borrow())[lhs.id] {
def::DefVariant(_, id, _) => id,
_ => {
@ -174,7 +175,7 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> {
let fields = ty::lookup_struct_fields(self.tcx, id);
for pat in pats.iter() {
let field_id = fields.iter()
.find(|field| field.name == pat.ident.name).unwrap().id;
.find(|field| field.name == pat.node.ident.name).unwrap().id;
self.live_symbols.insert(field_id.node);
}
}

View File

@ -179,8 +179,8 @@ impl OverloadedCallType {
MethodStaticUnboxedClosure(def_id) => {
OverloadedCallType::from_unboxed_closure(tcx, def_id)
}
MethodTypeParam(MethodParam { trait_ref: ref trait_ref, .. }) |
MethodTraitObject(MethodObject { trait_ref: ref trait_ref, .. }) => {
MethodTypeParam(MethodParam { ref trait_ref, .. }) |
MethodTraitObject(MethodObject { ref trait_ref, .. }) => {
OverloadedCallType::from_trait_id(tcx, trait_ref.def_id)
}
}

View File

@ -433,7 +433,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> {
ty::AdjustDerefRef(
ty::AutoDerefRef {
autoref: None, autoderefs: autoderefs}) => {
autoref: None, autoderefs}) => {
// Equivalent to *expr or something similar.
self.cat_expr_autoderefd(expr, autoderefs)
}
@ -1222,9 +1222,9 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> {
ast::PatStruct(_, ref field_pats, _) => {
// {f1: p1, ..., fN: pN}
for fp in field_pats.iter() {
let field_ty = if_ok!(self.pat_ty(&*fp.pat)); // see (*2)
let cmt_field = self.cat_field(pat, cmt.clone(), fp.ident.name, field_ty);
if_ok!(self.cat_pattern(cmt_field, &*fp.pat, |x,y,z| op(x,y,z)));
let field_ty = if_ok!(self.pat_ty(&*fp.node.pat)); // see (*2)
let cmt_field = self.cat_field(pat, cmt.clone(), fp.node.ident.name, field_ty);
if_ok!(self.cat_pattern(cmt_field, &*fp.node.pat, |x,y,z| op(x,y,z)));
}
}
@ -1524,7 +1524,7 @@ impl Repr for InteriorKind {
fn element_kind(t: ty::t) -> ElementKind {
match ty::get(t).sty {
ty::ty_rptr(_, ty::mt{ty:ty, ..}) |
ty::ty_rptr(_, ty::mt{ty, ..}) |
ty::ty_uniq(ty) => match ty::get(ty).sty {
ty::ty_vec(_, None) => VecElement,
_ => OtherElement

View File

@ -739,9 +739,9 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> {
resolve::LastMod(resolve::DependsOn(def)) => {
self.report_error(ck_public(def));
},
resolve::LastImport{value_priv: value_priv,
resolve::LastImport{value_priv,
value_used: check_value,
type_priv: type_priv,
type_priv,
type_used: check_type} => {
// This dance with found_error is because we don't want to report
// a privacy error twice for the same directive.
@ -828,8 +828,8 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> {
MethodStaticUnboxedClosure(_) => {}
// Trait methods are always all public. The only controlling factor
// is whether the trait itself is accessible or not.
MethodTypeParam(MethodParam { trait_ref: ref trait_ref, .. }) |
MethodTraitObject(MethodObject { trait_ref: ref trait_ref, .. }) => {
MethodTypeParam(MethodParam { ref trait_ref, .. }) |
MethodTraitObject(MethodObject { ref trait_ref, .. }) => {
self.report_error(self.ensure_public(span, trait_ref.def_id,
None, "source trait"));
}
@ -991,7 +991,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for PrivacyVisitor<'a, 'tcx> {
ty::ty_struct(id, _) => {
for field in fields.iter() {
self.check_field(pattern.span, id,
NamedField(field.ident));
NamedField(field.node.ident));
}
}
ty::ty_enum(_, _) => {
@ -999,7 +999,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for PrivacyVisitor<'a, 'tcx> {
Some(&def::DefVariant(_, variant_id, _)) => {
for field in fields.iter() {
self.check_field(pattern.span, variant_id,
NamedField(field.ident));
NamedField(field.node.ident));
}
}
_ => self.tcx.sess.span_bug(pattern.span,

View File

@ -647,7 +647,7 @@ fn resolve_local(visitor: &mut RegionResolutionVisitor, local: &ast::Local) {
ast::PatIdent(ast::BindByRef(_), _, _) => true,
ast::PatStruct(_, ref field_pats, _) => {
field_pats.iter().any(|fp| is_binding_pat(&*fp.pat))
field_pats.iter().any(|fp| is_binding_pat(&*fp.node.pat))
}
ast::PatVec(ref pats1, ref pats2, ref pats3) => {

View File

@ -972,28 +972,14 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
self.span.snippet(p.span)).as_slice());
}
};
// The AST doesn't give us a span for the struct field, so we have
// to figure out where it is by assuming it's the token before each colon.
let field_spans = self.span.sub_spans_before_tokens(p.span,
token::COMMA,
token::COLON);
if fields.len() != field_spans.len() {
self.sess.span_bug(p.span,
format!("Mismatched field count in '{}', found {}, expected {}",
self.span.snippet(p.span), field_spans.len(), fields.len()
).as_slice());
}
for (field, &span) in fields.iter().zip(field_spans.iter()) {
for &Spanned { node: ref field, span } in fields.iter() {
self.visit_pat(&*field.pat);
if span.is_none() {
continue;
}
let fields = ty::lookup_struct_fields(&self.analysis.ty_cx, struct_def);
for f in fields.iter() {
if f.name == field.ident.name {
self.fmt.ref_str(recorder::VarRef,
p.span,
span,
Some(span),
f.id,
self.cur_scope);
break;

View File

@ -276,58 +276,6 @@ impl<'a> SpanUtils<'a> {
}
}
// Return an owned vector of the subspans of the tokens that come before tok2
// which is before tok1. If there is no instance of tok2 before tok1, then that
// place in the result is None.
// Everything returned must be inside a set of (non-angle) brackets, but no
// more deeply nested than that.
pub fn sub_spans_before_tokens(&self,
span: Span,
tok1: Token,
tok2: Token) -> Vec<Option<Span>> {
let mut sub_spans : Vec<Option<Span>> = vec!();
let mut toks = self.retokenise_span(span);
let mut prev = toks.next_token();
let mut next = toks.next_token();
let mut stored_val = false;
let mut found_val = false;
let mut bracket_count = 0u;
while next.tok != token::EOF {
if bracket_count == 1 {
if next.tok == tok2 {
sub_spans.push(self.make_sub_span(span, Some(prev.sp)));
stored_val = true;
found_val = false;
}
if next.tok == tok1 {
if !stored_val {
sub_spans.push(None);
} else {
stored_val = false;
}
found_val = false;
}
if !stored_val &&
is_ident(&next.tok) {
found_val = true;
}
}
bracket_count += match next.tok {
token::LPAREN | token::LBRACE => 1,
token::RPAREN | token::RBRACE => -1,
_ => 0
};
prev = next;
next = toks.next_token();
}
if found_val {
sub_spans.push(None);
}
return sub_spans;
}
pub fn sub_span_after_keyword(&self,
span: Span,
keyword: keywords::Keyword) -> Option<Span> {

View File

@ -82,7 +82,7 @@ impl<'v> Visitor<'v> for Annotator {
// work around lack of pattern matching for @ types
ProvidedMethod(ref method) => {
match **method {
Method {attrs: ref attrs, id: id, ..} => (id, attrs),
Method {ref attrs, id, ..} => (id, attrs),
}
}

View File

@ -165,7 +165,7 @@ impl Substs {
}
pub fn erase_regions(self) -> Substs {
let Substs { types: types, regions: _ } = self;
let Substs { types, regions: _ } = self;
Substs { types: types, regions: ErasedRegions }
}

View File

@ -1281,7 +1281,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
}
}
ty::ty_rptr(_, ty::mt { ty: referent_ty, mutbl: mutbl }) => {
ty::ty_rptr(_, ty::mt { ty: referent_ty, mutbl }) => {
// &mut T or &T
match bound {
ty::BoundCopy => {

View File

@ -1692,10 +1692,10 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let pat_repr = adt::represent_type(bcx.ccx(), pat_ty);
expr::with_field_tys(tcx, pat_ty, Some(pat.id), |discr, field_tys| {
for f in fields.iter() {
let ix = ty::field_idx_strict(tcx, f.ident.name, field_tys);
let ix = ty::field_idx_strict(tcx, f.node.ident.name, field_tys);
let fldptr = adt::trans_field_ptr(bcx, &*pat_repr, val,
discr, ix);
bcx = bind_irrefutable_pat(bcx, &*f.pat, fldptr, cleanup_scope);
bcx = bind_irrefutable_pat(bcx, &*f.node.pat, fldptr, cleanup_scope);
}
})
}

View File

@ -765,7 +765,7 @@ pub fn trans_set_discr(bcx: Block, r: &Repr, val: ValueRef, discr: Disr) {
Store(bcx, C_null(llptrty), val)
}
}
StructWrappedNullablePointer { nonnull: ref nonnull, nndiscr, ptrfield, .. } => {
StructWrappedNullablePointer { ref nonnull, nndiscr, ptrfield, .. } => {
if discr != nndiscr {
let (llptrptr, llptrty) = match ptrfield {
ThinPointer(field) =>
@ -806,8 +806,8 @@ pub fn num_args(r: &Repr, discr: Disr) -> uint {
RawNullablePointer { nndiscr, ref nullfields, .. } => {
if discr == nndiscr { 1 } else { nullfields.len() }
}
StructWrappedNullablePointer { nonnull: ref nonnull, nndiscr,
nullfields: ref nullfields, .. } => {
StructWrappedNullablePointer { ref nonnull, nndiscr,
ref nullfields, .. } => {
if discr == nndiscr { nonnull.fields.len() } else { nullfields.len() }
}
}
@ -988,7 +988,7 @@ pub fn trans_const(ccx: &CrateContext, r: &Repr, discr: Disr,
C_null(type_of::sizing_type_of(ccx, nnty))
}
}
StructWrappedNullablePointer { nonnull: ref nonnull, nndiscr, .. } => {
StructWrappedNullablePointer { ref nonnull, nndiscr, .. } => {
if discr == nndiscr {
C_struct(ccx, build_const_struct(ccx,
nonnull,

View File

@ -401,7 +401,7 @@ pub fn malloc_raw_dyn_proc<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: ty::t) -> Resu
let llalign = C_uint(ccx, llalign_of_min(bcx.ccx(), llty));
// Allocate space and store the destructor pointer:
let Result {bcx: bcx, val: llbox} = malloc_raw_dyn(bcx, ptr_llty, t, size, llalign);
let Result {bcx, val: llbox} = malloc_raw_dyn(bcx, ptr_llty, t, size, llalign);
let dtor_ptr = GEPi(bcx, llbox, [0u, abi::box_field_drop_glue]);
let drop_glue_field_ty = type_of(ccx, ty::mk_nil_ptr(bcx.tcx()));
let drop_glue = PointerCast(bcx, glue::get_drop_glue(ccx, ty::mk_uniq(bcx.tcx(), t)),

View File

@ -102,7 +102,7 @@ fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr)
fn datum_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr)
-> Callee<'blk, 'tcx> {
let DatumBlock {bcx: mut bcx, datum} = expr::trans(bcx, expr);
let DatumBlock {mut bcx, datum} = expr::trans(bcx, expr);
match ty::get(datum.ty).sty {
ty::ty_bare_fn(..) => {
let llval = datum.to_llscalarish(bcx);

View File

@ -190,7 +190,7 @@ pub fn store_environment<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
}
// allocate closure in the heap
let Result {bcx: bcx, val: llbox} = allocate_cbox(bcx, store, cdata_ty);
let Result {bcx, val: llbox} = allocate_cbox(bcx, store, cdata_ty);
let llbox = PointerCast(bcx, llbox, llboxptr_ty);
debug!("tuplify_box_ty = {}", ty_to_string(tcx, cbox_ty));

View File

@ -312,7 +312,7 @@ pub fn trans_for<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
(),
|(), bcx, lloption| {
let Result {
bcx: bcx,
bcx,
val: _
} = callee::trans_call_inner(bcx,
Some(loop_info),

View File

@ -1449,7 +1449,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
// Handle other generic parameters
let actual_types = param_substs.substs.types.get_slice(subst::FnSpace);
for (index, &ast::TyParam{ ident: ident, .. }) in generics.ty_params.iter().enumerate() {
for (index, &ast::TyParam{ ident, .. }) in generics.ty_params.iter().enumerate() {
let actual_type = actual_types[index];
// Add actual type name to <...> clause of function name
let actual_type_name = compute_debuginfo_type_name(cx,
@ -3344,7 +3344,10 @@ fn populate_scope_map(cx: &CrateContext,
ast::PatStruct(_, ref field_pats, _) => {
scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata);
for &ast::FieldPat { pat: ref sub_pat, .. } in field_pats.iter() {
for &codemap::Spanned {
node: ast::FieldPat { pat: ref sub_pat, .. },
..
} in field_pats.iter() {
walk_pattern(cx, &**sub_pat, scope_stack, scope_map);
}
}
@ -3602,8 +3605,8 @@ fn populate_scope_map(cx: &CrateContext,
}
}
ast::ExprInlineAsm(ast::InlineAsm { inputs: ref inputs,
outputs: ref outputs,
ast::ExprInlineAsm(ast::InlineAsm { ref inputs,
ref outputs,
.. }) => {
// inputs, outputs: Vec<(String, P<Expr>)>
for &(_, ref exp) in inputs.iter() {

View File

@ -324,7 +324,7 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
_ => bcx.sess().bug(format!("UnsizeStruct with bad sty: {}",
bcx.ty_to_string(unsized_ty)).as_slice())
},
&ty::UnsizeVtable(ty::TyTrait { def_id: def_id, substs: ref substs, .. }, _) => {
&ty::UnsizeVtable(ty::TyTrait { def_id, ref substs, .. }, _) => {
let substs = substs.with_self_ty(unsized_ty);
let trait_ref =
Rc::new(ty::TraitRef { def_id: def_id,

View File

@ -132,8 +132,8 @@ pub fn trans_method_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
}
typeck::MethodTypeParam(typeck::MethodParam {
trait_ref: ref trait_ref,
method_num: method_num
ref trait_ref,
method_num
}) => {
let trait_ref =
Rc::new(trait_ref.subst(bcx.tcx(),
@ -564,7 +564,7 @@ pub fn get_vtable(bcx: Block,
traits::VtableImpl(
traits::VtableImplData {
impl_def_id: id,
substs: substs,
substs,
nested: _ }) => {
emit_vtable_methods(bcx, id, substs).into_iter()
}

View File

@ -378,13 +378,13 @@ pub fn type_of_adjust(cx: &ctxt, adj: &AutoAdjustment) -> Option<t> {
fn type_of_autoref(cx: &ctxt, autoref: &AutoRef) -> Option<t> {
match autoref {
&AutoUnsize(ref k) => match k {
&UnsizeVtable(TyTrait { def_id, substs: ref substs, bounds }, _) => {
&UnsizeVtable(TyTrait { def_id, ref substs, bounds }, _) => {
Some(mk_trait(cx, def_id, substs.clone(), bounds))
}
_ => None
},
&AutoUnsizeUniq(ref k) => match k {
&UnsizeVtable(TyTrait { def_id, substs: ref substs, bounds }, _) => {
&UnsizeVtable(TyTrait { def_id, ref substs, bounds }, _) => {
Some(mk_uniq(cx, mk_trait(cx, def_id, substs.clone(), bounds)))
}
_ => None
@ -3522,7 +3522,7 @@ pub fn unsize_ty(cx: &ctxt,
format!("UnsizeStruct with bad sty: {}",
ty_to_string(cx, ty)).as_slice())
},
&UnsizeVtable(TyTrait { def_id, substs: ref substs, bounds }, _) => {
&UnsizeVtable(TyTrait { def_id, ref substs, bounds }, _) => {
mk_trait(cx, def_id, substs.clone(), bounds)
}
}
@ -5484,7 +5484,7 @@ pub fn accumulate_lifetimes_in_type(accumulator: &mut Vec<ty::Region>,
ty_rptr(region, _) => accumulator.push(region),
ty_enum(_, ref substs) |
ty_trait(box TyTrait {
substs: ref substs,
ref substs,
..
}) |
ty_struct(_, ref substs) => {

View File

@ -361,7 +361,7 @@ impl TypeFoldable for ty::UnsizeKind {
match *self {
ty::UnsizeLength(len) => ty::UnsizeLength(len),
ty::UnsizeStruct(box ref k, n) => ty::UnsizeStruct(box k.fold_with(folder), n),
ty::UnsizeVtable(ty::TyTrait{bounds, def_id, substs: ref substs}, self_ty) => {
ty::UnsizeVtable(ty::TyTrait{bounds, def_id, ref substs}, self_ty) => {
ty::UnsizeVtable(
ty::TyTrait {
bounds: bounds.fold_with(folder),

View File

@ -382,7 +382,7 @@ pub fn ast_path_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>(
-> TypeAndSubsts {
let tcx = this.tcx();
let ty::Polytype {
generics: generics,
generics,
ty: decl_ty
} = this.get_item_ty(did);
@ -411,7 +411,7 @@ pub fn ast_path_to_ty_relaxed<'tcx, AC: AstConv<'tcx>,
-> TypeAndSubsts {
let tcx = this.tcx();
let ty::Polytype {
generics: generics,
generics,
ty: decl_ty
} = this.get_item_ty(did);

View File

@ -27,7 +27,7 @@ use std::collections::{HashMap, HashSet};
use syntax::ast;
use syntax::ast_util;
use syntax::parse::token;
use syntax::codemap::Span;
use syntax::codemap::{Span, Spanned};
use syntax::print::pprust;
use syntax::ptr::P;
@ -294,7 +294,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
/// `etc` is true if the pattern said '...' and false otherwise.
pub fn check_struct_pat_fields(pcx: &pat_ctxt,
span: Span,
fields: &[ast::FieldPat],
fields: &[Spanned<ast::FieldPat>],
class_fields: Vec<ty::field_ty>,
class_id: ast::DefId,
substitutions: &subst::Substs,
@ -310,7 +310,7 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
// Typecheck each field.
let mut found_fields = HashSet::new();
for field in fields.iter() {
for &Spanned { node: ref field, span } in fields.iter() {
match field_map.find_mut(&field.ident.name) {
Some(&(_, true)) => {
// Check the pattern anyway, so that attempts to look
@ -356,7 +356,7 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
}
pub fn check_struct_pat(pcx: &pat_ctxt, span: Span,
fields: &[ast::FieldPat], etc: bool,
fields: &[Spanned<ast::FieldPat>], etc: bool,
struct_id: ast::DefId,
substitutions: &subst::Substs) {
let _fcx = pcx.fcx;
@ -373,7 +373,7 @@ pub fn check_struct_like_enum_variant_pat(pcx: &pat_ctxt,
span: Span,
expected: ty::t,
path: &ast::Path,
fields: &[ast::FieldPat],
fields: &[Spanned<ast::FieldPat>],
etc: bool,
enum_id: ast::DefId,
substitutions: &subst::Substs) {
@ -416,7 +416,7 @@ pub fn check_struct_like_enum_variant_pat(pcx: &pat_ctxt,
if ty::type_is_error(fcx.node_ty(pat_id)) {
for field in fields.iter() {
check_pat(pcx, &*field.pat, ty::mk_err());
check_pat(pcx, &*field.node.pat, ty::mk_err());
}
}
}

View File

@ -1606,8 +1606,8 @@ impl<'a, 'tcx> LookupContext<'a, 'tcx> {
MethodStaticUnboxedClosure(_) => {
false
}
MethodTypeParam(MethodParam { trait_ref: ref trait_ref, .. }) |
MethodTraitObject(MethodObject { trait_ref: ref trait_ref, .. }) => {
MethodTypeParam(MethodParam { ref trait_ref, .. }) |
MethodTraitObject(MethodObject { ref trait_ref, .. }) => {
Some(trait_ref.def_id) == self.tcx().lang_items.drop_trait()
}
};

View File

@ -2875,8 +2875,8 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
};
let fn_sig = match *fn_sty {
ty::ty_bare_fn(ty::BareFnTy {sig: ref sig, ..}) |
ty::ty_closure(box ty::ClosureTy {sig: ref sig, ..}) => sig,
ty::ty_bare_fn(ty::BareFnTy {ref sig, ..}) |
ty::ty_closure(box ty::ClosureTy {ref sig, ..}) => sig,
_ => {
fcx.type_error_message(call_expr.span, |actual| {
format!("expected function, found `{}`", actual)

View File

@ -845,7 +845,7 @@ fn check_expr_fn_block(rcx: &mut Rcx,
match ty::get(function_type).sty {
ty::ty_closure(box ty::ClosureTy{store: ty::RegionTraitStore(..),
bounds: ref bounds,
ref bounds,
..}) => {
// For closure, ensure that the variables outlive region
// bound, since they are captured by reference.

View File

@ -71,7 +71,7 @@ use syntax::visit;
pub fn collect_item_types(ccx: &CrateCtxt) {
fn collect_intrinsic_type(ccx: &CrateCtxt,
lang_item: ast::DefId) {
let ty::Polytype { ty: ty, .. } =
let ty::Polytype { ty, .. } =
ccx.get_item_ty(lang_item);
ccx.tcx.intrinsic_defs.borrow_mut().insert(lang_item, ty);
}

View File

@ -1354,11 +1354,11 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> {
lifetime: ast::Lifetime)
-> ast::Path {
let RebuildPathInfo {
path: path,
indexes: indexes,
expected: expected,
anon_nums: anon_nums,
region_names: region_names,
path,
indexes,
expected,
anon_nums,
region_names,
} = rebuild_info;
let last_seg = path.segments.last().unwrap();

View File

@ -805,7 +805,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
variance);
}
ty::ty_param(ty::ParamTy { def_id: ref def_id, .. }) => {
ty::ty_param(ty::ParamTy { ref def_id, .. }) => {
assert_eq!(def_id.krate, ast::LOCAL_CRATE);
match self.terms_cx.inferred_map.find(&def_id.node) {
Some(&index) => {

View File

@ -17,7 +17,7 @@ use syntax::ast_util;
use syntax::ast_util::PostExpansionMethod;
use syntax::attr;
use syntax::attr::{AttributeMethods, AttrMetaMethods};
use syntax::codemap::{DUMMY_SP, Pos};
use syntax::codemap::{DUMMY_SP, Pos, Spanned};
use syntax::parse::token::InternedString;
use syntax::parse::token;
use syntax::ptr::P;
@ -2045,7 +2045,7 @@ fn name_from_pat(p: &ast::Pat) -> String {
PatEnum(ref p, _) => path_to_string(p),
PatStruct(ref name, ref fields, etc) => {
format!("{} {{ {}{} }}", path_to_string(name),
fields.iter().map(|fp|
fields.iter().map(|&Spanned { node: ref fp, .. }|
format!("{}: {}", fp.ident.as_str(), name_from_pat(&*fp.pat)))
.collect::<Vec<String>>().connect(", "),
if etc { ", ..." } else { "" }

View File

@ -142,7 +142,7 @@ fn summarize_item(item: &Item) -> (Counts, Option<ModuleSummary>) {
.sum();
(item_counts + subcounts, None)
}
ModuleItem(Module { items: ref items, .. }) => {
ModuleItem(Module { ref items, .. }) => {
let mut counts = item_counts;
let mut submodules = Vec::new();

View File

@ -340,6 +340,7 @@ pub struct Pat {
pub struct FieldPat {
pub ident: Ident,
pub pat: P<Pat>,
pub is_shorthand: bool,
}
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
@ -374,7 +375,7 @@ pub enum Pat_ {
/// "None" means a * pattern where we don't bind the fields to names.
PatEnum(Path, Option<Vec<P<Pat>>>),
PatStruct(Path, Vec<FieldPat>, bool),
PatStruct(Path, Vec<Spanned<FieldPat>>, bool),
PatTup(Vec<P<Pat>>),
PatBox(P<Pat>),
PatRegion(P<Pat>), // reference pattern

View File

@ -602,7 +602,7 @@ pub fn walk_pat(pat: &Pat, it: |&Pat| -> bool) -> bool {
match pat.node {
PatIdent(_, _, Some(ref p)) => walk_pat(&**p, it),
PatStruct(_, ref fields, _) => {
fields.iter().all(|field| walk_pat(&*field.pat, |p| it(p)))
fields.iter().all(|field| walk_pat(&*field.node.pat, |p| it(p)))
}
PatEnum(_, Some(ref s)) | PatTup(ref s) => {
s.iter().all(|p| walk_pat(&**p, |p| it(p)))

View File

@ -169,7 +169,7 @@ pub trait AstBuilder {
bm: ast::BindingMode) -> P<ast::Pat>;
fn pat_enum(&self, span: Span, path: ast::Path, subpats: Vec<P<ast::Pat>> ) -> P<ast::Pat>;
fn pat_struct(&self, span: Span,
path: ast::Path, field_pats: Vec<ast::FieldPat> ) -> P<ast::Pat>;
path: ast::Path, field_pats: Vec<Spanned<ast::FieldPat>> ) -> P<ast::Pat>;
fn pat_tuple(&self, span: Span, pats: Vec<P<ast::Pat>>) -> P<ast::Pat>;
fn pat_some(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat>;
@ -796,7 +796,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
self.pat(span, pat)
}
fn pat_struct(&self, span: Span,
path: ast::Path, field_pats: Vec<ast::FieldPat>) -> P<ast::Pat> {
path: ast::Path, field_pats: Vec<Spanned<ast::FieldPat>>) -> P<ast::Pat> {
let pat = ast::PatStruct(path, field_pats, false);
self.pat(span, pat)
}

View File

@ -1248,7 +1248,10 @@ impl<'a> TraitDef<'a> {
let pattern = if struct_type == Record {
let field_pats = subpats.into_iter().zip(ident_expr.iter()).map(|(pat, &(_, id, _))| {
// id is guaranteed to be Some
ast::FieldPat { ident: id.unwrap(), pat: pat }
codemap::Spanned {
span: pat.span,
node: ast::FieldPat { ident: id.unwrap(), pat: pat, is_shorthand: true },
}
}).collect();
cx.pat_struct(self.span, matching_path, field_pats)
} else {

View File

@ -250,7 +250,7 @@ pub fn parse(sess: &ParseSess,
let mut next_eis = Vec::new(); // or proceed normally
let mut eof_eis = Vec::new();
let TokenAndSpan {tok: tok, sp: sp} = rdr.peek();
let TokenAndSpan { tok, sp } = rdr.peek();
/* we append new items to this while we go */
loop {

View File

@ -1139,10 +1139,12 @@ pub fn noop_fold_pat<T: Folder>(p: P<Pat>, folder: &mut T) -> P<Pat> {
PatStruct(pth, fields, etc) => {
let pth = folder.fold_path(pth);
let fs = fields.move_map(|f| {
ast::FieldPat {
ident: f.ident,
pat: folder.fold_pat(f.pat)
}
Spanned { span: folder.new_span(f.span),
node: ast::FieldPat {
ident: f.node.ident,
pat: folder.fold_pat(f.node.pat),
is_shorthand: f.node.is_shorthand,
}}
});
PatStruct(pth, fs, etc)
}

View File

@ -366,7 +366,7 @@ pub fn gather_comments_and_literals(span_diagnostic: &diagnostic::SpanHandler,
let bstart = rdr.last_pos;
rdr.next_token();
//discard, and look ahead; we're working with internal state
let TokenAndSpan {tok: tok, sp: sp} = rdr.peek();
let TokenAndSpan { tok, sp } = rdr.peek();
if token::is_lit(&tok) {
rdr.with_str_from(bstart, |s| {
debug!("tok lit: {}", s);

View File

@ -3100,7 +3100,7 @@ impl<'a> Parser<'a> {
}
/// Parse the fields of a struct-like pattern
fn parse_pat_fields(&mut self) -> (Vec<ast::FieldPat> , bool) {
fn parse_pat_fields(&mut self) -> (Vec<codemap::Spanned<ast::FieldPat>> , bool) {
let mut fields = Vec::new();
let mut etc = false;
let mut first = true;
@ -3113,6 +3113,9 @@ impl<'a> Parser<'a> {
if self.token == token::RBRACE { break }
}
let lo = self.span.lo;
let hi;
if self.token == token::DOTDOT {
self.bump();
if self.token != token::RBRACE {
@ -3134,7 +3137,7 @@ impl<'a> Parser<'a> {
let fieldname = self.parse_ident();
let subpat = if self.token == token::COLON {
let (subpat, is_shorthand) = if self.token == token::COLON {
match bind_type {
BindByRef(..) | BindByValue(MutMutable) => {
let token_str = self.this_token_to_string();
@ -3145,16 +3148,22 @@ impl<'a> Parser<'a> {
}
self.bump();
self.parse_pat()
let pat = self.parse_pat();
hi = pat.span.hi;
(pat, false)
} else {
hi = self.last_span.hi;
let fieldpath = codemap::Spanned{span:self.last_span, node: fieldname};
P(ast::Pat {
(P(ast::Pat {
id: ast::DUMMY_NODE_ID,
node: PatIdent(bind_type, fieldpath, None),
span: self.last_span
})
}), true)
};
fields.push(ast::FieldPat { ident: fieldname, pat: subpat });
fields.push(codemap::Spanned { span: mk_sp(lo, hi),
node: ast::FieldPat { ident: fieldname,
pat: subpat,
is_shorthand: is_shorthand }});
}
return (fields, etc);
}
@ -3665,9 +3674,9 @@ impl<'a> Parser<'a> {
// wouldn't it be more uniform to parse view items only, here?
let ParsedItemsAndViewItems {
attrs_remaining: attrs_remaining,
view_items: view_items,
items: items,
attrs_remaining,
view_items,
items,
..
} = self.parse_items_and_view_items(first_item_attrs,
false, false);
@ -4705,8 +4714,8 @@ impl<'a> Parser<'a> {
// parse all of the items up to closing or an attribute.
// view items are legal here.
let ParsedItemsAndViewItems {
attrs_remaining: attrs_remaining,
view_items: view_items,
attrs_remaining,
view_items,
items: starting_items,
..
} = self.parse_items_and_view_items(first_item_attrs, true, true);
@ -4978,10 +4987,10 @@ impl<'a> Parser<'a> {
first_item_attrs: Vec<Attribute> )
-> ForeignMod {
let ParsedItemsAndViewItems {
attrs_remaining: attrs_remaining,
view_items: view_items,
attrs_remaining,
view_items,
items: _,
foreign_items: foreign_items
foreign_items,
} = self.parse_foreign_items(first_item_attrs, true);
if !attrs_remaining.is_empty() {
let last_span = self.last_span;

View File

@ -1983,12 +1983,12 @@ impl<'a> State<'a> {
Consistent, fields.as_slice(),
|s, f| {
try!(s.cbox(indent_unit));
try!(s.print_ident(f.ident));
try!(s.print_ident(f.node.ident));
try!(s.word_nbsp(":"));
try!(s.print_pat(&*f.pat));
try!(s.print_pat(&*f.node.pat));
s.end()
},
|f| f.pat.span));
|f| f.node.pat.span));
if etc {
if fields.len() != 0u { try!(self.word_space(",")); }
try!(word(&mut self.s, ".."));

View File

@ -429,7 +429,7 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) {
PatStruct(ref path, ref fields, _) => {
visitor.visit_path(path, pattern.id);
for field in fields.iter() {
visitor.visit_pat(&*field.pat)
visitor.visit_pat(&*field.node.pat)
}
}
PatTup(ref tuple_elements) => {

View File

@ -38,7 +38,7 @@ enum XYZ {
fn field_match_in_patterns(b: XYZ) -> String {
match b {
Y { a: a, .. } => a,
Y { a, .. } => a,
_ => "".to_string()
}
}

View File

@ -0,0 +1,64 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Compile-fail test for the `non_shorthand_field_patterns` lint:
// a pattern written `Foo { x: x }` is redundant and should use the
// field shorthand `Foo { x }`. `bad_style` is allowed because the
// test deliberately declares lowercase type names (`x`) below.
#![allow(bad_style, unused_variables)]
#![deny(non_shorthand_field_patterns)]
struct Foo {
    x: int,
    y: int,
}
fn main() {
    {
        // Redundant `field: binding` forms — the denied lint fires on both
        // the plain and the `ref` binding.
        let Foo {
            x: x, //~ ERROR the `x:` in this pattern is redundant
            y: ref y, //~ ERROR the `y:` in this pattern is redundant
        } = Foo { x: 0, y: 0 };
        // The shorthand equivalents — no lint expected.
        let Foo {
            x,
            ref y,
        } = Foo { x: 0, y: 0 };
    }
    {
        // Here `x` names a constant, so `x: x` matches against the const
        // value rather than introducing a binding; the lint must stay silent.
        const x: int = 1;
        match (Foo { x: 1, y: 1 }) {
            Foo { x: x, ..} => {},
            _ => {},
        }
    }
    {
        // Here `x` is a unit struct, so `x: x` is a struct pattern, not a
        // redundant binding — again no lint expected.
        struct Bar {
            x: x,
        }
        struct x;
        match (Bar { x: x }) {
            Bar { x: x } => {},
        }
    }
    {
        // Here `x` is a variant of the shadowing local `Foo` enum; `x: x`
        // matches the variant, not a binding — no lint expected.
        struct Bar {
            x: Foo,
        }
        enum Foo { x }
        match (Bar { x: x }) {
            Bar { x: x } => {},
        }
    }
}