Auto merge of #126965 - matthiaskrgr:rollup-x3kamn8, r=matthiaskrgr

Rollup of 8 pull requests

Successful merges:

 - #126302 (Detect unused structs which derived Default)
 - #126885 (Remove internal `PathBuf::as_mut_vec`)
 - #126916 (Specify target specific linker for `riscv64gc-gnu` job)
 - #126926 (Tweak a confusing comment in `create_match_candidates`)
 - #126927 (core: VaArgSafe is an unsafe trait)
 - #126932 (Tweak `FlatPat::new` to avoid a temporarily-invalid state)
 - #126946 (Add missing slash in `const_eval_select` doc comment)
 - #126947 (Delegation: ast lowering refactor)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
bors 2024-06-25 21:13:48 +00:00
commit fda509e817
19 changed files with 227 additions and 116 deletions

View File

@ -66,14 +66,18 @@ pub(crate) fn delegation_has_self(&self, item_id: NodeId, path_id: NodeId, span:
let Ok(sig_id) = sig_id else { let Ok(sig_id) = sig_id else {
return false; return false;
}; };
if let Some(local_sig_id) = sig_id.as_local() { self.has_self(sig_id, span)
}
fn has_self(&self, def_id: DefId, span: Span) -> bool {
if let Some(local_sig_id) = def_id.as_local() {
// The value may be missing due to recursive delegation. // The value may be missing due to recursive delegation.
// Error will be emitted later during HIR ty lowering. // Error will be emitted later during HIR ty lowering.
self.resolver.delegation_fn_sigs.get(&local_sig_id).map_or(false, |sig| sig.has_self) self.resolver.delegation_fn_sigs.get(&local_sig_id).map_or(false, |sig| sig.has_self)
} else { } else {
match self.tcx.def_kind(sig_id) { match self.tcx.def_kind(def_id) {
DefKind::Fn => false, DefKind::Fn => false,
DefKind::AssocFn => self.tcx.associated_item(sig_id).fn_has_self_parameter, DefKind::AssocFn => self.tcx.associated_item(def_id).fn_has_self_parameter,
_ => span_bug!(span, "unexpected DefKind for delegation item"), _ => span_bug!(span, "unexpected DefKind for delegation item"),
} }
} }
@ -107,12 +111,17 @@ fn get_delegation_sig_id(
span: Span, span: Span,
) -> Result<DefId, ErrorGuaranteed> { ) -> Result<DefId, ErrorGuaranteed> {
let sig_id = if self.is_in_trait_impl { item_id } else { path_id }; let sig_id = if self.is_in_trait_impl { item_id } else { path_id };
let sig_id = self.get_resolution_id(sig_id, span)
self.resolver.get_partial_res(sig_id).and_then(|r| r.expect_full_res().opt_def_id()); }
sig_id.ok_or_else(|| {
self.tcx fn get_resolution_id(&self, node_id: NodeId, span: Span) -> Result<DefId, ErrorGuaranteed> {
.dcx() let def_id =
.span_delayed_bug(span, "LoweringContext: couldn't resolve delegation item") self.resolver.get_partial_res(node_id).and_then(|r| r.expect_full_res().opt_def_id());
def_id.ok_or_else(|| {
self.tcx.dcx().span_delayed_bug(
span,
format!("LoweringContext: couldn't resolve node {:?} in delegation item", node_id),
)
}) })
} }
@ -122,7 +131,7 @@ fn lower_delegation_generics(&mut self, span: Span) -> &'hir hir::Generics<'hir>
predicates: &[], predicates: &[],
has_where_clause_predicates: false, has_where_clause_predicates: false,
where_clause_span: span, where_clause_span: span,
span: span, span,
}) })
} }
@ -222,12 +231,7 @@ fn generate_arg(&mut self, param_id: HirId, span: Span) -> hir::Expr<'hir> {
})); }));
let path = self.arena.alloc(hir::Path { span, res: Res::Local(param_id), segments }); let path = self.arena.alloc(hir::Path { span, res: Res::Local(param_id), segments });
self.mk_expr(hir::ExprKind::Path(hir::QPath::Resolved(None, path)), span)
hir::Expr {
hir_id: self.next_id(),
kind: hir::ExprKind::Path(hir::QPath::Resolved(None, path)),
span,
}
} }
fn lower_delegation_body( fn lower_delegation_body(
@ -236,19 +240,11 @@ fn lower_delegation_body(
param_count: usize, param_count: usize,
span: Span, span: Span,
) -> BodyId { ) -> BodyId {
let path = self.lower_qpath(
delegation.id,
&delegation.qself,
&delegation.path,
ParamMode::Optional,
ImplTraitContext::Disallowed(ImplTraitPosition::Path),
None,
);
let block = delegation.body.as_deref(); let block = delegation.body.as_deref();
self.lower_body(|this| { self.lower_body(|this| {
let mut parameters: Vec<hir::Param<'_>> = Vec::new(); let mut parameters: Vec<hir::Param<'_>> = Vec::with_capacity(param_count);
let mut args: Vec<hir::Expr<'hir>> = Vec::new(); let mut args: Vec<hir::Expr<'_>> = Vec::with_capacity(param_count);
for idx in 0..param_count { for idx in 0..param_count {
let (param, pat_node_id) = this.generate_param(span); let (param, pat_node_id) = this.generate_param(span);
@ -264,11 +260,7 @@ fn lower_delegation_body(
}; };
self_resolver.visit_block(block); self_resolver.visit_block(block);
let block = this.lower_block(block, false); let block = this.lower_block(block, false);
hir::Expr { this.mk_expr(hir::ExprKind::Block(block, None), block.span)
hir_id: this.next_id(),
kind: hir::ExprKind::Block(block, None),
span: block.span,
}
} else { } else {
let pat_hir_id = this.lower_node_id(pat_node_id); let pat_hir_id = this.lower_node_id(pat_node_id);
this.generate_arg(pat_hir_id, span) this.generate_arg(pat_hir_id, span)
@ -276,43 +268,41 @@ fn lower_delegation_body(
args.push(arg); args.push(arg);
} }
let args = self.arena.alloc_from_iter(args); let final_expr = this.finalize_body_lowering(delegation, args, span);
let final_expr = this.generate_call(path, args);
(this.arena.alloc_from_iter(parameters), final_expr) (this.arena.alloc_from_iter(parameters), final_expr)
}) })
} }
fn generate_call( // Generates fully qualified call for the resulting body.
fn finalize_body_lowering(
&mut self, &mut self,
path: hir::QPath<'hir>, delegation: &Delegation,
args: &'hir [hir::Expr<'hir>], args: Vec<hir::Expr<'hir>>,
span: Span,
) -> hir::Expr<'hir> { ) -> hir::Expr<'hir> {
let callee = self.arena.alloc(hir::Expr { let path = self.lower_qpath(
hir_id: self.next_id(), delegation.id,
kind: hir::ExprKind::Path(path), &delegation.qself,
span: path.span(), &delegation.path,
}); ParamMode::Optional,
ImplTraitContext::Disallowed(ImplTraitPosition::Path),
None,
);
let expr = self.arena.alloc(hir::Expr { let args = self.arena.alloc_from_iter(args);
hir_id: self.next_id(), let path_expr = self.arena.alloc(self.mk_expr(hir::ExprKind::Path(path), span));
kind: hir::ExprKind::Call(callee, args), let call = self.arena.alloc(self.mk_expr(hir::ExprKind::Call(path_expr, args), span));
span: path.span(),
});
let block = self.arena.alloc(hir::Block { let block = self.arena.alloc(hir::Block {
stmts: &[], stmts: &[],
expr: Some(expr), expr: Some(call),
hir_id: self.next_id(), hir_id: self.next_id(),
rules: hir::BlockCheckMode::DefaultBlock, rules: hir::BlockCheckMode::DefaultBlock,
span: path.span(), span,
targeted_by_break: false, targeted_by_break: false,
}); });
hir::Expr { self.mk_expr(hir::ExprKind::Block(block, None), span)
hir_id: self.next_id(),
kind: hir::ExprKind::Block(block, None),
span: path.span(),
}
} }
fn generate_delegation_error( fn generate_delegation_error(
@ -333,11 +323,7 @@ fn generate_delegation_error(
let header = self.generate_header_error(); let header = self.generate_header_error();
let sig = hir::FnSig { decl, header, span }; let sig = hir::FnSig { decl, header, span };
let body_id = self.lower_body(|this| { let body_id = self.lower_body(|this| (&[], this.mk_expr(hir::ExprKind::Err(err), span)));
let expr =
hir::Expr { hir_id: this.next_id(), kind: hir::ExprKind::Err(err), span: span };
(&[], expr)
});
DelegationResults { generics, body_id, sig } DelegationResults { generics, body_id, sig }
} }
@ -349,6 +335,11 @@ fn generate_header_error(&self) -> hir::FnHeader {
abi: abi::Abi::Rust, abi: abi::Abi::Rust,
} }
} }
#[inline]
fn mk_expr(&mut self, kind: hir::ExprKind<'hir>, span: Span) -> hir::Expr<'hir> {
hir::Expr { hir_id: self.next_id(), kind, span }
}
} }
struct SelfResolver<'a> { struct SelfResolver<'a> {

View File

@ -358,8 +358,11 @@ fn create_match_candidates<'pat>(
where where
'a: 'pat, 'a: 'pat,
{ {
// Assemble a list of candidates: there is one candidate per pattern, // Assemble the initial list of candidates. These top-level candidates
// which means there may be more than one candidate *per arm*. // are 1:1 with the original match arms, but other parts of match
// lowering also introduce subcandidates (for subpatterns), and will
// also flatten candidates in some cases. So in general a list of
// candidates does _not_ necessarily correspond to a list of arms.
arms.iter() arms.iter()
.copied() .copied()
.map(|arm| { .map(|arm| {
@ -1031,6 +1034,12 @@ fn is_empty(&self) -> bool {
} }
/// A pattern in a form suitable for generating code. /// A pattern in a form suitable for generating code.
///
/// Here, "flat" indicates that the pattern's match pairs have been recursively
/// simplified by [`Builder::simplify_match_pairs`]. They are not necessarily
/// flat in an absolute sense.
///
/// Will typically be incorporated into a [`Candidate`].
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
struct FlatPat<'pat, 'tcx> { struct FlatPat<'pat, 'tcx> {
/// To match the pattern, all of these must be satisfied... /// To match the pattern, all of these must be satisfied...
@ -1042,23 +1051,25 @@ struct FlatPat<'pat, 'tcx> {
} }
impl<'tcx, 'pat> FlatPat<'pat, 'tcx> { impl<'tcx, 'pat> FlatPat<'pat, 'tcx> {
/// Creates a `FlatPat` containing a simplified [`MatchPair`] list/forest
/// for the given pattern.
fn new( fn new(
place: PlaceBuilder<'tcx>, place: PlaceBuilder<'tcx>,
pattern: &'pat Pat<'tcx>, pattern: &'pat Pat<'tcx>,
cx: &mut Builder<'_, 'tcx>, cx: &mut Builder<'_, 'tcx>,
) -> Self { ) -> Self {
let is_never = pattern.is_never_pattern(); // First, recursively build a tree of match pairs for the given pattern.
let mut flat_pat = FlatPat { let mut match_pairs = vec![MatchPair::new(place, pattern, cx)];
match_pairs: vec![MatchPair::new(place, pattern, cx)], let mut extra_data = PatternExtraData {
extra_data: PatternExtraData { span: pattern.span,
span: pattern.span, bindings: Vec::new(),
bindings: Vec::new(), ascriptions: Vec::new(),
ascriptions: Vec::new(), is_never: pattern.is_never_pattern(),
is_never,
},
}; };
cx.simplify_match_pairs(&mut flat_pat.match_pairs, &mut flat_pat.extra_data); // Partly-flatten and sort the match pairs, while recording extra data.
flat_pat cx.simplify_match_pairs(&mut match_pairs, &mut extra_data);
Self { match_pairs, extra_data }
} }
} }
@ -1104,9 +1115,12 @@ fn new(
has_guard: bool, has_guard: bool,
cx: &mut Builder<'_, 'tcx>, cx: &mut Builder<'_, 'tcx>,
) -> Self { ) -> Self {
// Use `FlatPat` to build simplified match pairs, then immediately
// incorporate them into a new candidate.
Self::from_flat_pat(FlatPat::new(place, pattern, cx), has_guard) Self::from_flat_pat(FlatPat::new(place, pattern, cx), has_guard)
} }
/// Incorporates an already-simplified [`FlatPat`] into a new candidate.
fn from_flat_pat(flat_pat: FlatPat<'pat, 'tcx>, has_guard: bool) -> Self { fn from_flat_pat(flat_pat: FlatPat<'pat, 'tcx>, has_guard: bool) -> Self {
Candidate { Candidate {
match_pairs: flat_pat.match_pairs, match_pairs: flat_pat.match_pairs,

View File

@ -95,6 +95,8 @@ pub(crate) fn false_edges(
} }
impl<'pat, 'tcx> MatchPair<'pat, 'tcx> { impl<'pat, 'tcx> MatchPair<'pat, 'tcx> {
/// Recursively builds a `MatchPair` tree for the given pattern and its
/// subpatterns.
pub(in crate::build) fn new( pub(in crate::build) fn new(
mut place_builder: PlaceBuilder<'tcx>, mut place_builder: PlaceBuilder<'tcx>,
pattern: &'pat Pat<'tcx>, pattern: &'pat Pat<'tcx>,

View File

@ -399,6 +399,31 @@ fn should_ignore_item(&mut self, def_id: DefId) -> bool {
return false; return false;
} }
// don't ignore impls for enums and pub structs whose methods don't have a self receiver,
// because an external crate may call such methods to construct values of these types
if let Some(local_impl_of) = impl_of.as_local()
&& let Some(local_def_id) = def_id.as_local()
&& let Some(fn_sig) =
self.tcx.hir().fn_sig_by_hir_id(self.tcx.local_def_id_to_hir_id(local_def_id))
&& matches!(fn_sig.decl.implicit_self, hir::ImplicitSelfKind::None)
&& let TyKind::Path(hir::QPath::Resolved(_, path)) =
self.tcx.hir().expect_item(local_impl_of).expect_impl().self_ty.kind
&& let Res::Def(def_kind, did) = path.res
{
match def_kind {
// for example, #[derive(Default)] pub struct T(i32);
// an external crate can call T::default() to construct T,
// so don't ignore impl Default for pub structs and unions
DefKind::Struct | DefKind::Union if self.tcx.visibility(did).is_public() => {
return false;
}
// don't ignore impl Default for enums,
// because we don't know which variant is constructed
DefKind::Enum => return false,
_ => (),
};
}
if let Some(trait_of) = self.tcx.trait_id_of_impl(impl_of) if let Some(trait_of) = self.tcx.trait_id_of_impl(impl_of)
&& self.tcx.has_attr(trait_of, sym::rustc_trivial_field_reads) && self.tcx.has_attr(trait_of, sym::rustc_trivial_field_reads)
{ {

View File

@ -396,7 +396,7 @@ fn show_arc() {
// Make sure deriving works with Arc<T> // Make sure deriving works with Arc<T>
#[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)] #[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)]
struct Foo { struct _Foo {
inner: Arc<i32>, inner: Arc<i32>,
} }

View File

@ -103,6 +103,7 @@
/// ``` /// ```
#[cfg_attr(not(test), rustc_diagnostic_item = "Default")] #[cfg_attr(not(test), rustc_diagnostic_item = "Default")]
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(bootstrap), rustc_trivial_field_reads)]
pub trait Default: Sized { pub trait Default: Sized {
/// Returns the "default value" for a type. /// Returns the "default value" for a type.
/// ///

View File

@ -484,7 +484,7 @@ mod sealed_trait {
all supported platforms", all supported platforms",
issue = "44930" issue = "44930"
)] )]
pub trait VaArgSafe {} pub unsafe trait VaArgSafe {}
} }
macro_rules! impl_va_arg_safe { macro_rules! impl_va_arg_safe {
@ -494,7 +494,7 @@ macro_rules! impl_va_arg_safe {
reason = "the `c_variadic` feature has not been properly tested on \ reason = "the `c_variadic` feature has not been properly tested on \
all supported platforms", all supported platforms",
issue = "44930")] issue = "44930")]
impl sealed_trait::VaArgSafe for $t {} unsafe impl sealed_trait::VaArgSafe for $t {}
)+ )+
} }
} }
@ -509,14 +509,15 @@ impl sealed_trait::VaArgSafe for $t {}
all supported platforms", all supported platforms",
issue = "44930" issue = "44930"
)] )]
impl<T> sealed_trait::VaArgSafe for *mut T {} unsafe impl<T> sealed_trait::VaArgSafe for *mut T {}
#[unstable( #[unstable(
feature = "c_variadic", feature = "c_variadic",
reason = "the `c_variadic` feature has not been properly tested on \ reason = "the `c_variadic` feature has not been properly tested on \
all supported platforms", all supported platforms",
issue = "44930" issue = "44930"
)] )]
impl<T> sealed_trait::VaArgSafe for *const T {} unsafe impl<T> sealed_trait::VaArgSafe for *const T {}
#[unstable( #[unstable(
feature = "c_variadic", feature = "c_variadic",

View File

@ -2579,7 +2579,7 @@ pub const fn ptr_guaranteed_cmp<T>(ptr: *const T, other: *const T) -> u8 {
/// fn runtime() -> i32 { 1 } /// fn runtime() -> i32 { 1 }
/// const fn compiletime() -> i32 { 2 } /// const fn compiletime() -> i32 { 2 }
/// ///
// // ⚠ This code violates the required equivalence of `compiletime` /// // ⚠ This code violates the required equivalence of `compiletime`
/// // and `runtime`. /// // and `runtime`.
/// const_eval_select((), compiletime, runtime) /// const_eval_select((), compiletime, runtime)
/// } /// }

View File

@ -552,10 +552,20 @@ pub fn leak<'a>(self) -> &'a mut OsStr {
OsStr::from_inner_mut(self.inner.leak()) OsStr::from_inner_mut(self.inner.leak())
} }
/// Part of a hack to make PathBuf::push/pop more efficient. /// Provides plumbing to core `Vec::truncate`.
/// More well behaving alternative to allowing outer types
/// full mutable access to the core `Vec`.
#[inline] #[inline]
pub(crate) fn as_mut_vec_for_path_buf(&mut self) -> &mut Vec<u8> { pub(crate) fn truncate(&mut self, len: usize) {
self.inner.as_mut_vec_for_path_buf() self.inner.truncate(len);
}
/// Provides plumbing to core `Vec::extend_from_slice`.
/// More well behaving alternative to allowing outer types
/// full mutable access to the core `Vec`.
#[inline]
pub(crate) fn extend_from_slice(&mut self, other: &[u8]) {
self.inner.extend_from_slice(other);
} }
} }

View File

@ -1163,11 +1163,6 @@ pub struct PathBuf {
} }
impl PathBuf { impl PathBuf {
#[inline]
fn as_mut_vec(&mut self) -> &mut Vec<u8> {
self.inner.as_mut_vec_for_path_buf()
}
/// Allocates an empty `PathBuf`. /// Allocates an empty `PathBuf`.
/// ///
/// # Examples /// # Examples
@ -1290,7 +1285,8 @@ pub fn push<P: AsRef<Path>>(&mut self, path: P) {
fn _push(&mut self, path: &Path) { fn _push(&mut self, path: &Path) {
// in general, a separator is needed if the rightmost byte is not a separator // in general, a separator is needed if the rightmost byte is not a separator
let mut need_sep = self.as_mut_vec().last().map(|c| !is_sep_byte(*c)).unwrap_or(false); let buf = self.inner.as_encoded_bytes();
let mut need_sep = buf.last().map(|c| !is_sep_byte(*c)).unwrap_or(false);
// in the special case of `C:` on Windows, do *not* add a separator // in the special case of `C:` on Windows, do *not* add a separator
let comps = self.components(); let comps = self.components();
@ -1304,7 +1300,7 @@ fn _push(&mut self, path: &Path) {
// absolute `path` replaces `self` // absolute `path` replaces `self`
if path.is_absolute() || path.prefix().is_some() { if path.is_absolute() || path.prefix().is_some() {
self.as_mut_vec().truncate(0); self.inner.truncate(0);
// verbatim paths need . and .. removed // verbatim paths need . and .. removed
} else if comps.prefix_verbatim() && !path.inner.is_empty() { } else if comps.prefix_verbatim() && !path.inner.is_empty() {
@ -1349,7 +1345,7 @@ fn _push(&mut self, path: &Path) {
// `path` has a root but no prefix, e.g., `\windows` (Windows only) // `path` has a root but no prefix, e.g., `\windows` (Windows only)
} else if path.has_root() { } else if path.has_root() {
let prefix_len = self.components().prefix_remaining(); let prefix_len = self.components().prefix_remaining();
self.as_mut_vec().truncate(prefix_len); self.inner.truncate(prefix_len);
// `path` is a pure relative path // `path` is a pure relative path
} else if need_sep { } else if need_sep {
@ -1382,7 +1378,7 @@ fn _push(&mut self, path: &Path) {
pub fn pop(&mut self) -> bool { pub fn pop(&mut self) -> bool {
match self.parent().map(|p| p.as_u8_slice().len()) { match self.parent().map(|p| p.as_u8_slice().len()) {
Some(len) => { Some(len) => {
self.as_mut_vec().truncate(len); self.inner.truncate(len);
true true
} }
None => false, None => false,
@ -1510,15 +1506,14 @@ fn _set_extension(&mut self, extension: &OsStr) -> bool {
// truncate until right after the file stem // truncate until right after the file stem
let end_file_stem = file_stem[file_stem.len()..].as_ptr().addr(); let end_file_stem = file_stem[file_stem.len()..].as_ptr().addr();
let start = self.inner.as_encoded_bytes().as_ptr().addr(); let start = self.inner.as_encoded_bytes().as_ptr().addr();
let v = self.as_mut_vec(); self.inner.truncate(end_file_stem.wrapping_sub(start));
v.truncate(end_file_stem.wrapping_sub(start));
// add the new extension, if any // add the new extension, if any
let new = extension.as_encoded_bytes(); let new = extension;
if !new.is_empty() { if !new.is_empty() {
v.reserve_exact(new.len() + 1); self.inner.reserve_exact(new.len() + 1);
v.push(b'.'); self.inner.push(OsStr::new("."));
v.extend_from_slice(new); self.inner.push(new);
} }
true true
@ -2645,18 +2640,18 @@ fn _with_extension(&self, extension: &OsStr) -> PathBuf {
None => { None => {
// Enough capacity for the extension and the dot // Enough capacity for the extension and the dot
let capacity = self_len + extension.len() + 1; let capacity = self_len + extension.len() + 1;
let whole_path = self_bytes.iter(); let whole_path = self_bytes;
(capacity, whole_path) (capacity, whole_path)
} }
Some(previous_extension) => { Some(previous_extension) => {
let capacity = self_len + extension.len() - previous_extension.len(); let capacity = self_len + extension.len() - previous_extension.len();
let path_till_dot = self_bytes[..self_len - previous_extension.len()].iter(); let path_till_dot = &self_bytes[..self_len - previous_extension.len()];
(capacity, path_till_dot) (capacity, path_till_dot)
} }
}; };
let mut new_path = PathBuf::with_capacity(new_capacity); let mut new_path = PathBuf::with_capacity(new_capacity);
new_path.as_mut_vec().extend(slice_to_copy); new_path.inner.extend_from_slice(slice_to_copy);
new_path.set_extension(extension); new_path.set_extension(extension);
new_path new_path
} }

View File

@ -202,10 +202,20 @@ pub fn into_rc(&self) -> Rc<Slice> {
self.as_slice().into_rc() self.as_slice().into_rc()
} }
/// Part of a hack to make PathBuf::push/pop more efficient. /// Provides plumbing to core `Vec::truncate`.
/// More well behaving alternative to allowing outer types
/// full mutable access to the core `Vec`.
#[inline] #[inline]
pub(crate) fn as_mut_vec_for_path_buf(&mut self) -> &mut Vec<u8> { pub(crate) fn truncate(&mut self, len: usize) {
&mut self.inner self.inner.truncate(len);
}
/// Provides plumbing to core `Vec::extend_from_slice`.
/// More well behaving alternative to allowing outer types
/// full mutable access to the core `Vec`.
#[inline]
pub(crate) fn extend_from_slice(&mut self, other: &[u8]) {
self.inner.extend_from_slice(other);
} }
} }

View File

@ -165,10 +165,20 @@ pub fn into_rc(&self) -> Rc<Slice> {
self.as_slice().into_rc() self.as_slice().into_rc()
} }
/// Part of a hack to make PathBuf::push/pop more efficient. /// Provides plumbing to core `Vec::truncate`.
/// More well behaving alternative to allowing outer types
/// full mutable access to the core `Vec`.
#[inline] #[inline]
pub(crate) fn as_mut_vec_for_path_buf(&mut self) -> &mut Vec<u8> { pub(crate) fn truncate(&mut self, len: usize) {
self.inner.as_mut_vec_for_path_buf() self.inner.truncate(len);
}
/// Provides plumbing to core `Vec::extend_from_slice`.
/// More well behaving alternative to allowing outer types
/// full mutable access to the core `Vec`.
#[inline]
pub(crate) fn extend_from_slice(&mut self, other: &[u8]) {
self.inner.extend_from_slice(other);
} }
} }

View File

@ -474,13 +474,13 @@ pub fn from_box(boxed: Box<Wtf8>) -> Wtf8Buf {
Wtf8Buf { bytes: bytes.into_vec(), is_known_utf8: false } Wtf8Buf { bytes: bytes.into_vec(), is_known_utf8: false }
} }
/// Part of a hack to make PathBuf::push/pop more efficient. /// Provides plumbing to core `Vec::extend_from_slice`.
/// More well behaving alternative to allowing outer types
/// full mutable access to the core `Vec`.
#[inline] #[inline]
pub(crate) fn as_mut_vec_for_path_buf(&mut self) -> &mut Vec<u8> { pub(crate) fn extend_from_slice(&mut self, other: &[u8]) {
// FIXME: this function should not even exist, as it implies violating Wtf8Buf invariants self.bytes.extend_from_slice(other);
// For now, simply assume that is about to happen. self.is_known_utf8 = self.is_known_utf8 || self.next_surrogate(0).is_none();
self.is_known_utf8 = false;
&mut self.bytes
} }
} }

View File

@ -91,7 +91,9 @@ RUN sh /scripts/sccache.sh
# Avoid "fatal: detected dubious ownership in repository at '/checkout'" error # Avoid "fatal: detected dubious ownership in repository at '/checkout'" error
RUN git config --global --add safe.directory /checkout RUN git config --global --add safe.directory /checkout
ENV RUST_CONFIGURE_ARGS --qemu-riscv64-rootfs=/tmp/rootfs ENV RUST_CONFIGURE_ARGS \
--qemu-riscv64-rootfs=/tmp/rootfs \
--set target.riscv64gc-unknown-linux-gnu.linker=riscv64-linux-gnu-gcc
ENV SCRIPT python3 ../x.py --stage 2 test --host='' --target riscv64gc-unknown-linux-gnu ENV SCRIPT python3 ../x.py --stage 2 test --host='' --target riscv64gc-unknown-linux-gnu
ENV NO_CHANGE_USER=1 ENV NO_CHANGE_USER=1

View File

@ -110,10 +110,10 @@ error[E0308]: mismatched types
--> $DIR/explicit-paths.rs:78:30 --> $DIR/explicit-paths.rs:78:30
| |
LL | reuse <S2 as Trait>::foo1; LL | reuse <S2 as Trait>::foo1;
| ---------------^^^^ | ^^^^
| | | | |
| | expected `&S2`, found `&S` | expected `&S2`, found `&S`
| arguments to this function are incorrect | arguments to this function are incorrect
| |
= note: expected reference `&S2` = note: expected reference `&S2`
found reference `&S` found reference `&S`

View File

@ -22,6 +22,6 @@ enum MyOption<T> {
} }
fn main() { fn main() {
assert_eq!(Foo::default(), Foo::Alpha); assert!(matches!(Foo::default(), Foo::Alpha));
assert!(matches!(MyOption::<NotDefault>::default(), MyOption::None)); assert!(matches!(MyOption::<NotDefault>::default(), MyOption::None));
} }

View File

@ -7,6 +7,7 @@
use std::panic::catch_unwind; use std::panic::catch_unwind;
#[allow(dead_code)]
#[derive(Default)] #[derive(Default)]
struct Guard; struct Guard;

View File

@ -0,0 +1,25 @@
#![deny(dead_code)]
#[derive(Default)]
struct T; //~ ERROR struct `T` is never constructed
#[derive(Default)]
struct Used;
#[derive(Default)]
enum E {
#[default]
A,
B, //~ ERROR variant `B` is never constructed
}
// an external crate can call T2::default() to construct T2,
// so no warnings are emitted for pub ADTs
#[derive(Default)]
pub struct T2 {
_unread: i32,
}
fn main() {
let _x: Used = Default::default();
}

View File

@ -0,0 +1,24 @@
error: struct `T` is never constructed
--> $DIR/unused-struct-derive-default.rs:4:8
|
LL | struct T;
| ^
|
= note: `T` has a derived impl for the trait `Default`, but this is intentionally ignored during dead code analysis
note: the lint level is defined here
--> $DIR/unused-struct-derive-default.rs:1:9
|
LL | #![deny(dead_code)]
| ^^^^^^^^^
error: variant `B` is never constructed
--> $DIR/unused-struct-derive-default.rs:13:5
|
LL | enum E {
| - variant in this enum
...
LL | B,
| ^
error: aborting due to 2 previous errors