Auto merge of #109043 - matthiaskrgr:rollup-genkz0e, r=matthiaskrgr
Rollup of 9 pull requests

Successful merges:

 - #108726 (tidy: enforce comment blocks to have an even number of backticks)
 - #108797 (Allow binary files to go through the `FileLoader`)
 - #108841 (Add suggestion to diagnostic when user has array but trait wants slice. (rebased))
 - #108984 (bootstrap: document tidy)
 - #109013 (Give proper error message when tcx wasn't passed to decoder)
 - #109017 (remove duplicated calls to sort_string)
 - #109018 (Expand on the allocator comment in `rustc-main`)
 - #109028 (Add eslint checks for rustdoc-js tester)
 - #109034 (Commit some tests for the new solver + lazy norm)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
commit 24c0b81c1f
@@ -24,6 +24,15 @@
 // The two crates we link to here, `std` and `rustc_driver`, are both dynamic
 // libraries. So we must reference jemalloc symbols one way or another, because
 // this file is the only object code in the rustc executable.
+//
+// NOTE: if you are reading this comment because you want to set a custom `global_allocator` for
+// benchmarking, consider using the benchmarks in the `rustc-perf` collector suite instead:
+// https://github.com/rust-lang/rustc-perf/blob/master/collector/README.md#profiling
+//
+// NOTE: if you are reading this comment because you want to replace jemalloc with another allocator
+// to compare their performance, see
+// https://github.com/rust-lang/rust/commit/b90cfc887c31c3e7a9e6d462e2464db1fe506175#diff-43914724af6e464c1da2171e4a9b6c7e607d5bc1203fa95c0ab85be4122605ef
+// for an example of how to do so.
 
 #[unix_sigpipe = "sig_dfl"]
 fn main() {
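The note above points people who want to benchmark a custom allocator at `rustc-perf`. As a hedged, standalone illustration (not part of this diff), this is what overriding the global allocator looks like in ordinary Rust code, here swapping in the system allocator:

```rust
// Standalone sketch: override the global allocator with `std::alloc::System`.
// Any type implementing `GlobalAlloc` could be used instead.
use std::alloc::System;

#[global_allocator]
static GLOBAL: System = System;

fn main() {
    // All heap allocations below now go through `System`.
    let v = vec![1, 2, 3];
    println!("allocated {} elements", v.len());
}
```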
@@ -1052,6 +1052,7 @@ fn create_method(
 /// ::core::hash::Hash::hash(&{ self.y }, state)
 /// }
 /// }
+/// ```
 fn expand_struct_method_body<'b>(
 &self,
 cx: &mut ExtCtxt<'_>,
@@ -438,6 +438,7 @@ fn build_enum_variant_member_di_node<'ll, 'tcx>(
 /// DW_TAG_structure_type (type of variant 1)
 /// DW_TAG_structure_type (type of variant 2)
 /// DW_TAG_structure_type (type of variant 3)
+/// ```
 struct VariantMemberInfo<'a, 'll> {
 variant_index: VariantIdx,
 variant_name: Cow<'a, str>,
@@ -41,7 +41,7 @@ impl Add for Foo {
 
 fn main() {
 let mut x: Foo = Foo(5);
-x += Foo(7); // error, `+= cannot be applied to the type `Foo`
+x += Foo(7); // error, `+=` cannot be applied to the type `Foo`
 }
 ```
 
@@ -3,14 +3,14 @@ An unknown tool name was found in a scoped lint.
 Erroneous code examples:
 
 ```compile_fail,E0710
-#[allow(clipp::filter_map)] // error!`
+#[allow(clipp::filter_map)] // error!
 fn main() {
 // business logic
 }
 ```
 
 ```compile_fail,E0710
-#[warn(clipp::filter_map)] // error!`
+#[warn(clipp::filter_map)] // error!
 fn main() {
 // business logic
 }
@@ -398,7 +398,7 @@ fn sig_of_closure_no_expectation(
 ///
 /// Here:
 /// - E would be `fn(&u32) -> &u32`.
-/// - S would be `fn(&u32) ->
+/// - S would be `fn(&u32) -> ?T`
 /// - E' is `&'!0 u32 -> &'!0 u32`
 /// - S' is `&'?0 u32 -> ?T`
 ///
@@ -104,7 +104,7 @@ pub(super) fn try_report_static_impl_trait(&self) -> Option<ErrorGuaranteed> {
 let (mention_influencer, influencer_point) =
 if sup_origin.span().overlaps(param.param_ty_span) {
 // Account for `async fn` like in `async-await/issues/issue-62097.rs`.
-// The desugaring of `async `fn`s causes `sup_origin` and `param` to point at the same
+// The desugaring of `async fn`s causes `sup_origin` and `param` to point at the same
 // place (but with different `ctxt`, hence `overlaps` instead of `==` above).
 //
 // This avoids the following:
@@ -311,8 +311,11 @@ fn decode<'a, 'tcx, M: Metadata<'a, 'tcx>>(
 impl<'a, 'tcx> DecodeContext<'a, 'tcx> {
 #[inline]
 fn tcx(&self) -> TyCtxt<'tcx> {
-debug_assert!(self.tcx.is_some(), "missing TyCtxt in DecodeContext");
-self.tcx.unwrap()
+let Some(tcx) = self.tcx else {
+bug!("No TyCtxt found for decoding. \
+You need to explicitly pass `(crate_metadata_ref, tcx)` to `decode` instead of just `crate_metadata_ref`.");
+};
+tcx
 }
 
 #[inline]
@@ -454,7 +457,12 @@ fn decode(d: &mut DecodeContext<'a, 'tcx>) -> ast::AttrId {
 impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for SyntaxContext {
 fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> SyntaxContext {
 let cdata = decoder.cdata();
-let sess = decoder.sess.unwrap();
+
+let Some(sess) = decoder.sess else {
+bug!("Cannot decode SyntaxContext without Session.\
+You need to explicitly pass `(crate_metadata_ref, tcx)` to `decode` instead of just `crate_metadata_ref`.");
+};
+
 let cname = cdata.root.name;
 rustc_span::hygiene::decode_syntax_context(decoder, &cdata.hygiene_context, |_, id| {
 debug!("SpecializedDecoder<SyntaxContext>: decoding {}", id);
@@ -471,7 +479,11 @@ fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> SyntaxContext {
 impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for ExpnId {
 fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> ExpnId {
 let local_cdata = decoder.cdata();
-let sess = decoder.sess.unwrap();
+
+let Some(sess) = decoder.sess else {
+bug!("Cannot decode ExpnId without Session. \
+You need to explicitly pass `(crate_metadata_ref, tcx)` to `decode` instead of just `crate_metadata_ref`.");
+};
+
 let cnum = CrateNum::decode(decoder);
 let index = u32::decode(decoder);
@@ -520,7 +532,8 @@ fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> Span {
 let hi = lo + len;
 
 let Some(sess) = decoder.sess else {
-bug!("Cannot decode Span without Session.")
+bug!("Cannot decode Span without Session. \
+You need to explicitly pass `(crate_metadata_ref, tcx)` to `decode` instead of just `crate_metadata_ref`.")
 };
 
 // Index of the file in the corresponding crate's list of encoded files.
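All four hunks above replace a bare `unwrap()` (or a `debug_assert!` plus `unwrap()`) with a `let ... else` that says why the value was missing and what the caller should do instead. A hedged, self-contained sketch of that pattern (the `Decoder` type and its field are stand-ins invented for the example, not rustc's real types):

```rust
// Minimal stand-in types to show the `let ... else` pattern used above.
struct Decoder {
    tcx: Option<u32>, // placeholder for `Option<TyCtxt<'tcx>>`
}

impl Decoder {
    fn tcx(&self) -> u32 {
        // Instead of `self.tcx.unwrap()`, fail with an actionable message.
        let Some(tcx) = self.tcx else {
            panic!(
                "No TyCtxt found for decoding. \
                 Pass `(crate_metadata_ref, tcx)` to `decode` instead of just `crate_metadata_ref`."
            );
        };
        tcx
    }
}

fn main() {
    let decoder = Decoder { tcx: Some(7) };
    assert_eq!(decoder.tcx(), 7);
}
```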
@@ -123,6 +123,7 @@ fn dump_matched_mir_node<'tcx, F>(
 // see notes on #41697 above
 let def_path =
 ty::print::with_forced_impl_filename_line!(tcx.def_path_str(body.source.def_id()));
+// ignore-tidy-odd-backticks the literal below is fine
 write!(file, "// MIR for `{}", def_path)?;
 match body.source.promoted {
 None => write!(file, "`")?,
@@ -151,12 +151,8 @@ fn report_maybe_different(expected: &str, found: &str) -> String {
 .into(),
 RegionsPlaceholderMismatch => "one type is more general than the other".into(),
 ArgumentSorts(values, _) | Sorts(values) => {
-let mut expected = values.expected.sort_string(tcx);
-let mut found = values.found.sort_string(tcx);
-if expected == found {
-expected = values.expected.sort_string(tcx);
-found = values.found.sort_string(tcx);
-}
+let expected = values.expected.sort_string(tcx);
+let found = values.found.sort_string(tcx);
 report_maybe_different(&expected, &found).into()
 }
 Traits(values) => {
@@ -1886,6 +1886,7 @@ fn bind_and_guard_matched_candidate<'pat>(
 // let place = Foo::new();
 // match place { Foo { .. } if { let tmp1 = &place; inspect(*tmp1) }
 // => { let tmp2 = place; feed(tmp2) }, ... }
+// ```
 //
 // And an input like:
 //
@@ -100,6 +100,9 @@ pub trait FileLoader {
 
 /// Read the contents of a UTF-8 file into memory.
 fn read_file(&self, path: &Path) -> io::Result<String>;
+
+/// Read the contents of a potentially non-UTF-8 file into memory.
+fn read_binary_file(&self, path: &Path) -> io::Result<Vec<u8>>;
 }
 
 /// A FileLoader that uses std::fs to load real files.
@@ -113,6 +116,10 @@ fn file_exists(&self, path: &Path) -> bool {
 fn read_file(&self, path: &Path) -> io::Result<String> {
 fs::read_to_string(path)
 }
+
+fn read_binary_file(&self, path: &Path) -> io::Result<Vec<u8>> {
+fs::read(path)
+}
 }
 
 /// This is a [SourceFile] identifier that is used to correlate source files between
@@ -220,9 +227,7 @@ pub fn load_file(&self, path: &Path) -> io::Result<Lrc<SourceFile>> {
 /// Unlike `load_file`, guarantees that no normalization like BOM-removal
 /// takes place.
 pub fn load_binary_file(&self, path: &Path) -> io::Result<Vec<u8>> {
-// Ideally, this should use `self.file_loader`, but it can't
-// deal with binary files yet.
-let bytes = fs::read(path)?;
+let bytes = self.file_loader.read_binary_file(path)?;
 
 // We need to add file to the `SourceMap`, so that it is present
 // in dep-info. There's also an edge case that file might be both
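With `read_binary_file` added to the trait, binary loads can now go through whatever loader is installed instead of hitting `fs::read` directly. The sketch below is a hedged illustration (the `MemoryLoader` type and its local copy of the trait are invented for the example, not rustc code) of a loader that serves files from an in-memory map:

```rust
use std::collections::HashMap;
use std::io;
use std::path::{Path, PathBuf};

// Local copy of the trait shape shown in the hunk above.
trait FileLoader {
    fn file_exists(&self, path: &Path) -> bool;
    fn read_file(&self, path: &Path) -> io::Result<String>;
    fn read_binary_file(&self, path: &Path) -> io::Result<Vec<u8>>;
}

struct MemoryLoader {
    files: HashMap<PathBuf, Vec<u8>>,
}

impl FileLoader for MemoryLoader {
    fn file_exists(&self, path: &Path) -> bool {
        self.files.contains_key(path)
    }

    fn read_file(&self, path: &Path) -> io::Result<String> {
        // Reuse the binary path and validate UTF-8 on top of it.
        let bytes = self.read_binary_file(path)?;
        String::from_utf8(bytes).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
    }

    fn read_binary_file(&self, path: &Path) -> io::Result<Vec<u8>> {
        self.files
            .get(path)
            .cloned()
            .ok_or_else(|| io::Error::new(io::ErrorKind::NotFound, "no such file"))
    }
}

fn main() {
    let mut files = HashMap::new();
    files.insert(PathBuf::from("blob.bin"), vec![0xde, 0xad]);
    let loader = MemoryLoader { files };
    assert!(loader.file_exists(Path::new("blob.bin")));
    assert_eq!(loader.read_binary_file(Path::new("blob.bin")).unwrap(), vec![0xde, 0xad]);
}
```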
@@ -248,7 +248,8 @@ pub(super) fn assemble_and_evaluate_candidates<G: GoalKind<'tcx>>(
 ///
 /// To deal with this, we first try to normalize the self type and add the candidates for the normalized
 /// self type to the list of candidates in case that succeeds. Note that we can't just eagerly return in
-/// this case as projections as self types add `
+/// this case as projections as self types add
+// FIXME complete the unfinished sentence above
 fn assemble_candidates_after_normalizing_self_ty<G: GoalKind<'tcx>>(
 &mut self,
 goal: Goal<'tcx, G>,
@@ -1024,7 +1024,7 @@ fn report_selection_error(
 // Can't show anything else useful, try to find similar impls.
 let impl_candidates = self.find_similar_impl_candidates(trait_predicate);
 if !self.report_similar_impl_candidates(
-impl_candidates,
+&impl_candidates,
 trait_ref,
 body_def_id,
 &mut err,
@@ -1060,7 +1060,7 @@ fn report_selection_error(
 let impl_candidates =
 self.find_similar_impl_candidates(trait_pred);
 self.report_similar_impl_candidates(
-impl_candidates,
+&impl_candidates,
 trait_ref,
 body_def_id,
 &mut err,
@@ -1068,6 +1068,13 @@ fn report_selection_error(
 );
 }
 }
+
+self.maybe_suggest_convert_to_slice(
+&mut err,
+trait_ref,
+impl_candidates.as_slice(),
+span,
+);
 }
 
 // Changing mutability doesn't make a difference to whether we have
@@ -1514,7 +1521,7 @@ fn find_similar_impl_candidates(
 
 fn report_similar_impl_candidates(
 &self,
-impl_candidates: Vec<ImplCandidate<'tcx>>,
+impl_candidates: &[ImplCandidate<'tcx>],
 trait_ref: ty::PolyTraitRef<'tcx>,
 body_def_id: LocalDefId,
 err: &mut Diagnostic,
@@ -2004,7 +2011,7 @@ fn find_similar_impl_candidates(
 
 fn report_similar_impl_candidates(
 &self,
-impl_candidates: Vec<ImplCandidate<'tcx>>,
+impl_candidates: &[ImplCandidate<'tcx>],
 trait_ref: ty::PolyTraitRef<'tcx>,
 body_def_id: LocalDefId,
 err: &mut Diagnostic,
@@ -2113,7 +2120,8 @@ fn report_similar_impl_candidates(
 // Prefer more similar candidates first, then sort lexicographically
 // by their normalized string representation.
 let mut normalized_impl_candidates_and_similarities = impl_candidates
-.into_iter()
+.iter()
+.copied()
 .map(|ImplCandidate { trait_ref, similarity }| {
 // FIXME(compiler-errors): This should be using `NormalizeExt::normalize`
 let normalized = self
@@ -2326,7 +2334,7 @@ fn maybe_report_ambiguity(&self, obligation: &PredicateObligation<'tcx>) {
 );
 if impl_candidates.len() < 10 {
 self.report_similar_impl_candidates(
-impl_candidates,
+impl_candidates.as_slice(),
 trait_ref,
 obligation.cause.body_id,
 &mut err,
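The changes above switch `report_similar_impl_candidates` from taking `Vec<ImplCandidate<'tcx>>` to `&[ImplCandidate<'tcx>]`, so callers keep ownership of the candidate list, which the new `maybe_suggest_convert_to_slice` call also needs. A hedged, generic illustration of that API shape (the `Candidate` and `report` names are made up for the example, not compiler code):

```rust
#[derive(Clone, Copy)]
struct Candidate(u32);

// Borrowing a slice instead of consuming a `Vec` lets the caller reuse the data.
fn report(candidates: &[Candidate]) -> usize {
    // `.iter().copied()` replaces the old `.into_iter()` once we only borrow.
    candidates.iter().copied().filter(|c| c.0 > 1).count()
}

fn main() {
    let candidates = vec![Candidate(1), Candidate(2), Candidate(3)];
    let reported = report(&candidates);
    // `candidates` is still available for a second use, e.g. another suggestion pass.
    assert_eq!(reported, 2);
    assert_eq!(candidates.len(), 3);
}
```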
@@ -1,7 +1,7 @@
 // ignore-tidy-filelength
 
 use super::{
-DefIdOrName, FindExprBySpan, Obligation, ObligationCause, ObligationCauseCode,
+DefIdOrName, FindExprBySpan, ImplCandidate, Obligation, ObligationCause, ObligationCauseCode,
 PredicateObligation,
 };
 
@@ -382,6 +382,14 @@ fn probe_assoc_types_at_expr(
 body_id: hir::HirId,
 param_env: ty::ParamEnv<'tcx>,
 ) -> Vec<Option<(Span, (DefId, Ty<'tcx>))>>;
+
+fn maybe_suggest_convert_to_slice(
+&self,
+err: &mut Diagnostic,
+trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>,
+candidate_impls: &[ImplCandidate<'tcx>],
+span: Span,
+);
 }
 
 fn predicate_constraint(generics: &hir::Generics<'_>, pred: ty::Predicate<'_>) -> (Span, String) {
@@ -2220,7 +2228,7 @@ fn maybe_note_obligation_cause_for_async_await(
 // - `BuiltinDerivedObligation` with a generator witness (A)
 // - `BuiltinDerivedObligation` with a generator (A)
 // - `BuiltinDerivedObligation` with `impl std::future::Future` (A)
-// - `BindingObligation` with `impl_send (Send requirement)
+// - `BindingObligation` with `impl_send` (Send requirement)
 //
 // The first obligation in the chain is the most useful and has the generator that captured
 // the type. The last generator (`outer_generator` below) has information about where the
@@ -3826,6 +3834,73 @@ fn probe_assoc_types_at_expr(
 }
 assocs_in_this_method
 }
+
+/// If the type that failed selection is an array or a reference to an array,
+/// but the trait is implemented for slices, suggest that the user converts
+/// the array into a slice.
+fn maybe_suggest_convert_to_slice(
+&self,
+err: &mut Diagnostic,
+trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>,
+candidate_impls: &[ImplCandidate<'tcx>],
+span: Span,
+) {
+// Three cases where we can make a suggestion:
+// 1. `[T; _]` (array of T)
+// 2. `&[T; _]` (reference to array of T)
+// 3. `&mut [T; _]` (mutable reference to array of T)
+let (element_ty, mut mutability) = match *trait_ref.skip_binder().self_ty().kind() {
+ty::Array(element_ty, _) => (element_ty, None),
+
+ty::Ref(_, pointee_ty, mutability) => match *pointee_ty.kind() {
+ty::Array(element_ty, _) => (element_ty, Some(mutability)),
+_ => return,
+},
+
+_ => return,
+};
+
+// Go through all the candidate impls to see if any of them is for
+// slices of `element_ty` with `mutability`.
+let mut is_slice = |candidate: Ty<'tcx>| match *candidate.kind() {
+ty::RawPtr(ty::TypeAndMut { ty: t, mutbl: m }) | ty::Ref(_, t, m) => {
+if matches!(*t.kind(), ty::Slice(e) if e == element_ty)
+&& m == mutability.unwrap_or(m)
+{
+// Use the candidate's mutability going forward.
+mutability = Some(m);
+true
+} else {
+false
+}
+}
+_ => false,
+};
+
+// Grab the first candidate that matches, if any, and make a suggestion.
+if let Some(slice_ty) = candidate_impls
+.iter()
+.map(|trait_ref| trait_ref.trait_ref.self_ty())
+.filter(|t| is_slice(*t))
+.next()
+{
+let msg = &format!("convert the array to a `{}` slice instead", slice_ty);
+
+if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) {
+let mut suggestions = vec![];
+if snippet.starts_with('&') {
+} else if let Some(hir::Mutability::Mut) = mutability {
+suggestions.push((span.shrink_to_lo(), "&mut ".into()));
+} else {
+suggestions.push((span.shrink_to_lo(), "&".into()));
+}
+suggestions.push((span.shrink_to_hi(), "[..]".into()));
+err.multipart_suggestion_verbose(msg, suggestions, Applicability::MaybeIncorrect);
+} else {
+err.span_help(span, msg);
+}
+}
+}
 }
 
 /// Add a hint to add a missing borrow or remove an unnecessary one.
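A hedged, standalone illustration of the user-facing effect (mirroring the new `issue-90528` tests added later in this commit): the call fails on an array but compiles once the array is borrowed as a slice, which is exactly what the new suggestion proposes.

```rust
trait Read {}
impl Read for &[u8] {}

fn wants_read(_: impl Read) {}

fn main() {
    // `wants_read([0u8]);` is rejected: `[u8; 1]: Read` is not satisfied.
    // The suggested fix converts the array to a `&[u8]` slice:
    wants_read(&[0u8][..]);
}
```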
@@ -18,7 +18,7 @@
 //! It defines a "skeleton" of how they should be folded.
 //! - `TypeSuperFoldable`. This is implemented only for each type of interest,
 //! and defines the folding "skeleton" for these types.
-//! - `TypeFolder`/`FallibleTypeFolder. One of these is implemented for each
+//! - `TypeFolder`/`FallibleTypeFolder`. One of these is implemented for each
 //! folder. This defines how types of interest are folded.
 //!
 //! This means each fold is a mixture of (a) generic folding operations, and (b)
@@ -741,6 +741,7 @@ macro_rules! describe {
 doc::EmbeddedBook,
 doc::EditionGuide,
 doc::StyleGuide,
+doc::Tidy,
 ),
 Kind::Dist => describe!(
 dist::Docs,
@@ -882,6 +882,7 @@ fn run(self, builder: &Builder<'_>) {
 // "cargo-credential-wincred",
 ]
 );
+tool_doc!(Tidy, "tidy", "src/tools/tidy", ["tidy"]);
 
 #[derive(Ord, PartialOrd, Debug, Copy, Clone, Hash, PartialEq, Eq)]
 pub struct ErrorIndex {
@@ -52,4 +52,5 @@ ENV SCRIPT python3 ../x.py --stage 2 test src/tools/expand-yaml-anchors && \
 reuse lint && \
 # Runs checks to ensure that there are no ES5 issues in our JS code.
 es-check es6 ../src/librustdoc/html/static/js/*.js && \
-eslint -c ../src/librustdoc/html/static/.eslintrc.js ../src/librustdoc/html/static/js/*.js
+eslint -c ../src/librustdoc/html/static/.eslintrc.js ../src/librustdoc/html/static/js/*.js && \
+eslint -c ../src/tools/rustdoc-js/.eslintrc.js ../src/tools/rustdoc-js/tester.js
src/tools/rustdoc-js/.eslintrc.js (new file, 96 lines)
@@ -0,0 +1,96 @@
+module.exports = {
+"env": {
+"browser": true,
+"node": true,
+"es6": true
+},
+"extends": "eslint:recommended",
+"parserOptions": {
+"ecmaVersion": 2015,
+"sourceType": "module"
+},
+"rules": {
+"linebreak-style": [
+"error",
+"unix"
+],
+"semi": [
+"error",
+"always"
+],
+"quotes": [
+"error",
+"double"
+],
+"linebreak-style": [
+"error",
+"unix"
+],
+"no-trailing-spaces": "error",
+"no-var": ["error"],
+"prefer-const": ["error"],
+"prefer-arrow-callback": ["error"],
+"brace-style": [
+"error",
+"1tbs",
+{ "allowSingleLine": false }
+],
+"keyword-spacing": [
+"error",
+{ "before": true, "after": true }
+],
+"arrow-spacing": [
+"error",
+{ "before": true, "after": true }
+],
+"key-spacing": [
+"error",
+{ "beforeColon": false, "afterColon": true, "mode": "strict" }
+],
+"func-call-spacing": ["error", "never"],
+"space-infix-ops": "error",
+"space-before-function-paren": ["error", "never"],
+"space-before-blocks": "error",
+"comma-dangle": ["error", "always-multiline"],
+"comma-style": ["error", "last"],
+"max-len": ["error", { "code": 100, "tabWidth": 4 }],
+"eol-last": ["error", "always"],
+"arrow-parens": ["error", "as-needed"],
+"no-unused-vars": [
+"error",
+{
+"argsIgnorePattern": "^_",
+"varsIgnorePattern": "^_"
+}
+],
+"eqeqeq": "error",
+"no-const-assign": "error",
+"no-debugger": "error",
+"no-dupe-args": "error",
+"no-dupe-else-if": "error",
+"no-dupe-keys": "error",
+"no-duplicate-case": "error",
+"no-ex-assign": "error",
+"no-fallthrough": "error",
+"no-invalid-regexp": "error",
+"no-import-assign": "error",
+"no-self-compare": "error",
+"no-template-curly-in-string": "error",
+"block-scoped-var": "error",
+"guard-for-in": "error",
+"no-alert": "error",
+"no-confusing-arrow": "error",
+"no-div-regex": "error",
+"no-floating-decimal": "error",
+"no-implicit-globals": "error",
+"no-implied-eval": "error",
+"no-label-var": "error",
+"no-lonely-if": "error",
+"no-mixed-operators": "error",
+"no-multi-assign": "error",
+"no-return-assign": "error",
+"no-script-url": "error",
+"no-sequences": "error",
+"no-div-regex": "error",
+}
+};
@@ -1,5 +1,5 @@
-const fs = require('fs');
-const path = require('path');
+const fs = require("fs");
+const path = require("path");
 
 function loadContent(content) {
 const Module = module.constructor;
@@ -15,7 +15,7 @@ function loadContent(content) {
 }
 
 function readFile(filePath) {
-return fs.readFileSync(filePath, 'utf8');
+return fs.readFileSync(filePath, "utf8");
 }
 
 function contentToDiffLine(key, value) {
@@ -25,41 +25,41 @@ function contentToDiffLine(key, value) {
 // This function is only called when no matching result was found and therefore will only display
 // the diff between the two items.
 function betterLookingDiff(entry, data) {
-let output = ' {\n';
-const spaces = ' ';
+let output = " {\n";
+const spaces = " ";
 for (const key in entry) {
-if (!entry.hasOwnProperty(key)) {
+if (!Object.prototype.hasOwnProperty.call(entry, key)) {
 continue;
 }
-if (!data || !data.hasOwnProperty(key)) {
-output += '-' + spaces + contentToDiffLine(key, entry[key]) + '\n';
+if (!data || !Object.prototype.hasOwnProperty.call(data, key)) {
+output += "-" + spaces + contentToDiffLine(key, entry[key]) + "\n";
 continue;
 }
 const value = data[key];
 if (value !== entry[key]) {
-output += '-' + spaces + contentToDiffLine(key, entry[key]) + '\n';
-output += '+' + spaces + contentToDiffLine(key, value) + '\n';
+output += "-" + spaces + contentToDiffLine(key, entry[key]) + "\n";
+output += "+" + spaces + contentToDiffLine(key, value) + "\n";
 } else {
-output += spaces + contentToDiffLine(key, value) + '\n';
+output += spaces + contentToDiffLine(key, value) + "\n";
 }
 }
-return output + ' }';
+return output + " }";
 }
 
 function lookForEntry(entry, data) {
 return data.findIndex(data_entry => {
 let allGood = true;
 for (const key in entry) {
-if (!entry.hasOwnProperty(key)) {
+if (!Object.prototype.hasOwnProperty.call(entry, key)) {
 continue;
 }
 let value = data_entry[key];
 // To make our life easier, if there is a "parent" type, we add it to the path.
-if (key === 'path' && data_entry['parent'] !== undefined) {
+if (key === "path" && data_entry["parent"] !== undefined) {
 if (value.length > 0) {
-value += '::' + data_entry['parent']['name'];
+value += "::" + data_entry["parent"]["name"];
 } else {
-value = data_entry['parent']['name'];
+value = data_entry["parent"]["name"];
 }
 }
 if (value !== entry[key]) {
@@ -95,7 +95,7 @@ function checkNeededFields(fullPath, expected, error_text, queryName, position)
 fieldsToCheck = [];
 }
 for (const field of fieldsToCheck) {
-if (!expected.hasOwnProperty(field)) {
+if (!Object.prototype.hasOwnProperty.call(expected, field)) {
 let text = `${queryName}==> Mandatory key \`${field}\` is not present`;
 if (fullPath.length > 0) {
 text += ` in field \`${fullPath}\``;
@@ -117,22 +117,22 @@ function valueCheck(fullPath, expected, result, error_text, queryName) {
 error_text.push(`${queryName}==> EXPECTED has extra value in array from field ` +
 `\`${fullPath}\` (position ${i}): \`${JSON.stringify(expected[i])}\``);
 } else {
-valueCheck(fullPath + '[' + i + ']', expected[i], result[i], error_text, queryName);
+valueCheck(fullPath + "[" + i + "]", expected[i], result[i], error_text, queryName);
 }
 }
 for (; i < result.length; ++i) {
 error_text.push(`${queryName}==> RESULT has extra value in array from field ` +
 `\`${fullPath}\` (position ${i}): \`${JSON.stringify(result[i])}\` ` +
-'compared to EXPECTED');
+"compared to EXPECTED");
 }
 } else if (expected !== null && typeof expected !== "undefined" &&
-expected.constructor == Object) {
+expected.constructor == Object) { // eslint-disable-line eqeqeq
 for (const key in expected) {
-if (!expected.hasOwnProperty(key)) {
+if (!Object.prototype.hasOwnProperty.call(expected, key)) {
 continue;
 }
-if (!result.hasOwnProperty(key)) {
-error_text.push('==> Unknown key "' + key + '"');
+if (!Object.prototype.hasOwnProperty.call(result, key)) {
+error_text.push("==> Unknown key \"" + key + "\"");
 break;
 }
 let result_v = result[key];
@@ -147,13 +147,13 @@ function valueCheck(fullPath, expected, result, error_text, queryName) {
 });
 result_v = result_v.join("");
 }
-const obj_path = fullPath + (fullPath.length > 0 ? '.' : '') + key;
+const obj_path = fullPath + (fullPath.length > 0 ? "." : "") + key;
 valueCheck(obj_path, expected[key], result_v, error_text, queryName);
 }
 } else {
 const expectedValue = JSON.stringify(expected);
 const resultValue = JSON.stringify(result);
-if (expectedValue != resultValue) {
+if (expectedValue !== resultValue) {
 error_text.push(`${queryName}==> Different values for field \`${fullPath}\`:\n` +
 `EXPECTED: \`${expectedValue}\`\nRESULT: \`${resultValue}\``);
 }
@@ -164,7 +164,7 @@ function runParser(query, expected, parseQuery, queryName) {
 const error_text = [];
 checkNeededFields("", expected, error_text, queryName, null);
 if (error_text.length === 0) {
-valueCheck('', expected, parseQuery(query), error_text, queryName);
+valueCheck("", expected, parseQuery(query), error_text, queryName);
 }
 return error_text;
 }
@@ -177,16 +177,16 @@ function runSearch(query, expected, doSearch, loadedFile, queryName) {
 const error_text = [];
 
 for (const key in expected) {
-if (!expected.hasOwnProperty(key)) {
+if (!Object.prototype.hasOwnProperty.call(expected, key)) {
 continue;
 }
-if (!results.hasOwnProperty(key)) {
-error_text.push('==> Unknown key "' + key + '"');
+if (!Object.prototype.hasOwnProperty.call(results, key)) {
+error_text.push("==> Unknown key \"" + key + "\"");
 break;
 }
 const entry = expected[key];
 
-if (exact_check == true && entry.length !== results[key].length) {
+if (exact_check && entry.length !== results[key].length) {
 error_text.push(queryName + "==> Expected exactly " + entry.length +
 " results but found " + results[key].length + " in '" + key + "'");
 }
@@ -268,7 +268,7 @@ function runCheck(loadedFile, key, callback) {
 function runChecks(testFile, doSearch, parseQuery) {
 let checkExpected = false;
 let checkParsed = false;
-let testFileContent = readFile(testFile) + 'exports.QUERY = QUERY;';
+let testFileContent = readFile(testFile) + "exports.QUERY = QUERY;";
 
 if (testFileContent.indexOf("FILTER_CRATE") !== -1) {
 testFileContent += "exports.FILTER_CRATE = FILTER_CRATE;";
@@ -277,11 +277,11 @@ function runChecks(testFile, doSearch, parseQuery) {
 }
 
 if (testFileContent.indexOf("\nconst EXPECTED") !== -1) {
-testFileContent += 'exports.EXPECTED = EXPECTED;';
+testFileContent += "exports.EXPECTED = EXPECTED;";
 checkExpected = true;
 }
 if (testFileContent.indexOf("\nconst PARSED") !== -1) {
-testFileContent += 'exports.PARSED = PARSED;';
+testFileContent += "exports.PARSED = PARSED;";
 checkParsed = true;
 }
 if (!checkParsed && !checkExpected) {
@@ -325,7 +325,7 @@ function loadSearchJS(doc_folder, resource_suffix) {
 const searchWords = searchModule.initSearch(searchIndex.searchIndex);
 
 return {
-doSearch: function (queryStr, filterCrate, currentCrate) {
+doSearch: function(queryStr, filterCrate, currentCrate) {
 return searchModule.execQuery(searchModule.parseQuery(queryStr), searchWords,
 filterCrate, currentCrate);
 },
@@ -361,22 +361,24 @@ function parseOptions(args) {
 };
 
 for (let i = 0; i < args.length; ++i) {
-if (correspondences.hasOwnProperty(args[i])) {
+const arg = args[i];
+if (Object.prototype.hasOwnProperty.call(correspondences, arg)) {
 i += 1;
 if (i >= args.length) {
-console.log("Missing argument after `" + args[i - 1] + "` option.");
+console.log("Missing argument after `" + arg + "` option.");
 return null;
 }
-if (args[i - 1] !== "--test-file") {
-opts[correspondences[args[i - 1]]] = args[i];
+const arg_value = args[i];
+if (arg !== "--test-file") {
+opts[correspondences[arg]] = arg_value;
 } else {
-opts[correspondences[args[i - 1]]].push(args[i]);
+opts[correspondences[arg]].push(arg_value);
 }
-} else if (args[i] === "--help") {
+} else if (arg === "--help") {
 showHelp();
 process.exit(0);
 } else {
-console.log("Unknown option `" + args[i] + "`.");
+console.log("Unknown option `" + arg + "`.");
 console.log("Use `--help` to see the list of options");
 return null;
 }
 }
@@ -405,17 +407,17 @@ function main(argv) {
 );
 let errors = 0;
 
-const doSearch = function (queryStr, filterCrate) {
+const doSearch = function(queryStr, filterCrate) {
 return parseAndSearch.doSearch(queryStr, filterCrate, opts["crate_name"]);
 };
 
 if (opts["test_file"].length !== 0) {
-opts["test_file"].forEach(function (file) {
+opts["test_file"].forEach(file => {
 process.stdout.write(`Testing ${file} ... `);
 errors += runChecks(file, doSearch, parseAndSearch.parseQuery);
 });
 } else if (opts["test_folder"].length !== 0) {
-fs.readdirSync(opts["test_folder"]).forEach(function (file) {
+fs.readdirSync(opts["test_folder"]).forEach(file => {
 if (!file.endsWith(".js")) {
 return;
 }
@@ -171,9 +171,9 @@ fn contains_ignore_directive(can_contain: bool, contents: &str, check: &str) ->
 }
 
 macro_rules! suppressible_tidy_err {
-($err:ident, $skip:ident, $msg:expr) => {
+($err:ident, $skip:ident, $msg:literal) => {
 if let Directive::Deny = $skip {
-$err($msg);
+$err(&format!($msg));
 } else {
 $skip = Directive::Ignore(true);
 }
@@ -300,10 +300,13 @@ fn skip(path: &Path) -> bool {
 contains_ignore_directive(can_contain, &contents, "leading-newlines");
 let mut skip_copyright = contains_ignore_directive(can_contain, &contents, "copyright");
 let mut skip_dbg = contains_ignore_directive(can_contain, &contents, "dbg");
+let mut skip_odd_backticks =
+contains_ignore_directive(can_contain, &contents, "odd-backticks");
 let mut leading_new_lines = false;
 let mut trailing_new_lines = 0;
 let mut lines = 0;
 let mut last_safety_comment = false;
+let mut comment_block: Option<(usize, usize)> = None;
 let is_test = file.components().any(|c| c.as_os_str() == "tests");
 // scanning the whole file for multiple needles at once is more efficient than
 // executing lines times needles separate searches.
@@ -351,7 +354,7 @@ fn skip(path: &Path) -> bool {
 suppressible_tidy_err!(
 err,
 skip_line_length,
-&format!("line longer than {max_columns} chars")
+"line longer than {max_columns} chars"
 );
 }
 if !is_style_file && line.contains('\t') {
@@ -415,15 +418,55 @@ fn skip(path: &Path) -> bool {
 
 // For now only enforce in compiler
 let is_compiler = || file.components().any(|c| c.as_os_str() == "compiler");
-if is_compiler()
-&& line.contains("//")
-&& line
-.chars()
-.collect::<Vec<_>>()
-.windows(4)
-.any(|cs| matches!(cs, ['.', ' ', ' ', last] if last.is_alphabetic()))
-{
-err(DOUBLE_SPACE_AFTER_DOT)
+
+if is_compiler() {
+if line.contains("//")
+&& line
+.chars()
+.collect::<Vec<_>>()
+.windows(4)
+.any(|cs| matches!(cs, ['.', ' ', ' ', last] if last.is_alphabetic()))
+{
+err(DOUBLE_SPACE_AFTER_DOT)
+}
+
+if filename.ends_with(".ftl") {
+let line_backticks = trimmed.chars().filter(|ch| *ch == '`').count();
+if line_backticks % 2 == 1 {
+suppressible_tidy_err!(err, skip_odd_backticks, "odd number of backticks");
+}
+} else if trimmed.contains("//") {
+let (start_line, mut backtick_count) = comment_block.unwrap_or((i + 1, 0));
+let line_backticks = trimmed.chars().filter(|ch| *ch == '`').count();
+let comment_text = trimmed.split("//").nth(1).unwrap();
+// This check ensures that we don't lint for code that has `//` in a string literal
+if line_backticks % 2 == 1 {
+backtick_count += comment_text.chars().filter(|ch| *ch == '`').count();
+}
+comment_block = Some((start_line, backtick_count));
+} else {
+if let Some((start_line, backtick_count)) = comment_block.take() {
+if backtick_count % 2 == 1 {
+let mut err = |msg: &str| {
+tidy_error!(bad, "{}:{start_line}: {msg}", file.display());
+};
+let block_len = (i + 1) - start_line;
+if block_len == 1 {
+suppressible_tidy_err!(
+err,
+skip_odd_backticks,
+"comment with odd number of backticks"
+);
+} else {
+suppressible_tidy_err!(
+err,
+skip_odd_backticks,
+"{block_len}-line comment block with odd number of backticks"
+);
+}
+}
+}
+}
+}
 }
 }
 if leading_new_lines {
@@ -441,7 +484,7 @@ fn skip(path: &Path) -> bool {
 n => suppressible_tidy_err!(
 err,
 skip_trailing_newlines,
-&format!("too many trailing newlines ({n})")
+"too many trailing newlines ({n})"
 ),
 };
 if lines > LINES {
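A hedged, standalone illustration (not rustc source) of what the new check in the hunk above flags for compiler sources: comment blocks whose total backtick count is odd get reported, while balanced blocks pass.

```rust
// One backtick only: the name `foo is never closed, so this block has an odd
// backtick count and the tidy check reports it.
fn flagged() {}

// Two backticks: `foo` is properly closed, so this block passes the check.
fn accepted() {}
```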
tests/ui/dst/issue-90528-unsizing-suggestion-1.rs (new file, 20 lines)
@@ -0,0 +1,20 @@
+// Issue #90528: provide helpful suggestions when a trait bound is unsatisfied
+// due to a missed unsizing coercion.
+//
+// This test exercises array literals and a trait implemented on immutable slices.
+
+trait Read {}
+
+impl Read for &[u8] {}
+
+fn wants_read(_: impl Read) {}
+
+fn main() {
+wants_read([0u8]);
+//~^ ERROR the trait bound `[u8; 1]: Read` is not satisfied
+wants_read(&[0u8]);
+//~^ ERROR the trait bound `&[u8; 1]: Read` is not satisfied
+wants_read(&[0u8][..]);
+wants_read(&mut [0u8]);
+//~^ ERROR the trait bound `&mut [u8; 1]: Read` is not satisfied
+}
tests/ui/dst/issue-90528-unsizing-suggestion-1.stderr (new file, 56 lines)
@@ -0,0 +1,56 @@
+error[E0277]: the trait bound `[u8; 1]: Read` is not satisfied
+--> $DIR/issue-90528-unsizing-suggestion-1.rs:13:16
+|
+LL | wants_read([0u8]);
+| ---------- ^^^^^ the trait `Read` is not implemented for `[u8; 1]`
+| |
+| required by a bound introduced by this call
+|
+= help: the trait `Read` is implemented for `&[u8]`
+note: required by a bound in `wants_read`
+--> $DIR/issue-90528-unsizing-suggestion-1.rs:10:23
+|
+LL | fn wants_read(_: impl Read) {}
+| ^^^^ required by this bound in `wants_read`
+help: convert the array to a `&[u8]` slice instead
+|
+LL | wants_read(&[0u8][..]);
+| + ++++
+
+error[E0277]: the trait bound `&[u8; 1]: Read` is not satisfied
+--> $DIR/issue-90528-unsizing-suggestion-1.rs:15:16
+|
+LL | wants_read(&[0u8]);
+| ---------- ^^^^^^ the trait `Read` is not implemented for `&[u8; 1]`
+| |
+| required by a bound introduced by this call
+|
+= help: the trait `Read` is implemented for `&[u8]`
+note: required by a bound in `wants_read`
+--> $DIR/issue-90528-unsizing-suggestion-1.rs:10:23
+|
+LL | fn wants_read(_: impl Read) {}
+| ^^^^ required by this bound in `wants_read`
+help: convert the array to a `&[u8]` slice instead
+|
+LL | wants_read(&[0u8][..]);
+| ++++
+
+error[E0277]: the trait bound `&mut [u8; 1]: Read` is not satisfied
+--> $DIR/issue-90528-unsizing-suggestion-1.rs:18:16
+|
+LL | wants_read(&mut [0u8]);
+| ---------- ^^^^^^^^^^ the trait `Read` is not implemented for `&mut [u8; 1]`
+| |
+| required by a bound introduced by this call
+|
+= help: the trait `Read` is implemented for `&[u8]`
+note: required by a bound in `wants_read`
+--> $DIR/issue-90528-unsizing-suggestion-1.rs:10:23
+|
+LL | fn wants_read(_: impl Read) {}
+| ^^^^ required by this bound in `wants_read`
+
+error: aborting due to 3 previous errors
+
+For more information about this error, try `rustc --explain E0277`.
tests/ui/dst/issue-90528-unsizing-suggestion-2.rs (new file, 28 lines)
@@ -0,0 +1,28 @@
+// Issue #90528: provide helpful suggestions when a trait bound is unsatisfied
+// due to a missed unsizing coercion.
+//
+// This test exercises array variables and a trait implemented on immutable slices.
+
+trait Read {}
+
+impl Read for &[u8] {}
+
+fn wants_read(_: impl Read) {}
+
+fn main() {
+let x = [0u8];
+wants_read(x);
+//~^ ERROR the trait bound `[u8; 1]: Read` is not satisfied
+wants_read(&x);
+//~^ ERROR the trait bound `&[u8; 1]: Read` is not satisfied
+wants_read(&x[..]);
+
+let x = &[0u8];
+wants_read(x);
+//~^ ERROR the trait bound `&[u8; 1]: Read` is not satisfied
+wants_read(&x);
+//~^ ERROR the trait bound `&&[u8; 1]: Read` is not satisfied
+wants_read(*x);
+//~^ ERROR the trait bound `[u8; 1]: Read` is not satisfied
+wants_read(&x[..]);
+}
tests/ui/dst/issue-90528-unsizing-suggestion-2.stderr (new file, 94 lines)
@@ -0,0 +1,94 @@
+error[E0277]: the trait bound `[u8; 1]: Read` is not satisfied
+--> $DIR/issue-90528-unsizing-suggestion-2.rs:14:16
+|
+LL | wants_read(x);
+| ---------- ^ the trait `Read` is not implemented for `[u8; 1]`
+| |
+| required by a bound introduced by this call
+|
+= help: the trait `Read` is implemented for `&[u8]`
+note: required by a bound in `wants_read`
+--> $DIR/issue-90528-unsizing-suggestion-2.rs:10:23
+|
+LL | fn wants_read(_: impl Read) {}
+| ^^^^ required by this bound in `wants_read`
+help: convert the array to a `&[u8]` slice instead
+|
+LL | wants_read(&x[..]);
+| + ++++
+
+error[E0277]: the trait bound `&[u8; 1]: Read` is not satisfied
+--> $DIR/issue-90528-unsizing-suggestion-2.rs:16:16
+|
+LL | wants_read(&x);
+| ---------- ^^ the trait `Read` is not implemented for `&[u8; 1]`
+| |
+| required by a bound introduced by this call
+|
+= help: the trait `Read` is implemented for `&[u8]`
+note: required by a bound in `wants_read`
+--> $DIR/issue-90528-unsizing-suggestion-2.rs:10:23
+|
+LL | fn wants_read(_: impl Read) {}
+| ^^^^ required by this bound in `wants_read`
+help: convert the array to a `&[u8]` slice instead
+|
+LL | wants_read(&x[..]);
+| ++++
+
+error[E0277]: the trait bound `&[u8; 1]: Read` is not satisfied
+--> $DIR/issue-90528-unsizing-suggestion-2.rs:21:16
+|
+LL | wants_read(x);
+| ---------- ^ the trait `Read` is not implemented for `&[u8; 1]`
+| |
+| required by a bound introduced by this call
+|
+= help: the trait `Read` is implemented for `&[u8]`
+note: required by a bound in `wants_read`
+--> $DIR/issue-90528-unsizing-suggestion-2.rs:10:23
+|
+LL | fn wants_read(_: impl Read) {}
+| ^^^^ required by this bound in `wants_read`
+help: convert the array to a `&[u8]` slice instead
+|
+LL | wants_read(&x[..]);
+| + ++++
+
+error[E0277]: the trait bound `&&[u8; 1]: Read` is not satisfied
+--> $DIR/issue-90528-unsizing-suggestion-2.rs:23:16
+|
+LL | wants_read(&x);
+| ---------- ^^ the trait `Read` is not implemented for `&&[u8; 1]`
+| |
+| required by a bound introduced by this call
+|
+= help: the trait `Read` is implemented for `&[u8]`
+note: required by a bound in `wants_read`
+--> $DIR/issue-90528-unsizing-suggestion-2.rs:10:23
+|
+LL | fn wants_read(_: impl Read) {}
+| ^^^^ required by this bound in `wants_read`
+
+error[E0277]: the trait bound `[u8; 1]: Read` is not satisfied
+--> $DIR/issue-90528-unsizing-suggestion-2.rs:25:16
+|
+LL | wants_read(*x);
+| ---------- ^^ the trait `Read` is not implemented for `[u8; 1]`
+| |
+| required by a bound introduced by this call
+|
+= help: the trait `Read` is implemented for `&[u8]`
+note: required by a bound in `wants_read`
+--> $DIR/issue-90528-unsizing-suggestion-2.rs:10:23
+|
+LL | fn wants_read(_: impl Read) {}
+| ^^^^ required by this bound in `wants_read`
+help: convert the array to a `&[u8]` slice instead
+|
+LL | wants_read(&*x[..]);
+| + ++++
+
+error: aborting due to 5 previous errors
+
+For more information about this error, try `rustc --explain E0277`.
tests/ui/dst/issue-90528-unsizing-suggestion-3.rs (new file)
@ -0,0 +1,22 @@
// Issue #90528: provide helpful suggestions when a trait bound is unsatisfied
// due to a missed unsizing coercion.
//
// This test exercises array literals and a trait implemented on mutable slices.

trait Write {}

impl Write for &mut [u8] {}

fn wants_write(_: impl Write) {}

fn main() {
    wants_write([0u8]);
    //~^ ERROR the trait bound `[u8; 1]: Write` is not satisfied
    wants_write(&mut [0u8]);
    //~^ ERROR the trait bound `&mut [u8; 1]: Write` is not satisfied
    wants_write(&mut [0u8][..]);
    wants_write(&[0u8]);
    //~^ ERROR the trait bound `&[u8; 1]: Write` is not satisfied
    wants_write(&[0u8][..]);
    //~^ ERROR the trait bound `&[u8]: Write` is not satisfied
}
tests/ui/dst/issue-90528-unsizing-suggestion-3.stderr (new file)
@ -0,0 +1,75 @@
error[E0277]: the trait bound `[u8; 1]: Write` is not satisfied
  --> $DIR/issue-90528-unsizing-suggestion-3.rs:13:17
   |
LL |     wants_write([0u8]);
   |     ----------- ^^^^^ the trait `Write` is not implemented for `[u8; 1]`
   |     |
   |     required by a bound introduced by this call
   |
   = help: the trait `Write` is implemented for `&mut [u8]`
note: required by a bound in `wants_write`
  --> $DIR/issue-90528-unsizing-suggestion-3.rs:10:24
   |
LL | fn wants_write(_: impl Write) {}
   |                        ^^^^^ required by this bound in `wants_write`
help: convert the array to a `&mut [u8]` slice instead
   |
LL |     wants_write(&mut [0u8][..]);
   |                 ++++      ++++

error[E0277]: the trait bound `&mut [u8; 1]: Write` is not satisfied
  --> $DIR/issue-90528-unsizing-suggestion-3.rs:15:17
   |
LL |     wants_write(&mut [0u8]);
   |     ----------- ^^^^^^^^^^ the trait `Write` is not implemented for `&mut [u8; 1]`
   |     |
   |     required by a bound introduced by this call
   |
   = help: the trait `Write` is implemented for `&mut [u8]`
note: required by a bound in `wants_write`
  --> $DIR/issue-90528-unsizing-suggestion-3.rs:10:24
   |
LL | fn wants_write(_: impl Write) {}
   |                        ^^^^^ required by this bound in `wants_write`
help: convert the array to a `&mut [u8]` slice instead
   |
LL |     wants_write(&mut [0u8][..]);
   |                           ++++

error[E0277]: the trait bound `&[u8; 1]: Write` is not satisfied
  --> $DIR/issue-90528-unsizing-suggestion-3.rs:18:17
   |
LL |     wants_write(&[0u8]);
   |     ----------- ^^^^^^ the trait `Write` is not implemented for `&[u8; 1]`
   |     |
   |     required by a bound introduced by this call
   |
   = help: the trait `Write` is implemented for `&mut [u8]`
note: required by a bound in `wants_write`
  --> $DIR/issue-90528-unsizing-suggestion-3.rs:10:24
   |
LL | fn wants_write(_: impl Write) {}
   |                        ^^^^^ required by this bound in `wants_write`

error[E0277]: the trait bound `&[u8]: Write` is not satisfied
  --> $DIR/issue-90528-unsizing-suggestion-3.rs:20:17
   |
LL |     wants_write(&[0u8][..]);
   |     ----------- ^^^^^^^^^^ the trait `Write` is not implemented for `&[u8]`
   |     |
   |     required by a bound introduced by this call
   |
   = help: the trait `Write` is implemented for `&mut [u8]`
note: required by a bound in `wants_write`
  --> $DIR/issue-90528-unsizing-suggestion-3.rs:10:24
   |
LL | fn wants_write(_: impl Write) {}
   |                        ^^^^^ required by this bound in `wants_write`
help: consider changing this borrow's mutability
   |
LL |     wants_write(&mut [0u8][..]);
   |                 ~~~~

error: aborting due to 4 previous errors

For more information about this error, try `rustc --explain E0277`.
tests/ui/dst/issue-90528-unsizing-suggestion-4.rs (new file)
@ -0,0 +1,26 @@
// Issue #90528: provide helpful suggestions when a trait bound is unsatisfied
// due to a missed unsizing coercion.
//
// This test exercises array variables and a trait implemented on mutable slices.

trait Write {}

impl Write for &mut [u8] {}

fn wants_write(_: impl Write) {}

fn main() {
    let mut x = [0u8];
    wants_write(x);
    //~^ ERROR the trait bound `[u8; 1]: Write` is not satisfied
    wants_write(&mut x);
    //~^ ERROR the trait bound `&mut [u8; 1]: Write` is not satisfied
    wants_write(&mut x[..]);

    let x = &mut [0u8];
    wants_write(x);
    //~^ ERROR the trait bound `&mut [u8; 1]: Write` is not satisfied
    wants_write(*x);
    //~^ ERROR the trait bound `[u8; 1]: Write` is not satisfied
    wants_write(&mut x[..]);
}
tests/ui/dst/issue-90528-unsizing-suggestion-4.stderr (new file)
@ -0,0 +1,79 @@
error[E0277]: the trait bound `[u8; 1]: Write` is not satisfied
  --> $DIR/issue-90528-unsizing-suggestion-4.rs:14:17
   |
LL |     wants_write(x);
   |     ----------- ^ the trait `Write` is not implemented for `[u8; 1]`
   |     |
   |     required by a bound introduced by this call
   |
   = help: the trait `Write` is implemented for `&mut [u8]`
note: required by a bound in `wants_write`
  --> $DIR/issue-90528-unsizing-suggestion-4.rs:10:24
   |
LL | fn wants_write(_: impl Write) {}
   |                        ^^^^^ required by this bound in `wants_write`
help: convert the array to a `&mut [u8]` slice instead
   |
LL |     wants_write(&mut x[..]);
   |                 ++++  ++++

error[E0277]: the trait bound `&mut [u8; 1]: Write` is not satisfied
  --> $DIR/issue-90528-unsizing-suggestion-4.rs:16:17
   |
LL |     wants_write(&mut x);
   |     ----------- ^^^^^^ the trait `Write` is not implemented for `&mut [u8; 1]`
   |     |
   |     required by a bound introduced by this call
   |
   = help: the trait `Write` is implemented for `&mut [u8]`
note: required by a bound in `wants_write`
  --> $DIR/issue-90528-unsizing-suggestion-4.rs:10:24
   |
LL | fn wants_write(_: impl Write) {}
   |                        ^^^^^ required by this bound in `wants_write`
help: convert the array to a `&mut [u8]` slice instead
   |
LL |     wants_write(&mut x[..]);
   |                       ++++

error[E0277]: the trait bound `&mut [u8; 1]: Write` is not satisfied
  --> $DIR/issue-90528-unsizing-suggestion-4.rs:21:17
   |
LL |     wants_write(x);
   |     ----------- ^ the trait `Write` is not implemented for `&mut [u8; 1]`
   |     |
   |     required by a bound introduced by this call
   |
   = help: the trait `Write` is implemented for `&mut [u8]`
note: required by a bound in `wants_write`
  --> $DIR/issue-90528-unsizing-suggestion-4.rs:10:24
   |
LL | fn wants_write(_: impl Write) {}
   |                        ^^^^^ required by this bound in `wants_write`
help: convert the array to a `&mut [u8]` slice instead
   |
LL |     wants_write(&mut x[..]);
   |                 ++++  ++++

error[E0277]: the trait bound `[u8; 1]: Write` is not satisfied
  --> $DIR/issue-90528-unsizing-suggestion-4.rs:23:17
   |
LL |     wants_write(*x);
   |     ----------- ^^ the trait `Write` is not implemented for `[u8; 1]`
   |     |
   |     required by a bound introduced by this call
   |
   = help: the trait `Write` is implemented for `&mut [u8]`
note: required by a bound in `wants_write`
  --> $DIR/issue-90528-unsizing-suggestion-4.rs:10:24
   |
LL | fn wants_write(_: impl Write) {}
   |                        ^^^^^ required by this bound in `wants_write`
help: convert the array to a `&mut [u8]` slice instead
   |
LL |     wants_write(&mut *x[..]);
   |                 ++++   ++++

error: aborting due to 4 previous errors

For more information about this error, try `rustc --explain E0277`.
tests/ui/traits/new-solver/lazy-nested-obligations-1.rs (new file)
@ -0,0 +1,13 @@
// check-pass
// compile-flags: -Ztrait-solver=next
// Issue 94358

fn foo<C>(_: C)
where
    for <'a> &'a C: IntoIterator,
    for <'a> <&'a C as IntoIterator>::IntoIter: ExactSizeIterator,
{}

fn main() {
    foo::<_>(vec![true, false]);
}
tests/ui/traits/new-solver/lazy-nested-obligations-2.rs (new file)
@ -0,0 +1,23 @@
// check-pass
// compile-flags: -Ztrait-solver=next
// Issue 95863

pub trait With {
    type F;
}

impl With for i32 {
    type F = fn(&str);
}

fn f(_: &str) {}

fn main() {
    let _: V<i32> = V(f);
    pub struct V<T: With>(<T as With>::F);

    pub enum E3<T: With> {
        Var(<T as With>::F),
    }
    let _: E3<i32> = E3::Var(f);
}
tests/ui/traits/new-solver/lazy-nested-obligations-3.rs (new file)
@ -0,0 +1,38 @@
// check-pass
// compile-flags: -Ztrait-solver=next
// Issue 96750

use std::marker::PhantomData;

trait AsyncFn<Arg> {
    type Output;
}
trait RequestFamily {
    type Type<'a>;
}
trait Service {}

struct MyFn;
impl AsyncFn<String> for MyFn {
    type Output = ();
}

impl RequestFamily for String {
    type Type<'a> = String;
}

struct ServiceFromAsyncFn<F, Req>(F, PhantomData<Req>);

impl<F, Req, O> Service for ServiceFromAsyncFn<F, Req>
where
    Req: RequestFamily,
    F: AsyncFn<Req>,
    F: for<'a> AsyncFn<Req::Type<'a>, Output = O>,
{
}

fn assert_service() -> impl Service {
    ServiceFromAsyncFn(MyFn, PhantomData)
}

fn main() {}
tests/ui/traits/new-solver/normalize-param-env-1.rs (new file)
@ -0,0 +1,40 @@
// check-pass
// compile-flags: -Ztrait-solver=next
// Issue 108933

trait Add<Rhs> {
    type Sum;
}

impl Add<()> for () {
    type Sum = ();
}

type Unit = <() as Add<()>>::Sum;

trait Trait<C> {
    type Output;
}

fn f<T>()
where
    T: Trait<()>,
    <T as Trait<()>>::Output: Sized,
{
}

fn g<T>()
where
    T: Trait<Unit>,
    <T as Trait<()>>::Output: Sized,
{
}

fn h<T>()
where
    T: Trait<()>,
    <T as Trait<Unit>>::Output: Sized,
{
}

fn main() {}
tests/ui/traits/new-solver/normalize-param-env-2.rs (new file)
@ -0,0 +1,26 @@
// check-pass
// compile-flags: -Ztrait-solver=next
// Issue 92505

trait A<T> {
    type I;

    fn f()
    where
        Self::I: A<T>,
    {
    }
}

impl<T> A<T> for () {
    type I = ();

    fn f()
    where
        Self::I: A<T>,
    {
        <() as A<T>>::f();
    }
}

fn main() {}
tests/ui/traits/new-solver/normalize-param-env-3.rs (new file)
@ -0,0 +1,32 @@
// check-pass
// compile-flags: -Ztrait-solver=next
// Issue 100177

trait GenericTrait<T> {}

trait Channel<I>: GenericTrait<Self::T> {
    type T;
}

trait Sender {
    type Msg;

    fn send<C>()
    where
        C: Channel<Self::Msg>;
}

impl<T> Sender for T {
    type Msg = ();

    fn send<C>()
    where
        C: Channel<Self::Msg>,
    {
    }
}

// This works
fn foo<I, C>(ch: C) where C: Channel<I> {}

fn main() {}