chore: remove duplicate words
parent c3774be741
commit ada9fda7c3

@@ -403,7 +403,7 @@ fn codegen_switchint_terminator(
 //
 // Why only in unoptimized builds?
 // - In unoptimized builds LLVM uses FastISel which does not support switches, so it
-// must fall back to the to the slower SelectionDAG isel. Therefore, using `br` gives
+// must fall back to the slower SelectionDAG isel. Therefore, using `br` gives
 // significant compile time speedups for unoptimized builds.
 // - In optimized builds the above doesn't hold, and using `br` sometimes results in
 // worse generated code because LLVM can no longer tell that the value being switched

@@ -245,7 +245,7 @@ pub(crate) fn tag_for_variant(
 // The tag of a `Single` enum is like the tag of the niched
 // variant: there's no tag as the discriminant is encoded
 // entirely implicitly. If `write_discriminant` ever hits this
-// case, we do a "validation read" to ensure the the right
+// case, we do a "validation read" to ensure the right
 // discriminant is encoded implicitly, so any attempt to write
 // the wrong discriminant for a `Single` enum will reliably
 // result in UB.

@@ -499,7 +499,7 @@ fn check_and_add_sugg_binding(&mut self, binding: LetStmt) -> bool {
 }
 }

-// If the shadowed binding has an an itializer expression,
+// If the shadowed binding has an itializer expression,
 // use the initializer expression'ty to try to find the method again.
 // For example like: `let mut x = Vec::new();`,
 // `Vec::new()` is the itializer expression.

@@ -968,7 +968,7 @@ fn report_no_match_method_error(
 }

 // Make sure that, if any traits other than the found ones were involved,
-// we don't don't report an unimplemented trait.
+// we don't report an unimplemented trait.
 // We don't want to say that `iter::Cloned` is not an iterator, just
 // because of some non-Clone item being iterated over.
 for (predicate, _parent_pred, _cause) in unsatisfied_predicates {

@@ -237,7 +237,7 @@ pub fn closure_captures(self, def_id: LocalDefId) -> &'tcx [&'tcx ty::CapturedPl
 /// Eg: 1. `foo.x` which is represented using `projections=[Field(x)]` is an ancestor of
 /// `foo.x.y` which is represented using `projections=[Field(x), Field(y)]`.
 /// Note both `foo.x` and `foo.x.y` start off of the same root variable `foo`.
-/// 2. Since we only look at the projections here function will return `bar.x` as an a valid
+/// 2. Since we only look at the projections here function will return `bar.x` as a valid
 /// ancestor of `foo.x.y`. It's the caller's responsibility to ensure that both projections
 /// list are being applied to the same root variable.
 pub fn is_ancestor_or_same_capture(

@@ -130,7 +130,7 @@ fn convert_to_hir_projections_and_truncate_for_capture(
 /// Eg: 1. `foo.x` which is represented using `projections=[Field(x)]` is an ancestor of
 /// `foo.x.y` which is represented using `projections=[Field(x), Field(y)]`.
 /// Note both `foo.x` and `foo.x.y` start off of the same root variable `foo`.
-/// 2. Since we only look at the projections here function will return `bar.x` as an a valid
+/// 2. Since we only look at the projections here function will return `bar.x` as a valid
 /// ancestor of `foo.x.y`. It's the caller's responsibility to ensure that both projections
 /// list are being applied to the same root variable.
 fn is_ancestor_or_same_capture(

@@ -138,7 +138,7 @@ fn to_pat(&mut self, cv: mir::Const<'tcx>) -> Box<Pat<'tcx>> {
 // lints, but no errors), double-check that all types in the const implement
 // `PartialEq`. Even if we have a valtree, we may have found something
 // in there with non-structural-equality, meaning we match using `PartialEq`
-// and we hence have to check that that impl exists.
+// and we hence have to check if that impl exists.
 // This is all messy but not worth cleaning up: at some point we'll emit
 // a hard error when we don't have a valtree or when we find something in
 // the valtree that is not structural; then this can all be made a lot simpler.

@@ -8,7 +8,7 @@
 //! `Value` is interned as a `VnIndex`, which allows us to cheaply compute identical values.
 //!
 //! From those assignments, we construct a mapping `VnIndex -> Vec<(Local, Location)>` of available
-//! values, the locals in which they are stored, and a the assignment location.
+//! values, the locals in which they are stored, and the assignment location.
 //!
 //! In a second pass, we traverse all (non SSA) assignments `x = rvalue` and operands. For each
 //! one, we compute the `VnIndex` of the rvalue. If this `VnIndex` is associated to a constant, we

@@ -519,7 +519,7 @@ fn run_runtime_lowering_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
 &add_subtyping_projections::Subtyper, // calling this after reveal_all ensures that we don't deal with opaque types
 &elaborate_drops::ElaborateDrops,
 // This will remove extraneous landing pads which are no longer
-// necessary as well as well as forcing any call in a non-unwinding
+// necessary as well as forcing any call in a non-unwinding
 // function calling a possibly-unwinding function to abort the process.
 &abort_unwinding_calls::AbortUnwindingCalls,
 // AddMovesForPackedDrops needs to run after drop

@@ -816,7 +816,7 @@ fn promote_temp(&mut self, temp: Local) -> Local {
 mut func, mut args, call_source: desugar, fn_span, ..
 } => {
 // This promoted involves a function call, so it may fail to evaluate.
-// Let's make sure it is added to `required_consts` so that that failure cannot get lost.
+// Let's make sure it is added to `required_consts` so that failure cannot get lost.
 self.add_to_required = true;

 self.visit_operand(&mut func, loc);

@@ -102,7 +102,7 @@ fn ty_ref_to_pub_struct(tcx: TyCtxt<'_>, ty: &hir::Ty<'_>) -> Publicness {
 Publicness::new(true, true)
 }

-/// Determine if a work from the worklist is coming from the a `#[allow]`
+/// Determine if a work from the worklist is coming from a `#[allow]`
 /// or a `#[expect]` of `dead_code`
 #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
 enum ComesFromAllowExpect {

@@ -155,7 +155,7 @@ fn write_all(&mut self, buf: &[u8]) {
 if std::intrinsics::unlikely(self.buffered > flush_threshold) {
 self.flush();
 }
-// SAFETY: We checked above that that N < self.buffer_empty().len(),
+// SAFETY: We checked above that N < self.buffer_empty().len(),
 // and if isn't, flush ensures that our empty buffer is now BUF_SIZE.
 // We produce a post-mono error if N > BUF_SIZE.
 let buf = unsafe { self.buffer_empty().first_chunk_mut::<N>().unwrap_unchecked() };

@@ -4114,7 +4114,7 @@ fn point_at_chain<G: EmissionGuarantee>(
 expr = binding_expr;
 }
 if let hir::Node::Param(param) = parent {
-// ...and it is a an fn argument.
+// ...and it is an fn argument.
 let prev_ty = self.resolve_vars_if_possible(
 typeck_results
 .node_type_opt(param.hir_id)

@@ -1296,7 +1296,7 @@ fn visit_expr(&mut self, ex: &'v hir::Expr<'v>) -> Self::Result {
 expr = binding_expr;
 }
 if let hir::Node::Param(_param) = parent {
-// ...and it is a an fn argument.
+// ...and it is an fn argument.
 break;
 }
 }

@@ -230,7 +230,7 @@ pub struct AssertParamIsCopy<T: Copy + ?Sized> {
 pub unsafe trait CloneToUninit {
 /// Performs copy-assignment from `self` to `dst`.
 ///
-/// This is analogous to to `std::ptr::write(dst, self.clone())`,
+/// This is analogous to `std::ptr::write(dst, self.clone())`,
 /// except that `self` may be a dynamically-sized type ([`!Sized`](Sized)).
 ///
 /// Before this function is called, `dst` may point to uninitialized memory.

@@ -159,7 +159,7 @@ pub(crate) struct GenericShunt<'a, I, R> {
 residual: &'a mut Option<R>,
 }

-/// Process the given iterator as if it yielded a the item's `Try::Output`
+/// Process the given iterator as if it yielded the item's `Try::Output`
 /// type instead. Any `Try::Residual`s encountered will stop the inner iterator
 /// and be propagated back to the overall result.
 pub(crate) fn try_process<I, T, R, F, U>(iter: I, mut f: F) -> ChangeOutputType<I::Item, U>

@@ -157,7 +157,7 @@ fn compute_product_approx(q: i64, w: u64, precision: usize) -> (u64, u64) {
 // Need to do a second multiplication to get better precision
 // for the lower product. This will always be exact
 // where q is < 55, since 5^55 < 2^128. If this wraps,
-// then we need to need to round up the hi product.
+// then we need to round up the hi product.
 let (_, second_hi) = full_multiplication(w, hi5);
 first_lo = first_lo.wrapping_add(second_hi);
 if second_hi > first_lo {

@@ -28,7 +28,7 @@
 //!
 //! # Layout
 //! Tagged values are 64 bits, with the 2 least significant bits used for the
-//! tag. This means there are there are 4 "variants":
+//! tag. This means there are 4 "variants":
 //!
 //! - **Tag 0b00**: The first variant is equivalent to
 //! `ErrorData::SimpleMessage`, and holds a `&'static SimpleMessage` directly.

@@ -183,7 +183,7 @@ pub fn argc_argv() -> (isize, *const *const c_char) {
 // Use `_NSGetArgc` and `_NSGetArgv` on Apple platforms.
 //
 // Even though these have underscores in their names, they've been available
-// since since the first versions of both macOS and iOS, and are declared in
+// since the first versions of both macOS and iOS, and are declared in
 // the header `crt_externs.h`.
 //
 // NOTE: This header was added to the iOS 13.0 SDK, which has been the source

@@ -190,7 +190,7 @@ unsafe fn allocate(layout: Layout, zeroed: bool) -> *mut u8 {
 // it, it is safe to write a header directly before it.
 unsafe { ptr::write((aligned as *mut Header).sub(1), Header(ptr)) };

-// SAFETY: The returned pointer does not point to the to the start of an allocated block,
+// SAFETY: The returned pointer does not point to the start of an allocated block,
 // but there is a header readable directly before it containing the location of the start
 // of the block.
 aligned

@@ -16,7 +16,7 @@
 /// and it may be desirable to do so consistently for style.
 ///
 /// However, removing the brackets also introduces a public constant named after the struct,
-/// so this is not just a syntactic simplification but an an API change, and adding them back
+/// so this is not just a syntactic simplification but an API change, and adding them back
 /// is a *breaking* API change.
 ///
 /// ### Example

@@ -44,7 +44,7 @@
 /// and it may be desirable to do so consistently for style.
 ///
 /// However, removing the brackets also introduces a public constant named after the variant,
-/// so this is not just a syntactic simplification but an an API change, and adding them back
+/// so this is not just a syntactic simplification but an API change, and adding them back
 /// is a *breaking* API change.
 ///
 /// ### Example

|
@ -258,7 +258,7 @@ fn is_value_unfrozen_raw(
|
|||||||
// e.g. implementing `has_frozen_variant` described above, and not running this function
|
// e.g. implementing `has_frozen_variant` described above, and not running this function
|
||||||
// when the type doesn't have any frozen variants would be the 'correct' way for the 2nd
|
// when the type doesn't have any frozen variants would be the 'correct' way for the 2nd
|
||||||
// case (that actually removes another suboptimal behavior (I won't say 'false positive') where,
|
// case (that actually removes another suboptimal behavior (I won't say 'false positive') where,
|
||||||
// similar to 2., but with the a frozen variant) (e.g. borrowing
|
// similar to 2., but with a frozen variant) (e.g. borrowing
|
||||||
// `borrow_interior_mutable_const::enums::AssocConsts::TO_BE_FROZEN_VARIANT`).
|
// `borrow_interior_mutable_const::enums::AssocConsts::TO_BE_FROZEN_VARIANT`).
|
||||||
// I chose this way because unfrozen enums as assoc consts are rare (or, hopefully, none).
|
// I chose this way because unfrozen enums as assoc consts are rare (or, hopefully, none).
|
||||||
matches!(err, ErrorHandled::TooGeneric(..))
|
matches!(err, ErrorHandled::TooGeneric(..))
|
||||||
|
@@ -48,7 +48,7 @@ fn drop(&mut self) {
 }

 // If a path isn't directly captured but requires Drop, then this tests that migrations aren't
-// needed if the a parent to that path is captured.
+// needed if the parent to that path is captured.
 fn test_precise_analysis_parent_captured_1() {
 let t = ConstainsDropField(Foo(10), Foo(20));

@@ -60,7 +60,7 @@ fn test_precise_analysis_parent_captured_1() {
 }

 // If a path isn't directly captured but requires Drop, then this tests that migrations aren't
-// needed if the a parent to that path is captured.
+// needed if the parent to that path is captured.
 fn test_precise_analysis_parent_captured_2() {
 let t = ContainsAndImplsDrop(Foo(10));

@@ -11,7 +11,7 @@ impl<T, S> Trait<T> for i32 {
 type Assoc = String;
 }

-// Should not not trigger suggestion here...
+// Should not trigger suggestion here...
 impl<T, S> Trait<T, S> for () {}
 //~^ ERROR trait takes 1 generic argument but 2 generic arguments were supplied