Rollup merge of #103018 - Rageking8:more-dupe-word-typos, r=TaKO8Ki

More dupe word typos

I only picked those changes (from the regex search) that I am pretty certain don't change meaning and are just typo fixes. Do correct me if any fix is undesirable and I will revert it. Thanks.
Dylan DPC 2022-10-14 16:19:15 +05:30 committed by GitHub
commit 77064b7f0a
47 changed files with 68 additions and 72 deletions


@ -414,7 +414,7 @@ fn build(self) -> UniversalRegions<'tcx> {
let typeck_root_def_id = self.infcx.tcx.typeck_root_def_id(self.mir_def.did.to_def_id());
// If this is is a 'root' body (not a closure/generator/inline const), then
// If this is a 'root' body (not a closure/generator/inline const), then
// there are no extern regions, so the local regions start at the same
// position as the (empty) sub-list of extern regions
let first_local_index = if self.mir_def.did.to_def_id() == typeck_root_def_id {


@ -65,7 +65,7 @@ pub(crate) fn get_file_name(crate_name: &str, crate_type: &str) -> String {
}
/// Similar to `get_file_name`, but converts any dashes (`-`) in the `crate_name` to
/// underscores (`_`). This is specially made for the the rustc and cargo wrappers
/// underscores (`_`). This is specially made for the rustc and cargo wrappers
/// which have a dash in the name, and that is not allowed in a crate name.
pub(crate) fn get_wrapper_file_name(crate_name: &str, crate_type: &str) -> String {
let crate_name = crate_name.replace('-', "_");


@ -193,7 +193,7 @@ pub(super) fn from_casted_value<'tcx>(
kind: StackSlotKind::ExplicitSlot,
// FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to
// specify stack slot alignment.
// Stack slot size may be bigger for for example `[u8; 3]` which is packed into an `i32`.
// Stack slot size may be bigger for example `[u8; 3]` which is packed into an `i32`.
// It may also be smaller for example when the type is a wrapper around an integer with a
// larger alignment than the integer.
size: (std::cmp::max(abi_param_size, layout_size) + 15) / 16 * 16,
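
For illustration, a minimal sketch of the rounding performed by the `size` expression above, written as a hypothetical standalone function; the sample sizes for `[u8; 3]` passed as an `i32` (layout size 3, ABI param size 4) are assumptions:

// Sketch: round the larger of the two sizes up to the next multiple of 16 bytes.
fn slot_size(abi_param_size: u32, layout_size: u32) -> u32 {
    (std::cmp::max(abi_param_size, layout_size) + 15) / 16 * 16
}

fn main() {
    assert_eq!(slot_size(4, 3), 16);  // both sizes fit in a single 16-byte slot
    assert_eq!(slot_size(24, 3), 32); // a larger ABI size rounds up to 32
}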


@ -129,7 +129,7 @@ fn new(tcx: TyCtxt<'_>, version: u32) -> Self {
// LLVM Coverage Mapping Format version 6 (zero-based encoded as 5)
// requires setting the first filename to the compilation directory.
// Since rustc generates coverage maps with relative paths, the
// compilation directory can be combined with the the relative paths
// compilation directory can be combined with the relative paths
// to get absolute paths, if needed.
let working_dir = tcx
.sess


@ -64,7 +64,7 @@ pub struct ConstEvalErr<'tcx> {
impl<'tcx> ConstEvalErr<'tcx> {
/// Turn an interpreter error into something to report to the user.
/// As a side-effect, if RUSTC_CTFE_BACKTRACE is set, this prints the backtrace.
/// Should be called only if the error is actually going to to be reported!
/// Should be called only if the error is actually going to be reported!
pub fn new<'mir, M: Machine<'mir, 'tcx>>(
ecx: &InterpCx<'mir, 'tcx, M>,
error: InterpErrorInfo<'tcx>,


@ -794,7 +794,7 @@ pub fn leak_report(&self, static_roots: &[AllocId]) -> usize {
todo.extend(static_roots);
while let Some(id) = todo.pop() {
if reachable.insert(id) {
// This is a new allocation, add the allocation it points to to `todo`.
// This is a new allocation, add the allocation it points to `todo`.
if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
todo.extend(
alloc.provenance().values().filter_map(|prov| prov.get_alloc_id()),


@ -29,8 +29,8 @@ pub fn new(num_nodes: usize, mut edge_pairs: Vec<(N, N)>) -> Self {
// Store the *target* of each edge into `edge_targets`.
let edge_targets: Vec<N> = edge_pairs.iter().map(|&(_, target)| target).collect();
// Create the *edge starts* array. We are iterating over over
// the (sorted) edge pairs. We maintain the invariant that the
// Create the *edge starts* array. We are iterating over the
// (sorted) edge pairs. We maintain the invariant that the
// length of the `node_starts` array is enough to store the
// current source node -- so when we see that the source node
// for an edge is greater than the current length, we grow the
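
A minimal sketch of the construction this comment describes, with assumed names and signature (not the actual rustc code): building the edge-starts array from edge pairs that are already sorted by source node.

// Sketch: compute where each node's outgoing edges begin in a list of
// edge pairs sorted by source node.
fn edge_starts(num_nodes: usize, sorted_edges: &[(usize, usize)]) -> Vec<usize> {
    let mut node_starts = Vec::with_capacity(num_nodes + 1);
    for (index, &(source, _target)) in sorted_edges.iter().enumerate() {
        // Grow `node_starts` until it covers the current source node; each new
        // entry records the index at which that node's edge range begins.
        while node_starts.len() <= source {
            node_starts.push(index);
        }
    }
    // Nodes with no outgoing edges, plus a final sentinel, start at the end.
    while node_starts.len() <= num_nodes {
        node_starts.push(sorted_edges.len());
    }
    node_starts
}

fn main() {
    let edges = [(0, 1), (0, 2), (2, 0)];
    // Node 0 owns edges 0..2, node 1 owns none, node 2 owns edge 2..3.
    assert_eq!(edge_starts(3, &edges), vec![0, 2, 2, 3]);
}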


@ -327,7 +327,7 @@ impl InvocationKind {
fn placeholder_visibility(&self) -> Option<ast::Visibility> {
// HACK: For unnamed fields placeholders should have the same visibility as the actual
// fields because for tuple structs/variants resolve determines visibilities of their
// constructor using these field visibilities before attributes on them are are expanded.
// constructor using these field visibilities before attributes on them are expanded.
// The assumption is that the attribute expansion cannot change field visibilities,
// and it holds because only inert attributes are supported in this position.
match self {


@ -72,7 +72,7 @@ pub(super) fn type_inference_fallback(&self) -> bool {
//
// - Unconstrained ints are replaced with `i32`.
//
// - Unconstrained floats are replaced with with `f64`.
// - Unconstrained floats are replaced with `f64`.
//
// - Non-numerics may get replaced with `()` or `!`, depending on
// how they were categorized by `calculate_diverging_fallback`


@ -210,7 +210,7 @@ fn reinit_expr(&mut self, expr: &hir::Expr<'_>) {
}
/// For an expression with an uninhabited return type (e.g. a function that returns !),
/// this adds a self edge to to the CFG to model the fact that the function does not
/// this adds a self edge to the CFG to model the fact that the function does not
/// return.
fn handle_uninhabited_return(&mut self, expr: &Expr<'tcx>) {
let ty = self.typeck_results.expr_ty(expr);


@ -352,7 +352,7 @@ fn final_upvar_tys(&self, closure_id: LocalDefId) -> Vec<Ty<'tcx>> {
/// and that the path can be captured with required capture kind (depending on use in closure,
/// move closure etc.)
///
/// Returns the set of of adjusted information along with the inferred closure kind and span
/// Returns the set of adjusted information along with the inferred closure kind and span
/// associated with the closure kind inference.
///
/// Note that we *always* infer a minimal kind, even if


@ -1150,8 +1150,8 @@ pub fn next_region_var_in_universe(
/// Return the universe that the region `r` was created in. For
/// most regions (e.g., `'static`, named regions from the user,
/// etc) this is the root universe U0. For inference variables or
/// placeholders, however, it will return the universe which which
/// they are associated.
/// placeholders, however, it will return the universe which they
/// are associated.
pub fn universe_of_region(&self, r: ty::Region<'tcx>) -> ty::UniverseIndex {
self.inner.borrow_mut().unwrap_region_constraints().universe(r)
}


@ -3310,7 +3310,7 @@ fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'tcx>) {
/// explicitly.
///
/// To access a library from a binary target within the same crate,
/// use `your_crate_name::` as the path path instead of `lib::`:
/// use `your_crate_name::` as the path instead of `lib::`:
///
/// ```rust,compile_fail
/// // bar/src/lib.rs


@ -3937,7 +3937,7 @@
///
/// The compiler disables the automatic implementation if an explicit one
/// exists for given type constructor. The exact rules governing this
/// are currently unsound and quite subtle and and will be modified in the future.
/// are currently unsound, quite subtle, and will be modified in the future.
/// This change will cause the automatic implementation to be disabled in more
/// cases, potentially breaking some code.
pub SUSPICIOUS_AUTO_TRAIT_IMPLS,


@ -43,7 +43,7 @@ fn machine_usize_to_isize(&self, val: u64) -> i64 {
let val = val as i64;
// Now wrap-around into the machine_isize range.
if val > self.machine_isize_max() {
// This can only happen the the ptr size is < 64, so we know max_usize_plus_1 fits into
// This can only happen if the ptr size is < 64, so we know max_usize_plus_1 fits into
// i64.
debug_assert!(self.pointer_size().bits() < 64);
let max_usize_plus_1 = 1u128 << self.pointer_size().bits();
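
A hedged sketch of the wrap-around this comment describes, as a hypothetical standalone function rather than the interpreter helper itself; the 32-bit pointer width in the example is an assumption:

// Sketch: wrap a target usize value into the target isize range for a given
// pointer width in bits.
fn wrap_usize_to_isize(val: u64, ptr_bits: u32) -> i64 {
    let isize_max = i64::MAX >> (64 - ptr_bits);
    let val = val as i64;
    if ptr_bits < 64 && val > isize_max {
        // This branch is only reachable when the pointer size is < 64,
        // so 2^ptr_bits fits into an i64.
        let max_usize_plus_1 = 1i64 << ptr_bits;
        val - max_usize_plus_1
    } else {
        val
    }
}

fn main() {
    // On a hypothetical 32-bit target, the address 0xFFFF_FFFF wraps to -1.
    assert_eq!(wrap_usize_to_isize(0xFFFF_FFFF, 32), -1);
    assert_eq!(wrap_usize_to_isize(5, 32), 5);
}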


@ -13,8 +13,7 @@
//!
//! There are three groups of traits involved in each traversal.
//! - `TypeFoldable`. This is implemented once for many types, including:
//! - Types of interest, for which the the methods delegate to the
//! folder.
//! - Types of interest, for which the methods delegate to the folder.
//! - All other types, including generic containers like `Vec` and `Option`.
//! It defines a "skeleton" of how they should be folded.
//! - `TypeSuperFoldable`. This is implemented only for each type of interest,


@ -958,7 +958,7 @@ pub fn needs_drop(self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> boo
}
}
/// Checks if `ty` has has a significant drop.
/// Checks if `ty` has a significant drop.
///
/// Note that this method can return false even if `ty` has a destructor
/// attached; even if that is the case then the adt has been marked with


@ -10,8 +10,7 @@
//!
//! There are three groups of traits involved in each traversal.
//! - `TypeVisitable`. This is implemented once for many types, including:
//! - Types of interest, for which the the methods delegate to the
//! visitor.
//! - Types of interest, for which the methods delegate to the visitor.
//! - All other types, including generic containers like `Vec` and `Option`.
//! It defines a "skeleton" of how they should be visited.
//! - `TypeSuperVisitable`. This is implemented only for each type of interest,


@ -885,7 +885,7 @@ fn is_useful<'p, 'tcx>(
// that has the potential to trigger the `non_exhaustive_omitted_patterns` lint.
// To understand the workings checkout `Constructor::split` and `SplitWildcard::new/into_ctors`
if is_non_exhaustive_and_wild
// We check that the match has a wildcard pattern and that that wildcard is useful,
// We check that the match has a wildcard pattern and that wildcard is useful,
// meaning there are variants that are covered by the wildcard. Without the check
// for `witness_preference` the lint would trigger on `if let NonExhaustiveEnum::A = foo {}`
&& usefulness.is_useful() && matches!(witness_preference, RealArm)


@ -2232,7 +2232,7 @@ pub fn recover_const_param_declaration(
///
/// When encountering code like `foo::< bar + 3 >` or `foo::< bar - baz >` we suggest
/// `foo::<{ bar + 3 }>` and `foo::<{ bar - baz }>`, respectively. We only provide a suggestion
/// if we think that that the resulting expression would be well formed.
/// if we think that the resulting expression would be well formed.
pub fn recover_const_arg(
&mut self,
start: Span,


@ -268,7 +268,7 @@ fn check_inline(&self, hir_id: HirId, attr: &Attribute, span: Span, target: Targ
}
// FIXME(#65833): We permit associated consts to have an `#[inline]` attribute with
// just a lint, because we previously erroneously allowed it and some crates used it
// accidentally, to to be compatible with crates depending on them, we can't throw an
// accidentally, to be compatible with crates depending on them, we can't throw an
// error here.
Target::AssocConst => {
self.tcx.emit_spanned_lint(
@ -376,7 +376,7 @@ fn check_naked(&self, hir_id: HirId, attr: &Attribute, span: Span, target: Targe
| Target::Method(MethodKind::Trait { body: true } | MethodKind::Inherent) => true,
// FIXME(#80564): We permit struct fields, match arms and macro defs to have an
// `#[allow_internal_unstable]` attribute with just a lint, because we previously
// erroneously allowed it and some crates used it accidentally, to to be compatible
// erroneously allowed it and some crates used it accidentally, to be compatible
// with crates depending on them, we can't throw an error here.
Target::Field | Target::Arm | Target::MacroDef => {
self.inline_attr_str_error_with_macro_def(hir_id, attr, "naked");
@ -456,7 +456,7 @@ fn check_track_caller(
Target::Fn | Target::Method(..) | Target::ForeignFn | Target::Closure => true,
// FIXME(#80564): We permit struct fields, match arms and macro defs to have an
// `#[track_caller]` attribute with just a lint, because we previously
// erroneously allowed it and some crates used it accidentally, to to be compatible
// erroneously allowed it and some crates used it accidentally, to be compatible
// with crates depending on them, we can't throw an error here.
Target::Field | Target::Arm | Target::MacroDef => {
for attr in attrs {
@ -485,7 +485,7 @@ fn check_non_exhaustive(
Target::Struct | Target::Enum | Target::Variant => true,
// FIXME(#80564): We permit struct fields, match arms and macro defs to have an
// `#[non_exhaustive]` attribute with just a lint, because we previously
// erroneously allowed it and some crates used it accidentally, to to be compatible
// erroneously allowed it and some crates used it accidentally, to be compatible
// with crates depending on them, we can't throw an error here.
Target::Field | Target::Arm | Target::MacroDef => {
self.inline_attr_str_error_with_macro_def(hir_id, attr, "non_exhaustive");
@ -507,7 +507,7 @@ fn check_marker(&self, hir_id: HirId, attr: &Attribute, span: Span, target: Targ
Target::Trait => true,
// FIXME(#80564): We permit struct fields, match arms and macro defs to have an
// `#[marker]` attribute with just a lint, because we previously
// erroneously allowed it and some crates used it accidentally, to to be compatible
// erroneously allowed it and some crates used it accidentally, to be compatible
// with crates depending on them, we can't throw an error here.
Target::Field | Target::Arm | Target::MacroDef => {
self.inline_attr_str_error_with_macro_def(hir_id, attr, "marker");
@ -566,7 +566,7 @@ fn check_target_feature(
}
// FIXME(#80564): We permit struct fields, match arms and macro defs to have an
// `#[target_feature]` attribute with just a lint, because we previously
// erroneously allowed it and some crates used it accidentally, to to be compatible
// erroneously allowed it and some crates used it accidentally, to be compatible
// with crates depending on them, we can't throw an error here.
Target::Field | Target::Arm | Target::MacroDef => {
self.inline_attr_str_error_with_macro_def(hir_id, attr, "target_feature");
@ -1205,7 +1205,7 @@ fn check_cold(&self, hir_id: HirId, attr: &Attribute, span: Span, target: Target
Target::Fn | Target::Method(..) | Target::ForeignFn | Target::Closure => {}
// FIXME(#80564): We permit struct fields, match arms and macro defs to have an
// `#[cold]` attribute with just a lint, because we previously
// erroneously allowed it and some crates used it accidentally, to to be compatible
// erroneously allowed it and some crates used it accidentally, to be compatible
// with crates depending on them, we can't throw an error here.
Target::Field | Target::Arm | Target::MacroDef => {
self.inline_attr_str_error_with_macro_def(hir_id, attr, "cold");
@ -1247,7 +1247,7 @@ fn check_link_name(&self, hir_id: HirId, attr: &Attribute, span: Span, target: T
Target::ForeignFn | Target::ForeignStatic => {}
// FIXME(#80564): We permit struct fields, match arms and macro defs to have an
// `#[link_name]` attribute with just a lint, because we previously
// erroneously allowed it and some crates used it accidentally, to to be compatible
// erroneously allowed it and some crates used it accidentally, to be compatible
// with crates depending on them, we can't throw an error here.
Target::Field | Target::Arm | Target::MacroDef => {
self.inline_attr_str_error_with_macro_def(hir_id, attr, "link_name");
@ -1281,7 +1281,7 @@ fn check_no_link(&self, hir_id: HirId, attr: &Attribute, span: Span, target: Tar
Target::ExternCrate => true,
// FIXME(#80564): We permit struct fields, match arms and macro defs to have an
// `#[no_link]` attribute with just a lint, because we previously
// erroneously allowed it and some crates used it accidentally, to to be compatible
// erroneously allowed it and some crates used it accidentally, to be compatible
// with crates depending on them, we can't throw an error here.
Target::Field | Target::Arm | Target::MacroDef => {
self.inline_attr_str_error_with_macro_def(hir_id, attr, "no_link");
@ -1311,7 +1311,7 @@ fn check_export_name(
Target::Method(..) if self.is_impl_item(hir_id) => true,
// FIXME(#80564): We permit struct fields, match arms and macro defs to have an
// `#[export_name]` attribute with just a lint, because we previously
// erroneously allowed it and some crates used it accidentally, to to be compatible
// erroneously allowed it and some crates used it accidentally, to be compatible
// with crates depending on them, we can't throw an error here.
Target::Field | Target::Arm | Target::MacroDef => {
self.inline_attr_str_error_with_macro_def(hir_id, attr, "export_name");
@ -1503,7 +1503,7 @@ fn check_link_section(&self, hir_id: HirId, attr: &Attribute, span: Span, target
Target::Static | Target::Fn | Target::Method(..) => {}
// FIXME(#80564): We permit struct fields, match arms and macro defs to have an
// `#[link_section]` attribute with just a lint, because we previously
// erroneously allowed it and some crates used it accidentally, to to be compatible
// erroneously allowed it and some crates used it accidentally, to be compatible
// with crates depending on them, we can't throw an error here.
Target::Field | Target::Arm | Target::MacroDef => {
self.inline_attr_str_error_with_macro_def(hir_id, attr, "link_section");
@ -1528,7 +1528,7 @@ fn check_no_mangle(&self, hir_id: HirId, attr: &Attribute, span: Span, target: T
Target::Method(..) if self.is_impl_item(hir_id) => {}
// FIXME(#80564): We permit struct fields, match arms and macro defs to have an
// `#[no_mangle]` attribute with just a lint, because we previously
// erroneously allowed it and some crates used it accidentally, to to be compatible
// erroneously allowed it and some crates used it accidentally, to be compatible
// with crates depending on them, we can't throw an error here.
Target::Field | Target::Arm | Target::MacroDef => {
self.inline_attr_str_error_with_macro_def(hir_id, attr, "no_mangle");
@ -1782,7 +1782,7 @@ fn check_allow_internal_unstable(
Target::MacroDef => true,
// FIXME(#80564): We permit struct fields and match arms to have an
// `#[allow_internal_unstable]` attribute with just a lint, because we previously
// erroneously allowed it and some crates used it accidentally, to to be compatible
// erroneously allowed it and some crates used it accidentally, to be compatible
// with crates depending on them, we can't throw an error here.
Target::Field | Target::Arm => {
self.inline_attr_str_error_without_macro_def(
@ -1877,7 +1877,7 @@ fn check_rustc_allow_const_fn_unstable(
}
// FIXME(#80564): We permit struct fields and match arms to have an
// `#[allow_internal_unstable]` attribute with just a lint, because we previously
// erroneously allowed it and some crates used it accidentally, to to be compatible
// erroneously allowed it and some crates used it accidentally, to be compatible
// with crates depending on them, we can't throw an error here.
Target::Field | Target::Arm | Target::MacroDef => {
self.inline_attr_str_error_with_macro_def(hir_id, attr, "allow_internal_unstable");


@ -2060,7 +2060,7 @@ struct Finalize {
/// Span of the whole path or some its characteristic fragment.
/// E.g. span of `b` in `foo::{a, b, c}`, or full span for regular paths.
path_span: Span,
/// Span of the path start, suitable for prepending something to to it.
/// Span of the path start, suitable for prepending something to it.
/// E.g. span of `foo` in `foo::{a, b, c}`, or full span for regular paths.
root_span: Span,
/// Whether to report privacy errors or silently return "no resolution" for them,


@ -186,7 +186,7 @@ pub struct Options {
/// Remap source path prefixes in all output (messages, object files, debug, etc.).
remap_path_prefix: Vec<(PathBuf, PathBuf)> [TRACKED_NO_CRATE_HASH],
/// Base directory containing the `src/` for the Rust standard library, and
/// potentially `rustc` as well, if we can can find it. Right now it's always
/// potentially `rustc` as well, if we can find it. Right now it's always
/// `$sysroot/lib/rustlib/src/rust` (i.e. the `rustup` `rust-src` component).
///
/// This directory is what the virtual `/rustc/$hash` is translated back to,


@ -540,7 +540,7 @@ fn encode_ty<'tcx>(
let mut s = String::new();
let def_id = adt_def.0.did;
if options.contains(EncodeTyOptions::GENERALIZE_REPR_C) && adt_def.repr().c() {
// For for cross-language CFI support, the encoding must be compatible at the FFI
// For cross-language CFI support, the encoding must be compatible at the FFI
// boundary. For instance:
//
// struct type1 {};


@ -47,7 +47,7 @@ pub enum PassMode {
/// Pass the argument indirectly via a hidden pointer.
/// The `extra_attrs` value, if any, is for the extra data (vtable or length)
/// which indicates that it refers to an unsized rvalue.
/// `on_stack` defines that the the value should be passed at a fixed
/// `on_stack` defines that the value should be passed at a fixed
/// stack offset in accordance to the ABI rather than passed using a
/// pointer. This corresponds to the `byval` LLVM argument attribute.
Indirect { attrs: ArgAttributes, extra_attrs: Option<ArgAttributes>, on_stack: bool },


@ -193,7 +193,7 @@ fn overlap_within_probe<'cx, 'tcx>(
}
}
// We disable the leak when when creating the `snapshot` by using
// We disable the leak when creating the `snapshot` by using
// `infcx.probe_maybe_disable_leak_check`.
if infcx.leak_check(true, snapshot).is_err() {
debug!("overlap: leak check failed");


@ -101,7 +101,7 @@ pub fn try_unify(&self, a: AbstractConst<'tcx>, b: AbstractConst<'tcx>) -> bool
a_uv == b_uv
}
// FIXME(generic_const_exprs): We may want to either actually try
// to evaluate `a_ct` and `b_ct` if they are are fully concrete or something like
// to evaluate `a_ct` and `b_ct` if they are fully concrete or something like
// this, for now we just return false here.
_ => false,
}


@ -566,7 +566,7 @@ fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
.unwrap_or_else(|| ty.super_fold_with(self).into())
};
// For cases like #95134 we would like to catch overflows early
// otherwise they slip away away and cause ICE.
// otherwise they slip away and cause ICE.
let recursion_limit = self.tcx().recursion_limit();
if !recursion_limit.value_within_limit(self.depth)
// HACK: Don't overflow when running cargo doc see #100991


@ -264,7 +264,7 @@ fn adt_consider_insignificant_dtor<'tcx>(
if is_marked_insig {
// In some cases like `std::collections::HashMap` where the struct is a wrapper around
// a type that is a Drop type, and the wrapped type (eg: `hashbrown::HashMap`) lies
// outside stdlib, we might choose to still annotate the the wrapper (std HashMap) with
// outside stdlib, we might choose to still annotate the wrapper (std HashMap) with
// `rustc_insignificant_dtor`, even if the type itself doesn't have a `Drop` impl.
Some(DtorType::Insignificant)
} else if adt_def.destructor(tcx).is_some() {


@ -1613,7 +1613,7 @@ pub fn pop_front(&mut self) -> Option<T> {
None
} else {
// We can't point to the node that we pop. Copying the behavior of
// `remove_current`, we move on the the next node in the sequence.
// `remove_current`, we move on to the next node in the sequence.
// If the list is of length 1 then we end pointing to the "ghost"
// node at index 0, which is expected.
if self.list.head == self.current {


@ -692,7 +692,7 @@ fn intersperse(self, separator: Self::Item) -> Intersperse<Self>
/// assert_eq!(it.next(), Some(NotClone(99))); // The separator.
/// assert_eq!(it.next(), Some(NotClone(1))); // The next element from `v`.
/// assert_eq!(it.next(), Some(NotClone(99))); // The separator.
/// assert_eq!(it.next(), Some(NotClone(2))); // The last element from from `v`.
/// assert_eq!(it.next(), Some(NotClone(2))); // The last element from `v`.
/// assert_eq!(it.next(), None); // The iterator is finished.
/// ```
///


@ -647,7 +647,7 @@ pub const fn as_mut_ptr(&mut self) -> *mut T {
/// implements the [`Copy`] trait or not. When using multiple copies of the
/// data (by calling `assume_init_read` multiple times, or first calling
/// `assume_init_read` and then [`assume_init`]), it is your responsibility
/// to ensure that that data may indeed be duplicated.
/// to ensure that data may indeed be duplicated.
///
/// [inv]: #initialization-invariant
/// [`assume_init`]: MaybeUninit::assume_init


@ -76,7 +76,7 @@ pub fn compute_float<F: RawFloat>(q: i64, mut w: u64) -> BiasedFp {
return BiasedFp { f: mantissa, e: power2 };
}
// Need to handle rounding ties. Normally, we need to round up,
// but if we fall right in between and and we have an even basis, we
// but if we fall right in between and we have an even basis, we
// need to round down.
//
// This will only occur if:


@ -1,5 +1,5 @@
/// These functions compute the integer logarithm of their type, assuming
/// that someone has already checked that the the value is strictly positive.
/// that someone has already checked that the value is strictly positive.
// 0 < val <= u8::MAX
#[inline]
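
For illustration only, a sketch of such an integer logarithm for `u8` under the same strictly-positive assumption; the formulation via `leading_zeros` is an assumption, not the actual implementation:

// Sketch: base-2 integer logarithm of a u8, assuming the caller has already
// checked that `val` is strictly positive.
fn log2_u8(val: u8) -> u32 {
    debug_assert!(val > 0);
    u8::BITS - 1 - val.leading_zeros()
}

fn main() {
    assert_eq!(log2_u8(1), 0);
    assert_eq!(log2_u8(64), 6);
    assert_eq!(log2_u8(u8::MAX), 7);
}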


@ -694,7 +694,7 @@ pub fn mask(self, mask: usize) -> *const T {
/// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
///
/// This computes the same value that [`offset_from`](#method.offset_from)
/// would compute, but with the added precondition that that the offset is
/// would compute, but with the added precondition that the offset is
/// guaranteed to be non-negative. This method is equivalent to
/// `usize::from(self.offset_from(origin)).unwrap_unchecked()`,
/// but it provides slightly more information to the optimizer, which can
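
A small usage sketch of the precondition being documented, using the stable `offset_from` and converting the distance, which is known to be non-negative here, to `usize`; the array and pointers are made up for illustration:

fn main() {
    let a = [1u8, 2, 3, 4];
    let base = a.as_ptr();
    let elem = a[3..].as_ptr();
    // SAFETY: both pointers are derived from the same allocation (`a`), and
    // `elem` is at or after `base`, so the element distance is non-negative.
    let signed = unsafe { elem.offset_from(base) };
    assert_eq!(usize::try_from(signed).unwrap(), 3);
}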


@ -867,7 +867,7 @@ pub const fn guaranteed_ne(self, other: *mut T) -> Option<bool>
/// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
///
/// This computes the same value that [`offset_from`](#method.offset_from)
/// would compute, but with the added precondition that that the offset is
/// would compute, but with the added precondition that the offset is
/// guaranteed to be non-negative. This method is equivalent to
/// `usize::from(self.offset_from(origin)).unwrap_unchecked()`,
/// but it provides slightly more information to the optimizer, which can


@ -1642,8 +1642,8 @@ pub fn fetch_byte_sub(&self, val: usize, order: Ordering) -> *mut T {
/// and the argument `val`, and stores a pointer with provenance of the
/// current pointer and the resulting address.
///
/// This is equivalent equivalent to using [`map_addr`] to atomically
/// perform `ptr = ptr.map_addr(|a| a | val)`. This can be used in tagged
/// This is equivalent to using [`map_addr`] to atomically perform
/// `ptr = ptr.map_addr(|a| a | val)`. This can be used in tagged
/// pointer schemes to atomically set tag bits.
///
/// **Caveat**: This operation returns the previous value. To compute the
@ -1693,8 +1693,8 @@ pub fn fetch_or(&self, val: usize, order: Ordering) -> *mut T {
/// pointer, and the argument `val`, and stores a pointer with provenance of
/// the current pointer and the resulting address.
///
/// This is equivalent equivalent to using [`map_addr`] to atomically
/// perform `ptr = ptr.map_addr(|a| a & val)`. This can be used in tagged
/// This is equivalent to using [`map_addr`] to atomically perform
/// `ptr = ptr.map_addr(|a| a & val)`. This can be used in tagged
/// pointer schemes to atomically unset tag bits.
///
/// **Caveat**: This operation returns the previous value. To compute the
@ -1743,8 +1743,8 @@ pub fn fetch_and(&self, val: usize, order: Ordering) -> *mut T {
/// pointer, and the argument `val`, and stores a pointer with provenance of
/// the current pointer and the resulting address.
///
/// This is equivalent equivalent to using [`map_addr`] to atomically
/// perform `ptr = ptr.map_addr(|a| a ^ val)`. This can be used in tagged
/// This is equivalent to using [`map_addr`] to atomically perform
/// `ptr = ptr.map_addr(|a| a ^ val)`. This can be used in tagged
/// pointer schemes to atomically toggle tag bits.
///
/// **Caveat**: This operation returns the previous value. To compute the
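
The pointer methods documented above may not be available on stable, so here is a hedged sketch of the same tag-bit idea using `AtomicUsize` as a stand-in for the pointer's address; the starting address is an arbitrary aligned example value:

use std::sync::atomic::{AtomicUsize, Ordering};

fn main() {
    // The low bit of a 4-byte-aligned address is free to use as a tag.
    let addr = AtomicUsize::new(0x1000);
    addr.fetch_or(0b1, Ordering::Relaxed);   // atomically set the tag bit
    assert_eq!(addr.load(Ordering::Relaxed), 0x1001);
    addr.fetch_and(!0b1, Ordering::Relaxed); // atomically clear it again
    assert_eq!(addr.load(Ordering::Relaxed), 0x1000);
    addr.fetch_xor(0b1, Ordering::Relaxed);  // atomically toggle it
    assert_eq!(addr.load(Ordering::Relaxed), 0x1001);
}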


@ -172,7 +172,7 @@ macro_rules! check {
// Calcutate the string length for the smallest overflowing number:
let max_len_string = format_radix(num, base as u128);
// Ensure that that string length is deemed to potentially overflow:
// Ensure that string length is deemed to potentially overflow:
assert!(can_overflow::<$t>(base, &max_len_string));
}
)*)
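
A worked example of the length check being described, under the assumption of base 10 and `u8`:

fn main() {
    // The smallest value that overflows u8 is u8::MAX + 1 = 256; its base-10
    // string has 3 digits, so any 3-digit decimal string may overflow u8.
    let num = u8::MAX as u32 + 1;
    let max_len_string = num.to_string();
    assert_eq!(max_len_string, "256");
    assert_eq!(max_len_string.len(), 3);
}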


@ -1629,7 +1629,7 @@ pub fn code(&self) -> Option<i32> {
///
/// This is exactly like [`code()`](Self::code), except that it returns a `NonZeroI32`.
///
/// Plain `code`, returning a plain integer, is provided because is is often more convenient.
/// Plain `code`, returning a plain integer, is provided because it is often more convenient.
/// The returned value from `code()` is indeed also nonzero; use `code_nonzero()` when you want
/// a type-level guarantee of nonzeroness.
///


@ -287,7 +287,7 @@ pub fn into_raw(&self) -> c_int {
// SuS and POSIX) say a wait status is, but Fuchsia apparently uses a u64, so it won't
// necessarily fit.
//
// It seems to me that that the right answer would be to provide std::os::fuchsia with its
// It seems to me that the right answer would be to provide std::os::fuchsia with its
// own ExitStatusExt, rather that trying to provide a not very convincing imitation of
// Unix. Ie, std::os::unix::process:ExitStatusExt ought not to exist on Fuchsia. But
// fixing this up that is beyond the scope of my efforts now.


@ -464,7 +464,7 @@ fn copy_sanitizers(
builder.copy(&runtime.path, &dst);
if target == "x86_64-apple-darwin" || target == "aarch64-apple-darwin" {
// Update the librarys install name to reflect that it has has been renamed.
// Update the librarys install name to reflect that it has been renamed.
apple_darwin_update_library_name(&dst, &format!("@rpath/{}", &runtime.name));
// Upon renaming the install name, the code signature of the file will invalidate,
// so we will sign it again.


@ -1514,7 +1514,7 @@ pub(crate) fn clean_ty<'tcx>(ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> T
// as we currently do not supply the parent generics to anonymous constants
// but do allow `ConstKind::Param`.
//
// `const_eval_poly` tries to to first substitute generic parameters which
// `const_eval_poly` tries to first substitute generic parameters which
// results in an ICE while manually constructing the constant and using `eval`
// does nothing for `ConstKind::Param`.
let ct = ty::Const::from_anon_const(cx.tcx, def_id);


@ -612,7 +612,7 @@ fn generate_macro_def_id_path(
};
if path.len() < 2 {
// The minimum we can have is the crate name followed by the macro name. If shorter, then
// it means that that `relative` was empty, which is an error.
// it means that `relative` was empty, which is an error.
debug!("macro path cannot be empty!");
return Err(HrefError::NotInExternalCache);
}


@ -17,7 +17,7 @@ pub struct Foo {
// Going from an aggregate struct to another type currently requires Copy to
// enable the TrustedRandomAccess specialization. Without it optimizations do not yet
// reliably recognize the loops as noop for for repr(C) or non-Copy structs.
// reliably recognize the loops as noop for repr(C) or non-Copy structs.
#[derive(Copy, Clone)]
pub struct Bar {
a: u64,


@ -1,8 +1,6 @@
//
// popped up in in #94012, where an alternative desugaring was
// popped up in #94012, where an alternative desugaring was
// causing unreachable code errors
#![deny(unused_variables)]
#![deny(unreachable_code)]


@ -1,11 +1,11 @@
error: unused variable: `x`
--> $DIR/let-else-then-diverge.rs:11:13
--> $DIR/let-else-then-diverge.rs:9:13
|
LL | let x = 5;
| ^ help: if this is intentional, prefix it with an underscore: `_x`
|
note: the lint level is defined here
--> $DIR/let-else-then-diverge.rs:6:9
--> $DIR/let-else-then-diverge.rs:4:9
|
LL | #![deny(unused_variables)]
| ^^^^^^^^^^^^^^^^


@ -5,7 +5,7 @@
// These first two `#[allow(deprecated)]` attributes
// do nothing, since the AST nodes for `First` and `Second`
// haven't been been assigned a `NodeId`.
// haven't been assigned a `NodeId`.
// See #63221 for a discussion about how we should
// handle the interaction of 'inert' attributes and
// proc-macro attributes.