Rollup merge of #95461 - nyurik:spelling, r=lcnr
Spellchecking some comments. This PR attempts to clean up some minor spelling mistakes in comments.
This commit is contained in:
commit
03b3993ae8
@ -461,7 +461,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
(combined / 10) as u32 as Limb
|
||||
});
|
||||
|
||||
// Reduce the sigificand to avoid wasting time dividing 0's.
|
||||
// Reduce the significand to avoid wasting time dividing 0's.
|
||||
while sig.last() == Some(&0) {
|
||||
sig.pop();
|
||||
}
|
||||
|
@ -634,7 +634,7 @@ fn lower_res(&mut self, res: Res<NodeId>) -> Res {
|
||||
// This can happen when trying to lower the return type `x` in erroneous code like
|
||||
// async fn foo(x: u8) -> x {}
|
||||
// In that case, `x` is lowered as a function parameter, and the return type is lowered as
|
||||
// an opaque type as a synthetized HIR owner.
|
||||
// an opaque type as a synthesized HIR owner.
|
||||
res.unwrap_or(Res::Err)
|
||||
}
|
||||
|
||||
|
@ -622,7 +622,7 @@ fn check_foreign_item_ascii_only(&self, ident: Ident) {
|
||||
}
|
||||
}
|
||||
|
||||
/// Reject C-varadic type unless the function is foreign,
|
||||
/// Reject C-variadic type unless the function is foreign,
|
||||
/// or free and `unsafe extern "C"` semantically.
|
||||
fn check_c_variadic_type(&self, fk: FnKind<'a>) {
|
||||
match (fk.ctxt(), fk.header()) {
|
||||
|
@ -55,7 +55,7 @@ fn region_name_is_suggestable(name: &RegionName) -> bool {
|
||||
| RegionNameSource::NamedFreeRegion(..)
|
||||
| RegionNameSource::Static => true,
|
||||
|
||||
// Don't give suggestions for upvars, closure return types, or other unnamable
|
||||
// Don't give suggestions for upvars, closure return types, or other unnameable
|
||||
// regions.
|
||||
RegionNameSource::SynthesizedFreeEnvRegion(..)
|
||||
| RegionNameSource::AnonRegionFromArgument(..)
|
||||
|
@ -176,7 +176,7 @@ pub(crate) fn report_region_errors(&mut self, nll_errors: RegionErrors<'tcx>) {
|
||||
// FIXME. We should handle this case better. It
|
||||
// indicates that we have e.g., some region variable
|
||||
// whose value is like `'a+'b` where `'a` and `'b` are
|
||||
// distinct unrelated univesal regions that are not
|
||||
// distinct unrelated universal regions that are not
|
||||
// known to outlive one another. It'd be nice to have
|
||||
// some examples where this arises to decide how best
|
||||
// to report it; we could probably handle it by
|
||||
|
@ -365,7 +365,7 @@ fn check_access_for_conflict(
|
||||
// borrow); so don't check if they interfere.
|
||||
//
|
||||
// NOTE: *reservations* do conflict with themselves;
|
||||
// thus aren't injecting unsoundenss w/ this check.)
|
||||
// thus aren't injecting unsoundness w/ this check.)
|
||||
(Activation(_, activating), _) if activating == borrow_index => {
|
||||
// Activating a borrow doesn't generate any invalidations, since we
|
||||
// have already taken the reservation
|
||||
|
@ -1046,7 +1046,7 @@ fn check_access_for_conflict(
|
||||
// borrow); so don't check if they interfere.
|
||||
//
|
||||
// NOTE: *reservations* do conflict with themselves;
|
||||
// thus aren't injecting unsoundenss w/ this check.)
|
||||
// thus aren't injecting unsoundness w/ this check.)
|
||||
(Activation(_, activating), _) if activating == borrow_index => {
|
||||
debug!(
|
||||
"check_access_for_conflict place_span: {:?} sd: {:?} rw: {:?} \
|
||||
@ -1105,7 +1105,7 @@ fn check_access_for_conflict(
|
||||
);
|
||||
// rust-lang/rust#56254 - This was previously permitted on
|
||||
// the 2018 edition so we emit it as a warning. We buffer
|
||||
// these sepately so that we only emit a warning if borrow
|
||||
// these separately so that we only emit a warning if borrow
|
||||
// checking was otherwise successful.
|
||||
this.reservation_warnings
|
||||
.insert(bi, (place_span.0, place_span.1, location, bk, borrow.clone()));
|
||||
@ -1586,7 +1586,7 @@ fn check_if_reassignment_to_immutable_state(
|
||||
) {
|
||||
debug!("check_if_reassignment_to_immutable_state({:?})", local);
|
||||
|
||||
// Check if any of the initializiations of `local` have happened yet:
|
||||
// Check if any of the initializations of `local` have happened yet:
|
||||
if let Some(init_index) = self.is_local_ever_initialized(local, flow_state) {
|
||||
// And, if so, report an error.
|
||||
let init = &self.move_data.inits[init_index];
|
||||
|
@ -787,7 +787,7 @@ fn universe_compatible(&self, scc_b: ConstraintSccIndex, scc_a: ConstraintSccInd
|
||||
let universe_a = self.scc_universes[scc_a];
|
||||
|
||||
// Quick check: if scc_b's declared universe is a subset of
|
||||
// scc_a's declared univese (typically, both are ROOT), then
|
||||
// scc_a's declared universe (typically, both are ROOT), then
|
||||
// it cannot contain any problematic universe elements.
|
||||
if universe_a.can_name(self.scc_universes[scc_b]) {
|
||||
return true;
|
||||
@ -1991,7 +1991,7 @@ fn check_member_constraints(
|
||||
.iter()
|
||||
.find_map(|constraint| {
|
||||
if let ConstraintCategory::Predicate(predicate_span) = constraint.category {
|
||||
// We currentl'y doesn't store the `DefId` in the `ConstraintCategory`
|
||||
// We currently do not store the `DefId` in the `ConstraintCategory`
|
||||
// for performances reasons. The error reporting code used by NLL only
|
||||
// uses the span, so this doesn't cause any problems at the moment.
|
||||
Some(ObligationCauseCode::BindingObligation(
|
||||
|
@ -2640,7 +2640,7 @@ fn prove_aggregate_predicates(
|
||||
// we have to solve them here where we instantiate the
|
||||
// closure.
|
||||
//
|
||||
// Despite the opacity of the previous parapgrah, this is
|
||||
// Despite the opacity of the previous paragraph, this is
|
||||
// actually relatively easy to understand in terms of the
|
||||
// desugaring. A closure gets desugared to a struct, and
|
||||
// these extra requirements are basically like where
|
||||
|
@ -137,7 +137,7 @@ fn configure_annotatable(&mut self, mut annotatable: Annotatable) -> Option<Anno
|
||||
}
|
||||
|
||||
// The majority of parsed attribute targets will never need to have early cfg-expansion
|
||||
// run (e.g. they are not part of a `#[derive]` or `#[cfg_eval]` macro inoput).
|
||||
// run (e.g. they are not part of a `#[derive]` or `#[cfg_eval]` macro input).
|
||||
// Therefore, we normally do not capture the necessary information about `#[cfg]`
|
||||
// and `#[cfg_attr]` attributes during parsing.
|
||||
//
|
||||
|
@ -28,7 +28,7 @@ pub fn expand_deriving_clone(
|
||||
// - the item is a union with Copy fields
|
||||
// Unions with generic parameters still can derive Clone because they require Copy
|
||||
// for deriving, Clone alone is not enough.
|
||||
// Whever Clone is implemented for fields is irrelevant so we don't assert it.
|
||||
// Wherever Clone is implemented for fields is irrelevant so we don't assert it.
|
||||
let bounds;
|
||||
let substructure;
|
||||
let is_shallow;
|
||||
|
@ -70,7 +70,7 @@ fn expand<'cx>(
|
||||
}
|
||||
|
||||
pub fn use_panic_2021(mut span: Span) -> bool {
|
||||
// To determine the editon, we check the first span up the expansion
|
||||
// To determine the edition, we check the first span up the expansion
|
||||
// stack that does not have #[allow_internal_unstable(edition_panic)].
|
||||
// (To avoid using the edition of e.g. the assert!() or debug_assert!() definition.)
|
||||
loop {
|
||||
|
@ -36,7 +36,7 @@ impl<T: DispatchFromDyn<U>, U> DispatchFromDyn<Wrapper<U>> for Wrapper<T> {}
|
||||
|
||||
trait Trait {
|
||||
// This method isn't object-safe yet. Unsized by-value `self` is object-safe (but not callable
|
||||
// without unsized_locals), but wrappers arond `Self` currently are not.
|
||||
// without unsized_locals), but wrappers around `Self` currently are not.
|
||||
// FIXME (mikeyhew) uncomment this when unsized rvalues object-safety is implemented
|
||||
// fn wrapper(self: Wrapper<Self>) -> i32;
|
||||
fn ptr_wrapper(self: Ptr<Wrapper<Self>>) -> i32;
|
||||
|
@ -12,7 +12,7 @@ A secondary goal is to check if using the gcc backend will provide any run-time
|
||||
## Building
|
||||
|
||||
**This requires a patched libgccjit in order to work.
|
||||
The patches in [this repostory](https://github.com/antoyo/libgccjit-patches) need to be applied.
|
||||
The patches in [this repository](https://github.com/antoyo/libgccjit-patches) need to be applied.
|
||||
(Those patches should work when applied on master, but in case it doesn't work, they are known to work when applied on 079c23cfe079f203d5df83fea8e92a60c7d7e878.)
|
||||
You can also use my [fork of gcc](https://github.com/antoyo/gcc) which already includes these patches.**
|
||||
|
||||
|
@ -37,7 +37,7 @@ impl<T: DispatchFromDyn<U>, U> DispatchFromDyn<Wrapper<U>> for Wrapper<T> {}
|
||||
|
||||
trait Trait {
|
||||
// This method isn't object-safe yet. Unsized by-value `self` is object-safe (but not callable
|
||||
// without unsized_locals), but wrappers arond `Self` currently are not.
|
||||
// without unsized_locals), but wrappers around `Self` currently are not.
|
||||
// FIXME (mikeyhew) uncomment this when unsized rvalues object-safety is implemented
|
||||
// fn wrapper(self: Wrapper<Self>) -> i32;
|
||||
fn ptr_wrapper(self: Ptr<Wrapper<Self>>) -> i32;
|
||||
|
@ -508,7 +508,7 @@ fn sdiv(&mut self, a: RValue<'gcc>, b: RValue<'gcc>) -> RValue<'gcc> {
|
||||
}
|
||||
|
||||
fn exactsdiv(&mut self, a: RValue<'gcc>, b: RValue<'gcc>) -> RValue<'gcc> {
|
||||
// TODO(antoyo): posion if not exact.
|
||||
// TODO(antoyo): poison if not exact.
|
||||
// FIXME(antoyo): rustc_codegen_ssa::mir::intrinsic uses different types for a and b but they
|
||||
// should be the same.
|
||||
let typ = a.get_type().to_signed(self);
|
||||
@ -1252,7 +1252,7 @@ fn cx(&self) -> &CodegenCx<'gcc, 'tcx> {
|
||||
}
|
||||
|
||||
fn do_not_inline(&mut self, _llret: RValue<'gcc>) {
|
||||
// FIMXE(bjorn3): implement
|
||||
// FIXME(bjorn3): implement
|
||||
}
|
||||
|
||||
fn set_span(&mut self, _span: Span) {}
|
||||
|
@ -110,7 +110,7 @@ pub struct CodegenCx<'gcc, 'tcx> {
|
||||
/// NOTE: a hack is used because the rustc API is not suitable to libgccjit and as such,
|
||||
/// `const_undef()` returns struct as pointer so that they can later be assigned a value.
|
||||
/// As such, this set remembers which of these pointers were returned by this function so that
|
||||
/// they can be deferenced later.
|
||||
/// they can be dereferenced later.
|
||||
/// FIXME(antoyo): fix the rustc API to avoid having this hack.
|
||||
pub structs_as_pointer: RefCell<FxHashSet<RValue<'gcc>>>,
|
||||
}
|
||||
|
@ -340,7 +340,7 @@ fn abort(&mut self) {
|
||||
}
|
||||
|
||||
fn assume(&mut self, value: Self::Value) {
|
||||
// TODO(antoyo): switch to asumme when it exists.
|
||||
// TODO(antoyo): switch to assume when it exists.
|
||||
// Or use something like this:
|
||||
// #define __assume(cond) do { if (!(cond)) __builtin_unreachable(); } while (0)
|
||||
self.expect(value, true);
|
||||
|
@ -360,7 +360,7 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> {
|
||||
// feasible. The compiler may be able to get around this, but it may
|
||||
// involve some invasive changes to deal with this.
|
||||
//
|
||||
// The flipside of this situation is that whenever you link to a dll and
|
||||
// The flip side of this situation is that whenever you link to a dll and
|
||||
// you import a function from it, the import should be tagged with
|
||||
// `dllimport`. At this time, however, the compiler does not emit
|
||||
// `dllimport` for any declarations other than constants (where it is
|
||||
|
@ -1113,7 +1113,7 @@ macro_rules! require_simd {
|
||||
&& len.try_eval_usize(bx.tcx, ty::ParamEnv::reveal_all())
|
||||
== Some(expected_bytes) =>
|
||||
{
|
||||
// Zero-extend iN to the array lengh:
|
||||
// Zero-extend iN to the array length:
|
||||
let ze = bx.zext(i_, bx.type_ix(expected_bytes * 8));
|
||||
|
||||
// Convert the integer to a byte array
|
||||
|
@ -709,7 +709,7 @@ fn link_natively<'a, B: ArchiveBuilder<'a>>(
|
||||
let out = String::from_utf8_lossy(&out);
|
||||
|
||||
// Check to see if the link failed with an error message that indicates it
|
||||
// doesn't recognize the -no-pie option. If so, reperform the link step
|
||||
// doesn't recognize the -no-pie option. If so, re-perform the link step
|
||||
// without it. This is safe because if the linker doesn't support -no-pie
|
||||
// then it should not default to linking executables as pie. Different
|
||||
// versions of gcc seem to use different quotes in the error message so
|
||||
@ -1049,7 +1049,7 @@ fn escape_string(s: &[u8]) -> String {
|
||||
fn add_sanitizer_libraries(sess: &Session, crate_type: CrateType, linker: &mut dyn Linker) {
|
||||
// On macOS the runtimes are distributed as dylibs which should be linked to
|
||||
// both executables and dynamic shared objects. Everywhere else the runtimes
|
||||
// are currently distributed as static liraries which should be linked to
|
||||
// are currently distributed as static libraries which should be linked to
|
||||
// executables only.
|
||||
let needs_runtime = match crate_type {
|
||||
CrateType::Executable => true,
|
||||
@ -1850,7 +1850,7 @@ fn linker_with_args<'a, B: ArchiveBuilder<'a>>(
|
||||
// Upstream rust libraries and their nobundle static libraries
|
||||
add_upstream_rust_crates::<B>(cmd, sess, codegen_results, crate_type, tmpdir);
|
||||
|
||||
// Upstream dymamic native libraries linked with `#[link]` attributes at and `-l`
|
||||
// Upstream dynamic native libraries linked with `#[link]` attributes at and `-l`
|
||||
// command line options.
|
||||
// If -Zlink-native-libraries=false is set, then the assumption is that an
|
||||
// external build system already has the native dependencies defined, and it
|
||||
|
@ -91,7 +91,7 @@ fn reachable_non_generics_provider(tcx: TyCtxt<'_>, cnum: CrateNum) -> DefIdMap<
|
||||
if !generics.requires_monomorphization(tcx)
|
||||
// Functions marked with #[inline] are codegened with "internal"
|
||||
// linkage and are not exported unless marked with an extern
|
||||
// inidicator
|
||||
// indicator
|
||||
&& (!Instance::mono(tcx, def_id.to_def_id()).def.generates_cgu_internal_copy(tcx)
|
||||
|| tcx.codegen_fn_attrs(def_id.to_def_id()).contains_extern_indicator())
|
||||
{
|
||||
|
@ -73,7 +73,7 @@ enum InternMode {
|
||||
|
||||
/// Intern an allocation without looking at its children.
|
||||
/// `mode` is the mode of the environment where we found this pointer.
|
||||
/// `mutablity` is the mutability of the place to be interned; even if that says
|
||||
/// `mutability` is the mutability of the place to be interned; even if that says
|
||||
/// `immutable` things might become mutable if `ty` is not frozen.
|
||||
/// `ty` can be `None` if there is no potential interior mutability
|
||||
/// to account for (e.g. for vtables).
|
||||
|
@ -1011,7 +1011,7 @@ pub fn copy_repeatedly(
|
||||
let src_parts = self.get_ptr_access(src, size, src_align)?;
|
||||
let dest_parts = self.get_ptr_access(dest, size * num_copies, dest_align)?; // `Size` multiplication
|
||||
|
||||
// FIXME: we look up both allocations twice here, once ebfore for the `check_ptr_access`
|
||||
// FIXME: we look up both allocations twice here, once before for the `check_ptr_access`
|
||||
// and once below to get the underlying `&[mut] Allocation`.
|
||||
|
||||
// Source alloc preparations and access hooks.
|
||||
|
@ -876,7 +876,7 @@ pub fn copy_op_transmute(
|
||||
if src.layout.size != dest.layout.size {
|
||||
// FIXME: This should be an assert instead of an error, but if we transmute within an
|
||||
// array length computation, `typeck` may not have yet been run and errored out. In fact
|
||||
// most likey we *are* running `typeck` right now. Investigate whether we can bail out
|
||||
// most likely we *are* running `typeck` right now. Investigate whether we can bail out
|
||||
// on `typeck_results().has_errors` at all const eval entry points.
|
||||
debug!("Size mismatch when transmuting!\nsrc: {:#?}\ndest: {:#?}", src, dest);
|
||||
self.tcx.sess.delay_span_bug(
|
||||
|
@ -39,7 +39,7 @@ pub fn run(&mut self) -> InterpResult<'tcx> {
|
||||
///
|
||||
/// This is used by [priroda](https://github.com/oli-obk/priroda)
|
||||
///
|
||||
/// This is marked `#inline(always)` to work around adverserial codegen when `opt-level = 3`
|
||||
/// This is marked `#inline(always)` to work around adversarial codegen when `opt-level = 3`
|
||||
#[inline(always)]
|
||||
pub fn step(&mut self) -> InterpResult<'tcx, bool> {
|
||||
if self.stack().is_empty() {
|
||||
|
@ -329,7 +329,7 @@ pub(crate) fn eval_fn_call(
|
||||
|
||||
// Compute callee information using the `instance` returned by
|
||||
// `find_mir_or_eval_fn`.
|
||||
// FIXME: for variadic support, do we have to somehow determine calle's extra_args?
|
||||
// FIXME: for variadic support, do we have to somehow determine callee's extra_args?
|
||||
let callee_fn_abi = self.fn_abi_of_instance(instance, ty::List::empty())?;
|
||||
|
||||
if callee_fn_abi.c_variadic != caller_fn_abi.c_variadic {
|
||||
|
@ -942,7 +942,7 @@ fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location
|
||||
if callee_is_unstable_unmarked {
|
||||
trace!("callee_is_unstable_unmarked");
|
||||
// We do not use `const` modifiers for intrinsic "functions", as intrinsics are
|
||||
// `extern` funtions, and these have no way to get marked `const`. So instead we
|
||||
// `extern` functions, and these have no way to get marked `const`. So instead we
|
||||
// use `rustc_const_(un)stable` attributes to mean that the intrinsic is `const`
|
||||
if self.ccx.is_const_stable_const_fn() || is_intrinsic {
|
||||
self.check_op(ops::FnCallUnstable(callee, None));
|
||||
|
@ -56,7 +56,7 @@ pub trait Qualif {
|
||||
/// Returns `true` if *any* value of the given type could possibly have this `Qualif`.
|
||||
///
|
||||
/// This function determines `Qualif`s when we cannot do a value-based analysis. Since qualif
|
||||
/// propagation is context-insenstive, this includes function arguments and values returned
|
||||
/// propagation is context-insensitive, this includes function arguments and values returned
|
||||
/// from a call to another function.
|
||||
///
|
||||
/// It also determines the `Qualif`s for primitive types.
|
||||
|
@ -45,7 +45,7 @@ pub enum CallKind<'tcx> {
|
||||
},
|
||||
/// A call to `Fn(..)::call(..)`, desugared from `my_closure(a, b, c)`
|
||||
FnCall { fn_trait_id: DefId, self_ty: Ty<'tcx> },
|
||||
/// A call to an operator trait, desuraged from operator syntax (e.g. `a << b`)
|
||||
/// A call to an operator trait, desugared from operator syntax (e.g. `a << b`)
|
||||
Operator { self_arg: Option<Ident>, trait_id: DefId, self_ty: Ty<'tcx> },
|
||||
DerefCoercion {
|
||||
/// The `Span` of the `Target` associated type
|
||||
|
@ -623,7 +623,7 @@ pub enum NodeIdHashingMode {
|
||||
/// result, it needs to include `HashingControls` as part
|
||||
/// of the key, to ensure that is does not produce an incorrect
|
||||
/// result (for example, using a `Fingerprint` produced while
|
||||
/// hashing `Span`s when a `Fingeprint` without `Span`s is
|
||||
/// hashing `Span`s when a `Fingerprint` without `Span`s is
|
||||
/// being requested)
|
||||
#[derive(Clone, Hash, Eq, PartialEq, Debug)]
|
||||
pub struct HashingControls {
|
||||
|
@ -96,7 +96,7 @@ fn mubs_best_choice2() {
|
||||
//
|
||||
// mubs(0,3) = [2]
|
||||
|
||||
// Like the precedecing test, but in this case intersection is [2,
|
||||
// Like the preceding test, but in this case intersection is [2,
|
||||
// 1], and hence we rely on the first pare down call.
|
||||
|
||||
let mut relation = TransitiveRelation::default();
|
||||
|
@ -26,7 +26,7 @@ pub fn new() -> StyledBuffer {
|
||||
StyledBuffer { lines: vec![] }
|
||||
}
|
||||
|
||||
/// Returns content of `StyledBuffer` splitted by lines and line styles
|
||||
/// Returns content of `StyledBuffer` split by lines and line styles
|
||||
pub fn render(&self) -> Vec<Vec<StyledString>> {
|
||||
// Tabs are assumed to have been replaced by spaces in calling code.
|
||||
debug_assert!(self.lines.iter().all(|r| !r.iter().any(|sc| sc.chr == '\t')));
|
||||
|
@ -414,7 +414,7 @@ fn expand_cfg_attr_item(
|
||||
};
|
||||
trees.push((AttrAnnotatedTokenTree::Token(bang_token), Spacing::Alone));
|
||||
}
|
||||
// We don't really have a good span to use for the syntheized `[]`
|
||||
// We don't really have a good span to use for the synthesized `[]`
|
||||
// in `#[attr]`, so just use the span of the `#` token.
|
||||
let bracket_group = AttrAnnotatedTokenTree::Delimited(
|
||||
DelimSpan::from_single(pound_span),
|
||||
|
@ -1847,7 +1847,7 @@ fn flat_map_generic_param(
|
||||
if node.is_expr() {
|
||||
// The only way that we can end up with a `MacCall` expression statement,
|
||||
// (as opposed to a `StmtKind::MacCall`) is if we have a macro as the
|
||||
// traiing expression in a block (e.g. `fn foo() { my_macro!() }`).
|
||||
// trailing expression in a block (e.g. `fn foo() { my_macro!() }`).
|
||||
// Record this information, so that we can report a more specific
|
||||
// `SEMICOLON_IN_EXPRESSIONS_FROM_MACROS` lint if needed.
|
||||
// See #78991 for an investigation of treating macros in this position
|
||||
|
@ -36,7 +36,7 @@
|
||||
// HACK(Centril, #64197): These shouldn't really be here.
|
||||
// Rather, they should be with their respective modules which are defined in other crates.
|
||||
// However, since for now constructing a `ParseSess` sorta requires `config` from this crate,
|
||||
// these tests will need to live here in the iterim.
|
||||
// these tests will need to live here in the interim.
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
@ -41,7 +41,7 @@ fn open_tt(&self) -> &TokenTree {
|
||||
}
|
||||
|
||||
/// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter. Panics if
|
||||
/// the delimeter is `NoDelim`.
|
||||
/// the delimiter is `NoDelim`.
|
||||
fn close_tt(&self) -> &TokenTree {
|
||||
let tt = self.all_tts.last().unwrap();
|
||||
debug_assert!(matches!(
|
||||
|
@ -1118,7 +1118,7 @@ fn into(self) -> ast::BinOpKind {
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Encodable, Debug, HashStable_Generic)]
|
||||
pub enum UnOp {
|
||||
/// The `*` operator (deferencing).
|
||||
/// The `*` operator (dereferencing).
|
||||
Deref,
|
||||
/// The `!` operator (logical negation).
|
||||
Not,
|
||||
@ -1611,7 +1611,7 @@ pub fn can_have_side_effects(&self) -> bool {
|
||||
| ExprKind::Index(base, _)
|
||||
| ExprKind::AddrOf(.., base)
|
||||
| ExprKind::Cast(base, _) => {
|
||||
// This isn't exactly true for `Index` and all `Unnary`, but we are using this
|
||||
// This isn't exactly true for `Index` and all `Unary`, but we are using this
|
||||
// method exclusively for diagnostics and there's a *cultural* pressure against
|
||||
// them being used only for its side-effects.
|
||||
base.can_have_side_effects()
|
||||
|
@ -146,7 +146,7 @@ fn encode_query_cache(tcx: TyCtxt<'_>, encoder: &mut FileEncoder) -> FileEncodeR
|
||||
|
||||
/// Builds the dependency graph.
|
||||
///
|
||||
/// This function breates the *staging dep-graph*. When the dep-graph is modified by a query
|
||||
/// This function creates the *staging dep-graph*. When the dep-graph is modified by a query
|
||||
/// execution, the new dependency information is not kept in memory but directly
|
||||
/// output to this file. `save_dep_graph` then finalizes the staging dep-graph
|
||||
/// and moves it to the permanent dep-graph path
|
||||
|
@ -164,7 +164,7 @@ pub fn uninit(size: Size, align: Align, panic_on_fail: bool) -> InterpResult<'st
|
||||
let bytes = Box::<[u8]>::try_new_zeroed_slice(size.bytes_usize()).map_err(|_| {
|
||||
// This results in an error that can happen non-deterministically, since the memory
|
||||
// available to the compiler can change between runs. Normally queries are always
|
||||
// deterministic. However, we can be non-determinstic here because all uses of const
|
||||
// deterministic. However, we can be non-deterministic here because all uses of const
|
||||
// evaluation (including ConstProp!) will make compilation fail (via hard error
|
||||
// or ICE) upon encountering a `MemoryExhausted` error.
|
||||
if panic_on_fail {
|
||||
|
@ -192,7 +192,7 @@ fn find_capture_matching_projections<'a, 'tcx>(
|
||||
is_ancestor_or_same_capture(&possible_ancestor_proj_kinds, &hir_projections)
|
||||
})?;
|
||||
|
||||
// Convert index to be from the presepective of the entire closure_min_captures map
|
||||
// Convert index to be from the perspective of the entire closure_min_captures map
|
||||
// instead of just the root variable capture list
|
||||
Some((compute_capture_idx(closure_min_captures, var_hir_id, idx), capture))
|
||||
}
|
||||
|
@ -304,7 +304,7 @@ pub fn parse_attr_item(&mut self, capture_tokens: bool) -> PResult<'a, ast::Attr
|
||||
// this replace range with it, removing the inner attribute from the final
|
||||
// `AttrAnnotatedTokenStream`. Inner attributes are stored in the parsed AST note.
|
||||
// During macro expansion, they are selectively inserted back into the
|
||||
// token stream (the first inner attribute is remoevd each time we invoke the
|
||||
// token stream (the first inner attribute is removed each time we invoke the
|
||||
// corresponding macro).
|
||||
let range = start_pos..end_pos;
|
||||
if let Capturing::Yes = self.capture_state.capturing {
|
||||
|
@ -188,7 +188,7 @@ impl<'a> Parser<'a> {
|
||||
///
|
||||
/// Note: If your callback consumes an opening delimiter
|
||||
/// (including the case where you call `collect_tokens`
|
||||
/// when the current token is an opening delimeter),
|
||||
/// when the current token is an opening delimiter),
|
||||
/// you must also consume the corresponding closing delimiter.
|
||||
///
|
||||
/// That is, you can consume
|
||||
@ -260,7 +260,7 @@ pub fn collect_tokens_trailing_token<R: AstLike>(
|
||||
// We also call `has_cfg_or_cfg_attr` at the beginning of this function,
|
||||
// but we only bail out if there's no possibility of inner attributes
|
||||
// (!R::SUPPORTS_CUSTOM_INNER_ATTRS)
|
||||
// We only catpure about `#[cfg]` or `#[cfg_attr]` in `capture_cfg`
|
||||
// We only capture about `#[cfg]` or `#[cfg_attr]` in `capture_cfg`
|
||||
// mode - during normal parsing, we don't need any special capturing
|
||||
// for those attributes, since they're builtin.
|
||||
&& !(self.capture_cfg && has_cfg_or_cfg_attr(ret.attrs()))
|
||||
@ -382,7 +382,7 @@ pub fn collect_tokens_trailing_token<R: AstLike>(
|
||||
if matches!(self.capture_state.capturing, Capturing::No) {
|
||||
self.capture_state.replace_ranges.clear();
|
||||
// We don't clear `inner_attr_ranges`, as doing so repeatedly
|
||||
// had a measureable performance impact. Most inner attributes that
|
||||
// had a measurable performance impact. Most inner attributes that
|
||||
// we insert will get removed - when we drop the parser, we'll free
|
||||
// up the memory used by any attributes that we didn't remove from the map.
|
||||
}
|
||||
@ -418,7 +418,7 @@ struct FrameData {
|
||||
stack.push(FrameData { open: span, open_delim: delim, inner: vec![] });
|
||||
}
|
||||
FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => {
|
||||
// HACK: If we enconter a mismatched `None` delimiter at the top
|
||||
// HACK: If we encounter a mismatched `None` delimiter at the top
|
||||
// level, just ignore it.
|
||||
if matches!(delim, DelimToken::NoDelim)
|
||||
&& (stack.len() == 1
|
||||
|
@ -288,7 +288,7 @@ fn evaluate_predicates(
|
||||
) -> Option<(ty::ParamEnv<'tcx>, ty::ParamEnv<'tcx>)> {
|
||||
let tcx = infcx.tcx;
|
||||
|
||||
// Don't try to proess any nested obligations involving predicates
|
||||
// Don't try to process any nested obligations involving predicates
|
||||
// that are already in the `ParamEnv` (modulo regions): we already
|
||||
// know that they must hold.
|
||||
for predicate in param_env.caller_bounds() {
|
||||
@ -688,7 +688,7 @@ fn evaluate_nested_obligations(
|
||||
predicate
|
||||
);
|
||||
|
||||
// Under unusual circumstances, we can end up with a self-refeential
|
||||
// Under unusual circumstances, we can end up with a self-referential
|
||||
// projection predicate. For example:
|
||||
// <T as MyType>::Value == <T as MyType>::Value
|
||||
// Not only is displaying this to the user pointless,
|
||||
@ -767,7 +767,7 @@ fn evaluate_nested_obligations(
|
||||
// We only care about sub-obligations
|
||||
// when we started out trying to unify
|
||||
// some inference variables. See the comment above
|
||||
// for more infomration
|
||||
// for more information
|
||||
if p.term().skip_binder().has_infer_types() {
|
||||
if !self.evaluate_nested_obligations(
|
||||
ty,
|
||||
@ -784,7 +784,7 @@ fn evaluate_nested_obligations(
|
||||
}
|
||||
Ok(Ok(None)) => {
|
||||
// It's ok not to make progress when have no inference variables -
|
||||
// in that case, we were only performing unifcation to check if an
|
||||
// in that case, we were only performing unification to check if an
|
||||
// error occurred (which would indicate that it's impossible for our
|
||||
// type to implement the auto trait).
|
||||
// However, we should always make progress (either by generating
|
||||
|
@ -10,7 +10,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
||||
- `llvm-libunwind` now accepts `in-tree` (formerly true), `system` or `no` (formerly false) [#77703](https://github.com/rust-lang/rust/pull/77703)
|
||||
- The options `infodir`, `localstatedir`, and `gpg-password-file` are no longer allowed in config.toml. Previously, they were ignored without warning. Note that `infodir` and `localstatedir` are still accepted by `./configure`, with a warning. [#82451](https://github.com/rust-lang/rust/pull/82451)
|
||||
- Add options for enabling overflow checks, one for std (`overflow-checks-std`) and one for everything else (`overflow-checks`). Both default to false.
|
||||
- Change the names for `dist` commmands to match the component they generate. [#90684](https://github.com/rust-lang/rust/pull/90684)
|
||||
- Change the names for `dist` commands to match the component they generate. [#90684](https://github.com/rust-lang/rust/pull/90684)
|
||||
|
||||
### Non-breaking changes
|
||||
|
||||
|
@ -594,7 +594,7 @@ macro_rules! describe {
|
||||
dist::RustDev,
|
||||
dist::Extended,
|
||||
// It seems that PlainSourceTarball somehow changes how some of the tools
|
||||
// perceive their dependencies (see #93033) which would invaliate fingerprints
|
||||
// perceive their dependencies (see #93033) which would invalidate fingerprints
|
||||
// and force us to rebuild tools after vendoring dependencies.
|
||||
// To work around this, create the Tarball after building all the tools.
|
||||
dist::PlainSourceTarball,
|
||||
|
@ -703,7 +703,7 @@ pub fn rustc_cargo_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetS
|
||||
//
|
||||
// Note that this is disabled if LLVM itself is disabled or we're in a check
|
||||
// build. If we are in a check build we still go ahead here presuming we've
|
||||
// detected that LLVM is alreay built and good to go which helps prevent
|
||||
// detected that LLVM is already built and good to go which helps prevent
|
||||
// busting caches (e.g. like #71152).
|
||||
if builder.config.llvm_enabled()
|
||||
&& (builder.kind != Kind::Check
|
||||
|
@ -5,7 +5,7 @@
|
||||
extern crate xcrate;
|
||||
|
||||
fn main() {
|
||||
// NOTE line below commeted out due to issue #45994
|
||||
// NOTE line below commented out due to issue #45994
|
||||
// assert_eq!(xcrate::fourway_add(1)(2)(3)(4), 10);
|
||||
xcrate::return_closure_accessing_internal_fn()();
|
||||
}
|
||||
|
Loading…
Reference in New Issue
Block a user