Merge pull request #6285 from ytmimi/subtree-push-nightly-2024-08-17

subtree-push nightly-2024-08-17
Yacin Tmimi authored on 2024-08-19 19:04:27 -04:00; committed by GitHub
commit 448906160d
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
12 changed files with 45 additions and 55 deletions

View File

@@ -1,3 +1,3 @@
[toolchain]
channel = "nightly-2024-06-25"
channel = "nightly-2024-08-17"
components = ["llvm-tools", "rustc-dev"]

View File

@@ -386,7 +386,7 @@ pub(crate) fn format_expr(
))
}
}
ast::ExprKind::Gen(capture_by, ref block, ref kind) => {
ast::ExprKind::Gen(capture_by, ref block, ref kind, _) => {
let mover = if matches!(capture_by, ast::CaptureBy::Value { .. }) {
"move "
} else {
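Note: the nightly bump adds a fourth field to `ast::ExprKind::Gen` upstream (presumably the span of the `async`/`gen` keyword; that reading is an assumption, not verified against this diff). rustfmt only needs the capture mode, block, and kind, so the arm simply discards it, roughly:

    // Hedged sketch: bind the new field with `_` (or use `..` to absorb any future fields).
    ast::ExprKind::Gen(capture_by, ref block, ref kind, _) => {
        // unchanged body: pick "move " based on `capture_by`, then format the block
    }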

View File

@@ -1,3 +1,7 @@
// We need this feature as it changes `dylib` linking behavior and allows us to link to
// `rustc_driver`.
#![feature(rustc_private)]
#[macro_use]
extern crate tracing;
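Note: as the new comment says, `#![feature(rustc_private)]` is what lets the crate link against the compiler's own dylibs, including `rustc_driver`. A minimal sketch of the pattern (the crate names beyond `rustc_driver` are illustrative, not taken from this diff):

    #![feature(rustc_private)]
    // `extern crate` is still required for rustc's private crates; they are not on crates.io.
    extern crate rustc_ast;    // illustrative
    extern crate rustc_driver; // pulls in the shared rustc_driver dylib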

View File

@@ -33,15 +33,17 @@ pub(crate) fn parse_lazy_static(
}
while parser.token.kind != TokenKind::Eof {
// Parse a `lazy_static!` item.
// FIXME: These `eat_*` calls should be converted to `parse_or` to avoid
// silently formatting malformed lazy-statics.
let vis = parse_or!(parse_visibility, rustc_parse::parser::FollowedByType::No);
parser.eat_keyword(kw::Static);
parser.eat_keyword(kw::Ref);
let _ = parser.eat_keyword(kw::Static);
let _ = parser.eat_keyword(kw::Ref);
let id = parse_or!(parse_ident);
parser.eat(&TokenKind::Colon);
let _ = parser.eat(&TokenKind::Colon);
let ty = parse_or!(parse_ty);
parser.eat(&TokenKind::Eq);
let _ = parser.eat(&TokenKind::Eq);
let expr = parse_or!(parse_expr);
parser.eat(&TokenKind::Semi);
let _ = parser.eat(&TokenKind::Semi);
result.push((vis, id, ty, expr));
}
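Note: the new `let _ =` bindings exist because the parser's `eat`/`eat_keyword` helpers return a bool (whether the expected token was actually consumed), and that result now appears to be `#[must_use]` on this nightly; treat that as an assumption. Discarding it keeps the lenient "skip it if present" behaviour without tripping the lint, while the FIXME points toward erroring out on malformed input instead:

    // Hedged sketch of the two styles:
    let _ = parser.eat(&TokenKind::Semi); // optional token: outcome deliberately ignored
    if !parser.eat(&TokenKind::Semi) {    // stricter shape, in the spirit of the FIXME
        // report or recover from a malformed lazy_static! item here
    }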

View File

@@ -84,9 +84,7 @@ pub(crate) struct ParsedMacroArgs {
fn check_keyword<'a, 'b: 'a>(parser: &'a mut Parser<'b>) -> Option<MacroArg> {
for &keyword in RUST_KW.iter() {
if parser.token.is_keyword(keyword)
&& parser.look_ahead(1, |t| {
t.kind == TokenKind::Eof || t.kind == TokenKind::Comma
})
&& parser.look_ahead(1, |t| *t == TokenKind::Eof || *t == TokenKind::Comma)
{
parser.bump();
return Some(MacroArg::Keyword(
@@ -131,7 +129,7 @@ pub(crate) fn parse_macro_args(
Some(arg) => {
args.push(arg);
parser.bump();
if parser.token.kind == TokenKind::Eof && args.len() == 2 {
if parser.token == TokenKind::Eof && args.len() == 2 {
vec_with_semi = true;
break;
}
@@ -150,7 +148,7 @@ pub(crate) fn parse_macro_args(
parser.bump();
if parser.token.kind == TokenKind::Eof {
if parser.token == TokenKind::Eof {
trailing_comma = true;
break;
}
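Note: the dropped `.kind` in these comparisons relies on the nightly `Token` type comparing directly against a `TokenKind` (most likely via a `PartialEq<TokenKind>` impl added upstream; an assumption here). Both spellings test the same thing:

    // Hedged sketch: equivalent end-of-input checks, before and after.
    if parser.token.kind == TokenKind::Eof { /* ... */ } // old spelling
    if parser.token == TokenKind::Eof { /* ... */ }      // new spelling, assuming PartialEq<TokenKind> for Token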

View File

@@ -517,21 +517,25 @@ fn rewrite_generic_args(
span: Span,
) -> RewriteResult {
match gen_args {
ast::GenericArgs::AngleBracketed(ref data) if !data.args.is_empty() => {
let args = data
.args
.iter()
.map(|x| match x {
ast::AngleBracketedArg::Arg(generic_arg) => {
SegmentParam::from_generic_arg(generic_arg)
}
ast::AngleBracketedArg::Constraint(constraint) => {
SegmentParam::Binding(constraint)
}
})
.collect::<Vec<_>>();
ast::GenericArgs::AngleBracketed(ref data) => {
if data.args.is_empty() {
Ok("".to_owned())
} else {
let args = data
.args
.iter()
.map(|x| match x {
ast::AngleBracketedArg::Arg(generic_arg) => {
SegmentParam::from_generic_arg(generic_arg)
}
ast::AngleBracketedArg::Constraint(constraint) => {
SegmentParam::Binding(constraint)
}
})
.collect::<Vec<_>>();
overflow::rewrite_with_angle_brackets(context, "", args.iter(), shape, span)
overflow::rewrite_with_angle_brackets(context, "", args.iter(), shape, span)
}
}
ast::GenericArgs::Parenthesized(ref data) => format_function_type(
data.inputs.iter().map(|x| &**x),
@@ -541,7 +545,7 @@ fn rewrite_generic_args(
context,
shape,
),
_ => Ok("".to_owned()),
ast::GenericArgs::ParenthesizedElided(..) => Ok("(..)".to_owned()),
}
}
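Note: the wildcard `_ => Ok("".to_owned())` arm is replaced by an explicit arm for the new `GenericArgs::ParenthesizedElided` variant, which appears to back return-type-notation bounds; rustfmt now prints the elided argument list back as the literal `(..)`. The new test added further down exercises it with input along these lines:

    fn rtn()
    where
        T: Trait<method(..): Send + 'static>, // `method(..)` parses as ParenthesizedElided
    {
    }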

View File

@@ -34,15 +34,6 @@ pub fn check_for(x: Feature) -> bool {
fn detect_features() -> cache::Initializer {
let mut value = cache::Initializer::default();
// If the x86 CPU does not support the CPUID instruction then it is too
// old to support any of the currently-detectable features.
if !has_cpuid() {
return value;
}
// Calling `__cpuid`/`__cpuid_count` from here on is safe because the CPU
// has `cpuid` support.
// 0. EAX = 0: Basic Information:
// - EAX returns the "Highest Function Parameter", that is, the maximum
// leaf value for subsequent calls of `cpuinfo` in range [0,

View File

@@ -146,8 +146,6 @@ trait T: ~ const Super {}
const fn not_quite_const<S: ~ const T>() -> i32 { <S as T>::CONST }
struct S<T:~ const ? Sized>(std::marker::PhantomData<T>);
impl ~ const T {}
fn apit(_: impl ~ const T) {}

View File

@@ -34,15 +34,6 @@ pub fn check_for(x: Feature) -> bool {
fn detect_features() -> cache::Initializer {
let mut value = cache::Initializer::default();
// If the x86 CPU does not support the CPUID instruction then it is too
// old to support any of the currently-detectable features.
if !has_cpuid() {
return value;
}
// Calling `__cpuid`/`__cpuid_count` from here on is safe because the CPU
// has `cpuid` support.
// 0. EAX = 0: Basic Information:
// - EAX returns the "Highest Function Parameter", that is, the maximum
// leaf value for subsequent calls of `cpuinfo` in range [0,

View File

@@ -3,9 +3,3 @@ where
i32: !Copy,
{
}
fn maybe_const_negative()
where
i32: ~const !Copy,
{
}

View File

@@ -0,0 +1,10 @@
fn rtn()
where
T: Trait<method(..): Send + 'static>,
T::method(..): Send + 'static,
{
}
fn test() {
let x: T::method(..);
}

View File

@@ -153,8 +153,6 @@ const fn not_quite_const<S: ~const T>() -> i32 {
<S as T>::CONST
}
struct S<T: ~const ?Sized>(std::marker::PhantomData<T>);
impl ~const T {}
fn apit(_: impl ~const T) {}