Auto merge of #129162 - matthiaskrgr:rollup-r0oxdev, r=matthiaskrgr
Rollup of 6 pull requests

Successful merges:

 - #128990 (Re-enable more debuginfo tests on freebsd)
 - #129042 (Special-case alias ty during the delayed bug emission in `try_from_lit`)
 - #129086 (Stabilize `is_none_or`)
 - #129149 (Migrate `validate_json.py` script to rust in `run-make/rustdoc-map-file` test)
 - #129154 (Fix wrong source location for some incorrect macro definitions)
 - #129161 (Stabilize std::thread::Builder::spawn_unchecked)

r? `@ghost`
`@rustbot` modify labels: rollup
commit 506052d49d
@@ -3149,6 +3149,7 @@ dependencies = [
+ "gimli 0.31.0",
  "object 0.36.2",
  "regex",
  "serde_json",
  "similar",
  "wasmparser 0.214.0",
 ]
@@ -6,7 +6,6 @@
 #![feature(box_patterns)]
 #![feature(decl_macro)]
 #![feature(if_let_guard)]
-#![feature(is_none_or)]
 #![feature(let_chains)]
 #![feature(never_type)]
 #![feature(rustdoc_internals)]
@@ -54,18 +54,24 @@ pub(super) fn parse(

     // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
     // additional trees if need be.
-    let mut trees = input.trees();
+    let mut trees = input.trees().peekable();
     while let Some(tree) = trees.next() {
         // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
         // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
         let tree = parse_tree(tree, &mut trees, parsing_patterns, sess, node_id, features, edition);
         match tree {
             TokenTree::MetaVar(start_sp, ident) if parsing_patterns => {
-                let span = match trees.next() {
+                // Not consuming the next token immediately, as it may not be a colon
+                let span = match trees.peek() {
                     Some(&tokenstream::TokenTree::Token(
                         Token { kind: token::Colon, span: colon_span },
                         _,
                     )) => {
+                        // Consume the colon first
+                        trees.next();
+
+                        // It's ok to consume the next tree no matter how,
+                        // since if it's not a token then it will be an invalid declaration.
                         match trees.next() {
                             Some(tokenstream::TokenTree::Token(token, _)) => match token.ident() {
                                 Some((fragment, _)) => {
@@ -125,12 +131,13 @@ pub(super) fn parse(
                                 }
                                 _ => token.span,
                             },
-                            Some(tree) => tree.span(),
-                            None => colon_span,
+                            // Invalid, return a nice source location
+                            _ => colon_span.with_lo(start_sp.lo()),
                         }
                     }
-                    Some(tree) => tree.span(),
-                    None => start_sp,
+                    // Whether it's none or some other tree, it doesn't belong to
+                    // the current meta variable, returning the original span.
+                    _ => start_sp,
                 };

                 result.push(TokenTree::MetaVarDecl(span, ident, None));
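Note (not part of the diff): the fix above relies on the peek-before-consume pattern that `std::iter::Peekable` provides — look at the next token, and only call `next()` once it is known to be the expected one, so a non-colon token stays available to the surrounding loop. A minimal self-contained sketch of the same idea (the `split_decl` helper and its input format are made up for illustration):

    // Split a declaration like "id:ident" into a name and an optional fragment,
    // peeking at the separator instead of consuming it blindly.
    fn split_decl(input: &str) -> (String, Option<String>) {
        let mut chars = input.chars().peekable();
        let mut name = String::new();
        while let Some(&c) = chars.peek() {
            if c == ':' {
                chars.next(); // consume the colon only once we know it is one
                return (name, Some(chars.collect()));
            }
            name.push(c);
            chars.next();
        }
        (name, None) // no colon: nothing was consumed that we did not keep
    }

    fn main() {
        assert_eq!(split_decl("id:ident"), ("id".to_string(), Some("ident".to_string())));
        assert_eq!(split_decl("id"), ("id".to_string(), None));
    }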
@@ -5,7 +5,6 @@
 #![feature(box_patterns)]
 #![feature(control_flow_enum)]
 #![feature(if_let_guard)]
-#![feature(is_none_or)]
 #![feature(let_chains)]
 #![feature(never_type)]
 #![feature(try_blocks)]
@@ -1,7 +1,6 @@
 // tidy-alphabetical-start
 #![feature(decl_macro)]
 #![feature(let_chains)]
-#![feature(thread_spawn_unchecked)]
 #![feature(try_blocks)]
 // tidy-alphabetical-end

@@ -305,6 +305,10 @@ fn try_from_lit(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, expr: &'tcx hir::Expr<'tcx>) ->
     // mir.
     match tcx.at(expr.span).lit_to_const(lit_input) {
         Ok(c) => return Some(c),
+        Err(_) if lit_input.ty.has_aliases() => {
+            // allow the `ty` to be an alias type, though we cannot handle it here
+            return None;
+        }
         Err(e) => {
             tcx.dcx().span_delayed_bug(
                 expr.span,
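Note (not part of the diff): the new arm works because `match` arms are tried top to bottom, so the guarded `Err(_) if lit_input.ty.has_aliases()` case has to sit above the catch-all `Err(e)` arm that files the delayed bug. A standalone sketch of that arm-ordering idea, with made-up types standing in for the compiler's:

    fn handle(result: Result<u32, String>, ty_has_aliases: bool) -> Option<u32> {
        match result {
            Ok(v) => Some(v),
            // Guarded arm first: the first arm whose pattern and guard both match wins,
            // so the "bail out quietly" case must precede the catch-all below.
            Err(_) if ty_has_aliases => None,
            Err(e) => panic!("unexpected failure: {e}"),
        }
    }

    fn main() {
        assert_eq!(handle(Ok(7), false), Some(7));
        assert_eq!(handle(Err("no value".into()), true), None);
    }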
@@ -656,8 +656,6 @@ pub const fn is_none(&self) -> bool {
     /// # Examples
     ///
     /// ```
-    /// #![feature(is_none_or)]
-    ///
     /// let x: Option<u32> = Some(2);
     /// assert_eq!(x.is_none_or(|x| x > 1), true);
     ///
@@ -669,7 +667,7 @@ pub const fn is_none(&self) -> bool {
     /// ```
     #[must_use]
     #[inline]
-    #[unstable(feature = "is_none_or", issue = "126383")]
+    #[stable(feature = "is_none_or", since = "CURRENT_RUSTC_VERSION")]
    pub fn is_none_or(self, f: impl FnOnce(T) -> bool) -> bool {
         match self {
             None => true,
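Note (not part of the diff): with the attribute flipped to `#[stable]`, callers no longer need a feature gate. A small usage sketch matching the semantics shown above (`None` maps to `true`, otherwise the predicate is applied to the contained value):

    fn main() {
        let big: Option<u32> = Some(5);
        let small: Option<u32> = Some(0);
        let none: Option<u32> = None;

        assert!(big.is_none_or(|x| x > 1));
        assert!(!small.is_none_or(|x| x > 1));
        assert!(none.is_none_or(|x| x > 1));
    }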
@@ -412,7 +412,6 @@ pub fn spawn<F, T>(self, f: F) -> io::Result<JoinHandle<T>>
     /// # Examples
     ///
     /// ```
-    /// #![feature(thread_spawn_unchecked)]
     /// use std::thread;
     ///
     /// let builder = thread::Builder::new();
@@ -433,7 +432,7 @@ pub fn spawn<F, T>(self, f: F) -> io::Result<JoinHandle<T>>
     /// ```
     ///
     /// [`io::Result`]: crate::io::Result
-    #[unstable(feature = "thread_spawn_unchecked", issue = "55132")]
+    #[stable(feature = "thread_spawn_unchecked", since = "CURRENT_RUSTC_VERSION")]
    pub unsafe fn spawn_unchecked<F, T>(self, f: F) -> io::Result<JoinHandle<T>>
     where
         F: FnOnce() -> T,
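Note (not part of the diff): a self-contained usage sketch of the now-stable `Builder::spawn_unchecked`. The call is `unsafe` because the compiler no longer enforces a `'static` bound on the closure; the caller must guarantee that any borrowed data outlives the thread, which the explicit `join()` below provides:

    use std::thread;

    fn main() -> std::io::Result<()> {
        let builder = thread::Builder::new();

        let x = 1;
        let thread_x = &x;

        // SAFETY: the thread is joined before `x` goes out of scope, so the
        // borrow captured by the closure never outlives the data it points to.
        let handle = unsafe {
            builder.spawn_unchecked(move || {
                println!("x = {}", *thread_x);
            })?
        };

        handle.join().unwrap();
        Ok(())
    }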
@@ -12,7 +12,6 @@
 #![feature(let_chains)]
 #![feature(trait_upcasting)]
 #![feature(strict_overflow_ops)]
-#![feature(is_none_or)]
 // Configure clippy and other lints
 #![allow(
     clippy::collapsible_else_if,
@@ -11,3 +11,4 @@ wasmparser = { version = "0.214", default-features = false, features = ["std"] }
 regex = "1.8" # 1.8 to avoid memchr 2.6.0, as 2.5.0 is pinned in the workspace
+gimli = "0.31.0"
 build_helper = { path = "../build_helper" }
 serde_json = "1.0"
@@ -38,6 +38,7 @@ pub mod rfs {
+pub use gimli;
 pub use object;
 pub use regex;
 pub use serde_json;
 pub use wasmparser;

 // Re-exports of external dependencies.
@@ -1462,7 +1462,7 @@ fn generic_args_sans_defaults<'ga>(
             // otherwise, if the arg is equal to the param default, hide it (unless the
             // default is an error which can happen for the trait Self type)
-            #[allow(unstable_name_collisions)]
-            default_parameters.get(i).is_none_or(|default_parameter| {
+            IsNoneOr::is_none_or(default_parameters.get(i), |default_parameter| {
                 // !is_err(default_parameter.skip_binders())
                 // &&
                 arg != &default_parameter.clone().substitute(Interner, &parameters)
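Note (not part of the diff): rust-analyzer builds on a stable toolchain, so it keeps a local `is_none_or` polyfill trait; switching to fully-qualified syntax pins the call to that trait now that `Option::is_none_or` also exists as an inherent method. A standalone sketch of the disambiguation technique (this `IsNoneOr` trait is a stand-in, not rust-analyzer's actual definition):

    trait IsNoneOr<T> {
        fn is_none_or<F: FnOnce(T) -> bool>(self, f: F) -> bool;
    }

    impl<T> IsNoneOr<T> for Option<T> {
        fn is_none_or<F: FnOnce(T) -> bool>(self, f: F) -> bool {
            match self {
                None => true,
                Some(v) => f(v),
            }
        }
    }

    fn main() {
        let x = Some(3);
        // Method syntax would resolve to the inherent `Option::is_none_or` where it
        // exists; fully-qualified syntax forces the polyfill trait's implementation.
        assert!(IsNoneOr::is_none_or(x, |v| v > 1));
    }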
@@ -1,5 +1,4 @@
 //@ ignore-windows failing on win32 bot
-//@ ignore-freebsd: gdb package too new
 //@ ignore-android: FIXME(#10381)
 //@ compile-flags:-g
 //@ min-gdb-version: 8.1
@@ -1,5 +1,4 @@
 //@ ignore-windows failing on win32 bot
-//@ ignore-freebsd: gdb package too new
 //@ ignore-android: FIXME(#10381)
 //@ ignore-windows-gnu: #128981
 //@ compile-flags:-g
@@ -1,5 +1,4 @@
 // ignore-tidy-linelength
-//@ ignore-freebsd: gdb package too new
 //@ ignore-windows-gnu: #128981
 //@ ignore-android: FIXME(#10381)
 //@ compile-flags:-g
@@ -1,5 +1,4 @@
 //@ ignore-windows failing on win32 bot
-//@ ignore-freebsd: gdb package too new
 //@ ignore-android: FIXME(#10381)
 //@ compile-flags:-g
 //@ min-gdb-version: 8.1
@@ -1,13 +1,54 @@
-use run_make_support::{python_command, rustdoc};
+// This test ensures that all items from `foo` are correctly generated into the `redirect-map.json`
+// file with `--generate-redirect-map` rustdoc option.
+
+use std::path::Path;
+
+use run_make_support::rfs::read_to_string;
+use run_make_support::{path, rustdoc, serde_json};

 fn main() {
     let out_dir = "out";
+    let crate_name = "foo";
     rustdoc()
         .input("foo.rs")
+        .crate_name(crate_name)
         .arg("-Zunstable-options")
         .arg("--generate-redirect-map")
         .out_dir(&out_dir)
         .run();
-    // FIXME (GuillaumeGomez): Port the python script to Rust as well.
-    python_command().arg("validate_json.py").arg(&out_dir).run();
+
+    let generated = read_to_string(path(out_dir).join(crate_name).join("redirect-map.json"));
+    let expected = read_to_string("expected.json");
+    let generated: serde_json::Value =
+        serde_json::from_str(&generated).expect("failed to parse JSON");
+    let expected: serde_json::Value =
+        serde_json::from_str(&expected).expect("failed to parse JSON");
+    let expected = expected.as_object().unwrap();
+
+    let mut differences = Vec::new();
+    for (key, expected_value) in expected.iter() {
+        match generated.get(key) {
+            Some(value) => {
+                if expected_value != value {
+                    differences.push(format!(
+                        "values for key `{key}` don't match: `{expected_value:?}` != `{value:?}`"
+                    ));
+                }
+            }
+            None => differences.push(format!("missing key `{key}`")),
+        }
+    }
+    for (key, data) in generated.as_object().unwrap().iter() {
+        if !expected.contains_key(key) {
+            differences.push(format!("Extra data not expected: key: `{key}`, data: `{data}`"));
+        }
+    }
+
+    if !differences.is_empty() {
+        eprintln!("Found differences in JSON files:");
+        for diff in differences {
+            eprintln!("=> {diff}");
+        }
+        panic!("Found differences in JSON files");
+    }
 }
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-
-import os
-import sys
-import json
-
-
-def find_redirect_map_file(folder, errors):
-    for root, _dirs, files in os.walk(folder):
-        for name in files:
-            if not name.endswith("redirect-map.json"):
-                continue
-            with open(os.path.join(root, name)) as f:
-                data = json.load(f)
-            with open("expected.json") as f:
-                expected = json.load(f)
-            for key in expected:
-                if expected[key] != data.get(key):
-                    errors.append("Expected `{}` for key `{}`, found: `{}`".format(
-                        expected[key], key, data.get(key)))
-                else:
-                    del data[key]
-            for key in data:
-                errors.append("Extra data not expected: key: `{}`, data: `{}`".format(
-                    key, data[key]))
-            return True
-    return False
-
-
-if len(sys.argv) != 2:
-    print("Expected doc directory to check!")
-    sys.exit(1)
-
-errors = []
-if not find_redirect_map_file(sys.argv[1], errors):
-    print("Didn't find the map file in `{}`...".format(sys.argv[1]))
-    sys.exit(1)
-for err in errors:
-    print("=> {}".format(err))
-if len(errors) != 0:
-    sys.exit(1)
@@ -1,6 +1,8 @@
-//@ known-bug: #116308
+//@ check-pass
 #![feature(adt_const_params)]

+// Regression test for #116308
+
 pub trait Identity {
     type Identity;
 }
@@ -1,14 +1,14 @@
 error: missing fragment specifier
-  --> $DIR/macro-match-nonterminal.rs:2:8
+  --> $DIR/macro-match-nonterminal.rs:2:6
    |
 LL |     ($a, $b) => {
-   |        ^
+   |      ^^

 error: missing fragment specifier
-  --> $DIR/macro-match-nonterminal.rs:2:8
+  --> $DIR/macro-match-nonterminal.rs:2:6
    |
 LL |     ($a, $b) => {
-   |        ^
+   |      ^^
    |
    = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
    = note: for more information, see issue #40107 <https://github.com/rust-lang/rust/issues/40107>
@@ -27,10 +27,10 @@ error: aborting due to 3 previous errors

 Future incompatibility report: Future breakage diagnostic:
 error: missing fragment specifier
-  --> $DIR/macro-match-nonterminal.rs:2:8
+  --> $DIR/macro-match-nonterminal.rs:2:6
    |
 LL |     ($a, $b) => {
-   |        ^
+   |      ^^
    |
    = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
    = note: for more information, see issue #40107 <https://github.com/rust-lang/rust/issues/40107>