Auto merge of #111585 - matthiaskrgr:rollup-468pykj, r=matthiaskrgr
Rollup of 8 pull requests

Successful merges:

 - #102673 (Update doc for `PhantomData` to match code example)
 - #111531 (Fix ice caused by shorthand fields in NoFieldsForFnCall)
 - #111547 (Start node has no immediate dominator)
 - #111548 (add util function to TokenStream to eliminate some clones)
 - #111560 (Simplify find_width_of_character_at_span.)
 - #111569 (Appease lints)
 - #111581 (Fix some misleading and copy-pasted `Pattern` examples)
 - #111582 ((docs) Change "wanting" to "want")

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
commit 2913ad6db0
@@ -551,6 +551,10 @@ pub fn push_stream(&mut self, stream: TokenStream) {
             vec_mut.extend(stream_iter);
         }
     }
+
+    pub fn chunks(&self, chunk_size: usize) -> core::slice::Chunks<'_, TokenTree> {
+        self.0.chunks(chunk_size)
+    }
 }
 
 /// By-reference iterator over a [`TokenStream`], that produces `&TokenTree`
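The new helper just forwards to `slice::chunks` on the stream's inner buffer. A minimal stand-alone sketch of the same delegation pattern, with a `Vec<u32>` newtype standing in for `TokenStream` (illustrative names, not the real compiler types):

```rust
/// Stand-in for `TokenStream`: a thin wrapper around a growable buffer.
struct Stream(Vec<u32>);

impl Stream {
    /// Borrow the elements in fixed-size, non-overlapping groups,
    /// just as the new `TokenStream::chunks` does for `TokenTree`s.
    fn chunks(&self, chunk_size: usize) -> std::slice::Chunks<'_, u32> {
        self.0.chunks(chunk_size)
    }
}

fn main() {
    let s = Stream(vec![1, 2, 3, 4, 5]);
    let sizes: Vec<usize> = s.chunks(2).map(|c| c.len()).collect();
    assert_eq!(sizes, [2, 2, 1]); // the last chunk may be shorter
}
```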
@@ -164,7 +164,7 @@ fn check_cleanup_control_flow(&self) {
                 if let Some(root) = post_contract_node.get(&bb) {
                     break *root;
                 }
-                let parent = doms.immediate_dominator(bb);
+                let parent = doms.immediate_dominator(bb).unwrap();
                 dom_path.push(bb);
                 if !self.body.basic_blocks[parent].is_cleanup {
                     break bb;
@@ -242,7 +242,9 @@ pub fn dominators<G: ControlFlowGraph>(graph: G) -> Dominators<G::Node> {
         immediate_dominators[*node] = Some(pre_order_to_real[idom[idx]]);
     }
 
-    Dominators { post_order_rank, immediate_dominators }
+    let start_node = graph.start_node();
+    immediate_dominators[start_node] = None;
+    Dominators { start_node, post_order_rank, immediate_dominators }
 }
 
 /// Evaluate the link-eval virtual forest, providing the currently minimum semi
@@ -308,6 +310,7 @@ fn compress(
 /// Tracks the list of dominators for each node.
 #[derive(Clone, Debug)]
 pub struct Dominators<N: Idx> {
+    start_node: N,
     post_order_rank: IndexVec<N, usize>,
     // Even though we track only the immediate dominator of each node, it's
    // possible to get its full list of dominators by looking up the dominator
@@ -316,14 +319,14 @@ pub struct Dominators<N: Idx> {
 }
 
 impl<Node: Idx> Dominators<Node> {
-    /// Whether the given Node has an immediate dominator.
+    /// Returns true if node is reachable from the start node.
     pub fn is_reachable(&self, node: Node) -> bool {
-        self.immediate_dominators[node].is_some()
+        node == self.start_node || self.immediate_dominators[node].is_some()
     }
 
-    pub fn immediate_dominator(&self, node: Node) -> Node {
-        assert!(self.is_reachable(node), "node {node:?} is not reachable");
-        self.immediate_dominators[node].unwrap()
+    /// Returns the immediate dominator of node, if any.
+    pub fn immediate_dominator(&self, node: Node) -> Option<Node> {
+        self.immediate_dominators[node]
     }
 
     /// Provides an iterator over each dominator up the CFG, for the given Node.
@@ -357,12 +360,7 @@ impl<'dom, Node: Idx> Iterator for Iter<'dom, Node> {
 
     fn next(&mut self) -> Option<Self::Item> {
         if let Some(node) = self.node {
-            let dom = self.dominators.immediate_dominator(node);
-            if dom == node {
-                self.node = None; // reached the root
-            } else {
-                self.node = Some(dom);
-            }
+            self.node = self.dominators.immediate_dominator(node);
             Some(node)
         } else {
             None
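Previously the root reported itself as its own immediate dominator, which is why the iterator needed the `dom == node` check; now the start node simply yields `None`. A toy sketch of the simplified walk, using a `HashMap` in place of the real `IndexVec`-backed `Dominators` (names and representation are illustrative only):

```rust
use std::collections::HashMap;

/// Walk a node's dominator chain up to the root, mirroring the new
/// `Iter::next`: follow the `Option` until the start node returns `None`.
fn dominator_chain(idom: &HashMap<u32, Option<u32>>, mut node: u32) -> Vec<u32> {
    let mut chain = vec![node];
    while let Some(parent) = idom[&node] {
        chain.push(parent);
        node = parent;
    }
    chain
}

fn main() {
    // CFG 0 -> 1 -> 2, rooted at 0: the start node has no immediate dominator.
    let idom = HashMap::from([(0, None), (1, Some(0)), (2, Some(1))]);
    assert_eq!(dominator_chain(&idom, 2), vec![2, 1, 0]);
}
```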
@@ -8,7 +8,7 @@ fn diamond() {
 
     let dominators = dominators(&graph);
     let immediate_dominators = &dominators.immediate_dominators;
-    assert_eq!(immediate_dominators[0], Some(0));
+    assert_eq!(immediate_dominators[0], None);
     assert_eq!(immediate_dominators[1], Some(0));
     assert_eq!(immediate_dominators[2], Some(0));
     assert_eq!(immediate_dominators[3], Some(0));
@@ -30,7 +30,7 @@ fn paper() {
     assert_eq!(immediate_dominators[3], Some(6));
     assert_eq!(immediate_dominators[4], Some(6));
     assert_eq!(immediate_dominators[5], Some(6));
-    assert_eq!(immediate_dominators[6], Some(6));
+    assert_eq!(immediate_dominators[6], None);
 }
 
 #[test]
@@ -43,3 +43,13 @@ fn paper_slt() {
 
     dominators(&graph);
 }
+
+#[test]
+fn immediate_dominator() {
+    let graph = TestGraph::new(1, &[(1, 2), (2, 3)]);
+    let dominators = dominators(&graph);
+    assert_eq!(dominators.immediate_dominator(0), None);
+    assert_eq!(dominators.immediate_dominator(1), None);
+    assert_eq!(dominators.immediate_dominator(2), Some(1));
+    assert_eq!(dominators.immediate_dominator(3), Some(2));
+}
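A complementary point, not part of this diff: with the new definition of `is_reachable` shown earlier, the start node still counts as reachable even though its immediate dominator is now `None`, while node 0 in this `TestGraph` is genuinely unreachable. Hypothetical extra assertions inside the same test would read:

```rust
// Hypothetical follow-on assertions (not in the PR), placed after the
// existing ones in `fn immediate_dominator()` above:
assert!(dominators.is_reachable(1));  // start node: reachable by definition
assert!(dominators.is_reachable(2));  // has an immediate dominator
assert!(dominators.is_reachable(3));
assert!(!dominators.is_reachable(0)); // never reached from the start node
```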
@@ -1180,6 +1180,10 @@ fn maybe_recover_struct_lit_bad_delims(
         self.restore_snapshot(snapshot);
         let close_paren = self.prev_token.span;
         let span = lo.to(close_paren);
+        // filter shorthand fields
+        let fields: Vec<_> =
+            fields.into_iter().filter(|field| !field.is_shorthand).collect();
+
         if !fields.is_empty() &&
             // `token.kind` should not be compared here.
             // This is because the `snapshot.token.kind` is treated as the same as
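Background on the fix: a struct-literal field is "shorthand" when the name alone stands for `name: name`, so there is no `name:` prefix for the recovery's function-call suggestion to strip out, which appears to be what tripped the ICE; filtering such fields side-steps that path. A plain-Rust illustration of the two field forms (ordinary user code, unrelated to compiler internals):

```rust
struct Point { x: i32, y: i32 }

fn main() {
    let x = 1;
    let shorthand = Point { x, y: 2 };   // `x` is a shorthand field (it means `x: x`)
    let explicit = Point { x: 1, y: 2 }; // both fields written out as `name: value`
    assert_eq!((shorthand.x, shorthand.y), (explicit.x, explicit.y));
}
```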
@@ -20,6 +20,7 @@
 #![feature(min_specialization)]
 #![feature(rustc_attrs)]
 #![feature(let_chains)]
+#![feature(round_char_boundary)]
 #![deny(rustc::untranslatable_diagnostic)]
 #![deny(rustc::diagnostic_outside_of_impl)]
 
@@ -1019,36 +1019,19 @@ fn find_width_of_character_at_span(&self, sp: Span, forwards: bool) -> u32 {
 
         let src = local_begin.sf.external_src.borrow();
 
-        // We need to extend the snippet to the end of the src rather than to end_index so when
-        // searching forwards for boundaries we've got somewhere to search.
-        let snippet = if let Some(ref src) = local_begin.sf.src {
-            &src[start_index..]
+        let snippet = if let Some(src) = &local_begin.sf.src {
+            src
         } else if let Some(src) = src.get_source() {
-            &src[start_index..]
+            src
         } else {
             return 1;
         };
-        debug!("snippet=`{:?}`", snippet);
 
-        let mut target = if forwards { end_index + 1 } else { end_index - 1 };
-        debug!("initial target=`{:?}`", target);
-
-        while !snippet.is_char_boundary(target - start_index) && target < source_len {
-            target = if forwards {
-                target + 1
-            } else {
-                match target.checked_sub(1) {
-                    Some(target) => target,
-                    None => {
-                        break;
-                    }
-                }
-            };
-            debug!("target=`{:?}`", target);
-        }
-        debug!("final target=`{:?}`", target);
-
-        if forwards { (target - end_index) as u32 } else { (end_index - target) as u32 }
+        if forwards {
+            (snippet.ceil_char_boundary(end_index + 1) - end_index) as u32
+        } else {
+            (end_index - snippet.floor_char_boundary(end_index - 1)) as u32
+        }
     }
 
     pub fn get_source_file(&self, filename: &FileName) -> Option<Lrc<SourceFile>> {
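The rewrite replaces the manual boundary-scanning loop with the unstable `round_char_boundary` helpers (hence the `#![feature(round_char_boundary)]` added above). A small nightly-only sketch of how those two methods behave on a multi-byte string:

```rust
// Nightly-only sketch: requires the unstable `round_char_boundary` feature.
#![feature(round_char_boundary)]

fn main() {
    let s = "héllo"; // 'é' occupies bytes 1..3
    // Index 2 falls inside 'é': floor snaps down to the previous boundary,
    // ceil snaps up to the next one.
    assert_eq!(s.floor_char_boundary(2), 1);
    assert_eq!(s.ceil_char_boundary(2), 3);
    // Indices already on a boundary are returned unchanged.
    assert_eq!(s.floor_char_boundary(3), 3);
    assert_eq!(s.ceil_char_boundary(3), 3);
}
```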
@@ -695,7 +695,7 @@ impl<T: ?Sized> !Sync for *mut T {}
 /// }
 /// ```
 ///
-/// This also in turn requires the annotation `T: 'a`, indicating
+/// This also in turn infers the lifetime bound `T: 'a`, indicating
 /// that any references in `T` are valid over the lifetime `'a`.
 ///
 /// When initializing a `Slice` you simply provide the value
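The surrounding `PhantomData` doc example (not reproduced in this hunk) is a raw-pointer `Slice` type that uses `PhantomData<&'a T>` to tie the struct to the lifetime `'a`; roughly along these lines (a from-memory sketch, not the exact std text):

```rust
use std::marker::PhantomData;

// A borrowed slice built from raw pointers: the pointers alone carry no
// lifetime, so PhantomData<&'a T> makes the borrow (and the `T: 'a` bound)
// visible to the compiler.
struct Slice<'a, T> {
    start: *const T,
    end: *const T,
    phantom: PhantomData<&'a T>,
}

fn from_slice<'a, T>(vec: &'a [T]) -> Slice<'a, T> {
    Slice {
        start: vec.as_ptr(),
        end: unsafe { vec.as_ptr().add(vec.len()) },
        phantom: PhantomData,
    }
}

fn main() {
    let v = [1, 2, 3];
    let s = from_slice(&v);
    // `s` cannot outlive `v`, thanks to the phantom borrow.
    assert!(!s.start.is_null() && !s.end.is_null());
}
```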
@@ -791,8 +791,8 @@ pub struct CharArrayRefSearcher<'a, 'b, const N: usize>(
 /// # Examples
 ///
 /// ```
-/// assert_eq!("Hello world".find(['l', 'l']), Some(2));
-/// assert_eq!("Hello world".find(['l', 'l']), Some(2));
+/// assert_eq!("Hello world".find(['o', 'l']), Some(2));
+/// assert_eq!("Hello world".find(['h', 'w']), Some(6));
 /// ```
 impl<'a, const N: usize> Pattern<'a> for [char; N] {
     pattern_methods!(CharArraySearcher<'a, N>, MultiCharEqPattern, CharArraySearcher);
@@ -811,8 +811,8 @@ unsafe impl<'a, const N: usize> ReverseSearcher<'a> for CharArraySearcher<'a, N>
 /// # Examples
 ///
 /// ```
-/// assert_eq!("Hello world".find(&['l', 'l']), Some(2));
-/// assert_eq!("Hello world".find(&['l', 'l']), Some(2));
+/// assert_eq!("Hello world".find(&['o', 'l']), Some(2));
+/// assert_eq!("Hello world".find(&['h', 'w']), Some(6));
 /// ```
 impl<'a, 'b, const N: usize> Pattern<'a> for &'b [char; N] {
     pattern_methods!(CharArrayRefSearcher<'a, 'b, N>, MultiCharEqPattern, CharArrayRefSearcher);
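The point of the corrected examples is that an array of `char`s matches any one of its elements, like a character class, not the character sequence. A short sketch, assuming a toolchain where the `[char; N]: Pattern` impls are available:

```rust
fn main() {
    // Any element of the array matches, like a character class.
    assert_eq!("Hello world".find(['o', 'l']), Some(2));  // first 'l'
    assert_eq!("Hello world".rfind(['o', 'l']), Some(9)); // last 'l'

    // The same pattern kind also works with `split` and friends.
    let parts: Vec<&str> = "a,b;c".split([',', ';']).collect();
    assert_eq!(parts, ["a", "b", "c"]);
}
```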
@@ -1946,7 +1946,7 @@ pub fn rename<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> io::Result<()>
 /// On success, the total number of bytes copied is returned and it is equal to
 /// the length of the `to` file as reported by `metadata`.
 ///
-/// If you’re wanting to copy the contents of one file to another and you’re
+/// If you want to copy the contents of one file to another and you’re
 /// working with [`File`]s, see the [`io::copy()`] function.
 ///
 /// # Platform-specific behavior
@@ -10,7 +10,7 @@
 /// On success, the total number of bytes that were copied from
 /// `reader` to `writer` is returned.
 ///
-/// If you’re wanting to copy the contents of one file to another and you’re
+/// If you want to copy the contents of one file to another and you’re
 /// working with filesystem paths, see the [`fs::copy`] function.
 ///
 /// [`fs::copy`]: crate::fs::copy
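For reference, the two functions the corrected sentences point at: `fs::copy` works on paths, while `io::copy` streams between any reader and writer. A quick sketch (the file names are made up):

```rust
use std::fs::File;
use std::io;

fn main() -> io::Result<()> {
    // Path-based: copies the contents (and permission bits) of one file to another.
    let bytes = std::fs::copy("input.txt", "copy-by-path.txt")?;
    println!("fs::copy copied {bytes} bytes");

    // Handle-based: streams from any `Read` to any `Write`, here two `File`s.
    let mut reader = File::open("input.txt")?;
    let mut writer = File::create("copy-by-handle.txt")?;
    io::copy(&mut reader, &mut writer)?;
    Ok(())
}
```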
@@ -594,9 +594,8 @@ pub(super) fn display_macro_source(
     def_id: DefId,
     vis: ty::Visibility<DefId>,
 ) -> String {
-    let tts: Vec<_> = def.body.tokens.clone().into_trees().collect();
     // Extract the spans of all matchers. They represent the "interface" of the macro.
-    let matchers = tts.chunks(4).map(|arm| &arm[0]);
+    let matchers = def.body.tokens.chunks(4).map(|arm| &arm[0]);
 
     if def.macro_rules {
         format!("macro_rules! {} {{\n{}}}", name, render_macro_arms(cx.tcx, matchers, ";"))
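Why a chunk size of 4: after parsing, each `macro_rules!` arm flattens to four token trees (matcher, `=>`, body, `;`), so `chunks(4)` groups the stream arm by arm and `&arm[0]` picks out each matcher, now without cloning the whole token stream first. A rough stand-alone illustration with strings in place of `TokenTree`s (hypothetical data, not the real rustdoc types):

```rust
fn main() {
    // Token trees of `macro_rules! m { (a) => { 1 }; (b) => { 2 }; }`,
    // written out as strings: matcher, `=>`, body, `;`, repeated per arm.
    let tokens = ["(a)", "=>", "{ 1 }", ";", "(b)", "=>", "{ 2 }", ";"];

    let matchers: Vec<&str> = tokens.chunks(4).map(|arm| arm[0]).collect();
    assert_eq!(matchers, ["(a)", "(b)"]);
}
```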
@@ -139,18 +139,18 @@ enum FileEntry {
 type Cache = HashMap<String, FileEntry>;
 
 fn small_url_encode(s: &str) -> String {
-    s.replace("<", "%3C")
-        .replace(">", "%3E")
-        .replace(" ", "%20")
-        .replace("?", "%3F")
-        .replace("'", "%27")
-        .replace("&", "%26")
-        .replace(",", "%2C")
-        .replace(":", "%3A")
-        .replace(";", "%3B")
-        .replace("[", "%5B")
-        .replace("]", "%5D")
-        .replace("\"", "%22")
+    s.replace('<', "%3C")
+        .replace('>', "%3E")
+        .replace(' ', "%20")
+        .replace('?', "%3F")
+        .replace('\'', "%27")
+        .replace('&', "%26")
+        .replace(',', "%2C")
+        .replace(':', "%3A")
+        .replace(';', "%3B")
+        .replace('[', "%5B")
+        .replace(']', "%5D")
+        .replace('\"', "%22")
 }
 
 impl Checker {
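The change is purely about the pattern type: `&str` and `char` patterns give the same result for single characters, but the `char` form is what lints such as Clippy's `single_char_pattern` ask for and avoids a substring search. A tiny check that the two spellings agree:

```rust
fn main() {
    let s = "a<b>'c'";
    // Same result either way; the `char` pattern is just the preferred form
    // for single characters (and what the lint cleanup above switches to).
    assert_eq!(s.replace("<", "%3C"), s.replace('<', "%3C"));
    assert_eq!(s.replace("'", "%27"), s.replace('\'', "%27"));
}
```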
@@ -267,7 +267,6 @@ fn check(&mut self, file: &Path, report: &mut Report) {
             FileEntry::OtherFile => return,
             FileEntry::Redirect { target } => {
                 let t = target.clone();
-                drop(target);
                 let (target, redir_entry) = self.load_file(&t, report);
                 match redir_entry {
                     FileEntry::Missing => {
@@ -391,7 +390,7 @@ fn load_file(&mut self, file: &Path, report: &mut Report) -> (String, &FileEntry
     const ERROR_INVALID_NAME: i32 = 123;
 
     let pretty_path =
-        file.strip_prefix(&self.root).unwrap_or(&file).to_str().unwrap().to_string();
+        file.strip_prefix(&self.root).unwrap_or(file).to_str().unwrap().to_string();
 
     let entry =
         self.cache.entry(pretty_path.clone()).or_insert_with(|| match fs::metadata(file) {
@@ -470,10 +469,8 @@ fn is_exception(file: &Path, link: &str) -> bool {
         // NOTE: This cannot be added to `LINKCHECK_EXCEPTIONS` because the resolved path
        // calculated in `check` function is outside `build/<triple>/doc` dir.
         // So the `strip_prefix` method just returns the old absolute broken path.
-        if file.ends_with("std/primitive.slice.html") {
-            if link.ends_with("primitive.slice.html") {
-                return true;
-            }
+        if file.ends_with("std/primitive.slice.html") && link.ends_with("primitive.slice.html") {
+            return true;
         }
         false
     }
@@ -545,7 +542,7 @@ fn with_attrs_in_source<F: FnMut(&str, usize, &str)>(source: &str, attr: &str, m
 fn parse_ids(ids: &mut HashSet<String>, file: &str, source: &str, report: &mut Report) {
     if ids.is_empty() {
         with_attrs_in_source(source, " id", |fragment, i, _| {
-            let frag = fragment.trim_start_matches("#").to_owned();
+            let frag = fragment.trim_start_matches('#').to_owned();
             let encoded = small_url_encode(&frag);
             if !ids.insert(frag) {
                 report.errors += 1;
tests/ui/parser/issues/issue-111416.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
+fn main() {
+    let my = monad_bind(mx, T: Try); //~ ERROR invalid `struct` delimiters or `fn` call arguments
+}
tests/ui/parser/issues/issue-111416.stderr (new file, 18 lines)
@@ -0,0 +1,18 @@
+error: invalid `struct` delimiters or `fn` call arguments
+  --> $DIR/issue-111416.rs:2:14
+   |
+LL |     let my = monad_bind(mx, T: Try);
+   |              ^^^^^^^^^^^^^^^^^^^^^^
+   |
+help: if `monad_bind` is a struct, use braces as delimiters
+   |
+LL |     let my = monad_bind { mx, T: Try };
+   |                         ~          ~
+help: if `monad_bind` is a function, use the arguments directly
+   |
+LL -     let my = monad_bind(mx, T: Try);
+LL +     let my = monad_bind(mx, Try);
+   |
+
+error: aborting due to previous error
+