Rollup merge of #110155 - DaniPopes:rest-typos, r=jyn514

Fix typos in librustdoc, tools and config files

I used [`typos`](https://github.com/crate-ci/typos) to fix all typos, minus the ones present in #110153 and in #110154.

Refs #110150
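
For context on the workflow: `typos` can read an optional `_typos.toml` (or `.typos.toml`) at the repository root, which is one way to skip paths or spellings that are being handled in a separate PR. The snippet below is only an illustrative sketch, not the configuration used for this commit; the excluded globs and words are hypothetical placeholders.

```toml
# _typos.toml -- hypothetical example, not part of this PR
[files]
# Skip paths whose typo fixes are tracked in other PRs (placeholder globs).
extend-exclude = ["compiler/**", "library/**"]

[default.extend-words]
# Mapping a word to itself marks it as accepted, so intentional
# spellings are not "corrected".
shrinked = "shrinked"
```

With such a config in place, running `typos --write-changes` applies the remaining fixes in place.
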
Michael Goulet authored on 2023-04-11 20:28:49 -07:00; committed by GitHub
commit e6975455d9
16 changed files with 74 additions and 73 deletions

View File

@ -257,7 +257,7 @@ changelog-seen = 2
#python = "python"
# The path to the REUSE executable to use. Note that REUSE is not required in
# most cases, as our tooling relies on a cached (and shrinked) copy of the
# most cases, as our tooling relies on a cached (and shrunk) copy of the
# REUSE output present in the git repository and in our source tarballs.
#
# REUSE is only needed if your changes caused the overall licensing of the

View File

@ -139,7 +139,7 @@ pub fn read_commit_info_file(root: &Path) -> Option<Info> {
sha: sha.to_owned(),
short_sha: short_sha.to_owned(),
},
_ => panic!("the `git-comit-info` file is malformed"),
_ => panic!("the `git-commit-info` file is malformed"),
};
Some(info)
} else {

View File

@ -643,7 +643,7 @@ Examples:
```rust
match foo {
foo => bar,
a_very_long_patten | another_pattern if an_expression() => {
a_very_long_pattern | another_pattern if an_expression() => {
no_room_for_this_expression()
}
foo => {

View File

@ -3,4 +3,4 @@
--------------------
The `-Z dump-mono-stats-format` compiler flag controls what file format to use for `-Z dump-mono-stats`.
The default is markdown; currently JSON is also supported. JSON can be useful for programatically manipulating the results (e.g. to find the item that took the longest to compile).
The default is markdown; currently JSON is also supported. JSON can be useful for programmatically manipulating the results (e.g. to find the item that took the longest to compile).

View File

@ -119,7 +119,7 @@
<SetProperty Sequence="ui" Before="CostFinalize"
Id="WixAppFolder" Value="WixPerUserFolder">NOT ALLUSERS</SetProperty>
<!-- UI sets ALLUSERS per user selection; progagate this choice to MSIINSTALLPERUSER before executing installation actions -->
<!-- UI sets ALLUSERS per user selection; propagate this choice to MSIINSTALLPERUSER before executing installation actions -->
<SetProperty Sequence="ui" Before="ExecuteAction"
Id="MSIINSTALLPERUSER" Value="1">NOT ALLUSERS</SetProperty>

View File

@ -230,7 +230,7 @@ pub(crate) struct RenderOptions {
pub(crate) extension_css: Option<PathBuf>,
/// A map of crate names to the URL to use instead of querying the crate's `html_root_url`.
pub(crate) extern_html_root_urls: BTreeMap<String, String>,
/// Whether to give precedence to `html_root_url` or `--exten-html-root-url`.
/// Whether to give precedence to `html_root_url` or `--extern-html-root-url`.
pub(crate) extern_html_root_takes_precedence: bool,
/// A map of the default settings (values are as for DOM storage API). Keys should lack the
/// `rustdoc-` prefix.

View File

@ -349,10 +349,10 @@ pub(crate) fn print_where_clause<'a, 'tcx: 'a>(
let mut br_with_padding = String::with_capacity(6 * indent + 28);
br_with_padding.push_str("\n");
let padding_amout =
let padding_amount =
if ending == Ending::Newline { indent + 4 } else { indent + "fn where ".len() };
for _ in 0..padding_amout {
for _ in 0..padding_amount {
br_with_padding.push_str(" ");
}
let where_preds = where_preds.to_string().replace('\n', &br_with_padding);

View File

@ -1796,10 +1796,11 @@ fn render_struct(
}
match ty {
None => {
let where_diplayed = g.map(|g| print_where_clause_and_check(w, g, cx)).unwrap_or(false);
let where_displayed =
g.map(|g| print_where_clause_and_check(w, g, cx)).unwrap_or(false);
// If there wasn't a `where` clause, we add a whitespace.
if !where_diplayed {
if !where_displayed {
w.write_str(" {");
} else {
w.write_str("{");

View File

@ -550,7 +550,7 @@ pub enum Type {
DynTrait(DynTrait),
/// Parameterized types
Generic(String),
/// Built in numberic (i*, u*, f*) types, bool, and char
/// Built in numeric (i*, u*, f*) types, bool, and char
Primitive(String),
/// `extern "ABI" fn`
FunctionPointer(Box<FunctionPointer>),

View File

@ -10,8 +10,8 @@
#[derive(serde::Serialize)]
#[serde(rename_all = "kebab-case", tag = "type")]
pub(crate) enum Node<L> {
Root { childs: Vec<Node<L>> },
Directory { name: PathBuf, childs: Vec<Node<L>>, license: Option<L> },
Root { children: Vec<Node<L>> },
Directory { name: PathBuf, children: Vec<Node<L>>, license: Option<L> },
File { name: PathBuf, license: L },
Group { files: Vec<PathBuf>, directories: Vec<PathBuf>, license: L },
Empty,
@ -48,14 +48,14 @@ pub(crate) fn simplify(&mut self) {
/// ```
fn merge_directories(&mut self) {
match self {
Node::Root { childs } | Node::Directory { childs, license: None, .. } => {
Node::Root { children } | Node::Directory { children, license: None, .. } => {
let mut directories = BTreeMap::new();
let mut files = Vec::new();
for child in childs.drain(..) {
for child in children.drain(..) {
match child {
Node::Directory { name, mut childs, license: None } => {
directories.entry(name).or_insert_with(Vec::new).append(&mut childs);
Node::Directory { name, mut children, license: None } => {
directories.entry(name).or_insert_with(Vec::new).append(&mut children);
}
file @ Node::File { .. } => {
files.push(file);
@ -73,14 +73,14 @@ fn merge_directories(&mut self) {
}
}
childs.extend(directories.into_iter().map(|(name, childs)| Node::Directory {
children.extend(directories.into_iter().map(|(name, children)| Node::Directory {
name,
childs,
children,
license: None,
}));
childs.append(&mut files);
children.append(&mut files);
for child in &mut *childs {
for child in &mut *children {
child.merge_directories();
}
}
@ -105,13 +105,13 @@ fn merge_directories(&mut self) {
/// our inclusion of LLVM.
fn collapse_in_licensed_directories(&mut self) {
match self {
Node::Directory { childs, license, .. } => {
for child in &mut *childs {
Node::Directory { children, license, .. } => {
for child in &mut *children {
child.collapse_in_licensed_directories();
}
let mut licenses_count = BTreeMap::new();
for child in &*childs {
for child in &*children {
let Some(license) = child.license() else { continue };
*licenses_count.entry(license).or_insert(0) += 1;
}
@ -122,12 +122,12 @@ fn collapse_in_licensed_directories(&mut self) {
.map(|(license, _)| license);
if let Some(most_popular_license) = most_popular_license {
childs.retain(|child| child.license() != Some(most_popular_license));
children.retain(|child| child.license() != Some(most_popular_license));
*license = Some(most_popular_license);
}
}
Node::Root { childs } => {
for child in &mut *childs {
Node::Root { children } => {
for child in &mut *children {
child.collapse_in_licensed_directories();
}
}
@ -138,29 +138,29 @@ fn collapse_in_licensed_directories(&mut self) {
}
/// Reduce the depth of the tree by merging subdirectories with the same license as their
/// parent directory into their parent, and adjusting the paths of the childs accordingly.
/// parent directory into their parent, and adjusting the paths of the children accordingly.
fn merge_directory_licenses(&mut self) {
match self {
Node::Root { childs } => {
for child in &mut *childs {
Node::Root { children } => {
for child in &mut *children {
child.merge_directory_licenses();
}
}
Node::Directory { childs, license, .. } => {
Node::Directory { children, license, .. } => {
let mut to_add = Vec::new();
for child in &mut *childs {
for child in &mut *children {
child.merge_directory_licenses();
let Node::Directory {
name: child_name,
childs: child_childs,
children: child_children,
license: child_license,
} = child else { continue };
if child_license != license {
continue;
}
for mut child_child in child_childs.drain(..) {
for mut child_child in child_children.drain(..) {
match &mut child_child {
Node::Root { .. } => {
panic!("can't have a root inside another element");
@ -181,7 +181,7 @@ fn merge_directory_licenses(&mut self) {
*child = Node::Empty;
}
childs.append(&mut to_add);
children.append(&mut to_add);
}
Node::Empty => {}
Node::File { .. } => {}
@ -203,14 +203,14 @@ struct Grouped {
directories: Vec<PathBuf>,
}
match self {
Node::Root { childs } | Node::Directory { childs, .. } => {
Node::Root { children } | Node::Directory { children, .. } => {
let mut grouped: BTreeMap<LicenseId, Grouped> = BTreeMap::new();
for child in &mut *childs {
for child in &mut *children {
child.merge_groups();
match child {
Node::Directory { name, childs, license: Some(license) } => {
if childs.is_empty() {
Node::Directory { name, children, license: Some(license) } => {
if children.is_empty() {
grouped
.entry(*license)
.or_insert_with(Grouped::default)
@ -234,16 +234,16 @@ struct Grouped {
for (license, mut grouped) in grouped.into_iter() {
if grouped.files.len() + grouped.directories.len() <= 1 {
if let Some(name) = grouped.files.pop() {
childs.push(Node::File { license, name });
children.push(Node::File { license, name });
} else if let Some(name) = grouped.directories.pop() {
childs.push(Node::Directory {
children.push(Node::Directory {
name,
childs: Vec::new(),
children: Vec::new(),
license: Some(license),
});
}
} else {
childs.push(Node::Group {
children.push(Node::Group {
license,
files: grouped.files,
directories: grouped.directories,
@ -261,11 +261,11 @@ struct Grouped {
/// sure to remove them from the tree.
fn remove_empty(&mut self) {
match self {
Node::Root { childs } | Node::Directory { childs, .. } => {
for child in &mut *childs {
Node::Root { children } | Node::Directory { children, .. } => {
for child in &mut *children {
child.remove_empty();
}
childs.retain(|child| !matches!(child, Node::Empty));
children.retain(|child| !matches!(child, Node::Empty));
}
Node::Group { .. } => {}
Node::File { .. } => {}
@ -275,7 +275,7 @@ fn remove_empty(&mut self) {
fn license(&self) -> Option<LicenseId> {
match self {
Node::Directory { childs, license: Some(license), .. } if childs.is_empty() => {
Node::Directory { children, license: Some(license), .. } if children.is_empty() => {
Some(*license)
}
Node::File { license, .. } => Some(*license),
@ -285,7 +285,7 @@ fn license(&self) -> Option<LicenseId> {
}
pub(crate) fn build(mut input: Vec<(PathBuf, LicenseId)>) -> Node<LicenseId> {
let mut childs = Vec::new();
let mut children = Vec::new();
// Ensure reproducibility of all future steps.
input.sort();
@ -295,15 +295,15 @@ pub(crate) fn build(mut input: Vec<(PathBuf, LicenseId)>) -> Node<LicenseId> {
for component in path.parent().unwrap_or_else(|| Path::new(".")).components().rev() {
node = Node::Directory {
name: component.as_os_str().into(),
childs: vec![node],
children: vec![node],
license: None,
};
}
childs.push(node);
children.push(node);
}
Node::Root { childs }
Node::Root { children }
}
/// Convert a `Node<LicenseId>` into a `Node<&License>`, expanding all interned license IDs with a
@ -313,14 +313,14 @@ pub(crate) fn expand_interned_licenses(
interner: &LicensesInterner,
) -> Node<&License> {
match node {
Node::Root { childs } => Node::Root {
childs: childs
Node::Root { children } => Node::Root {
children: children
.into_iter()
.map(|child| expand_interned_licenses(child, interner))
.collect(),
},
Node::Directory { name, childs, license } => Node::Directory {
childs: childs
Node::Directory { name, children, license } => Node::Directory {
children: children
.into_iter()
.map(|child| expand_interned_licenses(child, interner))
.collect(),

View File

@ -20,17 +20,17 @@ fn render_recursive(node: &Node, buffer: &mut Vec<u8>, depth: usize) -> Result<(
let prefix = std::iter::repeat("> ").take(depth + 1).collect::<String>();
match node {
Node::Root { childs } => {
for child in childs {
Node::Root { children } => {
for child in children {
render_recursive(child, buffer, depth)?;
}
}
Node::Directory { name, childs, license } => {
Node::Directory { name, children, license } => {
render_license(&prefix, std::iter::once(name), license, buffer)?;
if !childs.is_empty() {
if !children.is_empty() {
writeln!(buffer, "{prefix}")?;
writeln!(buffer, "{prefix}*Exceptions:*")?;
for child in childs {
for child in children {
writeln!(buffer, "{prefix}")?;
render_recursive(child, buffer, depth + 1)?;
}
@ -73,8 +73,8 @@ struct Metadata {
#[derive(serde::Deserialize)]
#[serde(rename_all = "kebab-case", tag = "type")]
pub(crate) enum Node {
Root { childs: Vec<Node> },
Directory { name: String, childs: Vec<Node>, license: License },
Root { children: Vec<Node> },
Directory { name: String, children: Vec<Node>, license: License },
File { name: String, license: License },
Group { files: Vec<String>, directories: Vec<String>, license: License },
}

View File

@ -237,7 +237,7 @@ fn check_command(command: Command, cache: &mut Cache) -> Result<(), CkError> {
// Serde json doesn't implement Ord or Hash for Value, so we must
// use a Vec here. While in theory that makes setwize equality
// O(n^2), in practice n will never be large enought to matter.
// O(n^2), in practice n will never be large enough to matter.
let expected_values =
values.iter().map(|v| string_to_value(v, cache)).collect::<Vec<_>>();
if expected_values.len() != got_values.len() {

View File

@ -1,6 +1,6 @@
use rustdoc_json_types::{Item, ItemEnum, ItemKind, ItemSummary};
/// A univeral way to represent an [`ItemEnum`] or [`ItemKind`]
/// A universal way to represent an [`ItemEnum`] or [`ItemKind`]
#[derive(Debug, Clone, Copy)]
pub(crate) enum Kind {
Module,
@ -53,7 +53,7 @@ pub fn can_appear_in_mod(self) -> bool {
Primitive => true,
ForeignType => true,
// FIXME(adotinthevoid): I'm not sure if these are corrent
// FIXME(adotinthevoid): I'm not sure if these are correct
Keyword => false,
OpaqueTy => false,
ProcAttribute => false,

View File

@ -72,7 +72,7 @@ fn main() -> Result<()> {
)
}
[sel] => eprintln!(
"{} not in index or paths, but refered to at '{}'",
"{} not in index or paths, but referred to at '{}'",
err.id.0,
json_find::to_jsonpath(&sel)
),
@ -85,12 +85,12 @@ fn main() -> Result<()> {
.collect::<Vec<_>>()
.join(", ");
eprintln!(
"{} not in index or paths, but refered to at {sels}",
"{} not in index or paths, but referred to at {sels}",
err.id.0
);
} else {
eprintln!(
"{} not in index or paths, but refered to at '{}' and {} more",
"{} not in index or paths, but referred to at '{}' and {} more",
err.id.0,
json_find::to_jsonpath(&sel),
sels.len() - 1,

View File

@ -86,7 +86,7 @@ def gh_url():
return os.environ['TOOLSTATE_ISSUES_API_URL']
def maybe_delink(message):
def maybe_remove_mention(message):
# type: (str) -> str
if os.environ.get('TOOLSTATE_SKIP_MENTIONS') is not None:
return message.replace("@", "")
@ -109,7 +109,7 @@ def issue(
else:
status_description = 'no longer builds'
request = json.dumps({
'body': maybe_delink(textwrap.dedent('''\
'body': maybe_remove_mention(textwrap.dedent('''\
Hello, this is your friendly neighborhood mergebot.
After merging PR {}, I observed that the tool {} {}.
A follow-up PR to the repository {} is needed to fix the fallout.
@ -285,7 +285,7 @@ try:
issue_url = gh_url() + '/{}/comments'.format(number)
response = urllib2.urlopen(urllib2.Request(
issue_url,
json.dumps({'body': maybe_delink(message)}).encode(),
json.dumps({'body': maybe_remove_mention(message)}).encode(),
{
'Authorization': 'token ' + github_token,
'Content-Type': 'application/json',

View File

@ -344,11 +344,11 @@ message = "Some changes occurred in `const_evaluatable.rs`"
cc = ["@BoxyUwU"]
[mentions."compiler/rustc_middle/src/ty/abstract_const.rs"]
message = "Some changes occured in `abstract_const.rs`"
message = "Some changes occurred in `abstract_const.rs`"
cc = ["@BoxyUwU"]
[mentions."compiler/rustc_ty_utils/src/consts.rs"]
message = "Some changes occured in `rustc_ty_utils::consts.rs`"
message = "Some changes occurred in `rustc_ty_utils::consts.rs`"
cc = ["@BoxyUwU"]
[mentions."compiler/rustc_trait_selection/src/solve/"]