diff --git a/config.example.toml b/config.example.toml
index b27a34e61c5..081b3638383 100644
--- a/config.example.toml
+++ b/config.example.toml
@@ -257,7 +257,7 @@ changelog-seen = 2
 #python = "python"
 
 # The path to the REUSE executable to use. Note that REUSE is not required in
-# most cases, as our tooling relies on a cached (and shrinked) copy of the
+# most cases, as our tooling relies on a cached (and shrunk) copy of the
 # REUSE output present in the git repository and in our source tarballs.
 #
 # REUSE is only needed if your changes caused the overall licensing of the
diff --git a/src/bootstrap/channel.rs b/src/bootstrap/channel.rs
index 390047f6fdc..c3e3fa009a6 100644
--- a/src/bootstrap/channel.rs
+++ b/src/bootstrap/channel.rs
@@ -139,7 +139,7 @@ pub fn read_commit_info_file(root: &Path) -> Option<Info> {
                 sha: sha.to_owned(),
                 short_sha: short_sha.to_owned(),
             },
-            _ => panic!("the `git-comit-info` file is malformed"),
+            _ => panic!("the `git-commit-info` file is malformed"),
         };
         Some(info)
     } else {
diff --git a/src/doc/style-guide/src/expressions.md b/src/doc/style-guide/src/expressions.md
index c7d0446dded..96f66c89c25 100644
--- a/src/doc/style-guide/src/expressions.md
+++ b/src/doc/style-guide/src/expressions.md
@@ -643,7 +643,7 @@ Examples:
 ```rust
 match foo {
     foo => bar,
-    a_very_long_patten | another_pattern if an_expression() => {
+    a_very_long_pattern | another_pattern if an_expression() => {
         no_room_for_this_expression()
     }
     foo => {
diff --git a/src/doc/unstable-book/src/compiler-flags/dump-mono-stats-format.md b/src/doc/unstable-book/src/compiler-flags/dump-mono-stats-format.md
index a497a75261f..05ffdcf201c 100644
--- a/src/doc/unstable-book/src/compiler-flags/dump-mono-stats-format.md
+++ b/src/doc/unstable-book/src/compiler-flags/dump-mono-stats-format.md
@@ -3,4 +3,4 @@
 --------------------
 
 The `-Z dump-mono-stats-format` compiler flag controls what file format to use for `-Z dump-mono-stats`.
-The default is markdown; currently JSON is also supported. JSON can be useful for programatically manipulating the results (e.g. to find the item that took the longest to compile).
+The default is markdown; currently JSON is also supported. JSON can be useful for programmatically manipulating the results (e.g. to find the item that took the longest to compile).
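(Context for the sentence fixed above: `-Z dump-mono-stats-format=json` pairs with `-Z dump-mono-stats`, and the JSON output is what you would post-process. A minimal sketch of that kind of post-processing, assuming the `serde_json` crate is available; the field names `name` and `total_estimate` are placeholders, not the documented schema of the dump.)

```rust
// Sketch only: rank the entries of a `-Z dump-mono-stats-format=json` dump by an
// assumed per-item cost field. Requires `serde_json`; the field names used below
// are illustrative guesses, so adjust them to whatever the real dump contains.
use std::{error::Error, fs};

fn main() -> Result<(), Box<dyn Error>> {
    // Path produced by e.g. `rustc -Zdump-mono-stats=. -Zdump-mono-stats-format=json ...`
    let raw = fs::read_to_string("my_crate.json")?;
    let dump: serde_json::Value = serde_json::from_str(&raw)?;

    let mut items: Vec<&serde_json::Value> =
        dump.as_array().map(|a| a.iter().collect()).unwrap_or_default();
    // Sort descending by the assumed cost field to approximate "longest to compile".
    items.sort_by_key(|item| std::cmp::Reverse(item["total_estimate"].as_u64().unwrap_or(0)));

    for item in items.iter().take(10) {
        println!("{} {}", item["name"], item["total_estimate"]);
    }
    Ok(())
}
```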
diff --git a/src/etc/installer/msi/rust.wxs b/src/etc/installer/msi/rust.wxs
index 9f4e4fd0611..f29e1e4d27a 100644
--- a/src/etc/installer/msi/rust.wxs
+++ b/src/etc/installer/msi/rust.wxs
@@ -119,7 +119,7 @@ NOT ALLUSERS - + NOT ALLUSERS
diff --git a/src/rustdoc-json-types/lib.rs b/src/rustdoc-json-types/lib.rs
index 4c210291b11..3cf8ceed620 100644
--- a/src/rustdoc-json-types/lib.rs
+++ b/src/rustdoc-json-types/lib.rs
@@ -550,7 +550,7 @@ pub enum Type {
     DynTrait(DynTrait),
     /// Parameterized types
     Generic(String),
-    /// Built in numberic (i*, u*, f*) types, bool, and char
+    /// Built in numeric (i*, u*, f*) types, bool, and char
     Primitive(String),
     /// `extern "ABI" fn`
     FunctionPointer(Box<FunctionPointer>),
diff --git a/src/tools/collect-license-metadata/src/path_tree.rs b/src/tools/collect-license-metadata/src/path_tree.rs
index 7a2a440636d..68b6cef6432 100644
--- a/src/tools/collect-license-metadata/src/path_tree.rs
+++ b/src/tools/collect-license-metadata/src/path_tree.rs
@@ -10,8 +10,8 @@
 #[derive(serde::Serialize)]
 #[serde(rename_all = "kebab-case", tag = "type")]
 pub(crate) enum Node<L> {
-    Root { childs: Vec<Node<L>> },
-    Directory { name: PathBuf, childs: Vec<Node<L>>, license: Option<L> },
+    Root { children: Vec<Node<L>> },
+    Directory { name: PathBuf, children: Vec<Node<L>>, license: Option<L> },
     File { name: PathBuf, license: L },
     Group { files: Vec<PathBuf>, directories: Vec<PathBuf>, license: L },
     Empty,
@@ -48,14 +48,14 @@ pub(crate) fn simplify(&mut self) {
     /// ```
     fn merge_directories(&mut self) {
         match self {
-            Node::Root { childs } | Node::Directory { childs, license: None, .. } => {
+            Node::Root { children } | Node::Directory { children, license: None, .. } => {
                 let mut directories = BTreeMap::new();
                 let mut files = Vec::new();
 
-                for child in childs.drain(..) {
+                for child in children.drain(..) {
                     match child {
-                        Node::Directory { name, mut childs, license: None } => {
-                            directories.entry(name).or_insert_with(Vec::new).append(&mut childs);
+                        Node::Directory { name, mut children, license: None } => {
+                            directories.entry(name).or_insert_with(Vec::new).append(&mut children);
                         }
                         file @ Node::File { .. } => {
                             files.push(file);
@@ -73,14 +73,14 @@ fn merge_directories(&mut self) {
                     }
                 }
 
-                childs.extend(directories.into_iter().map(|(name, childs)| Node::Directory {
+                children.extend(directories.into_iter().map(|(name, children)| Node::Directory {
                     name,
-                    childs,
+                    children,
                     license: None,
                 }));
-                childs.append(&mut files);
+                children.append(&mut files);
 
-                for child in &mut *childs {
+                for child in &mut *children {
                     child.merge_directories();
                 }
             }
@@ -105,13 +105,13 @@ fn merge_directories(&mut self) {
     /// our inclusion of LLVM.
     fn collapse_in_licensed_directories(&mut self) {
         match self {
-            Node::Directory { childs, license, .. } => {
-                for child in &mut *childs {
+            Node::Directory { children, license, .. } => {
+                for child in &mut *children {
                     child.collapse_in_licensed_directories();
                 }
 
                 let mut licenses_count = BTreeMap::new();
-                for child in &*childs {
+                for child in &*children {
                     let Some(license) = child.license() else { continue };
                     *licenses_count.entry(license).or_insert(0) += 1;
                 }
@@ -122,12 +122,12 @@ fn collapse_in_licensed_directories(&mut self) {
                     .map(|(license, _)| license);
 
                 if let Some(most_popular_license) = most_popular_license {
-                    childs.retain(|child| child.license() != Some(most_popular_license));
+                    children.retain(|child| child.license() != Some(most_popular_license));
                     *license = Some(most_popular_license);
                 }
             }
-            Node::Root { childs } => {
-                for child in &mut *childs {
+            Node::Root { children } => {
+                for child in &mut *children {
                     child.collapse_in_licensed_directories();
                 }
             }
@@ -138,29 +138,29 @@ fn collapse_in_licensed_directories(&mut self) {
     }
 
     /// Reduce the depth of the tree by merging subdirectories with the same license as their
-    /// parent directory into their parent, and adjusting the paths of the childs accordingly.
+    /// parent directory into their parent, and adjusting the paths of the children accordingly.
    fn merge_directory_licenses(&mut self) {
         match self {
-            Node::Root { childs } => {
-                for child in &mut *childs {
+            Node::Root { children } => {
+                for child in &mut *children {
                     child.merge_directory_licenses();
                 }
             }
-            Node::Directory { childs, license, .. } => {
+            Node::Directory { children, license, .. } => {
                 let mut to_add = Vec::new();
-                for child in &mut *childs {
+                for child in &mut *children {
                     child.merge_directory_licenses();
 
                     let Node::Directory {
                         name: child_name,
-                        childs: child_childs,
+                        children: child_children,
                         license: child_license,
                     } = child else { continue };
 
                     if child_license != license {
                         continue;
                     }
-                    for mut child_child in child_childs.drain(..) {
+                    for mut child_child in child_children.drain(..) {
                         match &mut child_child {
                             Node::Root { .. } => {
                                 panic!("can't have a root inside another element");
@@ -181,7 +181,7 @@ fn merge_directory_licenses(&mut self) {
                     *child = Node::Empty;
                 }
 
-                childs.append(&mut to_add);
+                children.append(&mut to_add);
             }
             Node::Empty => {}
             Node::File { .. } => {}
@@ -203,14 +203,14 @@ struct Grouped {
             directories: Vec<PathBuf>,
         }
 
         match self {
-            Node::Root { childs } | Node::Directory { childs, .. } => {
+            Node::Root { children } | Node::Directory { children, .. } => {
                 let mut grouped: BTreeMap<LicenseId, Grouped> = BTreeMap::new();
-                for child in &mut *childs {
+                for child in &mut *children {
                     child.merge_groups();
                     match child {
-                        Node::Directory { name, childs, license: Some(license) } => {
-                            if childs.is_empty() {
+                        Node::Directory { name, children, license: Some(license) } => {
+                            if children.is_empty() {
                                 grouped
                                     .entry(*license)
                                     .or_insert_with(Grouped::default)
@@ -234,16 +234,16 @@ struct Grouped {
                 for (license, mut grouped) in grouped.into_iter() {
                     if grouped.files.len() + grouped.directories.len() <= 1 {
                         if let Some(name) = grouped.files.pop() {
-                            childs.push(Node::File { license, name });
+                            children.push(Node::File { license, name });
                         } else if let Some(name) = grouped.directories.pop() {
-                            childs.push(Node::Directory {
+                            children.push(Node::Directory {
                                 name,
-                                childs: Vec::new(),
+                                children: Vec::new(),
                                 license: Some(license),
                             });
                         }
                     } else {
-                        childs.push(Node::Group {
+                        children.push(Node::Group {
                             license,
                             files: grouped.files,
                             directories: grouped.directories,
@@ -261,11 +261,11 @@ struct Grouped {
     /// sure to remove them from the tree.
     fn remove_empty(&mut self) {
         match self {
-            Node::Root { childs } | Node::Directory { childs, .. } => {
-                for child in &mut *childs {
+            Node::Root { children } | Node::Directory { children, .. } => {
+                for child in &mut *children {
                     child.remove_empty();
                 }
-                childs.retain(|child| !matches!(child, Node::Empty));
+                children.retain(|child| !matches!(child, Node::Empty));
             }
             Node::Group { .. } => {}
             Node::File { .. } => {}
@@ -275,7 +275,7 @@ fn remove_empty(&mut self) {
 
     fn license(&self) -> Option<LicenseId> {
         match self {
-            Node::Directory { childs, license: Some(license), .. } if childs.is_empty() => {
+            Node::Directory { children, license: Some(license), .. } if children.is_empty() => {
                 Some(*license)
             }
             Node::File { license, .. } => Some(*license),
@@ -285,7 +285,7 @@ fn license(&self) -> Option<LicenseId> {
 }
 
 pub(crate) fn build(mut input: Vec<(PathBuf, LicenseId)>) -> Node<LicenseId> {
-    let mut childs = Vec::new();
+    let mut children = Vec::new();
 
     // Ensure reproducibility of all future steps.
     input.sort();
@@ -295,15 +295,15 @@ pub(crate) fn build(mut input: Vec<(PathBuf, LicenseId)>) -> Node<LicenseId> {
         for component in path.parent().unwrap_or_else(|| Path::new(".")).components().rev() {
             node = Node::Directory {
                 name: component.as_os_str().into(),
-                childs: vec![node],
+                children: vec![node],
                 license: None,
             };
         }
 
-        childs.push(node);
+        children.push(node);
     }
 
-    Node::Root { childs }
+    Node::Root { children }
 }
 
 /// Convert a `Node` into a `Node<&License>`, expanding all interned license IDs with a
@@ -313,14 +313,14 @@ pub(crate) fn expand_interned_licenses(
     interner: &LicensesInterner,
 ) -> Node<&License> {
     match node {
-        Node::Root { childs } => Node::Root {
-            childs: childs
+        Node::Root { children } => Node::Root {
+            children: children
                 .into_iter()
                 .map(|child| expand_interned_licenses(child, interner))
                 .collect(),
        },
-        Node::Directory { name, childs, license } => Node::Directory {
-            childs: childs
+        Node::Directory { name, children, license } => Node::Directory {
+            children: children
                 .into_iter()
                 .map(|child| expand_interned_licenses(child, interner))
                 .collect(),
diff --git a/src/tools/generate-copyright/src/main.rs b/src/tools/generate-copyright/src/main.rs
index 4d116c7da65..60c77167613 100644
--- a/src/tools/generate-copyright/src/main.rs
+++ b/src/tools/generate-copyright/src/main.rs
@@ -20,17 +20,17 @@ fn render_recursive(node: &Node, buffer: &mut Vec<u8>, depth: usize) -> Result<(), Error> {
     let prefix = std::iter::repeat("> ").take(depth + 1).collect::<String>();
 
     match node {
-        Node::Root { childs } => {
-            for child in childs {
+        Node::Root { children } => {
+            for child in children {
                 render_recursive(child, buffer, depth)?;
             }
         }
-        Node::Directory { name, childs, license } => {
+        Node::Directory { name, children, license } => {
             render_license(&prefix, std::iter::once(name), license, buffer)?;
-            if !childs.is_empty() {
+            if !children.is_empty() {
                 writeln!(buffer, "{prefix}")?;
                 writeln!(buffer, "{prefix}*Exceptions:*")?;
-                for child in childs {
+                for child in children {
                     writeln!(buffer, "{prefix}")?;
                     render_recursive(child, buffer, depth + 1)?;
                 }
@@ -73,8 +73,8 @@ struct Metadata {
 #[derive(serde::Deserialize)]
 #[serde(rename_all = "kebab-case", tag = "type")]
 pub(crate) enum Node {
-    Root { childs: Vec<Node> },
-    Directory { name: String, childs: Vec<Node>, license: License },
+    Root { children: Vec<Node> },
+    Directory { name: String, children: Vec<Node>, license: License },
     File { name: String, license: License },
     Group { files: Vec<String>, directories: Vec<String>, license: License },
 }
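(Why the two `Node` definitions above change in lockstep: `collect-license-metadata` serializes the tree and `generate-copyright` deserializes it, and since `rename_all = "kebab-case"` only renames the variant tags, the `childs` to `children` field rename shows up verbatim in the JSON exchanged between the two tools. A rough sketch of that internally tagged shape; every concrete value below, including the layout of the license object, is invented for illustration.)

```rust
// Sketch of the JSON layout implied by `#[serde(rename_all = "kebab-case", tag = "type")]`.
// Paths and license contents are made up; only the tag and field names matter here.
fn example_tree() -> serde_json::Value {
    serde_json::json!({
        "type": "root",
        "children": [
            {
                "type": "directory",
                "name": "example-dir",
                "license": { "spdx": "MIT", "copyright": [] },
                "children": [
                    { "type": "file", "name": "example.rs", "license": { "spdx": "MIT", "copyright": [] } }
                ]
            }
        ]
    })
}
```

If the field name written by one tool and expected by the other ever disagreed, deserialization would presumably fail on the missing `children` field, which is why the rename touches both sides in the same patch.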
diff --git a/src/tools/jsondocck/src/main.rs b/src/tools/jsondocck/src/main.rs
index 76770fe36a7..e3d05ec8315 100644
--- a/src/tools/jsondocck/src/main.rs
+++ b/src/tools/jsondocck/src/main.rs
@@ -237,7 +237,7 @@ fn check_command(command: Command, cache: &mut Cache) -> Result<(), CkError> {
 
             // Serde json doesn't implement Ord or Hash for Value, so we must
             // use a Vec here. While in theory that makes setwize equality
-            // O(n^2), in practice n will never be large enought to matter.
+            // O(n^2), in practice n will never be large enough to matter.
             let expected_values =
                 values.iter().map(|v| string_to_value(v, cache)).collect::<Vec<_>>();
             if expected_values.len() != got_values.len() {
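(The comment fixed above is the rationale for the surrounding data structure: `serde_json::Value` implements neither `Ord` nor `Hash`, so both sides are kept in `Vec`s and compared by repeated linear scans. A self-contained sketch of such an O(n^2) multiset comparison, not the tool's actual code:)

```rust
// Multiset ("setwise") equality over values that are only PartialEq: for each expected
// value, remove one matching candidate from what is left. O(n^2), fine for small n.
fn setwise_eq(expected: &[serde_json::Value], got: &[serde_json::Value]) -> bool {
    if expected.len() != got.len() {
        return false;
    }
    let mut remaining: Vec<&serde_json::Value> = got.iter().collect();
    for value in expected {
        match remaining.iter().position(|candidate| *candidate == value) {
            Some(idx) => {
                remaining.swap_remove(idx);
            }
            None => return false,
        }
    }
    true
}
```

With the handful of values a single `@has`-style check compares, the quadratic scan is cheaper and simpler than wrapping `Value` in an order- or hash-preserving newtype.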
diff --git a/src/tools/jsondoclint/src/item_kind.rs b/src/tools/jsondoclint/src/item_kind.rs
index b395c6e7d2d..45a9c93ee0b 100644
--- a/src/tools/jsondoclint/src/item_kind.rs
+++ b/src/tools/jsondoclint/src/item_kind.rs
@@ -1,6 +1,6 @@
 use rustdoc_json_types::{Item, ItemEnum, ItemKind, ItemSummary};
 
-/// A univeral way to represent an [`ItemEnum`] or [`ItemKind`]
+/// A universal way to represent an [`ItemEnum`] or [`ItemKind`]
 #[derive(Debug, Clone, Copy)]
 pub(crate) enum Kind {
     Module,
@@ -53,7 +53,7 @@ pub fn can_appear_in_mod(self) -> bool {
             Primitive => true,
             ForeignType => true,
 
-            // FIXME(adotinthevoid): I'm not sure if these are corrent
+            // FIXME(adotinthevoid): I'm not sure if these are correct
             Keyword => false,
             OpaqueTy => false,
             ProcAttribute => false,
diff --git a/src/tools/jsondoclint/src/main.rs b/src/tools/jsondoclint/src/main.rs
index 05e938f4f7d..ee163ddfdd9 100644
--- a/src/tools/jsondoclint/src/main.rs
+++ b/src/tools/jsondoclint/src/main.rs
@@ -72,7 +72,7 @@ fn main() -> Result<()> {
                     )
                 }
                 [sel] => eprintln!(
-                    "{} not in index or paths, but refered to at '{}'",
+                    "{} not in index or paths, but referred to at '{}'",
                     err.id.0,
                     json_find::to_jsonpath(&sel)
                 ),
@@ -85,12 +85,12 @@ fn main() -> Result<()> {
                         .collect::<Vec<_>>()
                         .join(", ");
                     eprintln!(
-                        "{} not in index or paths, but refered to at {sels}",
+                        "{} not in index or paths, but referred to at {sels}",
                         err.id.0
                     );
                 } else {
                     eprintln!(
-                        "{} not in index or paths, but refered to at '{}' and {} more",
+                        "{} not in index or paths, but referred to at '{}' and {} more",
                         err.id.0,
                         json_find::to_jsonpath(&sel),
                         sels.len() - 1,
diff --git a/src/tools/publish_toolstate.py b/src/tools/publish_toolstate.py
index 395bcc745f8..bad86701dae 100755
--- a/src/tools/publish_toolstate.py
+++ b/src/tools/publish_toolstate.py
@@ -86,7 +86,7 @@ def gh_url():
     return os.environ['TOOLSTATE_ISSUES_API_URL']
 
 
-def maybe_delink(message):
+def maybe_unlink(message):
     # type: (str) -> str
     if os.environ.get('TOOLSTATE_SKIP_MENTIONS') is not None:
         return message.replace("@", "")
@@ -109,7 +109,7 @@ def issue(
     else:
         status_description = 'no longer builds'
     request = json.dumps({
-        'body': maybe_delink(textwrap.dedent('''\
+        'body': maybe_unlink(textwrap.dedent('''\
        Hello, this is your friendly neighborhood mergebot.
        After merging PR {}, I observed that the tool {} {}.
        A follow-up PR to the repository {} is needed to fix the fallout.
@@ -285,7 +285,7 @@ try:
         issue_url = gh_url() + '/{}/comments'.format(number)
         response = urllib2.urlopen(urllib2.Request(
             issue_url,
-            json.dumps({'body': maybe_delink(message)}).encode(),
+            json.dumps({'body': maybe_unlink(message)}).encode(),
             {
                 'Authorization': 'token ' + github_token,
                 'Content-Type': 'application/json',
diff --git a/triagebot.toml b/triagebot.toml
index 5f4de0562f8..58f51959e1f 100644
--- a/triagebot.toml
+++ b/triagebot.toml
@@ -344,11 +344,11 @@ message = "Some changes occurred in `const_evaluatable.rs`"
 cc = ["@BoxyUwU"]
 
 [mentions."compiler/rustc_middle/src/ty/abstract_const.rs"]
-message = "Some changes occured in `abstract_const.rs`"
+message = "Some changes occurred in `abstract_const.rs`"
 cc = ["@BoxyUwU"]
 
 [mentions."compiler/rustc_ty_utils/src/consts.rs"]
-message = "Some changes occured in `rustc_ty_utils::consts.rs`"
+message = "Some changes occurred in `rustc_ty_utils::consts.rs`"
 cc = ["@BoxyUwU"]
 
 [mentions."compiler/rustc_trait_selection/src/solve/"]