Merge from rustc

Ralf Jung 2022-11-05 12:35:40 +01:00
commit 83239c2c1e
755 changed files with 11790 additions and 8944 deletions

View File

@ -339,3 +339,53 @@ their own copyright notices and license terms:
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
OF SUCH DAMAGE.
* Portions of internationalization code use code or data from Unicode, which
carry the following license:
UNICODE, INC. LICENSE AGREEMENT - DATA FILES AND SOFTWARE
See Terms of Use <https://www.unicode.org/copyright.html>
for definitions of Unicode Inc.'s Data Files and Software.
NOTICE TO USER: Carefully read the following legal agreement.
BY DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING UNICODE INC.'S
DATA FILES ("DATA FILES"), AND/OR SOFTWARE ("SOFTWARE"),
YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE
TERMS AND CONDITIONS OF THIS AGREEMENT.
IF YOU DO NOT AGREE, DO NOT DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE
THE DATA FILES OR SOFTWARE.
COPYRIGHT AND PERMISSION NOTICE
Copyright © 1991-2022 Unicode, Inc. All rights reserved.
Distributed under the Terms of Use in https://www.unicode.org/copyright.html.
Permission is hereby granted, free of charge, to any person obtaining
a copy of the Unicode data files and any associated documentation
(the "Data Files") or Unicode software and any associated documentation
(the "Software") to deal in the Data Files or Software
without restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, and/or sell copies of
the Data Files or Software, and to permit persons to whom the Data Files
or Software are furnished to do so, provided that either
(a) this copyright and permission notice appear with all copies
of the Data Files or Software, or
(b) this copyright and permission notice appear in associated
Documentation.
THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT OF THIRD PARTY RIGHTS.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THE DATA FILES OR SOFTWARE.
Except as contained in this notice, the name of a copyright holder
shall not be used in advertising or otherwise to promote the sale,
use or other dealings in these Data Files or Software without prior
written authorization of the copyright holder.

View File

@ -427,7 +427,6 @@ dependencies = [
"glob",
"itertools",
"lazy_static",
"remove_dir_all",
"serde_json",
"snapbox",
"tar",
@ -449,7 +448,7 @@ dependencies = [
"jobserver",
"libc",
"log",
"miow",
"miow 0.4.0",
"same-file",
"shell-escape",
"tempfile",
@ -815,7 +814,8 @@ dependencies = [
"lazy_static",
"lazycell",
"libc",
"miow",
"miow 0.3.7",
"miropt-test-tools",
"regex",
"rustfix",
"serde",
@ -839,7 +839,7 @@ dependencies = [
"lazy_static",
"libc",
"log",
"miow",
"miow 0.3.7",
"regex",
"rustfix",
"serde",
@ -852,9 +852,9 @@ dependencies = [
[[package]]
name = "concolor"
version = "0.0.8"
version = "0.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "015267563b1df20adccdd00cb05257b1dfbea70a04928e9cf88ffb850c1a40af"
checksum = "b90f9dcd9490a97db91a85ccd79e38a87e14323f0bb824659ee3274e9143ba37"
dependencies = [
"atty",
"bitflags",
@ -863,9 +863,9 @@ dependencies = [
[[package]]
name = "concolor-query"
version = "0.0.5"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6417fe6fc03a8b533fd2177742eeb39a90c7233eedec7bac96d4d6b69a09449"
checksum = "82a90734b3d5dcf656e7624cca6bce9c3a90ee11f900e80141a7427ccfb3d317"
[[package]]
name = "content_inspector"
@ -1035,9 +1035,9 @@ dependencies = [
[[package]]
name = "curl"
version = "0.4.43"
version = "0.4.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37d855aeef205b43f65a5001e0997d81f8efca7badad4fad7d897aa7f0d0651f"
checksum = "509bd11746c7ac09ebd19f0b17782eae80aadee26237658a6b4808afb5c11a22"
dependencies = [
"curl-sys",
"libc",
@ -1050,9 +1050,9 @@ dependencies = [
[[package]]
name = "curl-sys"
version = "0.4.55+curl-7.83.1"
version = "0.4.59+curl-7.86.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23734ec77368ec583c2e61dd3f0b0e5c98b93abe6d2a004ca06b91dd7e3e2762"
checksum = "6cfce34829f448b08f55b7db6d0009e23e2e86a34e8c2b366269bf5799b4a407"
dependencies = [
"cc",
"libc",
@ -1153,6 +1153,17 @@ dependencies = [
"winapi",
]
[[package]]
name = "displaydoc"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3bf95dc3f046b9da4f2d51833c0d3547d8564ef6910f5c1ed130306a75b92886"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "dlmalloc"
version = "0.2.3"
@ -1514,11 +1525,11 @@ dependencies = [
[[package]]
name = "getrandom"
version = "0.1.14"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7abc8dd8451921606d809ba32e95b6111925cd2906060d2dcc29c070220503eb"
checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
dependencies = [
"cfg-if 0.1.10",
"cfg-if 1.0.0",
"libc",
"wasi 0.9.0+wasi-snapshot-preview1",
]
@ -1833,11 +1844,10 @@ dependencies = [
[[package]]
name = "intl_pluralrules"
version = "7.0.1"
version = "7.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b18f988384267d7066cc2be425e6faf352900652c046b6971d2e228d3b1c5ecf"
checksum = "078ea7b7c29a2b4df841a7f6ac8775ff6074020c6776d48491ce2268e068f972"
dependencies = [
"tinystr",
"unic-langid",
]
@ -2246,6 +2256,15 @@ dependencies = [
"winapi",
]
[[package]]
name = "miow"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7377f7792b3afb6a3cba68daa54ca23c032137010460d667fda53a8d66be00e"
dependencies = [
"windows-sys 0.28.0",
]
[[package]]
name = "miri"
version = "0.1.0"
@ -2268,6 +2287,13 @@ dependencies = [
"ui_test",
]
[[package]]
name = "miropt-test-tools"
version = "0.1.0"
dependencies = [
"regex",
]
[[package]]
name = "new_debug_unreachable"
version = "1.0.4"
@ -2460,7 +2486,7 @@ name = "panic_abort"
version = "0.0.0"
dependencies = [
"alloc",
"cfg-if 0.1.10",
"cfg-if 1.0.0",
"compiler_builtins",
"core",
"libc",
@ -2471,7 +2497,7 @@ name = "panic_unwind"
version = "0.0.0"
dependencies = [
"alloc",
"cfg-if 0.1.10",
"cfg-if 1.0.0",
"compiler_builtins",
"core",
"libc",
@ -2523,7 +2549,7 @@ dependencies = [
"libc",
"redox_syscall",
"smallvec",
"windows-sys",
"windows-sys 0.36.1",
]
[[package]]
@ -2799,7 +2825,7 @@ version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
dependencies = [
"getrandom 0.1.14",
"getrandom 0.1.16",
"libc",
"rand_chacha 0.2.2",
"rand_core 0.5.1",
@ -2843,7 +2869,7 @@ version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
dependencies = [
"getrandom 0.1.14",
"getrandom 0.1.16",
]
[[package]]
@ -3646,7 +3672,6 @@ dependencies = [
name = "rustc_interface"
version = "0.0.0"
dependencies = [
"libc",
"libloading",
"rustc-rayon",
"rustc-rayon-core",
@ -3689,7 +3714,6 @@ dependencies = [
"rustc_ty_utils",
"smallvec",
"tracing",
"winapi",
]
[[package]]
@ -4109,6 +4133,7 @@ name = "rustc_session"
version = "0.0.0"
dependencies = [
"getopts",
"libc",
"rustc_ast",
"rustc_data_structures",
"rustc_errors",
@ -4120,7 +4145,9 @@ dependencies = [
"rustc_serialize",
"rustc_span",
"rustc_target",
"smallvec",
"tracing",
"winapi",
]
[[package]]
@ -4610,9 +4637,9 @@ checksum = "da73c8f77aebc0e40c300b93f0a5f1bece7a248a36eee287d4e095f35c7b7d6e"
[[package]]
name = "snapbox"
version = "0.3.3"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44d199ccf8f606592df2d145db26f2aa45344e23c64b074cc5a4047f1d99b0f7"
checksum = "827c00e91b15e2674d8a5270bae91f898693cbf9561cbb58d8eaa31974597293"
dependencies = [
"concolor",
"content_inspector",
@ -4869,9 +4896,9 @@ checksum = "b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb"
[[package]]
name = "thin-vec"
version = "0.2.8"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "104c2cb3180b6fb6d5b2278768e9b88b578d32ba751ea6e8d026688a40d7ed87"
checksum = "ceb05e71730d396f960f8f3901cdb41be2d339b303e9d7d3a07c5ff0536e671b"
[[package]]
name = "thiserror"
@ -4919,7 +4946,9 @@ name = "tidy"
version = "0.1.0"
dependencies = [
"cargo_metadata 0.14.0",
"ignore",
"lazy_static",
"miropt-test-tools",
"regex",
"walkdir",
]
@ -4940,9 +4969,12 @@ dependencies = [
[[package]]
name = "tinystr"
version = "0.3.4"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29738eedb4388d9ea620eeab9384884fc3f06f586a2eddb56bedc5885126c7c1"
checksum = "f8aeafdfd935e4a7fe16a91ab711fa52d54df84f9c8f7ca5837a9d1d902ef4c2"
dependencies = [
"displaydoc",
]
[[package]]
name = "tinyvec"
@ -5178,9 +5210,9 @@ dependencies = [
[[package]]
name = "unic-langid"
version = "0.9.0"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73328fcd730a030bdb19ddf23e192187a6b01cd98be6d3140622a89129459ce5"
checksum = "398f9ad7239db44fd0f80fe068d12ff22d78354080332a5077dc6f52f14dcf2f"
dependencies = [
"unic-langid-impl",
"unic-langid-macros",
@ -5188,18 +5220,18 @@ dependencies = [
[[package]]
name = "unic-langid-impl"
version = "0.9.0"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a4a8eeaf0494862c1404c95ec2f4c33a2acff5076f64314b465e3ddae1b934d"
checksum = "e35bfd2f2b8796545b55d7d3fd3e89a0613f68a0d1c8bc28cb7ff96b411a35ff"
dependencies = [
"tinystr",
]
[[package]]
name = "unic-langid-macros"
version = "0.9.0"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "18f980d6d87e8805f2836d64b4138cc95aa7986fa63b1f51f67d5fbff64dd6e5"
checksum = "055e618bf694161ffff0466d95cef3e1a5edc59f6ba1888e97801f2b4ebdc4fe"
dependencies = [
"proc-macro-hack",
"tinystr",
@ -5209,9 +5241,9 @@ dependencies = [
[[package]]
name = "unic-langid-macros-impl"
version = "0.9.0"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29396ffd97e27574c3e01368b1a64267d3064969e4848e2e130ff668be9daa9f"
checksum = "1f5cdec05b907f4e2f6843f4354f4ce6a5bebe1a56df320a49134944477ce4d8"
dependencies = [
"proc-macro-hack",
"quote",
@ -5335,7 +5367,7 @@ name = "unwind"
version = "0.0.0"
dependencies = [
"cc",
"cfg-if 0.1.10",
"cfg-if 1.0.0",
"compiler_builtins",
"core",
"libc",
@ -5452,43 +5484,86 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-sys"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "82ca39602d5cbfa692c4b67e3bcbb2751477355141c1ed434c94da4186836ff6"
dependencies = [
"windows_aarch64_msvc 0.28.0",
"windows_i686_gnu 0.28.0",
"windows_i686_msvc 0.28.0",
"windows_x86_64_gnu 0.28.0",
"windows_x86_64_msvc 0.28.0",
]
[[package]]
name = "windows-sys"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2"
dependencies = [
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_msvc",
"windows_aarch64_msvc 0.36.1",
"windows_i686_gnu 0.36.1",
"windows_i686_msvc 0.36.1",
"windows_x86_64_gnu 0.36.1",
"windows_x86_64_msvc 0.36.1",
]
[[package]]
name = "windows_aarch64_msvc"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52695a41e536859d5308cc613b4a022261a274390b25bd29dfff4bf08505f3c2"
[[package]]
name = "windows_aarch64_msvc"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47"
[[package]]
name = "windows_i686_gnu"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f54725ac23affef038fecb177de6c9bf065787c2f432f79e3c373da92f3e1d8a"
[[package]]
name = "windows_i686_gnu"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6"
[[package]]
name = "windows_i686_msvc"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51d5158a43cc43623c0729d1ad6647e62fa384a3d135fd15108d37c683461f64"
[[package]]
name = "windows_i686_msvc"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024"
[[package]]
name = "windows_x86_64_gnu"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc31f409f565611535130cfe7ee8e6655d3fa99c1c61013981e491921b5ce954"
[[package]]
name = "windows_x86_64_gnu"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1"
[[package]]
name = "windows_x86_64_msvc"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f2b8c7cbd3bfdddd9ab98769f9746a7fad1bca236554cd032b78d768bc0e89f"
[[package]]
name = "windows_x86_64_msvc"
version = "0.36.1"

View File

@ -11,6 +11,7 @@ members = [
"src/tools/error_index_generator",
"src/tools/linkchecker",
"src/tools/lint-docs",
"src/tools/miropt-test-tools",
"src/tools/rustbook",
"src/tools/unstable-book-gen",
"src/tools/tidy",

View File

@ -6,7 +6,7 @@ Language
- [Error on `as` casts of enums with `#[non_exhaustive]` variants](https://github.com/rust-lang/rust/pull/92744/)
- [Stabilize `let else`](https://github.com/rust-lang/rust/pull/93628/)
- [Stabilize generic associated types (GATs)](https://github.com/rust-lang/rust/pull/96709/)
- [Add lints `let_underscore_drop`, `let_underscore_lock`, and `let_underscore_must_use` from Clippy](https://github.com/rust-lang/rust/pull/97739/)
- [Add lints `let_underscore_drop` and `let_underscore_lock` from Clippy](https://github.com/rust-lang/rust/pull/97739/)
- [Stabilize `break`ing from arbitrary labeled blocks ("label-break-value")](https://github.com/rust-lang/rust/pull/99332/)
- [Uninitialized integers, floats, and raw pointers are now considered immediate UB](https://github.com/rust-lang/rust/pull/98919/).
Usage of `MaybeUninit` is the correct way to work with uninitialized memory.
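To illustrate the note above, a minimal sketch (not part of this diff) of the `MaybeUninit` pattern the release notes point to; the names are placeholders:
```rust
use std::mem::MaybeUninit;

fn main() {
    // Immediate UB as of this release (do not do this):
    // let x: i32 = unsafe { std::mem::uninitialized() };

    // Correct: keep the storage wrapped in `MaybeUninit` until it is written.
    let mut slot = MaybeUninit::<i32>::uninit();
    slot.write(42); // initialize the storage
    // Only after the write is it sound to read the value out.
    let value = unsafe { slot.assume_init() };
    assert_eq!(value, 42);
}
```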
@ -87,6 +87,9 @@ Compatibility Notes
This strengthens the forward compatibility lint deprecated_cfg_attr_crate_type_name to deny.
- [`llvm-has-rust-patches` allows setting the build system to treat the LLVM as having Rust-specific patches](https://github.com/rust-lang/rust/pull/101072)
This option may need to be set for distributions that are building Rust with a patched LLVM via `llvm-config`, not the built-in LLVM.
- Combining three or more languages (e.g. Objective C, C++ and Rust) into one binary may hit linker limitations when using `lld`. For more information, see [issue 102754][102754].
[102754]: https://github.com/rust-lang/rust/issues/102754
Internal Changes
----------------

View File

@ -14,5 +14,5 @@ rustc_macros = { path = "../rustc_macros" }
rustc_serialize = { path = "../rustc_serialize" }
rustc_span = { path = "../rustc_span" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec = "0.2.8"
thin-vec = "0.2.9"
tracing = "0.1"

View File

@ -21,5 +21,5 @@ rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec = "0.2.8"
thin-vec = "0.2.9"
tracing = "0.1"

View File

@ -277,8 +277,9 @@ pub struct RegisterConflict<'a> {
pub struct SubTupleBinding<'a> {
#[primary_span]
#[label]
#[suggestion_verbose(
#[suggestion(
ast_lowering_sub_tuple_binding_suggestion,
style = "verbose",
code = "..",
applicability = "maybe-incorrect"
)]

View File

@ -191,7 +191,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
self.lower_angle_bracketed_parameter_data(data, param_mode, itctx)
}
GenericArgs::Parenthesized(ref data) => match parenthesized_generic_args {
ParenthesizedGenericArgs::Ok => self.lower_parenthesized_parameter_data(data),
ParenthesizedGenericArgs::Ok => {
self.lower_parenthesized_parameter_data(data, itctx)
}
ParenthesizedGenericArgs::Err => {
// Suggest replacing parentheses with angle brackets `Trait(params...)` to `Trait<params...>`
let sub = if !data.inputs.is_empty() {
@ -344,6 +346,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
fn lower_parenthesized_parameter_data(
&mut self,
data: &ParenthesizedArgs,
itctx: &ImplTraitContext,
) -> (GenericArgsCtor<'hir>, bool) {
// Switch to `PassThrough` mode for anonymous lifetimes; this
// means that we permit things like `&Ref<T>`, where `Ref` has
@ -355,6 +358,17 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
self.lower_ty_direct(ty, &ImplTraitContext::Disallowed(ImplTraitPosition::FnTraitParam))
}));
let output_ty = match output {
// Only allow `impl Trait` in return position. i.e.:
// ```rust
// fn f(_: impl Fn() -> impl Debug) -> impl Fn() -> impl Debug
// // disallowed --^^^^^^^^^^ allowed --^^^^^^^^^^
// ```
FnRetTy::Ty(ty)
if matches!(itctx, ImplTraitContext::ReturnPositionOpaqueTy { .. })
&& self.tcx.features().impl_trait_in_fn_trait_return =>
{
self.lower_ty(&ty, itctx)
}
FnRetTy::Ty(ty) => {
self.lower_ty(&ty, &ImplTraitContext::Disallowed(ImplTraitPosition::FnTraitReturn))
}

View File

@ -5,14 +5,14 @@ use rustc_infer::infer::region_constraints::Constraint;
use rustc_infer::infer::region_constraints::RegionConstraintData;
use rustc_infer::infer::RegionVariableOrigin;
use rustc_infer::infer::{InferCtxt, RegionResolutionError, SubregionOrigin, TyCtxtInferExt as _};
use rustc_infer::traits::{Normalized, ObligationCause, TraitEngine, TraitEngineExt};
use rustc_infer::traits::ObligationCause;
use rustc_middle::ty::error::TypeError;
use rustc_middle::ty::RegionVid;
use rustc_middle::ty::UniverseIndex;
use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable};
use rustc_span::Span;
use rustc_trait_selection::traits::query::type_op;
use rustc_trait_selection::traits::{SelectionContext, TraitEngineExt as _};
use rustc_trait_selection::traits::ObligationCtxt;
use rustc_traits::{type_op_ascribe_user_type_with_span, type_op_prove_predicate_with_cause};
use std::fmt;
@ -240,9 +240,9 @@ impl<'tcx> TypeOpInfo<'tcx> for PredicateQuery<'tcx> {
) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>> {
let (ref infcx, key, _) =
mbcx.infcx.tcx.infer_ctxt().build_with_canonical(cause.span, &self.canonical_query);
let mut fulfill_cx = <dyn TraitEngine<'_>>::new(infcx.tcx);
type_op_prove_predicate_with_cause(infcx, &mut *fulfill_cx, key, cause);
try_extract_error_from_fulfill_cx(fulfill_cx, infcx, placeholder_region, error_region)
let ocx = ObligationCtxt::new(infcx);
type_op_prove_predicate_with_cause(&ocx, key, cause);
try_extract_error_from_fulfill_cx(&ocx, placeholder_region, error_region)
}
}
@ -281,9 +281,7 @@ where
) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>> {
let (ref infcx, key, _) =
mbcx.infcx.tcx.infer_ctxt().build_with_canonical(cause.span, &self.canonical_query);
let mut fulfill_cx = <dyn TraitEngine<'_>>::new(infcx.tcx);
let mut selcx = SelectionContext::new(infcx);
let ocx = ObligationCtxt::new(infcx);
// FIXME(lqd): Unify and de-duplicate the following with the actual
// `rustc_traits::type_op::type_op_normalize` query to allow the span we need in the
@ -292,11 +290,9 @@ where
// to normalize the `nll/relate_tys/impl-fn-ignore-binder-via-bottom.rs` test. Check
// after #85499 lands to see if its fixes have erased this difference.
let (param_env, value) = key.into_parts();
let Normalized { value: _, obligations } =
rustc_trait_selection::traits::normalize(&mut selcx, param_env, cause, value.value);
fulfill_cx.register_predicate_obligations(infcx, obligations);
let _ = ocx.normalize(cause, param_env, value.value);
try_extract_error_from_fulfill_cx(fulfill_cx, infcx, placeholder_region, error_region)
try_extract_error_from_fulfill_cx(&ocx, placeholder_region, error_region)
}
}
@ -329,9 +325,9 @@ impl<'tcx> TypeOpInfo<'tcx> for AscribeUserTypeQuery<'tcx> {
) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>> {
let (ref infcx, key, _) =
mbcx.infcx.tcx.infer_ctxt().build_with_canonical(cause.span, &self.canonical_query);
let mut fulfill_cx = <dyn TraitEngine<'_>>::new(infcx.tcx);
type_op_ascribe_user_type_with_span(infcx, &mut *fulfill_cx, key, Some(cause.span)).ok()?;
try_extract_error_from_fulfill_cx(fulfill_cx, infcx, placeholder_region, error_region)
let ocx = ObligationCtxt::new(infcx);
type_op_ascribe_user_type_with_span(&ocx, key, Some(cause.span)).ok()?;
try_extract_error_from_fulfill_cx(&ocx, placeholder_region, error_region)
}
}
@ -372,25 +368,24 @@ impl<'tcx> TypeOpInfo<'tcx> for crate::type_check::InstantiateOpaqueType<'tcx> {
}
}
#[instrument(skip(fulfill_cx, infcx), level = "debug")]
#[instrument(skip(ocx), level = "debug")]
fn try_extract_error_from_fulfill_cx<'tcx>(
mut fulfill_cx: Box<dyn TraitEngine<'tcx> + 'tcx>,
infcx: &InferCtxt<'tcx>,
ocx: &ObligationCtxt<'_, 'tcx>,
placeholder_region: ty::Region<'tcx>,
error_region: Option<ty::Region<'tcx>>,
) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>> {
// We generally shouldn't have errors here because the query was
// already run, but there's no point using `delay_span_bug`
// when we're going to emit an error here anyway.
let _errors = fulfill_cx.select_all_or_error(infcx);
let region_constraints = infcx.with_region_constraints(|r| r.clone());
let _errors = ocx.select_all_or_error();
let region_constraints = ocx.infcx.with_region_constraints(|r| r.clone());
try_extract_error_from_region_constraints(
infcx,
ocx.infcx,
placeholder_region,
error_region,
&region_constraints,
|vid| infcx.region_var_origin(vid),
|vid| infcx.universe_of_region(infcx.tcx.mk_region(ty::ReVar(vid))),
|vid| ocx.infcx.region_var_origin(vid),
|vid| ocx.infcx.universe_of_region(ocx.infcx.tcx.mk_region(ty::ReVar(vid))),
)
}

View File

@ -23,7 +23,6 @@ use rustc_span::hygiene::DesugaringKind;
use rustc_span::symbol::sym;
use rustc_span::{BytePos, Span, Symbol};
use rustc_trait_selection::infer::InferCtxtExt;
use rustc_trait_selection::traits::TraitEngineExt as _;
use crate::borrow_set::TwoPhaseActivation;
use crate::borrowck_errors;
@ -613,24 +612,20 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
else { return; };
// Try to find predicates on *generic params* that would allow copying `ty`
let infcx = tcx.infer_ctxt().build();
let mut fulfill_cx = <dyn rustc_infer::traits::TraitEngine<'_>>::new(infcx.tcx);
let copy_did = infcx.tcx.lang_items().copy_trait().unwrap();
let cause = ObligationCause::new(
span,
self.mir_hir_id(),
rustc_infer::traits::ObligationCauseCode::MiscObligation,
);
fulfill_cx.register_bound(
let errors = rustc_trait_selection::traits::fully_solve_bound(
&infcx,
cause,
self.param_env,
// Erase any region vids from the type, which may not be resolved
infcx.tcx.erase_regions(ty),
copy_did,
cause,
);
// Select all, including ambiguous predicates
let errors = fulfill_cx.select_all_or_error(&infcx);
// Only emit suggestion if all required predicates are on generic
let predicates: Result<Vec<_>, _> = errors

View File

@ -2314,7 +2314,7 @@ impl<'tcx> ClosureRegionRequirementsExt<'tcx> for ClosureRegionRequirements<'tcx
tcx,
closure_substs,
self.num_external_vids,
tcx.typeck_root_def_id(closure_def_id),
closure_def_id.expect_local(),
);
debug!("apply_requirements: closure_mapping={:?}", closure_mapping);

View File

@ -4,7 +4,7 @@ use rustc_hir::def_id::LocalDefId;
use rustc_hir::OpaqueTyOrigin;
use rustc_infer::infer::TyCtxtInferExt as _;
use rustc_infer::infer::{DefiningAnchor, InferCtxt};
use rustc_infer::traits::{Obligation, ObligationCause, TraitEngine};
use rustc_infer::traits::{Obligation, ObligationCause};
use rustc_middle::ty::subst::{GenericArgKind, InternalSubsts};
use rustc_middle::ty::visit::TypeVisitable;
use rustc_middle::ty::{
@ -12,7 +12,7 @@ use rustc_middle::ty::{
};
use rustc_span::Span;
use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt as _;
use rustc_trait_selection::traits::TraitEngineExt as _;
use rustc_trait_selection::traits::ObligationCtxt;
use super::RegionInferenceContext;
@ -252,48 +252,45 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
// type-alias-impl-trait/issue-67844-nested-opaque.rs
let infcx =
self.tcx.infer_ctxt().with_opaque_type_inference(DefiningAnchor::Bubble).build();
let ocx = ObligationCtxt::new(&infcx);
// Require the hidden type to be well-formed with only the generics of the opaque type.
// Defining use functions may have more bounds than the opaque type, which is ok, as long as the
// hidden type is well formed even without those bounds.
let predicate = ty::Binder::dummy(ty::PredicateKind::WellFormed(definition_ty.into()))
.to_predicate(infcx.tcx);
let mut fulfillment_cx = <dyn TraitEngine<'tcx>>::new(infcx.tcx);
let id_substs = InternalSubsts::identity_for_item(self.tcx, def_id.to_def_id());
// Require that the hidden type actually fulfills all the bounds of the opaque type, even without
// the bounds that the function supplies.
let opaque_ty = self.tcx.mk_opaque(def_id.to_def_id(), id_substs);
match infcx
.at(&ObligationCause::misc(instantiated_ty.span, body_id), param_env)
.eq(opaque_ty, definition_ty)
{
Ok(infer_ok) => {
for obligation in infer_ok.obligations {
fulfillment_cx.register_predicate_obligation(&infcx, obligation);
}
}
Err(err) => {
infcx
.err_ctxt()
.report_mismatched_types(
&ObligationCause::misc(instantiated_ty.span, body_id),
opaque_ty,
definition_ty,
err,
)
.emit();
}
if let Err(err) = ocx.eq(
&ObligationCause::misc(instantiated_ty.span, body_id),
param_env,
opaque_ty,
definition_ty,
) {
infcx
.err_ctxt()
.report_mismatched_types(
&ObligationCause::misc(instantiated_ty.span, body_id),
opaque_ty,
definition_ty,
err,
)
.emit();
}
fulfillment_cx.register_predicate_obligation(
&infcx,
Obligation::misc(instantiated_ty.span, body_id, param_env, predicate),
);
ocx.register_obligation(Obligation::misc(
instantiated_ty.span,
body_id,
param_env,
predicate,
));
// Check that all obligations are satisfied by the implementation's
// version.
let errors = fulfillment_cx.select_all_or_error(&infcx);
let errors = ocx.select_all_or_error();
// This is still required for many (half of the tests in ui/type-alias-impl-trait)
// tests to pass

View File

@ -49,7 +49,7 @@ pub(crate) struct GenericDoesNotLiveLongEnough {
#[derive(LintDiagnostic)]
#[diag(borrowck_var_does_not_need_mut)]
pub(crate) struct VarNeedNotMut {
#[suggestion_short(applicability = "machine-applicable", code = "")]
#[suggestion(style = "short", applicability = "machine-applicable", code = "")]
pub span: Span,
}
#[derive(Diagnostic)]

View File

@ -22,7 +22,9 @@ use rustc_hir::{BodyOwnerKind, HirId};
use rustc_index::vec::{Idx, IndexVec};
use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin};
use rustc_middle::ty::fold::TypeFoldable;
use rustc_middle::ty::{self, InlineConstSubsts, InlineConstSubstsParts, RegionVid, Ty, TyCtxt};
use rustc_middle::ty::{
self, DefIdTree, InlineConstSubsts, InlineConstSubstsParts, RegionVid, Ty, TyCtxt,
};
use rustc_middle::ty::{InternalSubsts, SubstsRef};
use std::iter;
@ -241,7 +243,7 @@ impl<'tcx> UniversalRegions<'tcx> {
tcx: TyCtxt<'tcx>,
closure_substs: SubstsRef<'tcx>,
expected_num_vars: usize,
typeck_root_def_id: DefId,
closure_def_id: LocalDefId,
) -> IndexVec<RegionVid, ty::Region<'tcx>> {
let mut region_mapping = IndexVec::with_capacity(expected_num_vars);
region_mapping.push(tcx.lifetimes.re_static);
@ -249,7 +251,7 @@ impl<'tcx> UniversalRegions<'tcx> {
region_mapping.push(fr);
});
for_each_late_bound_region_defined_on(tcx, typeck_root_def_id, |r| {
for_each_late_bound_region_in_recursive_scope(tcx, tcx.local_parent(closure_def_id), |r| {
region_mapping.push(r);
});
@ -339,9 +341,8 @@ impl<'tcx> UniversalRegions<'tcx> {
// tests, and the resulting print-outs include def-ids
// and other things that are not stable across tests!
// So we just include the region-vid. Annoying.
let typeck_root_def_id = tcx.typeck_root_def_id(def_id);
for_each_late_bound_region_defined_on(tcx, typeck_root_def_id, |r| {
err.note(&format!("late-bound region is {:?}", self.to_region_vid(r),));
for_each_late_bound_region_in_recursive_scope(tcx, def_id.expect_local(), |r| {
err.note(&format!("late-bound region is {:?}", self.to_region_vid(r)));
});
}
DefiningTy::Generator(def_id, substs, _) => {
@ -354,9 +355,8 @@ impl<'tcx> UniversalRegions<'tcx> {
// FIXME: As above, we'd like to print out the region
// `r` but doing so is not stable across architectures
// and so forth.
let typeck_root_def_id = tcx.typeck_root_def_id(def_id);
for_each_late_bound_region_defined_on(tcx, typeck_root_def_id, |r| {
err.note(&format!("late-bound region is {:?}", self.to_region_vid(r),));
for_each_late_bound_region_in_recursive_scope(tcx, def_id.expect_local(), |r| {
err.note(&format!("late-bound region is {:?}", self.to_region_vid(r)));
});
}
DefiningTy::FnDef(def_id, substs) => {
@ -421,13 +421,24 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
first_extern_index
} else {
// If this is a closure, generator, or inline-const, then the late-bound regions from the enclosing
// function are actually external regions to us. For example, here, 'a is not local
// function/closures are actually external regions to us. For example, here, 'a is not local
// to the closure c (although it is local to the fn foo):
// fn foo<'a>() {
// let c = || { let x: &'a u32 = ...; }
// }
self.infcx
.replace_late_bound_regions_with_nll_infer_vars(self.mir_def.did, &mut indices);
for_each_late_bound_region_in_recursive_scope(
self.infcx.tcx,
self.infcx.tcx.local_parent(self.mir_def.did),
|r| {
debug!(?r);
if !indices.indices.contains_key(&r) {
let region_vid = self.infcx.next_nll_region_var(FR);
debug!(?region_vid);
indices.insert_late_bound_region(r, region_vid.to_region_vid());
}
},
);
// Any regions created during the execution of `defining_ty` or during the above
// late-bound region replacement are all considered 'extern' regions
self.infcx.num_region_vars()
@ -444,12 +455,16 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
bound_inputs_and_output,
&mut indices,
);
// Converse of above, if this is a function then the late-bound regions declared on its
// signature are local to the fn.
if self.mir_def.did.to_def_id() == typeck_root_def_id {
self.infcx
.replace_late_bound_regions_with_nll_infer_vars(self.mir_def.did, &mut indices);
}
// Converse of above, if this is a function/closure then the late-bound regions declared on its
// signature are local.
for_each_late_bound_region_in_item(self.infcx.tcx, self.mir_def.did, |r| {
debug!(?r);
if !indices.indices.contains_key(&r) {
let region_vid = self.infcx.next_nll_region_var(FR);
debug!(?region_vid);
indices.insert_late_bound_region(r, region_vid.to_region_vid());
}
});
let (unnormalized_output_ty, mut unnormalized_input_tys) =
inputs_and_output.split_last().unwrap();
@ -692,7 +707,13 @@ trait InferCtxtExt<'tcx> {
where
T: TypeFoldable<'tcx>;
fn replace_late_bound_regions_with_nll_infer_vars(
fn replace_late_bound_regions_with_nll_infer_vars_in_recursive_scope(
&self,
mir_def_id: LocalDefId,
indices: &mut UniversalRegionIndices<'tcx>,
);
fn replace_late_bound_regions_with_nll_infer_vars_in_item(
&self,
mir_def_id: LocalDefId,
indices: &mut UniversalRegionIndices<'tcx>,
@ -746,13 +767,28 @@ impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
/// set of late-bound regions and checks for any that we have not yet seen, adding them to the
/// inputs vector.
#[instrument(skip(self, indices))]
fn replace_late_bound_regions_with_nll_infer_vars(
fn replace_late_bound_regions_with_nll_infer_vars_in_recursive_scope(
&self,
mir_def_id: LocalDefId,
indices: &mut UniversalRegionIndices<'tcx>,
) {
let typeck_root_def_id = self.tcx.typeck_root_def_id(mir_def_id.to_def_id());
for_each_late_bound_region_defined_on(self.tcx, typeck_root_def_id, |r| {
for_each_late_bound_region_in_recursive_scope(self.tcx, mir_def_id, |r| {
debug!(?r);
if !indices.indices.contains_key(&r) {
let region_vid = self.next_nll_region_var(FR);
debug!(?region_vid);
indices.insert_late_bound_region(r, region_vid.to_region_vid());
}
});
}
#[instrument(skip(self, indices))]
fn replace_late_bound_regions_with_nll_infer_vars_in_item(
&self,
mir_def_id: LocalDefId,
indices: &mut UniversalRegionIndices<'tcx>,
) {
for_each_late_bound_region_in_item(self.tcx, mir_def_id, |r| {
debug!(?r);
if !indices.indices.contains_key(&r) {
let region_vid = self.next_nll_region_var(FR);
@ -803,21 +839,44 @@ impl<'tcx> UniversalRegionIndices<'tcx> {
}
}
/// Iterates over the late-bound regions defined on fn_def_id and
/// invokes `f` with the liberated form of each one.
fn for_each_late_bound_region_defined_on<'tcx>(
/// Iterates over the late-bound regions defined on `mir_def_id` and all of its
/// parents, up to the typeck root, and invokes `f` with the liberated form
/// of each one.
fn for_each_late_bound_region_in_recursive_scope<'tcx>(
tcx: TyCtxt<'tcx>,
fn_def_id: DefId,
mut mir_def_id: LocalDefId,
mut f: impl FnMut(ty::Region<'tcx>),
) {
if let Some(late_bounds) = tcx.is_late_bound_map(fn_def_id.expect_local()) {
for &region_def_id in late_bounds.iter() {
let name = tcx.item_name(region_def_id.to_def_id());
let liberated_region = tcx.mk_region(ty::ReFree(ty::FreeRegion {
scope: fn_def_id,
bound_region: ty::BoundRegionKind::BrNamed(region_def_id.to_def_id(), name),
}));
f(liberated_region);
let typeck_root_def_id = tcx.typeck_root_def_id(mir_def_id.to_def_id());
// Walk up the tree, collecting late-bound regions until we hit the typeck root
loop {
for_each_late_bound_region_in_item(tcx, mir_def_id, &mut f);
if mir_def_id.to_def_id() == typeck_root_def_id {
break;
} else {
mir_def_id = tcx.local_parent(mir_def_id);
}
}
}
/// Iterates over the late-bound regions defined on `mir_def_id` itself and
/// invokes `f` with the liberated form of each one.
fn for_each_late_bound_region_in_item<'tcx>(
tcx: TyCtxt<'tcx>,
mir_def_id: LocalDefId,
mut f: impl FnMut(ty::Region<'tcx>),
) {
if !tcx.def_kind(mir_def_id).is_fn_like() {
return;
}
for bound_var in tcx.late_bound_vars(tcx.hir().local_def_id_to_hir_id(mir_def_id)) {
let ty::BoundVariableKind::Region(bound_region) = bound_var else { continue; };
let liberated_region = tcx
.mk_region(ty::ReFree(ty::FreeRegion { scope: mir_def_id.to_def_id(), bound_region }));
f(liberated_region);
}
}

View File

@ -23,5 +23,5 @@ rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
thin-vec = "0.2.8"
thin-vec = "0.2.9"
tracing = "0.1"

View File

@ -0,0 +1,104 @@
use crate::util::check_builtin_macro_attribute;
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, FnHeader, FnSig, Generics, StmtKind};
use rustc_ast::{Fn, ItemKind, Stmt, TyKind, Unsafe};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::Span;
use thin_vec::thin_vec;
pub fn expand(
ecx: &mut ExtCtxt<'_>,
_span: Span,
meta_item: &ast::MetaItem,
item: Annotatable,
) -> Vec<Annotatable> {
check_builtin_macro_attribute(ecx, meta_item, sym::alloc_error_handler);
let orig_item = item.clone();
let not_function = || {
ecx.sess
.parse_sess
.span_diagnostic
.span_err(item.span(), "alloc_error_handler must be a function");
vec![orig_item.clone()]
};
// Allow using `#[alloc_error_handler]` on an item statement
// FIXME - if we get deref patterns, use them to reduce duplication here
let (item, is_stmt, sig_span) = match &item {
Annotatable::Item(item) => match item.kind {
ItemKind::Fn(ref fn_kind) => (item, false, ecx.with_def_site_ctxt(fn_kind.sig.span)),
_ => return not_function(),
},
Annotatable::Stmt(stmt) => match &stmt.kind {
StmtKind::Item(item_) => match item_.kind {
ItemKind::Fn(ref fn_kind) => {
(item_, true, ecx.with_def_site_ctxt(fn_kind.sig.span))
}
_ => return not_function(),
},
_ => return not_function(),
},
_ => return not_function(),
};
// Generate a bunch of new items using the AllocFnFactory
let span = ecx.with_def_site_ctxt(item.span);
// Generate item statements for the allocator methods.
let stmts = vec![generate_handler(ecx, item.ident, span, sig_span)];
// Generate anonymous constant serving as container for the allocator methods.
let const_ty = ecx.ty(sig_span, TyKind::Tup(Vec::new()));
let const_body = ecx.expr_block(ecx.block(span, stmts));
let const_item = ecx.item_const(span, Ident::new(kw::Underscore, span), const_ty, const_body);
let const_item = if is_stmt {
Annotatable::Stmt(P(ecx.stmt_item(span, const_item)))
} else {
Annotatable::Item(const_item)
};
// Return the original item and the new methods.
vec![orig_item, const_item]
}
// #[rustc_std_internal_symbol]
// unsafe fn __rg_oom(size: usize, align: usize) -> ! {
// handler(core::alloc::Layout::from_size_align_unchecked(size, align))
// }
fn generate_handler(cx: &ExtCtxt<'_>, handler: Ident, span: Span, sig_span: Span) -> Stmt {
let usize = cx.path_ident(span, Ident::new(sym::usize, span));
let ty_usize = cx.ty_path(usize);
let size = Ident::from_str_and_span("size", span);
let align = Ident::from_str_and_span("align", span);
let layout_new = cx.std_path(&[sym::alloc, sym::Layout, sym::from_size_align_unchecked]);
let layout_new = cx.expr_path(cx.path(span, layout_new));
let layout =
cx.expr_call(span, layout_new, vec![cx.expr_ident(span, size), cx.expr_ident(span, align)]);
let call = cx.expr_call_ident(sig_span, handler, vec![layout]);
let never = ast::FnRetTy::Ty(cx.ty(span, TyKind::Never));
let params = vec![cx.param(span, size, ty_usize.clone()), cx.param(span, align, ty_usize)];
let decl = cx.fn_decl(params, never);
let header = FnHeader { unsafety: Unsafe::Yes(span), ..FnHeader::default() };
let sig = FnSig { decl, header, span: span };
let body = Some(cx.block_expr(call));
let kind = ItemKind::Fn(Box::new(Fn {
defaultness: ast::Defaultness::Final,
sig,
generics: Generics::default(),
body,
}));
let special = sym::rustc_std_internal_symbol;
let special = cx.meta_word(span, special);
let attrs = thin_vec![cx.attribute(special)];
let item = cx.item(span, Ident::from_str_and_span("__rg_oom", span), attrs, kind);
cx.stmt_item(sig_span, item)
}
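For context, a minimal sketch (not part of this diff) of how the attribute implemented above is used on nightly; crate setup details (e.g. `no_std`) are elided, and the handler name is arbitrary. The macro rewrites such a handler into the `__rg_oom` shim shown in the comment, which reconstructs the `Layout` from the raw `size`/`align` pair:
```rust
#![feature(alloc_error_handler)]

// User-written allocation-error handler invoked when an allocation fails.
#[alloc_error_handler]
fn oom(layout: core::alloc::Layout) -> ! {
    panic!("allocation of {} bytes failed", layout.size());
}

fn main() {}
```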

View File

@ -25,6 +25,7 @@ use rustc_expand::base::{MacroExpanderFn, ResolverExpand, SyntaxExtensionKind};
use rustc_expand::proc_macro::BangProcMacro;
use rustc_span::symbol::sym;
mod alloc_error_handler;
mod assert;
mod cfg;
mod cfg_accessible;
@ -94,6 +95,7 @@ pub fn register_builtin_macros(resolver: &mut dyn ResolverExpand) {
}
register_attr! {
alloc_error_handler: alloc_error_handler::expand,
bench: test::expand_bench,
cfg_accessible: cfg_accessible::Expander,
cfg_eval: cfg_eval::expand,

View File

@ -5,6 +5,7 @@ use crate::prelude::*;
use rustc_ast::expand::allocator::{AllocatorKind, AllocatorTy, ALLOCATOR_METHODS};
use rustc_session::config::OomStrategy;
use rustc_span::symbol::sym;
/// Returns whether an allocator shim was created
pub(crate) fn codegen(
@ -23,7 +24,7 @@ pub(crate) fn codegen(
module,
unwind_context,
kind,
tcx.lang_items().oom().is_some(),
tcx.alloc_error_handler_kind(()).unwrap(),
tcx.sess.opts.unstable_opts.oom,
);
true
@ -36,7 +37,7 @@ fn codegen_inner(
module: &mut impl Module,
unwind_context: &mut UnwindContext,
kind: AllocatorKind,
has_alloc_error_handler: bool,
alloc_error_handler_kind: AllocatorKind,
oom_strategy: OomStrategy,
) {
let usize_ty = module.target_config().pointer_type();
@ -108,12 +109,12 @@ fn codegen_inner(
returns: vec![],
};
let callee_name = if has_alloc_error_handler { "__rg_oom" } else { "__rdl_oom" };
let callee_name = alloc_error_handler_kind.fn_name(sym::oom);
let func_id =
module.declare_function("__rust_alloc_error_handler", Linkage::Export, &sig).unwrap();
let callee_func_id = module.declare_function(callee_name, Linkage::Import, &sig).unwrap();
let callee_func_id = module.declare_function(&callee_name, Linkage::Import, &sig).unwrap();
let mut ctx = Context::new();
ctx.func.signature = sig;

View File

@ -7,7 +7,7 @@ use rustc_span::symbol::sym;
use crate::GccContext;
pub(crate) unsafe fn codegen(tcx: TyCtxt<'_>, mods: &mut GccContext, _module_name: &str, kind: AllocatorKind, has_alloc_error_handler: bool) {
pub(crate) unsafe fn codegen(tcx: TyCtxt<'_>, mods: &mut GccContext, _module_name: &str, kind: AllocatorKind, alloc_error_handler_kind: AllocatorKind) {
let context = &mods.context;
let usize =
match tcx.sess.target.pointer_width {
@ -90,14 +90,7 @@ pub(crate) unsafe fn codegen(tcx: TyCtxt<'_>, mods: &mut GccContext, _module_nam
.collect();
let func = context.new_function(None, FunctionType::Exported, void, &args, name, false);
let kind =
if has_alloc_error_handler {
AllocatorKind::Global
}
else {
AllocatorKind::Default
};
let callee = kind.fn_name(sym::oom);
let callee = alloc_error_handler_kind.fn_name(sym::oom);
let args: Vec<_> = types.iter().enumerate()
.map(|(index, typ)| context.new_parameter(None, *typ, &format!("param{}", index)))
.collect();

View File

@ -153,11 +153,11 @@ impl CodegenBackend for GccCodegenBackend {
}
impl ExtraBackendMethods for GccCodegenBackend {
fn codegen_allocator<'tcx>(&self, tcx: TyCtxt<'tcx>, module_name: &str, kind: AllocatorKind, has_alloc_error_handler: bool) -> Self::Module {
fn codegen_allocator<'tcx>(&self, tcx: TyCtxt<'tcx>, module_name: &str, kind: AllocatorKind, alloc_error_handler_kind: AllocatorKind) -> Self::Module {
let mut mods = GccContext {
context: Context::default(),
};
unsafe { allocator::codegen(tcx, &mut mods, module_name, kind, has_alloc_error_handler); }
unsafe { allocator::codegen(tcx, &mut mods, module_name, kind, alloc_error_handler_kind); }
mods
}

View File

@ -15,7 +15,7 @@ pub(crate) unsafe fn codegen(
module_llvm: &mut ModuleLlvm,
module_name: &str,
kind: AllocatorKind,
has_alloc_error_handler: bool,
alloc_error_handler_kind: AllocatorKind,
) {
let llcx = &*module_llvm.llcx;
let llmod = module_llvm.llmod();
@ -117,8 +117,7 @@ pub(crate) unsafe fn codegen(
attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &[uwtable]);
}
let kind = if has_alloc_error_handler { AllocatorKind::Global } else { AllocatorKind::Default };
let callee = kind.fn_name(sym::oom);
let callee = alloc_error_handler_kind.fn_name(sym::oom);
let callee = llvm::LLVMRustGetOrInsertFunction(llmod, callee.as_ptr().cast(), callee.len(), ty);
// -> ! DIFlagNoReturn
attributes::apply_to_llfn(callee, llvm::AttributePlace::Function, &[no_return]);

View File

@ -130,7 +130,7 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
op_idx.insert(idx, constraints.len());
constraints.push(reg_to_llvm(reg, Some(&value.layout)));
}
InlineAsmOperandRef::InOut { reg, late: _, in_value, out_place: _ } => {
InlineAsmOperandRef::InOut { reg, late, in_value, out_place: _ } => {
let value = llvm_fixup_input(
self,
in_value.immediate(),
@ -138,7 +138,16 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
&in_value.layout,
);
inputs.push(value);
constraints.push(format!("{}", op_idx[&idx]));
// In the case of fixed registers, we have the choice of
// either using a tied operand or duplicating the constraint.
// We prefer the latter because it matches the behavior of
// Clang.
if late && matches!(reg, InlineAsmRegOrRegClass::Reg(_)) {
constraints.push(format!("{}", reg_to_llvm(reg, Some(&in_value.layout))));
} else {
constraints.push(format!("{}", op_idx[&idx]));
}
}
InlineAsmOperandRef::SymFn { instance } => {
inputs.push(self.cx.get_fn(instance));
@ -276,13 +285,13 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
let mut attrs = SmallVec::<[_; 2]>::new();
if options.contains(InlineAsmOptions::PURE) {
if options.contains(InlineAsmOptions::NOMEM) {
attrs.push(llvm::AttributeKind::ReadNone.create_attr(self.cx.llcx));
attrs.push(llvm::MemoryEffects::None.create_attr(self.cx.llcx));
} else if options.contains(InlineAsmOptions::READONLY) {
attrs.push(llvm::AttributeKind::ReadOnly.create_attr(self.cx.llcx));
attrs.push(llvm::MemoryEffects::ReadOnly.create_attr(self.cx.llcx));
}
attrs.push(llvm::AttributeKind::WillReturn.create_attr(self.cx.llcx));
} else if options.contains(InlineAsmOptions::NOMEM) {
attrs.push(llvm::AttributeKind::InaccessibleMemOnly.create_attr(self.cx.llcx));
attrs.push(llvm::MemoryEffects::InaccessibleMemOnly.create_attr(self.cx.llcx));
} else {
// LLVM doesn't have an attribute to represent ReadOnly + SideEffect
}
@ -496,6 +505,44 @@ fn xmm_reg_index(reg: InlineAsmReg) -> Option<u32> {
}
}
/// If the register is an AArch64 integer register then return its index.
fn a64_reg_index(reg: InlineAsmReg) -> Option<u32> {
match reg {
InlineAsmReg::AArch64(AArch64InlineAsmReg::x0) => Some(0),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x1) => Some(1),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x2) => Some(2),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x3) => Some(3),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x4) => Some(4),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x5) => Some(5),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x6) => Some(6),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x7) => Some(7),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x8) => Some(8),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x9) => Some(9),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x10) => Some(10),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x11) => Some(11),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x12) => Some(12),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x13) => Some(13),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x14) => Some(14),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x15) => Some(15),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x16) => Some(16),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x17) => Some(17),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x18) => Some(18),
// x19 is reserved
InlineAsmReg::AArch64(AArch64InlineAsmReg::x20) => Some(20),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x21) => Some(21),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x22) => Some(22),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x23) => Some(23),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x24) => Some(24),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x25) => Some(25),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x26) => Some(26),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x27) => Some(27),
InlineAsmReg::AArch64(AArch64InlineAsmReg::x28) => Some(28),
// x29 is reserved
InlineAsmReg::AArch64(AArch64InlineAsmReg::x30) => Some(30),
_ => None,
}
}
/// If the register is an AArch64 vector register then return its index.
fn a64_vreg_index(reg: InlineAsmReg) -> Option<u32> {
match reg {
@ -526,6 +573,22 @@ fn reg_to_llvm(reg: InlineAsmRegOrRegClass, layout: Option<&TyAndLayout<'_>>) ->
'x'
};
format!("{{{}mm{}}}", class, idx)
} else if let Some(idx) = a64_reg_index(reg) {
let class = if let Some(layout) = layout {
match layout.size.bytes() {
8 => 'x',
_ => 'w',
}
} else {
// We use i32 as the type for discarded outputs
'w'
};
if class == 'x' && reg == InlineAsmReg::AArch64(AArch64InlineAsmReg::x30) {
// LLVM doesn't recognize x30. use lr instead.
"{lr}".to_string()
} else {
format!("{{{}{}}}", class, idx)
}
} else if let Some(idx) = a64_vreg_index(reg) {
let class = if let Some(layout) = layout {
match layout.size.bytes() {
@ -541,9 +604,6 @@ fn reg_to_llvm(reg: InlineAsmRegOrRegClass, layout: Option<&TyAndLayout<'_>>) ->
'q'
};
format!("{{{}{}}}", class, idx)
} else if reg == InlineAsmReg::AArch64(AArch64InlineAsmReg::x30) {
// LLVM doesn't recognize x30
"{lr}".to_string()
} else if reg == InlineAsmReg::Arm(ArmInlineAsmReg::r14) {
// LLVM doesn't recognize r14
"{lr}".to_string()

View File

@ -13,7 +13,7 @@ use smallvec::SmallVec;
use crate::attributes;
use crate::llvm::AttributePlace::Function;
use crate::llvm::{self, AllocKindFlags, Attribute, AttributeKind, AttributePlace};
use crate::llvm::{self, AllocKindFlags, Attribute, AttributeKind, AttributePlace, MemoryEffects};
use crate::llvm_util;
pub use rustc_attr::{InlineAttr, InstructionSetAttr, OptimizeAttr};
@ -303,10 +303,10 @@ pub fn from_fn_attrs<'ll, 'tcx>(
to_add.push(AttributeKind::ReturnsTwice.create_attr(cx.llcx));
}
if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::FFI_PURE) {
to_add.push(AttributeKind::ReadOnly.create_attr(cx.llcx));
to_add.push(MemoryEffects::ReadOnly.create_attr(cx.llcx));
}
if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::FFI_CONST) {
to_add.push(AttributeKind::ReadNone.create_attr(cx.llcx));
to_add.push(MemoryEffects::None.create_attr(cx.llcx));
}
if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NAKED) {
to_add.push(AttributeKind::Naked.create_attr(cx.llcx));

View File

@ -365,11 +365,14 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
Int(I64) => "llvm.ssub.with.overflow.i64",
Int(I128) => "llvm.ssub.with.overflow.i128",
Uint(U8) => "llvm.usub.with.overflow.i8",
Uint(U16) => "llvm.usub.with.overflow.i16",
Uint(U32) => "llvm.usub.with.overflow.i32",
Uint(U64) => "llvm.usub.with.overflow.i64",
Uint(U128) => "llvm.usub.with.overflow.i128",
Uint(_) => {
// Emit sub and icmp instead of llvm.usub.with.overflow. LLVM considers these
// to be the canonical form. It will attempt to reform llvm.usub.with.overflow
// in the backend if profitable.
let sub = self.sub(lhs, rhs);
let cmp = self.icmp(IntPredicate::IntULT, lhs, rhs);
return (sub, cmp);
}
_ => unreachable!(),
},
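As a sketch of what the unsigned case now computes, expressed in plain Rust rather than LLVM IR (names are placeholders): the `sub` plus `icmp ult` pair is exactly the `overflowing_sub` contract.
```rust
// Plain-Rust model of the lowering: wrapping difference plus an
// unsigned less-than check for the borrow/overflow flag.
fn unsigned_overflowing_sub(lhs: u32, rhs: u32) -> (u32, bool) {
    (lhs.wrapping_sub(rhs), lhs < rhs)
}

fn main() {
    assert_eq!(unsigned_overflowing_sub(5, 3), (2, false));
    assert_eq!(unsigned_overflowing_sub(3, 5), (u32::MAX - 1, true));
    assert_eq!((3u32).overflowing_sub(5), (u32::MAX - 1, true)); // same contract
}
```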

View File

@ -108,11 +108,11 @@ impl ExtraBackendMethods for LlvmCodegenBackend {
tcx: TyCtxt<'tcx>,
module_name: &str,
kind: AllocatorKind,
has_alloc_error_handler: bool,
alloc_error_handler_kind: AllocatorKind,
) -> ModuleLlvm {
let mut module_llvm = ModuleLlvm::new_metadata(tcx, module_name);
unsafe {
allocator::codegen(tcx, &mut module_llvm, module_name, kind, has_alloc_error_handler);
allocator::codegen(tcx, &mut module_llvm, module_name, kind, alloc_error_handler_kind);
}
module_llvm
}

View File

@ -183,7 +183,6 @@ pub enum AttributeKind {
OptimizeNone = 24,
ReturnsTwice = 25,
ReadNone = 26,
InaccessibleMemOnly = 27,
SanitizeHWAddress = 28,
WillReturn = 29,
StackProtectReq = 30,
@ -590,6 +589,15 @@ pub enum ChecksumKind {
SHA256,
}
/// LLVMRustMemoryEffects
#[derive(Copy, Clone)]
#[repr(C)]
pub enum MemoryEffects {
None,
ReadOnly,
InaccessibleMemOnly,
}
extern "C" {
type Opaque;
}
@ -1175,6 +1183,7 @@ extern "C" {
pub fn LLVMRustCreateUWTableAttr(C: &Context, async_: bool) -> &Attribute;
pub fn LLVMRustCreateAllocSizeAttr(C: &Context, size_arg: u32) -> &Attribute;
pub fn LLVMRustCreateAllocKindAttr(C: &Context, size_arg: u64) -> &Attribute;
pub fn LLVMRustCreateMemoryEffectsAttr(C: &Context, effects: MemoryEffects) -> &Attribute;
// Operations on functions
pub fn LLVMRustGetOrInsertFunction<'a>(

View File

@ -185,6 +185,13 @@ impl AttributeKind {
}
}
impl MemoryEffects {
/// Create an LLVM Attribute with these memory effects.
pub fn create_attr(self, llcx: &Context) -> &Attribute {
unsafe { LLVMRustCreateMemoryEffectsAttr(llcx, self) }
}
}
pub fn set_section(llglobal: &Value, section_name: &str) {
let section_name_cstr = CString::new(section_name).expect("unexpected CString error");
unsafe {

View File

@ -6,11 +6,12 @@ use rustc_span::symbol::Symbol;
use object::read::archive::ArchiveFile;
use std::fmt::Display;
use std::fs::File;
use std::io;
use std::path::{Path, PathBuf};
use crate::errors::ExtractBundledLibsError;
pub trait ArchiveBuilderBuilder {
fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder<'a> + 'a>;
@ -28,32 +29,35 @@ pub trait ArchiveBuilderBuilder {
is_direct_dependency: bool,
) -> PathBuf;
fn extract_bundled_libs(
&self,
rlib: &Path,
fn extract_bundled_libs<'a>(
&'a self,
rlib: &'a Path,
outdir: &Path,
bundled_lib_file_names: &FxHashSet<Symbol>,
) -> Result<(), String> {
let message = |msg: &str, e: &dyn Display| format!("{} '{}': {}", msg, &rlib.display(), e);
) -> Result<(), ExtractBundledLibsError<'_>> {
let archive_map = unsafe {
Mmap::map(File::open(rlib).map_err(|e| message("failed to open file", &e))?)
.map_err(|e| message("failed to mmap file", &e))?
Mmap::map(
File::open(rlib)
.map_err(|e| ExtractBundledLibsError::OpenFile { rlib, error: Box::new(e) })?,
)
.map_err(|e| ExtractBundledLibsError::MmapFile { rlib, error: Box::new(e) })?
};
let archive = ArchiveFile::parse(&*archive_map)
.map_err(|e| message("failed to parse archive", &e))?;
.map_err(|e| ExtractBundledLibsError::ParseArchive { rlib, error: Box::new(e) })?;
for entry in archive.members() {
let entry = entry.map_err(|e| message("failed to read entry", &e))?;
let entry = entry
.map_err(|e| ExtractBundledLibsError::ReadEntry { rlib, error: Box::new(e) })?;
let data = entry
.data(&*archive_map)
.map_err(|e| message("failed to get data from archive member", &e))?;
.map_err(|e| ExtractBundledLibsError::ArchiveMember { rlib, error: Box::new(e) })?;
let name = std::str::from_utf8(entry.name())
.map_err(|e| message("failed to convert name", &e))?;
.map_err(|e| ExtractBundledLibsError::ConvertName { rlib, error: Box::new(e) })?;
if !bundled_lib_file_names.contains(&Symbol::intern(name)) {
continue; // We need to extract only native libraries.
}
std::fs::write(&outdir.join(&name), data)
.map_err(|e| message("failed to write file", &e))?;
.map_err(|e| ExtractBundledLibsError::WriteFile { rlib, error: Box::new(e) })?;
}
Ok(())
}

View File

@ -919,29 +919,17 @@ fn link_natively<'a>(
)
.is_some();
sess.note_without_error("`link.exe` returned an unexpected error");
sess.emit_note(errors::LinkExeUnexpectedError);
if is_vs_installed && has_linker {
// the linker is broken
sess.note_without_error(
"the Visual Studio build tools may need to be repaired \
using the Visual Studio installer",
);
sess.note_without_error(
"or a necessary component may be missing from the \
\"C++ build tools\" workload",
);
sess.emit_note(errors::RepairVSBuildTools);
sess.emit_note(errors::MissingCppBuildToolComponent);
} else if is_vs_installed {
// the linker is not installed
sess.note_without_error(
"in the Visual Studio installer, ensure the \
\"C++ build tools\" workload is selected",
);
sess.emit_note(errors::SelectCppBuildToolWorkload);
} else {
// visual studio is not installed
sess.note_without_error(
"you may need to install Visual Studio build tools with the \
\"C++ build tools\" workload",
);
sess.emit_note(errors::VisualStudioNotInstalled);
}
}
}
@ -954,35 +942,20 @@ fn link_natively<'a>(
Err(e) => {
let linker_not_found = e.kind() == io::ErrorKind::NotFound;
let mut linker_error = {
if linker_not_found {
sess.struct_err(&format!("linker `{}` not found", linker_path.display()))
} else {
sess.struct_err(&format!(
"could not exec the linker `{}`",
linker_path.display()
))
}
};
linker_error.note(&e.to_string());
if !linker_not_found {
linker_error.note(&format!("{:?}", &cmd));
if linker_not_found {
sess.emit_err(errors::LinkerNotFound { linker_path, error: e });
} else {
sess.emit_err(errors::UnableToExeLinker {
linker_path,
error: e,
command_formatted: format!("{:?}", &cmd),
});
}
linker_error.emit();
if sess.target.is_like_msvc && linker_not_found {
sess.note_without_error(
"the msvc targets depend on the msvc linker \
but `link.exe` was not found",
);
sess.note_without_error(
"please ensure that Visual Studio 2017 or later, or Build Tools \
for Visual Studio were installed with the Visual C++ option.",
);
sess.note_without_error("VS Code is a different product, and is not sufficient.");
sess.emit_note(errors::MsvcMissingLinker);
sess.emit_note(errors::CheckInstalledVisualStudio);
sess.emit_note(errors::UnsufficientVSCodeProduct);
}
sess.abort_if_errors();
}
@ -1007,15 +980,13 @@ fn link_natively<'a>(
if !prog.status.success() {
let mut output = prog.stderr.clone();
output.extend_from_slice(&prog.stdout);
sess.struct_warn(&format!(
"processing debug info with `dsymutil` failed: {}",
prog.status
))
.note(&escape_string(&output))
.emit();
sess.emit_warning(errors::ProcessingDymutilFailed {
status: prog.status,
output: escape_string(&output),
});
}
}
Err(e) => sess.fatal(&format!("unable to run `dsymutil`: {}", e)),
Err(error) => sess.emit_fatal(errors::UnableToRunDsymutil { error }),
}
}
@ -1092,15 +1063,14 @@ fn strip_symbols_with_external_utility<'a>(
if !prog.status.success() {
let mut output = prog.stderr.clone();
output.extend_from_slice(&prog.stdout);
sess.struct_warn(&format!(
"stripping debug info with `{}` failed: {}",
util, prog.status
))
.note(&escape_string(&output))
.emit();
sess.emit_warning(errors::StrippingDebugInfoFailed {
util,
status: prog.status,
output: escape_string(&output),
});
}
}
Err(e) => sess.fatal(&format!("unable to run `{}`: {}", util, e)),
Err(error) => sess.emit_fatal(errors::UnableToRun { util, error }),
}
}
@ -1153,7 +1123,8 @@ fn link_sanitizer_runtime(sess: &Session, linker: &mut dyn Linker, name: &str) {
if path.exists() {
return session_tlib;
} else {
let default_sysroot = filesearch::get_or_default_sysroot();
let default_sysroot =
filesearch::get_or_default_sysroot().expect("Failed finding sysroot");
let default_tlib = filesearch::make_target_lib_path(
&default_sysroot,
sess.opts.target_triple.triple(),
@ -1251,7 +1222,7 @@ pub fn linker_and_flavor(sess: &Session) -> (PathBuf, LinkerFlavor) {
)),
(Some(linker), None) => {
let stem = linker.file_stem().and_then(|stem| stem.to_str()).unwrap_or_else(|| {
sess.fatal("couldn't extract file stem from specified linker")
sess.emit_fatal(errors::LinkerFileStem);
});
let flavor = if stem == "emcc" {
@ -1378,13 +1349,9 @@ fn print_native_static_libs(sess: &Session, all_native_libs: &[NativeLib]) {
})
.collect();
if !lib_args.is_empty() {
sess.note_without_error(
"Link against the following native artifacts when linking \
against this static library. The order and any duplication \
can be significant on some platforms.",
);
sess.emit_note(errors::StaticLibraryNativeArtifacts);
// Prefix for greppability
sess.note_without_error(&format!("native-static-libs: {}", &lib_args.join(" ")));
sess.emit_note(errors::NativeStaticLibs { arguments: lib_args.join(" ") });
}
}
@ -1688,14 +1655,14 @@ fn add_link_script(cmd: &mut dyn Linker, sess: &Session, tmpdir: &Path, crate_ty
match (crate_type, &sess.target.link_script) {
(CrateType::Cdylib | CrateType::Executable, Some(script)) => {
if !sess.target.linker_flavor.is_gnu() {
sess.fatal("can only use link script when linking with GNU-like linker");
sess.emit_fatal(errors::LinkScriptUnavailable);
}
let file_name = ["rustc", &sess.target.llvm_target, "linkfile.ld"].join("-");
let path = tmpdir.join(file_name);
if let Err(e) = fs::write(&path, script.as_ref()) {
sess.fatal(&format!("failed to write link script to {}: {}", path.display(), e));
if let Err(error) = fs::write(&path, script.as_ref()) {
sess.emit_fatal(errors::LinkScriptWriteFailure { path, error });
}
cmd.arg("--script");
@ -1841,8 +1808,8 @@ fn add_linked_symbol_object(
let path = tmpdir.join("symbols.o");
let result = std::fs::write(&path, file.write().unwrap());
if let Err(e) = result {
sess.fatal(&format!("failed to write {}: {}", path.display(), e));
if let Err(error) = result {
sess.emit_fatal(errors::FailedToWrite { path, error });
}
cmd.add_object(&path);
}
@ -2299,14 +2266,10 @@ fn collect_natvis_visualizers(
visualizer_paths.push(visualizer_out_file);
}
Err(error) => {
sess.warn(
format!(
"Unable to write debugger visualizer file `{}`: {} ",
visualizer_out_file.display(),
error
)
.as_str(),
);
sess.emit_warning(errors::UnableToWriteDebuggerVisualizer {
path: visualizer_out_file,
error,
});
}
};
}
@ -2484,7 +2447,7 @@ fn add_upstream_rust_crates<'a>(
let rlib = &src.rlib.as_ref().unwrap().0;
archive_builder_builder
.extract_bundled_libs(rlib, tmpdir, &bundled_libs)
.unwrap_or_else(|e| sess.fatal(e));
.unwrap_or_else(|e| sess.emit_fatal(e));
}
let mut last = (None, NativeLibKind::Unspecified, None);
@ -2641,7 +2604,7 @@ fn add_upstream_rust_crates<'a>(
|| !codegen_results.crate_info.is_no_builtins.contains(&cnum);
let mut archive = archive_builder_builder.new_archive_builder(sess);
if let Err(e) = archive.add_archive(
if let Err(error) = archive.add_archive(
cratepath,
Box::new(move |f| {
if f == METADATA_FILENAME {
@ -2681,7 +2644,7 @@ fn add_upstream_rust_crates<'a>(
false
}),
) {
sess.fatal(&format!("failed to build archive from rlib: {}", e));
sess.emit_fatal(errors::RlibArchiveBuildFailure { error });
}
if archive.build(&dst) {
link_upstream(&dst);
@ -2813,14 +2776,14 @@ fn add_apple_sdk(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) {
("arm", "watchos") => "watchos",
(_, "macos") => "macosx",
_ => {
sess.err(&format!("unsupported arch `{}` for os `{}`", arch, os));
sess.emit_err(errors::UnsupportedArch { arch, os });
return;
}
};
let sdk_root = match get_apple_sdk_root(sdk_name) {
Ok(s) => s,
Err(e) => {
sess.err(&e);
sess.emit_err(e);
return;
}
};
@ -2836,7 +2799,7 @@ fn add_apple_sdk(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) {
}
}
fn get_apple_sdk_root(sdk_name: &str) -> Result<String, String> {
fn get_apple_sdk_root(sdk_name: &str) -> Result<String, errors::AppleSdkRootError<'_>> {
// Following what clang does
// (https://github.com/llvm/llvm-project/blob/
// 296a80102a9b72c3eda80558fb78a3ed8849b341/clang/lib/Driver/ToolChains/Darwin.cpp#L1661-L1678)
@ -2886,7 +2849,7 @@ fn get_apple_sdk_root(sdk_name: &str) -> Result<String, String> {
match res {
Ok(output) => Ok(output.trim().to_string()),
Err(e) => Err(format!("failed to get {} SDK path: {}", sdk_name, e)),
Err(error) => Err(errors::AppleSdkRootError::SdkPath { sdk_name, error }),
}
}
@ -2919,7 +2882,7 @@ fn add_gcc_ld_path(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) {
}
}
} else {
sess.fatal("option `-Z gcc-ld` is used even though linker flavor is not gcc");
sess.emit_fatal(errors::OptionGccOnly);
}
}
}

View File

@ -193,8 +193,11 @@ fn exported_symbols_provider_local<'tcx>(
}
if tcx.allocator_kind(()).is_some() {
for method in ALLOCATOR_METHODS {
let symbol_name = format!("__rust_{}", method.name);
for symbol_name in ALLOCATOR_METHODS
.iter()
.map(|method| format!("__rust_{}", method.name))
.chain(["__rust_alloc_error_handler".to_string(), OomStrategy::SYMBOL.to_string()])
{
let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(tcx, &symbol_name));
symbols.push((

View File

@ -22,7 +22,6 @@ use rustc_data_structures::sync::ParallelIterator;
use rustc_hir as hir;
use rustc_hir::def_id::{DefId, LOCAL_CRATE};
use rustc_hir::lang_items::LangItem;
use rustc_hir::weak_lang_items::WEAK_ITEMS_SYMBOLS;
use rustc_index::vec::Idx;
use rustc_metadata::EncodedMetadata;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
@ -639,7 +638,14 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
let llmod_id =
cgu_name_builder.build_cgu_name(LOCAL_CRATE, &["crate"], Some("allocator")).to_string();
let module_llvm = tcx.sess.time("write_allocator_module", || {
backend.codegen_allocator(tcx, &llmod_id, kind, tcx.lang_items().oom().is_some())
backend.codegen_allocator(
tcx,
&llmod_id,
kind,
// If allocator_kind is Some then alloc_error_handler_kind must
// also be Some.
tcx.alloc_error_handler_kind(()).unwrap(),
)
});
Some(ModuleCodegen { name: llmod_id, module_llvm, kind: ModuleKind::Allocator })
@ -887,14 +893,14 @@ impl CrateInfo {
// by the compiler, but that's ok because all this stuff is unstable anyway.
let target = &tcx.sess.target;
if !are_upstream_rust_objects_already_included(tcx.sess) {
let missing_weak_lang_items: FxHashSet<&Symbol> = info
let missing_weak_lang_items: FxHashSet<Symbol> = info
.used_crates
.iter()
.flat_map(|cnum| {
tcx.missing_lang_items(*cnum)
.iter()
.filter(|l| lang_items::required(tcx, **l))
.filter_map(|item| WEAK_ITEMS_SYMBOLS.get(item))
.flat_map(|&cnum| tcx.missing_lang_items(cnum))
.filter(|l| l.is_weak())
.filter_map(|&l| {
let name = l.link_name()?;
lang_items::required(tcx, l).then_some(name)
})
.collect();
let prefix = if target.is_like_windows && target.arch == "x86" { "_" } else { "" };

View File

@ -354,3 +354,170 @@ impl IntoDiagnostic<'_> for LinkingFailed<'_> {
diag
}
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_link_exe_unexpected_error)]
pub struct LinkExeUnexpectedError;
#[derive(Diagnostic)]
#[diag(codegen_ssa_repair_vs_build_tools)]
pub struct RepairVSBuildTools;
#[derive(Diagnostic)]
#[diag(codegen_ssa_missing_cpp_build_tool_component)]
pub struct MissingCppBuildToolComponent;
#[derive(Diagnostic)]
#[diag(codegen_ssa_select_cpp_build_tool_workload)]
pub struct SelectCppBuildToolWorkload;
#[derive(Diagnostic)]
#[diag(codegen_ssa_visual_studio_not_installed)]
pub struct VisualStudioNotInstalled;
#[derive(Diagnostic)]
#[diag(codegen_ssa_linker_not_found)]
#[note]
pub struct LinkerNotFound {
pub linker_path: PathBuf,
pub error: Error,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_unable_to_exe_linker)]
#[note]
#[note(command_note)]
pub struct UnableToExeLinker {
pub linker_path: PathBuf,
pub error: Error,
pub command_formatted: String,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_msvc_missing_linker)]
pub struct MsvcMissingLinker;
#[derive(Diagnostic)]
#[diag(codegen_ssa_check_installed_visual_studio)]
pub struct CheckInstalledVisualStudio;
#[derive(Diagnostic)]
#[diag(codegen_ssa_unsufficient_vs_code_product)]
pub struct UnsufficientVSCodeProduct;
#[derive(Diagnostic)]
#[diag(codegen_ssa_processing_dymutil_failed)]
#[note]
pub struct ProcessingDymutilFailed {
pub status: ExitStatus,
pub output: String,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_unable_to_run_dsymutil)]
#[note]
pub struct UnableToRunDsymutil {
pub error: Error,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_stripping_debu_info_failed)]
#[note]
pub struct StrippingDebugInfoFailed<'a> {
pub util: &'a str,
pub status: ExitStatus,
pub output: String,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_unable_to_run)]
pub struct UnableToRun<'a> {
pub util: &'a str,
pub error: Error,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_linker_file_stem)]
pub struct LinkerFileStem;
#[derive(Diagnostic)]
#[diag(codegen_ssa_static_library_native_artifacts)]
pub struct StaticLibraryNativeArtifacts;
#[derive(Diagnostic)]
#[diag(codegen_ssa_native_static_libs)]
pub struct NativeStaticLibs {
pub arguments: String,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_link_script_unavailable)]
pub struct LinkScriptUnavailable;
#[derive(Diagnostic)]
#[diag(codegen_ssa_link_script_write_failure)]
pub struct LinkScriptWriteFailure {
pub path: PathBuf,
pub error: Error,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_failed_to_write)]
pub struct FailedToWrite {
pub path: PathBuf,
pub error: Error,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_unable_to_write_debugger_visualizer)]
pub struct UnableToWriteDebuggerVisualizer {
pub path: PathBuf,
pub error: Error,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_rlib_archive_build_failure)]
pub struct RlibArchiveBuildFailure {
pub error: Error,
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_option_gcc_only)]
pub struct OptionGccOnly;
#[derive(Diagnostic)]
pub enum ExtractBundledLibsError<'a> {
#[diag(codegen_ssa_extract_bundled_libs_open_file)]
OpenFile { rlib: &'a Path, error: Box<dyn std::error::Error> },
#[diag(codegen_ssa_extract_bundled_libs_mmap_file)]
MmapFile { rlib: &'a Path, error: Box<dyn std::error::Error> },
#[diag(codegen_ssa_extract_bundled_libs_parse_archive)]
ParseArchive { rlib: &'a Path, error: Box<dyn std::error::Error> },
#[diag(codegen_ssa_extract_bundled_libs_read_entry)]
ReadEntry { rlib: &'a Path, error: Box<dyn std::error::Error> },
#[diag(codegen_ssa_extract_bundled_libs_archive_member)]
ArchiveMember { rlib: &'a Path, error: Box<dyn std::error::Error> },
#[diag(codegen_ssa_extract_bundled_libs_convert_name)]
ConvertName { rlib: &'a Path, error: Box<dyn std::error::Error> },
#[diag(codegen_ssa_extract_bundled_libs_write_file)]
WriteFile { rlib: &'a Path, error: Box<dyn std::error::Error> },
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_unsupported_arch)]
pub struct UnsupportedArch<'a> {
pub arch: &'a str,
pub os: &'a str,
}
#[derive(Diagnostic)]
pub enum AppleSdkRootError<'a> {
#[diag(codegen_ssa_apple_sdk_error_sdk_path)]
SdkPath { sdk_name: &'a str, error: Error },
}

View File

@ -17,6 +17,7 @@ use rustc_middle::mir::{self, AssertKind, SwitchTargets};
use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf};
use rustc_middle::ty::print::{with_no_trimmed_paths, with_no_visible_paths};
use rustc_middle::ty::{self, Instance, Ty, TypeVisitable};
use rustc_session::config::OptLevel;
use rustc_span::source_map::Span;
use rustc_span::{sym, Symbol};
use rustc_symbol_mangling::typeid::typeid_for_fnabi;
@ -286,12 +287,13 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
assert_eq!(discr.layout.ty, switch_ty);
let mut target_iter = targets.iter();
if target_iter.len() == 1 {
// If there are two targets (one conditional, one fallback), emit br instead of switch
// If there are two targets (one conditional, one fallback), emit `br` instead of
// `switch`.
let (test_value, target) = target_iter.next().unwrap();
let lltrue = helper.llbb_with_cleanup(self, target);
let llfalse = helper.llbb_with_cleanup(self, targets.otherwise());
if switch_ty == bx.tcx().types.bool {
// Don't generate trivial icmps when switching on bool
// Don't generate trivial icmps when switching on bool.
match test_value {
0 => bx.cond_br(discr.immediate(), llfalse, lltrue),
1 => bx.cond_br(discr.immediate(), lltrue, llfalse),
@ -303,6 +305,30 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval);
bx.cond_br(cmp, lltrue, llfalse);
}
} else if self.cx.sess().opts.optimize == OptLevel::No
&& target_iter.len() == 2
&& self.mir[targets.otherwise()].is_empty_unreachable()
{
// In unoptimized builds, if there are two normal targets and the `otherwise` target is
// an unreachable BB, emit `br` instead of `switch`. This leaves behind the unreachable
// BB, which will usually (but not always) be dead code.
//
// Why only in unoptimized builds?
// - In unoptimized builds LLVM uses FastISel which does not support switches, so it
// must fall back to the slower SelectionDAG isel. Therefore, using `br` gives
// significant compile time speedups for unoptimized builds.
// - In optimized builds the above doesn't hold, and using `br` sometimes results in
// worse generated code because LLVM can no longer tell that the value being switched
// on can only have two values, e.g. 0 and 1.
//
let (test_value1, target1) = target_iter.next().unwrap();
let (_test_value2, target2) = target_iter.next().unwrap();
let ll1 = helper.llbb_with_cleanup(self, target1);
let ll2 = helper.llbb_with_cleanup(self, target2);
let switch_llty = bx.immediate_backend_type(bx.layout_of(switch_ty));
let llval = bx.const_uint_big(switch_llty, test_value1);
let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval);
bx.cond_br(cmp, ll1, ll2);
} else {
bx.switch(
discr.immediate(),

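As a rough sketch of the source shape this fast path targets (illustrative only, not part of the patch): in a debug build, an exhaustive `match` over a two-variant enum typically produces a MIR `SwitchInt` with two value targets plus an `otherwise` edge that is just `unreachable`, which the new branch above lowers to a single conditional `br` instead of a `switch`.

// Hypothetical example: the discriminant can only be 0 or 1, and the
// `otherwise` edge of the generated SwitchInt is an unreachable block.
enum Flag {
    Off,
    On,
}

fn describe(f: Flag) -> &'static str {
    match f {
        Flag::Off => "off",
        Flag::On => "on",
    }
}

fn main() {
    println!("{}", describe(Flag::On));
}
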
View File

@ -267,6 +267,7 @@ const WASM_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
// tidy-alphabetical-start
("atomics", Some(sym::wasm_target_feature)),
("bulk-memory", Some(sym::wasm_target_feature)),
("multivalue", Some(sym::wasm_target_feature)),
("mutable-globals", Some(sym::wasm_target_feature)),
("nontrapping-fptoint", Some(sym::wasm_target_feature)),
("reference-types", Some(sym::wasm_target_feature)),

View File

@ -119,7 +119,7 @@ pub trait ExtraBackendMethods: CodegenBackend + WriteBackendMethods + Sized + Se
tcx: TyCtxt<'tcx>,
module_name: &str,
kind: AllocatorKind,
has_alloc_error_handler: bool,
alloc_error_handler_kind: AllocatorKind,
) -> Self::Module;
/// This generates the codegen unit and returns it along with
/// a `u64` giving an estimate of the unit's processing cost.

View File

@ -598,7 +598,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// the last field). Can't have foreign types here, how would we
// adjust alignment and size for them?
let field = layout.field(self, layout.fields.count() - 1);
let Some((unsized_size, unsized_align)) = self.size_and_align_of(metadata, &field)? else {
let Some((unsized_size, mut unsized_align)) = self.size_and_align_of(metadata, &field)? else {
// A field with an extern type. We don't know the actual dynamic size
// or the alignment.
return Ok(None);
@ -614,6 +614,13 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// Return the sum of sizes and max of aligns.
let size = sized_size + unsized_size; // `Size` addition
// Packed types ignore the alignment of their fields.
if let ty::Adt(def, _) = layout.ty.kind() {
if def.repr().packed() {
unsized_align = sized_align;
}
}
// Choose max of two known alignments (combined value must
// be aligned according to more restrictive of the two).
let align = sized_align.max(unsized_align);

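To make the arithmetic above concrete, here is a small standalone sketch (the free function and its parameter names are illustrative, not the interpreter's API): the dynamic size is the sized prefix plus the unsized tail, the alignment is the max of the two known alignments, and `#[repr(packed)]` overrides the tail's alignment with the already-packed prefix alignment.

// Illustrative only: `sized_size`/`sized_align` come from the static
// layout of the sized prefix, `unsized_size`/`unsized_align` from the
// metadata of the unsized tail (slice length, vtable, ...).
fn dyn_size_align(
    sized_size: u64,
    sized_align: u64,
    unsized_size: u64,
    mut unsized_align: u64,
    repr_packed: bool,
) -> (u64, u64) {
    // Packed types ignore the alignment of their fields.
    if repr_packed {
        unsized_align = sized_align;
    }
    // The combined value must be aligned to the more restrictive of the two.
    let align = sized_align.max(unsized_align);
    // Sum of sizes, then round the total up to the chosen alignment
    // (the interpreter likewise aligns the size before returning it).
    let size = sized_size + unsized_size;
    (((size + align - 1) / align) * align, align)
}
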
View File

@ -7,7 +7,9 @@ use std::convert::TryFrom;
use rustc_hir::def_id::DefId;
use rustc_middle::mir::{
self,
interpret::{ConstValue, GlobalId, InterpResult, PointerArithmetic, Scalar},
interpret::{
Allocation, ConstAllocation, ConstValue, GlobalId, InterpResult, PointerArithmetic, Scalar,
},
BinOp, NonDivergingIntrinsic,
};
use rustc_middle::ty;
@ -23,7 +25,6 @@ use super::{
};
mod caller_location;
mod type_name;
fn numeric_intrinsic<Prov>(name: Symbol, bits: u128, kind: Primitive) -> Scalar<Prov> {
let size = match kind {
@ -42,6 +43,13 @@ fn numeric_intrinsic<Prov>(name: Symbol, bits: u128, kind: Primitive) -> Scalar<
Scalar::from_uint(bits_out, size)
}
/// Directly returns an `Allocation` containing an absolute path representation of the given type.
pub(crate) fn alloc_type_name<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> ConstAllocation<'tcx> {
let path = crate::util::type_name(tcx, ty);
let alloc = Allocation::from_bytes_byte_aligned_immutable(path.into_bytes());
tcx.intern_const_alloc(alloc)
}
/// The logic for all nullary intrinsics is implemented here. These intrinsics don't get evaluated
/// inside an `InterpCx` and instead have their value computed directly from rustc internal info.
pub(crate) fn eval_nullary_intrinsic<'tcx>(
@ -55,7 +63,7 @@ pub(crate) fn eval_nullary_intrinsic<'tcx>(
Ok(match name {
sym::type_name => {
ensure_monomorphic_enough(tcx, tp_ty)?;
let alloc = type_name::alloc_type_name(tcx, tp_ty);
let alloc = alloc_type_name(tcx, tp_ty);
ConstValue::Slice { data: alloc, start: 0, end: alloc.inner().len() }
}
sym::needs_drop => {

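The user-facing counterpart of this nullary intrinsic is `std::any::type_name`; a quick illustration of the kind of absolute-path string the returned allocation ends up holding:

fn main() {
    // Prints the absolute, non-verbose path of the type, e.g.
    // "alloc::vec::Vec<core::option::Option<u8>>".
    println!("{}", std::any::type_name::<Vec<Option<u8>>>());
}
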
View File

@ -4,7 +4,7 @@
use rustc_hir::def::Namespace;
use rustc_middle::ty::layout::{LayoutOf, PrimitiveExt, TyAndLayout};
use rustc_middle::ty::print::{FmtPrinter, PrettyPrinter};
use rustc_middle::ty::{ConstInt, DelaySpanBugEmitted, Ty};
use rustc_middle::ty::{ConstInt, Ty};
use rustc_middle::{mir, ty};
use rustc_target::abi::{self, Abi, Align, HasDataLayout, Size, TagEncoding};
use rustc_target::abi::{VariantIdx, Variants};
@ -567,7 +567,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
ty::ConstKind::Param(_) | ty::ConstKind::Placeholder(..) => {
throw_inval!(TooGeneric)
}
ty::ConstKind::Error(DelaySpanBugEmitted { reported, .. }) => {
ty::ConstKind::Error(reported) => {
throw_inval!(AlreadyReported(reported))
}
ty::ConstKind::Unevaluated(uv) => {

View File

@ -13,11 +13,8 @@ use rustc_middle::ty::{self, adjustment::PointerCast, Instance, InstanceDef, Ty,
use rustc_middle::ty::{Binder, TraitPredicate, TraitRef, TypeVisitable};
use rustc_mir_dataflow::{self, Analysis};
use rustc_span::{sym, Span, Symbol};
use rustc_trait_selection::infer::InferCtxtExt;
use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt as _;
use rustc_trait_selection::traits::{
self, ObligationCauseCode, SelectionContext, TraitEngine, TraitEngineExt,
};
use rustc_trait_selection::traits::{self, ObligationCauseCode, ObligationCtxt, SelectionContext};
use std::mem;
use std::ops::Deref;
@ -747,35 +744,26 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
// "non-const" check. This is required for correctness here.
{
let infcx = tcx.infer_ctxt().build();
let mut fulfill_cx = <dyn TraitEngine<'_>>::new(infcx.tcx);
let ocx = ObligationCtxt::new(&infcx);
let predicates = tcx.predicates_of(callee).instantiate(tcx, substs);
let hir_id = tcx
.hir()
.local_def_id_to_hir_id(self.body.source.def_id().expect_local());
let cause = || {
ObligationCause::new(
terminator.source_info.span,
hir_id,
ObligationCauseCode::ItemObligation(callee),
)
};
let normalized = infcx.partially_normalize_associated_types_in(
cause(),
param_env,
predicates,
let cause = ObligationCause::new(
terminator.source_info.span,
hir_id,
ObligationCauseCode::ItemObligation(callee),
);
for p in normalized.obligations {
fulfill_cx.register_predicate_obligation(&infcx, p);
}
for obligation in traits::predicates_for_generics(
|_, _| cause(),
let normalized_predicates =
ocx.normalize(cause.clone(), param_env, predicates);
ocx.register_obligations(traits::predicates_for_generics(
|_, _| cause.clone(),
self.param_env,
normalized.value,
) {
fulfill_cx.register_predicate_obligation(&infcx, obligation);
}
let errors = fulfill_cx.select_all_or_error(&infcx);
normalized_predicates,
));
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
infcx.err_ctxt().report_fulfillment_errors(&errors, None, false);
}

View File

@ -45,11 +45,10 @@ impl<'tcx> MirPass<'tcx> for PromoteTemps<'tcx> {
// There's not really any point in promoting errorful MIR.
//
// This does not include MIR that failed const-checking, which we still try to promote.
if body.return_ty().references_error() {
tcx.sess.delay_span_bug(body.span, "PromoteTemps: MIR had errors");
if let Err(_) = body.return_ty().error_reported() {
debug!("PromoteTemps: MIR had errors");
return;
}
if body.source.promoted.is_some() {
return;
}

View File

@ -3,7 +3,7 @@
//! context.
use rustc_hir::def_id::DefId;
use rustc_hir::lang_items::LangItemGroup;
use rustc_hir::lang_items;
use rustc_middle::ty::subst::SubstsRef;
use rustc_middle::ty::{self, AssocItemContainer, DefIdTree, Instance, ParamEnv, Ty, TyCtxt};
use rustc_span::symbol::Ident;
@ -74,22 +74,24 @@ pub fn call_kind<'tcx>(
}
});
let fn_call = parent
.and_then(|p| tcx.lang_items().group(LangItemGroup::Fn).iter().find(|did| **did == p));
let fn_call = parent.and_then(|p| {
lang_items::FN_TRAITS.iter().filter_map(|&l| tcx.lang_items().get(l)).find(|&id| id == p)
});
let operator = (!from_hir_call)
.then(|| parent)
.flatten()
.and_then(|p| tcx.lang_items().group(LangItemGroup::Op).iter().find(|did| **did == p));
let operator = if !from_hir_call && let Some(p) = parent {
lang_items::OPERATORS.iter().filter_map(|&l| tcx.lang_items().get(l)).find(|&id| id == p)
} else {
None
};
let is_deref = !from_hir_call && tcx.is_diagnostic_item(sym::deref_method, method_did);
// Check for a 'special' use of 'self' -
// an FnOnce call, an operator (e.g. `<<`), or a
// deref coercion.
let kind = if let Some(&trait_id) = fn_call {
let kind = if let Some(trait_id) = fn_call {
Some(CallKind::FnCall { fn_trait_id: trait_id, self_ty: method_substs.type_at(0) })
} else if let Some(&trait_id) = operator {
} else if let Some(trait_id) = operator {
Some(CallKind::Operator { self_arg, trait_id, self_ty: method_substs.type_at(0) })
} else if is_deref {
let deref_target = tcx.get_diagnostic_item(sym::deref_target).and_then(|deref_target| {

View File

@ -4,9 +4,11 @@ mod call_kind;
pub mod collect_writes;
mod find_self_call;
mod might_permit_raw_init;
mod type_name;
pub use self::aggregate::expand_aggregate;
pub use self::alignment::is_disaligned;
pub use self::call_kind::{call_kind, CallDesugaringKind, CallKind};
pub use self::find_self_call::find_self_call;
pub use self::might_permit_raw_init::might_permit_raw_init;
pub use self::type_name::type_name;

View File

@ -1,10 +1,9 @@
use rustc_data_structures::intern::Interned;
use rustc_hir::def_id::CrateNum;
use rustc_hir::definitions::DisambiguatedDefPathData;
use rustc_middle::mir::interpret::{Allocation, ConstAllocation};
use rustc_middle::ty::{
self,
print::{with_no_verbose_constants, PrettyPrinter, Print, Printer},
print::{PrettyPrinter, Print, Printer},
subst::{GenericArg, GenericArgKind},
Ty, TyCtxt,
};
@ -74,18 +73,10 @@ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> {
}
fn print_dyn_existential(
mut self,
self,
predicates: &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>,
) -> Result<Self::DynExistential, Self::Error> {
let mut first = true;
for p in predicates {
if !first {
write!(self, "+")?;
}
first = false;
self = p.print(self)?;
}
Ok(self)
self.pretty_print_dyn_existential(predicates)
}
fn path_crate(mut self, cnum: CrateNum) -> Result<Self::Path, Self::Error> {
@ -179,6 +170,11 @@ impl<'tcx> PrettyPrinter<'tcx> for AbsolutePathPrinter<'tcx> {
Ok(self)
}
fn should_print_verbose(&self) -> bool {
// `std::any::type_name` should never print verbose type names
false
}
}
impl Write for AbsolutePathPrinter<'_> {
@ -188,11 +184,6 @@ impl Write for AbsolutePathPrinter<'_> {
}
}
/// Directly returns an `Allocation` containing an absolute path representation of the given type.
pub(crate) fn alloc_type_name<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> ConstAllocation<'tcx> {
let path = with_no_verbose_constants!(
AbsolutePathPrinter { tcx, path: String::new() }.print_type(ty).unwrap().path
);
let alloc = Allocation::from_bytes_byte_aligned_immutable(path.into_bytes());
tcx.intern_const_alloc(alloc)
pub fn type_name<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> String {
AbsolutePathPrinter { tcx, path: String::new() }.print_type(ty).unwrap().path
}

View File

@ -25,7 +25,7 @@ smallvec = { version = "1.8.1", features = ["const_generics", "union", "may_dang
stable_deref_trait = "1.0.0"
stacker = "0.1.14"
tempfile = "3.2"
thin-vec = "0.2.8"
thin-vec = "0.2.9"
tracing = "0.1"
[dependencies.parking_lot]

View File

@ -410,6 +410,7 @@ impl<T> Lock<T> {
#[cfg(parallel_compiler)]
#[inline(always)]
#[track_caller]
pub fn lock(&self) -> LockGuard<'_, T> {
if ERROR_CHECKING {
self.0.try_lock().expect("lock was already held")
@ -420,21 +421,25 @@ impl<T> Lock<T> {
#[cfg(not(parallel_compiler))]
#[inline(always)]
#[track_caller]
pub fn lock(&self) -> LockGuard<'_, T> {
self.0.borrow_mut()
}
#[inline(always)]
#[track_caller]
pub fn with_lock<F: FnOnce(&mut T) -> R, R>(&self, f: F) -> R {
f(&mut *self.lock())
}
#[inline(always)]
#[track_caller]
pub fn borrow(&self) -> LockGuard<'_, T> {
self.lock()
}
#[inline(always)]
#[track_caller]
pub fn borrow_mut(&self) -> LockGuard<'_, T> {
self.lock()
}
@ -476,6 +481,7 @@ impl<T> RwLock<T> {
#[cfg(not(parallel_compiler))]
#[inline(always)]
#[track_caller]
pub fn read(&self) -> ReadGuard<'_, T> {
self.0.borrow()
}
@ -491,6 +497,7 @@ impl<T> RwLock<T> {
}
#[inline(always)]
#[track_caller]
pub fn with_read_lock<F: FnOnce(&T) -> R, R>(&self, f: F) -> R {
f(&*self.read())
}
@ -509,6 +516,7 @@ impl<T> RwLock<T> {
#[cfg(not(parallel_compiler))]
#[inline(always)]
#[track_caller]
pub fn write(&self) -> WriteGuard<'_, T> {
self.0.borrow_mut()
}
@ -524,16 +532,19 @@ impl<T> RwLock<T> {
}
#[inline(always)]
#[track_caller]
pub fn with_write_lock<F: FnOnce(&mut T) -> R, R>(&self, f: F) -> R {
f(&mut *self.write())
}
#[inline(always)]
#[track_caller]
pub fn borrow(&self) -> ReadGuard<'_, T> {
self.read()
}
#[inline(always)]
#[track_caller]
pub fn borrow_mut(&self) -> WriteGuard<'_, T> {
self.write()
}

View File

@ -1200,6 +1200,7 @@ pub fn report_ice(info: &panic::PanicInfo<'_>, bug_report_url: &str) {
false,
None,
false,
false,
));
let handler = rustc_errors::Handler::with_emitter(true, None, emitter);

View File

@ -61,7 +61,7 @@ with `#[derive(Clone)]`.
Some types have no ownership semantics at all and are trivial to duplicate. An
example is `i32` and the other number types. We don't have to call `.clone()` to
clone them, because they are marked `Copy` in addition to `Clone`. Implicit
clone them, because they are marked `Copy` in addition to `Clone`. Implicit
cloning is more convenient in this case. We can mark our own types `Copy` if
all their members also are marked `Copy`.

View File

@ -119,3 +119,66 @@ codegen_ssa_thorin_object_read = {$error}
codegen_ssa_thorin_object_write = {$error}
codegen_ssa_thorin_gimli_read = {$error}
codegen_ssa_thorin_gimli_write = {$error}
codegen_ssa_link_exe_unexpected_error = `link.exe` returned an unexpected error
codegen_ssa_repair_vs_build_tools = the Visual Studio build tools may need to be repaired using the Visual Studio installer
codegen_ssa_missing_cpp_build_tool_component = or a necessary component may be missing from the "C++ build tools" workload
codegen_ssa_select_cpp_build_tool_workload = in the Visual Studio installer, ensure the "C++ build tools" workload is selected
codegen_ssa_visual_studio_not_installed = you may need to install Visual Studio build tools with the "C++ build tools" workload
codegen_ssa_linker_not_found = linker `{$linker_path}` not found
.note = {$error}
codegen_ssa_unable_to_exe_linker = could not exec the linker `{$linker_path}`
.note = {$error}
.command_note = {$command_formatted}
codegen_ssa_msvc_missing_linker = the msvc targets depend on the msvc linker but `link.exe` was not found
codegen_ssa_check_installed_visual_studio = please ensure that Visual Studio 2017 or later, or Build Tools for Visual Studio were installed with the Visual C++ option.
codegen_ssa_unsufficient_vs_code_product = VS Code is a different product, and is not sufficient.
codegen_ssa_processing_dymutil_failed = processing debug info with `dsymutil` failed: {$status}
.note = {$output}
codegen_ssa_unable_to_run_dsymutil = unable to run `dsymutil`: {$error}
codegen_ssa_stripping_debu_info_failed = stripping debug info with `{$util}` failed: {$status}
.note = {$output}
codegen_ssa_unable_to_run = unable to run `{$util}`: {$error}
codegen_ssa_linker_file_stem = couldn't extract file stem from specified linker
codegen_ssa_static_library_native_artifacts = Link against the following native artifacts when linking against this static library. The order and any duplication can be significant on some platforms.
codegen_ssa_native_static_libs = native-static-libs: {$arguments}
codegen_ssa_link_script_unavailable = can only use link script when linking with GNU-like linker
codegen_ssa_link_script_write_failure = failed to write link script to {$path}: {$error}
codegen_ssa_failed_to_write = failed to write {$path}: {$error}
codegen_ssa_unable_to_write_debugger_visualizer = Unable to write debugger visualizer file `{$path}`: {$error}
codegen_ssa_rlib_archive_build_failure = failed to build archive from rlib: {$error}
codegen_ssa_option_gcc_only = option `-Z gcc-ld` is used even though linker flavor is not gcc
codegen_ssa_extract_bundled_libs_open_file = failed to open file '{$rlib}': {$error}
codegen_ssa_extract_bundled_libs_mmap_file = failed to mmap file '{$rlib}': {$error}
codegen_ssa_extract_bundled_libs_parse_archive = failed to parse archive '{$rlib}': {$error}
codegen_ssa_extract_bundled_libs_read_entry = failed to read entry '{$rlib}': {$error}
codegen_ssa_extract_bundled_libs_archive_member = failed to get data from archive member '{$rlib}': {$error}
codegen_ssa_extract_bundled_libs_convert_name = failed to convert name '{$rlib}': {$error}
codegen_ssa_extract_bundled_libs_write_file = failed to write file '{$rlib}': {$error}
codegen_ssa_unsupported_arch = unsupported arch `{$arch}` for os `{$os}`
codegen_ssa_apple_sdk_error_sdk_path = failed to get {$sdk_name} SDK path: {$error}

View File

@ -150,12 +150,28 @@ metadata_no_multiple_global_alloc =
metadata_prev_global_alloc =
previous global allocator defined here
metadata_no_multiple_alloc_error_handler =
cannot define multiple allocation error handlers
.label = cannot define a new allocation error handler
metadata_prev_alloc_error_handler =
previous allocation error handler defined here
metadata_conflicting_global_alloc =
the `#[global_allocator]` in {$other_crate_name} conflicts with global allocator in: {$crate_name}
metadata_conflicting_alloc_error_handler =
the `#[alloc_error_handler]` in {$other_crate_name} conflicts with allocation error handler in: {$crate_name}
metadata_global_alloc_required =
no global memory allocator found but one is required; link to std or add `#[global_allocator]` to a static item that implements the GlobalAlloc trait
metadata_alloc_func_required =
`#[alloc_error_handler]` function required, but not found
metadata_missing_alloc_error_handler =
use `#![feature(default_alloc_error_handler)]` for a default error handler
metadata_no_transitive_needs_dep =
the crate `{$crate_name}` cannot depend on a crate that needs {$needs_crate_name}, but it depends on `{$deps_crate_name}`

View File

@ -27,3 +27,7 @@ middle_values_too_big =
middle_cannot_be_normalized =
unable to determine layout for `{$ty}` because `{$failure_ty}` cannot be normalized
middle_strict_coherence_needs_negative_coherence =
to use `strict_coherence` on this trait, the `with_negative_coherence` feature must be enabled
.label = due to this attribute

View File

@ -21,6 +21,3 @@ monomorphize_large_assignments =
moving {$size} bytes
.label = value moved from here
.note = The current maximum size is {$limit}, but it can be customized with the move_size_limit attribute: `#![move_size_limit = "..."]`
monomorphize_requires_lang_item =
requires `{$lang_item}` lang_item

View File

@ -367,12 +367,6 @@ passes_unknown_external_lang_item =
passes_missing_panic_handler =
`#[panic_handler]` function required, but not found
passes_alloc_func_required =
`#[alloc_error_handler]` function required, but not found
passes_missing_alloc_error_handler =
use `#![feature(default_alloc_error_handler)]` for a default error handler
passes_missing_lang_item =
language item required, but not found: `{$name}`
.note = this can occur when a binary crate with `#![no_std]` is compiled for a target where `{$name}` is defined in the standard library
@ -457,8 +451,14 @@ passes_break_inside_async_block =
.async_block_label = enclosing `async` block
passes_outside_loop =
`{$name}` outside of a loop
.label = cannot `{$name}` outside of a loop
`{$name}` outside of a loop{$is_break ->
[true] {" or labeled block"}
*[false] {""}
}
.label = cannot `{$name}` outside of a loop{$is_break ->
[true] {" or labeled block"}
*[false] {""}
}
passes_unlabeled_in_labeled_block =
unlabeled `{$cf_type}` inside of a labeled block
@ -671,3 +671,36 @@ passes_missing_const_err =
attributes `#[rustc_const_unstable]` and `#[rustc_const_stable]` require the function or method to be `const`
.help = make the function or method const
.label = attribute specified here
passes_dead_codes =
{ $multiple ->
*[true] multiple {$descr}s are
[false] { $num ->
[one] {$descr} {$name_list} is
*[other] {$descr}s {$name_list} are
}
} never {$participle}
passes_change_fields_to_be_of_unit_type =
consider changing the { $num ->
[one] field
*[other] fields
} to be of unit type to suppress this warning while preserving the field numbering, or remove the { $num ->
[one] field
*[other] fields
}
passes_parent_info =
{$num ->
[one] {$descr}
*[other] {$descr}s
} in this {$parent_descr}
passes_ignored_derived_impls =
`{$name}` has {$trait_list_len ->
[one] a derived impl
*[other] derived impls
} for the {$trait_list_len ->
[one] trait {$trait_list}, but this is
*[other] traits {$trait_list}, but these are
} intentionally ignored during dead code analysis

View File

@ -12,6 +12,7 @@ use rustc_span::{Span, DUMMY_SP};
use std::borrow::Cow;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::panic::Location;
/// Error type for `Diagnostic`'s `suggestions` field, indicating that
/// `.disable_suggestions()` was called on the `Diagnostic`.
@ -107,6 +108,31 @@ pub struct Diagnostic {
/// If diagnostic is from Lint, custom hash function ignores notes
/// otherwise hash is based on the all the fields
pub is_lint: bool,
/// With `-Ztrack-diagnostics` enabled,
/// we print where in rustc this error was emitted.
pub emitted_at: DiagnosticLocation,
}
#[derive(Clone, Debug, Encodable, Decodable)]
pub struct DiagnosticLocation {
file: Cow<'static, str>,
line: u32,
col: u32,
}
impl DiagnosticLocation {
#[track_caller]
fn caller() -> Self {
let loc = Location::caller();
DiagnosticLocation { file: loc.file().into(), line: loc.line(), col: loc.column() }
}
}
impl fmt::Display for DiagnosticLocation {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}:{}:{}", self.file, self.line, self.col)
}
}
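A minimal standalone sketch (standard library only, outside rustc) of the `#[track_caller]`/`Location::caller` mechanism used above: the reported location is that of the annotated function's caller, which is why the `Diagnostic` constructors below also gain `#[track_caller]`, so the recorded position points at the code that created the diagnostic rather than at the constructor itself.

use std::panic::Location;

#[track_caller]
fn creation_site() -> String {
    // Because of #[track_caller], this is the *caller's* location,
    // not the location of this line.
    let loc = Location::caller();
    format!("{}:{}:{}", loc.file(), loc.line(), loc.column())
}

fn main() {
    println!("created at {}", creation_site());
}
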
#[derive(Clone, Debug, PartialEq, Eq, Hash, Encodable, Decodable)]
@ -173,10 +199,12 @@ impl StringPart {
}
impl Diagnostic {
#[track_caller]
pub fn new<M: Into<DiagnosticMessage>>(level: Level, message: M) -> Self {
Diagnostic::new_with_code(level, None, message)
}
#[track_caller]
pub fn new_with_code<M: Into<DiagnosticMessage>>(
level: Level,
code: Option<DiagnosticId>,
@ -192,6 +220,7 @@ impl Diagnostic {
args: Default::default(),
sort_span: DUMMY_SP,
is_lint: false,
emitted_at: DiagnosticLocation::caller(),
}
}

View File

@ -133,6 +133,7 @@ mod sealed_level_is_error {
impl<'a> DiagnosticBuilder<'a, ErrorGuaranteed> {
/// Convenience function for internal use, clients should use one of the
/// `struct_*` methods on [`Handler`].
#[track_caller]
pub(crate) fn new_guaranteeing_error<M: Into<DiagnosticMessage>, const L: Level>(
handler: &'a Handler,
message: M,
@ -196,6 +197,7 @@ impl EmissionGuarantee for ErrorGuaranteed {
}
}
#[track_caller]
fn make_diagnostic_builder(
handler: &Handler,
msg: impl Into<DiagnosticMessage>,
@ -209,6 +211,7 @@ impl EmissionGuarantee for ErrorGuaranteed {
impl<'a> DiagnosticBuilder<'a, ()> {
/// Convenience function for internal use, clients should use one of the
/// `struct_*` methods on [`Handler`].
#[track_caller]
pub(crate) fn new<M: Into<DiagnosticMessage>>(
handler: &'a Handler,
level: Level,
@ -220,6 +223,7 @@ impl<'a> DiagnosticBuilder<'a, ()> {
/// Creates a new `DiagnosticBuilder` with an already constructed
/// diagnostic.
#[track_caller]
pub(crate) fn new_diagnostic(handler: &'a Handler, diagnostic: Diagnostic) -> Self {
debug!("Created new diagnostic");
Self {
@ -308,6 +312,7 @@ impl EmissionGuarantee for Noted {
impl<'a> DiagnosticBuilder<'a, !> {
/// Convenience function for internal use, clients should use one of the
/// `struct_*` methods on [`Handler`].
#[track_caller]
pub(crate) fn new_fatal(handler: &'a Handler, message: impl Into<DiagnosticMessage>) -> Self {
let diagnostic = Diagnostic::new_with_code(Level::Fatal, None, message);
Self::new_diagnostic_fatal(handler, diagnostic)

View File

@ -11,8 +11,10 @@ use rustc_target::abi::TargetDataLayoutErrors;
use rustc_target::spec::{PanicStrategy, SplitDebuginfo, StackProtector, TargetTriple};
use std::borrow::Cow;
use std::fmt;
use std::fmt::Write;
use std::num::ParseIntError;
use std::path::{Path, PathBuf};
use std::process::ExitStatus;
pub struct DiagnosticArgFromDisplay<'a>(pub &'a dyn fmt::Display);
@ -58,6 +60,7 @@ into_diagnostic_arg_using_display!(
i128,
u128,
std::io::Error,
std::boxed::Box<dyn std::error::Error>,
std::num::NonZeroU32,
hir::Target,
Edition,
@ -66,7 +69,8 @@ into_diagnostic_arg_using_display!(
ParseIntError,
StackProtector,
&TargetTriple,
SplitDebuginfo
SplitDebuginfo,
ExitStatus,
);
impl IntoDiagnosticArg for bool {
@ -170,6 +174,37 @@ impl IntoDiagnosticArg for Level {
}
}
#[derive(Clone)]
pub struct DiagnosticSymbolList(Vec<Symbol>);
impl From<Vec<Symbol>> for DiagnosticSymbolList {
fn from(v: Vec<Symbol>) -> Self {
DiagnosticSymbolList(v)
}
}
impl IntoDiagnosticArg for DiagnosticSymbolList {
fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> {
// FIXME: replace the logic here with a real list formatter
let symbols = match &self.0[..] {
[symbol] => format!("`{symbol}`"),
[symbol, last] => {
format!("`{symbol}` and `{last}`",)
}
[symbols @ .., last] => {
let mut result = String::new();
for symbol in symbols {
write!(result, "`{symbol}`, ").unwrap();
}
write!(result, "and `{last}`").unwrap();
result
}
[] => unreachable!(),
};
DiagnosticArgValue::Str(Cow::Owned(symbols))
}
}
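A standalone sketch of the list formatting above, with plain strings in place of `Symbol`, just to show the intended output shapes:

use std::fmt::Write;

fn format_symbol_list(symbols: &[&str]) -> String {
    // Mirrors the match above: `a` / `a` and `b` / `a`, `b`, and `c`.
    match symbols {
        [symbol] => format!("`{symbol}`"),
        [symbol, last] => format!("`{symbol}` and `{last}`"),
        [symbols @ .., last] => {
            let mut result = String::new();
            for symbol in symbols {
                write!(result, "`{symbol}`, ").unwrap();
            }
            write!(result, "and `{last}`").unwrap();
            result
        }
        [] => unreachable!(),
    }
}

fn main() {
    assert_eq!(format_symbol_list(&["a", "b"]), "`a` and `b`");
    assert_eq!(format_symbol_list(&["a", "b", "c"]), "`a`, `b`, and `c`");
}
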
impl IntoDiagnostic<'_, !> for TargetDataLayoutErrors<'_> {
fn into_diagnostic(self, handler: &Handler) -> DiagnosticBuilder<'_, !> {
let mut diag;

View File

@ -16,10 +16,10 @@ use crate::snippet::{Annotation, AnnotationType, Line, MultilineAnnotation, Styl
use crate::styled_buffer::StyledBuffer;
use crate::translation::{to_fluent_args, Translate};
use crate::{
CodeSuggestion, Diagnostic, DiagnosticId, DiagnosticMessage, FluentBundle, Handler,
LazyFallbackBundle, Level, MultiSpan, SubDiagnostic, SubstitutionHighlight, SuggestionStyle,
diagnostic::DiagnosticLocation, CodeSuggestion, Diagnostic, DiagnosticId, DiagnosticMessage,
FluentBundle, Handler, LazyFallbackBundle, Level, MultiSpan, SubDiagnostic,
SubstitutionHighlight, SuggestionStyle,
};
use rustc_lint_defs::pluralize;
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
@ -64,6 +64,7 @@ impl HumanReadableErrorType {
teach: bool,
diagnostic_width: Option<usize>,
macro_backtrace: bool,
track_diagnostics: bool,
) -> EmitterWriter {
let (short, color_config) = self.unzip();
let color = color_config.suggests_using_colors();
@ -77,6 +78,7 @@ impl HumanReadableErrorType {
color,
diagnostic_width,
macro_backtrace,
track_diagnostics,
)
}
}
@ -557,6 +559,7 @@ impl Emitter for EmitterWriter {
&primary_span,
&children,
&suggestions,
self.track_diagnostics.then_some(&diag.emitted_at),
);
}
@ -650,6 +653,7 @@ pub struct EmitterWriter {
diagnostic_width: Option<usize>,
macro_backtrace: bool,
track_diagnostics: bool,
}
#[derive(Debug)]
@ -669,6 +673,7 @@ impl EmitterWriter {
teach: bool,
diagnostic_width: Option<usize>,
macro_backtrace: bool,
track_diagnostics: bool,
) -> EmitterWriter {
let dst = Destination::from_stderr(color_config);
EmitterWriter {
@ -681,6 +686,7 @@ impl EmitterWriter {
ui_testing: false,
diagnostic_width,
macro_backtrace,
track_diagnostics,
}
}
@ -694,6 +700,7 @@ impl EmitterWriter {
colored: bool,
diagnostic_width: Option<usize>,
macro_backtrace: bool,
track_diagnostics: bool,
) -> EmitterWriter {
EmitterWriter {
dst: Raw(dst, colored),
@ -705,6 +712,7 @@ impl EmitterWriter {
ui_testing: false,
diagnostic_width,
macro_backtrace,
track_diagnostics,
}
}
@ -1327,6 +1335,7 @@ impl EmitterWriter {
level: &Level,
max_line_num_len: usize,
is_secondary: bool,
emitted_at: Option<&DiagnosticLocation>,
) -> io::Result<()> {
let mut buffer = StyledBuffer::new();
@ -1377,7 +1386,6 @@ impl EmitterWriter {
}
}
}
let mut annotated_files = FileWithAnnotatedLines::collect_annotations(self, args, msp);
// Make sure our primary file comes first
@ -1653,6 +1661,12 @@ impl EmitterWriter {
}
}
if let Some(tracked) = emitted_at {
let track = format!("-Ztrack-diagnostics: created at {tracked}");
let len = buffer.num_lines();
buffer.append(len, &track, Style::NoStyle);
}
// final step: take our styled buffer, render it, then output it
emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message)?;
@ -1977,6 +1991,7 @@ impl EmitterWriter {
span: &MultiSpan,
children: &[SubDiagnostic],
suggestions: &[CodeSuggestion],
emitted_at: Option<&DiagnosticLocation>,
) {
let max_line_num_len = if self.ui_testing {
ANONYMIZED_LINE_NUM.len()
@ -1985,7 +2000,16 @@ impl EmitterWriter {
num_decimal_digits(n)
};
match self.emit_message_default(span, message, args, code, level, max_line_num_len, false) {
match self.emit_message_default(
span,
message,
args,
code,
level,
max_line_num_len,
false,
emitted_at,
) {
Ok(()) => {
if !children.is_empty()
|| suggestions.iter().any(|s| s.style != SuggestionStyle::CompletelyHidden)
@ -2014,6 +2038,7 @@ impl EmitterWriter {
&child.level,
max_line_num_len,
true,
None,
) {
panic!("failed to emit error: {}", err);
}
@ -2030,6 +2055,7 @@ impl EmitterWriter {
&Level::Help,
max_line_num_len,
true,
None,
) {
panic!("failed to emit error: {}", e);
}

View File

@ -45,6 +45,7 @@ pub struct JsonEmitter {
json_rendered: HumanReadableErrorType,
diagnostic_width: Option<usize>,
macro_backtrace: bool,
track_diagnostics: bool,
}
impl JsonEmitter {
@ -57,6 +58,7 @@ impl JsonEmitter {
json_rendered: HumanReadableErrorType,
diagnostic_width: Option<usize>,
macro_backtrace: bool,
track_diagnostics: bool,
) -> JsonEmitter {
JsonEmitter {
dst: Box::new(io::BufWriter::new(io::stderr())),
@ -69,6 +71,7 @@ impl JsonEmitter {
json_rendered,
diagnostic_width,
macro_backtrace,
track_diagnostics,
}
}
@ -79,6 +82,7 @@ impl JsonEmitter {
fallback_bundle: LazyFallbackBundle,
diagnostic_width: Option<usize>,
macro_backtrace: bool,
track_diagnostics: bool,
) -> JsonEmitter {
let file_path_mapping = FilePathMapping::empty();
JsonEmitter::stderr(
@ -90,6 +94,7 @@ impl JsonEmitter {
json_rendered,
diagnostic_width,
macro_backtrace,
track_diagnostics,
)
}
@ -103,6 +108,7 @@ impl JsonEmitter {
json_rendered: HumanReadableErrorType,
diagnostic_width: Option<usize>,
macro_backtrace: bool,
track_diagnostics: bool,
) -> JsonEmitter {
JsonEmitter {
dst,
@ -115,6 +121,7 @@ impl JsonEmitter {
json_rendered,
diagnostic_width,
macro_backtrace,
track_diagnostics,
}
}
@ -350,6 +357,7 @@ impl Diagnostic {
false,
je.diagnostic_width,
je.macro_backtrace,
je.track_diagnostics,
)
.ui_testing(je.ui_testing)
.emit_diagnostic(diag);

View File

@ -59,6 +59,7 @@ fn test_positions(code: &str, span: (u32, u32), expected_output: SpanTestData) {
HumanReadableErrorType::Short(ColorConfig::Never),
None,
false,
false,
);
let span = Span::with_root_ctxt(BytePos(span.0), BytePos(span.1));

View File

@ -376,7 +376,7 @@ pub use diagnostic::{
DiagnosticStyledString, IntoDiagnosticArg, SubDiagnostic,
};
pub use diagnostic_builder::{DiagnosticBuilder, EmissionGuarantee, Noted};
pub use diagnostic_impls::DiagnosticArgFromDisplay;
pub use diagnostic_impls::{DiagnosticArgFromDisplay, DiagnosticSymbolList};
use std::backtrace::Backtrace;
/// A handler deals with errors and other compiler output.
@ -492,6 +492,8 @@ pub struct HandlerFlags {
pub macro_backtrace: bool,
/// If true, identical diagnostics are reported only once.
pub deduplicate_diagnostics: bool,
/// Track where errors are created. Enabled with `-Ztrack-diagnostics`.
pub track_diagnostics: bool,
}
impl Drop for HandlerInner {
@ -559,6 +561,7 @@ impl Handler {
false,
None,
flags.macro_backtrace,
flags.track_diagnostics,
));
Self::with_emitter_and_flags(emitter, flags)
}
@ -664,6 +667,7 @@ impl Handler {
/// Construct a builder with the `msg` at the level appropriate for the specific `EmissionGuarantee`.
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_diagnostic<G: EmissionGuarantee>(
&self,
msg: impl Into<DiagnosticMessage>,
@ -677,6 +681,7 @@ impl Handler {
/// * `can_emit_warnings` is `true`
/// * `is_force_warn` was set in `DiagnosticId::Lint`
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_span_warn(
&self,
span: impl Into<MultiSpan>,
@ -693,6 +698,7 @@ impl Handler {
/// Attempting to `.emit()` the builder will only emit if either:
/// * `can_emit_warnings` is `true`
/// * `is_force_warn` was set in `DiagnosticId::Lint`
#[track_caller]
pub fn struct_span_warn_with_expectation(
&self,
span: impl Into<MultiSpan>,
@ -706,6 +712,7 @@ impl Handler {
/// Construct a builder at the `Allow` level at the given `span` and with the `msg`.
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_span_allow(
&self,
span: impl Into<MultiSpan>,
@ -719,6 +726,7 @@ impl Handler {
/// Construct a builder at the `Warning` level at the given `span` and with the `msg`.
/// Also include a code.
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_span_warn_with_code(
&self,
span: impl Into<MultiSpan>,
@ -736,6 +744,7 @@ impl Handler {
/// * `can_emit_warnings` is `true`
/// * `is_force_warn` was set in `DiagnosticId::Lint`
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_warn(&self, msg: impl Into<DiagnosticMessage>) -> DiagnosticBuilder<'_, ()> {
DiagnosticBuilder::new(self, Level::Warning(None), msg)
}
@ -746,6 +755,7 @@ impl Handler {
/// Attempting to `.emit()` the builder will only emit if either:
/// * `can_emit_warnings` is `true`
/// * `is_force_warn` was set in `DiagnosticId::Lint`
#[track_caller]
pub fn struct_warn_with_expectation(
&self,
msg: impl Into<DiagnosticMessage>,
@ -756,12 +766,14 @@ impl Handler {
/// Construct a builder at the `Allow` level with the `msg`.
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_allow(&self, msg: impl Into<DiagnosticMessage>) -> DiagnosticBuilder<'_, ()> {
DiagnosticBuilder::new(self, Level::Allow, msg)
}
/// Construct a builder at the `Expect` level with the `msg`.
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_expect(
&self,
msg: impl Into<DiagnosticMessage>,
@ -772,6 +784,7 @@ impl Handler {
/// Construct a builder at the `Error` level at the given `span` and with the `msg`.
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_span_err(
&self,
span: impl Into<MultiSpan>,
@ -784,6 +797,7 @@ impl Handler {
/// Construct a builder at the `Error` level at the given `span`, with the `msg`, and `code`.
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_span_err_with_code(
&self,
span: impl Into<MultiSpan>,
@ -798,6 +812,7 @@ impl Handler {
/// Construct a builder at the `Error` level with the `msg`.
// FIXME: This method should be removed (every error should have an associated error code).
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_err(
&self,
msg: impl Into<DiagnosticMessage>,
@ -807,12 +822,14 @@ impl Handler {
/// This should only be used by `rustc_middle::lint::struct_lint_level`. Do not use it for hard errors.
#[doc(hidden)]
#[track_caller]
pub fn struct_err_lint(&self, msg: impl Into<DiagnosticMessage>) -> DiagnosticBuilder<'_, ()> {
DiagnosticBuilder::new(self, Level::Error { lint: true }, msg)
}
/// Construct a builder at the `Error` level with the `msg` and the `code`.
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_err_with_code(
&self,
msg: impl Into<DiagnosticMessage>,
@ -825,6 +842,7 @@ impl Handler {
/// Construct a builder at the `Warn` level with the `msg` and the `code`.
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_warn_with_code(
&self,
msg: impl Into<DiagnosticMessage>,
@ -837,6 +855,7 @@ impl Handler {
/// Construct a builder at the `Fatal` level at the given `span` and with the `msg`.
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_span_fatal(
&self,
span: impl Into<MultiSpan>,
@ -849,6 +868,7 @@ impl Handler {
/// Construct a builder at the `Fatal` level at the given `span`, with the `msg`, and `code`.
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_span_fatal_with_code(
&self,
span: impl Into<MultiSpan>,
@ -862,6 +882,7 @@ impl Handler {
/// Construct a builder at the `Error` level with the `msg`.
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_fatal(&self, msg: impl Into<DiagnosticMessage>) -> DiagnosticBuilder<'_, !> {
DiagnosticBuilder::new_fatal(self, msg)
}
@ -874,6 +895,7 @@ impl Handler {
/// Construct a builder at the `Note` level with the `msg`.
#[rustc_lint_diagnostics]
#[track_caller]
pub fn struct_note_without_error(
&self,
msg: impl Into<DiagnosticMessage>,
@ -882,12 +904,14 @@ impl Handler {
}
#[rustc_lint_diagnostics]
#[track_caller]
pub fn span_fatal(&self, span: impl Into<MultiSpan>, msg: impl Into<DiagnosticMessage>) -> ! {
self.emit_diag_at_span(Diagnostic::new(Fatal, msg), span);
FatalError.raise()
}
#[rustc_lint_diagnostics]
#[track_caller]
pub fn span_fatal_with_code(
&self,
span: impl Into<MultiSpan>,
@ -899,6 +923,7 @@ impl Handler {
}
#[rustc_lint_diagnostics]
#[track_caller]
pub fn span_err(
&self,
span: impl Into<MultiSpan>,
@ -908,6 +933,7 @@ impl Handler {
}
#[rustc_lint_diagnostics]
#[track_caller]
pub fn span_err_with_code(
&self,
span: impl Into<MultiSpan>,
@ -921,11 +947,13 @@ impl Handler {
}
#[rustc_lint_diagnostics]
#[track_caller]
pub fn span_warn(&self, span: impl Into<MultiSpan>, msg: impl Into<DiagnosticMessage>) {
self.emit_diag_at_span(Diagnostic::new(Warning(None), msg), span);
}
#[rustc_lint_diagnostics]
#[track_caller]
pub fn span_warn_with_code(
&self,
span: impl Into<MultiSpan>,
@ -954,10 +982,12 @@ impl Handler {
self.inner.borrow_mut().delay_good_path_bug(msg)
}
#[track_caller]
pub fn span_bug_no_panic(&self, span: impl Into<MultiSpan>, msg: impl Into<DiagnosticMessage>) {
self.emit_diag_at_span(Diagnostic::new(Bug, msg), span);
}
#[track_caller]
pub fn span_note_without_error(
&self,
span: impl Into<MultiSpan>,
@ -966,6 +996,7 @@ impl Handler {
self.emit_diag_at_span(Diagnostic::new(Note, msg), span);
}
#[track_caller]
pub fn span_note_diag(
&self,
span: Span,
@ -1452,6 +1483,7 @@ impl HandlerInner {
}
}
#[track_caller]
fn span_bug(&mut self, sp: impl Into<MultiSpan>, msg: impl Into<DiagnosticMessage>) -> ! {
self.emit_diag_at_span(Diagnostic::new(Bug, msg), sp);
panic::panic_any(ExplicitBug);

View File

@ -151,6 +151,7 @@ fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &
false,
None,
false,
false,
);
let handler = Handler::with_emitter(true, None, Box::new(emitter));
handler.span_err(msp, "foo");

View File

@ -388,6 +388,9 @@ declare_features! (
(active, exclusive_range_pattern, "1.11.0", Some(37854), None),
/// Allows exhaustive pattern matching on types that contain uninhabited types.
(active, exhaustive_patterns, "1.13.0", Some(51085), None),
/// Allows using `efiapi`, `sysv64` and `win64` as calling convention
/// for functions with varargs.
(active, extended_varargs_abi_support, "1.65.0", Some(100189), None),
/// Allows defining `extern type`s.
(active, extern_types, "1.23.0", Some(43467), None),
/// Allows the use of `#[ffi_const]` on foreign functions.
@ -412,6 +415,8 @@ declare_features! (
(active, half_open_range_patterns_in_slices, "CURRENT_RUSTC_VERSION", Some(67264), None),
/// Allows `if let` guard in match arms.
(active, if_let_guard, "1.47.0", Some(51114), None),
/// Allows `impl Trait` as output type in `Fn` traits in return position of functions.
(active, impl_trait_in_fn_trait_return, "1.64.0", Some(99697), None),
/// Allows using imported `main` function
(active, imported_main, "1.53.0", Some(28937), None),
/// Allows associated types in inherent impls.

View File

@ -554,10 +554,6 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
rustc_attr!(rustc_reallocator, Normal, template!(Word), WarnFollowing, IMPL_DETAIL),
rustc_attr!(rustc_deallocator, Normal, template!(Word), WarnFollowing, IMPL_DETAIL),
rustc_attr!(rustc_allocator_zeroed, Normal, template!(Word), WarnFollowing, IMPL_DETAIL),
gated!(
alloc_error_handler, Normal, template!(Word), WarnFollowing,
experimental!(alloc_error_handler)
),
gated!(
default_lib_allocator, Normal, template!(Word), WarnFollowing, allocator_internals,
experimental!(default_lib_allocator),

View File

@ -358,6 +358,9 @@ pub trait Visitor<'v>: Sized {
fn visit_where_predicate(&mut self, predicate: &'v WherePredicate<'v>) {
walk_where_predicate(self, predicate)
}
fn visit_fn_ret_ty(&mut self, ret_ty: &'v FnRetTy<'v>) {
walk_fn_ret_ty(self, ret_ty)
}
fn visit_fn_decl(&mut self, fd: &'v FnDecl<'v>) {
walk_fn_decl(self, fd)
}
@ -410,12 +413,7 @@ pub trait Visitor<'v>: Sized {
walk_inf(self, inf);
}
fn visit_generic_arg(&mut self, generic_arg: &'v GenericArg<'v>) {
match generic_arg {
GenericArg::Lifetime(lt) => self.visit_lifetime(lt),
GenericArg::Type(ty) => self.visit_ty(ty),
GenericArg::Const(ct) => self.visit_anon_const(&ct.value),
GenericArg::Infer(inf) => self.visit_infer(inf),
}
walk_generic_arg(self, generic_arg);
}
fn visit_lifetime(&mut self, lifetime: &'v Lifetime) {
walk_lifetime(self, lifetime)
@ -448,63 +446,6 @@ pub trait Visitor<'v>: Sized {
}
}
pub fn walk_mod<'v, V: Visitor<'v>>(visitor: &mut V, module: &'v Mod<'v>, mod_hir_id: HirId) {
visitor.visit_id(mod_hir_id);
for &item_id in module.item_ids {
visitor.visit_nested_item(item_id);
}
}
pub fn walk_body<'v, V: Visitor<'v>>(visitor: &mut V, body: &'v Body<'v>) {
walk_list!(visitor, visit_param, body.params);
visitor.visit_expr(&body.value);
}
pub fn walk_local<'v, V: Visitor<'v>>(visitor: &mut V, local: &'v Local<'v>) {
// Intentionally visiting the expr first - the initialization expr
// dominates the local's definition.
walk_list!(visitor, visit_expr, &local.init);
visitor.visit_id(local.hir_id);
visitor.visit_pat(&local.pat);
if let Some(els) = local.els {
visitor.visit_block(els);
}
walk_list!(visitor, visit_ty, &local.ty);
}
pub fn walk_ident<'v, V: Visitor<'v>>(visitor: &mut V, ident: Ident) {
visitor.visit_name(ident.name);
}
pub fn walk_label<'v, V: Visitor<'v>>(visitor: &mut V, label: &'v Label) {
visitor.visit_ident(label.ident);
}
pub fn walk_lifetime<'v, V: Visitor<'v>>(visitor: &mut V, lifetime: &'v Lifetime) {
visitor.visit_id(lifetime.hir_id);
match lifetime.name {
LifetimeName::Param(_, ParamName::Plain(ident)) => {
visitor.visit_ident(ident);
}
LifetimeName::Param(_, ParamName::Fresh)
| LifetimeName::Param(_, ParamName::Error)
| LifetimeName::Static
| LifetimeName::Error
| LifetimeName::ImplicitObjectLifetimeDefault
| LifetimeName::Infer => {}
}
}
pub fn walk_poly_trait_ref<'v, V: Visitor<'v>>(visitor: &mut V, trait_ref: &'v PolyTraitRef<'v>) {
walk_list!(visitor, visit_generic_param, trait_ref.bound_generic_params);
visitor.visit_trait_ref(&trait_ref.trait_ref);
}
pub fn walk_trait_ref<'v, V: Visitor<'v>>(visitor: &mut V, trait_ref: &'v TraitRef<'v>) {
visitor.visit_id(trait_ref.hir_ref_id);
visitor.visit_path(&trait_ref.path, trait_ref.hir_ref_id)
}
pub fn walk_param<'v, V: Visitor<'v>>(visitor: &mut V, param: &'v Param<'v>) {
visitor.visit_id(param.hir_id);
visitor.visit_pat(&param.pat);
@ -601,142 +542,80 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item<'v>) {
}
}
pub fn walk_inline_asm<'v, V: Visitor<'v>>(visitor: &mut V, asm: &'v InlineAsm<'v>, id: HirId) {
for (op, op_sp) in asm.operands {
match op {
InlineAsmOperand::In { expr, .. } | InlineAsmOperand::InOut { expr, .. } => {
visitor.visit_expr(expr)
pub fn walk_body<'v, V: Visitor<'v>>(visitor: &mut V, body: &'v Body<'v>) {
walk_list!(visitor, visit_param, body.params);
visitor.visit_expr(&body.value);
}
pub fn walk_ident<'v, V: Visitor<'v>>(visitor: &mut V, ident: Ident) {
visitor.visit_name(ident.name);
}
pub fn walk_mod<'v, V: Visitor<'v>>(visitor: &mut V, module: &'v Mod<'v>, mod_hir_id: HirId) {
visitor.visit_id(mod_hir_id);
for &item_id in module.item_ids {
visitor.visit_nested_item(item_id);
}
}
pub fn walk_foreign_item<'v, V: Visitor<'v>>(visitor: &mut V, foreign_item: &'v ForeignItem<'v>) {
visitor.visit_id(foreign_item.hir_id());
visitor.visit_ident(foreign_item.ident);
match foreign_item.kind {
ForeignItemKind::Fn(ref function_declaration, param_names, ref generics) => {
visitor.visit_generics(generics);
visitor.visit_fn_decl(function_declaration);
for &param_name in param_names {
visitor.visit_ident(param_name);
}
InlineAsmOperand::Out { expr, .. } => {
if let Some(expr) = expr {
visitor.visit_expr(expr);
}
}
ForeignItemKind::Static(ref typ, _) => visitor.visit_ty(typ),
ForeignItemKind::Type => (),
}
}
pub fn walk_local<'v, V: Visitor<'v>>(visitor: &mut V, local: &'v Local<'v>) {
// Intentionally visiting the expr first - the initialization expr
// dominates the local's definition.
walk_list!(visitor, visit_expr, &local.init);
visitor.visit_id(local.hir_id);
visitor.visit_pat(&local.pat);
if let Some(els) = local.els {
visitor.visit_block(els);
}
walk_list!(visitor, visit_ty, &local.ty);
}
pub fn walk_block<'v, V: Visitor<'v>>(visitor: &mut V, block: &'v Block<'v>) {
visitor.visit_id(block.hir_id);
walk_list!(visitor, visit_stmt, block.stmts);
walk_list!(visitor, visit_expr, &block.expr);
}
pub fn walk_stmt<'v, V: Visitor<'v>>(visitor: &mut V, statement: &'v Stmt<'v>) {
visitor.visit_id(statement.hir_id);
match statement.kind {
StmtKind::Local(ref local) => visitor.visit_local(local),
StmtKind::Item(item) => visitor.visit_nested_item(item),
StmtKind::Expr(ref expression) | StmtKind::Semi(ref expression) => {
visitor.visit_expr(expression)
}
}
}
pub fn walk_arm<'v, V: Visitor<'v>>(visitor: &mut V, arm: &'v Arm<'v>) {
visitor.visit_id(arm.hir_id);
visitor.visit_pat(&arm.pat);
if let Some(ref g) = arm.guard {
match g {
Guard::If(ref e) => visitor.visit_expr(e),
Guard::IfLet(ref l) => {
visitor.visit_let_expr(l);
}
InlineAsmOperand::SplitInOut { in_expr, out_expr, .. } => {
visitor.visit_expr(in_expr);
if let Some(out_expr) = out_expr {
visitor.visit_expr(out_expr);
}
}
InlineAsmOperand::Const { anon_const, .. }
| InlineAsmOperand::SymFn { anon_const, .. } => visitor.visit_anon_const(anon_const),
InlineAsmOperand::SymStatic { path, .. } => visitor.visit_qpath(path, id, *op_sp),
}
}
}
pub fn walk_use<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path<'v>, hir_id: HirId) {
visitor.visit_id(hir_id);
visitor.visit_path(path, hir_id);
}
pub fn walk_enum_def<'v, V: Visitor<'v>>(
visitor: &mut V,
enum_definition: &'v EnumDef<'v>,
item_id: HirId,
) {
visitor.visit_id(item_id);
walk_list!(visitor, visit_variant, enum_definition.variants);
}
pub fn walk_variant<'v, V: Visitor<'v>>(visitor: &mut V, variant: &'v Variant<'v>) {
visitor.visit_ident(variant.ident);
visitor.visit_id(variant.id);
visitor.visit_variant_data(&variant.data);
walk_list!(visitor, visit_anon_const, &variant.disr_expr);
}
pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty<'v>) {
visitor.visit_id(typ.hir_id);
match typ.kind {
TyKind::Slice(ref ty) => visitor.visit_ty(ty),
TyKind::Ptr(ref mutable_type) => visitor.visit_ty(&mutable_type.ty),
TyKind::Rptr(ref lifetime, ref mutable_type) => {
visitor.visit_lifetime(lifetime);
visitor.visit_ty(&mutable_type.ty)
}
TyKind::Never => {}
TyKind::Tup(tuple_element_types) => {
walk_list!(visitor, visit_ty, tuple_element_types);
}
TyKind::BareFn(ref function_declaration) => {
walk_list!(visitor, visit_generic_param, function_declaration.generic_params);
visitor.visit_fn_decl(&function_declaration.decl);
}
TyKind::Path(ref qpath) => {
visitor.visit_qpath(qpath, typ.hir_id, typ.span);
}
TyKind::OpaqueDef(item_id, lifetimes, _in_trait) => {
visitor.visit_nested_item(item_id);
walk_list!(visitor, visit_generic_arg, lifetimes);
}
TyKind::Array(ref ty, ref length) => {
visitor.visit_ty(ty);
visitor.visit_array_length(length)
}
TyKind::TraitObject(bounds, ref lifetime, _syntax) => {
for bound in bounds {
visitor.visit_poly_trait_ref(bound);
}
visitor.visit_lifetime(lifetime);
}
TyKind::Typeof(ref expression) => visitor.visit_anon_const(expression),
TyKind::Infer | TyKind::Err => {}
}
}
pub fn walk_inf<'v, V: Visitor<'v>>(visitor: &mut V, inf: &'v InferArg) {
visitor.visit_id(inf.hir_id);
}
pub fn walk_qpath<'v, V: Visitor<'v>>(visitor: &mut V, qpath: &'v QPath<'v>, id: HirId) {
match *qpath {
QPath::Resolved(ref maybe_qself, ref path) => {
walk_list!(visitor, visit_ty, maybe_qself);
visitor.visit_path(path, id)
}
QPath::TypeRelative(ref qself, ref segment) => {
visitor.visit_ty(qself);
visitor.visit_path_segment(segment);
}
QPath::LangItem(..) => {}
}
}
pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path<'v>) {
for segment in path.segments {
visitor.visit_path_segment(segment);
}
}
pub fn walk_path_segment<'v, V: Visitor<'v>>(visitor: &mut V, segment: &'v PathSegment<'v>) {
visitor.visit_ident(segment.ident);
visitor.visit_id(segment.hir_id);
if let Some(ref args) = segment.args {
visitor.visit_generic_args(args);
}
}
pub fn walk_generic_args<'v, V: Visitor<'v>>(visitor: &mut V, generic_args: &'v GenericArgs<'v>) {
walk_list!(visitor, visit_generic_arg, generic_args.args);
walk_list!(visitor, visit_assoc_type_binding, generic_args.bindings);
}
pub fn walk_assoc_type_binding<'v, V: Visitor<'v>>(
visitor: &mut V,
type_binding: &'v TypeBinding<'v>,
) {
visitor.visit_id(type_binding.hir_id);
visitor.visit_ident(type_binding.ident);
visitor.visit_generic_args(type_binding.gen_args);
match type_binding.kind {
TypeBindingKind::Equality { ref term } => match term {
Term::Ty(ref ty) => visitor.visit_ty(ty),
Term::Const(ref c) => visitor.visit_anon_const(c),
},
TypeBindingKind::Constraint { bounds } => walk_list!(visitor, visit_param_bound, bounds),
}
visitor.visit_expr(&arm.body);
}
pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat<'v>) {
@ -784,257 +663,6 @@ pub fn walk_pat_field<'v, V: Visitor<'v>>(visitor: &mut V, field: &'v PatField<'
visitor.visit_pat(&field.pat)
}
pub fn walk_foreign_item<'v, V: Visitor<'v>>(visitor: &mut V, foreign_item: &'v ForeignItem<'v>) {
visitor.visit_id(foreign_item.hir_id());
visitor.visit_ident(foreign_item.ident);
match foreign_item.kind {
ForeignItemKind::Fn(ref function_declaration, param_names, ref generics) => {
visitor.visit_generics(generics);
visitor.visit_fn_decl(function_declaration);
for &param_name in param_names {
visitor.visit_ident(param_name);
}
}
ForeignItemKind::Static(ref typ, _) => visitor.visit_ty(typ),
ForeignItemKind::Type => (),
}
}
pub fn walk_param_bound<'v, V: Visitor<'v>>(visitor: &mut V, bound: &'v GenericBound<'v>) {
match *bound {
GenericBound::Trait(ref typ, _modifier) => {
visitor.visit_poly_trait_ref(typ);
}
GenericBound::LangItemTrait(_, _span, hir_id, args) => {
visitor.visit_id(hir_id);
visitor.visit_generic_args(args);
}
GenericBound::Outlives(ref lifetime) => visitor.visit_lifetime(lifetime),
}
}
pub fn walk_generic_param<'v, V: Visitor<'v>>(visitor: &mut V, param: &'v GenericParam<'v>) {
visitor.visit_id(param.hir_id);
match param.name {
ParamName::Plain(ident) => visitor.visit_ident(ident),
ParamName::Error | ParamName::Fresh => {}
}
match param.kind {
GenericParamKind::Lifetime { .. } => {}
GenericParamKind::Type { ref default, .. } => walk_list!(visitor, visit_ty, default),
GenericParamKind::Const { ref ty, ref default } => {
visitor.visit_ty(ty);
if let Some(ref default) = default {
visitor.visit_const_param_default(param.hir_id, default);
}
}
}
}
pub fn walk_const_param_default<'v, V: Visitor<'v>>(visitor: &mut V, ct: &'v AnonConst) {
visitor.visit_anon_const(ct)
}
pub fn walk_generics<'v, V: Visitor<'v>>(visitor: &mut V, generics: &'v Generics<'v>) {
walk_list!(visitor, visit_generic_param, generics.params);
walk_list!(visitor, visit_where_predicate, generics.predicates);
}
pub fn walk_where_predicate<'v, V: Visitor<'v>>(
visitor: &mut V,
predicate: &'v WherePredicate<'v>,
) {
match *predicate {
WherePredicate::BoundPredicate(WhereBoundPredicate {
hir_id,
ref bounded_ty,
bounds,
bound_generic_params,
origin: _,
span: _,
}) => {
visitor.visit_id(hir_id);
visitor.visit_ty(bounded_ty);
walk_list!(visitor, visit_param_bound, bounds);
walk_list!(visitor, visit_generic_param, bound_generic_params);
}
WherePredicate::RegionPredicate(WhereRegionPredicate {
ref lifetime,
bounds,
span: _,
in_where_clause: _,
}) => {
visitor.visit_lifetime(lifetime);
walk_list!(visitor, visit_param_bound, bounds);
}
WherePredicate::EqPredicate(WhereEqPredicate { ref lhs_ty, ref rhs_ty, span: _ }) => {
visitor.visit_ty(lhs_ty);
visitor.visit_ty(rhs_ty);
}
}
}
pub fn walk_fn_ret_ty<'v, V: Visitor<'v>>(visitor: &mut V, ret_ty: &'v FnRetTy<'v>) {
if let FnRetTy::Return(ref output_ty) = *ret_ty {
visitor.visit_ty(output_ty)
}
}
pub fn walk_fn_decl<'v, V: Visitor<'v>>(visitor: &mut V, function_declaration: &'v FnDecl<'v>) {
for ty in function_declaration.inputs {
visitor.visit_ty(ty)
}
walk_fn_ret_ty(visitor, &function_declaration.output)
}
pub fn walk_fn_kind<'v, V: Visitor<'v>>(visitor: &mut V, function_kind: FnKind<'v>) {
match function_kind {
FnKind::ItemFn(_, generics, ..) => {
visitor.visit_generics(generics);
}
FnKind::Closure | FnKind::Method(..) => {}
}
}
pub fn walk_fn<'v, V: Visitor<'v>>(
visitor: &mut V,
function_kind: FnKind<'v>,
function_declaration: &'v FnDecl<'v>,
body_id: BodyId,
id: HirId,
) {
visitor.visit_id(id);
visitor.visit_fn_decl(function_declaration);
walk_fn_kind(visitor, function_kind);
visitor.visit_nested_body(body_id)
}
pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v TraitItem<'v>) {
// N.B., deliberately force a compilation error if/when new fields are added.
let TraitItem { ident, generics, ref defaultness, ref kind, span, owner_id: _ } = *trait_item;
let hir_id = trait_item.hir_id();
visitor.visit_ident(ident);
visitor.visit_generics(&generics);
visitor.visit_defaultness(&defaultness);
match *kind {
TraitItemKind::Const(ref ty, default) => {
visitor.visit_id(hir_id);
visitor.visit_ty(ty);
walk_list!(visitor, visit_nested_body, default);
}
TraitItemKind::Fn(ref sig, TraitFn::Required(param_names)) => {
visitor.visit_id(hir_id);
visitor.visit_fn_decl(&sig.decl);
for &param_name in param_names {
visitor.visit_ident(param_name);
}
}
TraitItemKind::Fn(ref sig, TraitFn::Provided(body_id)) => {
visitor.visit_fn(FnKind::Method(ident, sig), &sig.decl, body_id, span, hir_id);
}
TraitItemKind::Type(bounds, ref default) => {
visitor.visit_id(hir_id);
walk_list!(visitor, visit_param_bound, bounds);
walk_list!(visitor, visit_ty, default);
}
}
}
pub fn walk_trait_item_ref<'v, V: Visitor<'v>>(visitor: &mut V, trait_item_ref: &'v TraitItemRef) {
// N.B., deliberately force a compilation error if/when new fields are added.
let TraitItemRef { id, ident, ref kind, span: _ } = *trait_item_ref;
visitor.visit_nested_trait_item(id);
visitor.visit_ident(ident);
visitor.visit_associated_item_kind(kind);
}
pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplItem<'v>) {
// N.B., deliberately force a compilation error if/when new fields are added.
let ImplItem {
owner_id: _,
ident,
ref generics,
ref kind,
ref defaultness,
span: _,
vis_span: _,
} = *impl_item;
visitor.visit_ident(ident);
visitor.visit_generics(generics);
visitor.visit_defaultness(defaultness);
match *kind {
ImplItemKind::Const(ref ty, body) => {
visitor.visit_id(impl_item.hir_id());
visitor.visit_ty(ty);
visitor.visit_nested_body(body);
}
ImplItemKind::Fn(ref sig, body_id) => {
visitor.visit_fn(
FnKind::Method(impl_item.ident, sig),
&sig.decl,
body_id,
impl_item.span,
impl_item.hir_id(),
);
}
ImplItemKind::Type(ref ty) => {
visitor.visit_id(impl_item.hir_id());
visitor.visit_ty(ty);
}
}
}
pub fn walk_foreign_item_ref<'v, V: Visitor<'v>>(
visitor: &mut V,
foreign_item_ref: &'v ForeignItemRef,
) {
// N.B., deliberately force a compilation error if/when new fields are added.
let ForeignItemRef { id, ident, span: _ } = *foreign_item_ref;
visitor.visit_nested_foreign_item(id);
visitor.visit_ident(ident);
}
pub fn walk_impl_item_ref<'v, V: Visitor<'v>>(visitor: &mut V, impl_item_ref: &'v ImplItemRef) {
// N.B., deliberately force a compilation error if/when new fields are added.
let ImplItemRef { id, ident, ref kind, span: _, trait_item_def_id: _ } = *impl_item_ref;
visitor.visit_nested_impl_item(id);
visitor.visit_ident(ident);
visitor.visit_associated_item_kind(kind);
}
pub fn walk_struct_def<'v, V: Visitor<'v>>(
visitor: &mut V,
struct_definition: &'v VariantData<'v>,
) {
walk_list!(visitor, visit_id, struct_definition.ctor_hir_id());
walk_list!(visitor, visit_field_def, struct_definition.fields());
}
pub fn walk_field_def<'v, V: Visitor<'v>>(visitor: &mut V, field: &'v FieldDef<'v>) {
visitor.visit_id(field.hir_id);
visitor.visit_ident(field.ident);
visitor.visit_ty(&field.ty);
}
pub fn walk_block<'v, V: Visitor<'v>>(visitor: &mut V, block: &'v Block<'v>) {
visitor.visit_id(block.hir_id);
walk_list!(visitor, visit_stmt, block.stmts);
walk_list!(visitor, visit_expr, &block.expr);
}
pub fn walk_stmt<'v, V: Visitor<'v>>(visitor: &mut V, statement: &'v Stmt<'v>) {
visitor.visit_id(statement.hir_id);
match statement.kind {
StmtKind::Local(ref local) => visitor.visit_local(local),
StmtKind::Item(item) => visitor.visit_nested_item(item),
StmtKind::Expr(ref expression) | StmtKind::Semi(ref expression) => {
visitor.visit_expr(expression)
}
}
}
pub fn walk_array_len<'v, V: Visitor<'v>>(visitor: &mut V, len: &'v ArrayLen) {
match len {
&ArrayLen::Infer(hir_id, _span) => visitor.visit_id(hir_id),
@ -1047,20 +675,6 @@ pub fn walk_anon_const<'v, V: Visitor<'v>>(visitor: &mut V, constant: &'v AnonCo
visitor.visit_nested_body(constant.body);
}
pub fn walk_let_expr<'v, V: Visitor<'v>>(visitor: &mut V, let_expr: &'v Let<'v>) {
// match the visit order in walk_local
visitor.visit_expr(let_expr.init);
visitor.visit_id(let_expr.hir_id);
visitor.visit_pat(let_expr.pat);
walk_list!(visitor, visit_ty, let_expr.ty);
}
pub fn walk_expr_field<'v, V: Visitor<'v>>(visitor: &mut V, field: &'v ExprField<'v>) {
visitor.visit_id(field.hir_id);
visitor.visit_ident(field.ident);
visitor.visit_expr(&field.expr)
}
pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr<'v>) {
visitor.visit_id(expression.hir_id);
match expression.kind {
@ -1173,18 +787,387 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr<'v>)
}
}
pub fn walk_arm<'v, V: Visitor<'v>>(visitor: &mut V, arm: &'v Arm<'v>) {
visitor.visit_id(arm.hir_id);
visitor.visit_pat(&arm.pat);
if let Some(ref g) = arm.guard {
match g {
Guard::If(ref e) => visitor.visit_expr(e),
Guard::IfLet(ref l) => {
visitor.visit_let_expr(l);
pub fn walk_let_expr<'v, V: Visitor<'v>>(visitor: &mut V, let_expr: &'v Let<'v>) {
// match the visit order in walk_local
visitor.visit_expr(let_expr.init);
visitor.visit_id(let_expr.hir_id);
visitor.visit_pat(let_expr.pat);
walk_list!(visitor, visit_ty, let_expr.ty);
}
pub fn walk_expr_field<'v, V: Visitor<'v>>(visitor: &mut V, field: &'v ExprField<'v>) {
visitor.visit_id(field.hir_id);
visitor.visit_ident(field.ident);
visitor.visit_expr(&field.expr)
}
pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty<'v>) {
visitor.visit_id(typ.hir_id);
match typ.kind {
TyKind::Slice(ref ty) => visitor.visit_ty(ty),
TyKind::Ptr(ref mutable_type) => visitor.visit_ty(&mutable_type.ty),
TyKind::Rptr(ref lifetime, ref mutable_type) => {
visitor.visit_lifetime(lifetime);
visitor.visit_ty(&mutable_type.ty)
}
TyKind::Never => {}
TyKind::Tup(tuple_element_types) => {
walk_list!(visitor, visit_ty, tuple_element_types);
}
TyKind::BareFn(ref function_declaration) => {
walk_list!(visitor, visit_generic_param, function_declaration.generic_params);
visitor.visit_fn_decl(&function_declaration.decl);
}
TyKind::Path(ref qpath) => {
visitor.visit_qpath(qpath, typ.hir_id, typ.span);
}
TyKind::OpaqueDef(item_id, lifetimes, _in_trait) => {
visitor.visit_nested_item(item_id);
walk_list!(visitor, visit_generic_arg, lifetimes);
}
TyKind::Array(ref ty, ref length) => {
visitor.visit_ty(ty);
visitor.visit_array_length(length)
}
TyKind::TraitObject(bounds, ref lifetime, _syntax) => {
for bound in bounds {
visitor.visit_poly_trait_ref(bound);
}
visitor.visit_lifetime(lifetime);
}
TyKind::Typeof(ref expression) => visitor.visit_anon_const(expression),
TyKind::Infer | TyKind::Err => {}
}
}
pub fn walk_generic_param<'v, V: Visitor<'v>>(visitor: &mut V, param: &'v GenericParam<'v>) {
visitor.visit_id(param.hir_id);
match param.name {
ParamName::Plain(ident) => visitor.visit_ident(ident),
ParamName::Error | ParamName::Fresh => {}
}
match param.kind {
GenericParamKind::Lifetime { .. } => {}
GenericParamKind::Type { ref default, .. } => walk_list!(visitor, visit_ty, default),
GenericParamKind::Const { ref ty, ref default } => {
visitor.visit_ty(ty);
if let Some(ref default) = default {
visitor.visit_const_param_default(param.hir_id, default);
}
}
}
visitor.visit_expr(&arm.body);
}
pub fn walk_const_param_default<'v, V: Visitor<'v>>(visitor: &mut V, ct: &'v AnonConst) {
visitor.visit_anon_const(ct)
}
pub fn walk_generics<'v, V: Visitor<'v>>(visitor: &mut V, generics: &'v Generics<'v>) {
walk_list!(visitor, visit_generic_param, generics.params);
walk_list!(visitor, visit_where_predicate, generics.predicates);
}
pub fn walk_where_predicate<'v, V: Visitor<'v>>(
visitor: &mut V,
predicate: &'v WherePredicate<'v>,
) {
match *predicate {
WherePredicate::BoundPredicate(WhereBoundPredicate {
hir_id,
ref bounded_ty,
bounds,
bound_generic_params,
origin: _,
span: _,
}) => {
visitor.visit_id(hir_id);
visitor.visit_ty(bounded_ty);
walk_list!(visitor, visit_param_bound, bounds);
walk_list!(visitor, visit_generic_param, bound_generic_params);
}
WherePredicate::RegionPredicate(WhereRegionPredicate {
ref lifetime,
bounds,
span: _,
in_where_clause: _,
}) => {
visitor.visit_lifetime(lifetime);
walk_list!(visitor, visit_param_bound, bounds);
}
WherePredicate::EqPredicate(WhereEqPredicate { ref lhs_ty, ref rhs_ty, span: _ }) => {
visitor.visit_ty(lhs_ty);
visitor.visit_ty(rhs_ty);
}
}
}
pub fn walk_fn_decl<'v, V: Visitor<'v>>(visitor: &mut V, function_declaration: &'v FnDecl<'v>) {
for ty in function_declaration.inputs {
visitor.visit_ty(ty)
}
visitor.visit_fn_ret_ty(&function_declaration.output)
}
pub fn walk_fn_ret_ty<'v, V: Visitor<'v>>(visitor: &mut V, ret_ty: &'v FnRetTy<'v>) {
if let FnRetTy::Return(ref output_ty) = *ret_ty {
visitor.visit_ty(output_ty)
}
}
pub fn walk_fn<'v, V: Visitor<'v>>(
visitor: &mut V,
function_kind: FnKind<'v>,
function_declaration: &'v FnDecl<'v>,
body_id: BodyId,
id: HirId,
) {
visitor.visit_id(id);
visitor.visit_fn_decl(function_declaration);
walk_fn_kind(visitor, function_kind);
visitor.visit_nested_body(body_id)
}
pub fn walk_fn_kind<'v, V: Visitor<'v>>(visitor: &mut V, function_kind: FnKind<'v>) {
match function_kind {
FnKind::ItemFn(_, generics, ..) => {
visitor.visit_generics(generics);
}
FnKind::Closure | FnKind::Method(..) => {}
}
}
pub fn walk_use<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path<'v>, hir_id: HirId) {
visitor.visit_id(hir_id);
visitor.visit_path(path, hir_id);
}
pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v TraitItem<'v>) {
// N.B., deliberately force a compilation error if/when new fields are added.
let TraitItem { ident, generics, ref defaultness, ref kind, span, owner_id: _ } = *trait_item;
let hir_id = trait_item.hir_id();
visitor.visit_ident(ident);
visitor.visit_generics(&generics);
visitor.visit_defaultness(&defaultness);
match *kind {
TraitItemKind::Const(ref ty, default) => {
visitor.visit_id(hir_id);
visitor.visit_ty(ty);
walk_list!(visitor, visit_nested_body, default);
}
TraitItemKind::Fn(ref sig, TraitFn::Required(param_names)) => {
visitor.visit_id(hir_id);
visitor.visit_fn_decl(&sig.decl);
for &param_name in param_names {
visitor.visit_ident(param_name);
}
}
TraitItemKind::Fn(ref sig, TraitFn::Provided(body_id)) => {
visitor.visit_fn(FnKind::Method(ident, sig), &sig.decl, body_id, span, hir_id);
}
TraitItemKind::Type(bounds, ref default) => {
visitor.visit_id(hir_id);
walk_list!(visitor, visit_param_bound, bounds);
walk_list!(visitor, visit_ty, default);
}
}
}
pub fn walk_trait_item_ref<'v, V: Visitor<'v>>(visitor: &mut V, trait_item_ref: &'v TraitItemRef) {
// N.B., deliberately force a compilation error if/when new fields are added.
let TraitItemRef { id, ident, ref kind, span: _ } = *trait_item_ref;
visitor.visit_nested_trait_item(id);
visitor.visit_ident(ident);
visitor.visit_associated_item_kind(kind);
}
pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplItem<'v>) {
// N.B., deliberately force a compilation error if/when new fields are added.
let ImplItem {
owner_id: _,
ident,
ref generics,
ref kind,
ref defaultness,
span: _,
vis_span: _,
} = *impl_item;
visitor.visit_ident(ident);
visitor.visit_generics(generics);
visitor.visit_defaultness(defaultness);
match *kind {
ImplItemKind::Const(ref ty, body) => {
visitor.visit_id(impl_item.hir_id());
visitor.visit_ty(ty);
visitor.visit_nested_body(body);
}
ImplItemKind::Fn(ref sig, body_id) => {
visitor.visit_fn(
FnKind::Method(impl_item.ident, sig),
&sig.decl,
body_id,
impl_item.span,
impl_item.hir_id(),
);
}
ImplItemKind::Type(ref ty) => {
visitor.visit_id(impl_item.hir_id());
visitor.visit_ty(ty);
}
}
}
pub fn walk_foreign_item_ref<'v, V: Visitor<'v>>(
visitor: &mut V,
foreign_item_ref: &'v ForeignItemRef,
) {
// N.B., deliberately force a compilation error if/when new fields are added.
let ForeignItemRef { id, ident, span: _ } = *foreign_item_ref;
visitor.visit_nested_foreign_item(id);
visitor.visit_ident(ident);
}
pub fn walk_impl_item_ref<'v, V: Visitor<'v>>(visitor: &mut V, impl_item_ref: &'v ImplItemRef) {
// N.B., deliberately force a compilation error if/when new fields are added.
let ImplItemRef { id, ident, ref kind, span: _, trait_item_def_id: _ } = *impl_item_ref;
visitor.visit_nested_impl_item(id);
visitor.visit_ident(ident);
visitor.visit_associated_item_kind(kind);
}
pub fn walk_trait_ref<'v, V: Visitor<'v>>(visitor: &mut V, trait_ref: &'v TraitRef<'v>) {
visitor.visit_id(trait_ref.hir_ref_id);
visitor.visit_path(&trait_ref.path, trait_ref.hir_ref_id)
}
pub fn walk_param_bound<'v, V: Visitor<'v>>(visitor: &mut V, bound: &'v GenericBound<'v>) {
match *bound {
GenericBound::Trait(ref typ, _modifier) => {
visitor.visit_poly_trait_ref(typ);
}
GenericBound::LangItemTrait(_, _span, hir_id, args) => {
visitor.visit_id(hir_id);
visitor.visit_generic_args(args);
}
GenericBound::Outlives(ref lifetime) => visitor.visit_lifetime(lifetime),
}
}
pub fn walk_poly_trait_ref<'v, V: Visitor<'v>>(visitor: &mut V, trait_ref: &'v PolyTraitRef<'v>) {
walk_list!(visitor, visit_generic_param, trait_ref.bound_generic_params);
visitor.visit_trait_ref(&trait_ref.trait_ref);
}
pub fn walk_struct_def<'v, V: Visitor<'v>>(
visitor: &mut V,
struct_definition: &'v VariantData<'v>,
) {
walk_list!(visitor, visit_id, struct_definition.ctor_hir_id());
walk_list!(visitor, visit_field_def, struct_definition.fields());
}
pub fn walk_field_def<'v, V: Visitor<'v>>(visitor: &mut V, field: &'v FieldDef<'v>) {
visitor.visit_id(field.hir_id);
visitor.visit_ident(field.ident);
visitor.visit_ty(&field.ty);
}
pub fn walk_enum_def<'v, V: Visitor<'v>>(
visitor: &mut V,
enum_definition: &'v EnumDef<'v>,
item_id: HirId,
) {
visitor.visit_id(item_id);
walk_list!(visitor, visit_variant, enum_definition.variants);
}
pub fn walk_variant<'v, V: Visitor<'v>>(visitor: &mut V, variant: &'v Variant<'v>) {
visitor.visit_ident(variant.ident);
visitor.visit_id(variant.id);
visitor.visit_variant_data(&variant.data);
walk_list!(visitor, visit_anon_const, &variant.disr_expr);
}
pub fn walk_label<'v, V: Visitor<'v>>(visitor: &mut V, label: &'v Label) {
visitor.visit_ident(label.ident);
}
pub fn walk_inf<'v, V: Visitor<'v>>(visitor: &mut V, inf: &'v InferArg) {
visitor.visit_id(inf.hir_id);
}
pub fn walk_generic_arg<'v, V: Visitor<'v>>(visitor: &mut V, generic_arg: &'v GenericArg<'v>) {
match generic_arg {
GenericArg::Lifetime(lt) => visitor.visit_lifetime(lt),
GenericArg::Type(ty) => visitor.visit_ty(ty),
GenericArg::Const(ct) => visitor.visit_anon_const(&ct.value),
GenericArg::Infer(inf) => visitor.visit_infer(inf),
}
}
pub fn walk_lifetime<'v, V: Visitor<'v>>(visitor: &mut V, lifetime: &'v Lifetime) {
visitor.visit_id(lifetime.hir_id);
match lifetime.name {
LifetimeName::Param(_, ParamName::Plain(ident)) => {
visitor.visit_ident(ident);
}
LifetimeName::Param(_, ParamName::Fresh)
| LifetimeName::Param(_, ParamName::Error)
| LifetimeName::Static
| LifetimeName::Error
| LifetimeName::ImplicitObjectLifetimeDefault
| LifetimeName::Infer => {}
}
}
pub fn walk_qpath<'v, V: Visitor<'v>>(visitor: &mut V, qpath: &'v QPath<'v>, id: HirId) {
match *qpath {
QPath::Resolved(ref maybe_qself, ref path) => {
walk_list!(visitor, visit_ty, maybe_qself);
visitor.visit_path(path, id)
}
QPath::TypeRelative(ref qself, ref segment) => {
visitor.visit_ty(qself);
visitor.visit_path_segment(segment);
}
QPath::LangItem(..) => {}
}
}
pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path<'v>) {
for segment in path.segments {
visitor.visit_path_segment(segment);
}
}
pub fn walk_path_segment<'v, V: Visitor<'v>>(visitor: &mut V, segment: &'v PathSegment<'v>) {
visitor.visit_ident(segment.ident);
visitor.visit_id(segment.hir_id);
if let Some(ref args) = segment.args {
visitor.visit_generic_args(args);
}
}
pub fn walk_generic_args<'v, V: Visitor<'v>>(visitor: &mut V, generic_args: &'v GenericArgs<'v>) {
walk_list!(visitor, visit_generic_arg, generic_args.args);
walk_list!(visitor, visit_assoc_type_binding, generic_args.bindings);
}
pub fn walk_assoc_type_binding<'v, V: Visitor<'v>>(
visitor: &mut V,
type_binding: &'v TypeBinding<'v>,
) {
visitor.visit_id(type_binding.hir_id);
visitor.visit_ident(type_binding.ident);
visitor.visit_generic_args(type_binding.gen_args);
match type_binding.kind {
TypeBindingKind::Equality { ref term } => match term {
Term::Ty(ref ty) => visitor.visit_ty(ty),
Term::Const(ref c) => visitor.visit_anon_const(c),
},
TypeBindingKind::Constraint { bounds } => walk_list!(visitor, visit_param_bound, bounds),
}
}
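The `TypeBinding` nodes visited above correspond to associated-type bindings in surface syntax; a small, self-contained illustration:

    // `Item = u32` is the binding; `walk_assoc_type_binding` descends into the
    // bound type (or const) and any further bounds attached to it.
    fn sum_all(iter: impl Iterator<Item = u32>) -> u32 {
        iter.sum()
    }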
pub fn walk_associated_item_kind<'v, V: Visitor<'v>>(_: &mut V, _: &'v AssocItemKind) {
@ -1198,3 +1181,27 @@ pub fn walk_defaultness<'v, V: Visitor<'v>>(_: &mut V, _: &'v Defaultness) {
// the right thing to do, should content be added in the future,
// would be to walk it.
}
pub fn walk_inline_asm<'v, V: Visitor<'v>>(visitor: &mut V, asm: &'v InlineAsm<'v>, id: HirId) {
for (op, op_sp) in asm.operands {
match op {
InlineAsmOperand::In { expr, .. } | InlineAsmOperand::InOut { expr, .. } => {
visitor.visit_expr(expr)
}
InlineAsmOperand::Out { expr, .. } => {
if let Some(expr) = expr {
visitor.visit_expr(expr);
}
}
InlineAsmOperand::SplitInOut { in_expr, out_expr, .. } => {
visitor.visit_expr(in_expr);
if let Some(out_expr) = out_expr {
visitor.visit_expr(out_expr);
}
}
InlineAsmOperand::Const { anon_const, .. }
| InlineAsmOperand::SymFn { anon_const, .. } => visitor.visit_anon_const(anon_const),
InlineAsmOperand::SymStatic { path, .. } => visitor.visit_qpath(path, id, *op_sp),
}
}
}
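The recurring pattern in this file is that every `visit_*` method on the trait has a matching free `walk_*` function, and the change to `visit_generic_arg` above simply completes that pairing. A self-contained toy model of the split, using made-up types rather than the real `rustc_hir` ones:

    enum Arg {
        Lifetime(String),
        Type(String),
    }

    struct Node {
        args: Vec<Arg>,
    }

    trait Visitor: Sized {
        // Overridable hooks; the defaults just keep walking.
        fn visit_arg(&mut self, arg: &Arg) {
            walk_arg(self, arg);
        }
        fn visit_node(&mut self, node: &Node) {
            walk_node(self, node);
        }
    }

    // The free functions own the traversal, so an override can do its own work
    // and then call back into `walk_*` to recurse into the children.
    fn walk_arg<V: Visitor>(_visitor: &mut V, _arg: &Arg) {}

    fn walk_node<V: Visitor>(visitor: &mut V, node: &Node) {
        for arg in &node.args {
            visitor.visit_arg(arg);
        }
    }

    struct CountTypes(usize);

    impl Visitor for CountTypes {
        fn visit_arg(&mut self, arg: &Arg) {
            if let Arg::Type(_) = arg {
                self.0 += 1;
            }
            walk_arg(self, arg); // preserve the default traversal
        }
    }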


@ -12,35 +12,56 @@ use crate::errors::LangItemError;
use crate::{MethodKind, Target};
use rustc_ast as ast;
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_macros::HashStable_Generic;
use rustc_span::symbol::{kw, sym, Symbol};
use rustc_span::Span;
use std::sync::LazyLock;
pub enum LangItemGroup {
Op,
Fn,
/// All of the language items, defined or not.
/// Defined lang items can come from the current crate or its dependencies.
#[derive(HashStable_Generic, Debug)]
pub struct LanguageItems {
/// Mappings from lang items to their possibly found [`DefId`]s.
/// The index corresponds to the order in [`LangItem`].
items: [Option<DefId>; std::mem::variant_count::<LangItem>()],
/// Lang items that were not found during collection.
pub missing: Vec<LangItem>,
}
const NUM_GROUPS: usize = 2;
impl LanguageItems {
/// Construct an empty collection of lang items and no missing ones.
pub fn new() -> Self {
Self { items: [None; std::mem::variant_count::<LangItem>()], missing: Vec::new() }
}
macro_rules! expand_group {
() => {
None
};
($group:expr) => {
Some($group)
};
pub fn get(&self, item: LangItem) -> Option<DefId> {
self.items[item as usize]
}
pub fn set(&mut self, item: LangItem, def_id: DefId) {
self.items[item as usize] = Some(def_id);
}
/// Requires that a given `LangItem` was bound and returns the corresponding `DefId`.
/// If it wasn't bound, e.g. due to a missing `#[lang = "<it.name()>"]`,
/// returns an error encapsulating the `LangItem`.
pub fn require(&self, it: LangItem) -> Result<DefId, LangItemError> {
self.get(it).ok_or_else(|| LangItemError(it))
}
pub fn iter<'a>(&'a self) -> impl Iterator<Item = (LangItem, DefId)> + 'a {
self.items
.iter()
.enumerate()
.filter_map(|(i, id)| id.map(|id| (LangItem::from_u32(i as u32).unwrap(), id)))
}
}
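The new `LanguageItems` representation is a fixed-size array indexed by the `LangItem` discriminant. A standalone sketch of the same idea with toy types (nightly-only because of `variant_count`, the feature this commit enables further down):

    #![feature(variant_count)]

    use std::mem::variant_count;

    #[derive(Clone, Copy)]
    enum Item {
        Sized,
        Copy,
        Drop,
    }

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct DefId(u32);

    struct Items {
        // One slot per enum variant; the variant's discriminant is the index.
        table: [Option<DefId>; variant_count::<Item>()],
    }

    impl Items {
        fn new() -> Self {
            Self { table: [None; variant_count::<Item>()] }
        }
        fn set(&mut self, item: Item, id: DefId) {
            self.table[item as usize] = Some(id);
        }
        fn get(&self, item: Item) -> Option<DefId> {
            self.table[item as usize]
        }
    }

    fn main() {
        let mut items = Items::new();
        items.set(Item::Copy, DefId(7));
        assert_eq!(items.get(Item::Copy), Some(DefId(7)));
        assert_eq!(items.get(Item::Drop), None);
    }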
// The actual lang items defined come at the end of this file in one handy table.
// So you probably just want to nip down to the end.
macro_rules! language_item_table {
(
$( $(#[$attr:meta])* $variant:ident $($group:expr)?, $module:ident :: $name:ident, $method:ident, $target:expr, $generics:expr; )*
$( $(#[$attr:meta])* $variant:ident, $module:ident :: $name:ident, $method:ident, $target:expr, $generics:expr; )*
) => {
enum_from_u32! {
@ -66,12 +87,17 @@ macro_rules! language_item_table {
}
}
/// The [group](LangItemGroup) that this lang item belongs to,
/// or `None` if it doesn't belong to a group.
pub fn group(self) -> Option<LangItemGroup> {
use LangItemGroup::*;
/// Opposite of [`LangItem::name`]
pub fn from_name(name: Symbol) -> Option<Self> {
match name {
$( $module::$name => Some(LangItem::$variant), )*
_ => None,
}
}
pub fn target(self) -> Target {
match self {
$( LangItem::$variant => expand_group!($($group)*), )*
$( LangItem::$variant => $target, )*
}
}
@ -82,50 +108,7 @@ macro_rules! language_item_table {
}
}
/// All of the language items, defined or not.
/// Defined lang items can come from the current crate or its dependencies.
#[derive(HashStable_Generic, Debug)]
pub struct LanguageItems {
/// Mappings from lang items to their possibly found [`DefId`]s.
/// The index corresponds to the order in [`LangItem`].
pub items: Vec<Option<DefId>>,
/// Lang items that were not found during collection.
pub missing: Vec<LangItem>,
/// Mapping from [`LangItemGroup`] discriminants to all
/// [`DefId`]s of lang items in that group.
pub groups: [Vec<DefId>; NUM_GROUPS],
}
impl LanguageItems {
/// Construct an empty collection of lang items and no missing ones.
pub fn new() -> Self {
fn init_none(_: LangItem) -> Option<DefId> { None }
const EMPTY: Vec<DefId> = Vec::new();
Self {
items: vec![$(init_none(LangItem::$variant)),*],
missing: Vec::new(),
groups: [EMPTY; NUM_GROUPS],
}
}
/// Returns the mappings to the possibly found `DefId`s for each lang item.
pub fn items(&self) -> &[Option<DefId>] {
&*self.items
}
/// Requires that a given `LangItem` was bound and returns the corresponding `DefId`.
/// If it wasn't bound, e.g. due to a missing `#[lang = "<it.name()>"]`,
/// returns an error encapsulating the `LangItem`.
pub fn require(&self, it: LangItem) -> Result<DefId, LangItemError> {
self.items[it as usize].ok_or_else(|| LangItemError(it))
}
/// Returns the [`DefId`]s of all lang items in a group.
pub fn group(&self, group: LangItemGroup) -> &[DefId] {
self.groups[group as usize].as_ref()
}
$(
#[doc = concat!("Returns the [`DefId`] of the `", stringify!($name), "` lang item if it is defined.")]
pub fn $method(&self) -> Option<DefId> {
@ -133,15 +116,6 @@ macro_rules! language_item_table {
}
)*
}
/// A mapping from the name of the lang item to its order and the form it must be of.
pub static ITEM_REFS: LazyLock<FxIndexMap<Symbol, (usize, Target)>> = LazyLock::new(|| {
let mut item_refs = FxIndexMap::default();
$( item_refs.insert($module::$name, (LangItem::$variant as usize, $target)); )*
item_refs
});
// End of the macro
}
}
@ -152,14 +126,12 @@ impl<CTX> HashStable<CTX> for LangItem {
}
/// Extracts the first `lang = "$name"` out of a list of attributes.
/// The attributes `#[panic_handler]` and `#[alloc_error_handler]`
/// are also extracted out when found.
/// The `#[panic_handler]` attribute is also extracted out when found.
pub fn extract(attrs: &[ast::Attribute]) -> Option<(Symbol, Span)> {
attrs.iter().find_map(|attr| {
Some(match attr {
_ if attr.has_name(sym::lang) => (attr.value_str()?, attr.span),
_ if attr.has_name(sym::panic_handler) => (sym::panic_impl, attr.span),
_ if attr.has_name(sym::alloc_error_handler) => (sym::oom, attr.span),
_ => return None,
})
})
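With `#[alloc_error_handler]` no longer special-cased here, `extract` still rewrites `#[panic_handler]` to the `panic_impl` lang item. The attribute it recognizes is the usual one from a `no_std` crate, roughly:

    // Fragment of a typical `no_std` library crate; `extract` treats this
    // attribute as if it were `#[lang = "panic_impl"]`.
    #![no_std]

    use core::panic::PanicInfo;

    #[panic_handler]
    fn panic(_info: &PanicInfo) -> ! {
        loop {}
    }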
@ -196,30 +168,30 @@ language_item_table! {
TransmuteOpts, sym::transmute_opts, transmute_opts, Target::Struct, GenericRequirement::Exact(0);
TransmuteTrait, sym::transmute_trait, transmute_trait, Target::Trait, GenericRequirement::Exact(3);
Add(Op), sym::add, add_trait, Target::Trait, GenericRequirement::Exact(1);
Sub(Op), sym::sub, sub_trait, Target::Trait, GenericRequirement::Exact(1);
Mul(Op), sym::mul, mul_trait, Target::Trait, GenericRequirement::Exact(1);
Div(Op), sym::div, div_trait, Target::Trait, GenericRequirement::Exact(1);
Rem(Op), sym::rem, rem_trait, Target::Trait, GenericRequirement::Exact(1);
Neg(Op), sym::neg, neg_trait, Target::Trait, GenericRequirement::Exact(0);
Not(Op), sym::not, not_trait, Target::Trait, GenericRequirement::Exact(0);
BitXor(Op), sym::bitxor, bitxor_trait, Target::Trait, GenericRequirement::Exact(1);
BitAnd(Op), sym::bitand, bitand_trait, Target::Trait, GenericRequirement::Exact(1);
BitOr(Op), sym::bitor, bitor_trait, Target::Trait, GenericRequirement::Exact(1);
Shl(Op), sym::shl, shl_trait, Target::Trait, GenericRequirement::Exact(1);
Shr(Op), sym::shr, shr_trait, Target::Trait, GenericRequirement::Exact(1);
AddAssign(Op), sym::add_assign, add_assign_trait, Target::Trait, GenericRequirement::Exact(1);
SubAssign(Op), sym::sub_assign, sub_assign_trait, Target::Trait, GenericRequirement::Exact(1);
MulAssign(Op), sym::mul_assign, mul_assign_trait, Target::Trait, GenericRequirement::Exact(1);
DivAssign(Op), sym::div_assign, div_assign_trait, Target::Trait, GenericRequirement::Exact(1);
RemAssign(Op), sym::rem_assign, rem_assign_trait, Target::Trait, GenericRequirement::Exact(1);
BitXorAssign(Op), sym::bitxor_assign, bitxor_assign_trait, Target::Trait, GenericRequirement::Exact(1);
BitAndAssign(Op), sym::bitand_assign, bitand_assign_trait, Target::Trait, GenericRequirement::Exact(1);
BitOrAssign(Op), sym::bitor_assign, bitor_assign_trait, Target::Trait, GenericRequirement::Exact(1);
ShlAssign(Op), sym::shl_assign, shl_assign_trait, Target::Trait, GenericRequirement::Exact(1);
ShrAssign(Op), sym::shr_assign, shr_assign_trait, Target::Trait, GenericRequirement::Exact(1);
Index(Op), sym::index, index_trait, Target::Trait, GenericRequirement::Exact(1);
IndexMut(Op), sym::index_mut, index_mut_trait, Target::Trait, GenericRequirement::Exact(1);
Add, sym::add, add_trait, Target::Trait, GenericRequirement::Exact(1);
Sub, sym::sub, sub_trait, Target::Trait, GenericRequirement::Exact(1);
Mul, sym::mul, mul_trait, Target::Trait, GenericRequirement::Exact(1);
Div, sym::div, div_trait, Target::Trait, GenericRequirement::Exact(1);
Rem, sym::rem, rem_trait, Target::Trait, GenericRequirement::Exact(1);
Neg, sym::neg, neg_trait, Target::Trait, GenericRequirement::Exact(0);
Not, sym::not, not_trait, Target::Trait, GenericRequirement::Exact(0);
BitXor, sym::bitxor, bitxor_trait, Target::Trait, GenericRequirement::Exact(1);
BitAnd, sym::bitand, bitand_trait, Target::Trait, GenericRequirement::Exact(1);
BitOr, sym::bitor, bitor_trait, Target::Trait, GenericRequirement::Exact(1);
Shl, sym::shl, shl_trait, Target::Trait, GenericRequirement::Exact(1);
Shr, sym::shr, shr_trait, Target::Trait, GenericRequirement::Exact(1);
AddAssign, sym::add_assign, add_assign_trait, Target::Trait, GenericRequirement::Exact(1);
SubAssign, sym::sub_assign, sub_assign_trait, Target::Trait, GenericRequirement::Exact(1);
MulAssign, sym::mul_assign, mul_assign_trait, Target::Trait, GenericRequirement::Exact(1);
DivAssign, sym::div_assign, div_assign_trait, Target::Trait, GenericRequirement::Exact(1);
RemAssign, sym::rem_assign, rem_assign_trait, Target::Trait, GenericRequirement::Exact(1);
BitXorAssign, sym::bitxor_assign, bitxor_assign_trait, Target::Trait, GenericRequirement::Exact(1);
BitAndAssign, sym::bitand_assign, bitand_assign_trait, Target::Trait, GenericRequirement::Exact(1);
BitOrAssign, sym::bitor_assign, bitor_assign_trait, Target::Trait, GenericRequirement::Exact(1);
ShlAssign, sym::shl_assign, shl_assign_trait, Target::Trait, GenericRequirement::Exact(1);
ShrAssign, sym::shr_assign, shr_assign_trait, Target::Trait, GenericRequirement::Exact(1);
Index, sym::index, index_trait, Target::Trait, GenericRequirement::Exact(1);
IndexMut, sym::index_mut, index_mut_trait, Target::Trait, GenericRequirement::Exact(1);
UnsafeCell, sym::unsafe_cell, unsafe_cell_type, Target::Struct, GenericRequirement::None;
VaList, sym::va_list, va_list, Target::Struct, GenericRequirement::None;
@ -229,9 +201,9 @@ language_item_table! {
DerefTarget, sym::deref_target, deref_target, Target::AssocTy, GenericRequirement::None;
Receiver, sym::receiver, receiver_trait, Target::Trait, GenericRequirement::None;
Fn(Fn), kw::Fn, fn_trait, Target::Trait, GenericRequirement::Exact(1);
FnMut(Fn), sym::fn_mut, fn_mut_trait, Target::Trait, GenericRequirement::Exact(1);
FnOnce(Fn), sym::fn_once, fn_once_trait, Target::Trait, GenericRequirement::Exact(1);
Fn, kw::Fn, fn_trait, Target::Trait, GenericRequirement::Exact(1);
FnMut, sym::fn_mut, fn_mut_trait, Target::Trait, GenericRequirement::Exact(1);
FnOnce, sym::fn_once, fn_once_trait, Target::Trait, GenericRequirement::Exact(1);
FnOnceOutput, sym::fn_once_output, fn_once_output, Target::AssocTy, GenericRequirement::None;
@ -241,8 +213,8 @@ language_item_table! {
Unpin, sym::unpin, unpin_trait, Target::Trait, GenericRequirement::None;
Pin, sym::pin, pin_type, Target::Struct, GenericRequirement::None;
PartialEq(Op), sym::eq, eq_trait, Target::Trait, GenericRequirement::Exact(1);
PartialOrd(Op), sym::partial_ord, partial_ord_trait, Target::Trait, GenericRequirement::Exact(1);
PartialEq, sym::eq, eq_trait, Target::Trait, GenericRequirement::Exact(1);
PartialOrd, sym::partial_ord, partial_ord_trait, Target::Trait, GenericRequirement::Exact(1);
// A number of panic-related lang items. The `panic` item corresponds to divide-by-zero and
// various panic cases with `match`. The `panic_bounds_check` item is for indexing arrays.
@ -266,7 +238,6 @@ language_item_table! {
ExchangeMalloc, sym::exchange_malloc, exchange_malloc_fn, Target::Fn, GenericRequirement::None;
BoxFree, sym::box_free, box_free_fn, Target::Fn, GenericRequirement::Minimum(1);
DropInPlace, sym::drop_in_place, drop_in_place_fn, Target::Fn, GenericRequirement::Minimum(1);
Oom, sym::oom, oom, Target::Fn, GenericRequirement::None;
AllocLayout, sym::alloc_layout, alloc_layout, Target::Struct, GenericRequirement::None;
Start, sym::start, start_fn, Target::Fn, GenericRequirement::Exact(1);
@ -338,3 +309,34 @@ pub enum GenericRequirement {
Minimum(usize),
Exact(usize),
}
pub static FN_TRAITS: &'static [LangItem] = &[LangItem::Fn, LangItem::FnMut, LangItem::FnOnce];
pub static OPERATORS: &'static [LangItem] = &[
LangItem::Add,
LangItem::Sub,
LangItem::Mul,
LangItem::Div,
LangItem::Rem,
LangItem::Neg,
LangItem::Not,
LangItem::BitXor,
LangItem::BitAnd,
LangItem::BitOr,
LangItem::Shl,
LangItem::Shr,
LangItem::AddAssign,
LangItem::SubAssign,
LangItem::MulAssign,
LangItem::DivAssign,
LangItem::RemAssign,
LangItem::BitXorAssign,
LangItem::BitAndAssign,
LangItem::BitOrAssign,
LangItem::ShlAssign,
LangItem::ShrAssign,
LangItem::Index,
LangItem::IndexMut,
LangItem::PartialEq,
LangItem::PartialOrd,
];
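A sketch of how the removed `LangItemGroup`/`group()` lookups can be recovered from these plain slices, assuming the usual compiler context where a `TyCtxt` named `tcx` is in scope:

    // Collect the DefIds of all defined operator lang items; `get` is the new
    // accessor on `LanguageItems` introduced earlier in this commit.
    let operator_def_ids: Vec<DefId> = OPERATORS
        .iter()
        .filter_map(|&item| tcx.lang_items().get(item))
        .collect();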


@ -5,10 +5,10 @@
#![feature(associated_type_defaults)]
#![feature(closure_track_caller)]
#![feature(const_btree_len)]
#![feature(once_cell)]
#![feature(min_specialization)]
#![feature(never_type)]
#![feature(rustc_attrs)]
#![feature(variant_count)]
#![recursion_limit = "256"]
#![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)]


@ -1,53 +1,30 @@
//! Validity checking for weak lang items
use crate::def_id::DefId;
use crate::{lang_items, LangItem, LanguageItems};
use crate::LangItem;
use rustc_ast as ast;
use rustc_data_structures::fx::FxIndexMap;
use rustc_span::symbol::{sym, Symbol};
use std::sync::LazyLock;
macro_rules! weak_lang_items {
($($name:ident, $item:ident, $sym:ident;)*) => (
($($item:ident, $sym:ident;)*) => {
pub static WEAK_LANG_ITEMS: &[LangItem] = &[$(LangItem::$item,)*];
pub static WEAK_ITEMS_REFS: LazyLock<FxIndexMap<Symbol, LangItem>> = LazyLock::new(|| {
let mut map = FxIndexMap::default();
$(map.insert(sym::$name, LangItem::$item);)*
map
});
impl LangItem {
pub fn is_weak(self) -> bool {
matches!(self, $(LangItem::$item)|*)
}
pub static WEAK_ITEMS_SYMBOLS: LazyLock<FxIndexMap<LangItem, Symbol>> = LazyLock::new(|| {
let mut map = FxIndexMap::default();
$(map.insert(LangItem::$item, sym::$sym);)*
map
});
pub fn link_name(attrs: &[ast::Attribute]) -> Option<Symbol>
{
lang_items::extract(attrs).and_then(|(name, _)| {
$(if name == sym::$name {
Some(sym::$sym)
} else)* {
None
pub fn link_name(self) -> Option<Symbol> {
match self {
$( LangItem::$item => Some(sym::$sym),)*
_ => None,
}
}
}
})
}
impl LanguageItems {
pub fn is_weak_lang_item(&self, item_def_id: DefId) -> bool {
let did = Some(item_def_id);
$(self.$name() == did)||*
}
}
) }
weak_lang_items! {
panic_impl, PanicImpl, rust_begin_unwind;
eh_personality, EhPersonality, rust_eh_personality;
eh_catch_typeinfo, EhCatchTypeinfo, rust_eh_catch_typeinfo;
oom, Oom, rust_oom;
PanicImpl, rust_begin_unwind;
EhPersonality, rust_eh_personality;
EhCatchTypeinfo, rust_eh_catch_typeinfo;
}
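For reference, hand-expanding the rewritten `weak_lang_items!` invocation above gives roughly the following (it relies on the surrounding crate's `LangItem`, `Symbol`, and `sym` definitions):

    pub static WEAK_LANG_ITEMS: &[LangItem] =
        &[LangItem::PanicImpl, LangItem::EhPersonality, LangItem::EhCatchTypeinfo];

    impl LangItem {
        pub fn is_weak(self) -> bool {
            matches!(
                self,
                LangItem::PanicImpl | LangItem::EhPersonality | LangItem::EhCatchTypeinfo
            )
        }

        pub fn link_name(self) -> Option<Symbol> {
            match self {
                LangItem::PanicImpl => Some(sym::rust_begin_unwind),
                LangItem::EhPersonality => Some(sym::rust_eh_personality),
                LangItem::EhCatchTypeinfo => Some(sym::rust_eh_catch_typeinfo),
                _ => None,
            }
        }
    }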


@ -177,11 +177,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
.all_traits()
.filter(|trait_def_id| {
let viz = self.tcx().visibility(*trait_def_id);
if let Some(def_id) = self.item_def_id() {
viz.is_accessible_from(def_id, self.tcx())
} else {
viz.is_visible_locally()
}
let def_id = self.item_def_id();
viz.is_accessible_from(def_id, self.tcx())
})
.collect();


@ -23,7 +23,6 @@ use rustc_hir as hir;
use rustc_hir::def::{CtorOf, DefKind, Namespace, Res};
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::intravisit::{walk_generics, Visitor as _};
use rustc_hir::lang_items::LangItem;
use rustc_hir::{GenericArg, GenericArgs, OpaqueTyOrigin};
use rustc_middle::middle::stability::AllowUnstable;
use rustc_middle::ty::subst::{self, GenericArgKind, InternalSubsts, SubstsRef};
@ -55,7 +54,7 @@ pub struct PathSeg(pub DefId, pub usize);
pub trait AstConv<'tcx> {
fn tcx<'a>(&'a self) -> TyCtxt<'tcx>;
fn item_def_id(&self) -> Option<DefId>;
fn item_def_id(&self) -> DefId;
/// Returns predicates in scope of the form `X: Foo<T>`, where `X`
/// is a type parameter `X` with the given id `def_id` and T
@ -501,6 +500,9 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
}
GenericParamDefKind::Const { has_default } => {
let ty = tcx.at(self.span).type_of(param.def_id);
if ty.references_error() {
return tcx.const_error(ty).into();
}
if !infer_args && has_default {
tcx.bound_const_param_default(param.def_id)
.subst(tcx, substs.unwrap())
@ -884,9 +886,9 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
}
}
let sized_def_id = tcx.lang_items().require(LangItem::Sized);
let sized_def_id = tcx.lang_items().sized_trait();
match (&sized_def_id, unbound) {
(Ok(sized_def_id), Some(tpb))
(Some(sized_def_id), Some(tpb))
if tpb.path.res == Res::Def(DefKind::Trait, *sized_def_id) =>
{
// There was in fact a `?Sized` bound, return without doing anything
@ -906,7 +908,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
// There was no `?Sized` bound; add implicitly sized if `Sized` is available.
}
}
if sized_def_id.is_err() {
if sized_def_id.is_none() {
// No lang item for `Sized`, so we can't add it as a bound.
return;
}
@ -1908,6 +1910,20 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
}
}
}
// see if we can satisfy using an inherent associated type
for impl_ in tcx.inherent_impls(adt_def.did()) {
let assoc_ty = tcx.associated_items(impl_).find_by_name_and_kind(
tcx,
assoc_ident,
ty::AssocKind::Type,
*impl_,
);
if let Some(assoc_ty) = assoc_ty {
let ty = tcx.type_of(assoc_ty.def_id);
return Ok((ty, DefKind::AssocTy, assoc_ty.def_id));
}
}
}
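The new loop over `tcx.inherent_impls` resolves paths like `Foo::Assoc` where `Assoc` lives in an inherent impl. The surface syntax it serves is the still-incomplete `inherent_associated_types` feature, roughly (nightly-only; the names are illustrative):

    #![feature(inherent_associated_types)]
    #![allow(incomplete_features)]

    struct Matrix;

    impl Matrix {
        // An associated type in an inherent impl (no trait involved).
        type Element = f64;
    }

    // `Matrix::Element` is the kind of path the lookup above resolves.
    fn zero() -> Matrix::Element {
        0.0
    }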
// Find the type of the associated item, and the trait where the associated
@ -1977,7 +1993,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
}
err.emit()
} else if let Some(reported) = qself_ty.error_reported() {
} else if let Err(reported) = qself_ty.error_reported() {
reported
} else {
// Don't print `TyErr` to the user.
@ -2080,17 +2096,14 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
debug!("qpath_to_ty: self.item_def_id()={:?}", def_id);
let parent_def_id = def_id
.and_then(|def_id| {
def_id.as_local().map(|def_id| tcx.hir().local_def_id_to_hir_id(def_id))
})
let parent_def_id = def_id.as_local().map(|def_id| tcx.hir().local_def_id_to_hir_id(def_id))
.map(|hir_id| tcx.hir().get_parent_item(hir_id).to_def_id());
debug!("qpath_to_ty: parent_def_id={:?}", parent_def_id);
// If the trait in segment is the same as the trait defining the item,
// use the `<Self as ..>` syntax in the error.
let is_part_of_self_trait_constraints = def_id == Some(trait_def_id);
let is_part_of_self_trait_constraints = def_id == trait_def_id;
let is_part_of_fn_in_self_trait = parent_def_id == Some(trait_def_id);
let type_name = if is_part_of_self_trait_constraints || is_part_of_fn_in_self_trait {


@ -6,7 +6,7 @@ use super::*;
use rustc_attr as attr;
use rustc_errors::{Applicability, ErrorGuaranteed, MultiSpan};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def::{CtorKind, DefKind, Res};
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::intravisit::Visitor;
use rustc_hir::{ItemKind, Node, PathSegment};
@ -75,7 +75,7 @@ fn check_struct(tcx: TyCtxt<'_>, def_id: LocalDefId) {
check_simd(tcx, span, def_id);
}
check_transparent(tcx, span, def);
check_transparent(tcx, def);
check_packed(tcx, span, def);
}
@ -83,7 +83,7 @@ fn check_union(tcx: TyCtxt<'_>, def_id: LocalDefId) {
let def = tcx.adt_def(def_id);
let span = tcx.def_span(def_id);
def.destructor(tcx); // force the destructor to be evaluated
check_transparent(tcx, span, def);
check_transparent(tcx, def);
check_union_fields(tcx, span, def_id);
check_packed(tcx, span, def);
}
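`check_transparent` now derives its span from the ADT itself instead of taking one from the caller. The gate it enforces on stable is triggered by code like:

    // On stable this reports "transparent unions are unstable" at the
    // definition span; with #![feature(transparent_unions)] it is accepted.
    #[repr(transparent)]
    union Wrapper {
        value: u32,
    }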
@ -506,11 +506,7 @@ fn check_item_type<'tcx>(tcx: TyCtxt<'tcx>, id: hir::ItemId) {
tcx.ensure().typeck(id.owner_id.def_id);
}
DefKind::Enum => {
let item = tcx.hir().item(id);
let hir::ItemKind::Enum(ref enum_definition, _) = item.kind else {
return;
};
check_enum(tcx, &enum_definition.variants, item.owner_id.def_id);
check_enum(tcx, id.owner_id.def_id);
}
DefKind::Fn => {} // entirely within check_item_body
DefKind::Impl => {
@ -1026,7 +1022,7 @@ pub(super) fn check_packed_inner(
None
}
pub(super) fn check_transparent<'tcx>(tcx: TyCtxt<'tcx>, sp: Span, adt: ty::AdtDef<'tcx>) {
pub(super) fn check_transparent<'tcx>(tcx: TyCtxt<'tcx>, adt: ty::AdtDef<'tcx>) {
if !adt.repr().transparent() {
return;
}
@ -1035,14 +1031,14 @@ pub(super) fn check_transparent<'tcx>(tcx: TyCtxt<'tcx>, sp: Span, adt: ty::AdtD
feature_err(
&tcx.sess.parse_sess,
sym::transparent_unions,
sp,
tcx.def_span(adt.did()),
"transparent unions are unstable",
)
.emit();
}
if adt.variants().len() != 1 {
bad_variant_count(tcx, adt, sp, adt.did());
bad_variant_count(tcx, adt, tcx.def_span(adt.did()), adt.did());
if adt.variants().is_empty() {
// Don't bother checking the fields. No variants (and thus no fields) exist.
return;
@ -1103,7 +1099,7 @@ pub(super) fn check_transparent<'tcx>(tcx: TyCtxt<'tcx>, sp: Span, adt: ty::AdtD
.filter_map(|(span, zst, _align1, _non_exhaustive)| if !zst { Some(span) } else { None });
let non_zst_count = non_zst_fields.clone().count();
if non_zst_count >= 2 {
bad_non_zero_sized_fields(tcx, adt, non_zst_count, non_zst_fields, sp);
bad_non_zero_sized_fields(tcx, adt, non_zst_count, non_zst_fields, tcx.def_span(adt.did()));
}
let incompatible_zst_fields =
field_infos.clone().filter(|(_, _, _, opt)| opt.is_some()).count();
@ -1143,12 +1139,11 @@ pub(super) fn check_transparent<'tcx>(tcx: TyCtxt<'tcx>, sp: Span, adt: ty::AdtD
}
#[allow(trivial_numeric_casts)]
fn check_enum<'tcx>(tcx: TyCtxt<'tcx>, vs: &'tcx [hir::Variant<'tcx>], def_id: LocalDefId) {
fn check_enum<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId) {
let def = tcx.adt_def(def_id);
let sp = tcx.def_span(def_id);
def.destructor(tcx); // force the destructor to be evaluated
if vs.is_empty() {
if def.variants().is_empty() {
if let Some(attr) = tcx.get_attrs(def_id.to_def_id(), sym::repr).next() {
struct_span_err!(
tcx.sess,
@ -1156,7 +1151,7 @@ fn check_enum<'tcx>(tcx: TyCtxt<'tcx>, vs: &'tcx [hir::Variant<'tcx>], def_id: L
E0084,
"unsupported representation for zero-variant enum"
)
.span_label(sp, "zero-variant enum")
.span_label(tcx.def_span(def_id), "zero-variant enum")
.emit();
}
}
@ -1167,88 +1162,96 @@ fn check_enum<'tcx>(tcx: TyCtxt<'tcx>, vs: &'tcx [hir::Variant<'tcx>], def_id: L
feature_err(
&tcx.sess.parse_sess,
sym::repr128,
sp,
tcx.def_span(def_id),
"repr with 128-bit type is unstable",
)
.emit();
}
}
for v in vs {
if let Some(ref e) = v.disr_expr {
tcx.ensure().typeck(tcx.hir().local_def_id(e.hir_id));
for v in def.variants() {
if let ty::VariantDiscr::Explicit(discr_def_id) = v.discr {
tcx.ensure().typeck(discr_def_id.expect_local());
}
}
if tcx.adt_def(def_id).repr().int.is_none() {
let is_unit = |var: &hir::Variant<'_>| matches!(var.data, hir::VariantData::Unit(..));
if def.repr().int.is_none() {
let is_unit = |var: &ty::VariantDef| matches!(var.ctor_kind, CtorKind::Const);
let has_disr = |var: &ty::VariantDef| matches!(var.discr, ty::VariantDiscr::Explicit(_));
let has_disr = |var: &hir::Variant<'_>| var.disr_expr.is_some();
let has_non_units = vs.iter().any(|var| !is_unit(var));
let disr_units = vs.iter().any(|var| is_unit(&var) && has_disr(&var));
let disr_non_unit = vs.iter().any(|var| !is_unit(&var) && has_disr(&var));
let has_non_units = def.variants().iter().any(|var| !is_unit(var));
let disr_units = def.variants().iter().any(|var| is_unit(&var) && has_disr(&var));
let disr_non_unit = def.variants().iter().any(|var| !is_unit(&var) && has_disr(&var));
if disr_non_unit || (disr_units && has_non_units) {
let mut err =
struct_span_err!(tcx.sess, sp, E0732, "`#[repr(inttype)]` must be specified");
let mut err = struct_span_err!(
tcx.sess,
tcx.def_span(def_id),
E0732,
"`#[repr(inttype)]` must be specified"
);
err.emit();
}
}
detect_discriminant_duplicate(tcx, def.discriminants(tcx).collect(), vs, sp);
check_transparent(tcx, sp, def);
detect_discriminant_duplicate(tcx, def);
check_transparent(tcx, def);
}
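One of the shapes `check_enum` rejects (now reading the information from `ty::VariantDef` instead of the HIR variants) is an explicit discriminant mixed with non-unit variants but no integer repr:

    // error[E0732]: `#[repr(inttype)]` must be specified
    enum Mixed {
        Tuple(u8),
        Unit = 3,
    }

    // Accepted once an explicit integer repr is given.
    #[repr(u8)]
    enum MixedOk {
        Tuple(u8),
        Unit = 3,
    }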
/// Part of enum check. Given the discriminants of an enum, errors if two or more discriminants are equal
fn detect_discriminant_duplicate<'tcx>(
tcx: TyCtxt<'tcx>,
mut discrs: Vec<(VariantIdx, Discr<'tcx>)>,
vs: &'tcx [hir::Variant<'tcx>],
self_span: Span,
) {
fn detect_discriminant_duplicate<'tcx>(tcx: TyCtxt<'tcx>, adt: ty::AdtDef<'tcx>) {
// Helper closure to reduce duplicate code. This gets called every time we detect a duplicate.
// Here `idx` refers to the order in which the discriminant appears, and its index in `vs`
let report = |dis: Discr<'tcx>, idx: usize, err: &mut Diagnostic| {
let var = &vs[idx]; // HIR for the duplicate discriminant
let (span, display_discr) = match var.disr_expr {
Some(ref expr) => {
let report = |dis: Discr<'tcx>, idx, err: &mut Diagnostic| {
let var = adt.variant(idx); // `VariantDef` for the duplicate discriminant
let (span, display_discr) = match var.discr {
ty::VariantDiscr::Explicit(discr_def_id) => {
// In the case the discriminant is both a duplicate and overflowed, let the user know
if let hir::ExprKind::Lit(lit) = &tcx.hir().body(expr.body).value.kind
if let hir::Node::AnonConst(expr) = tcx.hir().get_by_def_id(discr_def_id.expect_local())
&& let hir::ExprKind::Lit(lit) = &tcx.hir().body(expr.body).value.kind
&& let rustc_ast::LitKind::Int(lit_value, _int_kind) = &lit.node
&& *lit_value != dis.val
{
(tcx.hir().span(expr.hir_id), format!("`{dis}` (overflowed from `{lit_value}`)"))
// Otherwise, format the value as-is
(tcx.def_span(discr_def_id), format!("`{dis}` (overflowed from `{lit_value}`)"))
} else {
(tcx.hir().span(expr.hir_id), format!("`{dis}`"))
// Otherwise, format the value as-is
(tcx.def_span(discr_def_id), format!("`{dis}`"))
}
}
None => {
// This should not happen.
ty::VariantDiscr::Relative(0) => (tcx.def_span(var.def_id), format!("`{dis}`")),
ty::VariantDiscr::Relative(distance_to_explicit) => {
// At this point we know this discriminant is a duplicate, and was not explicitly
// assigned by the user. Here we iterate backwards to fetch the HIR for the last
// explicitly assigned discriminant, and let the user know that this was the
// increment startpoint, and how many steps from there lead to the duplicate
if let Some((n, hir::Variant { span, ident, .. })) =
vs[..idx].iter().rev().enumerate().find(|v| v.1.disr_expr.is_some())
if let Some(explicit_idx) =
idx.as_u32().checked_sub(distance_to_explicit).map(VariantIdx::from_u32)
{
let ve_ident = var.ident;
let n = n + 1;
let sp = if n > 1 { "variants" } else { "variant" };
let explicit_variant = adt.variant(explicit_idx);
let ve_ident = var.name;
let ex_ident = explicit_variant.name;
let sp = if distance_to_explicit > 1 { "variants" } else { "variant" };
err.span_label(
*span,
format!("discriminant for `{ve_ident}` incremented from this startpoint (`{ident}` + {n} {sp} later => `{ve_ident}` = {dis})"),
tcx.def_span(explicit_variant.def_id),
format!(
"discriminant for `{ve_ident}` incremented from this startpoint \
(`{ex_ident}` + {distance_to_explicit} {sp} later \
=> `{ve_ident}` = {dis})"
),
);
}
(vs[idx].span, format!("`{dis}`"))
(tcx.def_span(var.def_id), format!("`{dis}`"))
}
};
err.span_label(span, format!("{display_discr} assigned here"));
};
let mut discrs = adt.discriminants(tcx).collect::<Vec<_>>();
// Here we loop through the discriminants, comparing each discriminant to another.
// When a duplicate is detected, we instantiate an error and point to both
// initial and duplicate value. The duplicate discriminant is then discarded by swapping
@ -1257,29 +1260,29 @@ fn detect_discriminant_duplicate<'tcx>(
// style as we are mutating `discrs` on the fly).
let mut i = 0;
while i < discrs.len() {
let hir_var_i_idx = discrs[i].0.index();
let var_i_idx = discrs[i].0;
let mut error: Option<DiagnosticBuilder<'_, _>> = None;
let mut o = i + 1;
while o < discrs.len() {
let hir_var_o_idx = discrs[o].0.index();
let var_o_idx = discrs[o].0;
if discrs[i].1.val == discrs[o].1.val {
let err = error.get_or_insert_with(|| {
let mut ret = struct_span_err!(
tcx.sess,
self_span,
tcx.def_span(adt.did()),
E0081,
"discriminant value `{}` assigned more than once",
discrs[i].1,
);
report(discrs[i].1, hir_var_i_idx, &mut ret);
report(discrs[i].1, var_i_idx, &mut ret);
ret
});
report(discrs[o].1, hir_var_o_idx, err);
report(discrs[o].1, var_o_idx, err);
// Safe to unwrap here, as we wouldn't reach this point if `discrs` was empty
discrs[o] = *discrs.last().unwrap();
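The duplicate that `detect_discriminant_duplicate` reports, now diagnosed from the type-level variant data rather than `hir::Variant`s:

    enum Bad {
        First = 1,
        Second = 1, // error[E0081]: discriminant value `1` assigned more than once
    }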


@ -290,10 +290,7 @@ fn compare_predicate_entailment<'tcx>(
// type would be more appropriate. In other places we have a `Vec<Span>`
// corresponding to their `Vec<Predicate>`, but we don't have that here.
// Fixing this would improve the output of test `issue-83765.rs`.
let mut result = infcx
.at(&cause, param_env)
.sup(trait_fty, impl_fty)
.map(|infer_ok| ocx.register_infer_ok_obligations(infer_ok));
let mut result = ocx.sup(&cause, param_env, trait_fty, impl_fty);
// HACK(RPITIT): #101614. When we are trying to infer the hidden types for
// RPITITs, we need to equate the output tys instead of just subtyping. If
@ -301,12 +298,8 @@ fn compare_predicate_entailment<'tcx>(
// us to infer `_#1t = #'_#2r str`, where `'_#2r` is unconstrained, which gets
// fixed up to `ReEmpty`, and which is certainly not what we want.
if trait_fty.has_infer_types() {
result = result.and_then(|()| {
infcx
.at(&cause, param_env)
.eq(trait_sig.output(), impl_sig.output())
.map(|infer_ok| ocx.register_infer_ok_obligations(infer_ok))
});
result =
result.and_then(|()| ocx.eq(&cause, param_env, trait_sig.output(), impl_sig.output()));
}
if let Err(terr) = result {
@ -1389,10 +1382,7 @@ pub(crate) fn raw_compare_const_impl<'tcx>(
debug!("compare_const_impl: trait_ty={:?}", trait_ty);
let err = infcx
.at(&cause, param_env)
.sup(trait_ty, impl_ty)
.map(|ok| ocx.register_infer_ok_obligations(ok));
let err = ocx.sup(&cause, param_env, trait_ty, impl_ty);
if let Err(terr) = err {
debug!(
@ -1665,13 +1655,10 @@ pub fn check_type_bounds<'tcx>(
GenericParamDefKind::Const { .. } => {
let bound_var = ty::BoundVariableKind::Const;
bound_vars.push(bound_var);
tcx.mk_const(ty::ConstS {
ty: tcx.type_of(param.def_id),
kind: ty::ConstKind::Bound(
ty::INNERMOST,
ty::BoundVar::from_usize(bound_vars.len() - 1),
),
})
tcx.mk_const(
ty::ConstKind::Bound(ty::INNERMOST, ty::BoundVar::from_usize(bound_vars.len() - 1)),
tcx.type_of(param.def_id),
)
.into()
}
});


@ -218,19 +218,16 @@ fn check_item<'tcx>(tcx: TyCtxt<'tcx>, item: &'tcx hir::Item<'tcx>) {
hir::ItemKind::Const(ty, ..) => {
check_item_type(tcx, def_id, ty.span, false);
}
hir::ItemKind::Struct(ref struct_def, ref ast_generics) => {
check_type_defn(tcx, item, false, |wfcx| vec![wfcx.non_enum_variant(struct_def)]);
hir::ItemKind::Struct(_, ref ast_generics) => {
check_type_defn(tcx, item, false);
check_variances_for_type_defn(tcx, item, ast_generics);
}
hir::ItemKind::Union(ref struct_def, ref ast_generics) => {
check_type_defn(tcx, item, true, |wfcx| vec![wfcx.non_enum_variant(struct_def)]);
hir::ItemKind::Union(_, ref ast_generics) => {
check_type_defn(tcx, item, true);
check_variances_for_type_defn(tcx, item, ast_generics);
}
hir::ItemKind::Enum(ref enum_def, ref ast_generics) => {
check_type_defn(tcx, item, true, |wfcx| wfcx.enum_variants(enum_def));
hir::ItemKind::Enum(_, ref ast_generics) => {
check_type_defn(tcx, item, true);
check_variances_for_type_defn(tcx, item, ast_generics);
}
hir::ItemKind::Trait(..) => {
@ -1037,27 +1034,25 @@ fn item_adt_kind(kind: &ItemKind<'_>) -> Option<AdtKind> {
}
/// In a type definition, we check that to ensure that the types of the fields are well-formed.
fn check_type_defn<'tcx, F>(
tcx: TyCtxt<'tcx>,
item: &hir::Item<'tcx>,
all_sized: bool,
mut lookup_fields: F,
) where
F: FnMut(&WfCheckingCtxt<'_, 'tcx>) -> Vec<AdtVariant<'tcx>>,
{
fn check_type_defn<'tcx>(tcx: TyCtxt<'tcx>, item: &hir::Item<'tcx>, all_sized: bool) {
let _ = tcx.representability(item.owner_id.def_id);
let adt_def = tcx.adt_def(item.owner_id);
enter_wf_checking_ctxt(tcx, item.span, item.owner_id.def_id, |wfcx| {
let variants = lookup_fields(wfcx);
let packed = tcx.adt_def(item.owner_id).repr().packed();
let variants = adt_def.variants();
let packed = adt_def.repr().packed();
for variant in &variants {
for variant in variants.iter() {
// All field types must be well-formed.
for field in &variant.fields {
let field_id = field.did.expect_local();
let hir::Node::Field(hir::FieldDef { ty: hir_ty, .. }) = tcx.hir().get_by_def_id(field_id)
else { bug!() };
let ty = wfcx.normalize(hir_ty.span, None, tcx.type_of(field.did));
wfcx.register_wf_obligation(
field.span,
Some(WellFormedLoc::Ty(field.def_id)),
field.ty.into(),
hir_ty.span,
Some(WellFormedLoc::Ty(field_id)),
ty.into(),
)
}
@ -1065,7 +1060,7 @@ fn check_type_defn<'tcx, F>(
// intermediate types must be sized.
let needs_drop_copy = || {
packed && {
let ty = variant.fields.last().unwrap().ty;
let ty = tcx.type_of(variant.fields.last().unwrap().did);
let ty = tcx.erase_regions(ty);
if ty.needs_infer() {
tcx.sess
@ -1084,27 +1079,31 @@ fn check_type_defn<'tcx, F>(
variant.fields[..variant.fields.len() - unsized_len].iter().enumerate()
{
let last = idx == variant.fields.len() - 1;
let field_id = field.did.expect_local();
let hir::Node::Field(hir::FieldDef { ty: hir_ty, .. }) = tcx.hir().get_by_def_id(field_id)
else { bug!() };
let ty = wfcx.normalize(hir_ty.span, None, tcx.type_of(field.did));
wfcx.register_bound(
traits::ObligationCause::new(
field.span,
hir_ty.span,
wfcx.body_id,
traits::FieldSized {
adt_kind: match item_adt_kind(&item.kind) {
Some(i) => i,
None => bug!(),
},
span: field.span,
span: hir_ty.span,
last,
},
),
wfcx.param_env,
field.ty,
ty,
tcx.require_lang_item(LangItem::Sized, None),
);
}
// Explicit `enum` discriminant values must const-evaluate successfully.
if let Some(discr_def_id) = variant.explicit_discr {
if let ty::VariantDiscr::Explicit(discr_def_id) = variant.discr {
let cause = traits::ObligationCause::new(
tcx.def_span(discr_def_id),
wfcx.body_id,
@ -1114,7 +1113,7 @@ fn check_type_defn<'tcx, F>(
cause,
wfcx.param_env,
ty::Binder::dummy(ty::PredicateKind::ConstEvaluatable(
ty::Const::from_anon_const(tcx, discr_def_id),
ty::Const::from_anon_const(tcx, discr_def_id.expect_local()),
))
.to_predicate(tcx),
));
@ -1675,7 +1674,7 @@ fn receiver_is_valid<'tcx>(
// `self: Self` is always valid.
if can_eq_self(receiver_ty) {
if let Err(err) = wfcx.equate_types(&cause, wfcx.param_env, self_ty, receiver_ty) {
if let Err(err) = wfcx.eq(&cause, wfcx.param_env, self_ty, receiver_ty) {
infcx.err_ctxt().report_mismatched_types(&cause, self_ty, receiver_ty, err).emit();
}
return true;
@ -1705,9 +1704,7 @@ fn receiver_is_valid<'tcx>(
if can_eq_self(potential_self_ty) {
wfcx.register_obligations(autoderef.into_obligations());
if let Err(err) =
wfcx.equate_types(&cause, wfcx.param_env, self_ty, potential_self_ty)
{
if let Err(err) = wfcx.eq(&cause, wfcx.param_env, self_ty, potential_self_ty) {
infcx
.err_ctxt()
.report_mismatched_types(&cause, self_ty, potential_self_ty, err)
@ -1925,56 +1922,6 @@ fn check_mod_type_wf(tcx: TyCtxt<'_>, module: LocalDefId) {
items.par_foreign_items(|item| tcx.ensure().check_well_formed(item.owner_id));
}
///////////////////////////////////////////////////////////////////////////
// ADT
// FIXME(eddyb) replace this with getting fields/discriminants through `ty::AdtDef`.
struct AdtVariant<'tcx> {
/// Types of fields in the variant, that must be well-formed.
fields: Vec<AdtField<'tcx>>,
/// Explicit discriminant of this variant (e.g. `A = 123`),
/// that must evaluate to a constant value.
explicit_discr: Option<LocalDefId>,
}
struct AdtField<'tcx> {
ty: Ty<'tcx>,
def_id: LocalDefId,
span: Span,
}
impl<'a, 'tcx> WfCheckingCtxt<'a, 'tcx> {
// FIXME(eddyb) replace this with getting fields through `ty::AdtDef`.
fn non_enum_variant(&self, struct_def: &hir::VariantData<'_>) -> AdtVariant<'tcx> {
let fields = struct_def
.fields()
.iter()
.map(|field| {
let def_id = self.tcx().hir().local_def_id(field.hir_id);
let field_ty = self.tcx().type_of(def_id);
let field_ty = self.normalize(field.ty.span, None, field_ty);
debug!("non_enum_variant: type of field {:?} is {:?}", field, field_ty);
AdtField { ty: field_ty, span: field.ty.span, def_id }
})
.collect();
AdtVariant { fields, explicit_discr: None }
}
fn enum_variants(&self, enum_def: &hir::EnumDef<'_>) -> Vec<AdtVariant<'tcx>> {
enum_def
.variants
.iter()
.map(|variant| AdtVariant {
fields: self.non_enum_variant(&variant.data).fields,
explicit_discr: variant
.disr_expr
.map(|explicit_discr| self.tcx().hir().local_def_id(explicit_discr.hir_id)),
})
.collect()
}
}
fn error_392(
tcx: TyCtxt<'_>,
span: Span,

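An illustrative sketch (not from the diff) of the field-sizedness rule `check_type_defn` enforces above: only the last field of a struct may be unsized. `Packet` is a made-up type.

#[allow(dead_code)]
struct Packet {
    len: usize,
    payload: [u8], // unsized tail field: accepted only because it comes last
}

// Swapping the field order would be rejected by the check above with
// "the size for values of type `[u8]` cannot be known at compilation time".

fn main() {
    // `Packet` is a dynamically sized type, so it is only used behind a pointer;
    // `&Packet` is therefore a fat pointer (data pointer plus length).
    assert!(std::mem::size_of::<&Packet>() > std::mem::size_of::<usize>());
}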

@ -23,9 +23,7 @@ pub(crate) fn orphan_check_impl(
impl_def_id: LocalDefId,
) -> Result<(), ErrorGuaranteed> {
let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap();
if let Some(err) = trait_ref.error_reported() {
return Err(err);
}
trait_ref.error_reported()?;
let ret = do_orphan_check_impl(tcx, trait_ref, impl_def_id);
if tcx.trait_is_auto(trait_ref.def_id) {


@ -27,8 +27,8 @@ use rustc_hir as hir;
use rustc_hir::def::CtorKind;
use rustc_hir::def_id::{DefId, LocalDefId, LOCAL_CRATE};
use rustc_hir::intravisit::{self, Visitor};
use rustc_hir::weak_lang_items;
use rustc_hir::{GenericParamKind, Node};
use rustc_hir::weak_lang_items::WEAK_LANG_ITEMS;
use rustc_hir::{lang_items, GenericParamKind, LangItem, Node};
use rustc_middle::hir::nested_filter;
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
use rustc_middle::mir::mono::Linkage;
@ -379,8 +379,8 @@ impl<'tcx> AstConv<'tcx> for ItemCtxt<'tcx> {
self.tcx
}
fn item_def_id(&self) -> Option<DefId> {
Some(self.item_def_id)
fn item_def_id(&self) -> DefId {
self.item_def_id
}
fn get_type_parameter_bounds(
@ -604,11 +604,11 @@ fn convert_item(tcx: TyCtxt<'_>, item_id: hir::ItemId) {
}
}
}
hir::ItemKind::Enum(ref enum_definition, _) => {
hir::ItemKind::Enum(..) => {
tcx.ensure().generics_of(def_id);
tcx.ensure().type_of(def_id);
tcx.ensure().predicates_of(def_id);
convert_enum_variant_types(tcx, def_id.to_def_id(), enum_definition.variants);
convert_enum_variant_types(tcx, def_id.to_def_id());
}
hir::ItemKind::Impl { .. } => {
tcx.ensure().generics_of(def_id);
@ -640,7 +640,8 @@ fn convert_item(tcx: TyCtxt<'_>, item_id: hir::ItemId) {
}
if let Some(ctor_hir_id) = struct_def.ctor_hir_id() {
convert_variant_ctor(tcx, ctor_hir_id);
let ctor_def_id = tcx.hir().local_def_id(ctor_hir_id);
convert_variant_ctor(tcx, ctor_def_id);
}
}
@ -750,37 +751,34 @@ fn convert_impl_item(tcx: TyCtxt<'_>, impl_item_id: hir::ImplItemId) {
}
}
fn convert_variant_ctor(tcx: TyCtxt<'_>, ctor_id: hir::HirId) {
let def_id = tcx.hir().local_def_id(ctor_id);
fn convert_variant_ctor(tcx: TyCtxt<'_>, def_id: LocalDefId) {
tcx.ensure().generics_of(def_id);
tcx.ensure().type_of(def_id);
tcx.ensure().predicates_of(def_id);
}
fn convert_enum_variant_types(tcx: TyCtxt<'_>, def_id: DefId, variants: &[hir::Variant<'_>]) {
fn convert_enum_variant_types(tcx: TyCtxt<'_>, def_id: DefId) {
let def = tcx.adt_def(def_id);
let repr_type = def.repr().discr_type();
let initial = repr_type.initial_discriminant(tcx);
let mut prev_discr = None::<Discr<'_>>;
// fill the discriminant values and field types
for variant in variants {
for variant in def.variants() {
let wrapped_discr = prev_discr.map_or(initial, |d| d.wrap_incr(tcx));
prev_discr = Some(
if let Some(ref e) = variant.disr_expr {
let expr_did = tcx.hir().local_def_id(e.hir_id);
def.eval_explicit_discr(tcx, expr_did.to_def_id())
if let ty::VariantDiscr::Explicit(const_def_id) = variant.discr {
def.eval_explicit_discr(tcx, const_def_id)
} else if let Some(discr) = repr_type.disr_incr(tcx, prev_discr) {
Some(discr)
} else {
struct_span_err!(tcx.sess, variant.span, E0370, "enum discriminant overflowed")
.span_label(
variant.span,
format!("overflowed on value after {}", prev_discr.unwrap()),
)
let span = tcx.def_span(variant.def_id);
struct_span_err!(tcx.sess, span, E0370, "enum discriminant overflowed")
.span_label(span, format!("overflowed on value after {}", prev_discr.unwrap()))
.note(&format!(
"explicitly set `{} = {}` if that is desired outcome",
variant.ident, wrapped_discr
tcx.item_name(variant.def_id),
wrapped_discr
))
.emit();
None
@ -788,17 +786,16 @@ fn convert_enum_variant_types(tcx: TyCtxt<'_>, def_id: DefId, variants: &[hir::V
.unwrap_or(wrapped_discr),
);
for f in variant.data.fields() {
let def_id = tcx.hir().local_def_id(f.hir_id);
tcx.ensure().generics_of(def_id);
tcx.ensure().type_of(def_id);
tcx.ensure().predicates_of(def_id);
for f in &variant.fields {
tcx.ensure().generics_of(f.did);
tcx.ensure().type_of(f.did);
tcx.ensure().predicates_of(f.did);
}
// Convert the ctor, if any. This also registers the variant as
// an item.
if let Some(ctor_hir_id) = variant.data.ctor_hir_id() {
convert_variant_ctor(tcx, ctor_hir_id);
if let Some(ctor_def_id) = variant.ctor_def_id {
convert_variant_ctor(tcx, ctor_def_id.expect_local());
}
}
}
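An illustrative sketch (not from the diff) of the wrap-around that the E0370 note above suggests writing out explicitly; `Tag` is a made-up enum:

#[repr(u8)]
#[allow(dead_code)]
enum Tag {
    A = 254,
    B,     // 255, the maximum for `u8`
    C = 0, // explicit wrap; leaving `C` implicit would overflow and trigger E0370
}

fn main() {
    assert_eq!(Tag::C as u8, 0);
}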
@ -2104,12 +2101,15 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: DefId) -> CodegenFnAttrs {
// strippable by the linker.
//
// Additionally weak lang items have predetermined symbol names.
if tcx.is_weak_lang_item(did.to_def_id()) {
if WEAK_LANG_ITEMS.iter().any(|&l| tcx.lang_items().get(l) == Some(did.to_def_id())) {
codegen_fn_attrs.flags |= CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL;
}
if let Some(name) = weak_lang_items::link_name(attrs) {
codegen_fn_attrs.export_name = Some(name);
codegen_fn_attrs.link_name = Some(name);
if let Some((name, _)) = lang_items::extract(attrs)
&& let Some(lang_item) = LangItem::from_name(name)
&& let Some(link_name) = lang_item.link_name()
{
codegen_fn_attrs.export_name = Some(link_name);
codegen_fn_attrs.link_name = Some(link_name);
}
check_link_name_xor_ordinal(tcx, &codegen_fn_attrs, link_ordinal_span);


@ -1377,11 +1377,12 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
} else if let Some(body_id) = outermost_body {
let fn_id = self.tcx.hir().body_owner(body_id);
match self.tcx.hir().get(fn_id) {
Node::Item(&hir::Item { kind: hir::ItemKind::Fn(..), .. })
| Node::TraitItem(&hir::TraitItem {
Node::Item(hir::Item { kind: hir::ItemKind::Fn(..), .. })
| Node::TraitItem(hir::TraitItem {
kind: hir::TraitItemKind::Fn(..), ..
})
| Node::ImplItem(&hir::ImplItem { kind: hir::ImplItemKind::Fn(..), .. }) => {
| Node::ImplItem(hir::ImplItem { kind: hir::ImplItemKind::Fn(..), .. })
| Node::Expr(hir::Expr { kind: hir::ExprKind::Closure(..), .. }) => {
let scope = self.tcx.hir().local_def_id(fn_id);
def = Region::Free(scope.to_def_id(), def.id().unwrap());
}


@ -427,6 +427,8 @@ pub(super) fn explicit_predicates_of<'tcx>(
} else {
if matches!(def_kind, DefKind::AnonConst) && tcx.lazy_normalization() {
let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local());
let parent_def_id = tcx.hir().get_parent_item(hir_id);
if tcx.hir().opt_const_param_default_param_hir_id(hir_id).is_some() {
// In `generics_of` we set the generics' parent to be our parent's parent which means that
// we lose out on the predicates of our actual parent if we don't return those predicates here.
@ -439,8 +441,33 @@ pub(super) fn explicit_predicates_of<'tcx>(
// parent of generics returned by `generics_of`
//
// In the above code we want the anon const to have predicates in its param env for `T: Trait`
let item_def_id = tcx.hir().get_parent_item(hir_id);
// In the above code example we would be calling `explicit_predicates_of(Foo)` here
// and we would be calling `explicit_predicates_of(Foo)` here
return tcx.explicit_predicates_of(parent_def_id);
}
let parent_def_kind = tcx.def_kind(parent_def_id);
if matches!(parent_def_kind, DefKind::OpaqueTy) {
// In `instantiate_identity` we inherit the predicates of our parent.
// However, opaque types do not have a parent (see `gather_explicit_predicates_of`), which means
// that we lose out on the predicates of our actual parent if we don't return those predicates here.
//
//
// fn foo<T: Trait>() -> impl Iterator<Output = Another<{ <T as Trait>::ASSOC }> > { todo!() }
// ^^^^^^^^^^^^^^^^^^^ the def id we are calling
// explicit_predicates_of on
//
// In the above code we want the anon const to have predicates in its param env for `T: Trait`.
// However, the anon const cannot inherit predicates from its parent since it's opaque.
//
// To fix this, we call `explicit_predicates_of` directly on `foo`, the parent's parent.
// In the above example this is `foo::{opaque#0}` or `impl Iterator`
let parent_hir_id = tcx.hir().local_def_id_to_hir_id(parent_def_id.def_id);
// In the above example this is the function `foo`
let item_def_id = tcx.hir().get_parent_item(parent_hir_id);
// In the above code example we would be calling `explicit_predicates_of(foo)` here
return tcx.explicit_predicates_of(item_def_id);
}
}


@ -120,7 +120,7 @@ pub struct TypeofReservedKeywordUsed<'tcx> {
#[primary_span]
#[label]
pub span: Span,
#[suggestion_verbose(code = "{ty}")]
#[suggestion(style = "verbose", code = "{ty}")]
pub opt_sugg: Option<(Span, Applicability)>,
}
@ -156,6 +156,7 @@ pub struct MissingTypeParams {
// Manual implementation of `IntoDiagnostic` to be able to call `span_to_snippet`.
impl<'a> IntoDiagnostic<'a> for MissingTypeParams {
#[track_caller]
fn into_diagnostic(self, handler: &'a Handler) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
let mut err = handler.struct_span_err_with_code(
self.span,
@ -238,7 +239,11 @@ pub struct UnusedExternCrate {
#[derive(LintDiagnostic)]
#[diag(hir_analysis_extern_crate_not_idiomatic)]
pub struct ExternCrateNotIdiomatic {
#[suggestion_short(applicability = "machine-applicable", code = "{suggestion_code}")]
#[suggestion(
style = "short",
applicability = "machine-applicable",
code = "{suggestion_code}"
)]
pub span: Span,
pub msg_code: String,
pub suggestion_code: String,


@ -106,7 +106,7 @@ use rustc_middle::middle;
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_middle::util;
use rustc_session::config::EntryFnType;
use rustc_session::{config::EntryFnType, parse::feature_err};
use rustc_span::{symbol::sym, Span, DUMMY_SP};
use rustc_target::spec::abi::Abi;
use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt as _;
@ -118,20 +118,40 @@ use astconv::AstConv;
use bounds::Bounds;
fn require_c_abi_if_c_variadic(tcx: TyCtxt<'_>, decl: &hir::FnDecl<'_>, abi: Abi, span: Span) {
match (decl.c_variadic, abi) {
// The function has the correct calling convention, or isn't a "C-variadic" function.
(false, _) | (true, Abi::C { .. }) | (true, Abi::Cdecl { .. }) => {}
// The function is a "C-variadic" function with an incorrect calling convention.
(true, _) => {
let mut err = struct_span_err!(
tcx.sess,
span,
E0045,
"C-variadic function must have C or cdecl calling convention"
);
err.span_label(span, "C-variadics require C or cdecl calling convention").emit();
}
const ERROR_HEAD: &str = "C-variadic function must have a compatible calling convention";
const CONVENTIONS_UNSTABLE: &str = "`C`, `cdecl`, `win64`, `sysv64` or `efiapi`";
const CONVENTIONS_STABLE: &str = "`C` or `cdecl`";
const UNSTABLE_EXPLAIN: &str =
"using calling conventions other than `C` or `cdecl` for varargs functions is unstable";
if !decl.c_variadic || matches!(abi, Abi::C { .. } | Abi::Cdecl { .. }) {
return;
}
let extended_abi_support = tcx.features().extended_varargs_abi_support;
let conventions = match (extended_abi_support, abi.supports_varargs()) {
// User enabled additional ABI support for varargs and function ABI matches those ones.
(true, true) => return,
// Using this ABI would be ok, if the feature for additional ABI support was enabled.
// Return CONVENTIONS_STABLE, because we want the other error to look the same.
(false, true) => {
feature_err(
&tcx.sess.parse_sess,
sym::extended_varargs_abi_support,
span,
UNSTABLE_EXPLAIN,
)
.emit();
CONVENTIONS_STABLE
}
(false, false) => CONVENTIONS_STABLE,
(true, false) => CONVENTIONS_UNSTABLE,
};
let mut err = struct_span_err!(tcx.sess, span, E0045, "{}, like {}", ERROR_HEAD, conventions);
err.span_label(span, ERROR_HEAD).emit();
}
fn require_same_types<'tcx>(

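An illustrative sketch (not from the diff) of the rule `require_c_abi_if_c_variadic` enforces above: a C-variadic foreign function is only accepted for an ABI that supports varargs, which on stable means `C` or `cdecl`.

use std::os::raw::{c_char, c_int};

extern "C" {
    // Accepted: the `C` ABI supports varargs.
    fn printf(fmt: *const c_char, ...) -> c_int;
}

// Rejected with E0045 by the check above (unless the unstable
// `extended_varargs_abi_support` feature is enabled and the ABI supports varargs):
// extern "stdcall" { fn bad(fmt: *const c_char, ...); }

fn main() {
    // The declaration alone exercises the ABI check; only call `printf` where
    // the C runtime reliably exports it as a linkable symbol.
    #[cfg(unix)]
    {
        unsafe {
            printf(b"%d\n\0".as_ptr() as *const c_char, 7 as c_int);
        }
    }
}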

@ -491,11 +491,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
..
} = self.type_var_origin(expected)? else { return None; };
let sig = *self
.typeck_results
.borrow()
.liberated_fn_sigs()
.get(hir::HirId::make_owner(self.body_id.owner.def_id))?;
let sig = self.body_fn_sig()?;
let substs = sig.output().walk().find_map(|arg| {
if let ty::GenericArgKind::Type(ty) = arg.unpack()


@ -94,10 +94,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
debug!("pointer_kind({:?}, {:?})", t, span);
let t = self.resolve_vars_if_possible(t);
if let Some(reported) = t.error_reported() {
return Err(reported);
}
t.error_reported()?;
if self.type_is_sized_modulo_regions(self.param_env, t, span) {
return Ok(Some(PointerKind::Thin));
@ -222,8 +219,7 @@ impl<'a, 'tcx> CastCheck<'tcx> {
// inference is more completely known.
match cast_ty.kind() {
ty::Dynamic(_, _, ty::Dyn) | ty::Slice(..) => {
let reported = check.report_cast_to_unsized_type(fcx);
Err(reported)
Err(check.report_cast_to_unsized_type(fcx))
}
_ => Ok(check),
}
@ -614,10 +610,11 @@ impl<'a, 'tcx> CastCheck<'tcx> {
}
fn report_cast_to_unsized_type(&self, fcx: &FnCtxt<'a, 'tcx>) -> ErrorGuaranteed {
if let Some(reported) =
self.cast_ty.error_reported().or_else(|| self.expr_ty.error_reported())
{
return reported;
if let Err(err) = self.cast_ty.error_reported() {
return err;
}
if let Err(err) = self.expr_ty.error_reported() {
return err;
}
let tstr = fcx.ty_to_string(self.cast_ty);


@ -31,13 +31,11 @@ pub(super) fn check_fn<'a, 'tcx>(
fn_id: hir::HirId,
body: &'tcx hir::Body<'tcx>,
can_be_generator: Option<hir::Movability>,
return_type_pre_known: bool,
) -> (FnCtxt<'a, 'tcx>, Option<GeneratorTypes<'tcx>>) {
// Create the function context. This is either derived from scratch or,
// in the case of closures, based on the outer context.
let mut fcx = FnCtxt::new(inherited, param_env, body.value.hir_id);
fcx.ps.set(UnsafetyState::function(fn_sig.unsafety, fn_id));
fcx.return_type_pre_known = return_type_pre_known;
let tcx = fcx.tcx;
let hir = tcx.hir();
@ -51,9 +49,6 @@ pub(super) fn check_fn<'a, 'tcx>(
decl.output.span(),
param_env,
));
// If we replaced declared_ret_ty with infer vars, then we must be inferring
// an opaque type, so set a flag so we can improve diagnostics.
fcx.return_type_has_opaque = ret_ty != declared_ret_ty;
fcx.ret_coercion = Some(RefCell::new(CoerceMany::new(ret_ty)));
@ -211,13 +206,6 @@ pub(super) fn check_fn<'a, 'tcx>(
check_panic_info_fn(tcx, panic_impl_did.expect_local(), fn_sig, decl, declared_ret_ty);
}
// Check that a function marked as `#[alloc_error_handler]` has signature `fn(Layout) -> !`
if let Some(alloc_error_handler_did) = tcx.lang_items().oom()
&& alloc_error_handler_did == hir.local_def_id(fn_id).to_def_id()
{
check_alloc_error_fn(tcx, alloc_error_handler_did.expect_local(), fn_sig, decl, declared_ret_ty);
}
(fcx, gen_ty)
}
@ -273,52 +261,3 @@ fn check_panic_info_fn(
tcx.sess.span_err(span, "should have no const parameters");
}
}
fn check_alloc_error_fn(
tcx: TyCtxt<'_>,
fn_id: LocalDefId,
fn_sig: ty::FnSig<'_>,
decl: &hir::FnDecl<'_>,
declared_ret_ty: Ty<'_>,
) {
let Some(alloc_layout_did) = tcx.lang_items().alloc_layout() else {
tcx.sess.err("language item required, but not found: `alloc_layout`");
return;
};
if *declared_ret_ty.kind() != ty::Never {
tcx.sess.span_err(decl.output.span(), "return type should be `!`");
}
let inputs = fn_sig.inputs();
if inputs.len() != 1 {
tcx.sess.span_err(tcx.def_span(fn_id), "function should have one argument");
return;
}
let arg_is_alloc_layout = match inputs[0].kind() {
ty::Adt(ref adt, _) => adt.did() == alloc_layout_did,
_ => false,
};
if !arg_is_alloc_layout {
tcx.sess.span_err(decl.inputs[0].span, "argument should be `Layout`");
}
let DefKind::Fn = tcx.def_kind(fn_id) else {
let span = tcx.def_span(fn_id);
tcx.sess.span_err(span, "`#[alloc_error_handler]` should be a function");
return;
};
let generic_counts = tcx.generics_of(fn_id).own_counts();
if generic_counts.types != 0 {
let span = tcx.def_span(fn_id);
tcx.sess.span_err(span, "`#[alloc_error_handler]` function should have no type parameters");
}
if generic_counts.consts != 0 {
let span = tcx.def_span(fn_id);
tcx.sess
.span_err(span, "`#[alloc_error_handler]` function should have no const parameters");
}
}


@ -15,6 +15,7 @@ use rustc_middle::ty::visit::TypeVisitable;
use rustc_middle::ty::{self, Ty};
use rustc_span::source_map::Span;
use rustc_target::spec::abi::Abi;
use rustc_trait_selection::traits;
use rustc_trait_selection::traits::error_reporting::ArgKind;
use rustc_trait_selection::traits::error_reporting::InferCtxtExt as _;
use std::cmp;
@ -82,8 +83,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
debug!(?bound_sig, ?liberated_sig);
let return_type_pre_known = !liberated_sig.output().is_ty_infer();
let generator_types = check_fn(
self,
self.param_env.without_const(),
@ -92,7 +91,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
expr.hir_id,
body,
gen,
return_type_pre_known,
)
.1;
@ -225,33 +223,50 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
&self,
expected_vid: ty::TyVid,
) -> (Option<ExpectedSig<'tcx>>, Option<ty::ClosureKind>) {
let expected_sig =
self.obligations_for_self_ty(expected_vid).find_map(|(_, obligation)| {
debug!(?obligation.predicate);
let mut expected_sig = None;
let mut expected_kind = None;
let bound_predicate = obligation.predicate.kind();
if let ty::PredicateKind::Projection(proj_predicate) =
obligation.predicate.kind().skip_binder()
{
// Given a Projection predicate, we can potentially infer
// the complete signature.
self.deduce_sig_from_projection(
Some(obligation.cause.span),
bound_predicate.rebind(proj_predicate),
)
} else {
None
for obligation in traits::elaborate_obligations(
self.tcx,
// Reverse the obligations here, since `elaborate_*` uses a stack,
// and we want to keep inference generally in the same order of
// the registered obligations.
self.obligations_for_self_ty(expected_vid).rev().collect(),
) {
debug!(?obligation.predicate);
let bound_predicate = obligation.predicate.kind();
// Given a Projection predicate, we can potentially infer
// the complete signature.
if expected_sig.is_none()
&& let ty::PredicateKind::Projection(proj_predicate) = bound_predicate.skip_binder()
{
expected_sig = self.deduce_sig_from_projection(
Some(obligation.cause.span),
bound_predicate.rebind(proj_predicate),
);
}
// Even if we can't infer the full signature, we may be able to
// infer the kind. This can occur when we elaborate a predicate
// like `F : Fn<A>`. Note that due to subtyping we could encounter
// many viable options, so pick the most restrictive.
let trait_def_id = match bound_predicate.skip_binder() {
ty::PredicateKind::Projection(data) => {
Some(data.projection_ty.trait_def_id(self.tcx))
}
});
// Even if we can't infer the full signature, we may be able to
// infer the kind. This can occur when we elaborate a predicate
// like `F : Fn<A>`. Note that due to subtyping we could encounter
// many viable options, so pick the most restrictive.
let expected_kind = self
.obligations_for_self_ty(expected_vid)
.filter_map(|(tr, _)| self.tcx.fn_trait_kind_from_lang_item(tr.def_id()))
.fold(None, |best, cur| Some(best.map_or(cur, |best| cmp::min(best, cur))));
ty::PredicateKind::Trait(data) => Some(data.def_id()),
_ => None,
};
if let Some(closure_kind) =
trait_def_id.and_then(|def_id| self.tcx.fn_trait_kind_from_lang_item(def_id))
{
expected_kind = Some(
expected_kind
.map_or_else(|| closure_kind, |current| cmp::min(current, closure_kind)),
);
}
}
(expected_sig, expected_kind)
}
@ -689,7 +704,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let output_ty = match *ret_ty.kind() {
ty::Infer(ty::TyVar(ret_vid)) => {
self.obligations_for_self_ty(ret_vid).find_map(|(_, obligation)| {
self.obligations_for_self_ty(ret_vid).find_map(|obligation| {
get_future_output(obligation.predicate, obligation.cause.span)
})?
}

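An illustrative sketch (not from the diff) of the signature deduction the closure code above performs: the `Fn(i32) -> i32` bound is elaborated into trait and projection obligations, and the projection supplies the expected signature for the un-annotated closure. Names are made up.

fn call<F: Fn(i32) -> i32>(f: F) -> i32 {
    f(10)
}

fn main() {
    // No parameter or return type annotations are needed on the closure.
    assert_eq!(call(|x| x + 1), 11);
}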

@ -1782,7 +1782,8 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
// may occur at the first return expression we see in the closure
// (if it conflicts with the declared return type). Skip adding a
// note in this case, since it would be incorrect.
&& !fcx.return_type_pre_known
&& let Some(fn_sig) = fcx.body_fn_sig()
&& fn_sig.output().is_ty_var()
{
err.span_note(
sp,


@ -113,8 +113,9 @@ pub struct MissingParentheseInRange {
}
#[derive(Subdiagnostic)]
#[multipart_suggestion_verbose(
#[multipart_suggestion(
hir_analysis_add_missing_parentheses_in_range,
style = "verbose",
applicability = "maybe-incorrect"
)]
pub struct AddMissingParenthesesInRange {

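An illustrative sketch (not from the diff) of the case the `AddMissingParenthesesInRange` subdiagnostic above addresses: a method call binds tighter than `..`, so the range has to be parenthesized before calling a method on it.

fn main() {
    // `1..10.collect()` parses as `1..(10.collect())` and does not type-check;
    // the suggested fix is to parenthesize the range:
    let v: Vec<i32> = (1..10).collect();
    assert_eq!(v.len(), 9);
}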

@ -220,7 +220,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Hide the outer diverging and has_errors flags.
let old_diverges = self.diverges.replace(Diverges::Maybe);
let old_has_errors = self.has_errors.replace(false);
let ty = ensure_sufficient_stack(|| match &expr.kind {
hir::ExprKind::Path(
@ -259,7 +258,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Combine the diverging and has_error flags.
self.diverges.set(self.diverges.get() | old_diverges);
self.has_errors.set(self.has_errors.get() | old_has_errors);
debug!("type of {} is...", self.tcx.hir().node_to_string(expr.hir_id));
debug!("... {:?}, expected is {:?}", ty, expected);
@ -840,7 +838,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
return_expr_ty,
);
if self.return_type_has_opaque {
if let Some(fn_sig) = self.body_fn_sig()
&& fn_sig.output().has_opaque_types()
{
// Point any obligations that were registered due to opaque type
// inference at the return expression.
self.select_obligations_where_possible(false, |errors| {


@ -21,8 +21,8 @@ use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow, AutoBorrowMut
use rustc_middle::ty::fold::TypeFoldable;
use rustc_middle::ty::visit::TypeVisitable;
use rustc_middle::ty::{
self, AdtKind, CanonicalUserType, DefIdTree, EarlyBinder, GenericParamDefKind, ToPolyTraitRef,
ToPredicate, Ty, UserType,
self, AdtKind, CanonicalUserType, DefIdTree, EarlyBinder, GenericParamDefKind, ToPredicate, Ty,
UserType,
};
use rustc_middle::ty::{GenericArgKind, InternalSubsts, SubstsRef, UserSelfTy, UserSubsts};
use rustc_session::lint;
@ -143,7 +143,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self.typeck_results.borrow_mut().node_types_mut().insert(id, ty);
if ty.references_error() {
self.has_errors.set(true);
self.set_tainted_by_errors();
}
}
@ -650,12 +649,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
#[instrument(skip(self), level = "debug")]
fn self_type_matches_expected_vid(
&self,
trait_ref: ty::PolyTraitRef<'tcx>,
expected_vid: ty::TyVid,
) -> bool {
let self_ty = self.shallow_resolve(trait_ref.skip_binder().self_ty());
fn self_type_matches_expected_vid(&self, self_ty: Ty<'tcx>, expected_vid: ty::TyVid) -> bool {
let self_ty = self.shallow_resolve(self_ty);
debug!(?self_ty);
match *self_ty.kind() {
@ -674,54 +669,61 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
pub(in super::super) fn obligations_for_self_ty<'b>(
&'b self,
self_ty: ty::TyVid,
) -> impl Iterator<Item = (ty::PolyTraitRef<'tcx>, traits::PredicateObligation<'tcx>)>
+ Captures<'tcx>
+ 'b {
) -> impl DoubleEndedIterator<Item = traits::PredicateObligation<'tcx>> + Captures<'tcx> + 'b
{
// FIXME: consider using `sub_root_var` here so we
// can see through subtyping.
let ty_var_root = self.root_var(self_ty);
trace!("pending_obligations = {:#?}", self.fulfillment_cx.borrow().pending_obligations());
self.fulfillment_cx
.borrow()
.pending_obligations()
.into_iter()
.filter_map(move |obligation| {
let bound_predicate = obligation.predicate.kind();
match bound_predicate.skip_binder() {
ty::PredicateKind::Projection(data) => Some((
bound_predicate.rebind(data).required_poly_trait_ref(self.tcx),
obligation,
)),
ty::PredicateKind::Trait(data) => {
Some((bound_predicate.rebind(data).to_poly_trait_ref(), obligation))
}
ty::PredicateKind::Subtype(..) => None,
ty::PredicateKind::Coerce(..) => None,
ty::PredicateKind::RegionOutlives(..) => None,
ty::PredicateKind::TypeOutlives(..) => None,
ty::PredicateKind::WellFormed(..) => None,
ty::PredicateKind::ObjectSafe(..) => None,
ty::PredicateKind::ConstEvaluatable(..) => None,
ty::PredicateKind::ConstEquate(..) => None,
// N.B., this predicate is created by breaking down a
// `ClosureType: FnFoo()` predicate, where
// `ClosureType` represents some `Closure`. It can't
// possibly be referring to the current closure,
// because we haven't produced the `Closure` for
// this closure yet; this is exactly why the other
// code is looking for a self type of an unresolved
// inference variable.
ty::PredicateKind::ClosureKind(..) => None,
ty::PredicateKind::TypeWellFormedFromEnv(..) => None,
self.fulfillment_cx.borrow().pending_obligations().into_iter().filter_map(
move |obligation| match &obligation.predicate.kind().skip_binder() {
ty::PredicateKind::Projection(data)
if self.self_type_matches_expected_vid(
data.projection_ty.self_ty(),
ty_var_root,
) =>
{
Some(obligation)
}
})
.filter(move |(tr, _)| self.self_type_matches_expected_vid(*tr, ty_var_root))
ty::PredicateKind::Trait(data)
if self.self_type_matches_expected_vid(data.self_ty(), ty_var_root) =>
{
Some(obligation)
}
ty::PredicateKind::Trait(..)
| ty::PredicateKind::Projection(..)
| ty::PredicateKind::Subtype(..)
| ty::PredicateKind::Coerce(..)
| ty::PredicateKind::RegionOutlives(..)
| ty::PredicateKind::TypeOutlives(..)
| ty::PredicateKind::WellFormed(..)
| ty::PredicateKind::ObjectSafe(..)
| ty::PredicateKind::ConstEvaluatable(..)
| ty::PredicateKind::ConstEquate(..)
// N.B., this predicate is created by breaking down a
// `ClosureType: FnFoo()` predicate, where
// `ClosureType` represents some `Closure`. It can't
// possibly be referring to the current closure,
// because we haven't produced the `Closure` for
// this closure yet; this is exactly why the other
// code is looking for a self type of an unresolved
// inference variable.
| ty::PredicateKind::ClosureKind(..)
| ty::PredicateKind::TypeWellFormedFromEnv(..) => None,
},
)
}
pub(in super::super) fn type_var_is_sized(&self, self_ty: ty::TyVid) -> bool {
self.obligations_for_self_ty(self_ty)
.any(|(tr, _)| Some(tr.def_id()) == self.tcx.lang_items().sized_trait())
let sized_did = self.tcx.lang_items().sized_trait();
self.obligations_for_self_ty(self_ty).any(|obligation| {
match obligation.predicate.kind().skip_binder() {
ty::PredicateKind::Trait(data) => Some(data.def_id()) == sized_did,
_ => false,
}
})
}
pub(in super::super) fn err_args(&self, len: usize) -> Vec<Ty<'tcx>> {


@ -1334,7 +1334,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Hide the outer diverging and `has_errors` flags.
let old_diverges = self.diverges.replace(Diverges::Maybe);
let old_has_errors = self.has_errors.replace(false);
match stmt.kind {
hir::StmtKind::Local(l) => {
@ -1364,7 +1363,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Combine the diverging and `has_error` flags.
self.diverges.set(self.diverges.get() | old_diverges);
self.has_errors.set(self.has_errors.get() | old_has_errors);
}
pub fn check_block_no_value(&self, blk: &'tcx hir::Block<'tcx>) {
@ -1544,11 +1542,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self.diverges.set(prev_diverges);
}
let mut ty = ctxt.coerce.unwrap().complete(self);
if self.has_errors.get() || ty.references_error() {
ty = self.tcx.ty_error()
}
let ty = ctxt.coerce.unwrap().complete(self);
self.write_ty(blk.hir_id, ty);


@ -112,21 +112,9 @@ pub struct FnCtxt<'a, 'tcx> {
/// the diverges flag is set to something other than `Maybe`.
pub(super) diverges: Cell<Diverges>,
/// Whether any child nodes have any type errors.
pub(super) has_errors: Cell<bool>,
pub(super) enclosing_breakables: RefCell<EnclosingBreakables<'tcx>>,
pub(super) inh: &'a Inherited<'tcx>,
/// True if the function or closure's return type is known before
/// entering the function/closure, i.e. if the return type is
/// either given explicitly or inferred from, say, an `Fn*` trait
/// bound. Used for diagnostic purposes only.
pub(super) return_type_pre_known: bool,
/// True if the return type has an Opaque type
pub(super) return_type_has_opaque: bool,
}
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
@ -145,14 +133,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
resume_yield_tys: None,
ps: Cell::new(UnsafetyState::function(hir::Unsafety::Normal, hir::CRATE_HIR_ID)),
diverges: Cell::new(Diverges::Maybe),
has_errors: Cell::new(false),
enclosing_breakables: RefCell::new(EnclosingBreakables {
stack: Vec::new(),
by_id: Default::default(),
}),
inh,
return_type_pre_known: true,
return_type_has_opaque: false,
}
}
@ -194,8 +179,8 @@ impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> {
self.tcx
}
fn item_def_id(&self) -> Option<DefId> {
None
fn item_def_id(&self) -> DefId {
self.body_id.owner.to_def_id()
}
fn get_type_parameter_bounds(


@ -22,6 +22,14 @@ use rustc_trait_selection::traits::error_reporting::DefIdOrName;
use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _;
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
pub(crate) fn body_fn_sig(&self) -> Option<ty::FnSig<'tcx>> {
self.typeck_results
.borrow()
.liberated_fn_sigs()
.get(self.tcx.hir().get_parent_node(self.body_id))
.copied()
}
pub(in super::super) fn suggest_semicolon_at_end(&self, span: Span, err: &mut Diagnostic) {
err.span_suggestion_short(
span.shrink_to_hi(),


@ -250,7 +250,7 @@ fn typeck_with_fallback<'tcx>(
param_env,
fn_sig,
);
check_fn(&inh, param_env, fn_sig, decl, id, body, None, true).0
check_fn(&inh, param_env, fn_sig, decl, id, body, None).0
} else {
let fcx = FnCtxt::new(&inh, param_env, body.value.hir_id);
let expected_type = body_ty


@ -55,8 +55,7 @@ pub enum MethodError<'tcx> {
// not-in-scope traits which may work.
PrivateMatch(DefKind, DefId, Vec<DefId>),
// Found a `Self: Sized` bound where `Self` is a trait object, also the caller may have
// forgotten to import a trait.
// Found a `Self: Sized` bound where `Self` is a trait object.
IllegalSizedBound(Vec<DefId>, bool, Span),
// Found a match, but the return type is wrong


@ -1019,7 +1019,6 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
let out_of_scope_traits = match self.pick_core() {
Some(Ok(p)) => vec![p.item.container_id(self.tcx)],
//Some(Ok(p)) => p.iter().map(|p| p.item.container().id()).collect(),
Some(Err(MethodError::Ambiguity(v))) => v
.into_iter()
.map(|source| match source {


@ -248,7 +248,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
match error {
MethodError::NoMatch(NoMatchData {
static_candidates: mut static_sources,
mut static_candidates,
unsatisfied_predicates,
out_of_scope_traits,
lev_candidate,
@ -288,9 +288,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
if generics.len() > 0 {
let mut autoderef = self.autoderef(span, actual);
let candidate_found = autoderef.any(|(ty, _)| {
if let ty::Adt(adt_deref, _) = ty.kind() {
if let ty::Adt(adt_def, _) = ty.kind() {
self.tcx
.inherent_impls(adt_deref.did())
.inherent_impls(adt_def.did())
.iter()
.filter_map(|def_id| self.associated_value(*def_id, item_name))
.count()
@ -348,15 +348,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
let ty_span = match actual.kind() {
ty::Param(param_type) => {
let generics = self.tcx.generics_of(self.body_id.owner.to_def_id());
let type_param = generics.type_param(param_type, self.tcx);
Some(self.tcx.def_span(type_param.def_id))
}
ty::Param(param_type) => Some(
param_type.span_from_generics(self.tcx, self.body_id.owner.to_def_id()),
),
ty::Adt(def, _) if def.did().is_local() => Some(tcx.def_span(def.did())),
_ => None,
};
if let Some(span) = ty_span {
err.span_label(
span,
@ -386,7 +383,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let mut custom_span_label = false;
if !static_sources.is_empty() {
if !static_candidates.is_empty() {
err.note(
"found the following associated functions; to be used as methods, \
functions must have a `self` parameter",
@ -394,9 +391,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
err.span_label(span, "this is an associated function, not a method");
custom_span_label = true;
}
if static_sources.len() == 1 {
if static_candidates.len() == 1 {
let ty_str =
if let Some(CandidateSource::Impl(impl_did)) = static_sources.get(0) {
if let Some(CandidateSource::Impl(impl_did)) = static_candidates.get(0) {
// When the "method" is resolved through dereferencing, we really want the
// original type that has the associated function for accurate suggestions.
// (#61411)
@ -422,9 +419,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
err.help(&format!("try with `{}::{}`", ty_str, item_name,));
}
report_candidates(span, &mut err, &mut static_sources, sugg_span);
} else if static_sources.len() > 1 {
report_candidates(span, &mut err, &mut static_sources, sugg_span);
report_candidates(span, &mut err, &mut static_candidates, sugg_span);
} else if static_candidates.len() > 1 {
report_candidates(span, &mut err, &mut static_candidates, sugg_span);
}
let mut bound_spans = vec![];
@ -496,24 +493,18 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
if let (ty::Param(_), ty::PredicateKind::Trait(p)) =
(self_ty.kind(), parent_pred.kind().skip_binder())
{
let hir = self.tcx.hir();
let node = match p.trait_ref.self_ty().kind() {
ty::Param(_) => {
// Account for `fn` items like in `issue-35677.rs` to
// suggest restricting its type params.
let did = self.tcx.hir().body_owner_def_id(hir::BodyId {
hir_id: self.body_id,
});
Some(
self.tcx
.hir()
.get(self.tcx.hir().local_def_id_to_hir_id(did)),
)
let parent_body =
hir.body_owner(hir::BodyId { hir_id: self.body_id });
Some(hir.get(parent_body))
}
ty::Adt(def, _) => {
def.did().as_local().map(|def_id| hir.get_by_def_id(def_id))
}
ty::Adt(def, _) => def.did().as_local().map(|def_id| {
self.tcx
.hir()
.get(self.tcx.hir().local_def_id_to_hir_id(def_id))
}),
_ => None,
};
if let Some(hir::Node::Item(hir::Item { kind, .. })) = node {
@ -605,7 +596,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.iter()
.filter_map(|(p, parent, c)| c.as_ref().map(|c| (p, parent, c)))
.filter_map(|(p, parent, c)| match c.code() {
ObligationCauseCode::ImplDerivedObligation(ref data) => {
ObligationCauseCode::ImplDerivedObligation(data) => {
Some((&data.derived, p, parent, data.impl_def_id, data))
}
_ => None,
@ -620,22 +611,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
match self.tcx.hir().get_if_local(impl_def_id) {
// Unmet obligation comes from a `derive` macro, point at it once to
// avoid multiple span labels pointing at the same place.
Some(Node::Item(hir::Item {
kind: hir::ItemKind::Trait(..),
ident,
..
})) if matches!(
ident.span.ctxt().outer_expn_data().kind,
ExpnKind::Macro(MacroKind::Derive, _)
) =>
{
let span = ident.span.ctxt().outer_expn_data().call_site;
let mut spans: MultiSpan = span.into();
spans.push_span_label(span, derive_msg);
let entry = spanned_predicates.entry(spans);
entry.or_insert_with(|| (path, tr_self_ty, Vec::new())).2.push(p);
}
Some(Node::Item(hir::Item {
kind: hir::ItemKind::Impl(hir::Impl { of_trait, self_ty, .. }),
..
@ -659,34 +634,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
entry.or_insert_with(|| (path, tr_self_ty, Vec::new())).2.push(p);
}
// Unmet obligation coming from a `trait`.
Some(Node::Item(hir::Item {
kind: hir::ItemKind::Trait(..),
ident,
span: item_span,
..
})) if !matches!(
ident.span.ctxt().outer_expn_data().kind,
ExpnKind::Macro(MacroKind::Derive, _)
) =>
{
if let Some(pred) = parent_p {
// Done to add the "doesn't satisfy" `span_label`.
let _ = format_pred(*pred);
}
skip_list.insert(p);
let mut spans = if cause.span != *item_span {
let mut spans: MultiSpan = cause.span.into();
spans.push_span_label(cause.span, unsatisfied_msg);
spans
} else {
ident.span.into()
};
spans.push_span_label(ident.span, "in this trait");
let entry = spanned_predicates.entry(spans);
entry.or_insert_with(|| (path, tr_self_ty, Vec::new())).2.push(p);
}
// Unmet obligation coming from an `impl`.
Some(Node::Item(hir::Item {
kind:
@ -695,19 +642,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}),
span: item_span,
..
})) if !matches!(
self_ty.span.ctxt().outer_expn_data().kind,
ExpnKind::Macro(MacroKind::Derive, _)
) && !matches!(
of_trait.as_ref().map(|t| t
.path
.span
.ctxt()
.outer_expn_data()
.kind),
Some(ExpnKind::Macro(MacroKind::Derive, _))
) =>
{
})) => {
let sized_pred =
unsatisfied_predicates.iter().any(|(pred, _, _)| {
match pred.kind().skip_binder() {
@ -759,7 +694,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let entry = spanned_predicates.entry(spans);
entry.or_insert_with(|| (path, tr_self_ty, Vec::new())).2.push(p);
}
_ => {}
Some(_) => unreachable!(),
None => (),
}
}
let mut spanned_predicates: Vec<_> = spanned_predicates.into_iter().collect();
@ -863,7 +799,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.on_unimplemented_note(trait_ref, &obligation);
(message, label)
})
.unwrap_or((None, None))
.unwrap()
} else {
(None, None)
};
@ -972,7 +908,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// If the method name is the name of a field with a function or closure type,
// give a helping note that it has to be called as `(x.f)(...)`.
if let SelfSource::MethodCall(expr) = source {
if !self.suggest_field_call(span, rcvr_ty, expr, item_name, &mut err)
if !self.suggest_calling_field_as_fn(span, rcvr_ty, expr, item_name, &mut err)
&& lev_candidate.is_none()
&& !custom_span_label
{
@ -982,10 +918,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
label_span_not_found(&mut err);
}
// Don't suggest (for example) `expr.field.method()` if `expr.method()`
// doesn't exist due to unsatisfied predicates.
// Don't suggest (for example) `expr.field.clone()` if `expr.clone()`
// can't be called due to `typeof(expr): Clone` not holding.
if unsatisfied_predicates.is_empty() {
self.check_for_field_method(&mut err, source, span, actual, item_name);
self.suggest_calling_method_on_field(&mut err, source, span, actual, item_name);
}
self.check_for_inner_self(&mut err, source, span, actual, item_name);
@ -1007,7 +943,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
source,
out_of_scope_traits,
&unsatisfied_predicates,
&static_sources,
&static_candidates,
unsatisfied_bounds,
);
}
@ -1146,7 +1082,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
None
}
fn suggest_field_call(
/// Suggest calling a field with a type that implements the `Fn*` traits instead of a method with
/// the same name as the field i.e. `(a.my_fn_ptr)(10)` instead of `a.my_fn_ptr(10)`.
fn suggest_calling_field_as_fn(
&self,
span: Span,
rcvr_ty: Ty<'tcx>,
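An illustrative sketch (not from the diff) of the situation `suggest_calling_field_as_fn` targets; `Widget` and `callback` are made up:

struct Widget {
    callback: fn(i32) -> i32,
}

fn main() {
    let w = Widget { callback: |x| x + 1 };
    // `w.callback(10)` fails with "no method named `callback`"; the suggestion
    // points at the parenthesized field call instead:
    assert_eq!((w.callback)(10), 11);
}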
@ -1408,7 +1346,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
false
}
fn check_for_field_method(
/// Suggest calling a method on a field i.e. `a.field.bar()` instead of `a.bar()`
fn suggest_calling_method_on_field(
&self,
err: &mut Diagnostic,
source: SelfSource<'tcx>,
@ -2021,7 +1960,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
) {
let mut alt_rcvr_sugg = false;
if let (SelfSource::MethodCall(rcvr), false) = (source, unsatisfied_bounds) {
debug!(?span, ?item_name, ?rcvr_ty, ?rcvr);
debug!(
"suggest_traits_to_import: span={:?}, item_name={:?}, rcvr_ty={:?}, rcvr={:?}",
span, item_name, rcvr_ty, rcvr
);
let skippable = [
self.tcx.lang_items().clone_trait(),
self.tcx.lang_items().deref_trait(),
@ -2060,7 +2002,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// suggestions are generally misleading (see #94218).
break;
}
_ => {}
Err(_) => (),
}
for (rcvr_ty, pre) in &[


@ -19,7 +19,7 @@ use rustc_span::symbol::{sym, Ident};
use rustc_span::Span;
use rustc_trait_selection::infer::InferCtxtExt;
use rustc_trait_selection::traits::error_reporting::suggestions::TypeErrCtxtExt as _;
use rustc_trait_selection::traits::{FulfillmentError, TraitEngine, TraitEngineExt};
use rustc_trait_selection::traits::FulfillmentError;
use rustc_type_ir::sty::TyKind::*;
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
@ -785,9 +785,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
other_ty_expr,
expected,
);
let mut fulfill = <dyn TraitEngine<'_>>::new(self.tcx);
fulfill.register_predicate_obligation(self, obligation);
Err(fulfill.select_where_possible(&self.infcx))
Err(rustc_trait_selection::traits::fully_solve_obligation(self, obligation))
}
}
}


@ -109,8 +109,9 @@ pub struct InferenceBadError<'a> {
#[derive(Subdiagnostic)]
pub enum SourceKindSubdiag<'a> {
#[suggestion_verbose(
#[suggestion(
infer_source_kind_subdiag_let,
style = "verbose",
code = ": {type_name}",
applicability = "has-placeholders"
)]
@ -135,8 +136,9 @@ pub enum SourceKindSubdiag<'a> {
parent_prefix: String,
parent_name: String,
},
#[suggestion_verbose(
#[suggestion(
infer_source_kind_subdiag_generic_suggestion,
style = "verbose",
code = "::<{args}>",
applicability = "has-placeholders"
)]
@ -150,8 +152,9 @@ pub enum SourceKindSubdiag<'a> {
#[derive(Subdiagnostic)]
pub enum SourceKindMultiSuggestion<'a> {
#[multipart_suggestion_verbose(
#[multipart_suggestion(
infer_source_kind_fully_qualified,
style = "verbose",
applicability = "has-placeholders"
)]
FullyQualified {
@ -163,8 +166,9 @@ pub enum SourceKindMultiSuggestion<'a> {
adjustment: &'a str,
successor_pos: &'a str,
},
#[multipart_suggestion_verbose(
#[multipart_suggestion(
infer_source_kind_closure_return,
style = "verbose",
applicability = "has-placeholders"
)]
ClosureReturn {
@ -478,8 +482,9 @@ pub enum ImplicitStaticLifetimeSubdiag {
#[primary_span]
span: Span,
},
#[suggestion_verbose(
#[suggestion(
infer_implicit_static_lifetime_suggestion,
style = "verbose",
code = " + '_",
applicability = "maybe-incorrect"
)]


@ -773,10 +773,10 @@ impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> {
self.fold_const(bound_to)
} else {
let var = self.canonical_var(info, const_var.into());
self.tcx().mk_const(ty::ConstS {
kind: ty::ConstKind::Bound(self.binder_index, var),
ty: self.fold_ty(const_var.ty()),
})
self.tcx().mk_const(
ty::ConstKind::Bound(self.binder_index, var),
self.fold_ty(const_var.ty()),
)
}
}
}


@ -43,7 +43,7 @@ impl<'tcx> InferCtxt<'tcx> {
///
/// This is only meant to be invoked as part of constructing an
/// inference context at the start of a query (see
/// `InferCtxtBuilder::enter_with_canonical`). It basically
/// `InferCtxtBuilder::build_with_canonical`). It basically
/// brings the canonical value "into scope" within your new infcx.
///
/// At the end of processing, the substitution S (once
@ -147,12 +147,7 @@ impl<'tcx> InferCtxt<'tcx> {
CanonicalVarKind::PlaceholderConst(ty::PlaceholderConst { universe, name }, ty) => {
let universe_mapped = universe_map(universe);
let placeholder_mapped = ty::PlaceholderConst { universe: universe_mapped, name };
self.tcx
.mk_const(ty::ConstS {
kind: ty::ConstKind::Placeholder(placeholder_mapped),
ty,
})
.into()
self.tcx.mk_const(ty::ConstKind::Placeholder(placeholder_mapped), ty).into()
}
}
}


@ -741,10 +741,10 @@ impl<'tcx> TypeRelation<'tcx> for Generalizer<'_, 'tcx> {
substs,
substs,
)?;
Ok(self.tcx().mk_const(ty::ConstS {
ty: c.ty(),
kind: ty::ConstKind::Unevaluated(ty::UnevaluatedConst { def, substs }),
}))
Ok(self.tcx().mk_const(
ty::ConstKind::Unevaluated(ty::UnevaluatedConst { def, substs }),
c.ty(),
))
}
_ => relate::super_relate_consts(self, c, c),
}
@ -955,10 +955,10 @@ impl<'tcx> TypeRelation<'tcx> for ConstInferUnifier<'_, 'tcx> {
substs,
)?;
Ok(self.tcx().mk_const(ty::ConstS {
ty: c.ty(),
kind: ty::ConstKind::Unevaluated(ty::UnevaluatedConst { def, substs }),
}))
Ok(self.tcx().mk_const(
ty::ConstKind::Unevaluated(ty::UnevaluatedConst { def, substs }),
c.ty(),
))
}
_ => relate::super_relate_consts(self, c, c),
}


@ -94,13 +94,13 @@ impl<'tcx> InferCtxt<'tcx> {
}))
},
consts: &mut |bound_var: ty::BoundVar, ty| {
self.tcx.mk_const(ty::ConstS {
kind: ty::ConstKind::Placeholder(ty::PlaceholderConst {
self.tcx.mk_const(
ty::ConstKind::Placeholder(ty::PlaceholderConst {
universe: next_universe,
name: bound_var,
}),
ty,
})
)
},
};


@ -2065,13 +2065,13 @@ fn replace_param_and_infer_substs_with_placeholder<'tcx>(
if ty.has_non_region_param() || ty.has_non_region_infer() {
bug!("const `{ct}`'s type should not reference params or types");
}
tcx.mk_const(ty::ConstS {
ty,
kind: ty::ConstKind::Placeholder(ty::PlaceholderConst {
tcx.mk_const(
ty::ConstKind::Placeholder(ty::PlaceholderConst {
universe: ty::UniverseIndex::ROOT,
name: ty::BoundVar::from_usize(idx),
}),
})
ty,
)
.into()
}
_ => arg,


@ -48,12 +48,6 @@ rustc_resolve = { path = "../rustc_resolve" }
rustc_trait_selection = { path = "../rustc_trait_selection" }
rustc_ty_utils = { path = "../rustc_ty_utils" }
[target.'cfg(unix)'.dependencies]
libc = "0.2"
[target.'cfg(windows)'.dependencies]
winapi = { version = "0.3", features = ["libloaderapi"] }
[dev-dependencies]
rustc_target = { path = "../rustc_target" }
