Merge from rustc

The Miri Cronjob Bot 2024-03-19 05:04:07 +00:00
commit be6c1704dc
635 changed files with 8908 additions and 4776 deletions


@ -58,7 +58,7 @@ jobs:
- name: mingw-check-tidy
os: ubuntu-20.04-4core-16gb
env: {}
- name: x86_64-gnu-llvm-16
- name: x86_64-gnu-llvm-17
env:
ENABLE_GCC_CODEGEN: "1"
os: ubuntu-20.04-16core-64gb
@ -323,10 +323,6 @@ jobs:
env:
RUST_BACKTRACE: 1
os: ubuntu-20.04-8core-32gb
- name: x86_64-gnu-llvm-16
env:
RUST_BACKTRACE: 1
os: ubuntu-20.04-8core-32gb
- name: x86_64-gnu-nopt
os: ubuntu-20.04-4core-16gb
env: {}


@ -37,9 +37,9 @@ dependencies = [
[[package]]
name = "ahash"
version = "0.8.10"
version = "0.8.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b79b82693f705137f8fb9b37871d99e4f9a7df12b917eed79c3d3954830a60b"
checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011"
dependencies = [
"cfg-if",
"once_cell",
@ -201,9 +201,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.80"
version = "1.0.81"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ad32ce52e4161730f7098c077cd2ed6229b5804ccf99e5366be1ab72a98b4e1"
checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247"
dependencies = [
"backtrace",
]
@ -246,7 +246,7 @@ dependencies = [
"proc-macro2",
"quote",
"serde",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]
@ -293,9 +293,9 @@ checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]]
name = "basic-toml"
version = "0.1.8"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2db21524cad41c5591204d22d75e1970a2d1f71060214ca931dc7d5afe2c14e5"
checksum = "823388e228f614e9558c6804262db37960ec8821856535f5c3f59913140558f8"
dependencies = [
"serde",
]
@ -379,9 +379,9 @@ dependencies = [
[[package]]
name = "bumpalo"
version = "3.15.3"
version = "3.15.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ea184aa71bb362a1157c896979544cc23974e08fd265f29ea96b59f0b4a555b"
checksum = "7ff69b9dd49fd426c69a0db9fc04dd934cdb6645ff000864d98f7e2af8830eaa"
[[package]]
name = "bytecount"
@ -484,10 +484,16 @@ dependencies = [
]
[[package]]
name = "chrono"
version = "0.4.34"
name = "cfg_aliases"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bc015644b92d5890fab7489e49d21f879d5c990186827d42ec511919404f38b"
checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e"
[[package]]
name = "chrono"
version = "0.4.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8eaf5903dcbc0a39312feb77df2ff4c76387d591b9fc7b04a238dcf8bb62639a"
dependencies = [
"android-tzdata",
"iana-time-zone",
@ -508,9 +514,9 @@ dependencies = [
[[package]]
name = "clap"
version = "4.5.1"
version = "4.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c918d541ef2913577a0f9566e9ce27cb35b6df072075769e0b26cb5a554520da"
checksum = "949626d00e063efc93b6dca932419ceb5432f99769911c0b995f7e884c778813"
dependencies = [
"clap_builder",
"clap_derive",
@ -528,9 +534,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.1"
version = "4.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f3e7391dad68afb0c2ede1bf619f579a3dc9c2ec67f089baa397123a2f3d1eb"
checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4"
dependencies = [
"anstream",
"anstyle",
@ -550,14 +556,14 @@ dependencies = [
[[package]]
name = "clap_derive"
version = "4.5.0"
version = "4.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "307bc0538d5f0f83b8248db3087aa92fe504e4691294d0c96c0eabc33f47ba47"
checksum = "90239a040c80f5e14809ca132ddc4176ab33d5e17e49691793296e3fcb34d72f"
dependencies = [
"heck",
"heck 0.5.0",
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]
@ -584,7 +590,7 @@ dependencies = [
"regex",
"rustc_tools_util",
"serde",
"syn 2.0.52",
"syn 2.0.53",
"tempfile",
"termize",
"tester",
@ -664,9 +670,9 @@ dependencies = [
[[package]]
name = "color-eyre"
version = "0.6.2"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a667583cca8c4f8436db8de46ea8233c42a7d9ae424a82d338f2e4675229204"
checksum = "55146f5e46f237f7423d74111267d4597b59b0dad0ffaf7303bce9945d843ad5"
dependencies = [
"backtrace",
"color-spantrace",
@ -884,9 +890,9 @@ dependencies = [
[[package]]
name = "ctrlc"
version = "3.4.2"
version = "3.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b467862cc8610ca6fc9a1532d7777cee0804e678ab45410897b9396495994a0b"
checksum = "672465ae37dc1bc6380a6547a8883d5dd397b0f1faaad4f265726cc7042a5345"
dependencies = [
"nix",
"windows-sys 0.52.0",
@ -943,7 +949,7 @@ dependencies = [
"proc-macro2",
"quote",
"strsim 0.10.0",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]
@ -954,7 +960,7 @@ checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f"
dependencies = [
"darling_core",
"quote",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]
@ -969,7 +975,7 @@ version = "0.1.78"
dependencies = [
"itertools 0.12.1",
"quote",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]
@ -1010,7 +1016,7 @@ dependencies = [
"darling",
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]
@ -1020,7 +1026,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "206868b8242f27cecce124c19fd88157fbd0dd334df2587f36417bafbc85097b"
dependencies = [
"derive_builder_core",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]
@ -1043,7 +1049,7 @@ dependencies = [
"darling",
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]
@ -1132,7 +1138,7 @@ checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]
@ -1217,9 +1223,9 @@ dependencies = [
[[package]]
name = "env_logger"
version = "0.11.2"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c012a26a7f605efc424dd53697843a72be7dc86ad2d01f7814337794a12231d"
checksum = "38b35839ba51819680ba087cd351788c9a3c476841207e0b8cee0b04722343b9"
dependencies = [
"anstream",
"anstyle",
@ -1478,7 +1484,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]
@ -1603,9 +1609,9 @@ dependencies = [
[[package]]
name = "h2"
version = "0.3.24"
version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9"
checksum = "4fbd2820c5e49886948654ab546d0688ff24530286bdcf8fca3cefb16d4618eb"
dependencies = [
"bytes",
"fnv",
@ -1653,6 +1659,12 @@ version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
[[package]]
name = "heck"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "hermit-abi"
version = "0.3.9"
@ -1712,9 +1724,9 @@ dependencies = [
[[package]]
name = "http"
version = "0.2.11"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb"
checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1"
dependencies = [
"bytes",
"fnv",
@ -1910,7 +1922,7 @@ checksum = "d2abdd3a62551e8337af119c5899e600ca0c88ec8f23a46c60ba216c803dcf1a"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]
@ -2090,9 +2102,9 @@ dependencies = [
[[package]]
name = "js-sys"
version = "0.3.68"
version = "0.3.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "406cda4b368d531c842222cf9d2600a9a4acce8d29423695379c6868a143a9ee"
checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d"
dependencies = [
"wasm-bindgen",
]
@ -2188,9 +2200,9 @@ dependencies = [
[[package]]
name = "libloading"
version = "0.8.2"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2caa5afb8bf9f3a2652760ce7d4f62d21c4d5a423e68466fca30df82f2330164"
checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19"
dependencies = [
"cfg-if",
"windows-targets 0.52.4",
@ -2524,18 +2536,19 @@ dependencies = [
[[package]]
name = "new_debug_unreachable"
version = "1.0.4"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54"
checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086"
[[package]]
name = "nix"
version = "0.27.1"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053"
checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4"
dependencies = [
"bitflags 2.4.2",
"cfg-if",
"cfg_aliases",
"libc",
]
@ -2676,7 +2689,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]
@ -2869,7 +2882,7 @@ dependencies = [
"pest_meta",
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]
@ -2992,9 +3005,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068"
[[package]]
name = "proc-macro2"
version = "1.0.78"
version = "1.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae"
checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e"
dependencies = [
"unicode-ident",
]
@ -3261,9 +3274,9 @@ dependencies = [
[[package]]
name = "reqwest"
version = "0.11.24"
version = "0.11.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c6920094eb85afde5e4a138be3f2de8bbdf28000f0029e72c45025a56b042251"
checksum = "78bf93c4af7a8bb7d879d51cebe797356ff10ae8516ace542b5182d9dcac10b2"
dependencies = [
"base64",
"bytes",
@ -3903,7 +3916,7 @@ dependencies = [
"fluent-syntax",
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
"unic-langid",
]
@ -4038,7 +4051,7 @@ version = "0.0.0"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
"synstructure",
]
@ -4184,7 +4197,7 @@ version = "0.0.0"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
"synstructure",
]
@ -4809,7 +4822,7 @@ dependencies = [
"proc-macro2",
"quote",
"serde",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]
@ -4981,7 +4994,7 @@ checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]
@ -5259,7 +5272,7 @@ version = "0.24.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59"
dependencies = [
"heck",
"heck 0.4.1",
"proc-macro2",
"quote",
"rustversion",
@ -5288,9 +5301,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.52"
version = "2.0.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07"
checksum = "7383cd0e49fff4b6b90ca5670bfd3e9d6a733b3f90c686605aa7eec8c4996032"
dependencies = [
"proc-macro2",
"quote",
@ -5311,14 +5324,14 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]
name = "sysinfo"
version = "0.30.6"
version = "0.30.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6746919caf9f2a85bff759535664c060109f21975c5ac2e8652e60102bd4d196"
checksum = "0c385888ef380a852a16209afc8cfad22795dd8873d69c9a14d2e2088f118d18"
dependencies = [
"cfg-if",
"core-foundation-sys",
@ -5475,22 +5488,22 @@ checksum = "a38c90d48152c236a3ab59271da4f4ae63d678c5d7ad6b7714d7cb9760be5e4b"
[[package]]
name = "thiserror"
version = "1.0.57"
version = "1.0.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b"
checksum = "03468839009160513471e86a034bb2c5c0e4baae3b43f79ffc55c4a5427b3297"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.57"
version = "1.0.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81"
checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]
@ -5609,7 +5622,6 @@ dependencies = [
"bytes",
"libc",
"mio",
"num_cpus",
"pin-project-lite",
"socket2",
"windows-sys 0.48.0",
@ -5714,7 +5726,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]
@ -5931,7 +5943,7 @@ checksum = "fea2a4c80deb4fb3ca51f66b5e2dd91e3642bbce52234bcf22e41668281208e4"
dependencies = [
"proc-macro-hack",
"quote",
"syn 2.0.52",
"syn 2.0.53",
"unic-langid-impl",
]
@ -6144,9 +6156,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen"
version = "0.2.91"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1e124130aee3fb58c5bdd6b639a0509486b0338acaaae0c84a5124b0f588b7f"
checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
dependencies = [
"cfg-if",
"wasm-bindgen-macro",
@ -6154,24 +6166,24 @@ dependencies = [
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.91"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9e7e1900c352b609c8488ad12639a311045f40a35491fb69ba8c12f758af70b"
checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
dependencies = [
"bumpalo",
"log",
"once_cell",
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-futures"
version = "0.4.41"
version = "0.4.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "877b9c3f61ceea0e56331985743b13f3d25c406a7098d45180fb5f09bc19ed97"
checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0"
dependencies = [
"cfg-if",
"js-sys",
@ -6181,9 +6193,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.91"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b30af9e2d358182b5c7449424f017eba305ed32a7010509ede96cdc4696c46ed"
checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
@ -6191,22 +6203,22 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.91"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66"
checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.91"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4f186bd2dcf04330886ce82d6f33dd75a7bfcf69ecf5763b89fcde53b6ac9838"
checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
[[package]]
name = "wasm-encoder"
@ -6229,9 +6241,9 @@ dependencies = [
[[package]]
name = "web-sys"
version = "0.3.68"
version = "0.3.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96565907687f7aceb35bc5fc03770a8a0471d82e479f25832f54a0e3f4b28446"
checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef"
dependencies = [
"js-sys",
"wasm-bindgen",
@ -6288,7 +6300,7 @@ dependencies = [
"rayon",
"serde",
"serde_json",
"syn 2.0.52",
"syn 2.0.53",
"windows-metadata",
]
@ -6542,7 +6554,7 @@ checksum = "9e6936f0cce458098a201c245a11bef556c6a0181129c7034d10d76d1ec3a2b8"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
"synstructure",
]
@ -6563,7 +6575,7 @@ checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]
@ -6583,7 +6595,7 @@ checksum = "e6a647510471d372f2e6c2e6b7219e44d8c574d24fdc11c610a61455782f18c3"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
"synstructure",
]
@ -6606,7 +6618,7 @@ checksum = "7b4e5997cbf58990550ef1f0e5124a05e47e1ebd33a84af25739be6031a62c20"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.52",
"syn 2.0.53",
]
[[package]]


@ -154,11 +154,11 @@ toolchain.
however this is not recommended as it's excruciatingly slow, and not frequently
tested for compatibility.
2. Start a MINGW64 or MINGW32 shell (depending on whether you want 32-bit
3. Start a MINGW64 or MINGW32 shell (depending on whether you want 32-bit
or 64-bit Rust) either from your start menu, or by running `mingw64.exe`
or `mingw32.exe` from your MSYS2 installation directory (e.g. `C:\msys64`).
3. From this terminal, install the required tools:
4. From this terminal, install the required tools:
```sh
# Update package mirrors (may be needed if you have a fresh install of MSYS2)
@ -178,7 +178,7 @@ toolchain.
mingw-w64-x86_64-ninja
```
4. Navigate to Rust's source code (or clone it), then build it:
5. Navigate to Rust's source code (or clone it), then build it:
```sh
python x.py setup dist && python x.py build && python x.py install


@ -1,3 +1,120 @@
Version 1.77.0 (2024-03-21)
==========================
<a id="1.77.0-Language"></a>
Language
--------
- [Reveal opaque types within the defining body for exhaustiveness checking.](https://github.com/rust-lang/rust/pull/116821/)
- [Stabilize C-string literals.](https://github.com/rust-lang/rust/pull/117472/)
- [Stabilize THIR unsafeck.](https://github.com/rust-lang/rust/pull/117673/)
- [Add lint `static_mut_refs` to warn on references to mutable statics.](https://github.com/rust-lang/rust/pull/117556/)
- [Support async recursive calls (as long as they have indirection).](https://github.com/rust-lang/rust/pull/117703/)
- [Undeprecate lint `unstable_features` and make use of it in the compiler.](https://github.com/rust-lang/rust/pull/118639/)
- [Make inductive cycles in coherence ambiguous always.](https://github.com/rust-lang/rust/pull/118649/)
- [Get rid of type-driven traversal in const-eval interning](https://github.com/rust-lang/rust/pull/119044/),
only as a [future compatibility lint](https://github.com/rust-lang/rust/pull/122204) for now.
- [Deny braced macro invocations in let-else.](https://github.com/rust-lang/rust/pull/119062/)
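A quick, hedged illustration of two of the language changes above (C-string literals and async recursive calls through indirection), written as a minimal sketch assuming a 1.77+ toolchain; the `countdown` function is made up for the example:

```rust
use std::ffi::CStr;

// C-string literals produce `&'static CStr` values with a trailing NUL byte.
const GREETING: &CStr = c"hello";

// Async functions may now call themselves recursively, provided the recursive
// call goes through indirection such as `Box::pin`.
async fn countdown(n: u64) -> u64 {
    if n == 0 { 0 } else { Box::pin(countdown(n - 1)).await }
}

fn main() {
    println!("{:?}", GREETING);
}
```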
<a id="1.77.0-Compiler"></a>
Compiler
--------
- [Include lint `soft_unstable` in future breakage reports.](https://github.com/rust-lang/rust/pull/116274/)
- [Make `i128` and `u128` 16-byte aligned on x86-based targets.](https://github.com/rust-lang/rust/pull/116672/)
- [Use `--verbose` in diagnostic output.](https://github.com/rust-lang/rust/pull/119129/)
- [Improve spacing between printed tokens.](https://github.com/rust-lang/rust/pull/120227/)
- [Merge the `unused_tuple_struct_fields` lint into `dead_code`.](https://github.com/rust-lang/rust/pull/118297/)
- [Error on incorrect implied bounds in well-formedness check](https://github.com/rust-lang/rust/pull/118553/),
with a temporary exception for Bevy.
- [Fix coverage instrumentation/reports for non-ASCII source code.](https://github.com/rust-lang/rust/pull/119033/)
- [Fix `fn`/`const` items implied bounds and well-formedness check.](https://github.com/rust-lang/rust/pull/120019/)
- [Promote `riscv32{im|imafc}-unknown-none-elf` targets to tier 2.](https://github.com/rust-lang/rust/pull/118704/)
- Add several new tier 3 targets:
- [`aarch64-unknown-illumos`](https://github.com/rust-lang/rust/pull/112936/)
- [`hexagon-unknown-none-elf`](https://github.com/rust-lang/rust/pull/117601/)
- [`riscv32imafc-esp-espidf`](https://github.com/rust-lang/rust/pull/119738/)
- [`riscv32im-risc0-zkvm-elf`](https://github.com/rust-lang/rust/pull/117958/)
Refer to Rust's [platform support page][platform-support-doc]
for more information on Rust's tiered platform support.
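As a hedged illustration of the `i128`/`u128` alignment change above, a one-liner that simply reports the alignment on the current target (16 on x86-64 with a 1.77+ toolchain):

```rust
use std::mem::align_of;

fn main() {
    // On x86-based targets, `i128`/`u128` are now 16-byte aligned.
    println!("align_of::<u128>() = {}", align_of::<u128>());
}
```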
<a id="1.77.0-Libraries"></a>
Libraries
---------
- [Implement `From<&[T; N]>` for `Cow<[T]>`.](https://github.com/rust-lang/rust/pull/113489/)
- [Remove special-case handling of `vec.split_off(0)`.](https://github.com/rust-lang/rust/pull/119917/)
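A tiny, hedged sketch of the new `Cow` conversion above (assuming a 1.77+ toolchain):

```rust
use std::borrow::Cow;

fn main() {
    let arr = [1, 2, 3];
    // `From<&[T; N]> for Cow<[T]>` converts an array reference directly,
    // borrowing rather than allocating.
    let cow: Cow<'_, [i32]> = Cow::from(&arr);
    assert_eq!(cow.len(), 3);
}
```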
<a id="1.77.0-Stabilized-APIs"></a>
Stabilized APIs
---------------
- [`array::each_ref`](https://doc.rust-lang.org/stable/std/primitive.array.html#method.each_ref)
- [`array::each_mut`](https://doc.rust-lang.org/stable/std/primitive.array.html#method.each_mut)
- [`core::net`](https://doc.rust-lang.org/stable/core/net/index.html)
- [`f32::round_ties_even`](https://doc.rust-lang.org/stable/std/primitive.f32.html#method.round_ties_even)
- [`f64::round_ties_even`](https://doc.rust-lang.org/stable/std/primitive.f64.html#method.round_ties_even)
- [`mem::offset_of!`](https://doc.rust-lang.org/stable/std/mem/macro.offset_of.html)
- [`slice::first_chunk`](https://doc.rust-lang.org/stable/std/primitive.slice.html#method.first_chunk)
- [`slice::first_chunk_mut`](https://doc.rust-lang.org/stable/std/primitive.slice.html#method.first_chunk_mut)
- [`slice::split_first_chunk`](https://doc.rust-lang.org/stable/std/primitive.slice.html#method.split_first_chunk)
- [`slice::split_first_chunk_mut`](https://doc.rust-lang.org/stable/std/primitive.slice.html#method.split_first_chunk_mut)
- [`slice::last_chunk`](https://doc.rust-lang.org/stable/std/primitive.slice.html#method.last_chunk)
- [`slice::last_chunk_mut`](https://doc.rust-lang.org/stable/std/primitive.slice.html#method.last_chunk_mut)
- [`slice::split_last_chunk`](https://doc.rust-lang.org/stable/std/primitive.slice.html#method.split_last_chunk)
- [`slice::split_last_chunk_mut`](https://doc.rust-lang.org/stable/std/primitive.slice.html#method.split_last_chunk_mut)
- [`slice::chunk_by`](https://doc.rust-lang.org/stable/std/primitive.slice.html#method.chunk_by)
- [`slice::chunk_by_mut`](https://doc.rust-lang.org/stable/std/primitive.slice.html#method.chunk_by_mut)
- [`Bound::map`](https://doc.rust-lang.org/stable/std/ops/enum.Bound.html#method.map)
- [`File::create_new`](https://doc.rust-lang.org/stable/std/fs/struct.File.html#method.create_new)
- [`Mutex::clear_poison`](https://doc.rust-lang.org/stable/std/sync/struct.Mutex.html#method.clear_poison)
- [`RwLock::clear_poison`](https://doc.rust-lang.org/stable/std/sync/struct.RwLock.html#method.clear_poison)
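For context, a small hedged sketch exercising a few of the APIs stabilized above, assuming a 1.77+ toolchain; the struct and variable names are illustrative:

```rust
use std::mem::offset_of;

#[allow(dead_code)]
#[repr(C)]
struct Pair {
    a: u8,
    b: u32,
}

fn main() {
    let data = [1, 1, 2, 2, 2, 3];

    // `slice::chunk_by` groups consecutive elements by a predicate.
    let runs: Vec<&[i32]> = data.chunk_by(|x, y| x == y).collect();
    assert_eq!(runs.len(), 3);
    assert_eq!(runs[1], &[2, 2, 2]);

    // `slice::first_chunk` borrows a fixed-size prefix if the slice is long enough.
    assert_eq!(data.first_chunk::<2>(), Some(&[1, 1]));

    // `array::each_ref` turns `&[T; N]` into `[&T; N]`.
    let refs: [&i32; 6] = data.each_ref();
    assert_eq!(*refs[5], 3);

    // `mem::offset_of!` computes a field's byte offset at compile time.
    println!("offset of `b` in `Pair`: {}", offset_of!(Pair, b));
}
```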
<a id="1.77.0-Cargo"></a>
Cargo
-----
- [Extend the build directive syntax with `cargo::`.](https://github.com/rust-lang/cargo/pull/12201/)
- [Stabilize metadata `id` format as `PackageIDSpec`.](https://github.com/rust-lang/cargo/pull/12914/)
- [Pull out `cargo-util-schemas` as a crate.](https://github.com/rust-lang/cargo/pull/13178/)
- [Strip all debuginfo when debuginfo is not requested.](https://github.com/rust-lang/cargo/pull/13257/)
- [Inherit jobserver from env for all kinds of runners.](https://github.com/rust-lang/cargo/pull/12776/)
- [Deprecate rustc plugin support in cargo.](https://github.com/rust-lang/cargo/pull/13248/)
<a id="1.77.0-Rustdoc"></a>
Rustdoc
-----
- [Allows links in markdown headings.](https://github.com/rust-lang/rust/pull/117662/)
- [Search for tuples and unit by type with `()`.](https://github.com/rust-lang/rust/pull/118194/)
- [Clean up the source sidebar's hide button.](https://github.com/rust-lang/rust/pull/119066/)
- [Prevent JS injection from `localStorage`.](https://github.com/rust-lang/rust/pull/120250/)
<a id="1.77.0-Misc"></a>
Misc
----
- [Recommend version-sorting for all sorting in style guide.](https://github.com/rust-lang/rust/pull/115046/)
<a id="1.77.0-Internal-Changes"></a>
Internal Changes
----------------
These changes do not affect any public interfaces of Rust, but they represent
significant improvements to the performance or internals of rustc and related
tools.
- [Add more weirdness to `weird-exprs.rs`.](https://github.com/rust-lang/rust/pull/119028/)
Version 1.76.0 (2024-02-08)
==========================


@ -11,7 +11,7 @@
#![doc(rust_logo)]
#![allow(internal_features)]
#![feature(rustdoc_internals)]
#![feature(associated_type_bounds)]
#![cfg_attr(bootstrap, feature(associated_type_bounds))]
#![feature(associated_type_defaults)]
#![feature(box_patterns)]
#![feature(if_let_guard)]


@ -755,7 +755,7 @@ pub fn walk_assoc_item<'a, V: Visitor<'a>>(
}
AssocItemKind::Delegation(box Delegation { id, qself, path, body }) => {
if let Some(qself) = qself {
visitor.visit_ty(&qself.ty);
try_visit!(visitor.visit_ty(&qself.ty));
}
try_visit!(visitor.visit_path(path, *id));
visit_opt!(visitor, visit_block, body);
@ -994,7 +994,7 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) -> V
ExprKind::InlineAsm(asm) => try_visit!(visitor.visit_inline_asm(asm)),
ExprKind::FormatArgs(f) => try_visit!(visitor.visit_format_args(f)),
ExprKind::OffsetOf(container, fields) => {
visitor.visit_ty(container);
try_visit!(visitor.visit_ty(container));
walk_list!(visitor, visit_ident, fields.iter().copied());
}
ExprKind::Yield(optional_expression) => {


@ -604,8 +604,7 @@ fn may_contain_yield_point(e: &ast::Expr) -> bool {
if let ast::ExprKind::Await(_, _) | ast::ExprKind::Yield(_) = e.kind {
ControlFlow::Break(())
} else {
visit::walk_expr(self, e);
ControlFlow::Continue(())
visit::walk_expr(self, e)
}
}


@ -463,13 +463,6 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
constraint.span,
"return type notation is experimental"
);
} else {
gate!(
&self,
associated_type_bounds,
constraint.span,
"associated type bounds are unstable"
);
}
}
visit::walk_assoc_constraint(self, constraint)
@ -615,7 +608,6 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session, features: &Features) {
}
gate_all_legacy_dont_use!(trait_alias, "trait aliases are experimental");
gate_all_legacy_dont_use!(associated_type_bounds, "associated type bounds are unstable");
// Despite being a new feature, `where T: Trait<Assoc(): Sized>`, which is RTN syntax now,
// used to be gated under associated_type_bounds, which are right above, so RTN needs to
// be too.


@ -9,7 +9,7 @@ use rustc_data_structures::fx::FxIndexSet;
use rustc_errors::{codes::*, struct_span_code_err, Applicability, Diag, MultiSpan};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::intravisit::{walk_block, walk_expr, Visitor};
use rustc_hir::intravisit::{walk_block, walk_expr, Map, Visitor};
use rustc_hir::{CoroutineDesugaring, PatField};
use rustc_hir::{CoroutineKind, CoroutineSource, LangItem};
use rustc_middle::hir::nested_filter::OnlyBodies;
@ -447,8 +447,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
err.span_note(
span,
format!(
"consider changing this parameter type in {descr} `{ident}` to \
borrow instead if owning the value isn't necessary",
"consider changing this parameter type in {descr} `{ident}` to borrow \
instead if owning the value isn't necessary",
),
);
}
@ -463,7 +463,9 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
} else if let UseSpans::FnSelfUse { kind: CallKind::Normal { .. }, .. } = move_spans
{
// We already suggest cloning for these cases in `explain_captures`.
} else {
} else if self.suggest_hoisting_call_outside_loop(err, expr) {
// At the place where the type moves, suggesting a clone would be misleading.
// #121466
self.suggest_cloning(err, ty, expr, move_span);
}
}
@ -747,6 +749,234 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
true
}
/// In a move error that occurs on a call within a loop, we try to identify cases where cloning
/// the value would lead to a logic error. We infer these cases by seeing if the moved value is
/// part of the logic to break the loop, either through an explicit `break` or if the expression
/// is part of a `while let`.
fn suggest_hoisting_call_outside_loop(&self, err: &mut Diag<'_>, expr: &hir::Expr<'_>) -> bool {
let tcx = self.infcx.tcx;
let mut can_suggest_clone = true;
// If the moved value is a locally declared binding, we'll look upwards on the expression
// tree until the scope where it is defined, and no further, as suggesting to move the
// expression beyond that point would be illogical.
let local_hir_id = if let hir::ExprKind::Path(hir::QPath::Resolved(
_,
hir::Path { res: hir::def::Res::Local(local_hir_id), .. },
)) = expr.kind
{
Some(local_hir_id)
} else {
// This case would be if the moved value comes from an argument binding, we'll just
// look within the entire item, that's fine.
None
};
/// This will allow us to look for a specific `HirId`, in our case `local_hir_id` where the
/// binding was declared, within any other expression. We'll use it to search for the
/// binding declaration within every scope we inspect.
struct Finder {
hir_id: hir::HirId,
found: bool,
}
impl<'hir> Visitor<'hir> for Finder {
fn visit_pat(&mut self, pat: &'hir hir::Pat<'hir>) {
if pat.hir_id == self.hir_id {
self.found = true;
}
hir::intravisit::walk_pat(self, pat);
}
fn visit_expr(&mut self, ex: &'hir hir::Expr<'hir>) {
if ex.hir_id == self.hir_id {
self.found = true;
}
hir::intravisit::walk_expr(self, ex);
}
}
// The immediate HIR parent of the moved expression. We'll look for it to be a call.
let mut parent = None;
// The top-most loop where the moved expression could be moved to a new binding.
let mut outer_most_loop: Option<&hir::Expr<'_>> = None;
for (_, node) in tcx.hir().parent_iter(expr.hir_id) {
let e = match node {
hir::Node::Expr(e) => e,
hir::Node::Local(hir::Local { els: Some(els), .. }) => {
let mut finder = BreakFinder { found_breaks: vec![], found_continues: vec![] };
finder.visit_block(els);
if !finder.found_breaks.is_empty() {
// Don't suggest clone, as it would likely end in an infinite
// loop.
// let Some(_) = foo(non_copy.clone()) else { break; }
// --- ^^^^^^^^ -----
can_suggest_clone = false;
}
continue;
}
_ => continue,
};
if let Some(&hir_id) = local_hir_id {
let mut finder = Finder { hir_id, found: false };
finder.visit_expr(e);
if finder.found {
// The current scope includes the declaration of the binding we're accessing, so we
// can't look up any further for loops.
break;
}
}
if parent.is_none() {
parent = Some(e);
}
match e.kind {
hir::ExprKind::Let(_) => {
match tcx.parent_hir_node(e.hir_id) {
hir::Node::Expr(hir::Expr {
kind: hir::ExprKind::If(cond, ..), ..
}) => {
let mut finder = Finder { hir_id: expr.hir_id, found: false };
finder.visit_expr(cond);
if finder.found {
// The expression where the move error happened is in a `while let`
// condition. Don't suggest clone, as it will likely end in an
// infinite loop.
// while let Some(_) = foo(non_copy.clone()) { }
// --------- ^^^^^^^^
can_suggest_clone = false;
}
}
_ => {}
}
}
hir::ExprKind::Loop(..) => {
outer_most_loop = Some(e);
}
_ => {}
}
}
let loop_count: usize = tcx
.hir()
.parent_iter(expr.hir_id)
.map(|(_, node)| match node {
hir::Node::Expr(hir::Expr { kind: hir::ExprKind::Loop(..), .. }) => 1,
_ => 0,
})
.sum();
let sm = tcx.sess.source_map();
if let Some(in_loop) = outer_most_loop {
let mut finder = BreakFinder { found_breaks: vec![], found_continues: vec![] };
finder.visit_expr(in_loop);
// All of the spans for `break` and `continue` expressions.
let spans = finder
.found_breaks
.iter()
.chain(finder.found_continues.iter())
.map(|(_, span)| *span)
.filter(|span| {
!matches!(
span.desugaring_kind(),
Some(DesugaringKind::ForLoop | DesugaringKind::WhileLoop)
)
})
.collect::<Vec<Span>>();
// All of the spans for the loops above the expression with the move error.
let loop_spans: Vec<_> = tcx
.hir()
.parent_iter(expr.hir_id)
.filter_map(|(_, node)| match node {
hir::Node::Expr(hir::Expr { span, kind: hir::ExprKind::Loop(..), .. }) => {
Some(*span)
}
_ => None,
})
.collect();
// It is possible that a user-written `break` or `continue` is in the wrong place. We
// point them out to the user so they can make a determination. (#92531)
if !spans.is_empty() && loop_count > 1 {
// Getting fancy: if the spans of the loops *do not* overlap, we only use the line
// number when referring to them. If there *are* overlaps (multiple loops on the
// same line) then we use the more verbose span output (`file.rs:line:col`).
let mut lines: Vec<_> =
loop_spans.iter().map(|sp| sm.lookup_char_pos(sp.lo()).line).collect();
lines.sort();
lines.dedup();
let fmt_span = |span: Span| {
if lines.len() == loop_spans.len() {
format!("line {}", sm.lookup_char_pos(span.lo()).line)
} else {
sm.span_to_diagnostic_string(span)
}
};
let mut spans: MultiSpan = spans.clone().into();
// Point at all the `continue`s and explicit `break`s in the relevant loops.
for (desc, elements) in [
("`break` exits", &finder.found_breaks),
("`continue` advances", &finder.found_continues),
] {
for (destination, sp) in elements {
if let Ok(hir_id) = destination.target_id
&& let hir::Node::Expr(expr) = tcx.hir().hir_node(hir_id)
&& !matches!(
sp.desugaring_kind(),
Some(DesugaringKind::ForLoop | DesugaringKind::WhileLoop)
)
{
spans.push_span_label(
*sp,
format!("this {desc} the loop at {}", fmt_span(expr.span)),
);
}
}
}
// Point at all the loops that are between this move and the parent item.
for span in loop_spans {
spans.push_span_label(sm.guess_head_span(span), "");
}
// note: verify that your loop breaking logic is correct
// --> $DIR/nested-loop-moved-value-wrong-continue.rs:41:17
// |
// 28 | for foo in foos {
// | ---------------
// ...
// 33 | for bar in &bars {
// | ----------------
// ...
// 41 | continue;
// | ^^^^^^^^ this `continue` advances the loop at line 33
err.span_note(spans, "verify that your loop breaking logic is correct");
}
if let Some(parent) = parent
&& let hir::ExprKind::MethodCall(..) | hir::ExprKind::Call(..) = parent.kind
{
// FIXME: We could check that the call's *parent* takes `&mut val` to make the
// suggestion more targeted to the `mk_iter(val).next()` case. Maybe do that only to
// check whether to suggest `let value` or `let mut value`.
let span = in_loop.span;
if !finder.found_breaks.is_empty()
&& let Ok(value) = sm.span_to_snippet(parent.span)
{
// We know with high certainty that this move would affect the early return of a
// loop, so we suggest moving the expression with the move out of the loop.
let indent = if let Some(indent) = sm.indentation_before(span) {
format!("\n{indent}")
} else {
" ".to_string()
};
err.multipart_suggestion(
"consider moving the expression out of the loop so it is only moved once",
vec![
(parent.span, "value".to_string()),
(span.shrink_to_lo(), format!("let mut value = {value};{indent}")),
],
Applicability::MaybeIncorrect,
);
}
}
}
can_suggest_clone
}
fn suggest_cloning(&self, err: &mut Diag<'_>, ty: Ty<'tcx>, expr: &hir::Expr<'_>, span: Span) {
let tcx = self.infcx.tcx;
// Try to find predicates on *generic params* that would allow copying `ty`
@ -3688,6 +3918,28 @@ impl<'a, 'v> Visitor<'v> for ReferencedStatementsVisitor<'a> {
}
}
/// Look for `break` expressions within any arbitrary expressions. We'll do this to infer
/// whether this is a case where the moved value would affect the exit of a loop, making it
/// unsuitable for a `.clone()` suggestion.
struct BreakFinder {
found_breaks: Vec<(hir::Destination, Span)>,
found_continues: Vec<(hir::Destination, Span)>,
}
impl<'hir> Visitor<'hir> for BreakFinder {
fn visit_expr(&mut self, ex: &'hir hir::Expr<'hir>) {
match ex.kind {
hir::ExprKind::Break(destination, _) => {
self.found_breaks.push((destination, ex.span));
}
hir::ExprKind::Continue(destination) => {
self.found_continues.push((destination, ex.span));
}
_ => {}
}
hir::intravisit::walk_expr(self, ex);
}
}
/// Given a set of spans representing statements initializing the relevant binding, visit all the
/// function expressions looking for branching code paths that *do not* initialize the binding.
struct ConditionVisitor<'b> {
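For orientation, here is a hedged sketch (not taken from this diff; names are illustrative, and it intentionally fails to compile) of the kind of user code the new `suggest_hoisting_call_outside_loop` logic targets: a non-`Copy` value is moved into a call inside a loop whose `break` depends on the call's result, so blindly suggesting `.clone()` could mask a logic error, and hoisting the call out of the loop is suggested instead:

```rust
fn send(query: String) -> Option<u32> {
    Some(query.len() as u32)
}

fn main() {
    let query = String::from("ping");
    loop {
        // E0382: `query` is moved into `send` on the first iteration, so the
        // next iteration would use a moved value. Because the `break` below
        // depends on the call's result, the new logic can propose hoisting,
        // e.g. `let mut value = send(query);` above the loop, rather than
        // suggesting `.clone()`.
        if let Some(n) = send(query) {
            if n > 3 {
                break;
            }
        }
    }
}
```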


@ -4,7 +4,7 @@
#![feature(rustdoc_internals)]
#![doc(rust_logo)]
#![feature(assert_matches)]
#![feature(associated_type_bounds)]
#![cfg_attr(bootstrap, feature(associated_type_bounds))]
#![feature(box_patterns)]
#![feature(control_flow_enum)]
#![feature(let_chains)]


@ -74,7 +74,7 @@ pub(crate) fn eval_mir_constant<'tcx>(
let cv = fx.monomorphize(constant.const_);
// This cannot fail because we checked all required_consts in advance.
let val = cv
.eval(fx.tcx, ty::ParamEnv::reveal_all(), Some(constant.span))
.eval(fx.tcx, ty::ParamEnv::reveal_all(), constant.span)
.expect("erroneous constant missed by mono item collection");
(val, cv.ty())
}


@ -728,8 +728,10 @@ fn codegen_regular_intrinsic_call<'tcx>(
| sym::variant_count => {
intrinsic_args!(fx, args => (); intrinsic);
let const_val =
fx.tcx.const_eval_instance(ParamEnv::reveal_all(), instance, None).unwrap();
let const_val = fx
.tcx
.const_eval_instance(ParamEnv::reveal_all(), instance, source_info.span)
.unwrap();
let val = crate::constant::codegen_const_value(fx, const_val, ret.layout().ty);
ret.write_cvalue(fx, val);
}


@ -131,7 +131,7 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
let idx = generic_args[2]
.expect_const()
.eval(fx.tcx, ty::ParamEnv::reveal_all(), Some(span))
.eval(fx.tcx, ty::ParamEnv::reveal_all(), span)
.unwrap()
.unwrap_branch();


@ -19,11 +19,11 @@
#![feature(
rustc_private,
decl_macro,
associated_type_bounds,
never_type,
trusted_len,
hash_raw_entry
)]
#![cfg_attr(bootstrap, feature(associated_type_bounds))]
#![allow(broken_intra_doc_links)]
#![recursion_limit = "256"]
#![warn(rust_2018_idioms)]


@ -126,17 +126,6 @@ pub unsafe fn create_module<'ll>(
let mut target_data_layout = sess.target.data_layout.to_string();
let llvm_version = llvm_util::get_version();
if llvm_version < (17, 0, 0) {
if sess.target.arch.starts_with("powerpc") {
// LLVM 17 specifies function pointer alignment for ppc:
// https://reviews.llvm.org/D147016
target_data_layout = target_data_layout
.replace("-Fn32", "")
.replace("-Fi32", "")
.replace("-Fn64", "")
.replace("-Fi64", "");
}
}
if llvm_version < (18, 0, 0) {
if sess.target.arch == "x86" || sess.target.arch == "x86_64" {
// LLVM 18 adjusts i128 to be 128-bit aligned on x86 variants.


@ -1129,7 +1129,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
if name == sym::simd_shuffle_generic {
let idx = fn_args[2]
.expect_const()
.eval(tcx, ty::ParamEnv::reveal_all(), Some(span))
.eval(tcx, ty::ParamEnv::reveal_all(), span)
.unwrap()
.unwrap_branch();
let n = idx.len() as u64;


@ -1519,7 +1519,7 @@ extern "C" {
#[link(name = "llvm-wrapper", kind = "static")]
extern "C" {
pub fn LLVMRustInstallFatalErrorHandler();
pub fn LLVMRustInstallErrorHandlers();
pub fn LLVMRustDisableSystemDialogsOnCrash();
// Create and destroy contexts.


@ -49,7 +49,7 @@ unsafe fn configure_llvm(sess: &Session) {
let mut llvm_c_strs = Vec::with_capacity(n_args + 1);
let mut llvm_args = Vec::with_capacity(n_args + 1);
llvm::LLVMRustInstallFatalErrorHandler();
llvm::LLVMRustInstallErrorHandlers();
// On Windows, an LLVM assertion will open an Abort/Retry/Ignore dialog
// box for the purpose of launching a debugger. However, on CI this will
// cause it to hang until it times out, which can take several hours.


@ -19,6 +19,7 @@ use rustc_hir::{CoroutineDesugaring, CoroutineKind, CoroutineSource, Mutability}
use rustc_middle::ty::layout::{IntegerExt, TyAndLayout};
use rustc_middle::ty::{self, ExistentialProjection, ParamEnv, Ty, TyCtxt};
use rustc_middle::ty::{GenericArgKind, GenericArgsRef};
use rustc_span::DUMMY_SP;
use rustc_target::abi::Integer;
use smallvec::SmallVec;
@ -704,7 +705,7 @@ fn push_const_param<'tcx>(tcx: TyCtxt<'tcx>, ct: ty::Const<'tcx>, output: &mut S
// avoiding collisions and will make the emitted type names shorter.
let hash_short = tcx.with_stable_hashing_context(|mut hcx| {
let mut hasher = StableHasher::new();
let ct = ct.eval(tcx, ty::ParamEnv::reveal_all(), None).unwrap();
let ct = ct.eval(tcx, ty::ParamEnv::reveal_all(), DUMMY_SP).unwrap();
hcx.while_hashing_spans(false, |hcx| ct.hash_stable(hcx, &mut hasher));
hasher.finish::<Hash64>()
});


@ -4,7 +4,7 @@
#![allow(internal_features)]
#![allow(rustc::diagnostic_outside_of_impl)]
#![allow(rustc::untranslatable_diagnostic)]
#![feature(associated_type_bounds)]
#![cfg_attr(bootstrap, feature(associated_type_bounds))]
#![feature(box_patterns)]
#![feature(if_let_guard)]
#![feature(let_chains)]


@ -1688,7 +1688,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
pub fn try_llbb(&mut self, bb: mir::BasicBlock) -> Option<Bx::BasicBlock> {
match self.cached_llbbs[bb] {
CachedLlbb::None => {
// FIXME(eddyb) only name the block if `fewer_names` is `false`.
let llbb = Bx::append_block(self.cx, self.llfn, &format!("{bb:?}"));
self.cached_llbbs[bb] = CachedLlbb::Some(llbb);
Some(llbb)


@ -24,7 +24,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// `MirUsedCollector` visited all required_consts before codegen began, so if we got here
// there can be no more constants that fail to evaluate.
self.monomorphize(constant.const_)
.eval(self.cx.tcx(), ty::ParamEnv::reveal_all(), Some(constant.span))
.eval(self.cx.tcx(), ty::ParamEnv::reveal_all(), constant.span)
.expect("erroneous constant missed by mono item collection")
}
@ -56,11 +56,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
other => span_bug!(constant.span, "{other:#?}"),
};
let uv = self.monomorphize(uv);
self.cx.tcx().const_eval_resolve_for_typeck(
ty::ParamEnv::reveal_all(),
uv,
Some(constant.span),
)
self.cx.tcx().const_eval_resolve_for_typeck(ty::ParamEnv::reveal_all(), uv, constant.span)
}
/// process constant containing SIMD shuffle indices


@ -130,7 +130,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
| sym::variant_count => {
let value = bx
.tcx()
.const_eval_instance(ty::ParamEnv::reveal_all(), instance, None)
.const_eval_instance(ty::ParamEnv::reveal_all(), instance, span)
.unwrap();
OperandRef::from_const(bx, value, ret_ty).immediate_or_packed_pair(bx)
}


@ -826,7 +826,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
for &const_ in &body.required_consts {
let c = self
.instantiate_from_current_frame_and_normalize_erasing_regions(const_.const_)?;
c.eval(*self.tcx, self.param_env, Some(const_.span)).map_err(|err| {
c.eval(*self.tcx, self.param_env, const_.span).map_err(|err| {
err.emit_note(*self.tcx);
err
})?;
@ -1174,7 +1174,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
pub fn eval_mir_constant(
&self,
val: &mir::Const<'tcx>,
span: Option<Span>,
span: Span,
layout: Option<TyAndLayout<'tcx>>,
) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
M::eval_mir_constant(self, *val, span, layout, |ecx, val, span, layout| {


@ -153,9 +153,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
sym::type_name => Ty::new_static_str(self.tcx.tcx),
_ => bug!(),
};
let val = self.ctfe_query(|tcx| {
tcx.const_eval_global_id(self.param_env, gid, Some(tcx.span))
})?;
let val =
self.ctfe_query(|tcx| tcx.const_eval_global_id(self.param_env, gid, tcx.span))?;
let val = self.const_val_to_op(val, ty, Some(dest.layout))?;
self.copy_op(&val, dest)?;
}


@ -525,7 +525,7 @@ pub trait Machine<'mir, 'tcx: 'mir>: Sized {
fn eval_mir_constant<F>(
ecx: &InterpCx<'mir, 'tcx, Self>,
val: mir::Const<'tcx>,
span: Option<Span>,
span: Span,
layout: Option<TyAndLayout<'tcx>>,
eval: F,
) -> InterpResult<'tcx, OpTy<'tcx, Self::Provenance>>
@ -533,7 +533,7 @@ pub trait Machine<'mir, 'tcx: 'mir>: Sized {
F: Fn(
&InterpCx<'mir, 'tcx, Self>,
mir::Const<'tcx>,
Option<Span>,
Span,
Option<TyAndLayout<'tcx>>,
) -> InterpResult<'tcx, OpTy<'tcx, Self::Provenance>>,
{


@ -741,7 +741,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// * During ConstProp, with `TooGeneric` or since the `required_consts` were not all
// checked yet.
// * During CTFE, since promoteds in `const`/`static` initializer bodies can fail.
self.eval_mir_constant(&c, Some(constant.span), layout)?
self.eval_mir_constant(&c, constant.span, layout)?
}
};
trace!("{:?}: {:?}", mir_op, op);


@ -99,7 +99,7 @@ impl<'tcx> NonConstOp<'tcx> for FnCallNonConst<'tcx> {
#[allow(rustc::untranslatable_diagnostic)]
fn build_error(&self, ccx: &ConstCx<'_, 'tcx>, _: Span) -> Diag<'tcx> {
let FnCallNonConst { caller, callee, args, span, call_source, feature } = *self;
let ConstCx { tcx, param_env, .. } = *ccx;
let ConstCx { tcx, param_env, body, .. } = *ccx;
let diag_trait = |err, self_ty: Ty<'_>, trait_id| {
let trait_ref = TraitRef::from_method(tcx, trait_id, args);
@ -297,10 +297,12 @@ impl<'tcx> NonConstOp<'tcx> for FnCallNonConst<'tcx> {
ccx.const_kind(),
));
if let Some(feature) = feature
&& ccx.tcx.sess.is_nightly_build()
{
err.help(format!("add `#![feature({feature})]` to the crate attributes to enable",));
if let Some(feature) = feature {
ccx.tcx.disabled_nightly_features(
&mut err,
body.source.def_id().as_local().map(|local| ccx.tcx.local_def_id_to_hir_id(local)),
[(String::new(), feature)],
);
}
if let ConstContext::Static(_) = ccx.const_kind() {


@ -341,7 +341,7 @@ impl<'a, 'tcx> Visitor<'tcx> for CfgChecker<'a, 'tcx> {
// FIXME(JakobDegen) The validator should check that `self.mir_phase <
// DropsLowered`. However, this causes ICEs with generation of drop shims, which
// seem to fail to set their `MirPhase` correctly.
if matches!(kind, RetagKind::Raw | RetagKind::TwoPhase) {
if matches!(kind, RetagKind::TwoPhase) {
self.fail(location, format!("explicit `{kind:?}` is forbidden"));
}
}
@ -1272,7 +1272,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
// FIXME(JakobDegen) The validator should check that `self.mir_phase <
// DropsLowered`. However, this causes ICEs with generation of drop shims, which
// seem to fail to set their `MirPhase` correctly.
if matches!(kind, RetagKind::Raw | RetagKind::TwoPhase) {
if matches!(kind, RetagKind::TwoPhase) {
self.fail(location, format!("explicit `{kind:?}` is forbidden"));
}
}


@ -3,8 +3,6 @@ An associated type value was specified more than once.
Erroneous code example:
```compile_fail,E0719
#![feature(associated_type_bounds)]
trait FooTrait {}
trait BarTrait {}
@ -19,8 +17,6 @@ specify the associated type with the new trait.
Corrected example:
```
#![feature(associated_type_bounds)]
trait FooTrait {}
trait BarTrait {}
trait FooBarTrait: FooTrait + BarTrait {}


@ -1,7 +1,7 @@
#![doc(rust_logo)]
#![feature(rustdoc_internals)]
#![feature(array_windows)]
#![feature(associated_type_bounds)]
#![cfg_attr(bootstrap, feature(associated_type_bounds))]
#![feature(associated_type_defaults)]
#![feature(if_let_guard)]
#![feature(let_chains)]


@ -59,6 +59,8 @@ declare_features! (
(accepted, asm_sym, "1.66.0", Some(93333)),
/// Allows the definition of associated constants in `trait` or `impl` blocks.
(accepted, associated_consts, "1.20.0", Some(29646)),
/// Allows the use of associated type bounds.
(accepted, associated_type_bounds, "CURRENT_RUSTC_VERSION", Some(52662)),
/// Allows using associated `type`s in `trait`s.
(accepted, associated_types, "1.0.0", None),
/// Allows free and inherent `async fn`s, `async` blocks, and `<expr>.await` expressions.
@ -203,6 +205,8 @@ declare_features! (
(accepted, impl_header_lifetime_elision, "1.31.0", Some(15872)),
/// Allows referencing `Self` and projections in impl-trait.
(accepted, impl_trait_projections, "1.74.0", Some(103532)),
/// Allows using imported `main` function
(accepted, imported_main, "CURRENT_RUSTC_VERSION", Some(28937)),
/// Allows using `a..=b` and `..=b` as inclusive range syntaxes.
(accepted, inclusive_range_syntax, "1.26.0", Some(28237)),
/// Allows inferring outlives requirements (RFC 2093).


@ -351,8 +351,6 @@ declare_features! (
(unstable, asm_unwind, "1.58.0", Some(93334)),
/// Allows users to enforce equality of associated constants `TraitImpl<AssocConst=3>`.
(unstable, associated_const_equality, "1.58.0", Some(92827)),
/// Allows the use of associated type bounds.
(unstable, associated_type_bounds, "1.34.0", Some(52662)),
/// Allows associated type defaults.
(unstable, associated_type_defaults, "1.2.0", Some(29661)),
/// Allows `async || body` closures.
@ -495,8 +493,6 @@ declare_features! (
(unstable, impl_trait_in_assoc_type, "1.70.0", Some(63063)),
/// Allows `impl Trait` as output type in `Fn` traits in return position of functions.
(unstable, impl_trait_in_fn_trait_return, "1.64.0", Some(99697)),
/// Allows using imported `main` function
(unstable, imported_main, "1.53.0", Some(28937)),
/// Allows associated types in inherent impls.
(incomplete, inherent_associated_types, "1.52.0", Some(8995)),
/// Allow anonymous constants from an inline `const` block


@ -2049,7 +2049,7 @@ impl LoopSource {
}
}
#[derive(Copy, Clone, Debug, HashStable_Generic)]
#[derive(Copy, Clone, Debug, PartialEq, HashStable_Generic)]
pub enum LoopIdError {
OutsideLoopScope,
UnlabeledCfInWhileCondition,


@ -899,7 +899,7 @@ pub fn walk_generic_param<'v, V: Visitor<'v>>(
GenericParamKind::Const { ref ty, ref default, is_host_effect: _ } => {
try_visit!(visitor.visit_ty(ty));
if let Some(ref default) = default {
visitor.visit_const_param_default(param.hir_id, default);
try_visit!(visitor.visit_const_param_default(param.hir_id, default));
}
}
}


@ -118,6 +118,11 @@ hir_analysis_enum_discriminant_overflowed = enum discriminant overflowed
.label = overflowed on value after {$discr}
.note = explicitly set `{$item_name} = {$wrapped_discr}` if that is desired outcome
hir_analysis_escaping_bound_var_in_ty_of_assoc_const_binding =
the type of the associated constant `{$assoc_const}` cannot capture late-bound generic parameters
.label = its type cannot capture the late-bound {$var_def_kind} `{$var_name}`
.var_defined_here_label = the late-bound {$var_def_kind} `{$var_name}` is defined here
hir_analysis_field_already_declared =
field `{$field_name}` is already declared
.label = field already declared
@ -316,6 +321,22 @@ hir_analysis_opaque_captures_higher_ranked_lifetime = `impl Trait` cannot captur
.label = `impl Trait` implicitly captures all lifetimes in scope
.note = lifetime declared here
hir_analysis_param_in_ty_of_assoc_const_binding =
the type of the associated constant `{$assoc_const}` must not depend on {$param_category ->
[self] `Self`
[synthetic] `impl Trait`
*[normal] generic parameters
}
.label = its type must not depend on {$param_category ->
[self] `Self`
[synthetic] `impl Trait`
*[normal] the {$param_def_kind} `{$param_name}`
}
.param_defined_here_label = {$param_category ->
[synthetic] the `impl Trait` is specified here
*[normal] the {$param_def_kind} `{$param_name}` is defined here
}
hir_analysis_paren_sugar_attribute = the `#[rustc_paren_sugar]` attribute is a temporary means of controlling which traits can use parenthetical notation
.help = add `#![feature(unboxed_closures)]` to the crate attributes to use it
@ -432,6 +453,8 @@ hir_analysis_transparent_non_zero_sized_enum = the variant of a transparent {$de
.label = needs at most one field with non-trivial size or alignment, but has {$field_count}
.labels = this field has non-zero size or requires alignment
hir_analysis_ty_of_assoc_const_binding_note = `{$assoc_const}` has type `{$ty}`
hir_analysis_ty_param_first_local = type parameter `{$param_ty}` must be covered by another type when it appears before the first local type (`{$local_type}`)
.label = type parameter `{$param_ty}` must be covered by another type when it appears before the first local type (`{$local_type}`)
.note = implementing a foreign trait is only possible if at least one of the types for which it is implemented is local, and no uncovered type parameters appear before that first local type


@ -1,12 +1,15 @@
use rustc_data_structures::fx::FxIndexMap;
use std::ops::ControlFlow;
use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
use rustc_errors::{codes::*, struct_span_code_err};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_middle::ty::{self as ty, Ty};
use rustc_middle::ty::{self as ty, IsSuggestable, Ty, TyCtxt};
use rustc_span::symbol::Ident;
use rustc_span::{ErrorGuaranteed, Span};
use rustc_span::{ErrorGuaranteed, Span, Symbol};
use rustc_trait_selection::traits;
use rustc_type_ir::visit::{TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor};
use smallvec::SmallVec;
use crate::astconv::{AstConv, OnlySelfBounds, PredicateFilter};
@ -433,14 +436,8 @@ impl<'tcx> dyn AstConv<'tcx> + '_ {
binding.kind
{
let ty = alias_ty.map_bound(|ty| tcx.type_of(ty.def_id).instantiate(tcx, ty.args));
// Since the arguments passed to the alias type above may contain early-bound
// generic parameters, the instantiated type may contain some as well.
// Therefore wrap it in `EarlyBinder`.
// FIXME(fmease): Reject escaping late-bound vars.
tcx.feed_anon_const_type(
anon_const.def_id,
ty::EarlyBinder::bind(ty.skip_binder()),
);
let ty = check_assoc_const_binding_type(tcx, assoc_ident, ty, binding.hir_id);
tcx.feed_anon_const_type(anon_const.def_id, ty::EarlyBinder::bind(ty));
}
alias_ty
@ -530,3 +527,167 @@ impl<'tcx> dyn AstConv<'tcx> + '_ {
Ok(())
}
}
/// Detect and reject early-bound & escaping late-bound generic params in the type of assoc const bindings.
///
/// FIXME(const_generics): This is a temporary and semi-artificial restriction until the
/// arrival of *generic const generics*[^1].
///
/// It might actually be possible that we can already support early-bound generic params
/// in such types if we just lifted some more checks in other places, too, for example
/// inside [`ty::Const::from_anon_const`]. However, even if that were the case, we should
/// probably gate this behind another feature flag.
///
/// [^1]: <https://github.com/rust-lang/project-const-generics/issues/28>.
fn check_assoc_const_binding_type<'tcx>(
tcx: TyCtxt<'tcx>,
assoc_const: Ident,
ty: ty::Binder<'tcx, Ty<'tcx>>,
hir_id: hir::HirId,
) -> Ty<'tcx> {
// We can't perform the checks for early-bound params during name resolution unlike E0770
// because this information depends on *type* resolution.
// We can't perform these checks in `resolve_bound_vars` either for the same reason.
// Consider the trait ref `for<'a> Trait<'a, C = { &0 }>`. We need to know the fully
// resolved type of `Trait::C` in order to know if it references `'a` or not.
let ty = ty.skip_binder();
if !ty.has_param() && !ty.has_escaping_bound_vars() {
return ty;
}
let mut collector = GenericParamAndBoundVarCollector {
tcx,
params: Default::default(),
vars: Default::default(),
depth: ty::INNERMOST,
};
let mut guar = ty.visit_with(&mut collector).break_value();
let ty_note = ty
.make_suggestable(tcx, false)
.map(|ty| crate::errors::TyOfAssocConstBindingNote { assoc_const, ty });
let enclosing_item_owner_id = tcx
.hir()
.parent_owner_iter(hir_id)
.find_map(|(owner_id, parent)| parent.generics().map(|_| owner_id))
.unwrap();
let generics = tcx.generics_of(enclosing_item_owner_id);
for index in collector.params {
let param = generics.param_at(index as _, tcx);
let is_self_param = param.name == rustc_span::symbol::kw::SelfUpper;
guar.get_or_insert(tcx.dcx().emit_err(crate::errors::ParamInTyOfAssocConstBinding {
span: assoc_const.span,
assoc_const,
param_name: param.name,
param_def_kind: tcx.def_descr(param.def_id),
param_category: if is_self_param {
"self"
} else if param.kind.is_synthetic() {
"synthetic"
} else {
"normal"
},
param_defined_here_label:
(!is_self_param).then(|| tcx.def_ident_span(param.def_id).unwrap()),
ty_note,
}));
}
for (var_def_id, var_name) in collector.vars {
guar.get_or_insert(tcx.dcx().emit_err(
crate::errors::EscapingBoundVarInTyOfAssocConstBinding {
span: assoc_const.span,
assoc_const,
var_name,
var_def_kind: tcx.def_descr(var_def_id),
var_defined_here_label: tcx.def_ident_span(var_def_id).unwrap(),
ty_note,
},
));
}
let guar = guar.unwrap_or_else(|| bug!("failed to find gen params or bound vars in ty"));
Ty::new_error(tcx, guar)
}
struct GenericParamAndBoundVarCollector<'tcx> {
tcx: TyCtxt<'tcx>,
params: FxIndexSet<u32>,
vars: FxIndexSet<(DefId, Symbol)>,
depth: ty::DebruijnIndex,
}
impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for GenericParamAndBoundVarCollector<'tcx> {
type Result = ControlFlow<ErrorGuaranteed>;
fn visit_binder<T: TypeVisitable<TyCtxt<'tcx>>>(
&mut self,
binder: &ty::Binder<'tcx, T>,
) -> Self::Result {
self.depth.shift_in(1);
let result = binder.super_visit_with(self);
self.depth.shift_out(1);
result
}
fn visit_ty(&mut self, ty: Ty<'tcx>) -> Self::Result {
match ty.kind() {
ty::Param(param) => {
self.params.insert(param.index);
}
ty::Bound(db, bt) if *db >= self.depth => {
self.vars.insert(match bt.kind {
ty::BoundTyKind::Param(def_id, name) => (def_id, name),
ty::BoundTyKind::Anon => {
let reported = self
.tcx
.dcx()
.delayed_bug(format!("unexpected anon bound ty: {:?}", bt.var));
return ControlFlow::Break(reported);
}
});
}
_ if ty.has_param() || ty.has_bound_vars() => return ty.super_visit_with(self),
_ => {}
}
ControlFlow::Continue(())
}
fn visit_region(&mut self, re: ty::Region<'tcx>) -> Self::Result {
match re.kind() {
ty::ReEarlyParam(param) => {
self.params.insert(param.index);
}
ty::ReBound(db, br) if db >= self.depth => {
self.vars.insert(match br.kind {
ty::BrNamed(def_id, name) => (def_id, name),
ty::BrAnon | ty::BrEnv => {
let guar = self
.tcx
.dcx()
.delayed_bug(format!("unexpected bound region kind: {:?}", br.kind));
return ControlFlow::Break(guar);
}
});
}
_ => {}
}
ControlFlow::Continue(())
}
fn visit_const(&mut self, ct: ty::Const<'tcx>) -> Self::Result {
match ct.kind() {
ty::ConstKind::Param(param) => {
self.params.insert(param.index);
}
ty::ConstKind::Bound(db, ty::BoundVar { .. }) if db >= self.depth => {
let guar = self.tcx.dcx().delayed_bug("unexpected escaping late-bound const var");
return ControlFlow::Break(guar);
}
_ if ct.has_param() || ct.has_bound_vars() => return ct.super_visit_with(self),
_ => {}
}
ControlFlow::Continue(())
}
}
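As a standalone toy model of the De Bruijn bookkeeping used by the collector above (invented types, not rustc's): a bound variable escapes when its index is at least the current binder depth, and visiting a binder shifts the depth in and out.

#[derive(Clone, Copy, PartialEq, PartialOrd)]
struct DebruijnIndex(u32);

enum Ty {
    /// A variable bound by the `index`-th enclosing binder (0 = innermost).
    Bound(DebruijnIndex),
    /// A type former that introduces one binder level.
    Binder(Box<Ty>),
}

fn has_escaping_vars(ty: &Ty, depth: DebruijnIndex) -> bool {
    match ty {
        // Escapes if it refers past every binder we have entered so far.
        Ty::Bound(db) => *db >= depth,
        // Entering a binder corresponds to `shift_in(1)` above.
        Ty::Binder(inner) => has_escaping_vars(inner, DebruijnIndex(depth.0 + 1)),
    }
}

fn main() {
    let var = Ty::Bound(DebruijnIndex(0));
    // At the top level the variable has no binder to refer to: it escapes.
    assert!(has_escaping_vars(&var, DebruijnIndex(0)));
    // Wrapped in one binder it is properly bound.
    assert!(!has_escaping_vars(&Ty::Binder(Box::new(var)), DebruijnIndex(0)));
}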

View File

@ -408,10 +408,7 @@ impl<'tcx> dyn AstConv<'tcx> + '_ {
traits with associated type `{name}`, you could use the \
fully-qualified path",
),
traits
.iter()
.map(|trait_str| format!("<Example as {trait_str}>::{name}"))
.collect::<Vec<_>>(),
traits.iter().map(|trait_str| format!("<Example as {trait_str}>::{name}")),
Applicability::HasPlaceholders,
);
}

View File

@ -16,7 +16,7 @@ use rustc_middle::ty::{
self, GenericArgsRef, GenericParamDef, GenericParamDefKind, IsSuggestable, Ty, TyCtxt,
};
use rustc_session::lint::builtin::LATE_BOUND_LIFETIME_ARGUMENTS;
use rustc_span::symbol::kw;
use rustc_span::symbol::{kw, sym};
use smallvec::SmallVec;
/// Report an error that a generic argument did not match the generic parameter that was
@ -41,9 +41,11 @@ fn generic_arg_mismatch_err(
if let GenericParamDefKind::Const { .. } = param.kind {
if matches!(arg, GenericArg::Type(hir::Ty { kind: hir::TyKind::Infer, .. })) {
err.help("const arguments cannot yet be inferred with `_`");
if sess.is_nightly_build() {
err.help("add `#![feature(generic_arg_infer)]` to the crate attributes to enable");
}
tcx.disabled_nightly_features(
&mut err,
param.def_id.as_local().map(|local| tcx.local_def_id_to_hir_id(local)),
[(String::new(), sym::generic_arg_infer)],
);
}
}

View File

@ -658,10 +658,6 @@ pub fn check_intrinsic_type(
sym::simd_shuffle => (3, 0, vec![param(0), param(0), param(1)], param(2)),
sym::simd_shuffle_generic => (2, 1, vec![param(0), param(0)], param(1)),
sym::retag_box_to_raw => {
(2, 0, vec![Ty::new_mut_ptr(tcx, param(0))], Ty::new_mut_ptr(tcx, param(0)))
}
other => {
tcx.dcx().emit_err(UnrecognizedIntrinsicFunction { span, name: other });
return;

View File

@ -291,12 +291,16 @@ fn default_body_is_unstable(
reason: reason_str,
});
let inject_span = item_did
.as_local()
.and_then(|id| tcx.crate_level_attribute_injection_span(tcx.local_def_id_to_hir_id(id)));
rustc_session::parse::add_feature_diagnostics_for_issue(
&mut err,
&tcx.sess,
feature,
rustc_feature::GateIssue::Library(issue),
false,
inject_span,
);
err.emit();

View File

@ -999,9 +999,14 @@ fn check_param_wf(tcx: TyCtxt<'_>, param: &hir::GenericParam<'_>) -> Result<(),
// Implements `ConstParamTy`; suggest adding the feature to enable.
Ok(..) => true,
};
if may_suggest_feature && tcx.sess.is_nightly_build() {
diag.help(
"add `#![feature(adt_const_params)]` to the crate attributes to enable more complex and user defined types",
if may_suggest_feature {
tcx.disabled_nightly_features(
&mut diag,
Some(param.hir_id),
[(
" more complex and user defined types".to_string(),
sym::adt_const_params,
)],
);
}

View File

@ -295,6 +295,44 @@ pub struct AssocTypeBindingNotAllowed {
pub fn_trait_expansion: Option<ParenthesizedFnTraitExpansion>,
}
#[derive(Diagnostic)]
#[diag(hir_analysis_param_in_ty_of_assoc_const_binding)]
pub(crate) struct ParamInTyOfAssocConstBinding<'tcx> {
#[primary_span]
#[label]
pub span: Span,
pub assoc_const: Ident,
pub param_name: Symbol,
pub param_def_kind: &'static str,
pub param_category: &'static str,
#[label(hir_analysis_param_defined_here_label)]
pub param_defined_here_label: Option<Span>,
#[subdiagnostic]
pub ty_note: Option<TyOfAssocConstBindingNote<'tcx>>,
}
#[derive(Subdiagnostic, Clone, Copy)]
#[note(hir_analysis_ty_of_assoc_const_binding_note)]
pub(crate) struct TyOfAssocConstBindingNote<'tcx> {
pub assoc_const: Ident,
pub ty: Ty<'tcx>,
}
#[derive(Diagnostic)]
#[diag(hir_analysis_escaping_bound_var_in_ty_of_assoc_const_binding)]
pub(crate) struct EscapingBoundVarInTyOfAssocConstBinding<'tcx> {
#[primary_span]
#[label]
pub span: Span,
pub assoc_const: Ident,
pub var_name: Symbol,
pub var_def_kind: &'static str,
#[label(hir_analysis_var_defined_here_label)]
pub var_defined_here_label: Span,
#[subdiagnostic]
pub ty_note: Option<TyOfAssocConstBindingNote<'tcx>>,
}
#[derive(Subdiagnostic)]
#[help(hir_analysis_parenthesized_fn_trait_expansion)]
pub struct ParenthesizedFnTraitExpansion {

View File

@ -755,7 +755,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
if let hir::ExprKind::Path(hir::QPath::Resolved(None, path)) = callee_expr.kind
&& let Res::Local(_) = path.res
&& let [segment] = &path.segments[..]
&& let [segment] = &path.segments
{
for id in self.tcx.hir().items() {
if let Some(node) = self.tcx.hir().get_if_local(id.owner_id.into())

View File

@ -1420,15 +1420,13 @@ impl<'tcx> Pick<'tcx> {
}
_ => {}
}
if tcx.sess.is_nightly_build() {
for (candidate, feature) in &self.unstable_candidates {
lint.help(format!(
"add `#![feature({})]` to the crate attributes to enable `{}`",
feature,
tcx.def_path_str(candidate.item.def_id),
));
}
}
tcx.disabled_nightly_features(
lint,
Some(scope_expr_id),
self.unstable_candidates.iter().map(|(candidate, feature)| {
(format!(" `{}`", tcx.def_path_str(candidate.item.def_id)), *feature)
}),
);
},
);
}
@ -1935,7 +1933,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
}
}
/// Determine if the associated item withe the given DefId matches
/// Determine if the associated item with the given DefId matches
/// the desired name via a doc alias.
fn matches_by_doc_alias(&self, def_id: DefId) -> bool {
let Some(name) = self.method_name else {

View File

@ -83,6 +83,9 @@ struct PatInfo<'tcx, 'a> {
binding_mode: BindingMode,
top_info: TopInfo<'tcx>,
decl_origin: Option<DeclOrigin<'a>>,
/// The depth of the current pattern.
current_depth: u32,
}
impl<'tcx> FnCtxt<'_, 'tcx> {
@ -152,7 +155,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
decl_origin: Option<DeclOrigin<'tcx>>,
) {
let info = TopInfo { expected, origin_expr, span };
let pat_info = PatInfo { binding_mode: INITIAL_BM, top_info: info, decl_origin };
let pat_info =
PatInfo { binding_mode: INITIAL_BM, top_info: info, decl_origin, current_depth: 0 };
self.check_pat(pat, expected, pat_info);
}
@ -163,7 +167,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// Conversely, inside this module, `check_pat_top` should never be used.
#[instrument(level = "debug", skip(self, pat_info))]
fn check_pat(&self, pat: &'tcx Pat<'tcx>, expected: Ty<'tcx>, pat_info: PatInfo<'tcx, '_>) {
let PatInfo { binding_mode: def_bm, top_info: ti, .. } = pat_info;
let PatInfo { binding_mode: def_bm, top_info: ti, current_depth, .. } = pat_info;
let path_res = match &pat.kind {
PatKind::Path(qpath) => Some(
self.resolve_ty_and_res_fully_qualified_call(qpath, pat.hir_id, pat.span, None),
@ -172,8 +177,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
};
let adjust_mode = self.calc_adjust_mode(pat, path_res.map(|(res, ..)| res));
let (expected, def_bm) = self.calc_default_binding_mode(pat, expected, def_bm, adjust_mode);
let pat_info =
PatInfo { binding_mode: def_bm, top_info: ti, decl_origin: pat_info.decl_origin };
let pat_info = PatInfo {
binding_mode: def_bm,
top_info: ti,
decl_origin: pat_info.decl_origin,
current_depth: current_depth + 1,
};
let ty = match pat.kind {
PatKind::Wild | PatKind::Err(_) => expected,
@ -1046,14 +1055,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
expected: Ty<'tcx>,
pat_info: PatInfo<'tcx, '_>,
) -> Ty<'tcx> {
let PatInfo { binding_mode: def_bm, top_info: ti, decl_origin } = pat_info;
let PatInfo { binding_mode: def_bm, top_info: ti, decl_origin, current_depth } = pat_info;
let tcx = self.tcx;
let on_error = |e| {
for pat in subpats {
self.check_pat(
pat,
Ty::new_error(tcx, e),
PatInfo { binding_mode: def_bm, top_info: ti, decl_origin },
PatInfo { binding_mode: def_bm, top_info: ti, decl_origin, current_depth },
);
}
};
@ -1120,7 +1129,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self.check_pat(
subpat,
field_ty,
PatInfo { binding_mode: def_bm, top_info: ti, decl_origin },
PatInfo { binding_mode: def_bm, top_info: ti, decl_origin, current_depth },
);
self.tcx.check_stability(
@ -2134,7 +2143,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// The expected type must be an array or slice, but was neither, so error.
_ => {
let guar = expected.error_reported().err().unwrap_or_else(|| {
self.error_expected_array_or_slice(span, expected, pat_info.top_info)
self.error_expected_array_or_slice(span, expected, pat_info)
});
let err = Ty::new_error(self.tcx, guar);
(err, Some(err), err)
@ -2169,7 +2178,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
len: ty::Const<'tcx>,
min_len: u64,
) -> (Option<Ty<'tcx>>, Ty<'tcx>) {
let len = match len.eval(self.tcx, self.param_env, None) {
let len = match len.eval(self.tcx, self.param_env, span) {
Ok(val) => val
.try_to_scalar()
.and_then(|scalar| scalar.try_to_int().ok())
@ -2273,8 +2282,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
&self,
span: Span,
expected_ty: Ty<'tcx>,
ti: TopInfo<'tcx>,
pat_info: PatInfo<'tcx, '_>,
) -> ErrorGuaranteed {
let PatInfo { top_info: ti, current_depth, .. } = pat_info;
let mut err = struct_span_code_err!(
self.dcx(),
span,
@ -2292,9 +2303,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
&& let Some(_) = ti.origin_expr
&& let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span)
{
let ty = self.resolve_vars_if_possible(ti.expected);
let is_slice_or_array_or_vector = self.is_slice_or_array_or_vector(ty);
match is_slice_or_array_or_vector.1.kind() {
let resolved_ty = self.resolve_vars_if_possible(ti.expected);
let (is_slice_or_array_or_vector, resolved_ty) =
self.is_slice_or_array_or_vector(resolved_ty);
match resolved_ty.kind() {
ty::Adt(adt_def, _)
if self.tcx.is_diagnostic_item(sym::Option, adt_def.did())
|| self.tcx.is_diagnostic_item(sym::Result, adt_def.did()) =>
@ -2309,7 +2321,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
_ => (),
}
if is_slice_or_array_or_vector.0 {
let is_top_level = current_depth <= 1;
if is_slice_or_array_or_vector && is_top_level {
err.span_suggestion(
span,
"consider slicing here",

View File

@ -1475,7 +1475,7 @@ impl<'tcx> InferCtxt<'tcx> {
param_env: ty::ParamEnv<'tcx>,
unevaluated: ty::UnevaluatedConst<'tcx>,
ty: Ty<'tcx>,
span: Option<Span>,
span: Span,
) -> Result<ty::Const<'tcx>, ErrorHandled> {
match self.const_eval_resolve(param_env, unevaluated, span) {
Ok(Some(val)) => Ok(ty::Const::new_value(self.tcx, val, ty)),
@ -1509,7 +1509,7 @@ impl<'tcx> InferCtxt<'tcx> {
&self,
mut param_env: ty::ParamEnv<'tcx>,
unevaluated: ty::UnevaluatedConst<'tcx>,
span: Option<Span>,
span: Span,
) -> EvalToValTreeResult<'tcx> {
let mut args = self.resolve_vars_if_possible(unevaluated.args);
debug!(?args);
@ -1521,12 +1521,9 @@ impl<'tcx> InferCtxt<'tcx> {
if let Some(ct) = tcx.thir_abstract_const(unevaluated.def)? {
let ct = tcx.expand_abstract_consts(ct.instantiate(tcx, args));
if let Err(e) = ct.error_reported() {
return Err(ErrorHandled::Reported(
e.into(),
span.unwrap_or(rustc_span::DUMMY_SP),
));
return Err(ErrorHandled::Reported(e.into(), span));
} else if ct.has_non_region_infer() || ct.has_non_region_param() {
return Err(ErrorHandled::TooGeneric(span.unwrap_or(rustc_span::DUMMY_SP)));
return Err(ErrorHandled::TooGeneric(span));
} else {
args = replace_param_and_infer_args_with_placeholder(tcx, args);
}

View File

@ -18,7 +18,7 @@
#![allow(internal_features)]
#![allow(rustc::diagnostic_outside_of_impl)]
#![allow(rustc::untranslatable_diagnostic)]
#![feature(associated_type_bounds)]
#![cfg_attr(bootstrap, feature(associated_type_bounds))]
#![feature(box_patterns)]
#![feature(control_flow_enum)]
#![feature(extend_one)]

View File

@ -5,49 +5,11 @@ use rustc_ast::util::unicode::TEXT_FLOW_CONTROL_CHARS;
use rustc_errors::{add_elided_lifetime_in_path_suggestion, Diag};
use rustc_errors::{Applicability, SuggestionStyle};
use rustc_middle::middle::stability;
use rustc_session::config::ExpectedValues;
use rustc_session::lint::BuiltinLintDiag;
use rustc_session::Session;
use rustc_span::edit_distance::find_best_match_for_name;
use rustc_span::symbol::{sym, Symbol};
use rustc_span::BytePos;
const MAX_CHECK_CFG_NAMES_OR_VALUES: usize = 35;
fn check_cfg_expected_note(
sess: &Session,
possibilities: &[Symbol],
type_: &str,
name: Option<Symbol>,
suffix: &str,
) -> String {
use std::fmt::Write;
let n_possibilities = if sess.opts.unstable_opts.check_cfg_all_expected {
possibilities.len()
} else {
std::cmp::min(possibilities.len(), MAX_CHECK_CFG_NAMES_OR_VALUES)
};
let mut possibilities = possibilities.iter().map(Symbol::as_str).collect::<Vec<_>>();
possibilities.sort();
let and_more = possibilities.len().saturating_sub(n_possibilities);
let possibilities = possibilities[..n_possibilities].join("`, `");
let mut note = String::with_capacity(50 + possibilities.len());
write!(&mut note, "expected {type_}").unwrap();
if let Some(name) = name {
write!(&mut note, " for `{name}`").unwrap();
}
write!(&mut note, " are: {suffix}`{possibilities}`").unwrap();
if and_more > 0 {
write!(&mut note, " and {and_more} more").unwrap();
}
note
}
mod check_cfg;
pub(super) fn builtin(sess: &Session, diagnostic: BuiltinLintDiag, diag: &mut Diag<'_, ()>) {
match diagnostic {
@ -219,242 +181,11 @@ pub(super) fn builtin(sess: &Session, diagnostic: BuiltinLintDiag, diag: &mut Di
diag.help(help);
diag.note("see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information");
}
BuiltinLintDiag::UnexpectedCfgName((name, name_span), value) => {
#[allow(rustc::potential_query_instability)]
let possibilities: Vec<Symbol> =
sess.psess.check_config.expecteds.keys().copied().collect();
let mut names_possibilities: Vec<_> = if value.is_none() {
// We later sort and display all the possibilities, so the order here does not matter.
#[allow(rustc::potential_query_instability)]
sess.psess
.check_config
.expecteds
.iter()
.filter_map(|(k, v)| match v {
ExpectedValues::Some(v) if v.contains(&Some(name)) => Some(k),
_ => None,
})
.collect()
} else {
Vec::new()
};
let is_from_cargo = rustc_session::utils::was_invoked_from_cargo();
let mut is_feature_cfg = name == sym::feature;
if is_feature_cfg && is_from_cargo {
diag.help("consider defining some features in `Cargo.toml`");
// Suggest the most probable if we found one
} else if let Some(best_match) = find_best_match_for_name(&possibilities, name, None) {
if let Some(ExpectedValues::Some(best_match_values)) =
sess.psess.check_config.expecteds.get(&best_match)
{
// We will soon sort, so the initial order does not matter.
#[allow(rustc::potential_query_instability)]
let mut possibilities =
best_match_values.iter().flatten().map(Symbol::as_str).collect::<Vec<_>>();
possibilities.sort();
let mut should_print_possibilities = true;
if let Some((value, value_span)) = value {
if best_match_values.contains(&Some(value)) {
diag.span_suggestion(
name_span,
"there is a config with a similar name and value",
best_match,
Applicability::MaybeIncorrect,
);
should_print_possibilities = false;
} else if best_match_values.contains(&None) {
diag.span_suggestion(
name_span.to(value_span),
"there is a config with a similar name and no value",
best_match,
Applicability::MaybeIncorrect,
);
should_print_possibilities = false;
} else if let Some(first_value) = possibilities.first() {
diag.span_suggestion(
name_span.to(value_span),
"there is a config with a similar name and different values",
format!("{best_match} = \"{first_value}\""),
Applicability::MaybeIncorrect,
);
} else {
diag.span_suggestion(
name_span.to(value_span),
"there is a config with a similar name and different values",
best_match,
Applicability::MaybeIncorrect,
);
};
} else {
diag.span_suggestion(
name_span,
"there is a config with a similar name",
best_match,
Applicability::MaybeIncorrect,
);
}
if !possibilities.is_empty() && should_print_possibilities {
let possibilities = possibilities.join("`, `");
diag.help(format!(
"expected values for `{best_match}` are: `{possibilities}`"
));
}
} else {
diag.span_suggestion(
name_span,
"there is a config with a similar name",
best_match,
Applicability::MaybeIncorrect,
);
}
is_feature_cfg |= best_match == sym::feature;
} else {
if !names_possibilities.is_empty() && names_possibilities.len() <= 3 {
names_possibilities.sort();
for cfg_name in names_possibilities.iter() {
diag.span_suggestion(
name_span,
"found config with similar value",
format!("{cfg_name} = \"{name}\""),
Applicability::MaybeIncorrect,
);
}
}
if !possibilities.is_empty() {
diag.help_once(check_cfg_expected_note(
sess,
&possibilities,
"names",
None,
"",
));
}
}
let inst = if let Some((value, _value_span)) = value {
let pre = if is_from_cargo { "\\" } else { "" };
format!("cfg({name}, values({pre}\"{value}{pre}\"))")
} else {
format!("cfg({name})")
};
if is_from_cargo {
if !is_feature_cfg {
diag.help(format!("consider using a Cargo feature instead or adding `println!(\"cargo:rustc-check-cfg={inst}\");` to the top of a `build.rs`"));
}
diag.note("see <https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#check-cfg> for more information about checking conditional configuration");
} else {
diag.help(format!("to expect this configuration use `--check-cfg={inst}`"));
diag.note("see <https://doc.rust-lang.org/nightly/unstable-book/compiler-flags/check-cfg.html> for more information about checking conditional configuration");
}
BuiltinLintDiag::UnexpectedCfgName(name, value) => {
check_cfg::unexpected_cfg_name(sess, diag, name, value)
}
BuiltinLintDiag::UnexpectedCfgValue((name, name_span), value) => {
let Some(ExpectedValues::Some(values)) = &sess.psess.check_config.expecteds.get(&name)
else {
bug!(
"it shouldn't be possible to have a diagnostic on a value whose name is not in values"
);
};
let mut have_none_possibility = false;
// We later sort the possibilities if they are not empty, so the
// order here does not matter.
#[allow(rustc::potential_query_instability)]
let possibilities: Vec<Symbol> = values
.iter()
.inspect(|a| have_none_possibility |= a.is_none())
.copied()
.flatten()
.collect();
let is_from_cargo = rustc_session::utils::was_invoked_from_cargo();
// Show the full list of all possible values for a given name, but don't do it
// for names, as the possibilities could be very long
if !possibilities.is_empty() {
diag.note(check_cfg_expected_note(
sess,
&possibilities,
"values",
Some(name),
if have_none_possibility { "(none), " } else { "" },
));
if let Some((value, value_span)) = value {
// Suggest the most probable if we found one
if let Some(best_match) = find_best_match_for_name(&possibilities, value, None)
{
diag.span_suggestion(
value_span,
"there is a expected value with a similar name",
format!("\"{best_match}\""),
Applicability::MaybeIncorrect,
);
}
} else if let &[first_possibility] = &possibilities[..] {
diag.span_suggestion(
name_span.shrink_to_hi(),
"specify a config value",
format!(" = \"{first_possibility}\""),
Applicability::MaybeIncorrect,
);
}
} else if have_none_possibility {
diag.note(format!("no expected value for `{name}`"));
if let Some((_value, value_span)) = value {
diag.span_suggestion(
name_span.shrink_to_hi().to(value_span),
"remove the value",
"",
Applicability::MaybeIncorrect,
);
}
} else {
diag.note(format!("no expected values for `{name}`"));
let sp = if let Some((_value, value_span)) = value {
name_span.to(value_span)
} else {
name_span
};
diag.span_suggestion(sp, "remove the condition", "", Applicability::MaybeIncorrect);
}
// We don't want to suggest adding values to well known names
// since those are defined by rustc itself. Users can still
// do it if they want, but we should not encourage them.
let is_cfg_a_well_know_name = sess.psess.check_config.well_known_names.contains(&name);
let inst = if let Some((value, _value_span)) = value {
let pre = if is_from_cargo { "\\" } else { "" };
format!("cfg({name}, values({pre}\"{value}{pre}\"))")
} else {
format!("cfg({name})")
};
if is_from_cargo {
if name == sym::feature {
if let Some((value, _value_span)) = value {
diag.help(format!(
"consider adding `{value}` as a feature in `Cargo.toml`"
));
} else {
diag.help("consider defining some features in `Cargo.toml`");
}
} else if !is_cfg_a_well_know_name {
diag.help(format!("consider using a Cargo feature instead or adding `println!(\"cargo:rustc-check-cfg={inst}\");` to the top of a `build.rs`"));
}
diag.note("see <https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#check-cfg> for more information about checking conditional configuration");
} else {
if !is_cfg_a_well_know_name {
diag.help(format!("to expect this configuration use `--check-cfg={inst}`"));
}
diag.note("see <https://doc.rust-lang.org/nightly/unstable-book/compiler-flags/check-cfg.html> for more information about checking conditional configuration");
}
BuiltinLintDiag::UnexpectedCfgValue(name, value) => {
check_cfg::unexpected_cfg_value(sess, diag, name, value)
}
BuiltinLintDiag::DeprecatedWhereclauseLocation(sugg) => {
let left_sp = diag.span.primary_span().unwrap();

View File

@ -0,0 +1,277 @@
use rustc_errors::{Applicability, Diag};
use rustc_session::{config::ExpectedValues, Session};
use rustc_span::edit_distance::find_best_match_for_name;
use rustc_span::{sym, Span, Symbol};
const MAX_CHECK_CFG_NAMES_OR_VALUES: usize = 35;
fn check_cfg_expected_note(
sess: &Session,
possibilities: &[Symbol],
type_: &str,
name: Option<Symbol>,
suffix: &str,
) -> String {
use std::fmt::Write;
let n_possibilities = if sess.opts.unstable_opts.check_cfg_all_expected {
possibilities.len()
} else {
std::cmp::min(possibilities.len(), MAX_CHECK_CFG_NAMES_OR_VALUES)
};
let mut possibilities = possibilities.iter().map(Symbol::as_str).collect::<Vec<_>>();
possibilities.sort();
let and_more = possibilities.len().saturating_sub(n_possibilities);
let possibilities = possibilities[..n_possibilities].join("`, `");
let mut note = String::with_capacity(50 + possibilities.len());
write!(&mut note, "expected {type_}").unwrap();
if let Some(name) = name {
write!(&mut note, " for `{name}`").unwrap();
}
write!(&mut note, " are: {suffix}`{possibilities}`").unwrap();
if and_more > 0 {
write!(&mut note, " and {and_more} more").unwrap();
}
note
}
pub(super) fn unexpected_cfg_name(
sess: &Session,
diag: &mut Diag<'_, ()>,
(name, name_span): (Symbol, Span),
value: Option<(Symbol, Span)>,
) {
#[allow(rustc::potential_query_instability)]
let possibilities: Vec<Symbol> = sess.psess.check_config.expecteds.keys().copied().collect();
let mut names_possibilities: Vec<_> = if value.is_none() {
// We later sort and display all the possibilities, so the order here does not matter.
#[allow(rustc::potential_query_instability)]
sess.psess
.check_config
.expecteds
.iter()
.filter_map(|(k, v)| match v {
ExpectedValues::Some(v) if v.contains(&Some(name)) => Some(k),
_ => None,
})
.collect()
} else {
Vec::new()
};
let is_from_cargo = rustc_session::utils::was_invoked_from_cargo();
let mut is_feature_cfg = name == sym::feature;
if is_feature_cfg && is_from_cargo {
diag.help("consider defining some features in `Cargo.toml`");
// Suggest the most probable if we found one
} else if let Some(best_match) = find_best_match_for_name(&possibilities, name, None) {
if let Some(ExpectedValues::Some(best_match_values)) =
sess.psess.check_config.expecteds.get(&best_match)
{
// We will soon sort, so the initial order does not matter.
#[allow(rustc::potential_query_instability)]
let mut possibilities =
best_match_values.iter().flatten().map(Symbol::as_str).collect::<Vec<_>>();
possibilities.sort();
let mut should_print_possibilities = true;
if let Some((value, value_span)) = value {
if best_match_values.contains(&Some(value)) {
diag.span_suggestion(
name_span,
"there is a config with a similar name and value",
best_match,
Applicability::MaybeIncorrect,
);
should_print_possibilities = false;
} else if best_match_values.contains(&None) {
diag.span_suggestion(
name_span.to(value_span),
"there is a config with a similar name and no value",
best_match,
Applicability::MaybeIncorrect,
);
should_print_possibilities = false;
} else if let Some(first_value) = possibilities.first() {
diag.span_suggestion(
name_span.to(value_span),
"there is a config with a similar name and different values",
format!("{best_match} = \"{first_value}\""),
Applicability::MaybeIncorrect,
);
} else {
diag.span_suggestion(
name_span.to(value_span),
"there is a config with a similar name and different values",
best_match,
Applicability::MaybeIncorrect,
);
};
} else {
diag.span_suggestion(
name_span,
"there is a config with a similar name",
best_match,
Applicability::MaybeIncorrect,
);
}
if !possibilities.is_empty() && should_print_possibilities {
let possibilities = possibilities.join("`, `");
diag.help(format!("expected values for `{best_match}` are: `{possibilities}`"));
}
} else {
diag.span_suggestion(
name_span,
"there is a config with a similar name",
best_match,
Applicability::MaybeIncorrect,
);
}
is_feature_cfg |= best_match == sym::feature;
} else {
if !names_possibilities.is_empty() && names_possibilities.len() <= 3 {
names_possibilities.sort();
for cfg_name in names_possibilities.iter() {
diag.span_suggestion(
name_span,
"found config with similar value",
format!("{cfg_name} = \"{name}\""),
Applicability::MaybeIncorrect,
);
}
}
if !possibilities.is_empty() {
diag.help_once(check_cfg_expected_note(sess, &possibilities, "names", None, ""));
}
}
let inst = if let Some((value, _value_span)) = value {
let pre = if is_from_cargo { "\\" } else { "" };
format!("cfg({name}, values({pre}\"{value}{pre}\"))")
} else {
format!("cfg({name})")
};
if is_from_cargo {
if !is_feature_cfg {
diag.help(format!("consider using a Cargo feature instead or adding `println!(\"cargo:rustc-check-cfg={inst}\");` to the top of a `build.rs`"));
}
diag.note("see <https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#check-cfg> for more information about checking conditional configuration");
} else {
diag.help(format!("to expect this configuration use `--check-cfg={inst}`"));
diag.note("see <https://doc.rust-lang.org/nightly/unstable-book/compiler-flags/check-cfg.html> for more information about checking conditional configuration");
}
}
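For illustration, a `build.rs` matching the Cargo-oriented help text emitted above (the cfg name and values are invented); outside Cargo the equivalent is passing `--check-cfg=cfg(my_cfg, values("a", "b"))` to rustc directly.

fn main() {
    // Declare `my_cfg` with two expected values so the unexpected-cfg lint
    // stops firing for `#[cfg(my_cfg = "a")]` and `#[cfg(my_cfg = "b")]`.
    println!("cargo:rustc-check-cfg=cfg(my_cfg, values(\"a\", \"b\"))");
}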
pub(super) fn unexpected_cfg_value(
sess: &Session,
diag: &mut Diag<'_, ()>,
(name, name_span): (Symbol, Span),
value: Option<(Symbol, Span)>,
) {
let Some(ExpectedValues::Some(values)) = &sess.psess.check_config.expecteds.get(&name) else {
bug!(
"it shouldn't be possible to have a diagnostic on a value whose name is not in values"
);
};
let mut have_none_possibility = false;
// We later sort the possibilities if they are not empty, so the
// order here does not matter.
#[allow(rustc::potential_query_instability)]
let possibilities: Vec<Symbol> = values
.iter()
.inspect(|a| have_none_possibility |= a.is_none())
.copied()
.flatten()
.collect();
let is_from_cargo = rustc_session::utils::was_invoked_from_cargo();
// Show the full list of all possible values for a given name, but don't do it
// for names, as the possibilities could be very long
if !possibilities.is_empty() {
diag.note(check_cfg_expected_note(
sess,
&possibilities,
"values",
Some(name),
if have_none_possibility { "(none), " } else { "" },
));
if let Some((value, value_span)) = value {
// Suggest the most probable if we found one
if let Some(best_match) = find_best_match_for_name(&possibilities, value, None) {
diag.span_suggestion(
value_span,
"there is a expected value with a similar name",
format!("\"{best_match}\""),
Applicability::MaybeIncorrect,
);
}
} else if let &[first_possibility] = &possibilities[..] {
diag.span_suggestion(
name_span.shrink_to_hi(),
"specify a config value",
format!(" = \"{first_possibility}\""),
Applicability::MaybeIncorrect,
);
}
} else if have_none_possibility {
diag.note(format!("no expected value for `{name}`"));
if let Some((_value, value_span)) = value {
diag.span_suggestion(
name_span.shrink_to_hi().to(value_span),
"remove the value",
"",
Applicability::MaybeIncorrect,
);
}
} else {
diag.note(format!("no expected values for `{name}`"));
let sp = if let Some((_value, value_span)) = value {
name_span.to(value_span)
} else {
name_span
};
diag.span_suggestion(sp, "remove the condition", "", Applicability::MaybeIncorrect);
}
// We don't want to suggest adding values to well known names
// since those are defined by rustc itself. Users can still
// do it if they want, but we should not encourage them.
let is_cfg_a_well_know_name = sess.psess.check_config.well_known_names.contains(&name);
let inst = if let Some((value, _value_span)) = value {
let pre = if is_from_cargo { "\\" } else { "" };
format!("cfg({name}, values({pre}\"{value}{pre}\"))")
} else {
format!("cfg({name})")
};
if is_from_cargo {
if name == sym::feature {
if let Some((value, _value_span)) = value {
diag.help(format!("consider adding `{value}` as a feature in `Cargo.toml`"));
} else {
diag.help("consider defining some features in `Cargo.toml`");
}
} else if !is_cfg_a_well_know_name {
diag.help(format!("consider using a Cargo feature instead or adding `println!(\"cargo:rustc-check-cfg={inst}\");` to the top of a `build.rs`"));
}
diag.note("see <https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#check-cfg> for more information about checking conditional configuration");
} else {
if !is_cfg_a_well_know_name {
diag.help(format!("to expect this configuration use `--check-cfg={inst}`"));
}
diag.note("see <https://doc.rust-lang.org/nightly/unstable-book/compiler-flags/check-cfg.html> for more information about checking conditional configuration");
}
}

View File

@ -1078,6 +1078,7 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> {
feature,
GateIssue::Language,
lint_from_cli,
None,
);
},
);

View File

@ -24,9 +24,7 @@
#include "llvm/Passes/StandardInstrumentations.h"
#include "llvm/Support/CBindingWrapping.h"
#include "llvm/Support/FileSystem.h"
#if LLVM_VERSION_GE(17, 0)
#include "llvm/Support/VirtualFileSystem.h"
#endif
#include "llvm/Target/TargetMachine.h"
#include "llvm/Transforms/IPO/AlwaysInliner.h"
#include "llvm/Transforms/IPO/FunctionImport.h"
@ -334,14 +332,8 @@ extern "C" void LLVMRustPrintTargetCPUs(LLVMTargetMachineRef TM,
std::ostringstream Buf;
#if LLVM_VERSION_GE(17, 0)
const MCSubtargetInfo *MCInfo = Target->getMCSubtargetInfo();
const ArrayRef<SubtargetSubTypeKV> CPUTable = MCInfo->getAllProcessorDescriptions();
#else
Buf << "Full target CPU help is not supported by this LLVM version.\n\n";
SubtargetSubTypeKV TargetCPUKV = { TargetCPU, {{}}, {{}} };
const ArrayRef<SubtargetSubTypeKV> CPUTable = TargetCPUKV;
#endif
unsigned MaxCPULen = getLongestEntryLength(CPUTable);
Buf << "Available CPUs for this target:\n";
@ -476,10 +468,6 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine(
Options.RelaxELFRelocations = RelaxELFRelocations;
#endif
Options.UseInitArray = UseInitArray;
#if LLVM_VERSION_LT(17, 0)
Options.ExplicitEmulatedTLS = true;
#endif
Options.EmulatedTLS = UseEmulatedTls;
if (TrapUnreachable) {
@ -761,16 +749,10 @@ LLVMRustOptimize(
}
std::optional<PGOOptions> PGOOpt;
#if LLVM_VERSION_GE(17, 0)
auto FS = vfs::getRealFileSystem();
#endif
if (PGOGenPath) {
assert(!PGOUsePath && !PGOSampleUsePath);
PGOOpt = PGOOptions(PGOGenPath, "", "",
#if LLVM_VERSION_GE(17, 0)
"",
FS,
#endif
PGOOpt = PGOOptions(PGOGenPath, "", "", "", FS,
PGOOptions::IRInstr, PGOOptions::NoCSAction,
#if LLVM_VERSION_GE(19, 0)
PGOOptions::ColdFuncOpt::Default,
@ -778,33 +760,21 @@ LLVMRustOptimize(
DebugInfoForProfiling);
} else if (PGOUsePath) {
assert(!PGOSampleUsePath);
PGOOpt = PGOOptions(PGOUsePath, "", "",
#if LLVM_VERSION_GE(17, 0)
"",
FS,
#endif
PGOOpt = PGOOptions(PGOUsePath, "", "", "", FS,
PGOOptions::IRUse, PGOOptions::NoCSAction,
#if LLVM_VERSION_GE(19, 0)
PGOOptions::ColdFuncOpt::Default,
#endif
DebugInfoForProfiling);
} else if (PGOSampleUsePath) {
PGOOpt = PGOOptions(PGOSampleUsePath, "", "",
#if LLVM_VERSION_GE(17, 0)
"",
FS,
#endif
PGOOpt = PGOOptions(PGOSampleUsePath, "", "", "", FS,
PGOOptions::SampleUse, PGOOptions::NoCSAction,
#if LLVM_VERSION_GE(19, 0)
PGOOptions::ColdFuncOpt::Default,
#endif
DebugInfoForProfiling);
} else if (DebugInfoForProfiling) {
PGOOpt = PGOOptions("", "", "",
#if LLVM_VERSION_GE(17, 0)
"",
FS,
#endif
PGOOpt = PGOOptions("", "", "", "", FS,
PGOOptions::NoAction, PGOOptions::NoCSAction,
#if LLVM_VERSION_GE(19, 0)
PGOOptions::ColdFuncOpt::Default,
@ -1353,9 +1323,7 @@ LLVMRustCreateThinLTOData(LLVMRustThinLTOModule *modules,
ComputeCrossModuleImport(
Ret->Index,
Ret->ModuleToDefinedGVSummaries,
#if LLVM_VERSION_GE(17, 0)
isPrevailing,
#endif
Ret->ImportLists,
Ret->ExportLists
);

View File

@ -25,6 +25,13 @@
#include <iostream>
// for raw `write` in the bad-alloc handler
#ifdef _MSC_VER
#include <io.h>
#else
#include <unistd.h>
#endif
//===----------------------------------------------------------------------===
//
// This file defines alternate interfaces to core functions that are more
@ -88,8 +95,24 @@ static void FatalErrorHandler(void *UserData,
exit(101);
}
extern "C" void LLVMRustInstallFatalErrorHandler() {
// Custom error handler for bad-alloc LLVM errors.
//
// It aborts the process without any further allocations, similar to LLVM's
// default, except that it may be configured to `throw std::bad_alloc()` instead.
static void BadAllocErrorHandler(void *UserData,
const char* Reason,
bool GenCrashDiag) {
const char *OOM = "rustc-LLVM ERROR: out of memory\n";
(void)!::write(2, OOM, strlen(OOM));
(void)!::write(2, Reason, strlen(Reason));
(void)!::write(2, "\n", 1);
abort();
}
extern "C" void LLVMRustInstallErrorHandlers() {
install_bad_alloc_error_handler(BadAllocErrorHandler);
install_fatal_error_handler(FatalErrorHandler);
install_out_of_memory_new_handler();
}
extern "C" void LLVMRustDisableSystemDialogsOnCrash() {
@ -2129,19 +2152,3 @@ extern "C" LLVMValueRef LLVMConstStringInContext2(LLVMContextRef C,
return wrap(ConstantDataArray::getString(*unwrap(C), StringRef(Str, Length), !DontNullTerminate));
}
#endif
// FIXME: Remove when Rust's minimum supported LLVM version reaches 17.
// https://github.com/llvm/llvm-project/commit/35276f16e5a2cae0dfb49c0fbf874d4d2f177acc
#if LLVM_VERSION_LT(17, 0)
extern "C" LLVMValueRef LLVMConstArray2(LLVMTypeRef ElementTy,
LLVMValueRef *ConstantVals,
uint64_t Length) {
ArrayRef<Constant *> V(unwrap<Constant>(ConstantVals, Length), Length);
return wrap(ConstantArray::get(ArrayType::get(unwrap(ElementTy), Length), V));
}
extern "C" LLVMTypeRef LLVMArrayType2(LLVMTypeRef ElementTy,
uint64_t ElementCount) {
return wrap(ArrayType::get(unwrap(ElementTy), ElementCount));
}
#endif

View File

@ -1590,17 +1590,17 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
})
}
// Translate the virtual `/rustc/$hash` prefix back to a real directory
// that should hold actual sources, where possible.
//
// NOTE: if you update this, you might need to also update bootstrap's code for generating
// the `rust-src` component in `Src::run` in `src/bootstrap/dist.rs`.
let virtual_rust_source_base_dir = [
filter(sess, option_env!("CFG_VIRTUAL_RUST_SOURCE_BASE_DIR").map(Path::new)),
filter(sess, sess.opts.unstable_opts.simulate_remapped_rust_src_base.as_deref()),
];
let try_to_translate_virtual_to_real = |name: &mut rustc_span::FileName| {
// Translate the virtual `/rustc/$hash` prefix back to a real directory
// that should hold actual sources, where possible.
//
// NOTE: if you update this, you might need to also update bootstrap's code for generating
// the `rust-src` component in `Src::run` in `src/bootstrap/dist.rs`.
let virtual_rust_source_base_dir = [
filter(sess, option_env!("CFG_VIRTUAL_RUST_SOURCE_BASE_DIR").map(Path::new)),
filter(sess, sess.opts.unstable_opts.simulate_remapped_rust_src_base.as_deref()),
];
debug!(
"try_to_translate_virtual_to_real(name={:?}): \
virtual_rust_source_base_dir={:?}, real_rust_source_base_dir={:?}",
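A self-contained sketch (invented paths) of the prefix translation described in the comment above: strip the virtual `/rustc/$hash` prefix and re-root the file under a real source checkout when one is configured.

use std::path::{Path, PathBuf};

fn translate_virtual_to_real(name: &Path, virtual_dir: &Path, real_dir: &Path) -> Option<PathBuf> {
    // Only paths under the virtual prefix are rewritten; everything else is left alone.
    name.strip_prefix(virtual_dir).ok().map(|rest| real_dir.join(rest))
}

fn main() {
    let translated = translate_virtual_to_real(
        Path::new("/rustc/1234abcd/library/core/src/lib.rs"),
        Path::new("/rustc/1234abcd"),
        Path::new("/home/user/rust"),
    );
    assert_eq!(
        translated,
        Some(PathBuf::from("/home/user/rust/library/core/src/lib.rs"))
    );
}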

View File

@ -48,7 +48,7 @@
#![feature(trusted_len)]
#![feature(type_alias_impl_trait)]
#![feature(strict_provenance)]
#![feature(associated_type_bounds)]
#![cfg_attr(bootstrap, feature(associated_type_bounds))]
#![feature(rustc_attrs)]
#![feature(control_flow_enum)]
#![feature(trait_upcasting)]

View File

@ -398,8 +398,22 @@ pub fn lint_level(
}
}
// Finally, run `decorate`.
decorate(&mut err);
// Finally, run `decorate`. `decorate` can call `trimmed_path_str` (directly or indirectly),
// so we need to make sure when we do call `decorate` that the diagnostic is eventually
// emitted or we'll get a `must_produce_diag` ICE.
//
// When is a diagnostic *eventually* emitted? Well, that is determined by 2 factors:
// 1. If the corresponding `rustc_errors::Level` is beyond warning, i.e. `ForceWarning(_)`
// or `Error`, then the diagnostic will be emitted regardless of CLI options.
// 2. If the corresponding `rustc_errors::Level` is warning, then that can be affected by
// `-A warnings` or `--cap-lints=xxx` on the command line, in which case the diagnostic
// will only be emitted if `can_emit_warnings` is true.
let skip = err_level == rustc_errors::Level::Warning && !sess.dcx().can_emit_warnings();
if !skip {
decorate(&mut err);
}
explain_lint_level_source(lint, level, src, &mut err);
err.emit()
}
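A minimal standalone model of the emission decision described in the comment above (a simplified stand-in for `rustc_errors::Level`, not the real type):

#[derive(PartialEq)]
enum Level {
    Warning,
    ForceWarning,
    Error,
}

// `decorate` is skipped only for plain warnings that the session cannot emit
// (e.g. under `-A warnings` or `--cap-lints`); force-warnings and errors are
// always decorated because their diagnostics are emitted regardless.
fn should_decorate(err_level: Level, can_emit_warnings: bool) -> bool {
    !(err_level == Level::Warning && !can_emit_warnings)
}

fn main() {
    assert!(!should_decorate(Level::Warning, false));
    assert!(should_decorate(Level::Warning, true));
    assert!(should_decorate(Level::ForceWarning, false));
    assert!(should_decorate(Level::Error, false));
}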

View File

@ -2,7 +2,7 @@ use std::fmt::{self, Debug, Display, Formatter};
use rustc_hir::def_id::DefId;
use rustc_session::RemapFileNameExt;
use rustc_span::Span;
use rustc_span::{Span, DUMMY_SP};
use rustc_target::abi::{HasDataLayout, Size};
use crate::mir::interpret::{alloc_range, AllocId, ConstAllocation, ErrorHandled, Scalar};
@ -273,7 +273,7 @@ impl<'tcx> Const<'tcx> {
self,
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
span: Option<Span>,
span: Span,
) -> Result<ConstValue<'tcx>, ErrorHandled> {
match self {
Const::Ty(c) => {
@ -293,7 +293,7 @@ impl<'tcx> Const<'tcx> {
/// Normalizes the constant to a value or an error if possible.
#[inline]
pub fn normalize(self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> Self {
match self.eval(tcx, param_env, None) {
match self.eval(tcx, param_env, DUMMY_SP) {
Ok(val) => Self::Val(val, self.ty()),
Err(ErrorHandled::Reported(guar, _span)) => {
Self::Ty(ty::Const::new_error(tcx, guar.into(), self.ty()))
@ -313,10 +313,10 @@ impl<'tcx> Const<'tcx> {
// Avoid the `valtree_to_const_val` query. Can only be done on primitive types that
// are valtree leaves, and *not* on references. (References should return the
// pointer here, which valtrees don't represent.)
let val = c.eval(tcx, param_env, None).ok()?;
let val = c.eval(tcx, param_env, DUMMY_SP).ok()?;
Some(val.unwrap_leaf().into())
}
_ => self.eval(tcx, param_env, None).ok()?.try_to_scalar(),
_ => self.eval(tcx, param_env, DUMMY_SP).ok()?.try_to_scalar(),
}
}

View File

@ -24,7 +24,7 @@ impl<'tcx> TyCtxt<'tcx> {
let instance = ty::Instance::new(def_id, args);
let cid = GlobalId { instance, promoted: None };
let param_env = self.param_env(def_id).with_reveal_all_normalized(self);
self.const_eval_global_id(param_env, cid, None)
self.const_eval_global_id(param_env, cid, DUMMY_SP)
}
/// Resolves and evaluates a constant.
///
@ -40,7 +40,7 @@ impl<'tcx> TyCtxt<'tcx> {
self,
param_env: ty::ParamEnv<'tcx>,
ct: mir::UnevaluatedConst<'tcx>,
span: Option<Span>,
span: Span,
) -> EvalToConstValueResult<'tcx> {
// Cannot resolve `Unevaluated` constants that contain inference
// variables. We reject those here since `resolve`
@ -73,7 +73,7 @@ impl<'tcx> TyCtxt<'tcx> {
self,
param_env: ty::ParamEnv<'tcx>,
ct: ty::UnevaluatedConst<'tcx>,
span: Option<Span>,
span: Span,
) -> EvalToValTreeResult<'tcx> {
// Cannot resolve `Unevaluated` constants that contain inference
// variables. We reject those here since `resolve`
@ -130,7 +130,7 @@ impl<'tcx> TyCtxt<'tcx> {
self,
param_env: ty::ParamEnv<'tcx>,
instance: ty::Instance<'tcx>,
span: Option<Span>,
span: Span,
) -> EvalToConstValueResult<'tcx> {
self.const_eval_global_id(param_env, GlobalId { instance, promoted: None }, span)
}
@ -141,12 +141,12 @@ impl<'tcx> TyCtxt<'tcx> {
self,
param_env: ty::ParamEnv<'tcx>,
cid: GlobalId<'tcx>,
span: Option<Span>,
span: Span,
) -> EvalToConstValueResult<'tcx> {
// Const-eval shouldn't depend on lifetimes at all, so we can erase them, which should
// improve caching of queries.
let inputs = self.erase_regions(param_env.with_reveal_all_normalized(self).and(cid));
if let Some(span) = span {
if !span.is_dummy() {
// The query doesn't know where it is being invoked, so we need to fix the span.
self.at(span).eval_to_const_value_raw(inputs).map_err(|e| e.with_span(span))
} else {
@ -160,13 +160,13 @@ impl<'tcx> TyCtxt<'tcx> {
self,
param_env: ty::ParamEnv<'tcx>,
cid: GlobalId<'tcx>,
span: Option<Span>,
span: Span,
) -> EvalToValTreeResult<'tcx> {
// Const-eval shouldn't depend on lifetimes at all, so we can erase them, which should
// improve caching of queries.
let inputs = self.erase_regions(param_env.with_reveal_all_normalized(self).and(cid));
debug!(?inputs);
if let Some(span) = span {
if !span.is_dummy() {
// The query doesn't know where it is being invoked, so we need to fix the span.
self.at(span).eval_to_valtree(inputs).map_err(|e| e.with_span(span))
} else {

View File

@ -164,6 +164,19 @@ pub struct ExternalConstraintsData<'tcx> {
// FIXME: implement this.
pub region_constraints: QueryRegionConstraints<'tcx>,
pub opaque_types: Vec<(ty::OpaqueTypeKey<'tcx>, Ty<'tcx>)>,
pub normalization_nested_goals: NestedNormalizationGoals<'tcx>,
}
#[derive(Debug, PartialEq, Eq, Clone, Hash, HashStable, Default, TypeVisitable, TypeFoldable)]
pub struct NestedNormalizationGoals<'tcx>(pub Vec<(GoalSource, Goal<'tcx, ty::Predicate<'tcx>>)>);
impl<'tcx> NestedNormalizationGoals<'tcx> {
pub fn empty() -> Self {
NestedNormalizationGoals(vec![])
}
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
}
// FIXME: Having to clone `region_constraints` for folding feels bad and
@ -183,6 +196,10 @@ impl<'tcx> TypeFoldable<TyCtxt<'tcx>> for ExternalConstraints<'tcx> {
.iter()
.map(|opaque| opaque.try_fold_with(folder))
.collect::<Result<_, F::Error>>()?,
normalization_nested_goals: self
.normalization_nested_goals
.clone()
.try_fold_with(folder)?,
}))
}
@ -190,6 +207,7 @@ impl<'tcx> TypeFoldable<TyCtxt<'tcx>> for ExternalConstraints<'tcx> {
TypeFolder::interner(folder).mk_external_constraints(ExternalConstraintsData {
region_constraints: self.region_constraints.clone().fold_with(folder),
opaque_types: self.opaque_types.iter().map(|opaque| opaque.fold_with(folder)).collect(),
normalization_nested_goals: self.normalization_nested_goals.clone().fold_with(folder),
})
}
}
@ -197,7 +215,8 @@ impl<'tcx> TypeFoldable<TyCtxt<'tcx>> for ExternalConstraints<'tcx> {
impl<'tcx> TypeVisitable<TyCtxt<'tcx>> for ExternalConstraints<'tcx> {
fn visit_with<V: TypeVisitor<TyCtxt<'tcx>>>(&self, visitor: &mut V) -> V::Result {
try_visit!(self.region_constraints.visit_with(visitor));
self.opaque_types.visit_with(visitor)
try_visit!(self.opaque_types.visit_with(visitor));
self.normalization_nested_goals.visit_with(visitor)
}
}
@ -239,7 +258,7 @@ impl<'tcx> TypeVisitable<TyCtxt<'tcx>> for PredefinedOpaques<'tcx> {
///
/// This is necessary as we treat nested goals differently depending on
/// their source.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, HashStable, TypeVisitable, TypeFoldable)]
pub enum GoalSource {
Misc,
/// We're proving a where-bound of an impl.
@ -256,12 +275,6 @@ pub enum GoalSource {
ImplWhereBound,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, HashStable)]
pub enum IsNormalizesToHack {
Yes,
No,
}
/// Possible ways the given goal can be proven.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum CandidateSource {

View File

@ -19,8 +19,8 @@
//! [canonicalized]: https://rustc-dev-guide.rust-lang.org/solve/canonicalization.html
use super::{
CandidateSource, Canonical, CanonicalInput, Certainty, Goal, GoalSource, IsNormalizesToHack,
NoSolution, QueryInput, QueryResult,
CandidateSource, Canonical, CanonicalInput, Certainty, Goal, GoalSource, NoSolution,
QueryInput, QueryResult,
};
use crate::{infer::canonical::CanonicalVarValues, ty};
use format::ProofTreeFormatter;
@ -50,7 +50,7 @@ pub type CanonicalState<'tcx, T> = Canonical<'tcx, State<'tcx, T>>;
#[derive(Eq, PartialEq)]
pub enum GoalEvaluationKind<'tcx> {
Root { orig_values: Vec<ty::GenericArg<'tcx>> },
Nested { is_normalizes_to_hack: IsNormalizesToHack },
Nested,
}
#[derive(Eq, PartialEq)]

View File

@ -55,10 +55,7 @@ impl<'a, 'b> ProofTreeFormatter<'a, 'b> {
pub(super) fn format_goal_evaluation(&mut self, eval: &GoalEvaluation<'_>) -> std::fmt::Result {
let goal_text = match eval.kind {
GoalEvaluationKind::Root { orig_values: _ } => "ROOT GOAL",
GoalEvaluationKind::Nested { is_normalizes_to_hack } => match is_normalizes_to_hack {
IsNormalizesToHack::No => "GOAL",
IsNormalizesToHack::Yes => "NORMALIZES-TO HACK GOAL",
},
GoalEvaluationKind::Nested => "GOAL",
};
write!(self.f, "{}: {:?}", goal_text, eval.uncanonicalized_goal)?;
self.nested(|this| this.format_canonical_goal_evaluation(&eval.evaluation))

View File

@ -335,7 +335,7 @@ impl<'tcx> Const<'tcx> {
self,
tcx: TyCtxt<'tcx>,
param_env: ParamEnv<'tcx>,
span: Option<Span>,
span: Span,
) -> Result<ValTree<'tcx>, ErrorHandled> {
assert!(!self.has_escaping_bound_vars(), "escaping vars in {self:?}");
match self.kind() {
@ -349,7 +349,7 @@ impl<'tcx> Const<'tcx> {
else {
// This can happen when we run on ill-typed code.
let e = tcx.dcx().span_delayed_bug(
span.unwrap_or(DUMMY_SP),
span,
"`ty::Const::eval` called on a non-valtree-compatible type",
);
return Err(e.into());
@ -362,14 +362,14 @@ impl<'tcx> Const<'tcx> {
| ConstKind::Infer(_)
| ConstKind::Bound(_, _)
| ConstKind::Placeholder(_)
| ConstKind::Expr(_) => Err(ErrorHandled::TooGeneric(span.unwrap_or(DUMMY_SP))),
| ConstKind::Expr(_) => Err(ErrorHandled::TooGeneric(span)),
}
}
/// Normalizes the constant to a value or an error if possible.
#[inline]
pub fn normalize(self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>) -> Self {
match self.eval(tcx, param_env, None) {
match self.eval(tcx, param_env, DUMMY_SP) {
Ok(val) => Self::new_value(tcx, val, self.ty()),
Err(ErrorHandled::Reported(r, _span)) => Self::new_error(tcx, r.into(), self.ty()),
Err(ErrorHandled::TooGeneric(_span)) => self,
@ -382,7 +382,7 @@ impl<'tcx> Const<'tcx> {
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
) -> Option<Scalar> {
self.eval(tcx, param_env, None).ok()?.try_to_scalar()
self.eval(tcx, param_env, DUMMY_SP).ok()?.try_to_scalar()
}
#[inline]

View File

@ -43,7 +43,9 @@ use rustc_data_structures::sync::{self, FreezeReadGuard, Lock, Lrc, WorkerLocal}
#[cfg(parallel_compiler)]
use rustc_data_structures::sync::{DynSend, DynSync};
use rustc_data_structures::unord::UnordSet;
use rustc_errors::{Diag, DiagCtxt, DiagMessage, ErrorGuaranteed, LintDiagnostic, MultiSpan};
use rustc_errors::{
Applicability, Diag, DiagCtxt, DiagMessage, ErrorGuaranteed, LintDiagnostic, MultiSpan,
};
use rustc_hir as hir;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LOCAL_CRATE};
@ -2174,6 +2176,45 @@ impl<'tcx> TyCtxt<'tcx> {
lint_level(self.sess, lint, level, src, Some(span.into()), msg, decorate);
}
/// Find the crate root and the appropriate span where `use` and outer attributes can be
/// inserted.
pub fn crate_level_attribute_injection_span(self, hir_id: HirId) -> Option<Span> {
for (_hir_id, node) in self.hir().parent_iter(hir_id) {
if let hir::Node::Crate(m) = node {
return Some(m.spans.inject_use_span.shrink_to_lo());
}
}
None
}
pub fn disabled_nightly_features<E: rustc_errors::EmissionGuarantee>(
self,
diag: &mut Diag<'_, E>,
hir_id: Option<HirId>,
features: impl IntoIterator<Item = (String, Symbol)>,
) {
if !self.sess.is_nightly_build() {
return;
}
let span = hir_id.and_then(|id| self.crate_level_attribute_injection_span(id));
for (desc, feature) in features {
// FIXME: make this string translatable
let msg =
format!("add `#![feature({feature})]` to the crate attributes to enable{desc}");
if let Some(span) = span {
diag.span_suggestion_verbose(
span,
msg,
format!("#![feature({feature})]\n"),
Applicability::MachineApplicable,
);
} else {
diag.help(msg);
}
}
}
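A self-contained sketch of what the new helper does, with plain strings standing in for real diagnostics and spans (the signature and types here are simplified, not rustc's):

fn nightly_feature_notes(
    is_nightly: bool,
    injection_point_known: bool,
    features: &[(&str, &str)], // (extra description, feature name)
) -> Vec<String> {
    // On stable there is nothing actionable to suggest.
    if !is_nightly {
        return Vec::new();
    }
    features
        .iter()
        .map(|(desc, feature)| {
            if injection_point_known {
                // Mirrors the machine-applicable crate-attribute suggestion.
                format!("suggestion: insert `#![feature({feature})]` at the crate root")
            } else {
                // Mirrors the plain help fallback.
                format!("help: add `#![feature({feature})]` to the crate attributes to enable{desc}")
            }
        })
        .collect()
}

fn main() {
    let notes = nightly_feature_notes(
        true,
        false,
        &[(" more complex and user defined types", "adt_const_params")],
    );
    assert_eq!(notes.len(), 1);
    assert!(notes[0].starts_with("help: add `#![feature(adt_const_params)]`"));
}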
/// Emit a lint from a lint struct (some type that implements `LintDiagnostic`, typically
/// generated by `#[derive(LintDiagnostic)]`).
#[track_caller]

View File

@ -774,7 +774,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let (current_root, parent_root) =
if self.tcx.sess.opts.unstable_opts.maximal_hir_to_mir_coverage {
// Some consumers of rustc need to map MIR locations back to HIR nodes. Currently
// the the only part of rustc that tracks MIR -> HIR is the
// the only part of rustc that tracks MIR -> HIR is the
// `SourceScopeLocalData::lint_root` field that tracks lint levels for MIR
// locations. Normally the number of source scopes is limited to the set of nodes
// with lint annotations. The -Zmaximal-hir-to-mir-coverage flag changes this

View File

@ -5,7 +5,7 @@
#![allow(rustc::diagnostic_outside_of_impl)]
#![allow(rustc::untranslatable_diagnostic)]
#![feature(assert_matches)]
#![feature(associated_type_bounds)]
#![cfg_attr(bootstrap, feature(associated_type_bounds))]
#![feature(box_patterns)]
#![feature(if_let_guard)]
#![feature(let_chains)]

View File

@ -938,9 +938,6 @@ fn report_non_exhaustive_match<'p, 'tcx>(
};
// In the case of an empty match, replace the '`_` not covered' diagnostic with something more
// informative.
let mut err;
let pattern;
let patterns_len;
if is_empty_match && !non_empty_enum {
return cx.tcx.dcx().emit_err(NonExhaustivePatternsTypeNotEmpty {
cx,
@ -948,33 +945,23 @@ fn report_non_exhaustive_match<'p, 'tcx>(
span: sp,
ty: scrut_ty,
});
} else {
// FIXME: migration of this diagnostic will require list support
let joined_patterns = joined_uncovered_patterns(cx, &witnesses);
err = create_e0004(
cx.tcx.sess,
sp,
format!("non-exhaustive patterns: {joined_patterns} not covered"),
);
err.span_label(
sp,
format!(
"pattern{} {} not covered",
rustc_errors::pluralize!(witnesses.len()),
joined_patterns
),
);
patterns_len = witnesses.len();
pattern = if witnesses.len() < 4 {
witnesses
.iter()
.map(|witness| cx.hoist_witness_pat(witness).to_string())
.collect::<Vec<String>>()
.join(" | ")
} else {
"_".to_string()
};
};
}
// FIXME: migration of this diagnostic will require list support
let joined_patterns = joined_uncovered_patterns(cx, &witnesses);
let mut err = create_e0004(
cx.tcx.sess,
sp,
format!("non-exhaustive patterns: {joined_patterns} not covered"),
);
err.span_label(
sp,
format!(
"pattern{} {} not covered",
rustc_errors::pluralize!(witnesses.len()),
joined_patterns
),
);
// Point at the definition of non-covered `enum` variants.
if let Some(AdtDefinedHere { adt_def_span, ty, variants }) =
@ -1021,6 +1008,23 @@ fn report_non_exhaustive_match<'p, 'tcx>(
}
}
// Whether we suggest the actual missing patterns or `_`.
let suggest_the_witnesses = witnesses.len() < 4;
let suggested_arm = if suggest_the_witnesses {
let pattern = witnesses
.iter()
.map(|witness| cx.hoist_witness_pat(witness).to_string())
.collect::<Vec<String>>()
.join(" | ");
if witnesses.iter().all(|p| p.is_never_pattern()) && cx.tcx.features().never_patterns {
// Arms with a never pattern don't take a body.
pattern
} else {
format!("{pattern} => todo!()")
}
} else {
format!("_ => todo!()")
};
let mut suggestion = None;
let sm = cx.tcx.sess.source_map();
match arms {
@ -1033,7 +1037,7 @@ fn report_non_exhaustive_match<'p, 'tcx>(
};
suggestion = Some((
sp.shrink_to_hi().with_hi(expr_span.hi()),
format!(" {{{indentation}{more}{pattern} => todo!(),{indentation}}}",),
format!(" {{{indentation}{more}{suggested_arm},{indentation}}}",),
));
}
[only] => {
@ -1059,7 +1063,7 @@ fn report_non_exhaustive_match<'p, 'tcx>(
};
suggestion = Some((
only.span.shrink_to_hi(),
format!("{comma}{pre_indentation}{pattern} => todo!()"),
format!("{comma}{pre_indentation}{suggested_arm}"),
));
}
[.., prev, last] => {
@ -1082,7 +1086,7 @@ fn report_non_exhaustive_match<'p, 'tcx>(
if let Some(spacing) = spacing {
suggestion = Some((
last.span.shrink_to_hi(),
format!("{comma}{spacing}{pattern} => todo!()"),
format!("{comma}{spacing}{suggested_arm}"),
));
}
}
@ -1093,13 +1097,13 @@ fn report_non_exhaustive_match<'p, 'tcx>(
let msg = format!(
"ensure that all possible cases are being handled by adding a match arm with a wildcard \
pattern{}{}",
if patterns_len > 1 && patterns_len < 4 && suggestion.is_some() {
if witnesses.len() > 1 && suggest_the_witnesses && suggestion.is_some() {
", a match arm with multiple or-patterns"
} else {
// we are either not suggesting anything, or suggesting `_`
""
},
match patterns_len {
match witnesses.len() {
// non-exhaustive enum case
0 if suggestion.is_some() => " as shown",
0 => "",

View File

@ -524,12 +524,12 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
// Prefer valtrees over opaque constants.
let const_value = self
.tcx
.const_eval_global_id_for_typeck(param_env_reveal_all, cid, Some(span))
.const_eval_global_id_for_typeck(param_env_reveal_all, cid, span)
.map(|val| match val {
Some(valtree) => mir::Const::Ty(ty::Const::new_value(self.tcx, valtree, ty)),
None => mir::Const::Val(
self.tcx
.const_eval_global_id(param_env_reveal_all, cid, Some(span))
.const_eval_global_id(param_env_reveal_all, cid, span)
.expect("const_eval_global_id_for_typeck should have already failed"),
ty,
),
@ -627,15 +627,14 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
// First try using a valtree in order to destructure the constant into a pattern.
// FIXME: replace "try to do a thing, then fall back to another thing"
// with something more principled, like a trait query checking whether this can be turned into a valtree.
if let Ok(Some(valtree)) =
self.tcx.const_eval_resolve_for_typeck(self.param_env, ct, Some(span))
if let Ok(Some(valtree)) = self.tcx.const_eval_resolve_for_typeck(self.param_env, ct, span)
{
let subpattern =
self.const_to_pat(Const::Ty(ty::Const::new_value(self.tcx, valtree, ty)), id, span);
PatKind::InlineConstant { subpattern, def: def_id }
} else {
// If that fails, convert it to an opaque constant pattern.
match tcx.const_eval_resolve(self.param_env, uneval, Some(span)) {
match tcx.const_eval_resolve(self.param_env, uneval, span) {
Ok(val) => self.const_to_pat(mir::Const::Val(val, ty), id, span).kind,
Err(ErrorHandled::TooGeneric(_)) => {
// If we land here it means the const can't be evaluated because it's `TooGeneric`.

View File

@ -24,7 +24,7 @@ fn may_contain_reference<'tcx>(ty: Ty<'tcx>, depth: u32, tcx: TyCtxt<'tcx>) -> b
| ty::Str
| ty::FnDef(..)
| ty::Never => false,
// References
// References and Boxes (`noalias` sources)
ty::Ref(..) => true,
ty::Adt(..) if ty.is_box() => true,
ty::Adt(adt, _) if Some(adt.did()) == tcx.lang_items().ptr_unique() => true,
@ -125,15 +125,39 @@ impl<'tcx> MirPass<'tcx> for AddRetag {
for i in (0..block_data.statements.len()).rev() {
let (retag_kind, place) = match block_data.statements[i].kind {
// Retag after assignments of reference type.
StatementKind::Assign(box (ref place, ref rvalue)) if needs_retag(place) => {
StatementKind::Assign(box (ref place, ref rvalue)) => {
let add_retag = match rvalue {
// Ptr-creating operations already do their own internal retagging, no
// need to also add a retag statement.
Rvalue::Ref(..) | Rvalue::AddressOf(..) => false,
_ => true,
// *Except* if we are deref'ing a Box, because those get desugared to directly working
// with the inner raw pointer! That's relevant for `AddressOf` as Miri otherwise makes it
// a NOP when the original pointer is already raw.
Rvalue::AddressOf(_mutbl, place) => {
// Using `is_box_global` here is a bit sketchy: if this code is
// generic over the allocator, we'll not add a retag! This is a hack
// to make Stacked Borrows compatible with custom allocator code.
// Long-term, we'll want to move to an aliasing model where "cast to
// raw pointer" is a complete NOP, and then this will no longer be
// an issue.
if place.is_indirect_first_projection()
&& body.local_decls[place.local].ty.is_box_global(tcx)
{
Some(RetagKind::Raw)
} else {
None
}
}
Rvalue::Ref(..) => None,
_ => {
if needs_retag(place) {
Some(RetagKind::Default)
} else {
None
}
}
};
if add_retag {
(RetagKind::Default, *place)
if let Some(kind) = add_retag {
(kind, *place)
} else {
continue;
}
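The rewrite above switches the retag decision from a boolean plus a single fixed kind to an `Option` of the kind to emit. A simplified sketch of that control-flow shape, using toy stand-ins rather than the real MIR types:

```rust
// Toy stand-ins for the MIR types used in the pass above.
#[derive(Debug, PartialEq)]
enum RetagKind {
    Default,
    Raw,
}

enum Rvalue {
    Ref,
    AddressOfBoxDeref, // address-of through a Box deref, as in the diff
    AddressOfOther,
    Other { needs_retag: bool },
}

// Returning `Option<RetagKind>` lets each arm decide both *whether* and
// *which kind of* retag to add, instead of a bool that always meant Default.
fn retag_for(rvalue: &Rvalue) -> Option<RetagKind> {
    match rvalue {
        Rvalue::Ref | Rvalue::AddressOfOther => None,
        Rvalue::AddressOfBoxDeref => Some(RetagKind::Raw),
        Rvalue::Other { needs_retag } => needs_retag.then_some(RetagKind::Default),
    }
}

fn main() {
    assert_eq!(retag_for(&Rvalue::AddressOfBoxDeref), Some(RetagKind::Raw));
    assert_eq!(retag_for(&Rvalue::Ref), None);
    assert_eq!(
        retag_for(&Rvalue::Other { needs_retag: true }),
        Some(RetagKind::Default)
    );
}
```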

View File

@ -1964,15 +1964,21 @@ fn check_must_not_suspend_ty<'tcx>(
debug!("Checking must_not_suspend for {}", ty);
match *ty.kind() {
ty::Adt(..) if ty.is_box() => {
let boxed_ty = ty.boxed_ty();
let descr_pre = &format!("{}boxed ", data.descr_pre);
ty::Adt(_, args) if ty.is_box() => {
let boxed_ty = args.type_at(0);
let allocator_ty = args.type_at(1);
check_must_not_suspend_ty(
tcx,
boxed_ty,
hir_id,
param_env,
SuspendCheckData { descr_pre, ..data },
SuspendCheckData { descr_pre: &format!("{}boxed ", data.descr_pre), ..data },
) || check_must_not_suspend_ty(
tcx,
allocator_ty,
hir_id,
param_env,
SuspendCheckData { descr_pre: &format!("{}allocator ", data.descr_pre), ..data },
)
}
ty::Adt(def, _) => check_must_not_suspend_def(tcx, def.did(), hir_id, data),

View File

@ -401,9 +401,8 @@ pub(super) fn extract_branch_mappings(
}
let (span, _) = unexpand_into_body_span_with_visible_macro(raw_span, body_span)?;
let bcb_from_marker = |marker: BlockMarkerId| {
Some(basic_coverage_blocks.bcb_from_bb(block_markers[marker]?)?)
};
let bcb_from_marker =
|marker: BlockMarkerId| basic_coverage_blocks.bcb_from_bb(block_markers[marker]?);
let true_bcb = bcb_from_marker(true_marker)?;
let false_bcb = bcb_from_marker(false_marker)?;
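The closure above previously wrapped an already-`Option` expression in `Some(...?)`; the `?` unwraps and the `Some` immediately rewraps, so the wrapper can be dropped. A tiny stand-alone illustration with made-up names:

```rust
// For an `Option`-returning lookup, `Some(lookup(..)?)` and `lookup(..)` are
// the same value; the `?`/`Some` pair cancels out.
fn bcb_from_bb(blocks: &[Option<u32>], bb: usize) -> Option<u32> {
    blocks.get(bb).copied().flatten()
}

fn bcb_from_marker(blocks: &[Option<u32>], marker: Option<usize>) -> Option<u32> {
    // Equivalent to `Some(bcb_from_bb(blocks, marker?)?)`, minus the
    // redundant re-wrapping removed in the diff above.
    bcb_from_bb(blocks, marker?)
}

fn main() {
    let blocks = [Some(3), None];
    assert_eq!(bcb_from_marker(&blocks, Some(0)), Some(3));
    assert_eq!(bcb_from_marker(&blocks, Some(1)), None);
    assert_eq!(bcb_from_marker(&blocks, None), None);
}
```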

View File

@ -394,7 +394,7 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
}
Operand::Constant(box constant) => {
if let Ok(constant) =
self.ecx.eval_mir_constant(&constant.const_, Some(constant.span), None)
self.ecx.eval_mir_constant(&constant.const_, constant.span, None)
{
self.assign_constant(state, place, constant, &[]);
}

View File

@ -367,7 +367,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
Repeat(..) => return None,
Constant { ref value, disambiguator: _ } => {
self.ecx.eval_mir_constant(value, None, None).ok()?
self.ecx.eval_mir_constant(value, DUMMY_SP, None).ok()?
}
Aggregate(kind, variant, ref fields) => {
let fields = fields

View File

@ -417,7 +417,7 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
// If we expect `lhs ?= A`, we have an opportunity if we assume `constant == A`.
Operand::Constant(constant) => {
let constant =
self.ecx.eval_mir_constant(&constant.const_, Some(constant.span), None).ok()?;
self.ecx.eval_mir_constant(&constant.const_, constant.span, None).ok()?;
self.process_constant(bb, lhs, constant, state);
}
// Transfer the conditions on the copied rhs.

View File

@ -261,7 +261,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
// manually normalized.
let val = self.tcx.try_normalize_erasing_regions(self.param_env, c.const_).ok()?;
self.use_ecx(|this| this.ecx.eval_mir_constant(&val, Some(c.span), None))?
self.use_ecx(|this| this.ecx.eval_mir_constant(&val, c.span, None))?
.as_mplace_or_imm()
.right()
}

View File

@ -507,7 +507,7 @@ fn run_analysis_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
let passes: &[&dyn MirPass<'tcx>] = &[
&cleanup_post_borrowck::CleanupPostBorrowck,
&remove_noop_landing_pads::RemoveNoopLandingPads,
&simplify::SimplifyCfg::EarlyOpt,
&simplify::SimplifyCfg::PostAnalysis,
&deref_separator::Derefer,
];
@ -529,11 +529,11 @@ fn run_runtime_lowering_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
// AddMovesForPackedDrops needs to run after drop
// elaboration.
&add_moves_for_packed_drops::AddMovesForPackedDrops,
// `AddRetag` needs to run after `ElaborateDrops`. Otherwise it should run fairly late,
// `AddRetag` needs to run after `ElaborateDrops` but before `ElaborateBoxDerefs`. Otherwise it should run fairly late,
// but before optimizations begin.
&add_retag::AddRetag,
&elaborate_box_derefs::ElaborateBoxDerefs,
&coroutine::StateTransform,
&add_retag::AddRetag,
&Lint(known_panics_lint::KnownPanicsLint),
];
pm::run_passes_no_validate(tcx, body, passes, Some(MirPhase::Runtime(RuntimePhase::Initial)));
@ -544,7 +544,7 @@ fn run_runtime_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
let passes: &[&dyn MirPass<'tcx>] = &[
&lower_intrinsics::LowerIntrinsics,
&remove_place_mention::RemovePlaceMention,
&simplify::SimplifyCfg::ElaborateDrops,
&simplify::SimplifyCfg::PreOptimizations,
];
pm::run_passes(tcx, body, passes, Some(MirPhase::Runtime(RuntimePhase::PostCleanup)));

View File

@ -37,8 +37,11 @@ pub enum SimplifyCfg {
Initial,
PromoteConsts,
RemoveFalseEdges,
EarlyOpt,
ElaborateDrops,
/// Runs at the beginning of "analysis to runtime" lowering, *before* drop elaboration.
PostAnalysis,
/// Runs at the end of "analysis to runtime" lowering, *after* drop elaboration.
/// This is before the main optimization passes on runtime MIR kick in.
PreOptimizations,
Final,
MakeShim,
AfterUninhabitedEnumBranching,
@ -50,8 +53,8 @@ impl SimplifyCfg {
SimplifyCfg::Initial => "SimplifyCfg-initial",
SimplifyCfg::PromoteConsts => "SimplifyCfg-promote-consts",
SimplifyCfg::RemoveFalseEdges => "SimplifyCfg-remove-false-edges",
SimplifyCfg::EarlyOpt => "SimplifyCfg-early-opt",
SimplifyCfg::ElaborateDrops => "SimplifyCfg-elaborate-drops",
SimplifyCfg::PostAnalysis => "SimplifyCfg-post-analysis",
SimplifyCfg::PreOptimizations => "SimplifyCfg-pre-optimizations",
SimplifyCfg::Final => "SimplifyCfg-final",
SimplifyCfg::MakeShim => "SimplifyCfg-make_shim",
SimplifyCfg::AfterUninhabitedEnumBranching => {

View File

@ -828,7 +828,7 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirUsedCollector<'a, 'tcx> {
// a codegen-time error). rustc stops after collection if there was an error, so this
// ensures codegen never has to worry about failing consts.
// (codegen relies on this and ICEs will happen if this is violated.)
let val = match const_.eval(self.tcx, param_env, Some(constant.span)) {
let val = match const_.eval(self.tcx, param_env, constant.span) {
Ok(v) => v,
Err(ErrorHandled::TooGeneric(..)) => span_bug!(
self.body.source_info(location).span,
@ -1433,7 +1433,7 @@ fn create_mono_items_for_default_impls<'tcx>(
}
}
/// Scans the CTFE alloc in order to find function calls, closures, and drop-glue.
/// Scans the CTFE alloc in order to find function pointers and statics that must be monomorphized.
fn collect_alloc<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut MonoItems<'tcx>) {
match tcx.global_alloc(alloc_id) {
GlobalAlloc::Static(def_id) => {
@ -1446,9 +1446,13 @@ fn collect_alloc<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut MonoIt
}
GlobalAlloc::Memory(alloc) => {
trace!("collecting {:?} with {:#?}", alloc_id, alloc);
for &prov in alloc.inner().provenance().ptrs().values() {
rustc_data_structures::stack::ensure_sufficient_stack(|| {
collect_alloc(tcx, prov.alloc_id(), output);
let ptrs = alloc.inner().provenance().ptrs();
// avoid `ensure_sufficient_stack` in the common case of "no pointers"
if !ptrs.is_empty() {
rustc_data_structures::stack::ensure_sufficient_stack(move || {
for &prov in ptrs.values() {
collect_alloc(tcx, prov.alloc_id(), output);
}
});
}
}
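The hunk above hoists the whole loop inside a single `ensure_sufficient_stack` call and skips it entirely when the allocation has no pointers. A rough sketch of the pattern, with a local stand-in for the helper (the real one lives in `rustc_data_structures::stack`):

```rust
// Local stand-in: the real helper grows the stack before running the closure
// so deep recursion does not overflow; functionally it just runs `f`.
fn ensure_sufficient_stack<R>(f: impl FnOnce() -> R) -> R {
    f()
}

fn collect_ptrs(ptrs: &[u32], output: &mut Vec<u32>) {
    // Guarding on emptiness avoids the stack-growth check in the common
    // "no pointers" case, mirroring the `!ptrs.is_empty()` test above.
    if !ptrs.is_empty() {
        ensure_sufficient_stack(|| {
            for &p in ptrs {
                output.push(p);
            }
        });
    }
}

fn main() {
    let mut out = Vec::new();
    collect_ptrs(&[], &mut out);
    collect_ptrs(&[1, 2], &mut out);
    assert_eq!(out, [1, 2]);
}
```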

View File

@ -449,6 +449,7 @@ impl<'a> Parser<'a> {
parser
}
#[inline]
pub fn recovery(mut self, recovery: Recovery) -> Self {
self.recovery = recovery;
self
@ -461,6 +462,7 @@ impl<'a> Parser<'a> {
///
/// Technically, this only needs to restrict eager recovery by doing lookahead at more tokens.
/// But making the distinction is very subtle, and simply forbidding all recovery is a lot simpler to uphold.
#[inline]
fn may_recover(&self) -> bool {
matches!(self.recovery, Recovery::Allowed)
}
@ -542,6 +544,7 @@ impl<'a> Parser<'a> {
///
/// This method will automatically add `tok` to `expected_tokens` if `tok` is not
/// encountered.
#[inline]
fn check(&mut self, tok: &TokenKind) -> bool {
let is_present = self.token == *tok;
if !is_present {
@ -550,6 +553,7 @@ impl<'a> Parser<'a> {
is_present
}
#[inline]
fn check_noexpect(&self, tok: &TokenKind) -> bool {
self.token == *tok
}
@ -558,6 +562,7 @@ impl<'a> Parser<'a> {
///
/// the main purpose of this function is to reduce the cluttering of the suggestions list
/// which using the normal eat method could introduce in some cases.
#[inline]
pub fn eat_noexpect(&mut self, tok: &TokenKind) -> bool {
let is_present = self.check_noexpect(tok);
if is_present {
@ -567,6 +572,7 @@ impl<'a> Parser<'a> {
}
/// Consumes a token 'tok' if it exists. Returns whether the given token was present.
#[inline]
pub fn eat(&mut self, tok: &TokenKind) -> bool {
let is_present = self.check(tok);
if is_present {
@ -577,11 +583,13 @@ impl<'a> Parser<'a> {
/// If the next token is the given keyword, returns `true` without eating it.
/// An expectation is also added for diagnostics purposes.
#[inline]
fn check_keyword(&mut self, kw: Symbol) -> bool {
self.expected_tokens.push(TokenType::Keyword(kw));
self.token.is_keyword(kw)
}
#[inline]
fn check_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
if self.check_keyword(kw) {
return true;
@ -600,6 +608,7 @@ impl<'a> Parser<'a> {
/// If the next token is the given keyword, eats it and returns `true`.
/// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
// Public for rustfmt usage.
#[inline]
pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
if self.check_keyword(kw) {
self.bump();
@ -612,6 +621,7 @@ impl<'a> Parser<'a> {
/// Eats a keyword, optionally ignoring the case.
/// If the case differs (and is ignored) an error is issued.
/// This is useful for recovery.
#[inline]
fn eat_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
if self.eat_keyword(kw) {
return true;
@ -629,6 +639,7 @@ impl<'a> Parser<'a> {
false
}
#[inline]
fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
if self.token.is_keyword(kw) {
self.bump();
@ -650,6 +661,7 @@ impl<'a> Parser<'a> {
self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
}
#[inline]
fn check_or_expected(&mut self, ok: bool, typ: TokenType) -> bool {
if ok {
true
@ -697,6 +709,7 @@ impl<'a> Parser<'a> {
/// Checks to see if the next token is either `+` or `+=`.
/// Otherwise returns `false`.
#[inline]
fn check_plus(&mut self) -> bool {
self.check_or_expected(
self.token.is_like_plus(),

View File

@ -731,8 +731,6 @@ impl<'a> Parser<'a> {
&& matches!(args.output, ast::FnRetTy::Default(..))
{
self.psess.gated_spans.gate(sym::return_type_notation, span);
} else {
self.psess.gated_spans.gate(sym::associated_type_bounds, span);
}
}
let constraint =

View File

@ -155,16 +155,11 @@ impl<'tcx> CheckConstVisitor<'tcx> {
//
// FIXME(ecstaticmorse): Maybe this could be incorporated into `feature_err`? This
// is a pretty narrow case, however.
if tcx.sess.is_nightly_build() {
for gate in missing_secondary {
// FIXME: make this translatable
#[allow(rustc::diagnostic_outside_of_impl)]
#[allow(rustc::untranslatable_diagnostic)]
err.help(format!(
"add `#![feature({gate})]` to the crate attributes to enable"
));
}
}
tcx.disabled_nightly_features(
&mut err,
def_id.map(|id| tcx.local_def_id_to_hir_id(id)),
missing_secondary.into_iter().map(|gate| (String::new(), *gate)),
);
err.emit();
}

View File

@ -7,7 +7,6 @@ use rustc_hir::{ItemId, Node, CRATE_HIR_ID};
use rustc_middle::query::Providers;
use rustc_middle::ty::TyCtxt;
use rustc_session::config::{sigpipe, CrateType, EntryFnType};
use rustc_session::parse::feature_err;
use rustc_span::symbol::sym;
use rustc_span::{Span, Symbol};
@ -133,16 +132,6 @@ fn configure_main(tcx: TyCtxt<'_>, visitor: &EntryContext<'_>) -> Option<(DefId,
return None;
}
if main_def.is_import && !tcx.features().imported_main {
let span = main_def.span;
feature_err(
&tcx.sess,
sym::imported_main,
span,
"using an imported function as entry point `main` is experimental",
)
.emit();
}
return Some((def_id, EntryFnType::Main { sigpipe: sigpipe(tcx, def_id) }));
}
no_main_err(tcx, visitor);

View File

@ -678,15 +678,19 @@ pub enum Constructor<Cx: PatCx> {
Or,
/// Wildcard pattern.
Wildcard,
/// Never pattern. Only used in `WitnessPat`. An actual never pattern should be lowered as
/// `Wildcard`.
Never,
/// Fake extra constructor for enums that aren't allowed to be matched exhaustively. Also used
/// for those types for which we cannot list constructors explicitly, like `f64` and `str`.
/// for those types for which we cannot list constructors explicitly, like `f64` and `str`. Only
/// used in `WitnessPat`.
NonExhaustive,
/// Fake extra constructor for variants that should not be mentioned in diagnostics.
/// We use this for variants behind an unstable gate as well as
/// `#[doc(hidden)]` ones.
/// Fake extra constructor for variants that should not be mentioned in diagnostics. We use this
/// for variants behind an unstable gate as well as `#[doc(hidden)]` ones. Only used in
/// `WitnessPat`.
Hidden,
/// Fake extra constructor for constructors that are not seen in the matrix, as explained at the
/// top of the file.
/// top of the file. Only used for specialization.
Missing,
/// Fake extra constructor that indicates an empty field that is private. When we encounter one
/// we skip the column entirely so we don't observe its emptiness. Only used for specialization.
@ -708,6 +712,7 @@ impl<Cx: PatCx> Clone for Constructor<Cx> {
Constructor::Str(value) => Constructor::Str(value.clone()),
Constructor::Opaque(inner) => Constructor::Opaque(inner.clone()),
Constructor::Or => Constructor::Or,
Constructor::Never => Constructor::Never,
Constructor::Wildcard => Constructor::Wildcard,
Constructor::NonExhaustive => Constructor::NonExhaustive,
Constructor::Hidden => Constructor::Hidden,
@ -1040,10 +1045,32 @@ impl<Cx: PatCx> ConstructorSet<Cx> {
// In a `MaybeInvalid` place even an empty pattern may be reachable. We therefore
// add a dummy empty constructor here, which will be ignored if the place is
// `ValidOnly`.
missing_empty.push(NonExhaustive);
missing_empty.push(Never);
}
}
SplitConstructorSet { present, missing, missing_empty }
}
/// Whether this set only contains empty constructors.
pub(crate) fn all_empty(&self) -> bool {
match self {
ConstructorSet::Bool
| ConstructorSet::Integers { .. }
| ConstructorSet::Ref
| ConstructorSet::Union
| ConstructorSet::Unlistable => false,
ConstructorSet::NoConstructors => true,
ConstructorSet::Struct { empty } => *empty,
ConstructorSet::Variants { variants, non_exhaustive } => {
!*non_exhaustive
&& variants
.iter()
.all(|visibility| matches!(visibility, VariantVisibility::Empty))
}
ConstructorSet::Slice { array_len, subtype_is_empty } => {
*subtype_is_empty && matches!(array_len, Some(1..))
}
}
}
}

View File

@ -208,6 +208,7 @@ impl<Cx: PatCx> fmt::Debug for DeconstructedPat<Cx> {
}
Ok(())
}
Never => write!(f, "!"),
Wildcard | Missing | NonExhaustive | Hidden | PrivateUninhabited => {
write!(f, "_ : {:?}", pat.ty())
}
@ -311,18 +312,24 @@ impl<Cx: PatCx> WitnessPat<Cx> {
pub(crate) fn new(ctor: Constructor<Cx>, fields: Vec<Self>, ty: Cx::Ty) -> Self {
Self { ctor, fields, ty }
}
pub(crate) fn wildcard(ty: Cx::Ty) -> Self {
Self::new(Wildcard, Vec::new(), ty)
/// Create a wildcard pattern for this type. If the type is empty, we create a `!` pattern.
pub(crate) fn wildcard(cx: &Cx, ty: Cx::Ty) -> Self {
let is_empty = cx.ctors_for_ty(&ty).is_ok_and(|ctors| ctors.all_empty());
let ctor = if is_empty { Never } else { Wildcard };
Self::new(ctor, Vec::new(), ty)
}
/// Construct a pattern that matches everything that starts with this constructor.
/// For example, if `ctor` is a `Constructor::Variant` for `Option::Some`, we get the pattern
/// `Some(_)`.
pub(crate) fn wild_from_ctor(cx: &Cx, ctor: Constructor<Cx>, ty: Cx::Ty) -> Self {
if matches!(ctor, Wildcard) {
return Self::wildcard(cx, ty);
}
let fields = cx
.ctor_sub_tys(&ctor, &ty)
.filter(|(_, PrivateUninhabitedField(skip))| !skip)
.map(|(ty, _)| Self::wildcard(ty))
.map(|(ty, _)| Self::wildcard(cx, ty))
.collect();
Self::new(ctor, fields, ty)
}
@ -334,6 +341,14 @@ impl<Cx: PatCx> WitnessPat<Cx> {
&self.ty
}
pub fn is_never_pattern(&self) -> bool {
match self.ctor() {
Never => true,
Or => self.fields.iter().all(|p| p.is_never_pattern()),
_ => self.fields.iter().any(|p| p.is_never_pattern()),
}
}
pub fn iter_fields(&self) -> impl Iterator<Item = &WitnessPat<Cx>> {
self.fields.iter()
}
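A simplified, self-contained sketch of the `is_never_pattern` recursion added above; the types here are toy stand-ins, not the real `WitnessPat`/`Constructor`:

```rust
// Simplified stand-ins for the pattern constructors used above.
enum Ctor {
    Never,
    Or,
    Other,
}

struct Pat {
    ctor: Ctor,
    fields: Vec<Pat>,
}

impl Pat {
    // Mirrors the logic in the diff: an or-pattern is a never pattern only if
    // *all* alternatives are; any other constructor is one if *any* field is.
    fn is_never_pattern(&self) -> bool {
        match self.ctor {
            Ctor::Never => true,
            Ctor::Or => self.fields.iter().all(|p| p.is_never_pattern()),
            Ctor::Other => self.fields.iter().any(|p| p.is_never_pattern()),
        }
    }
}

fn main() {
    let never = Pat { ctor: Ctor::Never, fields: vec![] };
    let wild = Pat { ctor: Ctor::Other, fields: vec![] };

    // One non-never alternative means the or-pattern is not a never pattern.
    let mixed_or = Pat { ctor: Ctor::Or, fields: vec![never, wild] };
    assert!(!mixed_or.is_never_pattern());

    let all_never = Pat {
        ctor: Ctor::Or,
        fields: vec![Pat { ctor: Ctor::Never, fields: vec![] }],
    };
    assert!(all_never.is_never_pattern());
}
```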

View File

@ -247,7 +247,7 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> {
_ => bug!("bad slice pattern {:?} {:?}", ctor, ty),
},
Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) | Opaque(..)
| NonExhaustive | Hidden | Missing | PrivateUninhabited | Wildcard => &[],
| Never | NonExhaustive | Hidden | Missing | PrivateUninhabited | Wildcard => &[],
Or => {
bug!("called `Fields::wildcards` on an `Or` ctor")
}
@ -275,7 +275,7 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> {
Ref => 1,
Slice(slice) => slice.arity(),
Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) | Opaque(..)
| NonExhaustive | Hidden | Missing | PrivateUninhabited | Wildcard => 0,
| Never | NonExhaustive | Hidden | Missing | PrivateUninhabited | Wildcard => 0,
Or => bug!("The `Or` constructor doesn't have a fixed arity"),
}
}
@ -824,7 +824,8 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> {
}
}
&Str(value) => PatKind::Constant { value },
Wildcard | NonExhaustive | Hidden | PrivateUninhabited => PatKind::Wild,
Never if self.tcx.features().never_patterns => PatKind::Never,
Never | Wildcard | NonExhaustive | Hidden | PrivateUninhabited => PatKind::Wild,
Missing { .. } => bug!(
"trying to convert a `Missing` constructor into a `Pat`; this is probably a bug,
`Missing` should have been processed in `apply_constructors`"

View File

@ -8,7 +8,7 @@
#![doc(rust_logo)]
#![allow(internal_features)]
#![feature(rustdoc_internals)]
#![feature(associated_type_bounds)]
#![cfg_attr(bootstrap, feature(associated_type_bounds))]
#![feature(const_option)]
#![feature(core_intrinsics)]
#![feature(generic_nonzero)]

View File

@ -24,6 +24,9 @@ session_feature_diagnostic_for_issue =
session_feature_diagnostic_help =
add `#![feature({$feature})]` to the crate attributes to enable
session_feature_diagnostic_suggestion =
add `#![feature({$feature})]` to the crate attributes to enable
session_feature_suggest_upgrade_compiler =
this compiler was built on {$date}; consider upgrading it if it is out of date

View File

@ -144,18 +144,12 @@ pub enum InstrumentCoverage {
}
/// Individual flag values controlled by `-Z coverage-options`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub struct CoverageOptions {
/// Add branch coverage instrumentation.
pub branch: bool,
}
impl Default for CoverageOptions {
fn default() -> Self {
Self { branch: false }
}
}
/// Settings for `-Z instrument-xray` flag.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Hash)]
pub struct InstrumentXRay {
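The `CoverageOptions` change above replaces a hand-written `Default` impl with `#[derive(Default)]`. A minimal sketch of the equivalence on a hypothetical struct:

```rust
// Hypothetical stand-in for CoverageOptions: deriving Default yields the same
// all-false defaults the manual impl used to spell out.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
struct Options {
    branch: bool,
}

fn main() {
    // `bool::default()` is `false`, so the derived impl matches the old
    // `fn default() -> Self { Self { branch: false } }`.
    assert_eq!(Options::default(), Options { branch: false });
}
```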

View File

@ -54,6 +54,18 @@ pub struct FeatureDiagnosticHelp {
pub feature: Symbol,
}
#[derive(Subdiagnostic)]
#[suggestion(
session_feature_diagnostic_suggestion,
applicability = "maybe-incorrect",
code = "#![feature({feature})]\n"
)]
pub struct FeatureDiagnosticSuggestion {
pub feature: Symbol,
#[primary_span]
pub span: Span,
}
#[derive(Subdiagnostic)]
#[help(session_cli_feature_diagnostic_help)]
pub struct CliFeatureDiagnosticHelp {

View File

@ -3,8 +3,8 @@
use crate::config::{Cfg, CheckCfg};
use crate::errors::{
CliFeatureDiagnosticHelp, FeatureDiagnosticForIssue, FeatureDiagnosticHelp, FeatureGateError,
SuggestUpgradeCompiler,
CliFeatureDiagnosticHelp, FeatureDiagnosticForIssue, FeatureDiagnosticHelp,
FeatureDiagnosticSuggestion, FeatureGateError, SuggestUpgradeCompiler,
};
use crate::lint::{
builtin::UNSTABLE_SYNTAX_PRE_EXPANSION, BufferedEarlyLint, BuiltinLintDiag, Lint, LintId,
@ -112,7 +112,7 @@ pub fn feature_err_issue(
}
let mut err = sess.psess.dcx.create_err(FeatureGateError { span, explain: explain.into() });
add_feature_diagnostics_for_issue(&mut err, sess, feature, issue, false);
add_feature_diagnostics_for_issue(&mut err, sess, feature, issue, false, None);
err
}
@ -141,7 +141,7 @@ pub fn feature_warn_issue(
explain: &'static str,
) {
let mut err = sess.psess.dcx.struct_span_warn(span, explain);
add_feature_diagnostics_for_issue(&mut err, sess, feature, issue, false);
add_feature_diagnostics_for_issue(&mut err, sess, feature, issue, false, None);
// Decorate this as a future-incompatibility lint as in rustc_middle::lint::lint_level
let lint = UNSTABLE_SYNTAX_PRE_EXPANSION;
@ -160,7 +160,7 @@ pub fn add_feature_diagnostics<G: EmissionGuarantee>(
sess: &Session,
feature: Symbol,
) {
add_feature_diagnostics_for_issue(err, sess, feature, GateIssue::Language, false);
add_feature_diagnostics_for_issue(err, sess, feature, GateIssue::Language, false, None);
}
/// Adds the diagnostics for a feature to an existing error.
@ -175,6 +175,7 @@ pub fn add_feature_diagnostics_for_issue<G: EmissionGuarantee>(
feature: Symbol,
issue: GateIssue,
feature_from_cli: bool,
inject_span: Option<Span>,
) {
if let Some(n) = find_feature_issue(feature, issue) {
err.subdiagnostic(sess.dcx(), FeatureDiagnosticForIssue { n });
@ -184,6 +185,8 @@ pub fn add_feature_diagnostics_for_issue<G: EmissionGuarantee>(
if sess.psess.unstable_features.is_nightly_build() {
if feature_from_cli {
err.subdiagnostic(sess.dcx(), CliFeatureDiagnosticHelp { feature });
} else if let Some(span) = inject_span {
err.subdiagnostic(sess.dcx(), FeatureDiagnosticSuggestion { feature, span });
} else {
err.subdiagnostic(sess.dcx(), FeatureDiagnosticHelp { feature });
}

View File

@ -56,7 +56,7 @@ impl<'tcx> MutVisitor<'tcx> for BodyBuilder<'tcx> {
fn visit_constant(&mut self, constant: &mut mir::ConstOperand<'tcx>, location: mir::Location) {
let const_ = self.monomorphize(constant.const_);
let val = match const_.eval(self.tcx, ty::ParamEnv::reveal_all(), Some(constant.span)) {
let val = match const_.eval(self.tcx, ty::ParamEnv::reveal_all(), constant.span) {
Ok(v) => v,
Err(mir::interpret::ErrorHandled::Reported(..)) => return,
Err(mir::interpret::ErrorHandled::TooGeneric(..)) => {

View File

@ -566,7 +566,7 @@ impl<'tcx> Context for TablesWrapper<'tcx> {
let result = tcx.const_eval_instance(
ParamEnv::reveal_all(),
instance,
Some(tcx.def_span(instance.def_id())),
tcx.def_span(instance.def_id()),
);
result
.map(|const_val| {

View File

@ -1463,7 +1463,6 @@ symbols! {
residual,
result,
resume,
retag_box_to_raw,
return_position_impl_trait_in_trait,
return_type_notation,
rhs,

View File

@ -17,7 +17,7 @@
#![allow(rustc::diagnostic_outside_of_impl)]
#![allow(rustc::untranslatable_diagnostic)]
#![feature(assert_matches)]
#![feature(associated_type_bounds)]
#![cfg_attr(bootstrap, feature(associated_type_bounds))]
#![feature(associated_type_defaults)]
#![feature(box_patterns)]
#![feature(control_flow_enum)]

View File

@ -21,8 +21,9 @@
//! However, if `?fresh_var` ends up getting equated to another type, we retry the
//! `NormalizesTo` goal, at which point the opaque is actually defined.
use super::{EvalCtxt, GoalSource};
use super::EvalCtxt;
use rustc_infer::traits::query::NoSolution;
use rustc_infer::traits::solve::GoalSource;
use rustc_middle::traits::solve::{Certainty, Goal, QueryResult};
use rustc_middle::ty::{self, Ty};
@ -121,10 +122,11 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
ty::TermKind::Const(_) => {
if let Some(alias) = term.to_alias_ty(self.tcx()) {
let term = self.next_term_infer_of_kind(term);
self.add_goal(
GoalSource::Misc,
Goal::new(self.tcx(), param_env, ty::NormalizesTo { alias, term }),
);
self.add_normalizes_to_goal(Goal::new(
self.tcx(),
param_env,
ty::NormalizesTo { alias, term },
));
self.try_evaluate_added_goals()?;
Ok(Some(self.resolve_vars_if_possible(term)))
} else {
@ -145,18 +147,25 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
return None;
}
let ty::Alias(_, alias) = *ty.kind() else {
let ty::Alias(kind, alias) = *ty.kind() else {
return Some(ty);
};
match self.commit_if_ok(|this| {
let tcx = this.tcx();
let normalized_ty = this.next_ty_infer();
let normalizes_to_goal = Goal::new(
this.tcx(),
param_env,
ty::NormalizesTo { alias, term: normalized_ty.into() },
);
this.add_goal(GoalSource::Misc, normalizes_to_goal);
let normalizes_to = ty::NormalizesTo { alias, term: normalized_ty.into() };
match kind {
ty::AliasKind::Opaque => {
// HACK: Unlike for associated types, `normalizes-to` for opaques
// is currently not treated as a function. We do not erase the
// expected term.
this.add_goal(GoalSource::Misc, Goal::new(tcx, param_env, normalizes_to));
}
ty::AliasKind::Projection | ty::AliasKind::Inherent | ty::AliasKind::Weak => {
this.add_normalizes_to_goal(Goal::new(tcx, param_env, normalizes_to))
}
}
this.try_evaluate_added_goals()?;
Ok(this.resolve_vars_if_possible(normalized_ty))
}) {

View File

@ -9,6 +9,7 @@
//!
//! [c]: https://rustc-dev-guide.rust-lang.org/solve/canonicalization.html
use super::{CanonicalInput, Certainty, EvalCtxt, Goal};
use crate::solve::eval_ctxt::NestedGoals;
use crate::solve::{
inspect, response_no_constraints_raw, CanonicalResponse, QueryResult, Response,
};
@ -19,6 +20,7 @@ use rustc_infer::infer::canonical::CanonicalVarValues;
use rustc_infer::infer::canonical::{CanonicalExt, QueryRegionConstraints};
use rustc_infer::infer::resolve::EagerResolver;
use rustc_infer::infer::{InferCtxt, InferOk};
use rustc_infer::traits::solve::NestedNormalizationGoals;
use rustc_middle::infer::canonical::Canonical;
use rustc_middle::traits::query::NoSolution;
use rustc_middle::traits::solve::{
@ -28,6 +30,7 @@ use rustc_middle::traits::ObligationCause;
use rustc_middle::ty::{self, BoundVar, GenericArgKind, Ty, TyCtxt, TypeFoldable};
use rustc_next_trait_solver::canonicalizer::{CanonicalizeMode, Canonicalizer};
use rustc_span::DUMMY_SP;
use std::assert_matches::assert_matches;
use std::iter;
use std::ops::Deref;
@ -93,13 +96,31 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
previous call to `try_evaluate_added_goals!`"
);
let certainty = certainty.unify_with(goals_certainty);
let var_values = self.var_values;
let external_constraints = self.compute_external_query_constraints()?;
// When normalizing, we've replaced the expected term with an unconstrained
// inference variable. This means that we dropped information which could
// have been important. We handle this by instead returning the nested goals
// to the caller, where they are then handled.
//
// As we return all ambiguous nested goals, we can ignore the certainty returned
// by `try_evaluate_added_goals()`.
let (certainty, normalization_nested_goals) = if self.is_normalizes_to_goal {
let NestedGoals { normalizes_to_goals, goals } = std::mem::take(&mut self.nested_goals);
if cfg!(debug_assertions) {
assert!(normalizes_to_goals.is_empty());
if goals.is_empty() {
assert_matches!(goals_certainty, Certainty::Yes);
}
}
(certainty, NestedNormalizationGoals(goals))
} else {
let certainty = certainty.unify_with(goals_certainty);
(certainty, NestedNormalizationGoals::empty())
};
let external_constraints =
self.compute_external_query_constraints(normalization_nested_goals)?;
let (var_values, mut external_constraints) =
(var_values, external_constraints).fold_with(&mut EagerResolver::new(self.infcx));
(self.var_values, external_constraints).fold_with(&mut EagerResolver::new(self.infcx));
// Remove any trivial region constraints once we've resolved regions
external_constraints
.region_constraints
@ -146,6 +167,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
#[instrument(level = "debug", skip(self), ret)]
fn compute_external_query_constraints(
&self,
normalization_nested_goals: NestedNormalizationGoals<'tcx>,
) -> Result<ExternalConstraintsData<'tcx>, NoSolution> {
// We only check for leaks from universes which were entered inside
// of the query.
@ -176,7 +198,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
self.predefined_opaques_in_body.opaque_types.iter().all(|(pa, _)| pa != a)
});
Ok(ExternalConstraintsData { region_constraints, opaque_types })
Ok(ExternalConstraintsData { region_constraints, opaque_types, normalization_nested_goals })
}
/// After calling a canonical query, we apply the constraints returned
@ -185,13 +207,14 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
/// This happens in three steps:
/// - we instantiate the bound variables of the query response
/// - we unify the `var_values` of the response with the `original_values`
/// - we apply the `external_constraints` returned by the query
/// - we apply the `external_constraints` returned by the query, returning
/// the `normalization_nested_goals`
pub(super) fn instantiate_and_apply_query_response(
&mut self,
param_env: ty::ParamEnv<'tcx>,
original_values: Vec<ty::GenericArg<'tcx>>,
response: CanonicalResponse<'tcx>,
) -> Certainty {
) -> (NestedNormalizationGoals<'tcx>, Certainty) {
let instantiation = Self::compute_query_response_instantiation_values(
self.infcx,
&original_values,
@ -203,11 +226,14 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
Self::unify_query_var_values(self.infcx, param_env, &original_values, var_values);
let ExternalConstraintsData { region_constraints, opaque_types } =
external_constraints.deref();
let ExternalConstraintsData {
region_constraints,
opaque_types,
normalization_nested_goals,
} = external_constraints.deref();
self.register_region_constraints(region_constraints);
self.register_new_opaque_types(param_env, opaque_types);
certainty
(normalization_nested_goals.clone(), certainty)
}
/// This returns the canonical variable values to instantiate the bound variables of

View File

@ -11,6 +11,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
infcx: self.infcx,
variables: self.variables,
var_values: self.var_values,
is_normalizes_to_goal: self.is_normalizes_to_goal,
predefined_opaques_in_body: self.predefined_opaques_in_body,
max_input_universe: self.max_input_universe,
search_graph: self.search_graph,
@ -25,6 +26,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
infcx: _,
variables: _,
var_values: _,
is_normalizes_to_goal: _,
predefined_opaques_in_body: _,
max_input_universe: _,
search_graph: _,

View File

@ -7,14 +7,14 @@ use rustc_infer::infer::{
BoundRegionConversionTime, DefineOpaqueTypes, InferCtxt, InferOk, TyCtxtInferExt,
};
use rustc_infer::traits::query::NoSolution;
use rustc_infer::traits::solve::MaybeCause;
use rustc_infer::traits::solve::{MaybeCause, NestedNormalizationGoals};
use rustc_infer::traits::ObligationCause;
use rustc_middle::infer::canonical::CanonicalVarInfos;
use rustc_middle::infer::unify_key::{ConstVariableOrigin, ConstVariableOriginKind};
use rustc_middle::traits::solve::inspect;
use rustc_middle::traits::solve::{
CanonicalInput, CanonicalResponse, Certainty, IsNormalizesToHack, PredefinedOpaques,
PredefinedOpaquesData, QueryResult,
CanonicalInput, CanonicalResponse, Certainty, PredefinedOpaques, PredefinedOpaquesData,
QueryResult,
};
use rustc_middle::traits::specialization_graph;
use rustc_middle::ty::{
@ -61,6 +61,14 @@ pub struct EvalCtxt<'a, 'tcx> {
/// The variable info for the `var_values`, only used to make an ambiguous response
/// with no constraints.
variables: CanonicalVarInfos<'tcx>,
/// Whether we're currently computing a `NormalizesTo` goal. Unlike other goals,
/// `NormalizesTo` goals act like functions with the expected term always being
/// fully unconstrained. This would weaken inference however, as the nested goals
/// never get the inference constraints from the actual normalized-to type. Because
/// of this we return any ambiguous nested goals from `NormalizesTo` to the caller
/// which then adds these to its own context. The caller is always an `AliasRelate`
/// goal so this never leaks out of the solver.
is_normalizes_to_goal: bool,
pub(super) var_values: CanonicalVarValues<'tcx>,
predefined_opaques_in_body: PredefinedOpaques<'tcx>,
@ -91,9 +99,9 @@ pub struct EvalCtxt<'a, 'tcx> {
pub(super) inspect: ProofTreeBuilder<'tcx>,
}
#[derive(Debug, Clone)]
#[derive(Default, Debug, Clone)]
pub(super) struct NestedGoals<'tcx> {
/// This normalizes-to goal that is treated specially during the evaluation
/// These normalizes-to goals are treated specially during the evaluation
/// loop. In each iteration we take the RHS of the projection, replace it with
/// a fresh inference variable, and only after evaluating that goal do we
/// equate the fresh inference variable with the actual RHS of the predicate.
@ -101,26 +109,24 @@ pub(super) struct NestedGoals<'tcx> {
/// This is both to improve caching, and to avoid using the RHS of the
/// projection predicate to influence the normalizes-to candidate we select.
///
/// This is not a 'real' nested goal. We must not forget to replace the RHS
/// with a fresh inference variable when we evaluate this goal. That can result
/// in a trait solver cycle. This would currently result in overflow but can be
/// unsound with more powerful coinduction in the future.
pub(super) normalizes_to_hack_goal: Option<Goal<'tcx, ty::NormalizesTo<'tcx>>>,
/// Forgetting to replace the RHS with a fresh inference variable when we evaluate
/// this goal results in an ICE.
pub(super) normalizes_to_goals: Vec<Goal<'tcx, ty::NormalizesTo<'tcx>>>,
/// The rest of the goals which have not yet been processed or remain ambiguous.
pub(super) goals: Vec<(GoalSource, Goal<'tcx, ty::Predicate<'tcx>>)>,
}
impl<'tcx> NestedGoals<'tcx> {
pub(super) fn new() -> Self {
Self { normalizes_to_hack_goal: None, goals: Vec::new() }
Self { normalizes_to_goals: Vec::new(), goals: Vec::new() }
}
pub(super) fn is_empty(&self) -> bool {
self.normalizes_to_hack_goal.is_none() && self.goals.is_empty()
self.normalizes_to_goals.is_empty() && self.goals.is_empty()
}
pub(super) fn extend(&mut self, other: NestedGoals<'tcx>) {
assert_eq!(other.normalizes_to_hack_goal, None);
self.normalizes_to_goals.extend(other.normalizes_to_goals);
self.goals.extend(other.goals)
}
}
@ -155,6 +161,10 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
self.search_graph.solver_mode()
}
pub(super) fn set_is_normalizes_to_goal(&mut self) {
self.is_normalizes_to_goal = true;
}
/// Creates a root evaluation context and search graph. This should only be
/// used from outside of any evaluation, and other methods should be preferred
/// over using this manually (such as [`InferCtxtEvalExt::evaluate_root_goal`]).
@ -167,8 +177,8 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
let mut search_graph = search_graph::SearchGraph::new(mode);
let mut ecx = EvalCtxt {
search_graph: &mut search_graph,
infcx,
search_graph: &mut search_graph,
nested_goals: NestedGoals::new(),
inspect: ProofTreeBuilder::new_maybe_root(infcx.tcx, generate_proof_tree),
@ -180,6 +190,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
max_input_universe: ty::UniverseIndex::ROOT,
variables: ty::List::empty(),
var_values: CanonicalVarValues::dummy(),
is_normalizes_to_goal: false,
tainted: Ok(()),
};
let result = f(&mut ecx);
@ -233,6 +244,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
infcx,
variables: canonical_input.variables,
var_values,
is_normalizes_to_goal: false,
predefined_opaques_in_body: input.predefined_opaques_in_body,
max_input_universe: canonical_input.max_universe,
search_graph,
@ -319,6 +331,27 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
source: GoalSource,
goal: Goal<'tcx, ty::Predicate<'tcx>>,
) -> Result<(bool, Certainty), NoSolution> {
let (normalization_nested_goals, has_changed, certainty) =
self.evaluate_goal_raw(goal_evaluation_kind, source, goal)?;
assert!(normalization_nested_goals.is_empty());
Ok((has_changed, certainty))
}
/// Recursively evaluates `goal`, returning the nested goals in case
/// the nested goal is a `NormalizesTo` goal.
///
/// As all other goal kinds do not return any nested goals and
/// `NormalizesTo` is only used by `AliasRelate`, all other callsites
/// should use [`EvalCtxt::evaluate_goal`] which discards that empty
/// storage.
// FIXME(-Znext-solver=coinduction): `_source` is currently unused but will
// be necessary once we implement the new coinduction approach.
fn evaluate_goal_raw(
&mut self,
goal_evaluation_kind: GoalEvaluationKind,
_source: GoalSource,
goal: Goal<'tcx, ty::Predicate<'tcx>>,
) -> Result<(NestedNormalizationGoals<'tcx>, bool, Certainty), NoSolution> {
let (orig_values, canonical_goal) = self.canonicalize_goal(goal);
let mut goal_evaluation =
self.inspect.new_goal_evaluation(goal, &orig_values, goal_evaluation_kind);
@ -336,12 +369,12 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
Ok(response) => response,
};
let (certainty, has_changed) = self.instantiate_response_discarding_overflow(
goal.param_env,
source,
orig_values,
canonical_response,
);
let (normalization_nested_goals, certainty, has_changed) = self
.instantiate_response_discarding_overflow(
goal.param_env,
orig_values,
canonical_response,
);
self.inspect.goal_evaluation(goal_evaluation);
// FIXME: We previously had an assert here that checked that recomputing
// a goal after applying its constraints did not change its response.
@ -353,47 +386,25 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
// Once we have decided on how to handle trait-system-refactor-initiative#75,
// we should re-add an assert here.
Ok((has_changed, certainty))
Ok((normalization_nested_goals, has_changed, certainty))
}
fn instantiate_response_discarding_overflow(
&mut self,
param_env: ty::ParamEnv<'tcx>,
source: GoalSource,
original_values: Vec<ty::GenericArg<'tcx>>,
response: CanonicalResponse<'tcx>,
) -> (Certainty, bool) {
// The old solver did not evaluate nested goals when normalizing.
// It returned the selection constraints allowing a `Projection`
// obligation to not hold in coherence while avoiding the fatal error
// from overflow.
//
// We match this behavior here by considering all constraints
// from nested goals which are not from where-bounds. We will already
// need to track which nested goals are required by impl where-bounds
// for coinductive cycles, so we simply reuse that here.
//
// While we could consider overflow constraints in more cases, this should
// not be necessary for backcompat and results in better perf. It also
// avoids a potential inconsistency which would otherwise require some
// tracking for root goals as well. See #119071 for an example.
let keep_overflow_constraints = || {
self.search_graph.current_goal_is_normalizes_to()
&& source != GoalSource::ImplWhereBound
};
if let Certainty::Maybe(MaybeCause::Overflow { .. }) = response.value.certainty
&& !keep_overflow_constraints()
{
return (response.value.certainty, false);
) -> (NestedNormalizationGoals<'tcx>, Certainty, bool) {
if let Certainty::Maybe(MaybeCause::Overflow { .. }) = response.value.certainty {
return (NestedNormalizationGoals::empty(), response.value.certainty, false);
}
let has_changed = !response.value.var_values.is_identity_modulo_regions()
|| !response.value.external_constraints.opaque_types.is_empty();
let certainty =
let (normalization_nested_goals, certainty) =
self.instantiate_and_apply_query_response(param_env, original_values, response);
(certainty, has_changed)
(normalization_nested_goals, certainty, has_changed)
}
fn compute_goal(&mut self, goal: Goal<'tcx, ty::Predicate<'tcx>>) -> QueryResult<'tcx> {
@ -496,7 +507,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
/// Goals for the next step get directly added to the nested goals of the `EvalCtxt`.
fn evaluate_added_goals_step(&mut self) -> Result<Option<Certainty>, NoSolution> {
let tcx = self.tcx();
let mut goals = core::mem::replace(&mut self.nested_goals, NestedGoals::new());
let mut goals = core::mem::take(&mut self.nested_goals);
self.inspect.evaluate_added_goals_loop_start();
@ -508,7 +519,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
// If this loop did not result in any progress, what's our final certainty.
let mut unchanged_certainty = Some(Certainty::Yes);
if let Some(goal) = goals.normalizes_to_hack_goal.take() {
for goal in goals.normalizes_to_goals {
// Replace the goal with an unconstrained infer var, so the
// RHS does not affect projection candidate assembly.
let unconstrained_rhs = self.next_term_infer_of_kind(goal.predicate.term);
@ -517,11 +528,13 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
ty::NormalizesTo { alias: goal.predicate.alias, term: unconstrained_rhs },
);
let (_, certainty) = self.evaluate_goal(
GoalEvaluationKind::Nested { is_normalizes_to_hack: IsNormalizesToHack::Yes },
let (NestedNormalizationGoals(nested_goals), _, certainty) = self.evaluate_goal_raw(
GoalEvaluationKind::Nested,
GoalSource::Misc,
unconstrained_goal,
)?;
// Add the nested goals from normalization to our own nested goals.
goals.goals.extend(nested_goals);
// Finally, equate the goal's RHS with the unconstrained var.
// We put the nested goals from this into goals instead of
@ -536,27 +549,23 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
// looking at the "has changed" return from evaluate_goal,
// because we expect the `unconstrained_rhs` part of the predicate
// to have changed -- that means we actually normalized successfully!
if goal.predicate.alias != self.resolve_vars_if_possible(goal.predicate.alias) {
let with_resolved_vars = self.resolve_vars_if_possible(goal);
if goal.predicate.alias != with_resolved_vars.predicate.alias {
unchanged_certainty = None;
}
match certainty {
Certainty::Yes => {}
Certainty::Maybe(_) => {
// We need to resolve vars here so that we correctly
// deal with `has_changed` in the next iteration.
self.set_normalizes_to_hack_goal(self.resolve_vars_if_possible(goal));
self.nested_goals.normalizes_to_goals.push(with_resolved_vars);
unchanged_certainty = unchanged_certainty.map(|c| c.unify_with(certainty));
}
}
}
for (source, goal) in goals.goals.drain(..) {
let (has_changed, certainty) = self.evaluate_goal(
GoalEvaluationKind::Nested { is_normalizes_to_hack: IsNormalizesToHack::No },
source,
goal,
)?;
for (source, goal) in goals.goals {
let (has_changed, certainty) =
self.evaluate_goal(GoalEvaluationKind::Nested, source, goal)?;
if has_changed {
unchanged_certainty = None;
}
@ -968,7 +977,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
ty: Ty<'tcx>,
) -> Option<ty::Const<'tcx>> {
use rustc_middle::mir::interpret::ErrorHandled;
match self.infcx.try_const_eval_resolve(param_env, unevaluated, ty, None) {
match self.infcx.try_const_eval_resolve(param_env, unevaluated, ty, DUMMY_SP) {
Ok(ct) => Some(ct),
Err(ErrorHandled::Reported(e, _)) => {
Some(ty::Const::new_error(self.tcx(), e.into(), ty))
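Among the smaller changes in this file, `core::mem::replace(&mut self.nested_goals, NestedGoals::new())` becomes `core::mem::take(&mut self.nested_goals)`, which relies on the newly derived `Default`. A small sketch of the equivalence on a hypothetical struct:

```rust
// Hypothetical stand-in for the solver's `NestedGoals`; deriving `Default`
// is what makes `mem::take` applicable.
#[derive(Default, Debug, PartialEq)]
struct NestedGoals {
    goals: Vec<u32>,
}

fn main() {
    let mut state = NestedGoals { goals: vec![1, 2, 3] };

    // `mem::take(&mut x)` is shorthand for
    // `mem::replace(&mut x, Default::default())`.
    let taken = std::mem::take(&mut state);

    let expected: Vec<u32> = vec![1, 2, 3];
    assert_eq!(taken.goals, expected);
    assert_eq!(state, NestedGoals::default());
}
```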

View File

@ -24,6 +24,7 @@ where
infcx: outer_ecx.infcx,
variables: outer_ecx.variables,
var_values: outer_ecx.var_values,
is_normalizes_to_goal: outer_ecx.is_normalizes_to_goal,
predefined_opaques_in_body: outer_ecx.predefined_opaques_in_body,
max_input_universe: outer_ecx.max_input_universe,
search_graph: outer_ecx.search_graph,

View File

@ -58,12 +58,13 @@ impl<'tcx> InferCtxt<'tcx> {
}
let candidate = candidates.pop().unwrap();
let certainty = ecx.instantiate_and_apply_query_response(
trait_goal.param_env,
orig_values,
candidate.result,
);
let (normalization_nested_goals, certainty) = ecx
.instantiate_and_apply_query_response(
trait_goal.param_env,
orig_values,
candidate.result,
);
assert!(normalization_nested_goals.is_empty());
Ok(Some((candidate, certainty)))
});

View File

@ -70,7 +70,19 @@ impl<'a, 'tcx> InspectCandidate<'a, 'tcx> {
instantiated_goals.push(goal);
}
for &goal in &instantiated_goals {
for goal in instantiated_goals.iter().copied() {
// We need to be careful with `NormalizesTo` goals as the
// expected term has to be replaced with an unconstrained
// inference variable.
if let Some(kind) = goal.predicate.kind().no_bound_vars()
&& let ty::PredicateKind::NormalizesTo(predicate) = kind
&& !predicate.alias.is_opaque(infcx.tcx)
{
// FIXME: We currently skip these goals as
// `fn evaluate_root_goal` ICEs if there are any
// `NestedNormalizationGoals`.
continue;
};
let (_, proof_tree) = infcx.evaluate_root_goal(goal, GenerateProofTree::Yes);
let proof_tree = proof_tree.unwrap();
try_visit!(visitor.visit_goal(&InspectGoal::new(

Some files were not shown because too many files have changed in this diff.