Auto merge of #127278 - matthiaskrgr:rollup-fjexkdr, r=matthiaskrgr

Rollup of 8 pull requests. Successful merges:

- #126803 (Change `asm-comments` to `verbose-asm`, always emit user comments)
- #127050 (Make mtime of reproducible tarballs dependent on git commit)
- #127145 (Add `as_lang_item` to `LanguageItems`, new trait solver)
- #127202 (Remove global error count checks from typeck)
- #127233 (Some parser cleanups)
- #127248 (Add parse fail test using safe trait/impl trait)
- #127264 (Small `run-make-support` API improvements)
- #127270 (bootstrap: pass correct struct size to winapi)

r? `@ghost`
`@rustbot` modify labels: rollup

commit 1cfd47fe0b
@@ -204,12 +204,14 @@ impl Attribute {
     pub fn tokens(&self) -> TokenStream {
         match &self.kind {
-            AttrKind::Normal(normal) => normal
-                .tokens
-                .as_ref()
-                .unwrap_or_else(|| panic!("attribute is missing tokens: {self:?}"))
-                .to_attr_token_stream()
-                .to_tokenstream(),
+            AttrKind::Normal(normal) => TokenStream::new(
+                normal
+                    .tokens
+                    .as_ref()
+                    .unwrap_or_else(|| panic!("attribute is missing tokens: {self:?}"))
+                    .to_attr_token_stream()
+                    .to_token_trees(),
+            ),
             &AttrKind::DocComment(comment_kind, data) => TokenStream::token_alone(
                 token::DocComment(comment_kind, self.style, data),
                 self.span,
@@ -23,7 +23,6 @@ use rustc_data_structures::sync::{self, Lrc};
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 use rustc_serialize::{Decodable, Encodable};
 use rustc_span::{sym, Span, SpanDecoder, SpanEncoder, Symbol, DUMMY_SP};
-use smallvec::{smallvec, SmallVec};

 use std::borrow::Cow;
 use std::{cmp, fmt, iter};
@@ -180,27 +179,25 @@ impl AttrTokenStream {
         AttrTokenStream(Lrc::new(tokens))
     }

-    /// Converts this `AttrTokenStream` to a plain `TokenStream`.
+    /// Converts this `AttrTokenStream` to a plain `Vec<TokenTree>`.
     /// During conversion, `AttrTokenTree::Attributes` get 'flattened'
     /// back to a `TokenStream` of the form `outer_attr attr_target`.
     /// If there are inner attributes, they are inserted into the proper
     /// place in the attribute target tokens.
-    pub fn to_tokenstream(&self) -> TokenStream {
-        let trees: Vec<_> = self
-            .0
-            .iter()
-            .flat_map(|tree| match &tree {
+    pub fn to_token_trees(&self) -> Vec<TokenTree> {
+        let mut res = Vec::with_capacity(self.0.len());
+        for tree in self.0.iter() {
+            match tree {
                 AttrTokenTree::Token(inner, spacing) => {
-                    smallvec![TokenTree::Token(inner.clone(), *spacing)].into_iter()
+                    res.push(TokenTree::Token(inner.clone(), *spacing));
                 }
                 AttrTokenTree::Delimited(span, spacing, delim, stream) => {
-                    smallvec![TokenTree::Delimited(
+                    res.push(TokenTree::Delimited(
                         *span,
                         *spacing,
                         *delim,
-                        stream.to_tokenstream()
-                    ),]
-                    .into_iter()
+                        TokenStream::new(stream.to_token_trees()),
+                    ))
                 }
                 AttrTokenTree::Attributes(data) => {
                     let idx = data
@@ -208,14 +205,7 @@ impl AttrTokenStream {
                         .partition_point(|attr| matches!(attr.style, crate::AttrStyle::Outer));
                     let (outer_attrs, inner_attrs) = data.attrs.split_at(idx);

-                    let mut target_tokens: Vec<_> = data
-                        .tokens
-                        .to_attr_token_stream()
-                        .to_tokenstream()
-                        .0
-                        .iter()
-                        .cloned()
-                        .collect();
+                    let mut target_tokens = data.tokens.to_attr_token_stream().to_token_trees();
                     if !inner_attrs.is_empty() {
                         let mut found = false;
                         // Check the last two trees (to account for a trailing semi)
@@ -251,17 +241,14 @@ impl AttrTokenStream {
                             "Failed to find trailing delimited group in: {target_tokens:?}"
                         );
                     }
-                    let mut flat: SmallVec<[_; 1]> =
-                        SmallVec::with_capacity(target_tokens.len() + outer_attrs.len());
                     for attr in outer_attrs {
-                        flat.extend(attr.tokens().0.iter().cloned());
+                        res.extend(attr.tokens().0.iter().cloned());
                     }
-                    flat.extend(target_tokens);
-                    flat.into_iter()
+                    res.extend(target_tokens);
                 }
-            })
-            .collect();
-        TokenStream::new(trees)
+            }
+        }
+        res
     }
 }
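(Design note: returning a plain `Vec<TokenTree>` and pushing into a single `res` buffer drops the per-tree `smallvec!` allocations and the `flat_map`/`collect` round trip of the old `to_tokenstream`, which is also why the `smallvec` import is removed above.)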
@@ -409,8 +396,8 @@ impl PartialEq<TokenStream> for TokenStream {
 }

 impl TokenStream {
-    pub fn new(streams: Vec<TokenTree>) -> TokenStream {
-        TokenStream(Lrc::new(streams))
+    pub fn new(tts: Vec<TokenTree>) -> TokenStream {
+        TokenStream(Lrc::new(tts))
     }

     pub fn is_empty(&self) -> bool {
@@ -461,7 +448,7 @@ impl TokenStream {
                 AttributesData { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() };
             AttrTokenStream::new(vec![AttrTokenTree::Attributes(attr_data)])
         };
-        attr_stream.to_tokenstream()
+        TokenStream::new(attr_stream.to_token_trees())
     }

     pub fn from_nonterminal_ast(nt: &Nonterminal) -> TokenStream {
@@ -38,16 +38,14 @@ pub(crate) fn cfg_eval(
     lint_node_id: NodeId,
 ) -> Annotatable {
     let features = Some(features);
-    CfgEval { cfg: &mut StripUnconfigured { sess, features, config_tokens: true, lint_node_id } }
+    CfgEval(StripUnconfigured { sess, features, config_tokens: true, lint_node_id })
         .configure_annotatable(annotatable)
         // Since the item itself has already been configured by the `InvocationCollector`,
         // we know that fold result vector will contain exactly one element.
         .unwrap()
 }

-struct CfgEval<'a, 'b> {
-    cfg: &'a mut StripUnconfigured<'b>,
-}
+struct CfgEval<'a>(StripUnconfigured<'a>);

 fn flat_map_annotatable(
     vis: &mut impl MutVisitor,
@@ -125,9 +123,9 @@ fn has_cfg_or_cfg_attr(annotatable: &Annotatable) -> bool {
     res.is_break()
 }

-impl CfgEval<'_, '_> {
+impl CfgEval<'_> {
     fn configure<T: HasAttrs + HasTokens>(&mut self, node: T) -> Option<T> {
-        self.cfg.configure(node)
+        self.0.configure(node)
     }

     fn configure_annotatable(&mut self, mut annotatable: Annotatable) -> Option<Annotatable> {
@@ -196,7 +194,7 @@ impl CfgEval<'_, '_> {
         // Re-parse the tokens, setting the `capture_cfg` flag to save extra information
         // to the captured `AttrTokenStream` (specifically, we capture
         // `AttrTokenTree::AttributesData` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
-        let mut parser = Parser::new(&self.cfg.sess.psess, orig_tokens, None);
+        let mut parser = Parser::new(&self.0.sess.psess, orig_tokens, None);
         parser.capture_cfg = true;
         match parse_annotatable_with(&mut parser) {
             Ok(a) => annotatable = a,
@@ -212,16 +210,16 @@ impl CfgEval<'_, '_> {
         }
     }
 }

-impl MutVisitor for CfgEval<'_, '_> {
+impl MutVisitor for CfgEval<'_> {
     #[instrument(level = "trace", skip(self))]
     fn visit_expr(&mut self, expr: &mut P<ast::Expr>) {
-        self.cfg.configure_expr(expr, false);
+        self.0.configure_expr(expr, false);
         mut_visit::noop_visit_expr(expr, self);
     }

     #[instrument(level = "trace", skip(self))]
     fn visit_method_receiver_expr(&mut self, expr: &mut P<ast::Expr>) {
-        self.cfg.configure_expr(expr, true);
+        self.0.configure_expr(expr, true);
         mut_visit::noop_visit_expr(expr, self);
     }
@@ -32,7 +32,7 @@ impl OwnedTargetMachine {
         unique_section_names: bool,
         trap_unreachable: bool,
         singletree: bool,
-        asm_comments: bool,
+        verbose_asm: bool,
         emit_stack_size_section: bool,
         relax_elf_relocations: bool,
         use_init_array: bool,

@@ -64,7 +64,7 @@ impl OwnedTargetMachine {
             unique_section_names,
             trap_unreachable,
             singletree,
-            asm_comments,
+            verbose_asm,
             emit_stack_size_section,
             relax_elf_relocations,
             use_init_array,
@@ -214,7 +214,7 @@ pub fn target_machine_factory(
         sess.opts.unstable_opts.trap_unreachable.unwrap_or(sess.target.trap_unreachable);
     let emit_stack_size_section = sess.opts.unstable_opts.emit_stack_sizes;

-    let asm_comments = sess.opts.unstable_opts.asm_comments;
+    let verbose_asm = sess.opts.unstable_opts.verbose_asm;
     let relax_elf_relocations =
         sess.opts.unstable_opts.relax_elf_relocations.unwrap_or(sess.target.relax_elf_relocations);

@@ -289,7 +289,7 @@ pub fn target_machine_factory(
            unique_section_names,
            trap_unreachable,
            singlethread,
-           asm_comments,
+           verbose_asm,
            emit_stack_size_section,
            relax_elf_relocations,
            use_init_array,
@@ -2185,7 +2185,7 @@ extern "C" {
         UniqueSectionNames: bool,
         TrapUnreachable: bool,
         Singlethread: bool,
-        AsmComments: bool,
+        VerboseAsm: bool,
         EmitStackSizeSection: bool,
         RelaxELFRelocations: bool,
         UseInitArray: bool,
@@ -120,21 +120,21 @@ struct CollectTrackerAndEmitter<'a, 'cx, 'matcher> {

 struct BestFailure {
     token: Token,
-    position_in_tokenstream: usize,
+    position_in_tokenstream: u32,
     msg: &'static str,
     remaining_matcher: MatcherLoc,
 }

 impl BestFailure {
-    fn is_better_position(&self, position: usize) -> bool {
+    fn is_better_position(&self, position: u32) -> bool {
         position > self.position_in_tokenstream
     }
 }

 impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx, 'matcher> {
-    type Failure = (Token, usize, &'static str);
+    type Failure = (Token, u32, &'static str);

-    fn build_failure(tok: Token, position: usize, msg: &'static str) -> Self::Failure {
+    fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure {
         (tok, position, msg)
     }
@@ -211,9 +211,9 @@ impl<'matcher> FailureForwarder<'matcher> {
 }

 impl<'matcher> Tracker<'matcher> for FailureForwarder<'matcher> {
-    type Failure = (Token, usize, &'static str);
+    type Failure = (Token, u32, &'static str);

-    fn build_failure(tok: Token, position: usize, msg: &'static str) -> Self::Failure {
+    fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure {
         (tok, position, msg)
     }
@@ -452,7 +452,7 @@ impl TtParser {
         &mut self,
         matcher: &'matcher [MatcherLoc],
         token: &Token,
-        approx_position: usize,
+        approx_position: u32,
         track: &mut T,
     ) -> Option<NamedParseResult<T::Failure>> {
         // Matcher positions that would be valid if the macro invocation was over now. Only
@@ -153,7 +153,7 @@ pub(super) trait Tracker<'matcher> {
     /// Arm failed to match. If the token is `token::Eof`, it indicates an unexpected
     /// end of macro invocation. Otherwise, it indicates that no rules expected the given token.
     /// The usize is the approximate position of the token in the input token stream.
-    fn build_failure(tok: Token, position: usize, msg: &'static str) -> Self::Failure;
+    fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure;

     /// This is called before trying to match next MatcherLoc on the current token.
     fn before_match_loc(&mut self, _parser: &TtParser, _matcher: &'matcher MatcherLoc) {}
@@ -182,7 +182,7 @@ pub(super) struct NoopTracker;
 impl<'matcher> Tracker<'matcher> for NoopTracker {
     type Failure = ();

-    fn build_failure(_tok: Token, _position: usize, _msg: &'static str) -> Self::Failure {}
+    fn build_failure(_tok: Token, _position: u32, _msg: &'static str) -> Self::Failure {}

     fn description() -> &'static str {
         "none"
@@ -11,6 +11,7 @@ use crate::def_id::DefId;
 use crate::{MethodKind, Target};

 use rustc_ast as ast;
+use rustc_data_structures::fx::FxIndexMap;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 use rustc_span::symbol::{kw, sym, Symbol};
@@ -23,6 +24,7 @@ pub struct LanguageItems {
     /// Mappings from lang items to their possibly found [`DefId`]s.
     /// The index corresponds to the order in [`LangItem`].
     items: [Option<DefId>; std::mem::variant_count::<LangItem>()],
+    reverse_items: FxIndexMap<DefId, LangItem>,
     /// Lang items that were not found during collection.
     pub missing: Vec<LangItem>,
 }
@@ -30,7 +32,11 @@ pub struct LanguageItems {
 impl LanguageItems {
     /// Construct an empty collection of lang items and no missing ones.
     pub fn new() -> Self {
-        Self { items: [None; std::mem::variant_count::<LangItem>()], missing: Vec::new() }
+        Self {
+            items: [None; std::mem::variant_count::<LangItem>()],
+            reverse_items: FxIndexMap::default(),
+            missing: Vec::new(),
+        }
     }

     pub fn get(&self, item: LangItem) -> Option<DefId> {
@@ -39,6 +45,11 @@ impl LanguageItems {

     pub fn set(&mut self, item: LangItem, def_id: DefId) {
         self.items[item as usize] = Some(def_id);
+        self.reverse_items.insert(def_id, item);
     }

+    pub fn from_def_id(&self, def_id: DefId) -> Option<LangItem> {
+        self.reverse_items.get(&def_id).copied()
+    }
+
     pub fn iter(&self) -> impl Iterator<Item = (LangItem, DefId)> + '_ {
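(With the new `FxIndexMap` reverse table, `from_def_id` is a single map lookup. The `TyCtxt::as_lang_item` method added further down builds on it, letting the trait solver replace long chains of `is_lang_item` comparisons with one lookup.)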
@@ -708,7 +708,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 // else an error would have been flagged by the
                 // `loops` pass for using break with an expression
                 // where you are not supposed to.
-                assert!(expr_opt.is_none() || self.dcx().has_errors().is_some());
+                assert!(expr_opt.is_none() || self.tainted_by_errors().is_some());
             }

         // If we encountered a `break`, then (no surprise) it may be possible to break from the
@@ -734,9 +734,7 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx
                 // struct; however, when EUV is run during typeck, it
                 // may not. This will generate an error earlier in typeck,
                 // so we can just ignore it.
-                if self.cx.tcx().dcx().has_errors().is_none() {
-                    span_bug!(with_expr.span, "with expression doesn't evaluate to a struct");
-                }
+                span_bug!(with_expr.span, "with expression doesn't evaluate to a struct");
             }
         }
@@ -1652,7 +1652,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {

         self.warn_if_unreachable(stmt.hir_id, stmt.span, "statement");

-        // Hide the outer diverging and `has_errors` flags.
+        // Hide the outer diverging flags.
         let old_diverges = self.diverges.replace(Diverges::Maybe);

         match stmt.kind {
@@ -510,9 +510,12 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> {
                 .report_mismatched_types(&cause, method_self_ty, self_ty, terr)
                 .emit();
         } else {
-            error!("{self_ty} was a subtype of {method_self_ty} but now is not?");
-            // This must already have errored elsewhere.
-            self.dcx().has_errors().unwrap();
+            // This has/will have errored in wfcheck, which we cannot depend on from here, as typeck on functions
+            // may run before wfcheck if the function is used in const eval.
+            self.dcx().span_delayed_bug(
+                cause.span(),
+                format!("{self_ty} was a subtype of {method_self_ty} but now is not?"),
+            );
         }
     }
 }
@@ -219,28 +219,9 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
     fn fix_index_builtin_expr(&mut self, e: &hir::Expr<'_>) {
         if let hir::ExprKind::Index(ref base, ref index, _) = e.kind {
             // All valid indexing looks like this; might encounter non-valid indexes at this point.
-            let base_ty = self.typeck_results.expr_ty_adjusted_opt(base);
-            if base_ty.is_none() {
-                // When encountering `return [0][0]` outside of a `fn` body we can encounter a base
-                // that isn't in the type table. We assume more relevant errors have already been
-                // emitted. (#64638)
-                assert!(self.tcx().dcx().has_errors().is_some(), "bad base: `{base:?}`");
-            }
-            if let Some(base_ty) = base_ty
-                && let ty::Ref(_, base_ty_inner, _) = *base_ty.kind()
-            {
-                let index_ty =
-                    self.typeck_results.expr_ty_adjusted_opt(index).unwrap_or_else(|| {
-                        // When encountering `return [0][0]` outside of a `fn` body we would attempt
-                        // to access an nonexistent index. We assume that more relevant errors will
-                        // already have been emitted, so we only gate on this with an ICE if no
-                        // error has been emitted. (#64638)
-                        Ty::new_error_with_message(
-                            self.fcx.tcx,
-                            e.span,
-                            format!("bad index {index:?} for base: `{base:?}`"),
-                        )
-                    });
+            let base_ty = self.typeck_results.expr_ty_adjusted(base);
+            if let ty::Ref(_, base_ty_inner, _) = *base_ty.kind() {
+                let index_ty = self.typeck_results.expr_ty_adjusted(index);
                 if self.is_builtin_index(e, base_ty_inner, index_ty) {
                     // Remove the method call record
                     self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
@@ -757,7 +757,6 @@ fn test_unstable_options_tracking_hash() {
     // tidy-alphabetical-start
     tracked!(allow_features, Some(vec![String::from("lang_items")]));
     tracked!(always_encode_mir, true);
-    tracked!(asm_comments, true);
     tracked!(assume_incomplete_release, true);
     tracked!(binary_dep_depinfo, true);
     tracked!(box_noalias, false);

@@ -862,6 +861,7 @@ fn test_unstable_options_tracking_hash() {
     tracked!(uninit_const_chunk_threshold, 123);
     tracked!(unleash_the_miri_inside_of_you, true);
     tracked!(use_ctors_section, Some(true));
+    tracked!(verbose_asm, true);
     tracked!(verify_llvm_ir, true);
     tracked!(virtual_function_elimination, true);
     tracked!(wasi_exec_model, Some(WasiExecModel::Reactor));
@@ -407,7 +407,7 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine(
     const char *ABIStr, LLVMRustCodeModel RustCM, LLVMRustRelocModel RustReloc,
     LLVMRustCodeGenOptLevel RustOptLevel, bool UseSoftFloat,
     bool FunctionSections, bool DataSections, bool UniqueSectionNames,
-    bool TrapUnreachable, bool Singlethread, bool AsmComments,
+    bool TrapUnreachable, bool Singlethread, bool VerboseAsm,
     bool EmitStackSizeSection, bool RelaxELFRelocations, bool UseInitArray,
     const char *SplitDwarfFile, const char *OutputObjFile,
     const char *DebugInfoCompression, bool UseEmulatedTls,

@@ -435,8 +435,9 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine(
   Options.DataSections = DataSections;
   Options.FunctionSections = FunctionSections;
   Options.UniqueSectionNames = UniqueSectionNames;
-  Options.MCOptions.AsmVerbose = AsmComments;
-  Options.MCOptions.PreserveAsmComments = AsmComments;
+  Options.MCOptions.AsmVerbose = VerboseAsm;
+  // Always preserve comments that were written by the user
+  Options.MCOptions.PreserveAsmComments = true;
   Options.MCOptions.ABIName = ABIStr;
   if (SplitDwarfFile) {
     Options.MCOptions.SplitDwarfFile = SplitDwarfFile;
@@ -27,6 +27,10 @@ impl<'tcx> TyCtxt<'tcx> {
         self.lang_items().get(lang_item) == Some(def_id)
     }

+    pub fn as_lang_item(self, def_id: DefId) -> Option<LangItem> {
+        self.lang_items().from_def_id(def_id)
+    }
+
     /// Given a [`DefId`] of one of the [`Fn`], [`FnMut`] or [`FnOnce`] traits,
     /// returns a corresponding [`ty::ClosureKind`].
     /// For any other [`DefId`] return `None`.
@@ -366,6 +366,10 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
         self.is_lang_item(def_id, trait_lang_item_to_lang_item(lang_item))
     }

+    fn as_lang_item(self, def_id: DefId) -> Option<TraitSolverLangItem> {
+        lang_item_to_trait_lang_item(self.lang_items().from_def_id(def_id)?)
+    }
+
     fn associated_type_def_ids(self, def_id: DefId) -> impl IntoIterator<Item = DefId> {
         self.associated_items(def_id)
             .in_definition_order()

@@ -522,14 +526,6 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
         self.trait_def(trait_def_id).implement_via_object
     }

-    fn fn_trait_kind_from_def_id(self, trait_def_id: DefId) -> Option<ty::ClosureKind> {
-        self.fn_trait_kind_from_def_id(trait_def_id)
-    }
-
-    fn async_fn_trait_kind_from_def_id(self, trait_def_id: DefId) -> Option<ty::ClosureKind> {
-        self.async_fn_trait_kind_from_def_id(trait_def_id)
-    }
-
     fn supertrait_def_ids(self, trait_def_id: DefId) -> impl IntoIterator<Item = DefId> {
         self.supertrait_def_ids(trait_def_id)
     }
@@ -573,46 +569,69 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
     }
 }

-fn trait_lang_item_to_lang_item(lang_item: TraitSolverLangItem) -> LangItem {
-    match lang_item {
-        TraitSolverLangItem::AsyncDestruct => LangItem::AsyncDestruct,
-        TraitSolverLangItem::AsyncFnKindHelper => LangItem::AsyncFnKindHelper,
-        TraitSolverLangItem::AsyncFnKindUpvars => LangItem::AsyncFnKindUpvars,
-        TraitSolverLangItem::AsyncFnOnceOutput => LangItem::AsyncFnOnceOutput,
-        TraitSolverLangItem::AsyncIterator => LangItem::AsyncIterator,
-        TraitSolverLangItem::CallOnceFuture => LangItem::CallOnceFuture,
-        TraitSolverLangItem::CallRefFuture => LangItem::CallRefFuture,
-        TraitSolverLangItem::Clone => LangItem::Clone,
-        TraitSolverLangItem::Copy => LangItem::Copy,
-        TraitSolverLangItem::Coroutine => LangItem::Coroutine,
-        TraitSolverLangItem::CoroutineReturn => LangItem::CoroutineReturn,
-        TraitSolverLangItem::CoroutineYield => LangItem::CoroutineYield,
-        TraitSolverLangItem::Destruct => LangItem::Destruct,
-        TraitSolverLangItem::DiscriminantKind => LangItem::DiscriminantKind,
-        TraitSolverLangItem::DynMetadata => LangItem::DynMetadata,
-        TraitSolverLangItem::EffectsMaybe => LangItem::EffectsMaybe,
-        TraitSolverLangItem::EffectsIntersection => LangItem::EffectsIntersection,
-        TraitSolverLangItem::EffectsIntersectionOutput => LangItem::EffectsIntersectionOutput,
-        TraitSolverLangItem::EffectsNoRuntime => LangItem::EffectsNoRuntime,
-        TraitSolverLangItem::EffectsRuntime => LangItem::EffectsRuntime,
-        TraitSolverLangItem::FnPtrTrait => LangItem::FnPtrTrait,
-        TraitSolverLangItem::FusedIterator => LangItem::FusedIterator,
-        TraitSolverLangItem::Future => LangItem::Future,
-        TraitSolverLangItem::FutureOutput => LangItem::FutureOutput,
-        TraitSolverLangItem::Iterator => LangItem::Iterator,
-        TraitSolverLangItem::Metadata => LangItem::Metadata,
-        TraitSolverLangItem::Option => LangItem::Option,
-        TraitSolverLangItem::PointeeTrait => LangItem::PointeeTrait,
-        TraitSolverLangItem::PointerLike => LangItem::PointerLike,
-        TraitSolverLangItem::Poll => LangItem::Poll,
-        TraitSolverLangItem::Sized => LangItem::Sized,
-        TraitSolverLangItem::TransmuteTrait => LangItem::TransmuteTrait,
-        TraitSolverLangItem::Tuple => LangItem::Tuple,
-        TraitSolverLangItem::Unpin => LangItem::Unpin,
-        TraitSolverLangItem::Unsize => LangItem::Unsize,
-    }
-}
+macro_rules! bidirectional_lang_item_map {
+    ($($name:ident),+ $(,)?) => {
+        fn trait_lang_item_to_lang_item(lang_item: TraitSolverLangItem) -> LangItem {
+            match lang_item {
+                $(TraitSolverLangItem::$name => LangItem::$name,)+
+            }
+        }
+
+        fn lang_item_to_trait_lang_item(lang_item: LangItem) -> Option<TraitSolverLangItem> {
+            Some(match lang_item {
+                $(LangItem::$name => TraitSolverLangItem::$name,)+
+                _ => return None,
+            })
+        }
+    }
+}
+
+bidirectional_lang_item_map! {
+    // tidy-alphabetical-start
+    AsyncDestruct,
+    AsyncFn,
+    AsyncFnKindHelper,
+    AsyncFnKindUpvars,
+    AsyncFnMut,
+    AsyncFnOnce,
+    AsyncFnOnceOutput,
+    AsyncIterator,
+    CallOnceFuture,
+    CallRefFuture,
+    Clone,
+    Copy,
+    Coroutine,
+    CoroutineReturn,
+    CoroutineYield,
+    Destruct,
+    DiscriminantKind,
+    DynMetadata,
+    EffectsIntersection,
+    EffectsIntersectionOutput,
+    EffectsMaybe,
+    EffectsNoRuntime,
+    EffectsRuntime,
+    Fn,
+    FnMut,
+    FnOnce,
+    FnPtrTrait,
+    FusedIterator,
+    Future,
+    FutureOutput,
+    Iterator,
+    Metadata,
+    Option,
+    PointeeTrait,
+    PointerLike,
+    Poll,
+    Sized,
+    TransmuteTrait,
+    Tuple,
+    Unpin,
+    Unsize,
+    // tidy-alphabetical-end
+}

 impl<'tcx> rustc_type_ir::inherent::DefId<TyCtxt<'tcx>> for DefId {
     fn as_local(self) -> Option<LocalDefId> {
         self.as_local()
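The macro above generates both mapping directions from a single variant list, so the two conversions cannot drift out of sync. As a standalone illustration of the same pattern — a minimal sketch using hypothetical `Left`/`Right` enums, not the compiler's actual types:

```rust
// Sketch of the `bidirectional_lang_item_map!` pattern: one invocation
// generates a total mapping one way and a partial mapping back.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Left {
    Alpha,
    Beta,
}

#[derive(Clone, Copy, Debug, PartialEq)]
enum Right {
    Alpha,
    Beta,
    Gamma, // has no `Left` counterpart
}

macro_rules! bidirectional_map {
    ($($name:ident),+ $(,)?) => {
        fn left_to_right(l: Left) -> Right {
            match l {
                $(Left::$name => Right::$name,)+
            }
        }

        fn right_to_left(r: Right) -> Option<Left> {
            Some(match r {
                $(Right::$name => Left::$name,)+
                // Variants not listed in the invocation fall through here.
                _ => return None,
            })
        }
    };
}

bidirectional_map! { Alpha, Beta }

fn main() {
    assert_eq!(left_to_right(Left::Alpha), Right::Alpha);
    assert_eq!(right_to_left(Right::Gamma), None);
}
```

Variants present on only one side (like `Gamma` here, or `LangItem`s with no solver counterpart) simply map to `None` in the partial direction.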
@@ -387,48 +387,83 @@ where
                 G::consider_auto_trait_candidate(self, goal)
             } else if cx.trait_is_alias(trait_def_id) {
                 G::consider_trait_alias_candidate(self, goal)
-            } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Sized) {
-                G::consider_builtin_sized_candidate(self, goal)
-            } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Copy)
-                || cx.is_lang_item(trait_def_id, TraitSolverLangItem::Clone)
-            {
-                G::consider_builtin_copy_clone_candidate(self, goal)
-            } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::PointerLike) {
-                G::consider_builtin_pointer_like_candidate(self, goal)
-            } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::FnPtrTrait) {
-                G::consider_builtin_fn_ptr_trait_candidate(self, goal)
-            } else if let Some(kind) = self.cx().fn_trait_kind_from_def_id(trait_def_id) {
-                G::consider_builtin_fn_trait_candidates(self, goal, kind)
-            } else if let Some(kind) = self.cx().async_fn_trait_kind_from_def_id(trait_def_id) {
-                G::consider_builtin_async_fn_trait_candidates(self, goal, kind)
-            } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::AsyncFnKindHelper) {
-                G::consider_builtin_async_fn_kind_helper_candidate(self, goal)
-            } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Tuple) {
-                G::consider_builtin_tuple_candidate(self, goal)
-            } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::PointeeTrait) {
-                G::consider_builtin_pointee_candidate(self, goal)
-            } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Future) {
-                G::consider_builtin_future_candidate(self, goal)
-            } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Iterator) {
-                G::consider_builtin_iterator_candidate(self, goal)
-            } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::FusedIterator) {
-                G::consider_builtin_fused_iterator_candidate(self, goal)
-            } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::AsyncIterator) {
-                G::consider_builtin_async_iterator_candidate(self, goal)
-            } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Coroutine) {
-                G::consider_builtin_coroutine_candidate(self, goal)
-            } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::DiscriminantKind) {
-                G::consider_builtin_discriminant_kind_candidate(self, goal)
-            } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::AsyncDestruct) {
-                G::consider_builtin_async_destruct_candidate(self, goal)
-            } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Destruct) {
-                G::consider_builtin_destruct_candidate(self, goal)
-            } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::TransmuteTrait) {
-                G::consider_builtin_transmute_candidate(self, goal)
-            } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::EffectsIntersection) {
-                G::consider_builtin_effects_intersection_candidate(self, goal)
             } else {
-                Err(NoSolution)
+                match cx.as_lang_item(trait_def_id) {
+                    Some(TraitSolverLangItem::Sized) => G::consider_builtin_sized_candidate(self, goal),
+                    Some(TraitSolverLangItem::Copy | TraitSolverLangItem::Clone) => {
+                        G::consider_builtin_copy_clone_candidate(self, goal)
+                    }
+                    Some(TraitSolverLangItem::Fn) => {
+                        G::consider_builtin_fn_trait_candidates(self, goal, ty::ClosureKind::Fn)
+                    }
+                    Some(TraitSolverLangItem::FnMut) => {
+                        G::consider_builtin_fn_trait_candidates(self, goal, ty::ClosureKind::FnMut)
+                    }
+                    Some(TraitSolverLangItem::FnOnce) => {
+                        G::consider_builtin_fn_trait_candidates(self, goal, ty::ClosureKind::FnOnce)
+                    }
+                    Some(TraitSolverLangItem::AsyncFn) => {
+                        G::consider_builtin_async_fn_trait_candidates(self, goal, ty::ClosureKind::Fn)
+                    }
+                    Some(TraitSolverLangItem::AsyncFnMut) => {
+                        G::consider_builtin_async_fn_trait_candidates(
+                            self,
+                            goal,
+                            ty::ClosureKind::FnMut,
+                        )
+                    }
+                    Some(TraitSolverLangItem::AsyncFnOnce) => {
+                        G::consider_builtin_async_fn_trait_candidates(
+                            self,
+                            goal,
+                            ty::ClosureKind::FnOnce,
+                        )
+                    }
+                    Some(TraitSolverLangItem::PointerLike) => {
+                        G::consider_builtin_pointer_like_candidate(self, goal)
+                    }
+                    Some(TraitSolverLangItem::FnPtrTrait) => {
+                        G::consider_builtin_fn_ptr_trait_candidate(self, goal)
+                    }
+                    Some(TraitSolverLangItem::AsyncFnKindHelper) => {
+                        G::consider_builtin_async_fn_kind_helper_candidate(self, goal)
+                    }
+                    Some(TraitSolverLangItem::Tuple) => G::consider_builtin_tuple_candidate(self, goal),
+                    Some(TraitSolverLangItem::PointeeTrait) => {
+                        G::consider_builtin_pointee_candidate(self, goal)
+                    }
+                    Some(TraitSolverLangItem::Future) => {
+                        G::consider_builtin_future_candidate(self, goal)
+                    }
+                    Some(TraitSolverLangItem::Iterator) => {
+                        G::consider_builtin_iterator_candidate(self, goal)
+                    }
+                    Some(TraitSolverLangItem::FusedIterator) => {
+                        G::consider_builtin_fused_iterator_candidate(self, goal)
+                    }
+                    Some(TraitSolverLangItem::AsyncIterator) => {
+                        G::consider_builtin_async_iterator_candidate(self, goal)
+                    }
+                    Some(TraitSolverLangItem::Coroutine) => {
+                        G::consider_builtin_coroutine_candidate(self, goal)
+                    }
+                    Some(TraitSolverLangItem::DiscriminantKind) => {
+                        G::consider_builtin_discriminant_kind_candidate(self, goal)
+                    }
+                    Some(TraitSolverLangItem::AsyncDestruct) => {
+                        G::consider_builtin_async_destruct_candidate(self, goal)
+                    }
+                    Some(TraitSolverLangItem::Destruct) => {
+                        G::consider_builtin_destruct_candidate(self, goal)
+                    }
+                    Some(TraitSolverLangItem::TransmuteTrait) => {
+                        G::consider_builtin_transmute_candidate(self, goal)
+                    }
+                    Some(TraitSolverLangItem::EffectsIntersection) => {
+                        G::consider_builtin_effects_intersection_candidate(self, goal)
+                    }
+                    _ => Err(NoSolution),
+                }
             }
         };

         candidates.extend(result);
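(The rewrite trades roughly twenty chained `is_lang_item` calls per goal for a single `as_lang_item` reverse lookup followed by one `match`; it is also why `Fn`/`FnMut`/`FnOnce` and their async counterparts become explicit `TraitSolverLangItem` variants in the hunks below, replacing the removed `fn_trait_kind_from_def_id` interner methods.)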
@@ -9,6 +9,7 @@ use rustc_session::parse::ParseSess;
 use rustc_span::{sym, Span, DUMMY_SP};

 use std::ops::Range;
+use std::{iter, mem};

 /// A wrapper type to ensure that the parser handles outer attributes correctly.
 /// When we parse outer attributes, we need to ensure that we capture tokens
@@ -29,15 +30,15 @@ pub struct AttrWrapper {
     // The start of the outer attributes in the token cursor.
     // This allows us to create a `ReplaceRange` for the entire attribute
     // target, including outer attributes.
-    start_pos: usize,
+    start_pos: u32,
 }

 impl AttrWrapper {
-    pub(super) fn new(attrs: AttrVec, start_pos: usize) -> AttrWrapper {
+    pub(super) fn new(attrs: AttrVec, start_pos: u32) -> AttrWrapper {
         AttrWrapper { attrs, start_pos }
     }
     pub fn empty() -> AttrWrapper {
-        AttrWrapper { attrs: AttrVec::new(), start_pos: usize::MAX }
+        AttrWrapper { attrs: AttrVec::new(), start_pos: u32::MAX }
     }

     pub(crate) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec {
@@ -53,7 +54,7 @@ impl AttrWrapper {
     // FIXME: require passing an NT to prevent misuse of this method
     pub(crate) fn prepend_to_nt_inner(self, attrs: &mut AttrVec) {
         let mut self_attrs = self.attrs;
-        std::mem::swap(attrs, &mut self_attrs);
+        mem::swap(attrs, &mut self_attrs);
         attrs.extend(self_attrs);
     }
@@ -91,7 +92,7 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
 struct LazyAttrTokenStreamImpl {
     start_token: (Token, Spacing),
     cursor_snapshot: TokenCursor,
-    num_calls: usize,
+    num_calls: u32,
     break_last_token: bool,
     replace_ranges: Box<[ReplaceRange]>,
 }
@@ -104,15 +105,16 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
         // produce an empty `TokenStream` if no calls were made, and omit the
         // final token otherwise.
         let mut cursor_snapshot = self.cursor_snapshot.clone();
-        let tokens =
-            std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
-                .chain(std::iter::repeat_with(|| {
-                    let token = cursor_snapshot.next();
-                    (FlatToken::Token(token.0), token.1)
-                }))
-                .take(self.num_calls);
+        let tokens = iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
+            .chain(iter::repeat_with(|| {
+                let token = cursor_snapshot.next();
+                (FlatToken::Token(token.0), token.1)
+            }))
+            .take(self.num_calls as usize);

-        if !self.replace_ranges.is_empty() {
+        if self.replace_ranges.is_empty() {
+            make_attr_token_stream(tokens, self.break_last_token)
+        } else {
             let mut tokens: Vec<_> = tokens.collect();
             let mut replace_ranges = self.replace_ranges.to_vec();
             replace_ranges.sort_by_key(|(range, _)| range.start);
@@ -156,7 +158,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
                 // This keeps the total length of `tokens` constant throughout the
                 // replacement process, allowing us to use all of the `ReplaceRanges` entries
                 // without adjusting indices.
-                let filler = std::iter::repeat((FlatToken::Empty, Spacing::Alone))
+                let filler = iter::repeat((FlatToken::Empty, Spacing::Alone))
                     .take(range.len() - new_tokens.len());

                 tokens.splice(
@@ -164,9 +166,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
                     new_tokens.into_iter().chain(filler),
                 );
             }
-            make_token_stream(tokens.into_iter(), self.break_last_token)
-        } else {
-            make_token_stream(tokens, self.break_last_token)
+            make_attr_token_stream(tokens.into_iter(), self.break_last_token)
         }
     }
 }
@@ -218,24 +218,23 @@ impl<'a> Parser<'a> {
         let start_token = (self.token.clone(), self.token_spacing);
         let cursor_snapshot = self.token_cursor.clone();
         let start_pos = self.num_bump_calls;

         let has_outer_attrs = !attrs.attrs.is_empty();
-        let prev_capturing = std::mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
         let replace_ranges_start = self.capture_state.replace_ranges.len();

-        let ret = f(self, attrs.attrs);
-
-        self.capture_state.capturing = prev_capturing;
-
-        let (mut ret, trailing) = ret?;
+        let (mut ret, trailing) = {
+            let prev_capturing = mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
+            let ret_and_trailing = f(self, attrs.attrs);
+            self.capture_state.capturing = prev_capturing;
+            ret_and_trailing?
+        };

         // When we're not in `capture-cfg` mode, then bail out early if:
         // 1. Our target doesn't support tokens at all (e.g we're parsing an `NtIdent`)
         //    so there's nothing for us to do.
         // 2. Our target already has tokens set (e.g. we've parsed something
-        //    like `#[my_attr] $item`. The actual parsing code takes care of prepending
-        //    any attributes to the nonterminal, so we don't need to modify the
-        //    already captured tokens.
+        //    like `#[my_attr] $item`). The actual parsing code takes care of
+        //    prepending any attributes to the nonterminal, so we don't need to
+        //    modify the already captured tokens.
         // Note that this check is independent of `force_collect`- if we already
         // have tokens, or can't even store them, then there's never a need to
         // force collection of new tokens.
@@ -276,37 +275,32 @@ impl<'a> Parser<'a> {

         let replace_ranges_end = self.capture_state.replace_ranges.len();

-        let mut end_pos = self.num_bump_calls;
-
-        let mut captured_trailing = false;
-
         // Capture a trailing token if requested by the callback 'f'
-        match trailing {
-            TrailingToken::None => {}
+        let captured_trailing = match trailing {
+            TrailingToken::None => false,
             TrailingToken::Gt => {
                 assert_eq!(self.token.kind, token::Gt);
+                false
             }
             TrailingToken::Semi => {
                 assert_eq!(self.token.kind, token::Semi);
-                end_pos += 1;
-                captured_trailing = true;
+                true
             }
-            TrailingToken::MaybeComma => {
-                if self.token.kind == token::Comma {
-                    end_pos += 1;
-                    captured_trailing = true;
-                }
-            }
-        }
+            TrailingToken::MaybeComma => self.token.kind == token::Comma,
+        };

-        // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens),
-        // then extend the range of captured tokens to include it, since the parser
-        // was not actually bumped past it. When the `LazyAttrTokenStream` gets converted
-        // into an `AttrTokenStream`, we will create the proper token.
-        if self.break_last_token {
-            assert!(!captured_trailing, "Cannot set break_last_token and have trailing token");
-            end_pos += 1;
-        }
+        assert!(
+            !(self.break_last_token && captured_trailing),
+            "Cannot set break_last_token and have trailing token"
+        );
+
+        let end_pos = self.num_bump_calls
+            + captured_trailing as u32
+            // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens), then
+            // extend the range of captured tokens to include it, since the parser was not actually
+            // bumped past it. When the `LazyAttrTokenStream` gets converted into an
+            // `AttrTokenStream`, we will create the proper token.
+            + self.break_last_token as u32;

         let num_calls = end_pos - start_pos;
@@ -318,14 +312,11 @@ impl<'a> Parser<'a> {
             // Grab any replace ranges that occur *inside* the current AST node.
             // We will perform the actual replacement when we convert the `LazyAttrTokenStream`
             // to an `AttrTokenStream`.
-            let start_calls: u32 = start_pos.try_into().unwrap();
             self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end]
                 .iter()
                 .cloned()
                 .chain(inner_attr_replace_ranges.iter().cloned())
-                .map(|(range, tokens)| {
-                    ((range.start - start_calls)..(range.end - start_calls), tokens)
-                })
+                .map(|(range, tokens)| ((range.start - start_pos)..(range.end - start_pos), tokens))
                 .collect()
         };
@@ -340,7 +331,7 @@ impl<'a> Parser<'a> {
         // If we support tokens at all
         if let Some(target_tokens) = ret.tokens_mut() {
             if target_tokens.is_none() {
-                // Store se our newly captured tokens into the AST node
+                // Store our newly captured tokens into the AST node.
                 *target_tokens = Some(tokens.clone());
             }
         }
@@ -382,10 +373,10 @@ impl<'a> Parser<'a> {
     }
 }

-/// Converts a flattened iterator of tokens (including open and close delimiter tokens)
-/// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
-/// of open and close delims.
-fn make_token_stream(
+/// Converts a flattened iterator of tokens (including open and close delimiter tokens) into an
+/// `AttrTokenStream`, creating an `AttrTokenTree::Delimited` for each matching pair of open and
+/// close delims.
+fn make_attr_token_stream(
     mut iter: impl Iterator<Item = (FlatToken, Spacing)>,
     break_last_token: bool,
 ) -> AttrTokenStream {
@@ -464,6 +455,6 @@ mod size_asserts {
     use rustc_data_structures::static_assert_size;
     // tidy-alphabetical-start
     static_assert_size!(AttrWrapper, 16);
-    static_assert_size!(LazyAttrTokenStreamImpl, 104);
+    static_assert_size!(LazyAttrTokenStreamImpl, 96);
     // tidy-alphabetical-end
 }
@@ -153,7 +153,7 @@ pub struct Parser<'a> {
     expected_tokens: Vec<TokenType>,
     token_cursor: TokenCursor,
     // The number of calls to `bump`, i.e. the position in the token stream.
-    num_bump_calls: usize,
+    num_bump_calls: u32,
     // During parsing we may sometimes need to 'unglue' a glued token into two
     // component tokens (e.g. '>>' into '>' and '>'), so the parser can consume
     // them one at a time. This process bypasses the normal capturing mechanism

@@ -192,7 +192,7 @@ pub struct Parser<'a> {
 // This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure
 // it doesn't unintentionally get bigger.
 #[cfg(target_pointer_width = "64")]
-rustc_data_structures::static_assert_size!(Parser<'_>, 264);
+rustc_data_structures::static_assert_size!(Parser<'_>, 256);

 /// Stores span information about a closure.
 #[derive(Clone, Debug)]

@@ -1572,7 +1572,7 @@ impl<'a> Parser<'a> {
         self.expected_tokens.clear();
     }

-    pub fn approx_token_stream_pos(&self) -> usize {
+    pub fn approx_token_stream_pos(&self) -> u32 {
         self.num_bump_calls
     }
 }
@@ -1630,8 +1630,6 @@ options! {
         "only allow the listed language features to be enabled in code (comma separated)"),
     always_encode_mir: bool = (false, parse_bool, [TRACKED],
         "encode MIR of all functions into the crate metadata (default: no)"),
-    asm_comments: bool = (false, parse_bool, [TRACKED],
-        "generate comments into the assembly (may change behavior) (default: no)"),
     assert_incr_state: Option<String> = (None, parse_opt_string, [UNTRACKED],
         "assert that the incremental cache is in given state: \
          either `loaded` or `not-loaded`."),

@@ -2107,6 +2105,8 @@ written to standard error output)"),
         "Generate sync unwind tables instead of async unwind tables (default: no)"),
     validate_mir: bool = (false, parse_bool, [UNTRACKED],
         "validate MIR after each transformation"),
+    verbose_asm: bool = (false, parse_bool, [TRACKED],
+        "add descriptive comments from LLVM to the assembly (may change behavior) (default: no)"),
     #[rustc_lint_opt_deny_field_access("use `Session::verbose_internals` instead of this field")]
     verbose_internals: bool = (false, parse_bool, [TRACKED_NO_CRATE_HASH],
         "in general, enable more debug printouts (default: no)"),
@@ -220,6 +220,8 @@ pub trait Interner:

     fn is_lang_item(self, def_id: Self::DefId, lang_item: TraitSolverLangItem) -> bool;

+    fn as_lang_item(self, def_id: Self::DefId) -> Option<TraitSolverLangItem>;
+
     fn associated_type_def_ids(self, def_id: Self::DefId) -> impl IntoIterator<Item = Self::DefId>;

     fn for_each_relevant_impl(

@@ -245,10 +247,6 @@ pub trait Interner:

     fn trait_may_be_implemented_via_object(self, trait_def_id: Self::DefId) -> bool;

-    fn fn_trait_kind_from_def_id(self, trait_def_id: Self::DefId) -> Option<ty::ClosureKind>;
-
-    fn async_fn_trait_kind_from_def_id(self, trait_def_id: Self::DefId) -> Option<ty::ClosureKind>;
-
     fn supertrait_def_ids(self, trait_def_id: Self::DefId)
         -> impl IntoIterator<Item = Self::DefId>;
@@ -3,8 +3,11 @@
 pub enum TraitSolverLangItem {
     // tidy-alphabetical-start
     AsyncDestruct,
+    AsyncFn,
     AsyncFnKindHelper,
     AsyncFnKindUpvars,
+    AsyncFnMut,
+    AsyncFnOnce,
     AsyncFnOnceOutput,
     AsyncIterator,
     CallOnceFuture,

@@ -22,6 +25,9 @@ pub enum TraitSolverLangItem {
     EffectsMaybe,
     EffectsNoRuntime,
     EffectsRuntime,
+    Fn,
+    FnMut,
+    FnOnce,
     FnPtrTrait,
     FusedIterator,
     Future,
@@ -317,9 +317,7 @@ fn format_rusage_data(child: Child) -> Option<String> {

     use windows::{
         Win32::Foundation::HANDLE,
-        Win32::System::ProcessStatus::{
-            K32GetProcessMemoryInfo, PROCESS_MEMORY_COUNTERS, PROCESS_MEMORY_COUNTERS_EX,
-        },
+        Win32::System::ProcessStatus::{K32GetProcessMemoryInfo, PROCESS_MEMORY_COUNTERS},
         Win32::System::Threading::GetProcessTimes,
         Win32::System::Time::FileTimeToSystemTime,
     };

@@ -331,6 +329,7 @@ fn format_rusage_data(child: Child) -> Option<String> {
     let mut kernel_filetime = Default::default();
     let mut kernel_time = Default::default();
     let mut memory_counters = PROCESS_MEMORY_COUNTERS::default();
+    let memory_counters_size = std::mem::size_of_val(&memory_counters);

     unsafe {
         GetProcessTimes(

@@ -347,15 +346,9 @@ fn format_rusage_data(child: Child) -> Option<String> {

     // Unlike on Linux with RUSAGE_CHILDREN, this will only return memory information for the process
     // with the given handle and none of that process's children.
-    unsafe {
-        K32GetProcessMemoryInfo(
-            handle,
-            &mut memory_counters,
-            std::mem::size_of::<PROCESS_MEMORY_COUNTERS_EX>() as u32,
-        )
-    }
-    .ok()
-    .ok()?;
+    unsafe { K32GetProcessMemoryInfo(handle, &mut memory_counters, memory_counters_size as u32) }
+        .ok()
+        .ok()?;

     // Guide on interpreting these numbers:
     // https://docs.microsoft.com/en-us/windows/win32/psapi/process-memory-usage-information
@@ -9,9 +9,9 @@ use std::path::{Path, PathBuf};

 use crate::core::builder::Builder;
 use crate::core::{build_steps::dist::distdir, builder::Kind};
-use crate::utils::channel;
 use crate::utils::exec::BootstrapCommand;
 use crate::utils::helpers::{move_file, t};
+use crate::utils::{channel, helpers};

 #[derive(Copy, Clone)]
 pub(crate) enum OverlayKind {
@@ -351,6 +351,30 @@ impl<'a> Tarball<'a> {
         };

         cmd.args(["--compression-profile", compression_profile]);

+        // We want to use a pinned modification time for files in the archive
+        // to achieve better reproducibility. However, using the same mtime for all
+        // releases is not ideal, because it can break e.g. Cargo mtime checking
+        // (https://github.com/rust-lang/rust/issues/125578).
+        // Therefore, we set mtime to the date of the latest commit (if we're managed
+        // by git). In this way, the archive will still be always the same for a given commit
+        // (achieving reproducibility), but it will also change between different commits and
+        // Rust versions, so that it won't break mtime-based caches.
+        //
+        // Note that this only overrides the mtime of files, not directories, due to the
+        // limitations of the tarballer tool. Directories will have their mtime set to 2006.
+
+        // Get the UTC timestamp of the last git commit, if we're under git.
+        // We need to use UTC, so that anyone who tries to rebuild from the same commit
+        // gets the same timestamp.
+        if self.builder.rust_info().is_managed_git_subrepository() {
+            // %ct means committer date
+            let timestamp = helpers::output(
+                helpers::git(Some(&self.builder.src)).arg("log").arg("-1").arg("--format=%ct"),
+            );
+            cmd.args(["--override-file-mtime", timestamp.trim()]);
+        }
+
         self.builder.run(cmd);

         // Ensure there are no symbolic links in the tarball. In particular,
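The scheme described in the comments above is easy to prototype outside bootstrap. A minimal sketch — the standalone program and names are illustrative, not bootstrap's real helpers — assuming it runs inside a git checkout:

```rust
use std::process::Command;

/// Committer date of the latest commit as a UTC Unix timestamp,
/// matching what `git log -1 --format=%ct` prints.
fn commit_timestamp() -> Option<u64> {
    let out = Command::new("git").args(["log", "-1", "--format=%ct"]).output().ok()?;
    String::from_utf8(out.stdout).ok()?.trim().parse().ok()
}

fn main() {
    match commit_timestamp() {
        // Reproducible per commit, but different across commits, so
        // mtime-based caches (e.g. Cargo's) are not confused.
        Some(ts) => println!("--override-file-mtime {ts}"),
        None => println!("not in a git checkout; the pinned 2006 default applies"),
    }
}
```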
src/doc/unstable-book/src/compiler-flags/verbose-asm.md (new file, 70 lines)

@@ -0,0 +1,70 @@

# `verbose-asm`

The tracking issue for this feature is: [#126802](https://github.com/rust-lang/rust/issues/126802).

------------------------

This enables passing `-Zverbose-asm` to get contextual comments added by LLVM.

Sample code:

```rust
#[no_mangle]
pub fn foo(a: i32, b: i32) -> i32 {
    a + b
}
```

Default output:

```asm
foo:
        push    rax
        add     edi, esi
        mov     dword ptr [rsp + 4], edi
        seto    al
        jo      .LBB0_2
        mov     eax, dword ptr [rsp + 4]
        pop     rcx
        ret
.LBB0_2:
        lea     rdi, [rip + .L__unnamed_1]
        mov     rax, qword ptr [rip + core::panicking::panic_const::panic_const_add_overflow::h9c85248fe0d735b2@GOTPCREL]
        call    rax

.L__unnamed_2:
        .ascii  "/app/example.rs"

.L__unnamed_1:
        .quad   .L__unnamed_2
        .asciz  "\017\000\000\000\000\000\000\000\004\000\000\000\005\000\000"
```

With `-Zverbose-asm`:

```asm
foo:                                    # @foo
# %bb.0:
        push    rax
        add     edi, esi
        mov     dword ptr [rsp + 4], edi        # 4-byte Spill
        seto    al
        jo      .LBB0_2
# %bb.1:
        mov     eax, dword ptr [rsp + 4]        # 4-byte Reload
        pop     rcx
        ret
.LBB0_2:
        lea     rdi, [rip + .L__unnamed_1]
        mov     rax, qword ptr [rip + core::panicking::panic_const::panic_const_add_overflow::h9c85248fe0d735b2@GOTPCREL]
        call    rax
                                        # -- End function
.L__unnamed_2:
        .ascii  "/app/example.rs"

.L__unnamed_1:
        .quad   .L__unnamed_2
        .asciz  "\017\000\000\000\000\000\000\000\004\000\000\000\005\000\000"

# DW_AT_external
```
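(To try this yourself, an invocation along these lines should work — a sketch, since `-Z` flags require a nightly toolchain: `rustc +nightly -Zverbose-asm --emit=asm --crate-type=lib example.rs`.)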
@@ -321,8 +321,9 @@ pub fn set_host_rpath(cmd: &mut Command) {
 /// Read the contents of a file that cannot simply be read by
 /// read_to_string, due to invalid utf8 data, then assert that it contains `expected`.
 #[track_caller]
-pub fn invalid_utf8_contains<P: AsRef<Path>>(path: P, expected: &str) {
+pub fn invalid_utf8_contains<P: AsRef<Path>, S: AsRef<str>>(path: P, expected: S) {
     let buffer = fs_wrapper::read(path.as_ref());
+    let expected = expected.as_ref();
     if !String::from_utf8_lossy(&buffer).contains(expected) {
         eprintln!("=== FILE CONTENTS (LOSSY) ===");
         eprintln!("{}", String::from_utf8_lossy(&buffer));

@@ -335,8 +336,9 @@ pub fn invalid_utf8_contains<P: AsRef<Path>>(path: P, expected: &str) {
 /// Read the contents of a file that cannot simply be read by
 /// read_to_string, due to invalid utf8 data, then assert that it does not contain `expected`.
 #[track_caller]
-pub fn invalid_utf8_not_contains<P: AsRef<Path>>(path: P, expected: &str) {
+pub fn invalid_utf8_not_contains<P: AsRef<Path>, S: AsRef<str>>(path: P, expected: S) {
     let buffer = fs_wrapper::read(path.as_ref());
+    let expected = expected.as_ref();
     if String::from_utf8_lossy(&buffer).contains(expected) {
         eprintln!("=== FILE CONTENTS (LOSSY) ===");
         eprintln!("{}", String::from_utf8_lossy(&buffer));
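(The extra `S: AsRef<str>` parameter means callers can now pass an owned `String` — for example one built with `format!` — as well as a `&str`, with no `&` or `.as_str()` at the call site. The `Rustc::emit` change below has the same motivation, and the test update near the end of this diff drops a `&` for exactly that reason.)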
@@ -86,7 +86,8 @@ impl Rustc {
     }

     /// Specify type(s) of output files to generate.
-    pub fn emit(&mut self, kinds: &str) -> &mut Self {
+    pub fn emit<S: AsRef<str>>(&mut self, kinds: S) -> &mut Self {
+        let kinds = kinds.as_ref();
         self.cmd.arg(format!("--emit={kinds}"));
         self
     }
@@ -55,6 +55,12 @@ actor! {
         /// The formats used to compress the tarball
         #[arg(value_name = "FORMAT", default_value_t)]
         compression_formats: CompressionFormats,
+
+        /// Modification time that will be set for all files added to the archive.
+        /// The default is the date of the first Rust commit from 2006.
+        /// This serves for better reproducibility of the archives.
+        #[arg(value_name = "FILE_MTIME", default_value_t = 1153704088)]
+        override_file_mtime: u64,
     }
 }

@@ -145,7 +151,8 @@ impl Combiner {
             .input(self.package_name)
             .output(path_to_str(&output)?.into())
             .compression_profile(self.compression_profile)
-            .compression_formats(self.compression_formats);
+            .compression_formats(self.compression_formats)
+            .override_file_mtime(self.override_file_mtime);
         tarballer.run()?;

         Ok(())
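(For reference, the default `1153704088` is a Unix timestamp; decoded it is roughly 2006-07-24 01:21 UTC, matching the "first Rust commit from 2006" mentioned in the doc comment.)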
@@ -61,6 +61,12 @@ actor! {
         /// The formats used to compress the tarball
         #[arg(value_name = "FORMAT", default_value_t)]
         compression_formats: CompressionFormats,
+
+        /// Modification time that will be set for all files added to the archive.
+        /// The default is the date of the first Rust commit from 2006.
+        /// This serves for better reproducibility of the archives.
+        #[arg(value_name = "FILE_MTIME", default_value_t = 1153704088)]
+        override_file_mtime: u64,
     }
 }

@@ -114,7 +120,8 @@ impl Generator {
             .input(self.package_name)
             .output(path_to_str(&output)?.into())
             .compression_profile(self.compression_profile)
-            .compression_formats(self.compression_formats);
+            .compression_formats(self.compression_formats)
+            .override_file_mtime(self.override_file_mtime);
         tarballer.run()?;

         Ok(())
@@ -32,6 +32,12 @@ actor! {
         /// The formats used to compress the tarball.
         #[arg(value_name = "FORMAT", default_value_t)]
         compression_formats: CompressionFormats,
+
+        /// Modification time that will be set for all files added to the archive.
+        /// The default is the date of the first Rust commit from 2006.
+        /// This serves for better reproducibility of the archives.
+        #[arg(value_name = "FILE_MTIME", default_value_t = 1153704088)]
+        override_file_mtime: u64,
     }
 }

@@ -65,6 +71,8 @@ impl Tarballer {
         let buf = BufWriter::with_capacity(1024 * 1024, encoder);
         let mut builder = Builder::new(buf);
         // Make uid, gid and mtime deterministic to improve reproducibility
+        // The modification time of directories will be set to the date of the first Rust commit.
+        // The modification time of files will be set to `override_file_mtime` (see `append_path`).
         builder.mode(HeaderMode::Deterministic);

         let pool = rayon::ThreadPoolBuilder::new().num_threads(2).build().unwrap();

@@ -77,7 +85,7 @@ impl Tarballer {
         }
         for path in files {
             let src = Path::new(&self.work_dir).join(&path);
-            append_path(&mut builder, &src, &path)
+            append_path(&mut builder, &src, &path, self.override_file_mtime)
                 .with_context(|| format!("failed to tar file '{}'", src.display()))?;
         }
         builder

@@ -93,10 +101,16 @@ impl Tarballer {
     }
 }

-fn append_path<W: Write>(builder: &mut Builder<W>, src: &Path, path: &String) -> Result<()> {
+fn append_path<W: Write>(
+    builder: &mut Builder<W>,
+    src: &Path,
+    path: &String,
+    override_file_mtime: u64,
+) -> Result<()> {
     let stat = symlink_metadata(src)?;
     let mut header = Header::new_gnu();
     header.set_metadata_in_mode(&stat, HeaderMode::Deterministic);
+    header.set_mtime(override_file_mtime);

     if stat.file_type().is_symlink() {
         let link = read_link(src)?;
tests/assembly/asm-comments.rs (new file, 12 lines)

@@ -0,0 +1,12 @@

//@ assembly-output: emit-asm
//@ only-x86_64
// Check that comments in assembly get passed

#![crate_type = "lib"]

// CHECK-LABEL: test_comments:
#[no_mangle]
pub fn test_comments() {
    // CHECK: example comment
    unsafe { core::arch::asm!("nop // example comment") };
}
@@ -4,7 +4,7 @@ use run_make_support::{fs_wrapper, rustc};

 fn emit_and_check(out_dir: &Path, out_file: &str, format: &str) {
     let out_file = out_dir.join(out_file);
-    rustc().input("foo.rs").emit(&format!("{format}={}", out_file.display())).run();
+    rustc().input("foo.rs").emit(format!("{format}={}", out_file.display())).run();
     assert!(out_file.is_file());
 }
(new file, 8 lines)

@@ -0,0 +1,8 @@

error: expected one of `!` or `::`, found keyword `impl`
  --> $DIR/safe-impl-trait.rs:5:6
   |
LL | safe impl Bar for () { }
   |      ^^^^ expected one of `!` or `::`

error: aborting due to 1 previous error
(new file, 8 lines)

@@ -0,0 +1,8 @@

//@ revisions: gated ungated
#![cfg_attr(gated, feature(unsafe_extern_blocks))]

trait Bar {}
safe impl Bar for () { }
//~^ ERROR expected one of `!` or `::`, found keyword `impl`

fn main() {}
(new file, 8 lines)

@@ -0,0 +1,8 @@

error: expected one of `!` or `::`, found keyword `impl`
  --> $DIR/safe-impl-trait.rs:5:6
   |
LL | safe impl Bar for () { }
   |      ^^^^ expected one of `!` or `::`

error: aborting due to 1 previous error
(new file, 8 lines)

@@ -0,0 +1,8 @@

error: expected one of `!` or `::`, found keyword `trait`
  --> $DIR/safe-trait.rs:4:6
   |
LL | safe trait Foo {}
   |      ^^^^^ expected one of `!` or `::`

error: aborting due to 1 previous error
tests/ui/rust-2024/unsafe-extern-blocks/safe-trait.rs (new file, 7 lines)

@@ -0,0 +1,7 @@

//@ revisions: gated ungated
#![cfg_attr(gated, feature(unsafe_extern_blocks))]

safe trait Foo {}
//~^ ERROR expected one of `!` or `::`, found keyword `trait`

fn main() {}
(new file, 8 lines)

@@ -0,0 +1,8 @@

error: expected one of `!` or `::`, found keyword `trait`
  --> $DIR/safe-trait.rs:4:6
   |
LL | safe trait Foo {}
   |      ^^^^^ expected one of `!` or `::`

error: aborting due to 1 previous error
tests/ui/self/arbitrary-self-from-method-substs-ice.rs (new file, 29 lines)

@@ -0,0 +1,29 @@

//! The same as the non-ICE test, but const eval will run typeck of
//! `get` before running wfcheck (as that may in itself trigger const
//! eval again, and thus cause bogus cycles). This used to ICE because
//! we asserted that an error had already been emitted.

use std::ops::Deref;

struct Foo(u32);
impl Foo {
    const fn get<R: Deref<Target = Self>>(self: R) -> u32 {
        //~^ ERROR: `R` cannot be used as the type of `self`
        //~| ERROR destructor of `R` cannot be evaluated at compile-time
        self.0
        //~^ ERROR cannot borrow here, since the borrowed element may contain interior mutability
        //~| ERROR cannot call non-const fn `<R as Deref>::deref` in constant function
    }
}

const FOO: () = {
    let foo = Foo(1);
    foo.get::<&Foo>();
};

const BAR: [(); {
    FOO;
    0
}] = [];

fn main() {}
tests/ui/self/arbitrary-self-from-method-substs-ice.stderr (new file, 46 lines)

@@ -0,0 +1,46 @@

error[E0658]: cannot borrow here, since the borrowed element may contain interior mutability
  --> $DIR/arbitrary-self-from-method-substs-ice.rs:13:9
   |
LL |         self.0
   |         ^^^^
   |
   = note: see issue #80384 <https://github.com/rust-lang/rust/issues/80384> for more information
   = help: add `#![feature(const_refs_to_cell)]` to the crate attributes to enable
   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date

error[E0015]: cannot call non-const fn `<R as Deref>::deref` in constant functions
  --> $DIR/arbitrary-self-from-method-substs-ice.rs:13:9
   |
LL |         self.0
   |         ^^^^^^
   |
   = note: calls in constant functions are limited to constant functions, tuple structs and tuple variants
help: add `#![feature(const_trait_impl)]` to the crate attributes to enable
   |
LL + #![feature(const_trait_impl)]
   |

error[E0493]: destructor of `R` cannot be evaluated at compile-time
  --> $DIR/arbitrary-self-from-method-substs-ice.rs:10:43
   |
LL |     const fn get<R: Deref<Target = Self>>(self: R) -> u32 {
   |                                           ^^^^ the destructor for this type cannot be evaluated in constant functions
...
LL |     }
   |     - value is dropped here

error[E0658]: `R` cannot be used as the type of `self` without the `arbitrary_self_types` feature
  --> $DIR/arbitrary-self-from-method-substs-ice.rs:10:49
   |
LL |     const fn get<R: Deref<Target = Self>>(self: R) -> u32 {
   |                                                 ^
   |
   = note: see issue #44874 <https://github.com/rust-lang/rust/issues/44874> for more information
   = help: add `#![feature(arbitrary_self_types)]` to the crate attributes to enable
   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
   = help: consider changing to `self`, `&self`, `&mut self`, `self: Box<Self>`, `self: Rc<Self>`, `self: Arc<Self>`, or `self: Pin<P>` (where P is one of the previous types except `Self`)

error: aborting due to 4 previous errors

Some errors have detailed explanations: E0015, E0493, E0658.
For more information about an error, try `rustc --explain E0015`.
@@ -9,7 +9,6 @@ LL | fn get<R: Deref<Target = Self>>(self: R) -> u32 {
   = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
   = help: consider changing to `self`, `&self`, `&mut self`, `self: Box<Self>`, `self: Rc<Self>`, `self: Arc<Self>`, or `self: Pin<P>` (where P is one of the previous types except `Self`)

-ERROR rustc_hir_typeck::method::confirm Foo was a subtype of &Foo but now is not?
 error: aborting due to 1 previous error

 For more information about this error, try `rustc --explain E0658`.