Auto merge of #109019 - matthiaskrgr:rollup-ihjntil, r=matthiaskrgr
Rollup of 9 pull requests

Successful merges:

- #104363 (Make `unused_allocation` lint against `Box::new` too)
- #106633 (Stabilize `nonzero_min_max`)
- #106844 (allow negative numeric literals in `concat!`)
- #108071 (Implement goal caching with the new solver)
- #108542 (Force parentheses around `match` expression in binary expression)
- #108690 (Place size limits on query keys and values)
- #108708 (Prevent overflow through Arc::downgrade)
- #108739 (Prevent the `start_bx` basic block in codegen from having two `Builder`s at the same time)
- #108806 (Querify register_tools and post-expansion early lints)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
commit 8a73f50d87
@@ -436,6 +436,7 @@ fn compute_hir_hash(
pub fn lower_to_hir(tcx: TyCtxt<'_>, (): ()) -> hir::Crate<'_> {
let sess = tcx.sess;
tcx.ensure().output_filenames(());
let _ = tcx.early_lint_checks(()); // Borrows `resolver_for_lowering`.
let (mut resolver, krate) = tcx.resolver_for_lowering(()).steal();

let ast_index = index_crate(&resolver.node_id_to_def_id, &krate);
@@ -244,6 +244,10 @@ fn print_expr_binary(&mut self, op: ast::BinOp, lhs: &ast::Expr, rhs: &ast::Expr
(&ast::ExprKind::Let { .. }, _) if !parser::needs_par_as_let_scrutinee(prec) => {
parser::PREC_FORCE_PAREN
}
// For a binary expression like `(match () { _ => a }) OP b`, the parens are required
// otherwise the parser would interpret `match () { _ => a }` as a statement,
// with the remaining `OP b` not making sense. So we force parens.
(&ast::ExprKind::Match(..), _) => parser::PREC_FORCE_PAREN,
_ => left_prec,
};
@@ -42,6 +42,18 @@ pub fn expand_concat(
has_errors = true;
}
},
// We also want to allow negative numeric literals.
ast::ExprKind::Unary(ast::UnOp::Neg, ref expr) if let ast::ExprKind::Lit(token_lit) = expr.kind => {
match ast::LitKind::from_token_lit(token_lit) {
Ok(ast::LitKind::Int(i, _)) => accumulator.push_str(&format!("-{i}")),
Ok(ast::LitKind::Float(f, _)) => accumulator.push_str(&format!("-{f}")),
Err(err) => {
report_lit_error(&cx.sess.parse_sess, err, token_lit, e.span);
has_errors = true;
}
_ => missing_literal.push(e.span),
}
}
ast::ExprKind::IncludedBytes(..) => {
cx.span_err(e.span, "cannot concatenate a byte string literal")
}

@@ -53,9 +65,10 @@ pub fn expand_concat(
}
}
}

if !missing_literal.is_empty() {
let mut err = cx.struct_span_err(missing_literal, "expected a literal");
err.note("only literals (like `\"foo\"`, `42` and `3.14`) can be passed to `concat!()`");
err.note("only literals (like `\"foo\"`, `-42` and `3.14`) can be passed to `concat!()`");
err.emit();
return DummyResult::any(sp);
} else if has_errors {
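A usage sketch of the new `concat!` behaviour (illustrative only, not part of this diff; it assumes a toolchain that includes #106844):

```rust
// `concat!` now accepts negative integer and float literals directly,
// where it previously rejected them as non-literals.
fn main() {
    assert_eq!(concat!("x = ", -42), "x = -42");
    assert_eq!(concat!(-1.5, " and ", -3), "-1.5 and -3");
}
```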
@@ -258,6 +258,10 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
// Apply debuginfo to the newly allocated locals.
fx.debug_introduce_locals(&mut start_bx);

// The builders will be created separately for each basic block at `codegen_block`.
// So drop the builder of `start_llbb` to avoid having two at the same time.
drop(start_bx);

// Codegen the body of each block using reverse postorder
for (bb, _) in traversal::reverse_postorder(&mir) {
fx.codegen_block(bb);
@@ -331,6 +331,7 @@ fn run_compiler(
if let Some(ppm) = &sess.opts.pretty {
if ppm.needs_ast_map() {
queries.global_ctxt()?.enter(|tcx| {
tcx.ensure().early_lint_checks(());
pretty::print_after_hir_lowering(tcx, *ppm);
Ok(())
})?;
@@ -12,13 +12,13 @@
use rustc_ast::visit::{AssocCtxt, Visitor};
use rustc_ast::{self as ast, AttrVec, Attribute, HasAttrs, Item, NodeId, PatKind};
use rustc_attr::{self as attr, Deprecation, Stability};
use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::sync::{self, Lrc};
use rustc_errors::{
Applicability, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, MultiSpan, PResult,
};
use rustc_lint_defs::builtin::PROC_MACRO_BACK_COMPAT;
use rustc_lint_defs::{BufferedEarlyLint, BuiltinLintDiagnostics};
use rustc_lint_defs::{BufferedEarlyLint, BuiltinLintDiagnostics, RegisteredTools};
use rustc_parse::{self, parser, MACRO_ARGUMENTS};
use rustc_session::errors::report_lit_error;
use rustc_session::{parse::ParseSess, Limit, Session};

@@ -947,14 +947,14 @@ fn cfg_accessible(
fn declare_proc_macro(&mut self, id: NodeId);

/// Tools registered with `#![register_tool]` and used by tool attributes and lints.
fn registered_tools(&self) -> &FxHashSet<Ident>;
fn registered_tools(&self) -> &RegisteredTools;
}

pub trait LintStoreExpand {
fn pre_expansion_lint(
&self,
sess: &Session,
registered_tools: &FxHashSet<Ident>,
registered_tools: &RegisteredTools,
node_id: NodeId,
attrs: &[Attribute],
items: &[P<Item>],
@@ -53,6 +53,12 @@ pub struct Obligation<'tcx, T> {
pub recursion_depth: usize,
}

impl<'tcx, P> From<Obligation<'tcx, P>> for solve::Goal<'tcx, P> {
fn from(value: Obligation<'tcx, P>) -> Self {
solve::Goal { param_env: value.param_env, predicate: value.predicate }
}
}

pub type PredicateObligation<'tcx> = Obligation<'tcx, ty::Predicate<'tcx>>;
pub type TraitObligation<'tcx> = Obligation<'tcx, ty::PolyTraitPredicate<'tcx>>;
@@ -11,7 +11,7 @@
use rustc_data_structures::steal::Steal;
use rustc_data_structures::sync::{Lrc, OnceCell, WorkerLocal};
use rustc_errors::PResult;
use rustc_expand::base::{ExtCtxt, LintStoreExpand, ResolverExpand};
use rustc_expand::base::{ExtCtxt, LintStoreExpand};
use rustc_hir::def_id::{StableCrateId, LOCAL_CRATE};
use rustc_lint::{unerased_lint_store, BufferedEarlyLint, EarlyCheckNode, LintStore};
use rustc_metadata::creader::CStore;

@@ -178,7 +178,7 @@ fn configure_and_expand(mut krate: ast::Crate, resolver: &mut Resolver<'_, '_>)
let sess = tcx.sess;
let lint_store = unerased_lint_store(tcx);
let crate_name = tcx.crate_name(LOCAL_CRATE);
pre_expansion_lint(sess, lint_store, resolver.registered_tools(), &krate, crate_name);
pre_expansion_lint(sess, lint_store, tcx.registered_tools(()), &krate, crate_name);
rustc_builtin_macros::register_builtin_macros(resolver);

krate = sess.time("crate_injection", || {

@@ -302,6 +302,16 @@ fn configure_and_expand(mut krate: ast::Crate, resolver: &mut Resolver<'_, '_>)

// Done with macro expansion!

resolver.resolve_crate(&krate);

krate
}

fn early_lint_checks(tcx: TyCtxt<'_>, (): ()) {
let sess = tcx.sess;
let (resolver, krate) = &*tcx.resolver_for_lowering(()).borrow();
let mut lint_buffer = resolver.lint_buffer.steal();

if sess.opts.unstable_opts.input_stats {
eprintln!("Post-expansion node count: {}", count_nodes(&krate));
}

@@ -310,8 +320,6 @@ fn configure_and_expand(mut krate: ast::Crate, resolver: &mut Resolver<'_, '_>)
hir_stats::print_ast_stats(&krate, "POST EXPANSION AST STATS", "ast-stats-2");
}

resolver.resolve_crate(&krate);

// Needs to go *after* expansion to be able to check the results of macro expansion.
sess.time("complete_gated_feature_checking", || {
rustc_ast_passes::feature_gate::check_crate(&krate, sess);

@@ -321,7 +329,7 @@ fn configure_and_expand(mut krate: ast::Crate, resolver: &mut Resolver<'_, '_>)
sess.parse_sess.buffered_lints.with_lock(|buffered_lints| {
info!("{} parse sess buffered_lints", buffered_lints.len());
for early_lint in buffered_lints.drain(..) {
resolver.lint_buffer().add_early_lint(early_lint);
lint_buffer.add_early_lint(early_lint);
}
});
@@ -340,20 +348,16 @@ fn configure_and_expand(mut krate: ast::Crate, resolver: &mut Resolver<'_, '_>)
}
});

sess.time("early_lint_checks", || {
let lint_buffer = Some(std::mem::take(resolver.lint_buffer()));
rustc_lint::check_ast_node(
sess,
false,
lint_store,
resolver.registered_tools(),
lint_buffer,
rustc_lint::BuiltinCombinedEarlyLintPass::new(),
&krate,
)
});

krate
let lint_store = unerased_lint_store(tcx);
rustc_lint::check_ast_node(
sess,
false,
lint_store,
tcx.registered_tools(()),
Some(lint_buffer),
rustc_lint::BuiltinCombinedEarlyLintPass::new(),
&**krate,
)
}

// Returns all the paths that correspond to generated files.
@@ -557,6 +561,7 @@ fn resolver_for_lowering<'tcx>(
(): (),
) -> &'tcx Steal<(ty::ResolverAstLowering, Lrc<ast::Crate>)> {
let arenas = Resolver::arenas();
let _ = tcx.registered_tools(()); // Uses `crate_for_resolver`.
let krate = tcx.crate_for_resolver(()).steal();
let mut resolver = Resolver::new(tcx, &krate, &arenas);
let krate = configure_and_expand(krate, &mut resolver);

@@ -629,6 +634,7 @@ fn output_filenames(tcx: TyCtxt<'_>, (): ()) -> Arc<OutputFilenames> {
providers.hir_crate = rustc_ast_lowering::lower_to_hir;
providers.output_filenames = output_filenames;
providers.resolver_for_lowering = resolver_for_lowering;
providers.early_lint_checks = early_lint_checks;
proc_macro_decls::provide(providers);
rustc_const_eval::provide(providers);
rustc_middle::hir::provide(providers);

@@ -637,6 +643,7 @@ fn output_filenames(tcx: TyCtxt<'_>, (): ()) -> Arc<OutputFilenames> {
rustc_mir_transform::provide(providers);
rustc_monomorphize::provide(providers);
rustc_privacy::provide(providers);
rustc_resolve::provide(providers);
rustc_hir_analysis::provide(providers);
rustc_hir_typeck::provide(providers);
ty::provide(providers);
@@ -128,7 +128,7 @@ fn lint_expectations(tcx: TyCtxt<'_>, (): ()) -> Vec<(LintExpectationId, LintExp
},
warn_about_weird_lints: false,
store,
registered_tools: &tcx.resolutions(()).registered_tools,
registered_tools: &tcx.registered_tools(()),
};

builder.add_command_line();

@@ -156,7 +156,7 @@ fn shallow_lint_levels_on(tcx: TyCtxt<'_>, owner: hir::OwnerId) -> ShallowLintLe
},
warn_about_weird_lints: false,
store,
registered_tools: &tcx.resolutions(()).registered_tools,
registered_tools: &tcx.registered_tools(()),
};

if owner == hir::CRATE_OWNER_ID {
@@ -1349,9 +1349,8 @@ fn check_item(&mut self, cx: &EarlyContext<'_>, item: &ast::Item) {
/// ### Example
///
/// ```rust
/// #![feature(box_syntax)]
/// fn main() {
/// let a = (box [1, 2, 3]).len();
/// let a = Box::new([1, 2, 3]).len();
/// }
/// ```
///

@@ -1373,6 +1372,11 @@ impl<'tcx> LateLintPass<'tcx> for UnusedAllocation {
fn check_expr(&mut self, cx: &LateContext<'_>, e: &hir::Expr<'_>) {
match e.kind {
hir::ExprKind::Box(_) => {}
hir::ExprKind::Call(path_expr, [_])
if let hir::ExprKind::Path(qpath) = &path_expr.kind
&& let Some(did) = cx.qpath_res(qpath, path_expr.hir_id).opt_def_id()
&& cx.tcx.is_diagnostic_item(sym::box_new, did)
=> {}
_ => return,
}
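For illustration (not part of the diff): with this change the warn-by-default `unused_allocation` lint also fires on `Box::new` whose result is only used temporarily, for example:

```rust
fn main() {
    // warns: unused_allocation — the box exists only to call `.len()` on it
    let n = Box::new([1, 2, 3]).len();
    // the allocation-free equivalent
    let m = [1, 2, 3].len();
    assert_eq!(n, m);
}
```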
@@ -8,7 +8,7 @@
pub use self::Level::*;
use rustc_ast::node_id::NodeId;
use rustc_ast::{AttrId, Attribute};
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher, ToStableHashKey};
use rustc_error_messages::{DiagnosticMessage, MultiSpan};
use rustc_hir::HashStableContext;

@@ -533,6 +533,7 @@ pub enum BuiltinLintDiagnostics {

/// Lints that are buffered up early on in the `Session` before the
/// `LintLevels` is calculated.
#[derive(Debug)]
pub struct BufferedEarlyLint {
/// The span of code that we are linting on.
pub span: MultiSpan,

@@ -551,7 +552,7 @@ pub struct BufferedEarlyLint {
pub diagnostic: BuiltinLintDiagnostics,
}

#[derive(Default)]
#[derive(Default, Debug)]
pub struct LintBuffer {
pub map: FxIndexMap<NodeId, Vec<BufferedEarlyLint>>,
}

@@ -601,6 +602,8 @@ pub fn buffer_lint_with_diagnostic(
}
}

pub type RegisteredTools = FxIndexSet<Ident>;

/// Declares a static item of type `&'static Lint`.
///
/// See <https://rustc-dev-guide.rust-lang.org/diagnostics.html> for
@@ -108,6 +108,7 @@ macro_rules! arena_types {
// (during lowering) and the `librustc_middle` arena (for decoding MIR)
[decode] asm_template: rustc_ast::InlineAsmTemplatePiece,
[decode] used_trait_imports: rustc_data_structures::unord::UnordSet<rustc_hir::def_id::LocalDefId>,
[decode] registered_tools: rustc_middle::ty::RegisteredTools,
[decode] is_late_bound_map: rustc_data_structures::fx::FxIndexSet<rustc_hir::ItemLocalId>,
[decode] impl_source: rustc_middle::traits::ImplSource<'tcx, ()>,
@@ -26,6 +26,15 @@
desc { "triggering a delay span bug" }
}

query registered_tools(_: ()) -> &'tcx ty::RegisteredTools {
arena_cache
desc { "compute registered tools for crate" }
}

query early_lint_checks(_: ()) -> () {
desc { "perform lints prior to macro expansion" }
}

query resolutions(_: ()) -> &'tcx ty::ResolverGlobalCtxt {
feedable
no_hash
@@ -1,12 +1,104 @@
use std::ops::ControlFlow;

use rustc_data_structures::intern::Interned;
use rustc_query_system::cache::Cache;

use crate::infer::canonical::QueryRegionConstraints;
use crate::infer::canonical::{CanonicalVarValues, QueryRegionConstraints};
use crate::traits::query::NoSolution;
use crate::traits::Canonical;
use crate::ty::{
FallibleTypeFolder, Ty, TyCtxt, TypeFoldable, TypeFolder, TypeVisitable, TypeVisitor,
self, FallibleTypeFolder, ToPredicate, Ty, TyCtxt, TypeFoldable, TypeFolder, TypeVisitable,
TypeVisitor,
};

pub type EvaluationCache<'tcx> = Cache<CanonicalGoal<'tcx>, QueryResult<'tcx>>;

/// A goal is a statement, i.e. `predicate`, we want to prove
/// given some assumptions, i.e. `param_env`.
///
/// Most of the time the `param_env` contains the `where`-bounds of the function
/// we're currently typechecking while the `predicate` is some trait bound.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash, TypeFoldable, TypeVisitable)]
pub struct Goal<'tcx, P> {
pub param_env: ty::ParamEnv<'tcx>,
pub predicate: P,
}

impl<'tcx, P> Goal<'tcx, P> {
pub fn new(
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
predicate: impl ToPredicate<'tcx, P>,
) -> Goal<'tcx, P> {
Goal { param_env, predicate: predicate.to_predicate(tcx) }
}

/// Updates the goal to one with a different `predicate` but the same `param_env`.
pub fn with<Q>(self, tcx: TyCtxt<'tcx>, predicate: impl ToPredicate<'tcx, Q>) -> Goal<'tcx, Q> {
Goal { param_env: self.param_env, predicate: predicate.to_predicate(tcx) }
}
}

#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash, TypeFoldable, TypeVisitable)]
pub struct Response<'tcx> {
pub var_values: CanonicalVarValues<'tcx>,
/// Additional constraints returned by this query.
pub external_constraints: ExternalConstraints<'tcx>,
pub certainty: Certainty,
}
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash, TypeFoldable, TypeVisitable)]
pub enum Certainty {
Yes,
Maybe(MaybeCause),
}

impl Certainty {
pub const AMBIGUOUS: Certainty = Certainty::Maybe(MaybeCause::Ambiguity);

/// When proving multiple goals using **AND**, e.g. nested obligations for an impl,
/// use this function to unify the certainty of these goals
pub fn unify_and(self, other: Certainty) -> Certainty {
match (self, other) {
(Certainty::Yes, Certainty::Yes) => Certainty::Yes,
(Certainty::Yes, Certainty::Maybe(_)) => other,
(Certainty::Maybe(_), Certainty::Yes) => self,
(Certainty::Maybe(MaybeCause::Overflow), Certainty::Maybe(MaybeCause::Overflow)) => {
Certainty::Maybe(MaybeCause::Overflow)
}
// If at least one of the goals is ambiguous, hide the overflow as the ambiguous goal
// may still result in failure.
(Certainty::Maybe(MaybeCause::Ambiguity), Certainty::Maybe(_))
| (Certainty::Maybe(_), Certainty::Maybe(MaybeCause::Ambiguity)) => {
Certainty::Maybe(MaybeCause::Ambiguity)
}
}
}
}

/// Why we failed to evaluate a goal.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash, TypeFoldable, TypeVisitable)]
pub enum MaybeCause {
/// We failed due to ambiguity. This ambiguity can either
/// be a true ambiguity, i.e. there are multiple different answers,
/// or we hit a case where we just don't bother, e.g. `?x: Trait` goals.
Ambiguity,
/// We gave up due to an overflow, most often by hitting the recursion limit.
Overflow,
}

pub type CanonicalGoal<'tcx, T = ty::Predicate<'tcx>> = Canonical<'tcx, Goal<'tcx, T>>;

pub type CanonicalResponse<'tcx> = Canonical<'tcx, Response<'tcx>>;

/// The result of evaluating a canonical query.
///
/// FIXME: We use a different type than the existing canonical queries. This is because
/// we need to add a `Certainty` for `overflow` and may want to restructure this code without
/// having to worry about changes to currently used code. Once we've made progress on this
/// solver, merge the two responses again.
pub type QueryResult<'tcx> = Result<CanonicalResponse<'tcx>, NoSolution>;

#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
pub struct ExternalConstraints<'tcx>(pub(crate) Interned<'tcx, ExternalConstraintsData<'tcx>>);
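To make the `unify_and` lattice above concrete, here is a minimal standalone sketch with locally defined stand-in types (not the rustc-internal ones):

```rust
#[derive(Debug, PartialEq, Clone, Copy)]
enum Cause { Ambiguity, Overflow }

#[derive(Debug, PartialEq, Clone, Copy)]
enum Certainty { Yes, Maybe(Cause) }

// Combining goals with AND keeps the "worst" certainty, and ambiguity wins over overflow.
fn unify_and(a: Certainty, b: Certainty) -> Certainty {
    match (a, b) {
        (Certainty::Yes, Certainty::Yes) => Certainty::Yes,
        (Certainty::Yes, m) | (m, Certainty::Yes) => m,
        (Certainty::Maybe(Cause::Overflow), Certainty::Maybe(Cause::Overflow)) => {
            Certainty::Maybe(Cause::Overflow)
        }
        _ => Certainty::Maybe(Cause::Ambiguity),
    }
}

fn main() {
    assert_eq!(
        unify_and(Certainty::Yes, Certainty::Maybe(Cause::Overflow)),
        Certainty::Maybe(Cause::Overflow)
    );
    assert_eq!(
        unify_and(Certainty::Maybe(Cause::Ambiguity), Certainty::Maybe(Cause::Overflow)),
        Certainty::Maybe(Cause::Ambiguity)
    );
}
```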
@@ -17,6 +17,7 @@
};
use crate::thir::Thir;
use crate::traits;
use crate::traits::solve;
use crate::traits::solve::{ExternalConstraints, ExternalConstraintsData};
use crate::ty::query::{self, TyCtxtAt};
use crate::ty::{

@@ -537,6 +538,9 @@ pub struct GlobalCtxt<'tcx> {
/// Merge this with `selection_cache`?
pub evaluation_cache: traits::EvaluationCache<'tcx>,

/// Caches the results of goal evaluation in the new solver.
pub new_solver_evaluation_cache: solve::EvaluationCache<'tcx>,

/// Data layout specification for the current target.
pub data_layout: TargetDataLayout,

@@ -712,6 +716,7 @@ pub fn create_global_ctxt(
pred_rcache: Default::default(),
selection_cache: Default::default(),
evaluation_cache: Default::default(),
new_solver_evaluation_cache: Default::default(),
data_layout,
alloc_map: Lock::new(interpret::AllocMap::new()),
}
@@ -34,6 +34,7 @@
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet};
use rustc_data_structures::intern::Interned;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::steal::Steal;
use rustc_data_structures::tagged_ptr::CopyTaggedPtr;
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;

@@ -44,6 +45,8 @@
use rustc_macros::HashStable;
use rustc_query_system::ich::StableHashingContext;
use rustc_serialize::{Decodable, Encodable};
use rustc_session::lint::LintBuffer;
pub use rustc_session::lint::RegisteredTools;
use rustc_span::hygiene::MacroKind;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{ExpnId, ExpnKind, Span};

@@ -148,8 +151,6 @@

// Data types

pub type RegisteredTools = FxHashSet<Ident>;

pub struct ResolverOutputs {
pub global_ctxt: ResolverGlobalCtxt,
pub ast_lowering: ResolverAstLowering,

@@ -175,7 +176,6 @@ pub struct ResolverGlobalCtxt {
/// Mapping from ident span to path span for paths that don't exist as written, but that
/// exist under `std`. For example, wrote `str::from_utf8` instead of `std::str::from_utf8`.
pub confused_type_with_std_module: FxHashMap<Span, Span>,
pub registered_tools: RegisteredTools,
pub doc_link_resolutions: FxHashMap<LocalDefId, DocLinkResMap>,
pub doc_link_traits_in_scope: FxHashMap<LocalDefId, Vec<DefId>>,
pub all_macro_rules: FxHashMap<Symbol, Res<ast::NodeId>>,

@@ -209,6 +209,9 @@ pub struct ResolverAstLowering {
pub builtin_macro_kinds: FxHashMap<LocalDefId, MacroKind>,
/// List functions and methods for which lifetime elision was successful.
pub lifetime_elision_allowed: FxHashSet<ast::NodeId>,

/// Lints that were emitted by the resolver and early lints.
pub lint_buffer: Steal<LintBuffer>,
}

#[derive(Clone, Copy, Debug)]
@@ -252,6 +252,36 @@ pub mod query_storage {
)*
}

$(
// Ensure that keys grow no larger than 64 bytes
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
const _: () = {
if mem::size_of::<query_keys::$name<'static>>() > 64 {
panic!("{}", concat!(
"the query `",
stringify!($name),
"` has a key type `",
stringify!($($K)*),
"` that is too large"
));
}
};

// Ensure that values grow no larger than 64 bytes
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
const _: () = {
if mem::size_of::<query_values::$name<'static>>() > 64 {
panic!("{}", concat!(
"the query `",
stringify!($name),
"` has a value type `",
stringify!($V),
"` that is too large"
));
}
};
)*

pub struct QueryArenas<'tcx> {
$($(#[$attr])* pub $name: query_if_arena!([$($modifiers)*]
(WorkerLocal<TypedArena<<$V as Deref>::Target>>)
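A standalone sketch of the compile-time size guard the macro above expands to, using a hypothetical `ExampleKey` type in place of a generated query key type:

```rust
#[allow(dead_code)]
struct ExampleKey {
    id: u64,
    payload: [u64; 4],
}

// Evaluated at compile time; the build fails with the panic message if the
// type ever grows past the 64-byte budget (checked only on 64-bit x86 here).
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
const _: () = {
    if std::mem::size_of::<ExampleKey>() > 64 {
        panic!("`ExampleKey` is too large");
    }
};

fn main() {
    println!("size_of::<ExampleKey>() = {}", std::mem::size_of::<ExampleKey>());
}
```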
@@ -27,6 +27,7 @@
use rustc_ast::{AngleBracketedArg, Crate, Expr, ExprKind, GenericArg, GenericArgs, LitKind, Path};
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet};
use rustc_data_structures::intern::Interned;
use rustc_data_structures::steal::Steal;
use rustc_data_structures::sync::{Lrc, MappedReadGuard};
use rustc_errors::{
Applicability, DiagnosticBuilder, DiagnosticMessage, ErrorGuaranteed, SubdiagnosticMessage,

@@ -965,7 +966,7 @@ pub struct Resolver<'a, 'tcx> {
/// A small map keeping true kinds of built-in macros that appear to be fn-like on
/// the surface (`macro` items in libcore), but are actually attributes or derives.
builtin_macro_kinds: FxHashMap<LocalDefId, MacroKind>,
registered_tools: RegisteredTools,
registered_tools: &'tcx RegisteredTools,
macro_use_prelude: FxHashMap<Symbol, &'a NameBinding<'a>>,
macro_map: FxHashMap<DefId, MacroData>,
dummy_ext_bang: Lrc<SyntaxExtension>,

@@ -1233,7 +1234,7 @@ pub fn new(
}
}

let registered_tools = macros::registered_tools(tcx.sess, &krate.attrs);
let registered_tools = tcx.registered_tools(());

let features = tcx.sess.features_untracked();

@@ -1408,7 +1409,6 @@ pub fn into_outputs(self) -> ResolverOutputs {
trait_impls: self.trait_impls,
proc_macros,
confused_type_with_std_module,
registered_tools: self.registered_tools,
doc_link_resolutions: self.doc_link_resolutions,
doc_link_traits_in_scope: self.doc_link_traits_in_scope,
all_macro_rules: self.all_macro_rules,

@@ -1426,6 +1426,7 @@ pub fn into_outputs(self) -> ResolverOutputs {
trait_map: self.trait_map,
builtin_macro_kinds: self.builtin_macro_kinds,
lifetime_elision_allowed: self.lifetime_elision_allowed,
lint_buffer: Steal::new(self.lint_buffer),
};
ResolverOutputs { global_ctxt, ast_lowering }
}

@@ -2032,3 +2033,7 @@ fn with_root_span(node_id: NodeId, path_span: Span, root_span: Span) -> Finalize
Finalize { node_id, path_span, root_span, report_private: true }
}
}

pub fn provide(providers: &mut ty::query::Providers) {
providers.registered_tools = macros::registered_tools;
}
@@ -8,7 +8,6 @@
use rustc_ast::{self as ast, Inline, ItemKind, ModKind, NodeId};
use rustc_ast_pretty::pprust;
use rustc_attr::StabilityLevel;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::intern::Interned;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{struct_span_err, Applicability};

@@ -20,11 +19,11 @@
use rustc_hir::def_id::{CrateNum, LocalDefId};
use rustc_middle::middle::stability;
use rustc_middle::ty::RegisteredTools;
use rustc_middle::ty::TyCtxt;
use rustc_session::lint::builtin::{LEGACY_DERIVE_HELPERS, SOFT_UNSTABLE};
use rustc_session::lint::builtin::{UNUSED_MACROS, UNUSED_MACRO_RULES};
use rustc_session::lint::BuiltinLintDiagnostics;
use rustc_session::parse::feature_err;
use rustc_session::Session;
use rustc_span::edition::Edition;
use rustc_span::hygiene::{self, ExpnData, ExpnKind, LocalExpnId};
use rustc_span::hygiene::{AstPass, MacroKind};

@@ -111,15 +110,17 @@ fn fast_print_path(path: &ast::Path) -> Symbol {
}
}

pub(crate) fn registered_tools(sess: &Session, attrs: &[ast::Attribute]) -> FxHashSet<Ident> {
let mut registered_tools = FxHashSet::default();
for attr in sess.filter_by_name(attrs, sym::register_tool) {
pub(crate) fn registered_tools(tcx: TyCtxt<'_>, (): ()) -> RegisteredTools {
let mut registered_tools = RegisteredTools::default();
let krate = tcx.crate_for_resolver(()).borrow();
for attr in tcx.sess.filter_by_name(&krate.attrs, sym::register_tool) {
for nested_meta in attr.meta_item_list().unwrap_or_default() {
match nested_meta.ident() {
Some(ident) => {
if let Some(old_ident) = registered_tools.replace(ident) {
let msg = format!("{} `{}` was already registered", "tool", ident);
sess.struct_span_err(ident.span, &msg)
tcx.sess
.struct_span_err(ident.span, &msg)
.span_label(old_ident.span, "already registered here")
.emit();
}

@@ -127,7 +128,10 @@ pub(crate) fn registered_tools(sess: &Session, attrs: &[ast::Attribute]) -> FxHa
None => {
let msg = format!("`{}` only accepts identifiers", sym::register_tool);
let span = nested_meta.span();
sess.struct_span_err(span, &msg).span_label(span, "not an identifier").emit();
tcx.sess
.struct_span_err(span, &msg)
.span_label(span, "not an identifier")
.emit();
}
}
}
@@ -429,6 +429,7 @@
borrowck_graphviz_format,
borrowck_graphviz_postflow,
box_free,
box_new,
box_patterns,
box_syntax,
bpf_target_feature,
@@ -2,11 +2,12 @@

#[cfg(doc)]
use super::trait_goals::structural_traits::*;
use super::{CanonicalResponse, Certainty, EvalCtxt, Goal, MaybeCause, QueryResult};
use super::EvalCtxt;
use itertools::Itertools;
use rustc_hir::def_id::DefId;
use rustc_infer::traits::query::NoSolution;
use rustc_infer::traits::util::elaborate_predicates;
use rustc_middle::traits::solve::{CanonicalResponse, Certainty, Goal, MaybeCause, QueryResult};
use rustc_middle::ty::TypeFoldable;
use rustc_middle::ty::{self, Ty, TyCtxt};
use std::fmt::Debug;
@@ -21,11 +21,13 @@
use rustc_infer::infer::canonical::{Canonical, CanonicalVarValues};
use rustc_infer::infer::{InferCtxt, InferOk, TyCtxtInferExt};
use rustc_infer::traits::query::NoSolution;
use rustc_infer::traits::Obligation;
use rustc_middle::traits::solve::{ExternalConstraints, ExternalConstraintsData};
use rustc_middle::traits::solve::{
CanonicalGoal, CanonicalResponse, Certainty, ExternalConstraints, ExternalConstraintsData,
Goal, MaybeCause, QueryResult, Response,
};
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_middle::ty::{
CoercePredicate, RegionOutlivesPredicate, SubtypePredicate, ToPredicate, TypeOutlivesPredicate,
CoercePredicate, RegionOutlivesPredicate, SubtypePredicate, TypeOutlivesPredicate,
};
use rustc_span::DUMMY_SP;

@@ -43,45 +45,6 @@
pub use eval_ctxt::EvalCtxt;
pub use fulfill::FulfillmentCtxt;

/// A goal is a statement, i.e. `predicate`, we want to prove
/// given some assumptions, i.e. `param_env`.
///
/// Most of the time the `param_env` contains the `where`-bounds of the function
/// we're currently typechecking while the `predicate` is some trait bound.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash, TypeFoldable, TypeVisitable)]
pub struct Goal<'tcx, P> {
param_env: ty::ParamEnv<'tcx>,
predicate: P,
}

impl<'tcx, P> Goal<'tcx, P> {
pub fn new(
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
predicate: impl ToPredicate<'tcx, P>,
) -> Goal<'tcx, P> {
Goal { param_env, predicate: predicate.to_predicate(tcx) }
}

/// Updates the goal to one with a different `predicate` but the same `param_env`.
fn with<Q>(self, tcx: TyCtxt<'tcx>, predicate: impl ToPredicate<'tcx, Q>) -> Goal<'tcx, Q> {
Goal { param_env: self.param_env, predicate: predicate.to_predicate(tcx) }
}
}

impl<'tcx, P> From<Obligation<'tcx, P>> for Goal<'tcx, P> {
fn from(obligation: Obligation<'tcx, P>) -> Goal<'tcx, P> {
Goal { param_env: obligation.param_env, predicate: obligation.predicate }
}
}
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash, TypeFoldable, TypeVisitable)]
pub struct Response<'tcx> {
pub var_values: CanonicalVarValues<'tcx>,
/// Additional constraints returned by this query.
pub external_constraints: ExternalConstraints<'tcx>,
pub certainty: Certainty,
}

trait CanonicalResponseExt {
fn has_no_inference_or_external_constraints(&self) -> bool;
}
@@ -94,56 +57,6 @@ fn has_no_inference_or_external_constraints(&self) -> bool {
}
}

#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash, TypeFoldable, TypeVisitable)]
pub enum Certainty {
Yes,
Maybe(MaybeCause),
}

impl Certainty {
pub const AMBIGUOUS: Certainty = Certainty::Maybe(MaybeCause::Ambiguity);

/// When proving multiple goals using **AND**, e.g. nested obligations for an impl,
/// use this function to unify the certainty of these goals
pub fn unify_and(self, other: Certainty) -> Certainty {
match (self, other) {
(Certainty::Yes, Certainty::Yes) => Certainty::Yes,
(Certainty::Yes, Certainty::Maybe(_)) => other,
(Certainty::Maybe(_), Certainty::Yes) => self,
(Certainty::Maybe(MaybeCause::Overflow), Certainty::Maybe(MaybeCause::Overflow)) => {
Certainty::Maybe(MaybeCause::Overflow)
}
// If at least one of the goals is ambiguous, hide the overflow as the ambiguous goal
// may still result in failure.
(Certainty::Maybe(MaybeCause::Ambiguity), Certainty::Maybe(_))
| (Certainty::Maybe(_), Certainty::Maybe(MaybeCause::Ambiguity)) => {
Certainty::Maybe(MaybeCause::Ambiguity)
}
}
}
}

/// Why we failed to evaluate a goal.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash, TypeFoldable, TypeVisitable)]
pub enum MaybeCause {
/// We failed due to ambiguity. This ambiguity can either
/// be a true ambiguity, i.e. there are multiple different answers,
/// or we hit a case where we just don't bother, e.g. `?x: Trait` goals.
Ambiguity,
/// We gave up due to an overflow, most often by hitting the recursion limit.
Overflow,
}

type CanonicalGoal<'tcx, T = ty::Predicate<'tcx>> = Canonical<'tcx, Goal<'tcx, T>>;
type CanonicalResponse<'tcx> = Canonical<'tcx, Response<'tcx>>;
/// The result of evaluating a canonical query.
///
/// FIXME: We use a different type than the existing canonical queries. This is because
/// we need to add a `Certainty` for `overflow` and may want to restructure this code without
/// having to worry about changes to currently used code. Once we've made progress on this
/// solver, merge the two responses again.
pub type QueryResult<'tcx> = Result<CanonicalResponse<'tcx>, NoSolution>;

pub trait InferCtxtEvalExt<'tcx> {
/// Evaluates a goal from **outside** of the trait solver.
///
@@ -2,7 +2,7 @@

use super::assembly;
use super::trait_goals::structural_traits;
use super::{Certainty, EvalCtxt, Goal, QueryResult};
use super::EvalCtxt;
use rustc_errors::ErrorGuaranteed;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;

@@ -11,6 +11,7 @@
use rustc_infer::traits::query::NoSolution;
use rustc_infer::traits::specialization_graph::LeafDef;
use rustc_infer::traits::Reveal;
use rustc_middle::traits::solve::{CanonicalResponse, Certainty, Goal, QueryResult};
use rustc_middle::ty::fast_reject::{DeepRejectCtxt, TreatParams};
use rustc_middle::ty::ProjectionPredicate;
use rustc_middle::ty::{self, Ty, TyCtxt};

@@ -512,7 +513,7 @@ fn consider_builtin_unsize_candidate(
fn consider_builtin_dyn_upcast_candidates(
_ecx: &mut EvalCtxt<'_, 'tcx>,
goal: Goal<'tcx, Self>,
) -> Vec<super::CanonicalResponse<'tcx>> {
) -> Vec<CanonicalResponse<'tcx>> {
bug!("`Unsize` does not have an associated type: {:?}", goal);
}
@@ -8,12 +8,10 @@
//!
//! FIXME(@lcnr): Write that section, feel free to ping me if you need help here
//! before then or if I still haven't done that before January 2023.
use super::overflow::OverflowData;
use super::StackDepth;
use crate::solve::{CanonicalGoal, QueryResult};
use rustc_data_structures::fx::FxHashMap;
use rustc_index::vec::IndexVec;
use rustc_middle::ty::TyCtxt;
use rustc_middle::traits::solve::{CanonicalGoal, QueryResult};

rustc_index::newtype_index! {
pub struct EntryIndex {}

@@ -98,26 +96,3 @@ pub(super) fn provisional_result(&self, entry_index: EntryIndex) -> QueryResult<
self.entries[entry_index].response
}
}

pub(super) fn try_move_finished_goal_to_global_cache<'tcx>(
tcx: TyCtxt<'tcx>,
overflow_data: &mut OverflowData,
stack: &IndexVec<super::StackDepth, super::StackElem<'tcx>>,
goal: CanonicalGoal<'tcx>,
response: QueryResult<'tcx>,
) {
// We move goals to the global cache if we either did not hit an overflow or if it's
// the root goal as that will now always hit the same overflow limit.
//
// NOTE: We cannot move any non-root goals to the global cache even if their final result
// isn't impacted by the overflow as that goal still has unstable query dependencies
// because it didn't go its full depth.
//
// FIXME(@lcnr): We could still cache subtrees which are not impacted by overflow though.
// Tracking that info correctly isn't trivial, so I haven't implemented it for now.
let should_cache_globally = !overflow_data.did_overflow() || stack.is_empty();
if should_cache_globally {
// FIXME: move the provisional entry to the global cache.
let _ = (tcx, goal, response);
}
}
@@ -2,11 +2,12 @@
mod overflow;

use self::cache::ProvisionalEntry;
use super::{CanonicalGoal, Certainty, MaybeCause, QueryResult};
pub(super) use crate::solve::search_graph::overflow::OverflowHandler;
use cache::ProvisionalCache;
use overflow::OverflowData;
use rustc_index::vec::IndexVec;
use rustc_middle::dep_graph::DepKind;
use rustc_middle::traits::solve::{CanonicalGoal, Certainty, MaybeCause, QueryResult};
use rustc_middle::ty::TyCtxt;
use std::{collections::hash_map::Entry, mem};

@@ -139,10 +140,9 @@ fn try_push_stack(
/// updated the provisional cache and we have to recompute the current goal.
///
/// FIXME: Refer to the rustc-dev-guide entry once it exists.
#[instrument(level = "debug", skip(self, tcx, actual_goal), ret)]
#[instrument(level = "debug", skip(self, actual_goal), ret)]
fn try_finalize_goal(
&mut self,
tcx: TyCtxt<'tcx>,
actual_goal: CanonicalGoal<'tcx>,
response: QueryResult<'tcx>,
) -> bool {
@@ -176,72 +176,87 @@ fn try_finalize_goal(
self.stack.push(StackElem { goal, has_been_used: false });
false
} else {
self.try_move_finished_goal_to_global_cache(tcx, stack_elem);
true
}
}

fn try_move_finished_goal_to_global_cache(
&mut self,
tcx: TyCtxt<'tcx>,
stack_elem: StackElem<'tcx>,
) {
let StackElem { goal, .. } = stack_elem;
let cache = &mut self.provisional_cache;
let provisional_entry_index = *cache.lookup_table.get(&goal).unwrap();
let provisional_entry = &mut cache.entries[provisional_entry_index];
let depth = provisional_entry.depth;

// If not, we're done with this goal.
//
// Check whether that this goal doesn't depend on a goal deeper on the stack
// and if so, move it and all nested goals to the global cache.
//
// Note that if any nested goal were to depend on something deeper on the stack,
// this would have also updated the depth of the current goal.
if depth == self.stack.next_index() {
for (i, entry) in cache.entries.drain_enumerated(provisional_entry_index.index()..) {
let actual_index = cache.lookup_table.remove(&entry.goal);
debug_assert_eq!(Some(i), actual_index);
debug_assert!(entry.depth == depth);
cache::try_move_finished_goal_to_global_cache(
tcx,
&mut self.overflow_data,
&self.stack,
entry.goal,
entry.response,
);
}
}
}
pub(super) fn with_new_goal(
&mut self,
tcx: TyCtxt<'tcx>,
canonical_goal: CanonicalGoal<'tcx>,
mut loop_body: impl FnMut(&mut Self) -> QueryResult<'tcx>,
) -> QueryResult<'tcx> {
if let Some(result) = tcx.new_solver_evaluation_cache.get(&canonical_goal, tcx) {
return result;
}

match self.try_push_stack(tcx, canonical_goal) {
Ok(()) => {}
// Our goal is already on the stack, eager return.
Err(response) => return response,
}

self.repeat_while_none(
|this| {
let result = this.deal_with_overflow(tcx, canonical_goal);
let stack_elem = this.stack.pop().unwrap();
this.try_move_finished_goal_to_global_cache(tcx, stack_elem);
result
},
|this| {
let result = loop_body(this);
if this.try_finalize_goal(tcx, canonical_goal, result) {
Some(result)
} else {
None
}
},
)
// This is for global caching, so we properly track query dependencies.
// Everything that affects the `Result` should be performed within this
// `with_anon_task` closure.
let (result, dep_node) = tcx.dep_graph.with_anon_task(tcx, DepKind::TraitSelect, || {
self.repeat_while_none(
|this| {
let result = this.deal_with_overflow(tcx, canonical_goal);
let _ = this.stack.pop().unwrap();
result
},
|this| {
let result = loop_body(this);
this.try_finalize_goal(canonical_goal, result).then(|| result)
},
)
});
let cache = &mut self.provisional_cache;
let provisional_entry_index = *cache.lookup_table.get(&canonical_goal).unwrap();
let provisional_entry = &mut cache.entries[provisional_entry_index];
let depth = provisional_entry.depth;

// If not, we're done with this goal.
//
// Check whether that this goal doesn't depend on a goal deeper on the stack
// and if so, move it to the global cache.
//
// Note that if any nested goal were to depend on something deeper on the stack,
// this would have also updated the depth of the current goal.
if depth == self.stack.next_index() {
// If the current goal is the head of a cycle, we drop all other
// cycle participants without moving them to the global cache.
let other_cycle_participants = provisional_entry_index.index() + 1;
for (i, entry) in cache.entries.drain_enumerated(other_cycle_participants..) {
let actual_index = cache.lookup_table.remove(&entry.goal);
debug_assert_eq!(Some(i), actual_index);
debug_assert!(entry.depth == depth);
}

let current_goal = cache.entries.pop().unwrap();
let actual_index = cache.lookup_table.remove(&current_goal.goal);
debug_assert_eq!(Some(provisional_entry_index), actual_index);
debug_assert!(current_goal.depth == depth);

// We move the root goal to the global cache if we either did not hit an overflow or if it's
// the root goal as that will now always hit the same overflow limit.
//
// NOTE: We cannot move any non-root goals to the global cache. When replaying the root goal's
// dependencies, our non-root goal may no longer appear as child of the root goal.
//
// See https://github.com/rust-lang/rust/pull/108071 for some additional context.
let should_cache_globally = !self.overflow_data.did_overflow() || self.stack.is_empty();
if should_cache_globally {
tcx.new_solver_evaluation_cache.insert(
current_goal.goal,
dep_node,
current_goal.response,
);
}
}

result
}
}
@@ -1,10 +1,11 @@
use rustc_infer::infer::canonical::Canonical;
use rustc_infer::traits::query::NoSolution;
use rustc_middle::traits::solve::{Certainty, MaybeCause, QueryResult};
use rustc_middle::ty::TyCtxt;
use rustc_session::Limit;

use super::SearchGraph;
use crate::solve::{response_no_constraints, Certainty, EvalCtxt, MaybeCause, QueryResult};
use crate::solve::{response_no_constraints, EvalCtxt};

/// When detecting a solver overflow, we return ambiguity. Overflow can be
/// *hidden* by either a fatal error in an **AND** or a trivial success in an **OR**.
@@ -2,12 +2,12 @@

use std::iter;

use super::assembly;
use super::{CanonicalResponse, Certainty, EvalCtxt, Goal, QueryResult};
use super::{assembly, EvalCtxt};
use rustc_hir::def_id::DefId;
use rustc_hir::LangItem;
use rustc_infer::traits::query::NoSolution;
use rustc_infer::traits::util::supertraits;
use rustc_middle::traits::solve::{CanonicalResponse, Certainty, Goal, QueryResult};
use rustc_middle::ty::fast_reject::{DeepRejectCtxt, TreatParams};
use rustc_middle::ty::{self, ToPredicate, Ty, TyCtxt};
use rustc_middle::ty::{TraitPredicate, TypeVisitableExt};
@@ -1,9 +1,10 @@
use rustc_middle::traits::solve::{Certainty, Goal, MaybeCause};
use rustc_middle::ty;
use rustc_session::config::TraitSolver;

use crate::infer::canonical::OriginalQueryValues;
use crate::infer::InferCtxt;
use crate::solve::{Certainty, Goal, InferCtxtEvalExt, MaybeCause};
use crate::solve::InferCtxtEvalExt;
use crate::traits::{EvaluationResult, OverflowError, PredicateObligation, SelectionContext};

pub trait InferCtxtExt<'tcx> {
@@ -214,6 +214,7 @@ impl<T> Box<T> {
#[inline(always)]
#[stable(feature = "rust1", since = "1.0.0")]
#[must_use]
#[rustc_diagnostic_item = "box_new"]
pub fn new(x: T) -> Self {
#[rustc_box]
Box::new(x)
@@ -51,8 +51,16 @@
///
/// Going above this limit will abort your program (although not
/// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references.
/// Trying to go above it might call a `panic` (if not actually going above it).
///
/// This is a global invariant, and also applies when using a compare-exchange loop.
///
/// See comment in `Arc::clone`.
const MAX_REFCOUNT: usize = (isize::MAX) as usize;

/// The error in case either counter reaches above `MAX_REFCOUNT`, and we can `panic` safely.
const INTERNAL_OVERFLOW_ERROR: &str = "Arc counter overflow";

#[cfg(not(sanitize = "thread"))]
macro_rules! acquire {
($x:expr) => {

@@ -1104,6 +1112,9 @@ pub fn downgrade(this: &Self) -> Weak<T> {
continue;
}

// We can't allow the refcount to increase much past `MAX_REFCOUNT`.
assert!(cur <= MAX_REFCOUNT, "{}", INTERNAL_OVERFLOW_ERROR);

// NOTE: this code currently ignores the possibility of overflow
// into usize::MAX; in general both Rc and Arc need to be adjusted
// to deal with overflow.

@@ -1519,6 +1530,11 @@ fn clone(&self) -> Arc<T> {
// the worst already happened and we actually do overflow the `usize` counter. However, that
// requires the counter to grow from `isize::MAX` to `usize::MAX` between the increment
// above and the `abort` below, which seems exceedingly unlikely.
//
// This is a global invariant, and also applies when using a compare-exchange loop to increment
// counters in other methods.
// Otherwise, the counter could be brought to an almost-overflow using a compare-exchange loop,
// and then overflow using a few `fetch_add`s.
if old_size > MAX_REFCOUNT {
abort();
}

@@ -2180,9 +2196,7 @@ pub fn upgrade(&self) -> Option<Arc<T>> {
return None;
}
// See comments in `Arc::clone` for why we do this (for `mem::forget`).
if n > MAX_REFCOUNT {
abort();
}
assert!(n <= MAX_REFCOUNT, "{}", INTERNAL_OVERFLOW_ERROR);
Some(n + 1)
})
.ok()
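The shape of the guarded increment, as a standalone sketch (this mirrors the idea behind the `Arc::downgrade`/`Weak::upgrade` changes above, not their exact internals):

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

const MAX_REFCOUNT: usize = isize::MAX as usize;

// Increment a shared counter with a compare-exchange loop, refusing to move it
// past MAX_REFCOUNT instead of detecting the overflow after the fact.
fn checked_increment(count: &AtomicUsize) -> usize {
    let mut cur = count.load(Ordering::Relaxed);
    loop {
        assert!(cur <= MAX_REFCOUNT, "counter overflow");
        match count.compare_exchange_weak(cur, cur + 1, Ordering::Acquire, Ordering::Relaxed) {
            Ok(_) => return cur + 1,
            Err(actual) => cur = actual,
        }
    }
}

fn main() {
    let c = AtomicUsize::new(1);
    assert_eq!(checked_increment(&c), 2);
}
```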
@@ -4,7 +4,6 @@
use core::clone::Clone;
use core::convert::TryInto;
use core::ops::Deref;
use core::result::Result::{Err, Ok};

use std::boxed::Box;

@@ -15,7 +14,7 @@ fn test_owned_clone() {
assert!(a == b);
}

#[derive(PartialEq, Eq)]
#[derive(Debug, PartialEq, Eq)]
struct Test;

#[test]

@@ -23,24 +22,17 @@ fn any_move() {
let a = Box::new(8) as Box<dyn Any>;
let b = Box::new(Test) as Box<dyn Any>;

match a.downcast::<i32>() {
Ok(a) => {
assert!(a == Box::new(8));
}
Err(..) => panic!(),
}
match b.downcast::<Test>() {
Ok(a) => {
assert!(a == Box::new(Test));
}
Err(..) => panic!(),
}
let a: Box<i32> = a.downcast::<i32>().unwrap();
assert_eq!(*a, 8);

let b: Box<Test> = b.downcast::<Test>().unwrap();
assert_eq!(*b, Test);

let a = Box::new(8) as Box<dyn Any>;
let b = Box::new(Test) as Box<dyn Any>;

assert!(a.downcast::<Box<Test>>().is_err());
assert!(b.downcast::<Box<i32>>().is_err());
assert!(a.downcast::<Box<i32>>().is_err());
assert!(b.downcast::<Box<Test>>().is_err());
}

#[test]
@@ -1147,12 +1147,10 @@ impl $Ty {
/// # Examples
///
/// ```
/// #![feature(nonzero_min_max)]
///
#[doc = concat!("# use std::num::", stringify!($Ty), ";")]
#[doc = concat!("assert_eq!(", stringify!($Ty), "::MIN.get(), 1", stringify!($Int), ");")]
/// ```
#[unstable(feature = "nonzero_min_max", issue = "89065")]
#[stable(feature = "nonzero_min_max", since = "CURRENT_RUSTC_VERSION")]
pub const MIN: Self = Self::new(1).unwrap();

/// The largest value that can be represented by this non-zero

@@ -1162,12 +1160,10 @@ impl $Ty {
/// # Examples
///
/// ```
/// #![feature(nonzero_min_max)]
///
#[doc = concat!("# use std::num::", stringify!($Ty), ";")]
#[doc = concat!("assert_eq!(", stringify!($Ty), "::MAX.get(), ", stringify!($Int), "::MAX);")]
/// ```
#[unstable(feature = "nonzero_min_max", issue = "89065")]
#[stable(feature = "nonzero_min_max", since = "CURRENT_RUSTC_VERSION")]
pub const MAX: Self = Self::new(<$Int>::MAX).unwrap();
}
)+

@@ -1189,12 +1185,10 @@ impl $Ty {
/// # Examples
///
/// ```
/// #![feature(nonzero_min_max)]
///
#[doc = concat!("# use std::num::", stringify!($Ty), ";")]
#[doc = concat!("assert_eq!(", stringify!($Ty), "::MIN.get(), ", stringify!($Int), "::MIN);")]
/// ```
#[unstable(feature = "nonzero_min_max", issue = "89065")]
#[stable(feature = "nonzero_min_max", since = "CURRENT_RUSTC_VERSION")]
pub const MIN: Self = Self::new(<$Int>::MIN).unwrap();

/// The largest value that can be represented by this non-zero

@@ -1208,12 +1202,10 @@ impl $Ty {
/// # Examples
///
/// ```
/// #![feature(nonzero_min_max)]
///
#[doc = concat!("# use std::num::", stringify!($Ty), ";")]
#[doc = concat!("assert_eq!(", stringify!($Ty), "::MAX.get(), ", stringify!($Int), "::MAX);")]
/// ```
#[unstable(feature = "nonzero_min_max", issue = "89065")]
#[stable(feature = "nonzero_min_max", since = "CURRENT_RUSTC_VERSION")]
pub const MAX: Self = Self::new(<$Int>::MAX).unwrap();
}
)+
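Usage of the now-stable constants (illustrative; `nonzero_min_max` is stable on recent toolchains):

```rust
use std::num::{NonZeroI8, NonZeroU8};

fn main() {
    // For unsigned non-zero types, MIN is 1; for signed ones it is the integer's MIN.
    assert_eq!(NonZeroU8::MIN.get(), 1);
    assert_eq!(NonZeroU8::MAX.get(), u8::MAX);
    assert_eq!(NonZeroI8::MIN.get(), i8::MIN);
    assert_eq!(NonZeroI8::MAX.get(), i8::MAX);
}
```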
@@ -2,9 +2,7 @@
// error-pattern:so long
// ignore-emscripten no processes

#![allow(unused_allocation)]
#![allow(unreachable_code)]
#![allow(unused_variables)]

fn main() {
let mut x = Vec::new();

@@ -2,7 +2,7 @@
// run-rustfix
// rustfix-only-machine-applicable

#[allow(unused_must_use)]
#[allow(unused_must_use, unused_allocation)]
fn main() {
let small = [1, 2];
let big = [0u8; 33];

@@ -2,7 +2,7 @@
// run-rustfix
// rustfix-only-machine-applicable

#[allow(unused_must_use)]
#[allow(unused_must_use, unused_allocation)]
fn main() {
let small = [1, 2];
let big = [0u8; 33];
tests/ui/lint/unused/unused-allocation.rs (new file, 7 lines)
@@ -0,0 +1,7 @@
#![feature(rustc_attrs, stmt_expr_attributes)]
#![deny(unused_allocation)]

fn main() {
_ = (#[rustc_box] Box::new([1])).len(); //~ error: unnecessary allocation, use `&` instead
_ = Box::new([1]).len(); //~ error: unnecessary allocation, use `&` instead
}
tests/ui/lint/unused/unused-allocation.stderr (new file, 20 lines)
@@ -0,0 +1,20 @@
error: unnecessary allocation, use `&` instead
--> $DIR/unused-allocation.rs:5:9
|
LL | _ = (#[rustc_box] Box::new([1])).len();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
note: the lint level is defined here
--> $DIR/unused-allocation.rs:2:9
|
LL | #![deny(unused_allocation)]
| ^^^^^^^^^^^^^^^^^

error: unnecessary allocation, use `&` instead
--> $DIR/unused-allocation.rs:6:9
|
LL | _ = Box::new([1]).len();
| ^^^^^^^^^^^^^

error: aborting due to 2 previous errors
@@ -4,7 +4,7 @@ error: expected a literal
LL | let _ = concat!(x, y, z, "bar");
| ^ ^ ^
|
= note: only literals (like `"foo"`, `42` and `3.14`) can be passed to `concat!()`
= note: only literals (like `"foo"`, `-42` and `3.14`) can be passed to `concat!()`

error: aborting due to previous error

@@ -16,7 +16,7 @@ error: expected a literal
LL | concat!(foo);
| ^^^
|
= note: only literals (like `"foo"`, `42` and `3.14`) can be passed to `concat!()`
= note: only literals (like `"foo"`, `-42` and `3.14`) can be passed to `concat!()`

error: expected a literal
--> $DIR/concat.rs:5:13

@@ -24,7 +24,7 @@ error: expected a literal
LL | concat!(foo());
| ^^^^^
|
= note: only literals (like `"foo"`, `42` and `3.14`) can be passed to `concat!()`
= note: only literals (like `"foo"`, `-42` and `3.14`) can be passed to `concat!()`

error: aborting due to 4 previous errors
tests/ui/macros/issue-106837.rs (new file, 10 lines)
@@ -0,0 +1,10 @@
fn main() {
concat!(-42);
concat!(-3.14);

concat!(-"hello");
//~^ ERROR expected a literal

concat!(--1);
//~^ ERROR expected a literal
}
tests/ui/macros/issue-106837.stderr (new file, 18 lines)
@@ -0,0 +1,18 @@
error: expected a literal
--> $DIR/issue-106837.rs:5:13
|
LL | concat!(-"hello");
| ^^^^^^^^
|
= note: only literals (like `"foo"`, `-42` and `3.14`) can be passed to `concat!()`

error: expected a literal
--> $DIR/issue-106837.rs:8:13
|
LL | concat!(--1);
| ^^^
|
= note: only literals (like `"foo"`, `-42` and `3.14`) can be passed to `concat!()`

error: aborting due to 2 previous errors
tests/ui/macros/issue-98790.rs (new file, 24 lines)
@@ -0,0 +1,24 @@
// run-pass

macro_rules! stringify_item {
($item:item) => {
stringify!($item)
};
}

macro_rules! repro {
($expr:expr) => {
stringify_item! {
pub fn repro() -> bool {
$expr
}
}
};
}

fn main() {
assert_eq!(
repro!(match () { () => true } | true),
"pub fn repro() -> bool { (match () { () => true, }) | true }"
);
}
@@ -164,7 +164,7 @@ fn main() {
// mac call

// match
[ match elem { _ => elem } == 3 ] => "Assertion failed: match elem { _ => elem, } == 3"
[ match elem { _ => elem } == 3 ] => "Assertion failed: (match elem { _ => elem, }) == 3"

// ret
[ (|| { return elem; })() == 3 ] => "Assertion failed: (|| { return elem; })() == 3"
@@ -1,4 +1,5 @@
// run-pass
#![allow(unused_allocation)]

use std::rc::Rc;

@@ -13,7 +14,7 @@ fn trait_method<'a>(self: &'a Box<Rc<Self>>) -> &'a [i32] {
}

fn main() {
let v = vec![1,2,3];
let v = vec![1, 2, 3];

assert_eq!(&[1,2,3], Box::new(Rc::new(v)).trait_method());
assert_eq!(&[1, 2, 3], Box::new(Rc::new(v)).trait_method());
}
@@ -1,5 +1,5 @@
// run-pass
#![allow(dead_code)]
#![allow(dead_code, unused_allocation)]

use std::mem;

@@ -20,7 +20,6 @@

// Raising alignment may not alter size.
#[repr(align(8))]
#[allow(dead_code)]
struct Align8Many {
a: i32,
b: i32,

@@ -29,9 +28,8 @@ struct Align8Many {
}

enum Enum {
#[allow(dead_code)]
A(i32),
B(Align16)
B(Align16),
}

// Nested alignment - use `#[repr(C)]` to suppress field reordering for sizeof test

@@ -73,7 +71,7 @@ struct AlignLarge {

union UnionContainsAlign {
a: Align16,
b: f32
b: f32,
}

impl Align16 {

@@ -158,7 +156,7 @@ pub fn main() {
// Note that the size of Nested may change if struct field re-ordering is enabled
assert_eq!(mem::align_of::<Nested>(), 16);
assert_eq!(mem::size_of::<Nested>(), 48);
let a = Nested{ a: 1, b: 2, c: Align16(3), d: 4};
let a = Nested { a: 1, b: 2, c: Align16(3), d: 4 };
assert_eq!(mem::align_of_val(&a), 16);
assert_eq!(mem::align_of_val(&a.b), 4);
assert_eq!(mem::align_of_val(&a.c), 16);

@@ -179,8 +177,8 @@ pub fn main() {
assert_eq!(a.0, 15);
assert_eq!(mem::align_of_val(a), 16);
assert_eq!(mem::size_of_val(a), 16);
},
_ => ()
}
_ => (),
}
assert!(is_aligned_to(&e, 16));

@@ -197,8 +195,8 @@ pub fn main() {
}

// arrays of aligned elements should also be aligned
assert_eq!(mem::align_of::<[Align16;2]>(), 16);
assert_eq!(mem::size_of::<[Align16;2]>(), 32);
assert_eq!(mem::align_of::<[Align16; 2]>(), 16);
assert_eq!(mem::size_of::<[Align16; 2]>(), 32);

let a = [Align16(0), Align16(1)];
assert_eq!(mem::align_of_val(&a[0]), 16);

@@ -209,7 +207,7 @@ pub fn main() {
assert_eq!(mem::align_of_val(Box::new(Align16(0)).as_ref()), 16);

// check heap array is aligned
let a = vec!(Align16(0), Align16(1));
let a = vec![Align16(0), Align16(1)];
assert_eq!(mem::align_of_val(&a[0]), 16);
assert_eq!(mem::align_of_val(&a[1]), 16);

@@ -224,16 +222,14 @@ pub fn main() {

assert_eq!(mem::align_of::<AlignContainsPacked4C>(), 16);
assert_eq!(mem::size_of::<AlignContainsPacked4C>(), 32);
let a = AlignContainsPacked4C { a: Packed4C{ a: 1, b: 2 }, b: 3 };
let a = AlignContainsPacked4C { a: Packed4C { a: 1, b: 2 }, b: 3 };
assert_eq!(mem::align_of_val(&a), 16);
assert_eq!(mem::align_of_val(&a.a), 4);
assert_eq!(mem::align_of_val(&a.b), mem::align_of::<u64>());
assert_eq!(mem::size_of_val(&a), 32);
assert!(is_aligned_to(&a, 16));

let mut large = Box::new(AlignLarge {
stuff: [0; 0x10000],
});
let mut large = Box::new(AlignLarge { stuff: [0; 0x10000] });
large.stuff[0] = 132;
*large.stuff.last_mut().unwrap() = 102;
assert_eq!(large.stuff[0], 132);