Auto merge of #118152 - matthiaskrgr:rollup-bqcck4w, r=matthiaskrgr

Rollup of 5 pull requests

Successful merges:

 - #117972 (Add VarDebugInfo to Stable MIR)
 - #118109 (rustdoc-search: simplify `checkPath` and `sortResults`)
 - #118110 (Document `DefiningAnchor` a bit more)
 - #118112 (Don't ICE when ambiguity is found when selecting `Index` implementation in typeck)
 - #118135 (Remove quotation from filename in stable_mir)

Failed merges:

 - #118012 (Add support for global allocation in smir)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
bors 2023-11-22 00:30:56 +00:00
commit ed10a53025
17 changed files with 286 additions and 115 deletions

View File

@ -3063,7 +3063,9 @@ fn find_and_report_unsatisfied_index_impl(
return None; return None;
}; };
self.commit_if_ok(|_| { self.commit_if_ok(|snapshot| {
let outer_universe = self.universe();
let ocx = ObligationCtxt::new(self); let ocx = ObligationCtxt::new(self);
let impl_args = self.fresh_args_for_item(base_expr.span, impl_def_id); let impl_args = self.fresh_args_for_item(base_expr.span, impl_def_id);
let impl_trait_ref = let impl_trait_ref =
@ -3073,7 +3075,7 @@ fn find_and_report_unsatisfied_index_impl(
// Match the impl self type against the base ty. If this fails, // Match the impl self type against the base ty. If this fails,
// we just skip this impl, since it's not particularly useful. // we just skip this impl, since it's not particularly useful.
let impl_trait_ref = ocx.normalize(&cause, self.param_env, impl_trait_ref); let impl_trait_ref = ocx.normalize(&cause, self.param_env, impl_trait_ref);
ocx.eq(&cause, self.param_env, impl_trait_ref.self_ty(), base_ty)?; ocx.eq(&cause, self.param_env, base_ty, impl_trait_ref.self_ty())?;
// Register the impl's predicates. One of these predicates // Register the impl's predicates. One of these predicates
// must be unsatisfied, or else we wouldn't have gotten here // must be unsatisfied, or else we wouldn't have gotten here
@ -3109,11 +3111,23 @@ fn find_and_report_unsatisfied_index_impl(
Ty::new_projection(self.tcx, index_trait_output_def_id, impl_trait_ref.args), Ty::new_projection(self.tcx, index_trait_output_def_id, impl_trait_ref.args),
); );
let errors = ocx.select_where_possible(); let true_errors = ocx.select_where_possible();
// Do a leak check -- we can't really report a useful error here,
// but it at least avoids an ICE when the error has to do with higher-ranked
// lifetimes.
self.leak_check(outer_universe, Some(snapshot))?;
// Bail if we have ambiguity errors, which we can't report in a useful way.
let ambiguity_errors = ocx.select_all_or_error();
if true_errors.is_empty() && !ambiguity_errors.is_empty() {
return Err(NoSolution);
}
// There should be at least one error reported. If not, we // There should be at least one error reported. If not, we
// will still delay a span bug in `report_fulfillment_errors`. // will still delay a span bug in `report_fulfillment_errors`.
Ok::<_, NoSolution>(( Ok::<_, NoSolution>((
self.err_ctxt().report_fulfillment_errors(errors), self.err_ctxt().report_fulfillment_errors(true_errors),
impl_trait_ref.args.type_at(1), impl_trait_ref.args.type_at(1),
element_ty, element_ty,
)) ))

View File

@ -956,13 +956,26 @@ pub enum CodegenObligationError {
FulfillmentError, FulfillmentError,
} }
/// Defines the treatment of opaque types in a given inference context.
///
/// This affects both what opaques are allowed to be defined, but also whether
/// opaques are replaced with inference vars eagerly in the old solver (e.g.
/// in projection, and in the signature during function type-checking).
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash, HashStable, TypeFoldable, TypeVisitable)] #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash, HashStable, TypeFoldable, TypeVisitable)]
pub enum DefiningAnchor { pub enum DefiningAnchor {
/// `DefId` of the item. /// Define opaques which are in-scope of the `LocalDefId`. Also, eagerly
/// replace opaque types in `replace_opaque_types_with_inference_vars`.
Bind(LocalDefId), Bind(LocalDefId),
/// When opaque types are not resolved, we `Bubble` up, meaning /// In contexts where we don't currently know what opaques are allowed to be
/// return the opaque/hidden type pair from query, for caller of query to handle it. /// defined, such as (old solver) canonical queries, we will simply allow
/// opaques to be defined, but "bubble" them up in the canonical response or
/// otherwise treat them to be handled later.
///
/// We do not eagerly replace opaque types in `replace_opaque_types_with_inference_vars`,
/// which may affect what predicates pass and fail in the old trait solver.
Bubble, Bubble,
/// Used to catch type mismatch errors when handling opaque types. /// Do not allow any opaques to be defined. This is used to catch type mismatch
/// errors when handling opaque types, and also should be used when we would
/// otherwise reveal opaques (such as [`Reveal::All`] reveal mode).
Error, Error,
} }

View File

@ -18,7 +18,10 @@
use rustc_span::def_id::{CrateNum, DefId, LOCAL_CRATE}; use rustc_span::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc_target::abi::FieldIdx; use rustc_target::abi::FieldIdx;
use stable_mir::mir::mono::InstanceDef; use stable_mir::mir::mono::InstanceDef;
use stable_mir::mir::{Body, CopyNonOverlapping, Statement, UserTypeProjection, VariantIdx}; use stable_mir::mir::{
Body, ConstOperand, CopyNonOverlapping, Statement, UserTypeProjection, VarDebugInfoFragment,
VariantIdx,
};
use stable_mir::ty::{ use stable_mir::ty::{
AdtDef, AdtKind, ClosureDef, ClosureKind, Const, ConstId, ConstantKind, EarlyParamRegion, AdtDef, AdtKind, ClosureDef, ClosureKind, Const, ConstId, ConstantKind, EarlyParamRegion,
FloatTy, FnDef, GenericArgs, GenericParamDef, IntTy, LineInfo, Movability, RigidTy, Span, FloatTy, FnDef, GenericArgs, GenericParamDef, IntTy, LineInfo, Movability, RigidTy, Span,
@ -69,15 +72,13 @@ fn span_to_string(&self, span: stable_mir::ty::Span) -> String {
fn get_filename(&self, span: &Span) -> Filename { fn get_filename(&self, span: &Span) -> Filename {
let tables = self.0.borrow(); let tables = self.0.borrow();
opaque( tables
&tables .tcx
.tcx .sess
.sess .source_map()
.source_map() .span_to_filename(tables[*span])
.span_to_filename(tables[*span]) .display(rustc_span::FileNameDisplayPreference::Local)
.display(rustc_span::FileNameDisplayPreference::Local) .to_string()
.to_string(),
)
} }
fn get_lines(&self, span: &Span) -> LineInfo { fn get_lines(&self, span: &Span) -> LineInfo {
@ -444,10 +445,24 @@ fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T {
}) })
.collect(), .collect(),
self.arg_count, self.arg_count,
self.var_debug_info.iter().map(|info| info.stable(tables)).collect(),
) )
} }
} }
// Conversion of rustc's `mir::VarDebugInfo` into its Stable MIR counterpart.
impl<'tcx> Stable<'tcx> for mir::VarDebugInfo<'tcx> {
type T = stable_mir::mir::VarDebugInfo;
fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T {
stable_mir::mir::VarDebugInfo {
// The interned `Symbol` is flattened into an owned `String` for the stable API.
name: self.name.to_string(),
source_info: self.source_info.stable(tables),
// Present only when this entry describes a fragment of a composite variable.
composite: self.composite.as_ref().map(|composite| composite.stable(tables)),
value: self.value.stable(tables),
// Copied through verbatim; `None` when the variable is not a function argument.
argument_index: self.argument_index,
}
}
}
impl<'tcx> Stable<'tcx> for mir::Statement<'tcx> { impl<'tcx> Stable<'tcx> for mir::Statement<'tcx> {
type T = stable_mir::mir::Statement; type T = stable_mir::mir::Statement;
fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T {
@ -455,6 +470,42 @@ fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T {
} }
} }
// Conversion of rustc's `mir::SourceInfo` (span + scope pair) into Stable MIR form.
impl<'tcx> Stable<'tcx> for mir::SourceInfo {
type T = stable_mir::mir::SourceInfo;
fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T {
// The scope index is converted via `Into` to the stable `SourceScope` (a plain u32).
stable_mir::mir::SourceInfo { span: self.span.stable(tables), scope: self.scope.into() }
}
}
// Conversion of rustc's `mir::VarDebugInfoFragment` into its Stable MIR counterpart.
impl<'tcx> Stable<'tcx> for mir::VarDebugInfoFragment<'tcx> {
type T = stable_mir::mir::VarDebugInfoFragment;
fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T {
VarDebugInfoFragment {
ty: self.ty.stable(tables),
// Each projection element is stabilized individually and collected.
projection: self.projection.iter().map(|e| e.stable(tables)).collect(),
}
}
}
// Conversion of rustc's `mir::VarDebugInfoContents` (the value a debug-info
// entry refers to: either a place or a constant) into Stable MIR form.
impl<'tcx> Stable<'tcx> for mir::VarDebugInfoContents<'tcx> {
type T = stable_mir::mir::VarDebugInfoContents;
fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T {
match self {
mir::VarDebugInfoContents::Place(place) => {
stable_mir::mir::VarDebugInfoContents::Place(place.stable(tables))
}
mir::VarDebugInfoContents::Const(const_operand) => {
// Build the stable `ConstOperand` field by field rather than via a
// `Stable` impl on the rustc type.
let op = ConstOperand {
span: const_operand.span.stable(tables),
// The user-type annotation index is flattened to a plain `usize`.
user_ty: const_operand.user_ty.map(|index| index.as_usize()),
const_: const_operand.const_.stable(tables),
};
stable_mir::mir::VarDebugInfoContents::Const(op)
}
}
}
}
impl<'tcx> Stable<'tcx> for mir::StatementKind<'tcx> { impl<'tcx> Stable<'tcx> for mir::StatementKind<'tcx> {
type T = stable_mir::mir::StatementKind; type T = stable_mir::mir::StatementKind;
fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T { fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T {

View File

@ -110,7 +110,7 @@ pub enum ItemKind {
Const, Const,
} }
pub type Filename = Opaque; pub type Filename = String;
/// Holds information about an item in the crate. /// Holds information about an item in the crate.
#[derive(Copy, Clone, PartialEq, Eq, Debug)] #[derive(Copy, Clone, PartialEq, Eq, Debug)]

View File

@ -2,7 +2,7 @@
use crate::ty::{ use crate::ty::{
AdtDef, ClosureDef, Const, CoroutineDef, GenericArgs, Movability, Region, RigidTy, Ty, TyKind, AdtDef, ClosureDef, Const, CoroutineDef, GenericArgs, Movability, Region, RigidTy, Ty, TyKind,
}; };
use crate::{Error, Opaque, Span}; use crate::{Error, Opaque, Span, Symbol};
use std::io; use std::io;
/// The SMIR representation of a single function. /// The SMIR representation of a single function.
@ -19,6 +19,9 @@ pub struct Body {
// The number of arguments this function takes. // The number of arguments this function takes.
pub(super) arg_count: usize, pub(super) arg_count: usize,
// Debug information pertaining to user variables, including captures.
pub(super) var_debug_info: Vec<VarDebugInfo>,
} }
impl Body { impl Body {
@ -26,14 +29,19 @@ impl Body {
/// ///
/// A constructor is required to build a `Body` from outside the crate /// A constructor is required to build a `Body` from outside the crate
/// because the `arg_count` and `locals` fields are private. /// because the `arg_count` and `locals` fields are private.
pub fn new(blocks: Vec<BasicBlock>, locals: LocalDecls, arg_count: usize) -> Self { pub fn new(
blocks: Vec<BasicBlock>,
locals: LocalDecls,
arg_count: usize,
var_debug_info: Vec<VarDebugInfo>,
) -> Self {
// If locals doesn't contain enough entries, it can lead to panics in // If locals doesn't contain enough entries, it can lead to panics in
// `ret_local`, `arg_locals`, and `inner_locals`. // `ret_local`, `arg_locals`, and `inner_locals`.
assert!( assert!(
locals.len() > arg_count, locals.len() > arg_count,
"A Body must contain at least a local for the return value and each of the function's arguments" "A Body must contain at least a local for the return value and each of the function's arguments"
); );
Self { blocks, locals, arg_count } Self { blocks, locals, arg_count, var_debug_info }
} }
/// Return local that holds this function's return value. /// Return local that holds this function's return value.
@ -427,6 +435,42 @@ pub struct Place {
pub projection: Vec<ProjectionElem>, pub projection: Vec<ProjectionElem>,
} }
/// Debug information pertaining to a single user variable, including captures.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct VarDebugInfo {
/// The variable's name.
pub name: Symbol,
/// Span and scope where this variable's debug info applies.
pub source_info: SourceInfo,
/// Set when this entry describes one fragment of a composite variable.
pub composite: Option<VarDebugInfoFragment>,
/// The place or constant this variable refers to.
pub value: VarDebugInfoContents,
/// Set when the variable is a function argument; the exact numbering
/// convention mirrors rustc's `VarDebugInfo` — TODO(review): confirm
/// whether the index is 1-based in the stable API docs.
pub argument_index: Option<u16>,
}
/// Index identifying a source scope; stable counterpart of rustc's scope index.
pub type SourceScope = u32;
/// A span together with the scope it belongs to.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct SourceInfo {
pub span: Span,
pub scope: SourceScope,
}
/// One fragment of a composite variable: its type and the projection
/// selecting the fragment.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct VarDebugInfoFragment {
pub ty: Ty,
pub projection: Vec<ProjectionElem>,
}
/// What a debug-info entry evaluates to: a place in the body or a constant.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum VarDebugInfoContents {
Place(Place),
Const(ConstOperand),
}
/// A constant operand as it appears in variable debug info.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ConstOperand {
pub span: Span,
/// Optional user-type annotation index, flattened from rustc's index type.
pub user_ty: Option<UserTypeAnnotationIndex>,
pub const_: Const,
}
// In MIR ProjectionElem is parameterized on the second Field argument and the Index argument. This // In MIR ProjectionElem is parameterized on the second Field argument and the Index argument. This
// is so it can be used for both Places (for which the projection elements are of type // is so it can be used for both Places (for which the projection elements are of type
// ProjectionElem<Local, Ty>) and user-provided type annotations (for which the projection elements // ProjectionElem<Local, Ty>) and user-provided type annotations (for which the projection elements

View File

@ -128,8 +128,12 @@ fn visit_assert_msg(&mut self, msg: &AssertMessage, location: Location) {
self.super_assert_msg(msg, location) self.super_assert_msg(msg, location)
} }
/// Visit the debug info of one user variable. The default implementation
/// delegates to `super_var_debug_info`, which walks the contained span,
/// fragment type, and place/constant value.
fn visit_var_debug_info(&mut self, var_debug_info: &VarDebugInfo) {
self.super_var_debug_info(var_debug_info);
}
fn super_body(&mut self, body: &Body) { fn super_body(&mut self, body: &Body) {
let Body { blocks, locals: _, arg_count } = body; let Body { blocks, locals: _, arg_count, var_debug_info } = body;
for bb in blocks { for bb in blocks {
self.visit_basic_block(bb); self.visit_basic_block(bb);
@ -145,6 +149,10 @@ fn super_body(&mut self, body: &Body) {
for (idx, arg) in body.inner_locals().iter().enumerate() { for (idx, arg) in body.inner_locals().iter().enumerate() {
self.visit_local_decl(idx + local_start, arg) self.visit_local_decl(idx + local_start, arg)
} }
for info in var_debug_info.iter() {
self.visit_var_debug_info(info);
}
} }
fn super_basic_block(&mut self, bb: &BasicBlock) { fn super_basic_block(&mut self, bb: &BasicBlock) {
@ -382,6 +390,24 @@ fn super_args(&mut self, args: &GenericArgs) {
let _ = args; let _ = args;
} }
/// Default traversal for `VarDebugInfo`: visits the source span, the
/// composite fragment's type (if present), and the place or constant value.
/// `name` and `argument_index` carry no visitable structure and are ignored.
fn super_var_debug_info(&mut self, var_debug_info: &VarDebugInfo) {
let VarDebugInfo { source_info, composite, value, name: _, argument_index: _ } =
var_debug_info;
self.visit_span(&source_info.span);
// Nested visits are attributed to the variable's own span.
let location = Location(source_info.span);
if let Some(composite) = composite {
self.visit_ty(&composite.ty, location);
}
match value {
VarDebugInfoContents::Place(place) => {
// Debug info neither reads nor writes the place, hence NON_USE.
self.visit_place(place, PlaceContext::NON_USE, location);
}
VarDebugInfoContents::Const(constant) => {
self.visit_const(&constant.const_, location);
}
}
}
fn super_assert_msg(&mut self, msg: &AssertMessage, location: Location) { fn super_assert_msg(&mut self, msg: &AssertMessage, location: Location) {
match msg { match msg {
AssertMessage::BoundsCheck { len, index } => { AssertMessage::BoundsCheck { len, index } => {

View File

@ -16,6 +16,13 @@
/// Consequently, every change to this type should be synchronized to /// Consequently, every change to this type should be synchronized to
/// the `itemTypes` mapping table in `html/static/js/search.js`. /// the `itemTypes` mapping table in `html/static/js/search.js`.
/// ///
/// The search engine in search.js also uses item type numbers as a tie breaker when
/// sorting results. Keywords and primitives are given first because we want them to be easily
/// found by new users who don't know about advanced features like type filters. The rest are
/// mostly in an arbitrary order, but it's easier to test the search engine when
/// it's deterministic, and these are strictly finer-grained than language namespaces, so
/// using the path and the item type together to sort ensures that search sorting is stable.
///
/// In addition, code in `html::render` uses this enum to generate CSS classes, page prefixes, and /// In addition, code in `html::render` uses this enum to generate CSS classes, page prefixes, and
/// module headings. If you are adding to this enum and want to ensure that the sidebar also prints /// module headings. If you are adding to this enum and want to ensure that the sidebar also prints
/// a heading, edit the listing in `html/render.rs`, function `sidebar_module`. This uses an /// a heading, edit the listing in `html/render.rs`, function `sidebar_module`. This uses an
@ -23,28 +30,28 @@
#[derive(Copy, PartialEq, Eq, Hash, Clone, Debug, PartialOrd, Ord)] #[derive(Copy, PartialEq, Eq, Hash, Clone, Debug, PartialOrd, Ord)]
#[repr(u8)] #[repr(u8)]
pub(crate) enum ItemType { pub(crate) enum ItemType {
Module = 0, Keyword = 0,
ExternCrate = 1, Primitive = 1,
Import = 2, Module = 2,
Struct = 3, ExternCrate = 3,
Enum = 4, Import = 4,
Function = 5, Struct = 5,
TypeAlias = 6, Enum = 6,
Static = 7, Function = 7,
Trait = 8, TypeAlias = 8,
Impl = 9, Static = 9,
TyMethod = 10, Trait = 10,
Method = 11, Impl = 11,
StructField = 12, TyMethod = 12,
Variant = 13, Method = 13,
Macro = 14, StructField = 14,
Primitive = 15, Variant = 15,
AssocType = 16, Macro = 16,
Constant = 17, AssocType = 17,
AssocConst = 18, Constant = 18,
Union = 19, AssocConst = 19,
ForeignType = 20, Union = 20,
Keyword = 21, ForeignType = 21,
OpaqueTy = 22, OpaqueTy = 22,
ProcAttribute = 23, ProcAttribute = 23,
ProcDerive = 24, ProcDerive = 24,

View File

@ -18,28 +18,28 @@ if (!Array.prototype.toSpliced) {
// This mapping table should match the discriminants of // This mapping table should match the discriminants of
// `rustdoc::formats::item_type::ItemType` type in Rust. // `rustdoc::formats::item_type::ItemType` type in Rust.
const itemTypes = [ const itemTypes = [
"keyword",
"primitive",
"mod", "mod",
"externcrate", "externcrate",
"import", "import",
"struct", "struct", // 5
"enum", "enum",
"fn", // 5 "fn",
"type", "type",
"static", "static",
"trait", "trait", // 10
"impl", "impl",
"tymethod", // 10 "tymethod",
"method", "method",
"structfield", "structfield",
"variant", "variant", // 15
"macro", "macro",
"primitive", // 15
"associatedtype", "associatedtype",
"constant", "constant",
"associatedconstant", "associatedconstant",
"union", "union", // 20
"foreigntype", // 20 "foreigntype",
"keyword",
"existential", "existential",
"attr", "attr",
"derive", "derive",
@ -48,6 +48,8 @@ const itemTypes = [
]; ];
const longItemTypes = [ const longItemTypes = [
"keyword",
"primitive type",
"module", "module",
"extern crate", "extern crate",
"re-export", "re-export",
@ -63,13 +65,11 @@ const longItemTypes = [
"struct field", "struct field",
"enum variant", "enum variant",
"macro", "macro",
"primitive type",
"assoc type", "assoc type",
"constant", "constant",
"assoc const", "assoc const",
"union", "union",
"foreign type", "foreign type",
"keyword",
"existential type", "existential type",
"attribute macro", "attribute macro",
"derive macro", "derive macro",
@ -77,8 +77,6 @@ const longItemTypes = [
]; ];
// used for special search precedence // used for special search precedence
const TY_PRIMITIVE = itemTypes.indexOf("primitive");
const TY_KEYWORD = itemTypes.indexOf("keyword");
const TY_GENERIC = itemTypes.indexOf("generic"); const TY_GENERIC = itemTypes.indexOf("generic");
const ROOT_PATH = typeof window !== "undefined" ? window.rootPath : "../"; const ROOT_PATH = typeof window !== "undefined" ? window.rootPath : "../";
@ -1317,16 +1315,6 @@ function initSearch(rawSearchIndex) {
return (a > b ? +1 : -1); return (a > b ? +1 : -1);
} }
// special precedence for primitive and keyword pages
if ((aaa.item.ty === TY_PRIMITIVE && bbb.item.ty !== TY_KEYWORD) ||
(aaa.item.ty === TY_KEYWORD && bbb.item.ty !== TY_PRIMITIVE)) {
return -1;
}
if ((bbb.item.ty === TY_PRIMITIVE && aaa.item.ty !== TY_PRIMITIVE) ||
(bbb.item.ty === TY_KEYWORD && aaa.item.ty !== TY_KEYWORD)) {
return 1;
}
// sort by description (no description goes later) // sort by description (no description goes later)
a = (aaa.item.desc === ""); a = (aaa.item.desc === "");
b = (bbb.item.desc === ""); b = (bbb.item.desc === "");
@ -1840,26 +1828,16 @@ function initSearch(rawSearchIndex) {
const length = path.length; const length = path.length;
const clength = contains.length; const clength = contains.length;
if (clength > length) { pathiter: for (let i = length - clength; i >= 0; i -= 1) {
return maxEditDistance + 1;
}
for (let i = 0; i < length; ++i) {
if (i + clength > length) {
break;
}
let dist_total = 0; let dist_total = 0;
let aborted = false;
for (let x = 0; x < clength; ++x) { for (let x = 0; x < clength; ++x) {
const dist = editDistance(path[i + x], contains[x], maxEditDistance); const dist = editDistance(path[i + x], contains[x], maxEditDistance);
if (dist > maxEditDistance) { if (dist > maxEditDistance) {
aborted = true; continue pathiter;
break;
} }
dist_total += dist; dist_total += dist;
} }
if (!aborted) { ret_dist = Math.min(ret_dist, Math.round(dist_total / clength));
ret_dist = Math.min(ret_dist, Math.round(dist_total / clength));
}
} }
return ret_dist; return ret_dist;
} }
@ -2953,7 +2931,7 @@ ${item.displayPath}<span class="${type}">${name}</span>\
// https://mathiasbynens.be/notes/shapes-ics // https://mathiasbynens.be/notes/shapes-ics
const crateRow = { const crateRow = {
crate: crate, crate: crate,
ty: 1, // == ExternCrate ty: 3, // == ExternCrate
name: crate, name: crate,
path: "", path: "",
desc: crateCorpus.doc, desc: crateCorpus.doc,

View File

@ -3,7 +3,7 @@
const EXPECTED = { const EXPECTED = {
'query': 'fn', 'query': 'fn',
'others': [ 'others': [
{ 'path': 'std', 'name': 'fn', ty: 15 }, // 15 is for primitive types { 'path': 'std', 'name': 'fn', ty: 1 }, // 1 is for primitive types
{ 'path': 'std', 'name': 'fn', ty: 21 }, // 21 is for keywords { 'path': 'std', 'name': 'fn', ty: 0 }, // 0 is for keywords
], ],
}; };

View File

@ -3,7 +3,7 @@
const EXPECTED = { const EXPECTED = {
'query': 'panic', 'query': 'panic',
'others': [ 'others': [
{ 'path': 'std', 'name': 'panic', ty: 14 }, // 15 is for macros { 'path': 'std', 'name': 'panic', ty: 16 }, // 16 is for macros
{ 'path': 'std', 'name': 'panic', ty: 0 }, // 0 is for modules { 'path': 'std', 'name': 'panic', ty: 2 }, // 2 is for modules
], ],
}; };

View File

@ -81,7 +81,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "never", pathLast: "never",
generics: [], generics: [],
typeFilter: 15, typeFilter: 1,
}] }]
], ],
], ],
@ -112,7 +112,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "[]", pathLast: "[]",
generics: [], generics: [],
typeFilter: 15, typeFilter: 1,
}] }]
], ],
], ],
@ -149,10 +149,10 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "never", pathLast: "never",
generics: [], generics: [],
typeFilter: 15, typeFilter: 1,
}, },
], ],
typeFilter: 15, typeFilter: 1,
}] }]
], ],
], ],

View File

@ -7,7 +7,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "foo", pathLast: "foo",
generics: [], generics: [],
typeFilter: 5, typeFilter: 7,
}], }],
foundElems: 1, foundElems: 1,
original: "fn:foo", original: "fn:foo",
@ -23,7 +23,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "foo", pathLast: "foo",
generics: [], generics: [],
typeFilter: 4, typeFilter: 6,
}], }],
foundElems: 1, foundElems: 1,
original: "enum : foo", original: "enum : foo",
@ -48,7 +48,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "macro", pathLast: "macro",
generics: [], generics: [],
typeFilter: 14, typeFilter: 16,
}], }],
foundElems: 1, foundElems: 1,
original: "macro!", original: "macro!",
@ -64,7 +64,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "mac", pathLast: "mac",
generics: [], generics: [],
typeFilter: 14, typeFilter: 16,
}], }],
foundElems: 1, foundElems: 1,
original: "macro:mac!", original: "macro:mac!",
@ -80,7 +80,7 @@ const PARSED = [
pathWithoutLast: ["a"], pathWithoutLast: ["a"],
pathLast: "mac", pathLast: "mac",
generics: [], generics: [],
typeFilter: 14, typeFilter: 16,
}], }],
foundElems: 1, foundElems: 1,
original: "a::mac!", original: "a::mac!",
@ -99,7 +99,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "foo", pathLast: "foo",
generics: [], generics: [],
typeFilter: 5, typeFilter: 7,
}], }],
userQuery: "-> fn:foo", userQuery: "-> fn:foo",
error: null, error: null,
@ -121,10 +121,10 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "bar", pathLast: "bar",
generics: [], generics: [],
typeFilter: 5, typeFilter: 7,
} }
], ],
typeFilter: 5, typeFilter: 7,
}], }],
userQuery: "-> fn:foo<fn:bar>", userQuery: "-> fn:foo<fn:bar>",
error: null, error: null,
@ -146,7 +146,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "bar", pathLast: "bar",
generics: [], generics: [],
typeFilter: 5, typeFilter: 7,
}, },
{ {
name: "baz::fuzz", name: "baz::fuzz",
@ -154,10 +154,10 @@ const PARSED = [
pathWithoutLast: ["baz"], pathWithoutLast: ["baz"],
pathLast: "fuzz", pathLast: "fuzz",
generics: [], generics: [],
typeFilter: 4, typeFilter: 6,
}, },
], ],
typeFilter: 5, typeFilter: 7,
}], }],
userQuery: "-> fn:foo<fn:bar, enum : baz::fuzz>", userQuery: "-> fn:foo<fn:bar, enum : baz::fuzz>",
error: null, error: null,

View File

@ -13,7 +13,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "never", pathLast: "never",
generics: [], generics: [],
typeFilter: 15, typeFilter: 1,
}, },
], ],
typeFilter: -1, typeFilter: -1,
@ -32,7 +32,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "never", pathLast: "never",
generics: [], generics: [],
typeFilter: 15, typeFilter: 1,
}], }],
foundElems: 1, foundElems: 1,
original: "!", original: "!",
@ -48,7 +48,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "a", pathLast: "a",
generics: [], generics: [],
typeFilter: 14, typeFilter: 16,
}], }],
foundElems: 1, foundElems: 1,
original: "a!", original: "a!",

View File

@ -89,7 +89,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "never", pathLast: "never",
generics: [], generics: [],
typeFilter: 15, typeFilter: 1,
}], }],
userQuery: "-> !", userQuery: "-> !",
error: null, error: null,

View File

@ -43,16 +43,16 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "[]", pathLast: "[]",
generics: [], generics: [],
typeFilter: 15, typeFilter: 1,
}, },
], ],
typeFilter: 15, typeFilter: 1,
}, },
], ],
typeFilter: 15, typeFilter: 1,
}, },
], ],
typeFilter: 15, typeFilter: 1,
}, },
], ],
foundElems: 1, foundElems: 1,
@ -70,7 +70,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "[]", pathLast: "[]",
generics: [], generics: [],
typeFilter: 15, typeFilter: 1,
}, },
{ {
name: "u8", name: "u8",
@ -105,7 +105,7 @@ const PARSED = [
typeFilter: -1, typeFilter: -1,
}, },
], ],
typeFilter: 15, typeFilter: 1,
}, },
], ],
foundElems: 1, foundElems: 1,
@ -140,7 +140,7 @@ const PARSED = [
typeFilter: -1, typeFilter: -1,
}, },
], ],
typeFilter: 15, typeFilter: 1,
}, },
], ],
foundElems: 1, foundElems: 1,
@ -176,7 +176,7 @@ const PARSED = [
typeFilter: -1, typeFilter: -1,
}, },
], ],
typeFilter: 15, typeFilter: 1,
}, },
], ],
foundElems: 1, foundElems: 1,
@ -194,7 +194,7 @@ const PARSED = [
pathWithoutLast: [], pathWithoutLast: [],
pathLast: "[]", pathLast: "[]",
generics: [], generics: [],
typeFilter: 15, typeFilter: 1,
}, },
], ],
foundElems: 1, foundElems: 1,
@ -284,7 +284,7 @@ const PARSED = [
typeFilter: -1, typeFilter: -1,
}, },
], ],
typeFilter: 15, typeFilter: 1,
}, },
], ],
foundElems: 1, foundElems: 1,

View File

@ -0,0 +1,29 @@
// Regression test for ICE in #118111: ambiguity while selecting an `Index` impl in typeck.
use std::ops::Index;
struct Map<T, F> {
f: F,
inner: T,
}
impl<T, F, Idx> Index<Idx> for Map<T, F>
where
T: Index<Idx>,
F: FnOnce(&T, Idx) -> Idx,
{
type Output = T::Output;
fn index(&self, index: Idx) -> &Self::Output {
todo!() // body never runs: the test expects a compile error below
}
}
fn main() {
Map { inner: [0_usize], f: |_, i: usize| 1_usize }[0];
//~^ ERROR cannot index into a value of type
// Problem here is that
// `f: |_, i: usize| ...`
// should be
// `f: |_: &_, i: usize| ...`
}

View File

@ -0,0 +1,9 @@
error[E0608]: cannot index into a value of type `Map<[usize; 1], {closure@$DIR/bad-index-modulo-higher-ranked-regions.rs:23:32: 23:45}>`
--> $DIR/bad-index-modulo-higher-ranked-regions.rs:23:55
|
LL | Map { inner: [0_usize], f: |_, i: usize| 1_usize }[0];
| ^^^
error: aborting due to previous error
For more information about this error, try `rustc --explain E0608`.