Auto merge of #17863 - Veykril:include-diags, r=Veykril
fix: Resolve included files to their calling modules in the IDE layer

Fixes https://github.com/rust-lang/rust-analyzer/issues/17390, at the expense of reporting duplicate diagnostics for modules that contain `include!` invocations when both the calling and the included file are queried.
commit ff63552892
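For reference, the fixed scenario is the one exercised by the new `include_macro_works` test further down: a file reachable only through `include!` now resolves to the module containing the invocation, so diagnostics computed for it can be attributed to the calling module. The duplicate-diagnostics tradeoff is the one documented by the FIXME added to the test harness in the last hunk. A minimal fixture in the same style as the tests in this diff:

```rust
//- minicore: include
//- /main.rs
include!("bar/foo/mod.rs");
//- /bar/foo/mod.rs
```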
@@ -240,14 +240,14 @@ fn fields_attrs_source_map(

     fn crate_supports_no_std(&self, crate_id: CrateId) -> bool;

-    fn include_macro_invoc(&self, crate_id: CrateId) -> Vec<(MacroCallId, EditionedFileId)>;
+    fn include_macro_invoc(&self, crate_id: CrateId) -> Arc<[(MacroCallId, EditionedFileId)]>;
 }

 // return: macro call id and include file id
 fn include_macro_invoc(
     db: &dyn DefDatabase,
     krate: CrateId,
-) -> Vec<(MacroCallId, EditionedFileId)> {
+) -> Arc<[(MacroCallId, EditionedFileId)]> {
     db.crate_def_map(krate)
         .modules
         .values()
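The signature change above swaps the query's `Vec` return for `Arc<[_]>`. A minimal sketch (plain `std`, not rust-analyzer's types) of why that matters for a memoized result that gets handed to several callers: cloning an `Arc<[T]>` is a reference-count bump, and every caller shares one allocation.

```rust
use std::sync::Arc;

fn main() {
    // Stand-in for a memoized (MacroCallId, EditionedFileId) list.
    let memoized: Arc<[(u32, u32)]> = Arc::from(vec![(1, 10), (2, 20)]);
    let for_caller_a = Arc::clone(&memoized); // cheap: no element copies
    let for_caller_b = memoized.clone(); // same thing via Clone
    assert!(Arc::ptr_eq(&for_caller_a, &for_caller_b));
    assert_eq!(for_caller_b.len(), 2);
}
```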
@@ -770,59 +770,62 @@ fn is_from_include_file(
         let file_id = self.find_file(&parent).file_id.file_id()?;

         // iterate related crates and find all include! invocations that include_file_id matches
-        for (invoc, _) in self
+        for iter in self
             .db
             .relevant_crates(file_id.file_id())
             .iter()
-            .flat_map(|krate| self.db.include_macro_invoc(*krate))
-            .filter(|&(_, include_file_id)| include_file_id == file_id)
+            .map(|krate| self.db.include_macro_invoc(*krate))
         {
-            let macro_file = invoc.as_macro_file();
-            let expansion_info = {
-                self.with_ctx(|ctx| {
-                    ctx.cache
-                        .expansion_info_cache
-                        .entry(macro_file)
-                        .or_insert_with(|| {
-                            let exp_info = macro_file.expansion_info(self.db.upcast());
+            for (invoc, _) in
+                iter.iter().filter(|&&(_, include_file_id)| include_file_id == file_id)
+            {
+                let macro_file = invoc.as_macro_file();
+                let expansion_info = {
+                    self.with_ctx(|ctx| {
+                        ctx.cache
+                            .expansion_info_cache
+                            .entry(macro_file)
+                            .or_insert_with(|| {
+                                let exp_info = macro_file.expansion_info(self.db.upcast());

-                            let InMacroFile { file_id, value } = exp_info.expanded();
-                            if let InFile { file_id, value: Some(value) } = exp_info.arg() {
-                                self.cache(value.ancestors().last().unwrap(), file_id);
-                            }
-                            self.cache(value, file_id.into());
+                                let InMacroFile { file_id, value } = exp_info.expanded();
+                                if let InFile { file_id, value: Some(value) } = exp_info.arg() {
+                                    self.cache(value.ancestors().last().unwrap(), file_id);
+                                }
+                                self.cache(value, file_id.into());

-                            exp_info
-                        })
-                        .clone()
-                })
-            };
+                                exp_info
+                            })
+                            .clone()
+                    })
+                };

-            // FIXME: uncached parse
-            // Create the source analyzer for the macro call scope
-            let Some(sa) = expansion_info
-                .arg()
-                .value
-                .and_then(|it| self.analyze_no_infer(&it.ancestors().last().unwrap()))
-            else {
-                continue;
-            };
+                // FIXME: uncached parse
+                // Create the source analyzer for the macro call scope
+                let Some(sa) = expansion_info
+                    .arg()
+                    .value
+                    .and_then(|it| self.analyze_no_infer(&it.ancestors().last().unwrap()))
+                else {
+                    continue;
+                };

-            // get mapped token in the include! macro file
-            let span = span::Span {
-                range: token.text_range(),
-                anchor: span::SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
-                ctx: SyntaxContextId::ROOT,
-            };
-            let Some(InMacroFile { file_id, value: mut mapped_tokens }) =
-                expansion_info.map_range_down_exact(span)
-            else {
-                continue;
-            };
+                // get mapped token in the include! macro file
+                let span = span::Span {
+                    range: token.text_range(),
+                    anchor: span::SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+                    ctx: SyntaxContextId::ROOT,
+                };
+                let Some(InMacroFile { file_id, value: mut mapped_tokens }) =
+                    expansion_info.map_range_down_exact(span)
+                else {
+                    continue;
+                };

-            // if we find one, then return
-            if let Some(t) = mapped_tokens.next() {
-                return Some((sa, file_id.into(), t, span));
+                // if we find one, then return
+                if let Some(t) = mapped_tokens.next() {
+                    return Some((sa, file_id.into(), t, span));
+                }
             }
         }
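The nested loop above follows from the new return type: `Arc<[T]>` has no by-value iterator, and calling `.iter()` inside the old `flat_map` would borrow an `Arc` temporary that is dropped immediately. Binding each crate's `Arc` in the outer loop keeps it alive while the inner loop borrows and filters it. A small sketch of the same pattern with stand-in types (not rust-analyzer's API):

```rust
use std::sync::Arc;

// Stand-in for a memoized query returning shared (call, file) pairs.
fn include_invocations(krate: u32) -> Arc<[(u32, u32)]> {
    Arc::from(vec![(krate, 100 * krate), (krate, 100 * krate + 1)])
}

fn main() {
    let target_file = 201;
    // Outer loop owns each Arc; inner loop borrows it, mirroring the change above.
    for invocations in [1u32, 2, 3].iter().map(|&krate| include_invocations(krate)) {
        for &(call, file) in invocations.iter().filter(|&&(_, file)| file == target_file) {
            println!("call {call} includes file {file}");
        }
    }
}
```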
@@ -94,8 +94,9 @@
     },
     hir::{BindingId, LabelId},
     AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId,
-    FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId,
-    StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId,
+    FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, Lookup, MacroId,
+    ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId,
+    VariantId,
 };
 use hir_expand::{
     attrs::AttrId, name::AsName, ExpansionInfo, HirFileId, HirFileIdExt, MacroCallId,
@@ -131,11 +132,30 @@ impl SourceToDefCtx<'_, '_> {
         for &crate_id in self.db.relevant_crates(file).iter() {
             // Note: `mod` declarations in block modules cannot be supported here
             let crate_def_map = self.db.crate_def_map(crate_id);
-            mods.extend(
+            let n_mods = mods.len();
+            let modules = |file| {
                 crate_def_map
                     .modules_for_file(file)
-                    .map(|local_id| crate_def_map.module_id(local_id)),
-            )
+                    .map(|local_id| crate_def_map.module_id(local_id))
+            };
+            mods.extend(modules(file));
+            if mods.len() == n_mods {
+                mods.extend(
+                    self.db
+                        .include_macro_invoc(crate_id)
+                        .iter()
+                        .filter(|&&(_, file_id)| file_id == file)
+                        .flat_map(|(call, _)| {
+                            modules(
+                                call.lookup(self.db.upcast())
+                                    .kind
+                                    .file_id()
+                                    .original_file(self.db.upcast())
+                                    .file_id(),
+                            )
+                        }),
+                );
+            }
         }
         if mods.is_empty() {
             // FIXME: detached file
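The new branch above only consults `include!` invocations when `modules_for_file` found nothing for the file itself (the `n_mods` check, since `mods` accumulates across crates), and then attributes the file to the module(s) of the calling file. A minimal per-crate sketch of that fallback order, using hypothetical `FileId`/`ModuleId`/`CrateMap` types rather than rust-analyzer's:

```rust
#[derive(Clone, Copy, PartialEq)]
struct FileId(u32);
#[derive(Clone, Copy, Debug)]
struct ModuleId(u32);

struct CrateMap {
    /// Modules declared for each file via `mod`.
    modules_by_file: Vec<(FileId, ModuleId)>,
    /// `include!` invocations as (calling file, included file).
    includes: Vec<(FileId, FileId)>,
}

impl CrateMap {
    fn modules_for_file(&self, file: FileId) -> Vec<ModuleId> {
        self.modules_by_file.iter().filter(|&&(f, _)| f == file).map(|&(_, m)| m).collect()
    }

    /// Prefer modules declared for the file itself; only if there are none,
    /// resolve the file through whichever file include!s it.
    fn modules_for_file_or_include(&self, file: FileId) -> Vec<ModuleId> {
        let mut mods = self.modules_for_file(file);
        if mods.is_empty() {
            mods.extend(
                self.includes
                    .iter()
                    .filter(|&&(_, included)| included == file)
                    .flat_map(|&(caller, _)| self.modules_for_file(caller)),
            );
        }
        mods
    }
}

fn main() {
    // File 0 is module 0 and include!s file 1, which declares no module of its own.
    let map = CrateMap {
        modules_by_file: vec![(FileId(0), ModuleId(0))],
        includes: vec![(FileId(0), FileId(1))],
    };
    // The included file resolves to the calling module: prints [ModuleId(0)]
    println!("{:?}", map.modules_for_file_or_include(FileId(1)));
}
```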
@@ -273,11 +273,7 @@ fn f() {

 #[test]
 fn include_does_not_break_diagnostics() {
-    let mut config = DiagnosticsConfig::test_sample();
-    config.disabled.insert("inactive-code".to_owned());
-    config.disabled.insert("unlinked-file".to_owned());
-    check_diagnostics_with_config(
-        config,
+    check_diagnostics(
         r#"
 //- minicore: include
 //- /lib.rs crate:lib
@@ -499,6 +499,18 @@ mod bar {
 mod bar {
     mod foo;
 }
+"#,
+    );
+}
+
+#[test]
+fn include_macro_works() {
+    check_diagnostics(
+        r#"
+//- minicore: include
+//- /main.rs
+include!("bar/foo/mod.rs");
+//- /bar/foo/mod.rs
 "#,
     );
 }
@@ -251,6 +251,12 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur
         let mut actual = annotations.remove(&file_id).unwrap_or_default();
         let expected = extract_annotations(&db.file_text(file_id));
         actual.sort_by_key(|(range, _)| range.start());
+        // FIXME: We should panic on duplicates instead, but includes currently cause us to report
+        // diagnostics twice for the calling module when both files are queried.
+        actual.dedup();
+        // actual.iter().duplicates().for_each(|(range, msg)| {
+        //     panic!("duplicate diagnostic at {:?}: {msg:?}", line_index.line_col(range.start()))
+        // });
         if expected.is_empty() {
             // makes minicore smoke test debuggable
             for (e, _) in &actual {
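One note on the harness change: `Vec::dedup` only removes consecutive duplicates, so the `sort_by_key` just above it is what makes the doubled include-related diagnostics collapse. A small self-contained illustration:

```rust
fn main() {
    // Diagnostics as (range_start, message); the include! duplication yields
    // two identical entries that only become adjacent after sorting.
    let mut actual = vec![(30, "unused variable"), (10, "type mismatch"), (30, "unused variable")];
    actual.sort_by_key(|&(start, _)| start);
    actual.dedup(); // removes consecutive duplicates only
    assert_eq!(actual, [(10, "type mismatch"), (30, "unused variable")]);
}
```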