Auto merge of #17863 - Veykril:include-diags, r=Veykril
fix: Resolve included files to their calling modules in the IDE layer

Fixes https://github.com/rust-lang/rust-analyzer/issues/17390, at the expense of reporting duplicate diagnostics for a module that contains an include! call when both the calling and the included file are queried.
commit ff63552892
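
Note: the scenario this change targets looks roughly like the fixture sketch below (hypothetical, not one of the fixtures in this diff). Previously a file that is only pulled in via include! had no owning module in the IDE layer, so IDE features inside it could not be resolved; with this change it resolves to the module containing the include! call, which is also why that module's diagnostics can currently be reported twice when both files are queried.

    //- /lib.rs crate:lib
    include!("included.rs"); // the module around this call now "owns" included.rs
    //- /included.rs
    pub fn from_include() {} // IDE features here now resolve to the calling module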
@@ -240,14 +240,14 @@ fn fields_attrs_source_map(
     fn crate_supports_no_std(&self, crate_id: CrateId) -> bool;

-    fn include_macro_invoc(&self, crate_id: CrateId) -> Vec<(MacroCallId, EditionedFileId)>;
+    fn include_macro_invoc(&self, crate_id: CrateId) -> Arc<[(MacroCallId, EditionedFileId)]>;
 }

 // return: macro call id and include file id
 fn include_macro_invoc(
     db: &dyn DefDatabase,
     krate: CrateId,
-) -> Vec<(MacroCallId, EditionedFileId)> {
+) -> Arc<[(MacroCallId, EditionedFileId)]> {
     db.crate_def_map(krate)
         .modules
         .values()
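Note: the include_macro_invoc query now returns Arc<[(MacroCallId, EditionedFileId)]> instead of Vec, so handing the memoized result out of the database is a cheap reference-count bump rather than a deep copy. As a standalone illustration of the collection pattern (generic Rust, not code from this commit):

    use std::sync::Arc;

    fn collect_shared() {
        // Arc<[T]> implements FromIterator<T>, so collect() builds the shared
        // slice once and later clones only bump the reference count.
        let shared: Arc<[(u32, u32)]> = (0..3).map(|i| (i, i * 2)).collect();
        let cheap_copy = Arc::clone(&shared);
        assert_eq!(shared.len(), cheap_copy.len());
    }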
@@ -770,59 +770,62 @@ fn is_from_include_file(
         let file_id = self.find_file(&parent).file_id.file_id()?;

         // iterate related crates and find all include! invocations that include_file_id matches
-        for (invoc, _) in self
+        for iter in self
             .db
             .relevant_crates(file_id.file_id())
             .iter()
-            .flat_map(|krate| self.db.include_macro_invoc(*krate))
-            .filter(|&(_, include_file_id)| include_file_id == file_id)
+            .map(|krate| self.db.include_macro_invoc(*krate))
         {
-            let macro_file = invoc.as_macro_file();
-            let expansion_info = {
-                self.with_ctx(|ctx| {
-                    ctx.cache
-                        .expansion_info_cache
-                        .entry(macro_file)
-                        .or_insert_with(|| {
-                            let exp_info = macro_file.expansion_info(self.db.upcast());
+            for (invoc, _) in
+                iter.iter().filter(|&&(_, include_file_id)| include_file_id == file_id)
+            {
+                let macro_file = invoc.as_macro_file();
+                let expansion_info = {
+                    self.with_ctx(|ctx| {
+                        ctx.cache
+                            .expansion_info_cache
+                            .entry(macro_file)
+                            .or_insert_with(|| {
+                                let exp_info = macro_file.expansion_info(self.db.upcast());

-                            let InMacroFile { file_id, value } = exp_info.expanded();
-                            if let InFile { file_id, value: Some(value) } = exp_info.arg() {
-                                self.cache(value.ancestors().last().unwrap(), file_id);
-                            }
-                            self.cache(value, file_id.into());
+                                let InMacroFile { file_id, value } = exp_info.expanded();
+                                if let InFile { file_id, value: Some(value) } = exp_info.arg() {
+                                    self.cache(value.ancestors().last().unwrap(), file_id);
+                                }
+                                self.cache(value, file_id.into());

-                            exp_info
-                        })
-                        .clone()
-                })
-            };
+                                exp_info
+                            })
+                            .clone()
+                    })
+                };

-            // FIXME: uncached parse
-            // Create the source analyzer for the macro call scope
-            let Some(sa) = expansion_info
-                .arg()
-                .value
-                .and_then(|it| self.analyze_no_infer(&it.ancestors().last().unwrap()))
-            else {
-                continue;
-            };
+                // FIXME: uncached parse
+                // Create the source analyzer for the macro call scope
+                let Some(sa) = expansion_info
+                    .arg()
+                    .value
+                    .and_then(|it| self.analyze_no_infer(&it.ancestors().last().unwrap()))
+                else {
+                    continue;
+                };

-            // get mapped token in the include! macro file
-            let span = span::Span {
-                range: token.text_range(),
-                anchor: span::SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
-                ctx: SyntaxContextId::ROOT,
-            };
-            let Some(InMacroFile { file_id, value: mut mapped_tokens }) =
-                expansion_info.map_range_down_exact(span)
-            else {
-                continue;
-            };
+                // get mapped token in the include! macro file
+                let span = span::Span {
+                    range: token.text_range(),
+                    anchor: span::SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+                    ctx: SyntaxContextId::ROOT,
+                };
+                let Some(InMacroFile { file_id, value: mut mapped_tokens }) =
+                    expansion_info.map_range_down_exact(span)
+                else {
+                    continue;
+                };

-            // if we find one, then return
-            if let Some(t) = mapped_tokens.next() {
-                return Some((sa, file_id.into(), t, span));
+                // if we find one, then return
+                if let Some(t) = mapped_tokens.next() {
+                    return Some((sa, file_id.into(), t, span));
+                }
             }
         }

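Note: the single loop is split in two because each crate's query result is now an owned Arc<[..]>: a flat_map closure can no longer hand out borrows of a temporary, so the outer loop keeps each Arc alive while the inner loop filters its contents. A minimal sketch of that borrow problem (illustrative only, not rust-analyzer code):

    use std::sync::Arc;

    fn per_crate(krate: u32) -> Arc<[u32]> {
        (0..krate).collect()
    }

    fn visit(crates: &[u32]) {
        // `crates.iter().flat_map(|&k| per_crate(k).iter())` would borrow from an
        // Arc that is dropped at the end of the closure; holding the Arc in the
        // outer loop sidesteps that.
        for data in crates.iter().map(|&k| per_crate(k)) {
            for item in data.iter().filter(|&&x| x % 2 == 0) {
                let _ = item;
            }
        }
    }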
@@ -94,8 +94,9 @@
     },
     hir::{BindingId, LabelId},
     AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId,
-    FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId,
-    StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId,
+    FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, Lookup, MacroId,
+    ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId,
+    VariantId,
 };
 use hir_expand::{
     attrs::AttrId, name::AsName, ExpansionInfo, HirFileId, HirFileIdExt, MacroCallId,
@@ -131,11 +132,30 @@ impl SourceToDefCtx<'_, '_> {
         for &crate_id in self.db.relevant_crates(file).iter() {
             // Note: `mod` declarations in block modules cannot be supported here
             let crate_def_map = self.db.crate_def_map(crate_id);
-            mods.extend(
+            let n_mods = mods.len();
+            let modules = |file| {
                 crate_def_map
                     .modules_for_file(file)
-                    .map(|local_id| crate_def_map.module_id(local_id)),
-            )
+                    .map(|local_id| crate_def_map.module_id(local_id))
+            };
+            mods.extend(modules(file));
+            if mods.len() == n_mods {
+                mods.extend(
+                    self.db
+                        .include_macro_invoc(crate_id)
+                        .iter()
+                        .filter(|&&(_, file_id)| file_id == file)
+                        .flat_map(|(call, _)| {
+                            modules(
+                                call.lookup(self.db.upcast())
+                                    .kind
+                                    .file_id()
+                                    .original_file(self.db.upcast())
+                                    .file_id(),
+                            )
+                        }),
+                );
+            }
         }
         if mods.is_empty() {
             // FIXME: detached file
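Note: the n_mods bookkeeping is a plain "did the direct lookup add anything?" check: only when no module owns the file itself is it attributed to the module(s) of the file containing the include! call, and the Lookup trait added to the import list above is what brings call.lookup(..) into scope. The fallback shape in isolation (illustrative only, with made-up helper names):

    fn owning_modules(direct: &[u32], via_include: &[u32]) -> Vec<u32> {
        let mut mods = Vec::new();
        let n_mods = mods.len();
        mods.extend(direct.iter().copied());
        // Fall back to include!-based attribution only if the direct lookup found nothing.
        if mods.len() == n_mods {
            mods.extend(via_include.iter().copied());
        }
        mods
    }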
@@ -273,11 +273,7 @@ fn f() {

 #[test]
 fn include_does_not_break_diagnostics() {
-    let mut config = DiagnosticsConfig::test_sample();
-    config.disabled.insert("inactive-code".to_owned());
-    config.disabled.insert("unlinked-file".to_owned());
-    check_diagnostics_with_config(
-        config,
+    check_diagnostics(
         r#"
 //- minicore: include
 //- /lib.rs crate:lib
@@ -499,6 +499,18 @@ mod bar {
 mod bar {
     mod foo;
 }
 "#,
     );
 }
+
+#[test]
+fn include_macro_works() {
+    check_diagnostics(
+        r#"
+//- minicore: include
+//- /main.rs
+include!("bar/foo/mod.rs");
+//- /bar/foo/mod.rs
+"#,
+    );
+}
@@ -251,6 +251,12 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur
         let mut actual = annotations.remove(&file_id).unwrap_or_default();
         let expected = extract_annotations(&db.file_text(file_id));
         actual.sort_by_key(|(range, _)| range.start());
+        // FIXME: We should panic on duplicates instead, but includes currently cause us to report
+        // diagnostics twice for the calling module when both files are queried.
+        actual.dedup();
+        // actual.iter().duplicates().for_each(|(range, msg)| {
+        //     panic!("duplicate diagnostic at {:?}: {msg:?}", line_index.line_col(range.start()))
+        // });
         if expected.is_empty() {
             // makes minicore smoke test debuggable
             for (e, _) in &actual {
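Note: Vec::dedup removes only consecutive duplicates, which is why it comes after the sort_by_key call: sorting brings equal (range, message) pairs next to each other so each duplicate collapses to one entry. A standalone illustration (not from this diff):

    fn dedup_after_sort() {
        let mut actual = vec![(10, "unused"), (3, "unresolved"), (10, "unused")];
        actual.sort_by_key(|&(start, _)| start);
        // dedup collapses adjacent equal elements only, so sorting first matters
        actual.dedup();
        assert_eq!(actual, vec![(3, "unresolved"), (10, "unused")]);
    }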