Auto merge of #16909 - Veykril:spans, r=Veykril
fix: Keep the span for `Attr::Literal` around
Commit 20290b2992
@@ -148,12 +148,12 @@ pub(crate) fn is_cfg_enabled(&self, cfg_options: &CfgOptions) -> bool {
         }
     }
 
-    pub fn lang(&self) -> Option<&SmolStr> {
+    pub fn lang(&self) -> Option<&str> {
         self.by_key("lang").string_value()
     }
 
     pub fn lang_item(&self) -> Option<LangItem> {
-        self.by_key("lang").string_value().and_then(|it| LangItem::from_str(it))
+        self.by_key("lang").string_value().and_then(LangItem::from_str)
     }
 
     pub fn has_doc_hidden(&self) -> bool {
@@ -178,7 +178,7 @@ pub fn doc_aliases(&self) -> impl Iterator<Item = SmolStr> + '_ {
         self.doc_exprs().flat_map(|doc_expr| doc_expr.aliases().to_vec())
     }
 
-    pub fn export_name(&self) -> Option<&SmolStr> {
+    pub fn export_name(&self) -> Option<&str> {
         self.by_key("export_name").string_value()
     }
 
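The return-type change from `Option<&SmolStr>` to `Option<&str>` is what lets later hunks drop their `SmolStr::as_str` / `Deref::deref` detours. A self-contained sketch of the caller-side difference, using the `smol_str` crate directly (rust-analyzer gets `SmolStr` via its `syntax` re-export):

```rust
use smol_str::SmolStr;

// Before: comparing an Option<&SmolStr> against a literal needs a conversion first.
fn old_style(name: Option<&SmolStr>) -> bool {
    name.map(SmolStr::as_str) == Some("main")
}

// After: an Option<&str> compares directly, as in the `is_main` hunk further down.
fn new_style(name: Option<&str>) -> bool {
    name == Some("main")
}

fn main() {
    let name = SmolStr::new("main");
    assert!(old_style(Some(&name)));
    assert!(new_style(Some(name.as_str())));
}
```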
@@ -565,7 +565,7 @@ pub fn tt_values(self) -> impl Iterator<Item = &'attr crate::tt::Subtree> {
         self.attrs().filter_map(|attr| attr.token_tree_value())
     }
 
-    pub fn string_value(self) -> Option<&'attr SmolStr> {
+    pub fn string_value(self) -> Option<&'attr str> {
         self.attrs().find_map(|attr| attr.string_value())
     }
 
@@ -192,7 +192,7 @@ fn collect_lang_item<T>(
 
 pub(crate) fn lang_attr(db: &dyn DefDatabase, item: AttrDefId) -> Option<LangItem> {
     let attrs = db.attrs(item);
-    attrs.by_key("lang").string_value().and_then(|it| LangItem::from_str(it))
+    attrs.by_key("lang").string_value().and_then(LangItem::from_str)
 }
 
 pub(crate) fn notable_traits_in_deps(
@@ -24,7 +24,7 @@
 use rustc_hash::{FxHashMap, FxHashSet};
 use span::{Edition, ErasedFileAstId, FileAstId, Span, SyntaxContextId};
 use stdx::always;
-use syntax::{ast, SmolStr};
+use syntax::ast;
 use triomphe::Arc;
 
 use crate::{
@@ -312,7 +312,7 @@ fn seed_with_top_level(&mut self) {
                     }
                 }
                 () if *attr_name == hir_expand::name![crate_type] => {
-                    if let Some("proc-macro") = attr.string_value().map(SmolStr::as_str) {
+                    if let Some("proc-macro") = attr.string_value() {
                         self.is_proc_macro = true;
                     }
                 }
@@ -1902,7 +1902,7 @@ fn process_macro_use_extern_crate<'a>(
     }
 
     fn collect_module(&mut self, module_id: FileItemTreeId<Mod>, attrs: &Attrs) {
-        let path_attr = attrs.by_key("path").string_value().map(SmolStr::as_str);
+        let path_attr = attrs.by_key("path").string_value();
         let is_macro_use = attrs.by_key("macro_use").exists();
         let module = &self.item_tree[module_id];
         match &module.kind {
@@ -2146,7 +2146,7 @@ fn collect_macro_rules(&mut self, id: FileItemTreeId<MacroRules>, module: Module
             Some(it) => {
                 // FIXME: a hacky way to create a Name from string.
                 name = tt::Ident {
-                    text: it.clone(),
+                    text: it.into(),
                     span: Span {
                         range: syntax::TextRange::empty(syntax::TextSize::new(0)),
                         anchor: span::SpanAnchor {
@@ -8,8 +8,8 @@
 use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
 use smallvec::{smallvec, SmallVec};
 use span::{Span, SyntaxContextId};
-use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
-use triomphe::Arc;
+use syntax::{ast, format_smolstr, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
+use triomphe::ThinArc;
 
 use crate::{
     db::ExpandDatabase,
@@ -22,8 +22,7 @@
 /// Syntactical attributes, without filtering of `cfg_attr`s.
 #[derive(Default, Debug, Clone, PartialEq, Eq)]
 pub struct RawAttrs {
-    // FIXME: Make this a ThinArc
-    entries: Option<Arc<[Attr]>>,
+    entries: Option<ThinArc<(), Attr>>,
 }
 
 impl ops::Deref for RawAttrs {
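The old field even carried a `// FIXME: Make this a ThinArc`, which this hunk resolves. A minimal sketch of the `triomphe::ThinArc` shape, assuming the same API the diff itself uses (`from_header_and_iter` and the `.slice` field): header and element slice live in one allocation behind a thin (single-word) pointer, unlike the fat `Arc<[Attr]>` pointer it replaces.

```rust
use triomphe::ThinArc;

fn main() {
    // Header is `()` here, matching `ThinArc<(), Attr>` in the hunk above.
    let entries = vec![1u32, 2, 3];
    let arc: ThinArc<(), u32> = ThinArc::from_header_and_iter((), entries.into_iter());

    // Element access goes through `.slice`, which is why `Deref` and `merge`
    // below switch from `it` / `a.iter()` to `&it.slice` / `a.slice.iter()`.
    assert_eq!(&arc.slice, &[1u32, 2, 3][..]);
}
```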
@@ -31,7 +30,7 @@ impl ops::Deref for RawAttrs {
 
     fn deref(&self) -> &[Attr] {
         match &self.entries {
-            Some(it) => it,
+            Some(it) => &it.slice,
             None => &[],
         }
     }
@@ -45,20 +44,34 @@ pub fn new(
         owner: &dyn ast::HasAttrs,
         span_map: SpanMapRef<'_>,
     ) -> Self {
-        let entries = collect_attrs(owner).filter_map(|(id, attr)| match attr {
-            Either::Left(attr) => {
-                attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
-            }
-            Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
-                id,
-                input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
-                path: Interned::new(ModPath::from(crate::name!(doc))),
-                ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx,
-            }),
-        });
-        let entries: Arc<[Attr]> = Arc::from_iter(entries);
+        let entries: Vec<_> = collect_attrs(owner)
+            .filter_map(|(id, attr)| match attr {
+                Either::Left(attr) => {
+                    attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
+                }
+                Either::Right(comment) => comment.doc_comment().map(|doc| {
+                    let span = span_map.span_for_range(comment.syntax().text_range());
+                    Attr {
+                        id,
+                        input: Some(Interned::new(AttrInput::Literal(tt::Literal {
+                            // FIXME: Escape quotes from comment content
+                            text: SmolStr::new(format_smolstr!("\"{doc}\"",)),
+                            span,
+                        }))),
+                        path: Interned::new(ModPath::from(crate::name!(doc))),
+                        ctxt: span.ctx,
+                    }
+                }),
+            })
+            .collect();
 
-        Self { entries: if entries.is_empty() { None } else { Some(entries) } }
+        let entries = if entries.is_empty() {
+            None
+        } else {
+            Some(ThinArc::from_header_and_iter((), entries.into_iter()))
+        };
+
+        RawAttrs { entries }
     }
 
     pub fn from_attrs_owner(
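Doc comments are modelled as `#[doc = "..."]` attributes, so the collected comment text is wrapped in quotes to look like a string literal token, and the comment's span is kept alongside it. A trivial sketch of that quoting step (plain `format!` standing in for `format_smolstr!`); note the FIXME above: quotes inside the comment are not escaped yet.

```rust
fn doc_comment_to_literal_text(doc: &str) -> String {
    // Mirrors `format_smolstr!("\"{doc}\"")` from the hunk above.
    format!("\"{doc}\"")
}

fn main() {
    // `/// Parses a thing.` ends up stored as the literal token `"Parses a thing."`.
    assert_eq!(doc_comment_to_literal_text("Parses a thing."), r#""Parses a thing.""#);
}
```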
@@ -75,16 +88,20 @@ pub fn merge(&self, other: Self) -> Self {
             (None, entries @ Some(_)) => Self { entries },
             (Some(entries), None) => Self { entries: Some(entries.clone()) },
             (Some(a), Some(b)) => {
-                let last_ast_index = a.last().map_or(0, |it| it.id.ast_index() + 1) as u32;
-                Self {
-                    entries: Some(Arc::from_iter(a.iter().cloned().chain(b.iter().map(|it| {
+                let last_ast_index = a.slice.last().map_or(0, |it| it.id.ast_index() + 1) as u32;
+                let items = a
+                    .slice
+                    .iter()
+                    .cloned()
+                    .chain(b.slice.iter().map(|it| {
                         let mut it = it.clone();
                         it.id.id = (it.id.ast_index() as u32 + last_ast_index)
                             | (it.id.cfg_attr_index().unwrap_or(0) as u32)
                                 << AttrId::AST_INDEX_BITS;
                         it
-                    })))),
-                }
+                    }))
+                    .collect::<Vec<_>>();
+                Self { entries: Some(ThinArc::from_header_and_iter((), items.into_iter())) }
             }
         }
     }
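The id arithmetic in `merge` packs two indices into one `u32`: the attribute's AST index in the low bits and its `cfg_attr` sub-index above them, with the second list's AST indices shifted past the first list's. A self-contained sketch of that packing; the bit split below (`AST_INDEX_BITS = 25`) is an assumption for illustration only, the real constants are defined on `AttrId`.

```rust
// Hypothetical split; rust-analyzer's actual values live on `AttrId`.
const AST_INDEX_BITS: u32 = 25;

fn pack(ast_index: u32, cfg_attr_index: u32) -> u32 {
    // Same expression shape as in `merge` above: low bits = AST index,
    // high bits = cfg_attr sub-index.
    ast_index | cfg_attr_index << AST_INDEX_BITS
}

fn main() {
    let last_ast_index = 3; // the first attribute list held AST indices 0..3
    let merged = pack(1 + last_ast_index, 2); // attr #1 of the second list, cfg_attr slot 2
    assert_eq!(merged & ((1 << AST_INDEX_BITS) - 1), 4); // re-based AST index
    assert_eq!(merged >> AST_INDEX_BITS, 2); // cfg_attr sub-index preserved
}
```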
@@ -100,41 +117,47 @@ pub fn filter(self, db: &dyn ExpandDatabase, krate: CrateId) -> RawAttrs {
         }
 
         let crate_graph = db.crate_graph();
-        let new_attrs = Arc::from_iter(self.iter().flat_map(|attr| -> SmallVec<[_; 1]> {
-            let is_cfg_attr =
-                attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]);
-            if !is_cfg_attr {
-                return smallvec![attr.clone()];
-            }
+        let new_attrs =
+            self.iter()
+                .flat_map(|attr| -> SmallVec<[_; 1]> {
+                    let is_cfg_attr =
+                        attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]);
+                    if !is_cfg_attr {
+                        return smallvec![attr.clone()];
+                    }
 
-            let subtree = match attr.token_tree_value() {
-                Some(it) => it,
-                _ => return smallvec![attr.clone()],
-            };
+                    let subtree = match attr.token_tree_value() {
+                        Some(it) => it,
+                        _ => return smallvec![attr.clone()],
+                    };
 
-            let (cfg, parts) = match parse_cfg_attr_input(subtree) {
-                Some(it) => it,
-                None => return smallvec![attr.clone()],
-            };
-            let index = attr.id;
-            let attrs = parts
-                .enumerate()
-                .take(1 << AttrId::CFG_ATTR_BITS)
-                .filter_map(|(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx)));
+                    let (cfg, parts) = match parse_cfg_attr_input(subtree) {
+                        Some(it) => it,
+                        None => return smallvec![attr.clone()],
+                    };
+                    let index = attr.id;
+                    let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(
+                        |(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx)),
+                    );
 
-            let cfg_options = &crate_graph[krate].cfg_options;
-            let cfg = Subtree { delimiter: subtree.delimiter, token_trees: Box::from(cfg) };
-            let cfg = CfgExpr::parse(&cfg);
-            if cfg_options.check(&cfg) == Some(false) {
-                smallvec![]
-            } else {
-                cov_mark::hit!(cfg_attr_active);
+                    let cfg_options = &crate_graph[krate].cfg_options;
+                    let cfg = Subtree { delimiter: subtree.delimiter, token_trees: Box::from(cfg) };
+                    let cfg = CfgExpr::parse(&cfg);
+                    if cfg_options.check(&cfg) == Some(false) {
+                        smallvec![]
+                    } else {
+                        cov_mark::hit!(cfg_attr_active);
 
-                attrs.collect()
-            }
-        }));
 
-        RawAttrs { entries: Some(new_attrs) }
+                        attrs.collect()
+                    }
+                })
+                .collect::<Vec<_>>();
+        let entries = if new_attrs.is_empty() {
+            None
+        } else {
+            Some(ThinArc::from_header_and_iter((), new_attrs.into_iter()))
+        };
+        RawAttrs { entries }
     }
 }
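For reference, what `filter` is evaluating here, in plain Rust rather than rust-analyzer internals: a `cfg_attr` whose condition holds behaves like the attributes it wraps, otherwise it disappears, and each wrapped attribute gets its own `cfg_attr` sub-index (hence the `with_cfg_attr(idx)` calls above).

```rust
// Build with `--cfg with_debug` and `Wrapped` derives Debug and Clone; without
// it, the whole attribute is dropped, which is the `Some(false)` branch above.
#[cfg_attr(with_debug, derive(Debug), derive(Clone))]
struct Wrapped;

fn main() {
    let _ = Wrapped;
}
```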
@@ -179,8 +202,7 @@ pub struct Attr {
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum AttrInput {
     /// `#[attr = "string"]`
-    // FIXME: This is losing span
-    Literal(SmolStr),
+    Literal(tt::Literal),
     /// `#[attr(subtree)]`
     TokenTree(Box<tt::Subtree>),
 }
@@ -188,7 +210,7 @@ pub enum AttrInput {
 impl fmt::Display for AttrInput {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
-            AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
+            AttrInput::Literal(lit) => write!(f, " = {lit}"),
             AttrInput::TokenTree(tt) => tt.fmt(f),
         }
     }
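The `Display` impl can stop adding its own quotes because the stored text changed meaning: the old variant held the unescaped string contents (`main`), the new one holds the literal token verbatim (`"main"`, quotes included), as the `from_src` hunk below shows. A stand-in sketch (not the real `tt::Literal`, whose `Display` is assumed to write the token text as-is):

```rust
use std::fmt;

struct Literal {
    text: String,
}

impl fmt::Display for Literal {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.text)
    }
}

fn main() {
    // Old: unquoted contents, so Display had to wrap them in `"{}"`.
    let old_text = "main";
    assert_eq!(format!(" = \"{}\"", old_text.escape_debug()), r#" = "main""#);

    // New: the token text already carries its quotes, so ` = {lit}` is enough.
    let lit = Literal { text: r#""main""#.to_owned() };
    assert_eq!(format!(" = {lit}"), r#" = "main""#);
}
```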
@@ -208,11 +230,10 @@ fn from_src(
         })?);
         let span = span_map.span_for_range(range);
         let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
-            let value = match lit.kind() {
-                ast::LiteralKind::String(string) => string.value()?.into(),
-                _ => lit.syntax().first_token()?.text().trim_matches('"').into(),
-            };
-            Some(Interned::new(AttrInput::Literal(value)))
+            Some(Interned::new(AttrInput::Literal(tt::Literal {
+                text: lit.token().text().into(),
+                span,
+            })))
         } else if let Some(tt) = ast.token_tree() {
             let tree = syntax_node_to_token_tree(tt.syntax(), span_map, span);
             Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
@@ -245,9 +266,8 @@ fn from_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree], id: AttrId) -> Option<
             }
             Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))) => {
                 let input = match input.get(1) {
-                    Some(tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text, .. }))) => {
-                        //FIXME the trimming here isn't quite right, raw strings are not handled
-                        Some(Interned::new(AttrInput::Literal(text.trim_matches('"').into())))
+                    Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) => {
+                        Some(Interned::new(AttrInput::Literal(lit.clone())))
                     }
                     _ => None,
                 };
@@ -265,9 +285,14 @@ pub fn path(&self) -> &ModPath {
 
 impl Attr {
     /// #[path = "string"]
-    pub fn string_value(&self) -> Option<&SmolStr> {
+    pub fn string_value(&self) -> Option<&str> {
         match self.input.as_deref()? {
-            AttrInput::Literal(it) => Some(it),
+            AttrInput::Literal(it) => match it.text.strip_prefix('r') {
+                Some(it) => it.trim_matches('#'),
+                None => it.text.as_str(),
+            }
+            .strip_prefix('"')?
+            .strip_suffix('"'),
             _ => None,
         }
     }
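Because the stored text is now the raw token (quotes, and possibly a raw-string prefix, included), `string_value` has to undo that surface syntax before handing the contents to callers such as the `lang`, `export_name` and `#[path]` users above. A standalone sketch of the same stripping logic with a couple of worked inputs; escape sequences inside the string are still not processed, matching the code above.

```rust
fn literal_text_to_str(text: &str) -> Option<&str> {
    // `r#"…"#` → drop the `r`, the `#`s, then the surrounding quotes;
    // `"…"`    → drop the surrounding quotes;
    // anything else (e.g. `42`) is not a string literal → None.
    match text.strip_prefix('r') {
        Some(rest) => rest.trim_matches('#'),
        None => text,
    }
    .strip_prefix('"')?
    .strip_suffix('"')
}

fn main() {
    assert_eq!(literal_text_to_str(r#""main""#), Some("main"));
    assert_eq!(literal_text_to_str(r##"r#"proc-macro"#"##), Some("proc-macro"));
    assert_eq!(literal_text_to_str("42"), None);
}
```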
@@ -2008,8 +2008,7 @@ pub fn is_main(self, db: &dyn HirDatabase) -> bool {
         }
         let data = db.function_data(self.id);
 
-        data.name.to_smol_str() == "main"
-            || data.attrs.export_name().map(core::ops::Deref::deref) == Some("main")
+        data.name.to_smol_str() == "main" || data.attrs.export_name() == Some("main")
     }
 
     /// Does this function have the ignore attribute?
@@ -540,7 +540,7 @@ pub(crate) fn check_stability(&self, attrs: Option<&hir::Attrs>) -> bool {
     /// Whether the given trait is an operator trait or not.
     pub(crate) fn is_ops_trait(&self, trait_: hir::Trait) -> bool {
         match trait_.attrs(self.db).lang() {
-            Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang.as_str()),
+            Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang),
             None => false,
         }
     }
@@ -283,6 +283,10 @@ fn test_disabled_diagnostics() {
 
 #[test]
 fn minicore_smoke_test() {
+    if test_utils::skip_slow_tests() {
+        return;
+    }
+
     fn check(minicore: MiniCore) {
         let source = minicore.source_code();
         let mut config = DiagnosticsConfig::test_sample();
@@ -10,7 +10,7 @@
             debug::{DebugQueryTable, TableEntry},
             Query, QueryTable,
         },
-        CrateData, FileId, FileTextQuery, ParseQuery, SourceDatabase, SourceRootId,
+        CompressedFileTextQuery, CrateData, FileId, ParseQuery, SourceDatabase, SourceRootId,
     },
     symbol_index::ModuleSymbolsQuery,
 };
@@ -38,7 +38,7 @@
 pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
     let mut buf = String::new();
 
-    format_to!(buf, "{}\n", collect_query(FileTextQuery.in_db(db)));
+    format_to!(buf, "{}\n", collect_query(CompressedFileTextQuery.in_db(db)));
     format_to!(buf, "{}\n", collect_query(ParseQuery.in_db(db)));
     format_to!(buf, "{}\n", collect_query(ParseMacroExpansionQuery.in_db(db)));
     format_to!(buf, "{}\n", collect_query(LibrarySymbolsQuery.in_db(db)));
@@ -160,7 +160,7 @@ impl QueryCollect for ParseMacroExpansionQuery {
     type Collector = SyntaxTreeStats<true>;
 }
 
-impl QueryCollect for FileTextQuery {
+impl QueryCollect for CompressedFileTextQuery {
     type Collector = FilesStats;
 }
 
@@ -188,8 +188,8 @@ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
     }
 }
 
-impl StatCollect<FileId, Arc<str>> for FilesStats {
-    fn collect_entry(&mut self, _: FileId, value: Option<Arc<str>>) {
+impl StatCollect<FileId, Arc<[u8]>> for FilesStats {
+    fn collect_entry(&mut self, _: FileId, value: Option<Arc<[u8]>>) {
         self.total += 1;
         self.size += value.unwrap().len();
     }