//! "Recursive" syntax highlighting for code in doctests and fixtures.

use std::{mem, ops::Range};

use either::Either;
use hir::{HasAttrs, InFile, Semantics};
use ide_db::{call_info::ActiveParameter, defs::Definition, SymbolKind};
use syntax::{
    ast::{self, AstNode},
    match_ast, AstToken, NodeOrToken, SyntaxNode, SyntaxToken, TextRange, TextSize,
};

use crate::{
    doc_links::extract_definitions_from_markdown, Analysis, HlMod, HlRange, HlTag, RootDatabase,
};

use super::{highlights::Highlights, injector::Injector};
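
/// Highlights the contents of a string literal passed to an `ra_fixture` parameter
/// as Rust source code.
///
/// Returns `None` (adding no highlights) if the call's active parameter name does
/// not start with `ra_fixture` or if the literal cannot be unescaped. `$0` cursor
/// markers inside the fixture are highlighted as keywords, and all highlight ranges
/// are mapped back into the original string literal.
///
/// Illustrative call site (hypothetical, not from this file):
///
/// ```rust,ignore
/// fn check(ra_fixture: &str) { /* run some assertion on the fixture */ }
/// check(r#"fn main() { let x = 92; }"#); // the raw string gets full Rust highlighting
/// ```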
pub(super) fn ra_fixture(
    hl: &mut Highlights,
    sema: &Semantics<RootDatabase>,
    literal: ast::String,
    expanded: SyntaxToken,
) -> Option<()> {
    let active_parameter = ActiveParameter::at_token(&sema, expanded)?;
    if !active_parameter.ident().map_or(false, |name| name.text().starts_with("ra_fixture")) {
        return None;
    }
    let value = literal.value()?;

    if let Some(range) = literal.open_quote_text_range() {
        hl.add(HlRange { range, highlight: HlTag::StringLiteral.into(), binding_hash: None })
    }

    let mut inj = Injector::default();

    let mut text = &*value;
    let mut offset: TextSize = 0.into();

    while !text.is_empty() {
        let marker = "$0";
        let idx = text.find(marker).unwrap_or(text.len());
        let (chunk, next) = text.split_at(idx);
        inj.add(chunk, TextRange::at(offset, TextSize::of(chunk)));

        text = next;
        offset += TextSize::of(chunk);

        if let Some(next) = text.strip_prefix(marker) {
            if let Some(range) = literal.map_range_up(TextRange::at(offset, TextSize::of(marker))) {
                hl.add(HlRange { range, highlight: HlTag::Keyword.into(), binding_hash: None });
            }

            text = next;

            let marker_len = TextSize::of(marker);
            offset += marker_len;
        }
    }

    let (analysis, tmp_file_id) = Analysis::from_single_file(inj.text().to_string());

    for mut hl_range in analysis.highlight(tmp_file_id).unwrap() {
        for range in inj.map_range_up(hl_range.range) {
            if let Some(range) = literal.map_range_up(range) {
                hl_range.range = range;
                hl.add(hl_range);
            }
        }
    }

    if let Some(range) = literal.close_quote_text_range() {
        hl.add(HlRange { range, highlight: HlTag::StringLiteral.into(), binding_hash: None })
    }

    Some(())
}
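
// The ``` fence and the fence info-string tokens that rustdoc still treats as Rust:
// a doc code block whose guards all come from this list (an empty guard counts) is
// injected as a doctest below; anything else, e.g. ```text, is skipped.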
const RUSTDOC_FENCE: &'static str = "```";
const RUSTDOC_FENCE_TOKENS: &[&'static str] = &[
    "",
    "rust",
    "should_panic",
    "ignore",
    "no_run",
    "compile_fail",
    "edition2015",
    "edition2018",
    "edition2021",
];
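
/// Returns the attributes and the `Definition` of the item `node` represents, for
/// all node kinds that can carry documentation.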
fn doc_attributes<'node>(
    sema: &Semantics<RootDatabase>,
    node: &'node SyntaxNode,
) -> Option<(hir::AttrsWithOwner, Definition)> {
    match_ast! {
        match node {
            ast::SourceFile(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::ModuleDef(hir::ModuleDef::Module(def)))),
            ast::Module(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::ModuleDef(hir::ModuleDef::Module(def)))),
            ast::Fn(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::ModuleDef(hir::ModuleDef::Function(def)))),
            ast::Struct(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::ModuleDef(hir::ModuleDef::Adt(hir::Adt::Struct(def))))),
            ast::Union(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::ModuleDef(hir::ModuleDef::Adt(hir::Adt::Union(def))))),
            ast::Enum(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::ModuleDef(hir::ModuleDef::Adt(hir::Adt::Enum(def))))),
            ast::Variant(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::ModuleDef(hir::ModuleDef::Variant(def)))),
            ast::Trait(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::ModuleDef(hir::ModuleDef::Trait(def)))),
            ast::Static(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::ModuleDef(hir::ModuleDef::Static(def)))),
            ast::Const(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::ModuleDef(hir::ModuleDef::Const(def)))),
            ast::TypeAlias(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::ModuleDef(hir::ModuleDef::TypeAlias(def)))),
            ast::Impl(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::SelfType(def))),
            ast::RecordField(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Field(def))),
            ast::TupleField(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Field(def))),
            ast::MacroRules(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Macro(def))),
            // ast::MacroDef(it) => sema.to_def(&it).map(|def| (Box::new(it) as _, def.attrs(sema.db))),
            // ast::Use(it) => sema.to_def(&it).map(|def| (Box::new(it) as _, def.attrs(sema.db))),
            _ => return None
        }
    }
}
/// Injection of syntax highlighting of doctests.
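///
/// Any Rust code blocks in the documentation of `node` (``` fences whose info string
/// is empty or consists of tokens from `RUSTDOC_FENCE_TOKENS`) are wrapped in a
/// synthetic `fn doctest() { ... }`, highlighted as regular Rust code, and the
/// resulting ranges are mapped back onto the original comment text. Intra-doc links
/// on the remaining documentation lines are resolved and highlighted as well.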
pub(super) fn doc_comment(
    hl: &mut Highlights,
    sema: &Semantics<RootDatabase>,
    node: InFile<&SyntaxNode>,
) {
    let (attributes, def) = match doc_attributes(sema, node.value) {
        Some(it) => it,
        None => return,
    };

    let mut inj = Injector::default();
    inj.add_unmapped("fn doctest() {\n");

    let attrs_source_map = attributes.source_map(sema.db);

    let mut is_codeblock = false;
    let mut is_doctest = false;

    // Replace the original, line-spanning comment ranges with new ranges that span
    // only the comment prefix.
    let mut new_comments = Vec::new();
    let mut intra_doc_links = Vec::new();
    let mut string;
    for attr in attributes.by_key("doc").attrs() {
        let InFile { file_id, value: src } = attrs_source_map.source_of(&attr);
        if file_id != node.file_id {
            continue;
        }
        let (line, range, prefix) = match &src {
            Either::Left(it) => {
                string = match find_doc_string_in_attr(attr, it) {
                    Some(it) => it,
                    None => continue,
                };
                let text_range = string.syntax().text_range();
                let text_range = TextRange::new(
                    text_range.start() + TextSize::from(1),
                    text_range.end() - TextSize::from(1),
                );
                let text = string.text();
                (&text[1..text.len() - 1], text_range, "")
            }
            Either::Right(comment) => {
                (comment.text(), comment.syntax().text_range(), comment.prefix())
            }
        };
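
        // `pos` counts the leading characters of the current line that are not part of
        // the doctest: the comment prefix (first line only), one following whitespace
        // character, and a leading `#` "hidden line" marker. That part keeps its comment
        // highlighting, the rest of the line is injected. `range_start` tracks the
        // offset of the current line inside the original file.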
        let mut pos = TextSize::from(prefix.len() as u32);
        let mut range_start = range.start();
        for line in line.split('\n') {
            let line_len = TextSize::from(line.len() as u32);
            let prev_range_start = {
                let next_range_start = range_start + line_len + TextSize::from(1);
                mem::replace(&mut range_start, next_range_start)
            };
            // only the first line has the prefix, so take it away for future iterations
            let mut pos = mem::take(&mut pos);

            match line.find(RUSTDOC_FENCE) {
                Some(idx) => {
                    is_codeblock = !is_codeblock;
                    // Check whether the code block is Rust by inspecting the fence guards
                    let guards = &line[idx + RUSTDOC_FENCE.len()..];
                    let is_rust =
                        guards.split(',').all(|sub| RUSTDOC_FENCE_TOKENS.contains(&sub.trim()));
                    is_doctest = is_codeblock && is_rust;
                    continue;
                }
                None if !is_doctest => {
                    intra_doc_links.extend(
                        extract_definitions_from_markdown(line)
                            .into_iter()
                            .filter_map(|(range, link, ns)| {
                                Some(range).zip(validate_intra_doc_link(sema.db, &def, &link, ns))
                            })
                            .map(|(Range { start, end }, def)| {
                                (
                                    def,
                                    TextRange::at(
                                        prev_range_start + TextSize::from(start as u32),
                                        TextSize::from((end - start) as u32),
                                    ),
                                )
                            }),
                    );
                    continue;
                }
                None => (),
            }

            // whitespace after the comment prefix is ignored
            if let Some(ws) = line[pos.into()..].chars().next().filter(|c| c.is_whitespace()) {
                pos += TextSize::of(ws);
            }
            // lines marked with `#` should be hidden in the output, so we skip the `#` char
            if line[pos.into()..].starts_with('#') {
                pos += TextSize::of('#');
            }

            new_comments.push(TextRange::at(prev_range_start, pos));
            inj.add(&line[pos.into()..], TextRange::new(pos, line_len) + prev_range_start);
            inj.add_unmapped("\n");
        }
    }
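
    // Highlight the resolved intra-doc links inside the documentation text.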
    for (def, range) in intra_doc_links {
        hl.add(HlRange {
            range,
            highlight: module_def_to_hl_tag(def)
                | HlMod::Documentation
                | HlMod::Injected
                | HlMod::IntraDocLink,
            binding_hash: None,
        });
    }

    if new_comments.is_empty() {
        return; // no need to run an analysis on an empty file
    }

    inj.add_unmapped("\n}");

    let (analysis, tmp_file_id) = Analysis::from_single_file(inj.text().to_string());

    for HlRange { range, highlight, binding_hash } in
        analysis.with_db(|db| super::highlight(db, tmp_file_id, None, true)).unwrap()
    {
        for range in inj.map_range_up(range) {
            hl.add(HlRange { range, highlight: highlight | HlMod::Injected, binding_hash });
        }
    }
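
    // Highlight the comment prefixes (and the `#` hidden-line markers) that were carved
    // out above as documentation comments.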
    for range in new_comments {
        hl.add(HlRange {
            range,
            highlight: HlTag::Comment | HlMod::Documentation,
            binding_hash: None,
        });
    }
}
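
/// Extracts the string literal holding the doc text of a `#[doc = ...]` attribute,
/// falling back to scanning the attribute's tokens for `#[cfg_attr(..., doc = "...", ...)]`.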
fn find_doc_string_in_attr(attr: &hir::Attr, it: &ast::Attr) -> Option<ast::String> {
    match it.expr() {
        // #[doc = lit]
        Some(ast::Expr::Literal(lit)) => match lit.kind() {
            ast::LiteralKind::String(it) => Some(it),
            _ => None,
        },
        // #[cfg_attr(..., doc = "", ...)]
        None => {
            // We have to hunt for the string token manually here
            let text = attr.string_value()?;
            // FIXME: We just pick the first string literal that has the same text as the doc attribute
            // This means technically we might highlight the wrong one
            it.syntax()
                .descendants_with_tokens()
                .filter_map(NodeOrToken::into_token)
                .filter_map(ast::String::cast)
                .find(|string| {
                    string.text().get(1..string.text().len() - 1).map_or(false, |it| it == text)
                })
        }
        _ => return None,
    }
}
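
/// Resolves an intra-doc `link` (in namespace `ns`) relative to the documented
/// definition, returning the `ModuleDef` it points to, if any.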
fn validate_intra_doc_link(
    db: &RootDatabase,
    def: &Definition,
    link: &str,
    ns: Option<hir::Namespace>,
) -> Option<hir::ModuleDef> {
    match def {
        Definition::ModuleDef(def) => match def {
            hir::ModuleDef::Module(it) => it.resolve_doc_path(db, &link, ns),
            hir::ModuleDef::Function(it) => it.resolve_doc_path(db, &link, ns),
            hir::ModuleDef::Adt(it) => it.resolve_doc_path(db, &link, ns),
            hir::ModuleDef::Variant(it) => it.resolve_doc_path(db, &link, ns),
            hir::ModuleDef::Const(it) => it.resolve_doc_path(db, &link, ns),
            hir::ModuleDef::Static(it) => it.resolve_doc_path(db, &link, ns),
            hir::ModuleDef::Trait(it) => it.resolve_doc_path(db, &link, ns),
            hir::ModuleDef::TypeAlias(it) => it.resolve_doc_path(db, &link, ns),
            hir::ModuleDef::BuiltinType(_) => None,
        },
        Definition::Macro(it) => it.resolve_doc_path(db, &link, ns),
        Definition::Field(it) => it.resolve_doc_path(db, &link, ns),
        Definition::SelfType(_)
        | Definition::Local(_)
        | Definition::GenericParam(_)
        | Definition::Label(_) => None,
    }
}
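
/// Maps a resolved `ModuleDef` to the `HlTag` used when highlighting an intra-doc
/// link to it.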
fn module_def_to_hl_tag(def: hir::ModuleDef) -> HlTag {
    let symbol = match def {
        hir::ModuleDef::Module(_) => SymbolKind::Module,
        hir::ModuleDef::Function(_) => SymbolKind::Function,
        hir::ModuleDef::Adt(hir::Adt::Struct(_)) => SymbolKind::Struct,
        hir::ModuleDef::Adt(hir::Adt::Enum(_)) => SymbolKind::Enum,
        hir::ModuleDef::Adt(hir::Adt::Union(_)) => SymbolKind::Union,
        hir::ModuleDef::Variant(_) => SymbolKind::Variant,
        hir::ModuleDef::Const(_) => SymbolKind::Const,
        hir::ModuleDef::Static(_) => SymbolKind::Static,
        hir::ModuleDef::Trait(_) => SymbolKind::Trait,
        hir::ModuleDef::TypeAlias(_) => SymbolKind::TypeAlias,
        hir::ModuleDef::BuiltinType(_) => return HlTag::BuiltinType,
    };
    HlTag::Symbol(symbol)
}