//! "Recursive" Syntax highlighting for code in doctests and fixtures.
use std::mem;
use either::Either;
use hir::{HasAttrs, Semantics};
use ide_db::call_info::ActiveParameter;
use syntax::{
ast::{self, AstNode, AttrsOwner, DocCommentsOwner},
match_ast, AstToken, NodeOrToken, SyntaxNode, SyntaxToken, TextRange, TextSize,
};
use crate::{Analysis, HlMod, HlRange, HlTag, RootDatabase};
use super::{highlights::Highlights, injector::Injector};
pub(super) fn ra_fixture(
2021-01-09 15:18:49 +03:00
hl: &mut Highlights,
sema: &Semantics<RootDatabase>,
literal: ast::String,
expanded: SyntaxToken,
) -> Option<()> {
let active_parameter = ActiveParameter::at_token(&sema, expanded)?;
if !active_parameter.name.starts_with("ra_fixture") {
return None;
}
let value = literal.value()?;
if let Some(range) = literal.open_quote_text_range() {
2021-01-09 15:18:49 +03:00
hl.add(HlRange { range, highlight: HlTag::StringLiteral.into(), binding_hash: None })
}
2021-01-09 15:54:38 +03:00
let mut inj = Injector::default();
2021-01-09 15:54:38 +03:00
let mut text = &*value;
let mut offset: TextSize = 0.into();
2021-01-09 15:54:38 +03:00
while !text.is_empty() {
2021-01-07 18:21:00 +03:00
let marker = "$0";
2021-01-09 15:54:38 +03:00
let idx = text.find(marker).unwrap_or(text.len());
let (chunk, next) = text.split_at(idx);
inj.add(chunk, TextRange::at(offset, TextSize::of(chunk)));
2021-01-07 18:21:00 +03:00
2021-01-09 15:54:38 +03:00
text = next;
offset += TextSize::of(chunk);
2021-01-07 18:21:00 +03:00
2021-01-09 15:54:38 +03:00
if let Some(next) = text.strip_prefix(marker) {
2021-01-09 16:07:41 +03:00
if let Some(range) = literal.map_range_up(TextRange::at(offset, TextSize::of(marker))) {
hl.add(HlRange { range, highlight: HlTag::Keyword.into(), binding_hash: None });
}
2021-01-09 15:54:38 +03:00
text = next;
2021-01-07 18:21:00 +03:00
2021-01-09 15:54:38 +03:00
let marker_len = TextSize::of(marker);
offset += marker_len;
2021-01-07 18:21:00 +03:00
}
}
2021-01-09 15:54:38 +03:00
let (analysis, tmp_file_id) = Analysis::from_single_file(inj.text().to_string());
for mut hl_range in analysis.highlight(tmp_file_id).unwrap() {
for range in inj.map_range_up(hl_range.range) {
if let Some(range) = literal.map_range_up(range) {
hl_range.range = range;
hl.add(hl_range);
2021-01-07 18:21:00 +03:00
}
}
}
2021-01-09 15:54:38 +03:00
if let Some(range) = literal.close_quote_text_range() {
hl.add(HlRange { range, highlight: HlTag::StringLiteral.into(), binding_hash: None })
}
Some(())
2021-01-07 18:21:00 +03:00
}
/// Markdown code-fence delimiter recognized by rustdoc.
const RUSTDOC_FENCE: &str = "```";
/// Fence info-string tokens that still mark a code block as Rust (and thus a
/// doctest). A bare ``` fence (empty token) defaults to Rust in rustdoc.
const RUSTDOC_FENCE_TOKENS: &[&str] = &[
    "",
    "rust",
    "should_panic",
    "ignore",
    "no_run",
    "compile_fail",
    "edition2015",
    "edition2018",
    "edition2021",
];
// Basically an owned dyn AttrsOwner without extra Boxing
/// Owns a raw `SyntaxNode` so that it can be handed to APIs expecting an
/// `AttrsOwner`, without boxing a trait object.
struct AttrsOwnerNode {
    node: SyntaxNode,
}

impl AttrsOwnerNode {
    /// Wraps any doc-comment-owning AST node by cloning its syntax node.
    fn new<N: DocCommentsOwner>(node: N) -> Self {
        Self { node: node.syntax().clone() }
    }
}

impl AttrsOwner for AttrsOwnerNode {}
impl AstNode for AttrsOwnerNode {
    // This wrapper is never produced by casting, so casting always fails.
    fn can_cast(_: syntax::SyntaxKind) -> bool
    where
        Self: Sized,
    {
        false
    }
    fn cast(_: SyntaxNode) -> Option<Self>
    where
        Self: Sized,
    {
        None
    }
    fn syntax(&self) -> &SyntaxNode {
        &self.node
    }
}
/// Resolves `node` to its HIR definition and returns the node (wrapped as an
/// `AttrsOwnerNode`) together with the definition's resolved attributes.
///
/// Returns `None` for node kinds that are not handled here or whose HIR
/// definition cannot be found.
fn doc_attributes<'node>(
    sema: &Semantics<RootDatabase>,
    node: &'node SyntaxNode,
) -> Option<(AttrsOwnerNode, hir::Attrs)> {
    match_ast! {
        match node {
            ast::SourceFile(it) => sema.to_def(&it).map(|def| (AttrsOwnerNode::new(it), def.attrs(sema.db))),
            ast::Fn(it) => sema.to_def(&it).map(|def| (AttrsOwnerNode::new(it), def.attrs(sema.db))),
            ast::Struct(it) => sema.to_def(&it).map(|def| (AttrsOwnerNode::new(it), def.attrs(sema.db))),
            ast::Union(it) => sema.to_def(&it).map(|def| (AttrsOwnerNode::new(it), def.attrs(sema.db))),
            ast::RecordField(it) => sema.to_def(&it).map(|def| (AttrsOwnerNode::new(it), def.attrs(sema.db))),
            ast::TupleField(it) => sema.to_def(&it).map(|def| (AttrsOwnerNode::new(it), def.attrs(sema.db))),
            ast::Enum(it) => sema.to_def(&it).map(|def| (AttrsOwnerNode::new(it), def.attrs(sema.db))),
            ast::Variant(it) => sema.to_def(&it).map(|def| (AttrsOwnerNode::new(it), def.attrs(sema.db))),
            ast::Trait(it) => sema.to_def(&it).map(|def| (AttrsOwnerNode::new(it), def.attrs(sema.db))),
            ast::Module(it) => sema.to_def(&it).map(|def| (AttrsOwnerNode::new(it), def.attrs(sema.db))),
            ast::Static(it) => sema.to_def(&it).map(|def| (AttrsOwnerNode::new(it), def.attrs(sema.db))),
            ast::Const(it) => sema.to_def(&it).map(|def| (AttrsOwnerNode::new(it), def.attrs(sema.db))),
            ast::TypeAlias(it) => sema.to_def(&it).map(|def| (AttrsOwnerNode::new(it), def.attrs(sema.db))),
            ast::Impl(it) => sema.to_def(&it).map(|def| (AttrsOwnerNode::new(it), def.attrs(sema.db))),
            ast::MacroRules(it) => sema.to_def(&it).map(|def| (AttrsOwnerNode::new(it), def.attrs(sema.db))),
            // ast::MacroDef(it) => sema.to_def(&it).map(|def| (Box::new(it) as _, def.attrs(sema.db))),
            // ast::Use(it) => sema.to_def(&it).map(|def| (Box::new(it) as _, def.attrs(sema.db))),
            _ => return None
        }
    }
}
/// Injection of syntax highlighting of doctests.
pub(super) fn doc_comment(hl: &mut Highlights, sema: &Semantics<RootDatabase>, node: &SyntaxNode) {
let (owner, attributes) = match doc_attributes(sema, node) {
Some(it) => it,
None => return,
};
2021-01-09 17:31:22 +03:00
if attributes.docs().map_or(true, |docs| !String::from(docs).contains(RUSTDOC_FENCE)) {
2021-01-09 17:31:22 +03:00
return;
}
2021-03-17 11:22:40 +01:00
let attrs_source_map = attributes.source_map(&owner);
2021-01-09 17:31:22 +03:00
let mut inj = Injector::default();
2021-01-09 17:31:22 +03:00
inj.add_unmapped("fn doctest() {\n");
let mut is_codeblock = false;
let mut is_doctest = false;
2021-01-09 17:31:22 +03:00
// Replace the original, line-spanning comment ranges by new, only comment-prefix
// spanning comment ranges.
let mut new_comments = Vec::new();
let mut string;
for attr in attributes.by_key("doc").attrs() {
2021-03-17 11:22:40 +01:00
let src = attrs_source_map.source_of(&attr);
let (line, range, prefix) = match &src {
Either::Left(it) => {
string = match find_doc_string_in_attr(attr, it) {
Some(it) => it,
None => continue,
};
let text_range = string.syntax().text_range();
let text_range = TextRange::new(
text_range.start() + TextSize::from(1),
text_range.end() - TextSize::from(1),
);
let text = string.text();
(&text[1..text.len() - 1], text_range, "")
}
Either::Right(comment) => {
(comment.text(), comment.syntax().text_range(), comment.prefix())
}
};
let mut pos = TextSize::from(prefix.len() as u32);
let mut range_start = range.start();
for line in line.split('\n') {
let line_len = TextSize::from(line.len() as u32);
let prev_range_start = {
let next_range_start = range_start + line_len + TextSize::from(1);
mem::replace(&mut range_start, next_range_start)
};
// only first line has the prefix so take it away for future iterations
let mut pos = mem::take(&mut pos);
match line.find(RUSTDOC_FENCE) {
Some(idx) => {
is_codeblock = !is_codeblock;
// Check whether code is rust by inspecting fence guards
let guards = &line[idx + RUSTDOC_FENCE.len()..];
let is_rust =
guards.split(',').all(|sub| RUSTDOC_FENCE_TOKENS.contains(&sub.trim()));
is_doctest = is_codeblock && is_rust;
continue;
}
None if !is_doctest => continue,
None => (),
}
2021-01-09 17:31:22 +03:00
// whitespace after comment is ignored
if let Some(ws) = line[pos.into()..].chars().next().filter(|c| c.is_whitespace()) {
pos += TextSize::of(ws);
}
// lines marked with `#` should be ignored in output, we skip the `#` char
if line[pos.into()..].starts_with('#') {
pos += TextSize::of('#');
}
2021-01-09 17:31:22 +03:00
new_comments.push(TextRange::at(prev_range_start, pos));
inj.add(&line[pos.into()..], TextRange::new(pos, line_len) + prev_range_start);
inj.add_unmapped("\n");
}
}
2021-01-09 17:31:22 +03:00
inj.add_unmapped("\n}");
let (analysis, tmp_file_id) = Analysis::from_single_file(inj.text().to_string());
for h in analysis.with_db(|db| super::highlight(db, tmp_file_id, None, true)).unwrap() {
for r in inj.map_range_up(h.range) {
2021-01-09 17:31:22 +03:00
hl.add(HlRange {
range: r,
2021-01-09 14:44:01 +03:00
highlight: h.highlight | HlMod::Injected,
binding_hash: h.binding_hash,
});
}
}
2021-01-09 17:31:22 +03:00
for range in new_comments {
hl.add(HlRange {
range,
highlight: HlTag::Comment | HlMod::Documentation,
binding_hash: None,
});
}
}
/// Locates the `ast::String` that carries the doc text of an attribute.
///
/// For a direct `#[doc = "..."]` the attribute's literal is used. For
/// `#[cfg_attr(..., doc = "...", ...)]` there is no direct literal, so the
/// attribute's tokens are searched for a string whose unquoted contents match
/// the resolved doc value.
fn find_doc_string_in_attr(attr: &hir::Attr, it: &ast::Attr) -> Option<ast::String> {
    if let Some(lit) = it.literal() {
        // #[doc = lit]
        match lit.kind() {
            ast::LiteralKind::String(s) => Some(s),
            _ => None,
        }
    } else {
        // #[cfg_attr(..., doc = "", ...)]
        // We gotta hunt the string token manually here
        let text = attr.string_value()?;
        // FIXME: We just pick the first string literal that has the same text as the doc attribute
        // This means technically we might highlight the wrong one
        it.syntax()
            .descendants_with_tokens()
            .filter_map(NodeOrToken::into_token)
            .filter_map(ast::String::cast)
            .find(|s| s.text().get(1..s.text().len() - 1).map_or(false, |t| t == text))
    }
}