Re-implement syntax fixups

parent 7a8c4c001b
commit f48fa0c6cb
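Context for this commit: while the user is mid-edit, macro inputs are often syntactically broken, so rust-analyzer patches the input with placeholder tokens (the `__ra_fixup` identifier visible in the tests below) before expansion, and records undo info so the placeholders can be stripped from the result afterwards. A self-contained toy of that round trip (simplified stand-in types, not the real `tt::Leaf`/`Subtree`):

```rust
// Toy model of the fixup round trip this commit implements.
#[derive(Debug, PartialEq)]
enum Tok {
    Ident(String),
    Fixup, // stands in for the `__ra_fixup` placeholder
}

fn fixup(mut input: Vec<Tok>) -> Vec<Tok> {
    // e.g. an `if` with no condition gets a placeholder condition appended
    if matches!(input.last(), Some(Tok::Ident(k)) if k == "if") {
        input.push(Tok::Fixup);
    }
    input
}

fn reverse_fixups(tt: &mut Vec<Tok>) {
    tt.retain(|t| *t != Tok::Fixup); // drop every fake token again
}

fn main() {
    let mut tt = fixup(vec![Tok::Ident("if".into())]);
    assert_eq!(tt.len(), 2); // placeholder was appended
    reverse_fixups(&mut tt);
    assert_eq!(tt, vec![Tok::Ident("if".into())]);
}
```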
@@ -34,8 +34,6 @@ impl SyntaxContextId {
     // we need a special value that behaves as the current context.
     pub const SELF_REF: Self =
         SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) });
-    // Used for syntax fixups
-    pub const FAKE: Self = SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 2) });
 
     pub fn is_root(self) -> bool {
         self == Self::ROOT
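The dedicated `FAKE` syntax context is gone: per the `fixup.rs` changes below, fake tokens are now flagged by a span anchored at `FileId(!0)`, with the text range's start carrying an index into the saved originals, but only when the range end is `!0`. A self-contained toy of that encoding (a simplified `Span`, not the real `SpanData`):

```rust
// Toy model of the new fake-span encoding.
#[derive(Copy, Clone)]
struct Span {
    file_id: u32,
    start: u32,
    end: u32,
}

/// A replacement marker: `start` indexes the saved original subtree.
fn replacement_span(original_idx: u32) -> Span {
    Span { file_id: !0, start: original_idx, end: !0 }
}

/// True for spans that merely pad broken syntax and must be dropped again.
fn is_droppable_fake(s: Span) -> bool {
    s.file_id == !0 && s.end != !0
}

fn main() {
    let replacement = replacement_span(3);
    assert!(!is_droppable_fake(replacement)); // kept: points at original 3
    let padding = Span { file_id: !0, start: 0, end: 0 };
    assert!(is_droppable_fake(padding)); // pure filler, filtered out
}
```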
@@ -9,6 +9,7 @@
 use either::Either;
 use limit::Limit;
 use mbe::{syntax_node_to_token_tree, ValueResult};
+use rustc_hash::FxHashSet;
 use syntax::{
     ast::{self, HasAttrs, HasDocComments},
     AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
@@ -20,6 +21,7 @@
     attrs::RawAttrs,
     builtin_attr_macro::pseudo_derive_attr_expansion,
     builtin_fn_macro::EagerExpander,
+    fixup::{self, SyntaxFixupUndoInfo},
     hygiene::{self, SyntaxContextData, Transparency},
     span::{RealSpanMap, SpanMap, SpanMapRef},
     tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
@@ -135,7 +137,7 @@ fn apply_mark(
     fn macro_arg(
         &self,
         id: MacroCallId,
-    ) -> ValueResult<Option<Arc<tt::Subtree>>, Arc<Box<[SyntaxError]>>>;
+    ) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>>;
     /// Fetches the expander for this macro.
     #[salsa::transparent]
     fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
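The `macro_arg` query now returns the fixup undo info alongside the argument subtree, so it can be threaded through to expansion. The caller pattern, condensed from `expand_proc_macro` later in this diff (an excerpt, not standalone code):

```rust
// The undo info travels with the argument and is applied after expansion.
let Some((macro_arg, undo_info)) = db.macro_arg(id).value else {
    // ... error path elided ...
};
// ... expand `macro_arg` ...
fixup::reverse_fixups(&mut tt, &undo_info);
```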
@@ -189,15 +191,33 @@ pub fn expand_speculative(
 ) -> Option<(SyntaxNode, SyntaxToken)> {
     let loc = db.lookup_intern_macro_call(actual_macro_call);
 
-    // Build the subtree and token mapping for the speculative args
-    let _censor = censor_for_macro_input(&loc, speculative_args);
     // we don't leak these spans into any query so its fine to make them absolute
     let span_map = RealSpanMap::absolute(SpanAnchor::DUMMY.file_id);
     let span_map = SpanMapRef::RealSpanMap(&span_map);
-    let mut tt = mbe::syntax_node_to_token_tree(
-        speculative_args,
-        span_map,
-    );
+
+    // Build the subtree and token mapping for the speculative args
+    let (mut tt, undo_info) = match loc.kind {
+        MacroCallKind::FnLike { .. } => {
+            (mbe::syntax_node_to_token_tree(speculative_args, span_map), SyntaxFixupUndoInfo::NONE)
+        }
+        MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
+            let censor = censor_for_macro_input(&loc, speculative_args);
+            let mut fixups = fixup::fixup_syntax(span_map, speculative_args);
+            fixups.append.retain(|it, _| match it {
+                syntax::NodeOrToken::Node(it) => !censor.contains(it),
+                syntax::NodeOrToken::Token(_) => true,
+            });
+            fixups.remove.extend(censor);
+            (
+                mbe::syntax_node_to_token_tree_modified(
+                    speculative_args,
+                    span_map,
+                    fixups.append,
+                    fixups.remove,
+                ),
+                fixups.undo_info,
+            )
+        }
+    };
 
     let attr_arg = match loc.kind {
         MacroCallKind::Attr { invoc_attr_index, .. } => {
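Note how censoring is folded into the fixup machinery here: censored nodes must not receive appended placeholder tokens, and then they simply become one more kind of removal. A runnable toy of that merge (plain strings and std collections stand in for syntax nodes and `FxHashMap`/`FxHashSet`):

```rust
use std::collections::{HashMap, HashSet};

fn main() {
    let censor: HashSet<&str> = ["#[derive(Debug)]"].into();
    let mut append: HashMap<&str, Vec<&str>> =
        [("#[derive(Debug)]", vec!["__ra_fixup"]), ("loop", vec!["{", "}"])].into();

    // Don't append placeholder tokens to nodes that are censored away anyway.
    append.retain(|node, _| !censor.contains(node));
    // Censored nodes are now just part of the `remove` set.
    let mut remove: HashSet<&str> = HashSet::new();
    remove.extend(censor);

    assert!(append.contains_key("loop"));
    assert!(!append.contains_key("#[derive(Debug)]"));
    assert!(remove.contains("#[derive(Debug)]"));
}
```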
@@ -227,7 +247,7 @@ pub fn expand_speculative(
 
     // Do the actual expansion, we need to directly expand the proc macro due to the attribute args
     // Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
-    let speculative_expansion = match loc.def.kind {
+    let mut speculative_expansion = match loc.def.kind {
         MacroDefKind::ProcMacro(expander, ..) => {
             tt.delimiter = tt::Delimiter::UNSPECIFIED;
             let call_site = loc.span(db);
@@ -261,6 +281,7 @@ pub fn expand_speculative(
     };
 
     let expand_to = macro_expand_to(db, actual_macro_call);
+    fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
     let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
 
     let syntax_node = node.syntax_node();
@@ -347,7 +368,9 @@ fn parse_with_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> (Parse<SyntaxN
 fn macro_arg(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-) -> ValueResult<Option<Arc<tt::Subtree>>, Arc<Box<[SyntaxError]>>> {
+    // FIXME: consider the following by putting fixup info into eager call info args
+    // ) -> ValueResult<Option<Arc<(tt::Subtree, SyntaxFixupUndoInfo)>>, Arc<Box<[SyntaxError]>>> {
+) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>> {
     let mismatched_delimiters = |arg: &SyntaxNode| {
         let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
         let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
@@ -375,7 +398,7 @@ fn macro_arg(
             .then(|| loc.eager.as_deref())
             .flatten()
         {
-            ValueResult::ok(Some(arg.clone()))
+            ValueResult::ok(Some((arg.clone(), SyntaxFixupUndoInfo::NONE)))
         } else {
             let (parse, map) = parse_with_map(db, loc.kind.file_id());
             let root = parse.syntax_node();
@@ -404,22 +427,27 @@ fn macro_arg(
             }
             MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(),
         };
-        let censor = censor_for_macro_input(&loc, &syntax);
-        let mut tt = match loc.kind {
+        let (mut tt, undo_info) = match loc.kind {
             MacroCallKind::FnLike { .. } => {
-                mbe::syntax_node_to_token_tree_censored(&syntax, map.as_ref(), censor)
+                (mbe::syntax_node_to_token_tree(&syntax, map.as_ref()), SyntaxFixupUndoInfo::NONE)
             }
             MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
-                // let mut fixups = crate::fixup::fixup_syntax(&syntax);
-                // fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
-                // let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
-                //     &node,
-                //     fixups.token_map,
-                //     fixups.next_id,
-                //     fixups.replace,
-                //     fixups.append,
-                // );
-                mbe::syntax_node_to_token_tree_censored(&syntax, map.as_ref(), censor)
+                let censor = censor_for_macro_input(&loc, &syntax);
+                let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax);
+                fixups.append.retain(|it, _| match it {
+                    syntax::NodeOrToken::Node(it) => !censor.contains(it),
+                    syntax::NodeOrToken::Token(_) => true,
+                });
+                fixups.remove.extend(censor);
+                (
+                    mbe::syntax_node_to_token_tree_modified(
+                        &syntax,
+                        map,
+                        fixups.append,
+                        fixups.remove,
+                    ),
+                    fixups.undo_info,
+                )
             }
         };
 
@@ -430,15 +458,15 @@ fn macro_arg(
 
         if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
             match parse.errors() {
-                [] => ValueResult::ok(Some(Arc::new(tt))),
+                [] => ValueResult::ok(Some((Arc::new(tt), undo_info))),
                 errors => ValueResult::new(
-                    Some(Arc::new(tt)),
+                    Some((Arc::new(tt), undo_info)),
                     // Box::<[_]>::from(res.errors()), not stable yet
                     Arc::new(errors.to_vec().into_boxed_slice()),
                 ),
             }
         } else {
-            ValueResult::ok(Some(Arc::new(tt)))
+            ValueResult::ok(Some((Arc::new(tt), undo_info)))
         }
     }
 }
@@ -447,7 +475,7 @@ fn macro_arg(
 /// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
 /// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
 /// - attributes expect the invoking attribute to be stripped
-fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> Vec<SyntaxNode> {
+fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
     // FIXME: handle `cfg_attr`
     (|| {
         let censor = match loc.kind {
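A runnable toy of the censoring rule described in the doc comment above (reading "up to" as inclusive here; that reading is an assumption for illustration, and string literals stand in for attribute nodes):

```rust
// When the derive at index `invoc` expands, every `#[derive(..)]`
// up to it is censored away from the input it sees.
fn censored<'a>(derive_attrs: &'a [&'a str], invoc: usize) -> &'a [&'a str] {
    &derive_attrs[..=invoc]
}

fn main() {
    let attrs = ["#[derive(Clone)]", "#[derive(Debug)]", "#[derive(Eq)]"];
    // Expanding the `Debug` derive hides `Clone` and `Debug`, keeps `Eq`.
    assert_eq!(censored(&attrs, 1), &attrs[..2]);
}
```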
@@ -574,13 +602,13 @@ fn macro_expand(
             let MacroCallKind::Derive { ast_id, .. } = loc.kind else { unreachable!() };
             let node = ast_id.to_ptr(db).to_node(&root);
 
-            // FIXME: we might need to remove the spans from the input to the derive macro here
+            // FIXME: Use censoring
             let _censor = censor_for_macro_input(&loc, node.syntax());
             expander.expand(db, macro_call_id, &node, map.as_ref())
         }
         _ => {
             let ValueResult { value, err } = db.macro_arg(macro_call_id);
-            let Some(macro_arg) = value else {
+            let Some((macro_arg, undo_info)) = value else {
                 return ExpandResult {
                     value: Arc::new(tt::Subtree {
                         delimiter: tt::Delimiter::UNSPECIFIED,
@@ -608,7 +636,7 @@ fn macro_expand(
                 // As such we just return the input subtree here.
                 MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
                     return ExpandResult {
-                        value: Arc::new(arg.clone()),
+                        value: macro_arg.clone(),
                         err: err.map(|err| {
                             let mut buf = String::new();
                             for err in &**err {
@@ -624,7 +652,11 @@ fn macro_expand(
                 MacroDefKind::BuiltInEager(it, _) => {
                     it.expand(db, macro_call_id, &arg).map_err(Into::into)
                 }
-                MacroDefKind::BuiltInAttr(it, _) => it.expand(db, macro_call_id, &arg),
+                MacroDefKind::BuiltInAttr(it, _) => {
+                    let mut res = it.expand(db, macro_call_id, &arg);
+                    fixup::reverse_fixups(&mut res.value, &undo_info);
+                    res
+                }
                 _ => unreachable!(),
             }
         }
@@ -647,9 +679,8 @@ fn macro_expand(
 }
 
 fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
-    // FIXME: Syntax fix ups
     let loc = db.lookup_intern_macro_call(id);
-    let Some(macro_arg) = db.macro_arg(id).value else {
+    let Some((macro_arg, undo_info)) = db.macro_arg(id).value else {
         return ExpandResult {
             value: Arc::new(tt::Subtree {
                 delimiter: tt::Delimiter::UNSPECIFIED,
@@ -672,7 +703,7 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
     };
 
     let call_site = loc.span(db);
-    let ExpandResult { value: tt, err } = expander.expand(
+    let ExpandResult { value: mut tt, err } = expander.expand(
         db,
         loc.def.krate,
         loc.krate,
@@ -690,6 +721,8 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
         return value;
     }
 
+    fixup::reverse_fixups(&mut tt, &undo_info);
+
     ExpandResult { value: Arc::new(tt), err }
 }

@@ -1,22 +1,24 @@
 //! To make attribute macros work reliably when typing, we need to take care to
 //! fix up syntax errors in the code we're passing to them.
-use std::mem;
 
 use base_db::{
-    span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId},
+    span::{ErasedFileAstId, SpanAnchor, SpanData},
     FileId,
 };
 use la_arena::RawIdx;
-use mbe::TokenMap;
-use rustc_hash::FxHashMap;
+use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::SmallVec;
 use syntax::{
     ast::{self, AstNode, HasLoopBody},
     match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
 };
+use triomphe::Arc;
 use tt::Spacing;
 
-use crate::tt::{Ident, Leaf, Punct, Subtree};
+use crate::{
+    span::SpanMapRef,
+    tt::{Ident, Leaf, Punct, Subtree},
+};
 
 /// The result of calculating fixes for a syntax node -- a bunch of changes
 /// (appending to and replacing nodes), the information that is needed to
@@ -24,14 +26,19 @@
 #[derive(Debug, Default)]
 pub(crate) struct SyntaxFixups {
     pub(crate) append: FxHashMap<SyntaxElement, Vec<Leaf>>,
-    pub(crate) replace: FxHashMap<SyntaxElement, Vec<()>>,
+    pub(crate) remove: FxHashSet<SyntaxNode>,
     pub(crate) undo_info: SyntaxFixupUndoInfo,
 }
 
 /// This is the information needed to reverse the fixups.
-#[derive(Debug, Default, PartialEq, Eq)]
+#[derive(Clone, Debug, Default, PartialEq, Eq)]
 pub struct SyntaxFixupUndoInfo {
-    original: Box<[Subtree]>,
+    // FIXME: ThinArc<[Subtree]>
+    original: Option<Arc<Box<[Subtree]>>>,
 }
 
+impl SyntaxFixupUndoInfo {
+    pub(crate) const NONE: Self = SyntaxFixupUndoInfo { original: None };
+}
+
 // censoring -> just don't convert the node
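The undo info is now optional and cheap to clone: `None` is the common no-fixups case (exposed as `SyntaxFixupUndoInfo::NONE`), and the saved originals sit behind an `Arc` so clones just bump a refcount. A runnable toy of the same shape (std `Arc` and `String` standing in for `triomphe::Arc` and `Subtree`):

```rust
use std::sync::Arc;

#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct UndoInfo {
    original: Option<Arc<Box<[String]>>>,
}

impl UndoInfo {
    pub const NONE: Self = UndoInfo { original: None };
}

fn main() {
    let saved = UndoInfo {
        original: Some(Arc::new(vec!["original".to_owned()].into_boxed_slice())),
    };
    let shared = saved.clone(); // O(1): shares the saved originals
    assert_eq!(saved, shared);
    assert_eq!(UndoInfo::NONE, UndoInfo::default());
}
```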
@@ -39,47 +46,45 @@ pub struct SyntaxFixupUndoInfo {
 // append -> insert a fake node, here we need to assemble some dummy span that we can figure out how
 // to remove later
 
-pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
+pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups {
     let mut append = FxHashMap::<SyntaxElement, _>::default();
-    let mut replace = FxHashMap::<SyntaxElement, _>::default();
+    let mut remove = FxHashSet::<SyntaxNode>::default();
     let mut preorder = node.preorder();
     let mut original = Vec::new();
     let dummy_range = TextRange::empty(TextSize::new(0));
+    // we use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as
+    // the index into the replacement vec but only if the end points to !0
     let dummy_anchor =
-        SpanAnchor { file_id: FileId(!0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)) };
-    let fake_span =
-        SpanData { range: dummy_range, anchor: dummy_anchor, ctx: SyntaxContextId::FAKE };
+        SpanAnchor { file_id: FileId(!0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(!0)) };
+    let fake_span = |range| SpanData {
+        range: dummy_range,
+        anchor: dummy_anchor,
+        ctx: span_map.span_for_range(range).ctx,
+    };
     while let Some(event) = preorder.next() {
         let syntax::WalkEvent::Enter(node) = event else { continue };
 
         let node_range = node.text_range();
+        /*
         if can_handle_error(&node) && has_error_to_handle(&node) {
+            remove.insert(node.clone().into());
             // the node contains an error node, we have to completely replace it by something valid
-            let (original_tree, new_tmap, new_next_id) =
-                mbe::syntax_node_to_token_tree_with_modifications(
-                    &node,
-                    mem::take(&mut token_map),
-                    next_id,
-                    Default::default(),
-                    Default::default(),
-                );
-            token_map = new_tmap;
-            next_id = new_next_id;
+            let original_tree = mbe::syntax_node_to_token_tree(&node, span_map);
             let idx = original.len() as u32;
             original.push(original_tree);
-            let replacement = SyntheticToken {
-                kind: SyntaxKind::IDENT,
+            let replacement = Leaf::Ident(Ident {
                 text: "__ra_fixup".into(),
-                range: node.text_range(),
-                id: SyntheticTokenId(idx),
-            };
-            replace.insert(node.clone().into(), vec![replacement]);
+                span: SpanData {
+                    range: TextRange::new(TextSize::new(idx), TextSize::new(!0)),
+                    anchor: dummy_anchor,
+                    ctx: span_map.span_for_range(node_range).ctx,
+                },
+            });
+            append.insert(node.clone().into(), vec![replacement]);
             preorder.skip_subtree();
             continue;
         }
+        */
 
         // In some other situations, we can fix things by just appending some tokens.
         let end_range = TextRange::empty(node.text_range().end());
         match_ast! {
             match node {
                 ast::FieldExpr(it) => {
@@ -88,7 +93,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                 append.insert(node.clone().into(), vec![
                     Leaf::Ident(Ident {
                         text: "__ra_fixup".into(),
-                        span: fake_span
+                        span: fake_span(node_range),
                     }),
                 ]);
             }
@@ -99,7 +104,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                     Leaf::Punct(Punct {
                         char: ';',
                         spacing: Spacing::Alone,
-                        span: fake_span
+                        span: fake_span(node_range),
                     }),
                 ]);
             }
@@ -110,7 +115,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                     Leaf::Punct(Punct {
                         char: ';',
                         spacing: Spacing::Alone,
-                        span: fake_span
+                        span: fake_span(node_range)
                     }),
                 ]);
             }
@@ -125,7 +130,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                 append.insert(if_token.into(), vec![
                     Leaf::Ident(Ident {
                         text: "__ra_fixup".into(),
-                        span: fake_span
+                        span: fake_span(node_range)
                     }),
                 ]);
             }
@@ -135,12 +140,12 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                     Leaf::Punct(Punct {
                         char: '{',
                         spacing: Spacing::Alone,
-                        span: fake_span
+                        span: fake_span(node_range)
                     }),
                     Leaf::Punct(Punct {
                         char: '}',
                         spacing: Spacing::Alone,
-                        span: fake_span
+                        span: fake_span(node_range)
                     }),
                 ]);
             }
@@ -155,7 +160,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                 append.insert(while_token.into(), vec![
                     Leaf::Ident(Ident {
                         text: "__ra_fixup".into(),
-                        span: fake_span
+                        span: fake_span(node_range)
                     }),
                 ]);
             }
@@ -165,12 +170,12 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                     Leaf::Punct(Punct {
                         char: '{',
                         spacing: Spacing::Alone,
-                        span: fake_span
+                        span: fake_span(node_range)
                     }),
                     Leaf::Punct(Punct {
                         char: '}',
                         spacing: Spacing::Alone,
-                        span: fake_span
+                        span: fake_span(node_range)
                     }),
                 ]);
             }
@@ -182,12 +187,12 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                     Leaf::Punct(Punct {
                         char: '{',
                         spacing: Spacing::Alone,
-                        span: fake_span
+                        span: fake_span(node_range)
                     }),
                     Leaf::Punct(Punct {
                         char: '}',
                         spacing: Spacing::Alone,
-                        span: fake_span
+                        span: fake_span(node_range)
                     }),
                 ]);
             }
@@ -202,7 +207,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                 append.insert(match_token.into(), vec![
                     Leaf::Ident(Ident {
                         text: "__ra_fixup".into(),
-                        span: fake_span
+                        span: fake_span(node_range)
                     }),
                 ]);
             }
@@ -213,12 +218,12 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                     Leaf::Punct(Punct {
                         char: '{',
                         spacing: Spacing::Alone,
-                        span: fake_span
+                        span: fake_span(node_range)
                     }),
                     Leaf::Punct(Punct {
                         char: '}',
                         spacing: Spacing::Alone,
-                        span: fake_span
+                        span: fake_span(node_range)
                     }),
                 ]);
             }
@@ -236,7 +241,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                 ].map(|text|
                     Leaf::Ident(Ident {
                         text: text.into(),
-                        span: fake_span
+                        span: fake_span(node_range)
                     }),
                 );
 
@@ -253,12 +258,12 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                     Leaf::Punct(Punct {
                         char: '{',
                         spacing: Spacing::Alone,
-                        span: fake_span
+                        span: fake_span(node_range)
                     }),
                     Leaf::Punct(Punct {
                         char: '}',
                         spacing: Spacing::Alone,
-                        span: fake_span
+                        span: fake_span(node_range)
                     }),
                 ]);
             }
@@ -267,10 +272,13 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
             }
         }
     }
+    let needs_fixups = !append.is_empty() || !original.is_empty();
     SyntaxFixups {
         append,
-        replace,
-        undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() },
+        remove,
+        undo_info: SyntaxFixupUndoInfo {
+            original: needs_fixups.then(|| Arc::new(original.into_boxed_slice())),
+        },
     }
 }
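Each `match_ast!` arm above appends placeholder tokens at the end of an incomplete node, with spans produced by `fake_span` so they can be deleted again later; the updated test expectations below show the synthetic braces printing as `{ }`. A runnable toy of one such arm (strings stand in for `Leaf::Ident`/`Leaf::Punct` carrying fake spans):

```rust
// An `if` without a condition gets a placeholder condition; one without
// a block gets a synthetic `{` `}` pair.
fn fixup_if(has_condition: bool, has_block: bool) -> Vec<&'static str> {
    let mut append = Vec::new();
    if !has_condition {
        append.push("__ra_fixup"); // Ident with a fake span
    }
    if !has_block {
        append.push("{"); // Punct pair with fake spans
        append.push("}");
    }
    append
}

fn main() {
    assert_eq!(fixup_if(false, true), ["__ra_fixup"]);
    assert_eq!(fixup_if(true, false), ["{", "}"]);
}
```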
@@ -287,42 +295,55 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
 }
 
 pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) {
+    let Some(undo_info) = undo_info.original.as_deref() else { return };
+    let undo_info = &**undo_info;
+    reverse_fixups_(tt, undo_info);
+}
+
+fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
     let tts = std::mem::take(&mut tt.token_trees);
     tt.token_trees = tts
         .into_iter()
+        // delete all fake nodes
         .filter(|tt| match tt {
-            tt::TokenTree::Leaf(leaf) => leaf.span().ctx != SyntaxContextId::FAKE,
-            tt::TokenTree::Subtree(st) => st.delimiter.open.ctx != SyntaxContextId::FAKE,
+            tt::TokenTree::Leaf(leaf) => {
+                let span = leaf.span();
+                span.anchor.file_id != FileId(!0) || span.range.end() == TextSize::new(!0)
+            }
+            tt::TokenTree::Subtree(_) => true,
         })
+        .flat_map(|tt| match tt {
+            tt::TokenTree::Subtree(mut tt) => {
+                reverse_fixups_(&mut tt, undo_info);
+                SmallVec::from_const([tt.into()])
+            }
+            tt::TokenTree::Leaf(leaf) => {
+                if leaf.span().anchor.file_id == FileId(!0) {
+                    let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone();
+                    if original.delimiter.kind == tt::DelimiterKind::Invisible {
+                        original.token_trees.into()
+                    } else {
+                        SmallVec::from_const([original.into()])
+                    }
+                } else {
+                    SmallVec::from_const([leaf.into()])
+                }
+            }
+        })
-        // .flat_map(|tt| match tt {
-        //     tt::TokenTree::Subtree(mut tt) => {
-        //         reverse_fixups(&mut tt, undo_info);
-        //         SmallVec::from_const([tt.into()])
-        //     }
-        //     tt::TokenTree::Leaf(leaf) => {
-        //         if let Some(id) = leaf.span().anchor {
-        //             let original = undo_info.original[id.0 as usize].clone();
-        //             if original.delimiter.kind == tt::DelimiterKind::Invisible {
-        //                 original.token_trees.into()
-        //             } else {
-        //                 SmallVec::from_const([original.into()])
-        //             }
-        //         } else {
-        //             SmallVec::from_const([leaf.into()])
-        //         }
-        //     }
-        // })
         .collect();
 }
 
 #[cfg(test)]
 mod tests {
     use base_db::FileId;
     use expect_test::{expect, Expect};
+    use triomphe::Arc;
 
-    use crate::tt;
-
-    use super::reverse_fixups;
+    use crate::{
+        fixup::reverse_fixups,
+        span::{RealSpanMap, SpanMap},
+        tt,
+    };
 
     // The following three functions are only meant to check partial structural equivalence of
     // `TokenTree`s, see the last assertion in `check()`.
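The reversal above has two cases for fake leaves: padding (anchored at `FileId(!0)` with range end not equal to `!0`) is filtered out, while replacement markers (range end equal to `!0`) are swapped back for the saved original subtree indexed by the range start. A runnable toy of that pass over a flat token list (the real code also recurses into subtrees):

```rust
enum Tok {
    Real(&'static str),
    Padding,                      // a dropped fake token
    Replacement { idx: usize },   // restores `originals[idx]`
}

fn reverse_fixups(tts: Vec<Tok>, originals: &[&'static str]) -> Vec<&'static str> {
    tts.into_iter()
        .filter(|t| !matches!(t, Tok::Padding)) // delete all fake padding
        .map(|t| match t {
            Tok::Real(s) => s,
            Tok::Replacement { idx } => originals[idx], // restore saved node
            Tok::Padding => unreachable!(),
        })
        .collect()
}

fn main() {
    let originals = ["1 +"]; // saved broken subtree
    let tts = vec![
        Tok::Real("let"),
        Tok::Real("x"),
        Tok::Real("="),
        Tok::Replacement { idx: 0 },
        Tok::Padding,
    ];
    assert_eq!(reverse_fixups(tts, &originals), ["let", "x", "=", "1 +"]);
}
```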
@@ -352,13 +373,13 @@ fn check_tt_eq(a: &tt::TokenTree, b: &tt::TokenTree) -> bool {
     #[track_caller]
     fn check(ra_fixture: &str, mut expect: Expect) {
         let parsed = syntax::SourceFile::parse(ra_fixture);
-        let fixups = super::fixup_syntax(&parsed.syntax_node());
-        let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+        let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId(0))));
+        let fixups = super::fixup_syntax(span_map.as_ref(), &parsed.syntax_node());
+        let mut tt = mbe::syntax_node_to_token_tree_modified(
             &parsed.syntax_node(),
-            fixups.token_map,
-            fixups.next_id,
-            fixups.replace,
+            span_map.as_ref(),
             fixups.append,
+            fixups.remove,
         );
 
         let actual = format!("{tt}\n");
@@ -374,14 +395,15 @@ fn check(ra_fixture: &str, mut expect: Expect) {
             parse.syntax_node()
         );
 
-        reverse_fixups(&mut tt, &tmap, &fixups.undo_info);
+        reverse_fixups(&mut tt, &fixups.undo_info);
 
         // the fixed-up + reversed version should be equivalent to the original input
         // modulo token IDs and `Punct`s' spacing.
-        let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
+        let original_as_tt =
+            mbe::syntax_node_to_token_tree(&parsed.syntax_node(), span_map.as_ref());
         assert!(
             check_subtree_eq(&tt, &original_as_tt),
-            "different token tree: {tt:?},\n{original_as_tt:?}"
+            "different token tree:\n{tt:?}\n\n{original_as_tt:?}"
         );
     }
 
@@ -394,7 +416,7 @@ fn foo() {
 }
 "#,
             expect![[r#"
-fn foo () {for _ in __ra_fixup {}}
+fn foo () {for _ in __ra_fixup { }}
 "#]],
         )
     }
@@ -422,7 +444,7 @@ fn foo() {
 }
 "#,
             expect![[r#"
-fn foo () {for bar in qux {}}
+fn foo () {for bar in qux { }}
 "#]],
         )
     }
@@ -453,7 +475,7 @@ fn foo() {
 }
 "#,
             expect![[r#"
-fn foo () {match __ra_fixup {}}
+fn foo () {match __ra_fixup { }}
 "#]],
         )
     }
@@ -485,7 +507,7 @@ fn foo() {
 }
 "#,
             expect![[r#"
-fn foo () {match __ra_fixup {}}
+fn foo () {match __ra_fixup { }}
 "#]],
        )
    }
@@ -600,7 +622,7 @@ fn foo() {
 }
 "#,
             expect![[r#"
-fn foo () {if a {}}
+fn foo () {if a { }}
 "#]],
         )
     }
@@ -614,7 +636,7 @@ fn foo() {
 }
 "#,
             expect![[r#"
-fn foo () {if __ra_fixup {}}
+fn foo () {if __ra_fixup { }}
 "#]],
         )
     }
@@ -628,7 +650,7 @@ fn foo() {
 }
 "#,
             expect![[r#"
-fn foo () {if __ra_fixup {} {}}
+fn foo () {if __ra_fixup {} { }}
 "#]],
         )
     }
@@ -642,7 +664,7 @@ fn foo() {
 }
 "#,
             expect![[r#"
-fn foo () {while __ra_fixup {}}
+fn foo () {while __ra_fixup { }}
 "#]],
         )
     }
@@ -656,7 +678,7 @@ fn foo() {
 }
 "#,
             expect![[r#"
-fn foo () {while foo {}}
+fn foo () {while foo { }}
 "#]],
         )
     }
@@ -683,7 +705,7 @@ fn foo() {
 }
 "#,
             expect![[r#"
-fn foo () {loop {}}
+fn foo () {loop { }}
 "#]],
         )
     }

@@ -20,7 +20,7 @@
 pub mod attrs;
 pub mod span;
 pub mod files;
-// mod fixup;
+mod fixup;
 
 use triomphe::Arc;
 
@@ -42,6 +42,7 @@
     builtin_derive_macro::BuiltinDeriveExpander,
     builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
     db::TokenExpander,
+    fixup::SyntaxFixupUndoInfo,
     mod_path::ModPath,
     proc_macro::ProcMacroExpander,
     span::{ExpansionSpanMap, SpanMap},
@@ -695,8 +696,14 @@ pub fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFileId) -> ExpansionInf
         let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
         let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
 
-        let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
-            Arc::new(tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() })
+        let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
+            (
+                Arc::new(tt::Subtree {
+                    delimiter: tt::Delimiter::UNSPECIFIED,
+                    token_trees: Vec::new(),
+                }),
+                SyntaxFixupUndoInfo::NONE,
+            )
         });
 
         let def = loc.def.ast_id().left().and_then(|id| {
@@ -150,7 +150,7 @@ fn test_unresolved_module_diagnostic() {
             ],
         ),
         main_node: Some(
-            InFile {
+            InFileWrapper {
                 file_id: FileId(
                     0,
                 ),
@@ -36,7 +36,7 @@
     syntax_bridge::{
         map_from_syntax_node, parse_exprs_with_sep, parse_to_token_tree,
         parse_to_token_tree_static_span, syntax_node_to_token_tree,
-        syntax_node_to_token_tree_censored, token_tree_to_syntax_node, SpanMapper,
+        syntax_node_to_token_tree_modified, token_tree_to_syntax_node, SpanMapper,
     },
     token_map::TokenMap,
 };
@@ -1,6 +1,7 @@
 //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
 
-use stdx::non_empty_vec::NonEmptyVec;
+use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::{never, non_empty_vec::NonEmptyVec};
 use syntax::{
     ast::{self, make::tokens::doc_comment},
     AstToken, NodeOrToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
@@ -74,14 +75,15 @@ pub fn syntax_node_to_token_tree<Anchor, Ctx, SpanMap>(
     Ctx: SyntaxContext,
     SpanMap: SpanMapper<SpanData<Anchor, Ctx>>,
 {
-    let mut c = Converter::new(node, vec![], map);
+    let mut c = Converter::new(node, map, Default::default(), Default::default());
     convert_tokens(&mut c)
 }
 
-pub fn syntax_node_to_token_tree_censored<Anchor, Ctx, SpanMap>(
+pub fn syntax_node_to_token_tree_modified<Anchor, Ctx, SpanMap>(
     node: &SyntaxNode,
     map: SpanMap,
-    censored: Vec<SyntaxNode>,
+    append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Anchor, Ctx>>>>,
+    remove: FxHashSet<SyntaxNode>,
 ) -> tt::Subtree<SpanData<Anchor, Ctx>>
 where
     SpanMap: SpanMapper<SpanData<Anchor, Ctx>>,
@@ -89,7 +91,7 @@ pub fn syntax_node_to_token_tree_censored<Anchor, Ctx, SpanMap>(
     Anchor: Copy,
     Ctx: SyntaxContext,
 {
-    let mut c = Converter::new(node, censored, map);
+    let mut c = Converter::new(node, map, append, remove);
     convert_tokens(&mut c)
 }
 
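Where `syntax_node_to_token_tree_censored` took only a list of nodes to skip, `syntax_node_to_token_tree_modified` takes both the `append` map (extra leaves to splice in after a node or token) and the `remove` set (subtrees to drop entirely). Call shape as used by `macro_arg` earlier in this diff (a condensed excerpt, not standalone code):

```rust
let tt = mbe::syntax_node_to_token_tree_modified(
    &syntax,       // the macro input node
    span_map,      // maps text ranges to spans
    fixups.append, // FxHashMap<SyntaxElement, Vec<tt::Leaf<_>>>: leaves to splice in
    fixups.remove, // FxHashSet<SyntaxNode>: subtrees to skip entirely
);
```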
@@ -237,102 +239,105 @@ fn convert_tokens<S, C>(conv: &mut C) -> tt::Subtree<S>
     while let Some((token, abs_range)) = conv.bump() {
         let tt::Subtree { delimiter, token_trees: result } = stack.last_mut();
 
-        let kind = token.kind(conv);
-
-        let tt = match kind {
-            // Desugar doc comments into doc attributes
-            COMMENT => {
-                let span = conv.span_for(abs_range);
-                if let Some(tokens) = conv.convert_doc_comment(&token, span) {
-                    result.extend(tokens);
-                }
-                continue;
-            }
-            _ if kind.is_punct() && kind != UNDERSCORE => {
-                let expected = match delimiter.kind {
-                    tt::DelimiterKind::Parenthesis => Some(T![')']),
-                    tt::DelimiterKind::Brace => Some(T!['}']),
-                    tt::DelimiterKind::Bracket => Some(T![']']),
-                    tt::DelimiterKind::Invisible => None,
-                };
-
-                // Current token is a closing delimiter that we expect, fix up the closing span
-                // and end the subtree here
-                if matches!(expected, Some(expected) if expected == kind) {
-                    if let Some(mut subtree) = stack.pop() {
-                        subtree.delimiter.close = conv.span_for(abs_range);
-                        stack.last_mut().token_trees.push(subtree.into());
-                    }
-                    continue;
-                }
-
-                let delim = match kind {
-                    T!['('] => Some(tt::DelimiterKind::Parenthesis),
-                    T!['{'] => Some(tt::DelimiterKind::Brace),
-                    T!['['] => Some(tt::DelimiterKind::Bracket),
-                    _ => None,
-                };
-
-                // Start a new subtree
-                if let Some(kind) = delim {
-                    let open = conv.span_for(abs_range);
-                    stack.push(tt::Subtree {
-                        delimiter: tt::Delimiter {
-                            open,
-                            // will be overwritten on subtree close above
-                            close: open,
-                            kind,
-                        },
-                        token_trees: vec![],
-                    });
-                    continue;
-                }
-
-                let spacing = match conv.peek().map(|next| next.kind(conv)) {
-                    Some(kind) if is_single_token_op(kind) => tt::Spacing::Joint,
-                    _ => tt::Spacing::Alone,
-                };
-                let Some(char) = token.to_char(conv) else {
-                    panic!("Token from lexer must be single char: token = {token:#?}")
-                };
-                tt::Leaf::from(tt::Punct { char, spacing, span: conv.span_for(abs_range) }).into()
-            }
-            _ => {
-                macro_rules! make_leaf {
-                    ($i:ident) => {
-                        tt::$i { span: conv.span_for(abs_range), text: token.to_text(conv) }.into()
-                    };
-                }
-                let leaf: tt::Leaf<_> = match kind {
-                    T![true] | T![false] => make_leaf!(Ident),
-                    IDENT => make_leaf!(Ident),
-                    UNDERSCORE => make_leaf!(Ident),
-                    k if k.is_keyword() => make_leaf!(Ident),
-                    k if k.is_literal() => make_leaf!(Literal),
-                    LIFETIME_IDENT => {
-                        let apostrophe = tt::Leaf::from(tt::Punct {
-                            char: '\'',
-                            spacing: tt::Spacing::Joint,
-                            span: conv
-                                .span_for(TextRange::at(abs_range.start(), TextSize::of('\''))),
-                        });
-                        result.push(apostrophe.into());
-
-                        let ident = tt::Leaf::from(tt::Ident {
-                            text: SmolStr::new(&token.to_text(conv)[1..]),
-                            span: conv.span_for(TextRange::at(
-                                abs_range.start() + TextSize::of('\''),
-                                abs_range.end(),
-                            )),
-                        });
-                        result.push(ident.into());
-                        continue;
-                    }
-                    _ => continue,
-                };
-
-                leaf.into()
-            }
-        };
+        let tt = match token.as_leaf() {
+            Some(leaf) => tt::TokenTree::Leaf(leaf.clone()),
+            None => match token.kind(conv) {
+                // Desugar doc comments into doc attributes
+                COMMENT => {
+                    let span = conv.span_for(abs_range);
+                    if let Some(tokens) = conv.convert_doc_comment(&token, span) {
+                        result.extend(tokens);
+                    }
+                    continue;
+                }
+                kind if kind.is_punct() && kind != UNDERSCORE => {
+                    let expected = match delimiter.kind {
+                        tt::DelimiterKind::Parenthesis => Some(T![')']),
+                        tt::DelimiterKind::Brace => Some(T!['}']),
+                        tt::DelimiterKind::Bracket => Some(T![']']),
+                        tt::DelimiterKind::Invisible => None,
+                    };
+
+                    // Current token is a closing delimiter that we expect, fix up the closing span
+                    // and end the subtree here
+                    if matches!(expected, Some(expected) if expected == kind) {
+                        if let Some(mut subtree) = stack.pop() {
+                            subtree.delimiter.close = conv.span_for(abs_range);
+                            stack.last_mut().token_trees.push(subtree.into());
+                        }
+                        continue;
+                    }
+
+                    let delim = match kind {
+                        T!['('] => Some(tt::DelimiterKind::Parenthesis),
+                        T!['{'] => Some(tt::DelimiterKind::Brace),
+                        T!['['] => Some(tt::DelimiterKind::Bracket),
+                        _ => None,
+                    };
+
+                    // Start a new subtree
+                    if let Some(kind) = delim {
+                        let open = conv.span_for(abs_range);
+                        stack.push(tt::Subtree {
+                            delimiter: tt::Delimiter {
+                                open,
+                                // will be overwritten on subtree close above
+                                close: open,
+                                kind,
+                            },
+                            token_trees: vec![],
+                        });
+                        continue;
+                    }
+
+                    let spacing = match conv.peek().map(|next| next.kind(conv)) {
+                        Some(kind) if is_single_token_op(kind) => tt::Spacing::Joint,
+                        _ => tt::Spacing::Alone,
+                    };
+                    let Some(char) = token.to_char(conv) else {
+                        panic!("Token from lexer must be single char: token = {token:#?}")
+                    };
+                    tt::Leaf::from(tt::Punct { char, spacing, span: conv.span_for(abs_range) })
+                        .into()
+                }
+                kind => {
+                    macro_rules! make_leaf {
+                        ($i:ident) => {
+                            tt::$i { span: conv.span_for(abs_range), text: token.to_text(conv) }
+                                .into()
+                        };
+                    }
+                    let leaf: tt::Leaf<_> = match kind {
+                        T![true] | T![false] => make_leaf!(Ident),
+                        IDENT => make_leaf!(Ident),
+                        UNDERSCORE => make_leaf!(Ident),
+                        k if k.is_keyword() => make_leaf!(Ident),
+                        k if k.is_literal() => make_leaf!(Literal),
+                        LIFETIME_IDENT => {
+                            let apostrophe = tt::Leaf::from(tt::Punct {
+                                char: '\'',
+                                spacing: tt::Spacing::Joint,
+                                span: conv
+                                    .span_for(TextRange::at(abs_range.start(), TextSize::of('\''))),
+                            });
+                            result.push(apostrophe.into());
+
+                            let ident = tt::Leaf::from(tt::Ident {
+                                text: SmolStr::new(&token.to_text(conv)[1..]),
+                                span: conv.span_for(TextRange::at(
+                                    abs_range.start() + TextSize::of('\''),
+                                    abs_range.end(),
+                                )),
+                            });
+                            result.push(ident.into());
+                            continue;
+                        }
+                        _ => continue,
+                    };
+
+                    leaf.into()
+                }
+            },
+        };
 
         result.push(tt);
@@ -470,16 +475,20 @@ struct StaticRawConverter<'a, S> {
     span: S,
 }
 
-trait SrcToken<Ctx>: std::fmt::Debug {
+trait SrcToken<Ctx, S>: std::fmt::Debug {
     fn kind(&self, ctx: &Ctx) -> SyntaxKind;
 
     fn to_char(&self, ctx: &Ctx) -> Option<char>;
 
     fn to_text(&self, ctx: &Ctx) -> SmolStr;
+
+    fn as_leaf(&self) -> Option<&tt::Leaf<S>> {
+        None
+    }
 }
 
 trait TokenConverter<S>: Sized {
-    type Token: SrcToken<Self>;
+    type Token: SrcToken<Self, S>;
 
     fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option<Vec<tt::TokenTree<S>>>;
 
@@ -490,7 +499,7 @@ trait TokenConverter<S>: Sized {
     fn span_for(&self, range: TextRange) -> S;
 }
 
-impl<Anchor> SrcToken<RawConverter<'_, Anchor>> for usize {
+impl<Anchor, S> SrcToken<RawConverter<'_, Anchor>, S> for usize {
     fn kind(&self, ctx: &RawConverter<'_, Anchor>) -> SyntaxKind {
         ctx.lexed.kind(*self)
     }
@@ -504,7 +513,7 @@ fn to_text(&self, ctx: &RawConverter<'_, Anchor>) -> SmolStr {
     }
 }
 
-impl<S: Span> SrcToken<StaticRawConverter<'_, S>> for usize {
+impl<S: Span> SrcToken<StaticRawConverter<'_, S>, S> for usize {
     fn kind(&self, ctx: &StaticRawConverter<'_, S>) -> SyntaxKind {
         ctx.lexed.kind(*self)
     }
@@ -593,32 +602,79 @@ fn span_for(&self, _: TextRange) -> S {
     }
 }
 
-struct Converter<SpanMap> {
+struct Converter<SpanMap, S> {
     current: Option<SyntaxToken>,
+    current_leafs: Vec<tt::Leaf<S>>,
     preorder: PreorderWithTokens,
     range: TextRange,
     punct_offset: Option<(SyntaxToken, TextSize)>,
     /// Used to make the emitted text ranges in the spans relative to the span anchor.
     map: SpanMap,
-    censored: Vec<SyntaxNode>,
+    append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
+    remove: FxHashSet<SyntaxNode>,
 }
 
-impl<SpanMap> Converter<SpanMap> {
-    fn new(node: &SyntaxNode, censored: Vec<SyntaxNode>, map: SpanMap) -> Self {
-        let range = node.text_range();
-        let mut preorder = node.preorder_with_tokens();
-        let first = Self::next_token(&mut preorder, &censored);
-        Converter { current: first, preorder, range, punct_offset: None, censored, map }
+impl<SpanMap, S> Converter<SpanMap, S> {
+    fn new(
+        node: &SyntaxNode,
+        map: SpanMap,
+        append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
+        remove: FxHashSet<SyntaxNode>,
+    ) -> Self {
+        let mut this = Converter {
+            current: None,
+            preorder: node.preorder_with_tokens(),
+            range: node.text_range(),
+            punct_offset: None,
+            map,
+            append,
+            remove,
+            current_leafs: vec![],
+        };
+        let first = this.next_token();
+        this.current = first;
+        this
     }
 
-    fn next_token(preorder: &mut PreorderWithTokens, censor: &[SyntaxNode]) -> Option<SyntaxToken> {
-        while let Some(ev) = preorder.next() {
+    fn next_token(&mut self) -> Option<SyntaxToken> {
+        // while let Some(ev) = self.preorder.next() {
+        //     match ev {
+        //         WalkEvent::Enter(SyntaxElement::Token(t)) => {
+        //             if let Some(leafs) = self.append.remove(&t.clone().into()) {
+        //                 self.current_leafs.extend(leafs);
+        //             }
+        //             return Some(t);
+        //         }
+        //         WalkEvent::Enter(SyntaxElement::Node(n)) if self.remove.contains(&n) => {
+        //             self.preorder.skip_subtree();
+        //             if let Some(leafs) = self.append.remove(&n.into()) {
+        //                 self.current_leafs.extend(leafs);
+        //             }
+        //         }
+        //         _ => (),
+        //     }
+        // }
+        // None;
+
+        while let Some(ev) = self.preorder.next() {
             match ev {
                 WalkEvent::Enter(SyntaxElement::Token(t)) => return Some(t),
-                WalkEvent::Enter(SyntaxElement::Node(n)) if censor.contains(&n) => {
-                    preorder.skip_subtree()
+                WalkEvent::Enter(SyntaxElement::Node(n)) if self.remove.contains(&n) => {
+                    self.preorder.skip_subtree();
+                    if let Some(mut v) = self.append.remove(&n.into()) {
+                        v.reverse();
+                        self.current_leafs.extend(v);
+                        return None;
+                    }
+                }
+                WalkEvent::Enter(SyntaxElement::Node(_)) => (),
+                WalkEvent::Leave(ele) => {
+                    if let Some(mut v) = self.append.remove(&ele) {
+                        v.reverse();
+                        self.current_leafs.extend(v);
+                        return None;
+                    }
                 }
                 _ => (),
             }
         }
         None
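Why `v.reverse()` before `extend`: `bump` (below) serves pending leaves with `current_leafs.pop()`, i.e. last-in-first-out, so each appended batch is reversed once here to come back out in source order; returning `None` pauses normal traversal until the queue drains. A runnable toy of that order-preserving trick:

```rust
fn main() {
    let batch = vec!["__ra_fixup", ";"]; // appended in source order
    let mut queue: Vec<&str> = Vec::new();
    let mut v = batch.clone();
    v.reverse();
    queue.extend(v); // queue = [";", "__ra_fixup"]

    let mut served = Vec::new();
    while let Some(leaf) = queue.pop() { // pop() takes from the back
        served.push(leaf);
    }
    assert_eq!(served, batch); // original order restored
}
```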
@@ -626,45 +682,62 @@ fn next_token(preorder: &mut PreorderWithTokens, censor: &[SyntaxNode]) -> Optio
 }
 
 #[derive(Debug)]
-enum SynToken {
+enum SynToken<S> {
     Ordinary(SyntaxToken),
-    Punct(SyntaxToken, usize),
+    Punct { token: SyntaxToken, offset: usize },
+    Leaf(tt::Leaf<S>),
 }
 
-impl SynToken {
+impl<S> SynToken<S> {
     fn token(&self) -> &SyntaxToken {
         match self {
-            SynToken::Ordinary(it) | SynToken::Punct(it, _) => it,
+            SynToken::Ordinary(it) | SynToken::Punct { token: it, offset: _ } => it,
+            SynToken::Leaf(_) => unreachable!(),
         }
     }
 }
 
-impl<SpanMap> SrcToken<Converter<SpanMap>> for SynToken {
-    fn kind(&self, ctx: &Converter<SpanMap>) -> SyntaxKind {
+impl<SpanMap, S: std::fmt::Debug> SrcToken<Converter<SpanMap, S>, S> for SynToken<S> {
+    fn kind(&self, ctx: &Converter<SpanMap, S>) -> SyntaxKind {
         match self {
             SynToken::Ordinary(token) => token.kind(),
-            SynToken::Punct(..) => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(),
+            SynToken::Punct { .. } => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(),
+            SynToken::Leaf(_) => {
+                never!();
+                SyntaxKind::ERROR
+            }
         }
     }
-    fn to_char(&self, _ctx: &Converter<SpanMap>) -> Option<char> {
+    fn to_char(&self, _ctx: &Converter<SpanMap, S>) -> Option<char> {
         match self {
             SynToken::Ordinary(_) => None,
-            SynToken::Punct(it, i) => it.text().chars().nth(*i),
+            SynToken::Punct { token: it, offset: i } => it.text().chars().nth(*i),
+            SynToken::Leaf(_) => None,
         }
     }
-    fn to_text(&self, _ctx: &Converter<SpanMap>) -> SmolStr {
+    fn to_text(&self, _ctx: &Converter<SpanMap, S>) -> SmolStr {
         match self {
-            SynToken::Ordinary(token) | SynToken::Punct(token, _) => token.text().into(),
+            SynToken::Ordinary(token) | SynToken::Punct { token, offset: _ } => token.text().into(),
+            SynToken::Leaf(_) => {
+                never!();
+                "".into()
+            }
         }
     }
+    fn as_leaf(&self) -> Option<&tt::Leaf<S>> {
+        match self {
+            SynToken::Ordinary(_) | SynToken::Punct { .. } => None,
+            SynToken::Leaf(it) => Some(it),
+        }
+    }
 }
 
-impl<S, SpanMap> TokenConverter<S> for Converter<SpanMap>
+impl<S, SpanMap> TokenConverter<S> for Converter<SpanMap, S>
 where
     S: Span,
     SpanMap: SpanMapper<S>,
 {
-    type Token = SynToken;
+    type Token = SynToken<S>;
     fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option<Vec<tt::TokenTree<S>>> {
         convert_doc_comment(token.token(), span)
     }
@@ -676,20 +749,31 @@ fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
                 let range = punct.text_range();
                 self.punct_offset = Some((punct.clone(), offset));
                 let range = TextRange::at(range.start() + offset, TextSize::of('.'));
-                return Some((SynToken::Punct(punct, u32::from(offset) as usize), range));
+                return Some((
+                    SynToken::Punct { token: punct, offset: u32::from(offset) as usize },
+                    range,
+                ));
             }
         }
 
+        if let Some(leaf) = self.current_leafs.pop() {
+            if self.current_leafs.is_empty() {
+                self.current = self.next_token();
+            }
+            return Some((SynToken::Leaf(leaf), TextRange::empty(TextSize::new(0))));
+        }
+
         let curr = self.current.clone()?;
         if !self.range.contains_range(curr.text_range()) {
             return None;
         }
-        self.current = Self::next_token(&mut self.preorder, &self.censored);
+        self.current = self.next_token();
         let token = if curr.kind().is_punct() {
             self.punct_offset = Some((curr.clone(), 0.into()));
             let range = curr.text_range();
             let range = TextRange::at(range.start(), TextSize::of('.'));
-            (SynToken::Punct(curr, 0 as usize), range)
+            (SynToken::Punct { token: curr, offset: 0 as usize }, range)
         } else {
             self.punct_offset = None;
             let range = curr.text_range();
@@ -703,7 +787,7 @@ fn peek(&self) -> Option<Self::Token> {
         if let Some((punct, mut offset)) = self.punct_offset.clone() {
             offset += TextSize::of('.');
             if usize::from(offset) < punct.text().len() {
-                return Some(SynToken::Punct(punct, usize::from(offset)));
+                return Some(SynToken::Punct { token: punct, offset: usize::from(offset) });
             }
         }
 
@@ -713,7 +797,7 @@ fn peek(&self) -> Option<Self::Token> {
         }
 
         let token = if curr.kind().is_punct() {
-            SynToken::Punct(curr, 0 as usize)
+            SynToken::Punct { token: curr, offset: 0 as usize }
         } else {
             SynToken::Ordinary(curr)
         };