Auto merge of #16912 - Veykril:span-trait, r=Veykril

internal: Remove span trait
bors 2024-03-21 19:11:12 +00:00
commit 4d442d84d7
11 changed files with 52 additions and 55 deletions
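
A rough, self-contained sketch of the pattern this change applies throughout (made-up function names `dump_spans_old`/`dump_spans_new`; `u32` stands in for a real span type): the blanket `tt::Span` marker trait goes away, and each generic item states only the bounds it actually uses, typically `Copy` or `Copy + fmt::Debug`.

```rust
use std::fmt;

// Before: a marker trait bundled the bounds every span type had to satisfy.
pub trait Span: fmt::Debug + Copy + Sized + Eq {}
impl Span for u32 {} // u32 stands in for a real span type here

// Old style: generic code asked for the whole marker.
fn dump_spans_old<S: Span>(spans: &[S]) {
    for s in spans {
        println!("{s:?}");
    }
}

// New style: the marker is gone; state the minimal bounds actually needed.
fn dump_spans_new<S: Copy + fmt::Debug>(spans: &[S]) {
    for s in spans {
        println!("{s:?}");
    }
}

fn main() {
    let spans = [1u32, 2, 3];
    dump_spans_old(&spans);
    dump_spans_new(&spans);
}
```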

Cargo.lock generated
View File

@ -2115,7 +2115,6 @@ name = "tt"
version = "0.0.0"
dependencies = [
"smol_str",
"span",
"stdx",
"text-size",
]

View File

@ -623,6 +623,10 @@ pub fn remove_crates_except(&mut self, to_keep: &[CrateId]) -> Vec<Option<CrateI
}
id_map
}
pub fn shrink_to_fit(&mut self) {
self.arena.shrink_to_fit();
}
}
impl ops::Index<CrateId> for CrateGraph {
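
The new `shrink_to_fit` pairs with the call added to `ws_to_crate_graph` later in this diff. A rough usage sketch with a hypothetical `CrateGraphSketch` stand-in (the real `CrateGraph` stores crate data in an arena):

```rust
// Hypothetical stand-in for the real CrateGraph: crates kept in an arena-like Vec.
struct CrateGraphSketch {
    arena: Vec<String>, // placeholder for the real arena of crate data
}

impl CrateGraphSketch {
    // Same intent as the method added in this hunk: release excess capacity
    // once the graph has stopped growing.
    fn shrink_to_fit(&mut self) {
        self.arena.shrink_to_fit();
    }
}

fn main() {
    let mut graph = CrateGraphSketch { arena: Vec::with_capacity(1024) };
    graph.arena.push("core".to_owned());
    // Called once, at the end of workspace loading (see the ws_to_crate_graph hunk below).
    graph.shrink_to_fit();
    println!("capacity after shrink: {}", graph.arena.capacity());
}
```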

View File

@ -1,5 +1,7 @@
//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
use std::fmt;
use rustc_hash::{FxHashMap, FxHashSet};
use span::{SpanAnchor, SpanData, SpanMap};
use stdx::{never, non_empty_vec::NonEmptyVec};
@ -9,30 +11,27 @@
SyntaxKind::*,
SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T,
};
use tt::{
buffer::{Cursor, TokenBuffer},
Span,
};
use tt::buffer::{Cursor, TokenBuffer};
use crate::{to_parser_input::to_parser_input, tt_iter::TtIter};
#[cfg(test)]
mod tests;
pub trait SpanMapper<S: Span> {
pub trait SpanMapper<S> {
fn span_for(&self, range: TextRange) -> S;
}
impl<S> SpanMapper<SpanData<S>> for SpanMap<S>
where
SpanData<S>: Span,
SpanData<S>: Copy,
{
fn span_for(&self, range: TextRange) -> SpanData<S> {
self.span_at(range.start())
}
}
impl<S: Span, SM: SpanMapper<S>> SpanMapper<S> for &SM {
impl<S: Copy, SM: SpanMapper<S>> SpanMapper<S> for &SM {
fn span_for(&self, range: TextRange) -> S {
SM::span_for(self, range)
}
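
With the `S: Span` bound gone from `SpanMapper`, an implementor only needs whatever its own `span_for` requires. A small sketch under that assumption; `FixedSpan` is a made-up implementor, the trait is restated locally, and only `text_size::TextRange` is taken from the real signature:

```rust
use text_size::TextRange;

// The trait as it reads after this change: no bound on `S` at all.
pub trait SpanMapper<S> {
    fn span_for(&self, range: TextRange) -> S;
}

// Hypothetical implementor that hands out one fixed span for every range;
// `Copy` is needed only because the span is returned by value.
struct FixedSpan<S>(S);

impl<S: Copy> SpanMapper<S> for FixedSpan<S> {
    fn span_for(&self, _range: TextRange) -> S {
        self.0
    }
}
```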
@ -78,8 +77,7 @@ pub fn syntax_node_to_token_tree<Ctx, SpanMap>(
span: SpanData<Ctx>,
) -> tt::Subtree<SpanData<Ctx>>
where
SpanData<Ctx>: Span,
Ctx: Copy,
SpanData<Ctx>: Copy + fmt::Debug,
SpanMap: SpanMapper<SpanData<Ctx>>,
{
let mut c = Converter::new(node, map, Default::default(), Default::default(), span);
@ -98,8 +96,7 @@ pub fn syntax_node_to_token_tree_modified<Ctx, SpanMap>(
) -> tt::Subtree<SpanData<Ctx>>
where
SpanMap: SpanMapper<SpanData<Ctx>>,
SpanData<Ctx>: Span,
Ctx: Copy,
SpanData<Ctx>: Copy + fmt::Debug,
{
let mut c = Converter::new(node, map, append, remove, call_site);
convert_tokens(&mut c)
@ -124,8 +121,7 @@ pub fn token_tree_to_syntax_node<Ctx>(
entry_point: parser::TopEntryPoint,
) -> (Parse<SyntaxNode>, SpanMap<Ctx>)
where
SpanData<Ctx>: Span,
Ctx: Copy,
SpanData<Ctx>: Copy + fmt::Debug,
{
let buffer = match tt {
tt::Subtree {
@ -161,7 +157,7 @@ pub fn parse_to_token_tree<Ctx>(
text: &str,
) -> Option<tt::Subtree<SpanData<Ctx>>>
where
SpanData<Ctx>: Span,
SpanData<Ctx>: Copy + fmt::Debug,
Ctx: Copy,
{
let lexed = parser::LexedStr::new(text);
@ -175,7 +171,7 @@ pub fn parse_to_token_tree<Ctx>(
/// Convert a string to a `TokenTree`. The passed span will be used for all spans of the produced subtree.
pub fn parse_to_token_tree_static_span<S>(span: S, text: &str) -> Option<tt::Subtree<S>>
where
S: Span,
S: Copy + fmt::Debug,
{
let lexed = parser::LexedStr::new(text);
if lexed.errors().next().is_some() {
@ -186,11 +182,10 @@ pub fn parse_to_token_tree_static_span<S>(span: S, text: &str) -> Option<tt::Sub
}
/// Split token tree with separate expr: $($e:expr)SEP*
pub fn parse_exprs_with_sep<S: Span>(
tt: &tt::Subtree<S>,
sep: char,
span: S,
) -> Vec<tt::Subtree<S>> {
pub fn parse_exprs_with_sep<S>(tt: &tt::Subtree<S>, sep: char, span: S) -> Vec<tt::Subtree<S>>
where
S: Copy + fmt::Debug,
{
if tt.token_trees.is_empty() {
return Vec::new();
}
@ -226,7 +221,8 @@ pub fn parse_exprs_with_sep<S: Span>(
fn convert_tokens<S, C>(conv: &mut C) -> tt::Subtree<S>
where
C: TokenConverter<S>,
S: Span,
S: Copy + fmt::Debug,
C::Token: fmt::Debug,
{
let entry = tt::SubtreeBuilder {
delimiter: tt::Delimiter::invisible_spanned(conv.call_site()),
@ -485,7 +481,7 @@ struct StaticRawConverter<'a, S> {
span: S,
}
trait SrcToken<Ctx, S>: std::fmt::Debug {
trait SrcToken<Ctx, S> {
fn kind(&self, ctx: &Ctx) -> SyntaxKind;
fn to_char(&self, ctx: &Ctx) -> Option<char>;
@ -525,7 +521,7 @@ fn to_text(&self, ctx: &RawConverter<'_, Ctx>) -> SmolStr {
}
}
impl<S: Span> SrcToken<StaticRawConverter<'_, S>, S> for usize {
impl<S: Copy> SrcToken<StaticRawConverter<'_, S>, S> for usize {
fn kind(&self, ctx: &StaticRawConverter<'_, S>) -> SyntaxKind {
ctx.lexed.kind(*self)
}
@ -541,7 +537,7 @@ fn to_text(&self, ctx: &StaticRawConverter<'_, S>) -> SmolStr {
impl<Ctx: Copy> TokenConverter<SpanData<Ctx>> for RawConverter<'_, Ctx>
where
SpanData<Ctx>: Span,
SpanData<Ctx>: Copy,
{
type Token = usize;
@ -584,7 +580,7 @@ fn call_site(&self) -> SpanData<Ctx> {
impl<S> TokenConverter<S> for StaticRawConverter<'_, S>
where
S: Span,
S: Copy,
{
type Token = usize;
@ -709,7 +705,7 @@ fn token(&self) -> &SyntaxToken {
}
}
impl<SpanMap, S: std::fmt::Debug> SrcToken<Converter<SpanMap, S>, S> for SynToken<S> {
impl<SpanMap, S> SrcToken<Converter<SpanMap, S>, S> for SynToken<S> {
fn kind(&self, _ctx: &Converter<SpanMap, S>) -> SyntaxKind {
match self {
SynToken::Ordinary(token) => token.kind(),
@ -748,7 +744,7 @@ fn as_leaf(&self) -> Option<&tt::Leaf<S>> {
impl<S, SpanMap> TokenConverter<S> for Converter<SpanMap, S>
where
S: Span,
S: Copy,
SpanMap: SpanMapper<S>,
{
type Token = SynToken<S>;
@ -828,7 +824,7 @@ fn call_site(&self) -> S {
struct TtTreeSink<'a, Ctx>
where
SpanData<Ctx>: Span,
SpanData<Ctx>: Copy,
{
buf: String,
cursor: Cursor<'a, SpanData<Ctx>>,
@ -839,7 +835,7 @@ struct TtTreeSink<'a, Ctx>
impl<'a, Ctx> TtTreeSink<'a, Ctx>
where
SpanData<Ctx>: Span,
SpanData<Ctx>: Copy,
{
fn new(cursor: Cursor<'a, SpanData<Ctx>>) -> Self {
TtTreeSink {
@ -871,7 +867,7 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> {
impl<Ctx> TtTreeSink<'_, Ctx>
where
SpanData<Ctx>: Span,
SpanData<Ctx>: Copy,
{
/// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween.
/// This occurs when a float literal is used as a field access.
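
The struct-level where clauses in this file (`TtTreeSink`, shown above) now ask only for `SpanData<Ctx>: Copy`. A compilable sketch of that shape with stand-in names (`SpanDataSketch`, `SinkSketch`); the extra `fmt::Debug` on the impl mirrors the free functions earlier in this file rather than `TtTreeSink` itself:

```rust
use std::fmt;

// Stand-in for `span::SpanData<Ctx>`.
#[derive(Clone, Copy, Debug)]
struct SpanDataSketch<Ctx>(Ctx);

// The struct definition states only the bound it needs: `Copy`.
struct SinkSketch<Ctx>
where
    SpanDataSketch<Ctx>: Copy,
{
    buf: String,
    current: SpanDataSketch<Ctx>,
}

// Code that actually formats spans adds `fmt::Debug` on top.
impl<Ctx> SinkSketch<Ctx>
where
    SpanDataSketch<Ctx>: Copy + fmt::Debug,
{
    fn note_span(&mut self) {
        self.buf.push_str(&format!("{:?}", self.current));
    }
}

fn main() {
    let mut sink = SinkSketch { buf: String::new(), current: SpanDataSketch(0u32) };
    sink.note_span();
    assert_eq!(sink.buf, "SpanDataSketch(0)");
}
```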

View File

@ -1,11 +1,13 @@
//! Convert macro-by-example tokens which are specific to macro expansion into a
//! format that works for our parser.
use std::fmt;
use syntax::{SyntaxKind, SyntaxKind::*, T};
use tt::{buffer::TokenBuffer, Span};
use tt::buffer::TokenBuffer;
pub(crate) fn to_parser_input<S: Span>(buffer: &TokenBuffer<'_, S>) -> parser::Input {
pub(crate) fn to_parser_input<S: Copy + fmt::Debug>(buffer: &TokenBuffer<'_, S>) -> parser::Input {
let mut res = parser::Input::default();
let mut current = buffer.begin();

View File

@ -1,9 +1,10 @@
//! A "Parser" structure for token trees. We use this when parsing a declarative
//! macro definition into a list of patterns and templates.
use core::fmt;
use smallvec::{smallvec, SmallVec};
use syntax::SyntaxKind;
use tt::Span;
use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult};
@ -12,7 +13,7 @@ pub(crate) struct TtIter<'a, S> {
pub(crate) inner: std::slice::Iter<'a, tt::TokenTree<S>>,
}
impl<'a, S: Span> TtIter<'a, S> {
impl<'a, S: Copy> TtIter<'a, S> {
pub(crate) fn new(subtree: &'a tt::Subtree<S>) -> TtIter<'a, S> {
TtIter { inner: subtree.token_trees.iter() }
}
@ -130,7 +131,12 @@ pub(crate) fn expect_single_punct(&mut self) -> Result<&'a tt::Punct<S>, ()> {
_ => Ok(smallvec![first]),
}
}
pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree<S>> {
self.inner.as_slice().get(n)
}
}
impl<'a, S: Copy + fmt::Debug> TtIter<'a, S> {
pub(crate) fn expect_fragment(
&mut self,
entry_point: parser::PrefixEntryPoint,
@ -185,10 +191,6 @@ pub(crate) fn expect_fragment(
};
ExpandResult { value: res, err }
}
pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree<S>> {
self.inner.as_slice().get(n)
}
}
impl<'a, S> Iterator for TtIter<'a, S> {
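
The `TtIter` impl is now split by bound: `peek_n` and the other token-shape helpers need only `Copy`, while `expect_fragment` moves into a separate `Copy + fmt::Debug` block. A self-contained sketch of that split with made-up names (`IterSketch`, `describe_next`):

```rust
use std::fmt;

struct IterSketch<'a, S> {
    inner: std::slice::Iter<'a, S>,
}

// Methods that never format a span only ask for `Copy`.
impl<'a, S: Copy> IterSketch<'a, S> {
    fn peek_n(&self, n: usize) -> Option<&'a S> {
        self.inner.as_slice().get(n)
    }
}

// Methods that produce diagnostics additionally ask for `fmt::Debug`.
impl<'a, S: Copy + fmt::Debug> IterSketch<'a, S> {
    fn describe_next(&self) -> Option<String> {
        self.inner.as_slice().first().map(|s| format!("{s:?}"))
    }
}

fn main() {
    let spans = [10u32, 20, 30];
    let iter = IterSketch { inner: spans.iter() };
    assert_eq!(iter.peek_n(1), Some(&20));
    assert_eq!(iter.describe_next().as_deref(), Some("10"));
}
```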

View File

@ -88,8 +88,6 @@ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
}
}
impl tt::Span for TokenId {}
#[derive(Serialize, Deserialize, Debug)]
pub struct FlatTree {
subtree: Vec<u32>,

View File

@ -52,7 +52,7 @@
// see `build.rs`
include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
trait ProcMacroSrvSpan: tt::Span {
trait ProcMacroSrvSpan: Copy {
type Server: proc_macro::bridge::server::Server<TokenStream = TokenStream<Self>>;
fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server;
}

View File

@ -101,6 +101,8 @@ pub(super) struct TokenStreamBuilder<S> {
/// pub(super)lic implementation details for the `TokenStream` type, such as iterators.
pub(super) mod token_stream {
use core::fmt;
use super::{TokenStream, TokenTree};
/// An iterator over `TokenStream`'s `TokenTree`s.
@ -122,7 +124,7 @@ fn into_iter(self) -> Self::IntoIter {
///
/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
/// change these errors into `LexError`s later.
impl<S: tt::Span> TokenStream<S> {
impl<S: Copy + fmt::Debug> TokenStream<S> {
pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, String> {
let subtree =
mbe::parse_to_token_tree_static_span(call_site, src).ok_or("lexing error")?;

View File

@ -734,6 +734,8 @@ pub fn ws_to_crate_graph(
});
proc_macro_paths.push(crate_proc_macros);
}
crate_graph.shrink_to_fit();
proc_macro_paths.shrink_to_fit();
(crate_graph, proc_macro_paths, layouts, toolchains)
}

View File

@ -17,8 +17,5 @@ text-size.workspace = true
stdx.workspace = true
# FIXME: Remove this dependency once the `Span` trait is gone (that is once Span::DUMMY has been removed)
span.workspace = true
[lints]
workspace = true

View File

@ -11,18 +11,13 @@
pub use smol_str::SmolStr;
pub use text_size::{TextRange, TextSize};
pub trait Span: std::fmt::Debug + Copy + Sized + Eq {}
impl<Ctx> Span for span::SpanData<Ctx> where span::SpanData<Ctx>: std::fmt::Debug + Copy + Sized + Eq
{}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum TokenTree<S> {
Leaf(Leaf<S>),
Subtree(Subtree<S>),
}
impl_from!(Leaf<S>, Subtree<S> for TokenTree);
impl<S: Span> TokenTree<S> {
impl<S: Copy> TokenTree<S> {
pub fn empty(span: S) -> Self {
Self::Subtree(Subtree {
delimiter: Delimiter::invisible_spanned(span),
@ -72,7 +67,7 @@ pub struct Subtree<S> {
pub token_trees: Box<[TokenTree<S>]>,
}
impl<S: Span> Subtree<S> {
impl<S: Copy> Subtree<S> {
pub fn empty(span: DelimSpan<S>) -> Self {
Subtree { delimiter: Delimiter::invisible_delim_spanned(span), token_trees: Box::new([]) }
}
@ -114,7 +109,7 @@ pub struct Delimiter<S> {
pub kind: DelimiterKind,
}
impl<S: Span> Delimiter<S> {
impl<S: Copy> Delimiter<S> {
pub const fn invisible_spanned(span: S) -> Self {
Delimiter { open: span, close: span, kind: DelimiterKind::Invisible }
}
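
Downstream, any `Copy` type can now serve as the span parameter of the `tt` data structures; the `Eq` requirement from the old marker trait is gone. A hedged sketch assuming the workspace's `tt` crate is in scope, with a made-up `FileOffsetSpan` type:

```rust
// Made-up span type: note it derives only Clone + Copy + Debug, not Eq.
#[derive(Clone, Copy, Debug)]
struct FileOffsetSpan {
    start: u32,
    end: u32,
}

// `TokenTree::empty` and `Delimiter::invisible_spanned` now only demand
// `S: Copy` (see the impl blocks above).
fn empty_tree(span: FileOffsetSpan) -> tt::TokenTree<FileOffsetSpan> {
    tt::TokenTree::empty(span)
}

fn main() {
    let span = FileOffsetSpan { start: 0, end: 0 };
    let tree = empty_tree(span);
    println!("{tree:?}");
}
```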