From ebfbb314c03cd8e70198eebf5a96ece8a2f79e51 Mon Sep 17 00:00:00 2001
From: Jonas Schievink
Date: Tue, 12 Jul 2022 15:13:04 +0200
Subject: [PATCH] Update 1.63 proc macro ABI to match rustc

---
 .../abis/abi_1_63/proc_macro/bridge/client.rs |   8 --
 .../abis/abi_1_63/proc_macro/bridge/handle.rs |  22 ++-
 .../abis/abi_1_63/proc_macro/bridge/mod.rs    | 136 ++++++++++--------
 .../abis/abi_1_63/proc_macro/bridge/rpc.rs    |  53 ++++---
 .../abis/abi_1_63/proc_macro/bridge/server.rs |  22 +--
 .../src/abis/abi_1_63/proc_macro/mod.rs       |  96 ++++++++++---
 .../src/abis/abi_1_63/rustc_server.rs         |  89 +++++++-----
 7 files changed, 265 insertions(+), 161 deletions(-)

diff --git a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs
index abdd0486c7a..102027d14a9 100644
--- a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs
+++ b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs
@@ -178,8 +178,6 @@ define_handles! {
     'owned:
     FreeFunctions,
     TokenStream,
-    TokenStreamBuilder,
-    TokenStreamIter,
     Group,
     Literal,
     SourceFile,
@@ -204,12 +202,6 @@ impl Clone for TokenStream {
     }
 }
 
-impl Clone for TokenStreamIter {
-    fn clone(&self) -> Self {
-        self.clone()
-    }
-}
-
 impl Clone for Group {
     fn clone(&self) -> Self {
         self.clone()
diff --git a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/handle.rs b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/handle.rs
index 7d6adda48ec..c219a9465d3 100644
--- a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/handle.rs
+++ b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/handle.rs
@@ -1,7 +1,7 @@
 //! Server-side handles and storage for per-handle data.
 
 use std::collections::{BTreeMap, HashMap};
-use std::hash::Hash;
+use std::hash::{BuildHasher, Hash};
 use std::num::NonZeroU32;
 use std::ops::{Index, IndexMut};
 use std::sync::atomic::{AtomicUsize, Ordering};
@@ -51,15 +51,31 @@ impl<T> IndexMut<Handle> for OwnedStore<T> {
     }
 }
 
+// HACK(eddyb) deterministic `std::collections::hash_map::RandomState` replacement
+// that doesn't require adding any dependencies to `proc_macro` (like `rustc-hash`).
+#[derive(Clone)]
+struct NonRandomState;
+
+impl BuildHasher for NonRandomState {
+    type Hasher = std::collections::hash_map::DefaultHasher;
+    #[inline]
+    fn build_hasher(&self) -> Self::Hasher {
+        Self::Hasher::new()
+    }
+}
+
 /// Like `OwnedStore`, but avoids storing any value more than once.
 pub(super) struct InternedStore<T: 'static> {
     owned: OwnedStore<T>,
-    interner: HashMap<T, Handle>,
+    interner: HashMap<T, Handle, NonRandomState>,
 }
 
 impl<T: Copy + Eq + Hash> InternedStore<T> {
     pub(super) fn new(counter: &'static AtomicUsize) -> Self {
-        InternedStore { owned: OwnedStore::new(counter), interner: HashMap::new() }
+        InternedStore {
+            owned: OwnedStore::new(counter),
+            interner: HashMap::with_hasher(NonRandomState),
+        }
     }
 
     pub(super) fn alloc(&mut self, x: T) -> Handle {
diff --git a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/mod.rs b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/mod.rs
index c1d04b1b7a9..4967da4931a 100644
--- a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/mod.rs
+++ b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/mod.rs
@@ -60,7 +60,6 @@ macro_rules! with_api {
             TokenStream {
                 fn drop($self: $S::TokenStream);
                 fn clone($self: &$S::TokenStream) -> $S::TokenStream;
-                fn new() -> $S::TokenStream;
                 fn is_empty($self: &$S::TokenStream) -> bool;
                 fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>;
                 fn from_str(src: &str) -> $S::TokenStream;
@@ -68,25 +67,22 @@ macro_rules! with_api {
                 fn from_token_tree(
                     tree: TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>,
                 ) -> $S::TokenStream;
-                fn into_iter($self: $S::TokenStream) -> $S::TokenStreamIter;
-            },
-            TokenStreamBuilder {
-                fn drop($self: $S::TokenStreamBuilder);
-                fn new() -> $S::TokenStreamBuilder;
-                fn push($self: &mut $S::TokenStreamBuilder, stream: $S::TokenStream);
-                fn build($self: $S::TokenStreamBuilder) -> $S::TokenStream;
-            },
-            TokenStreamIter {
-                fn drop($self: $S::TokenStreamIter);
-                fn clone($self: &$S::TokenStreamIter) -> $S::TokenStreamIter;
-                fn next(
-                    $self: &mut $S::TokenStreamIter,
-                ) -> Option<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>;
+                fn concat_trees(
+                    base: Option<$S::TokenStream>,
+                    trees: Vec<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>,
+                ) -> $S::TokenStream;
+                fn concat_streams(
+                    base: Option<$S::TokenStream>,
+                    streams: Vec<$S::TokenStream>,
+                ) -> $S::TokenStream;
+                fn into_trees(
+                    $self: $S::TokenStream
+                ) -> Vec<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>;
             },
             Group {
                 fn drop($self: $S::Group);
                 fn clone($self: &$S::Group) -> $S::Group;
-                fn new(delimiter: Delimiter, stream: $S::TokenStream) -> $S::Group;
+                fn new(delimiter: Delimiter, stream: Option<$S::TokenStream>) -> $S::Group;
                 fn delimiter($self: &$S::Group) -> Delimiter;
                 fn stream($self: &$S::Group) -> $S::TokenStream;
                 fn span($self: &$S::Group) -> $S::Span;
@@ -311,29 +307,18 @@ impl<'a, T, M> Unmark for &'a mut Marked<T, M> {
     }
 }
 
-impl<T: Mark> Mark for Option<T> {
-    type Unmarked = Option<T::Unmarked>;
+impl<T: Mark> Mark for Vec<T> {
+    type Unmarked = Vec<T::Unmarked>;
     fn mark(unmarked: Self::Unmarked) -> Self {
-        unmarked.map(T::mark)
+        // Should be a no-op due to std's in-place collect optimizations.
+        unmarked.into_iter().map(T::mark).collect()
     }
 }
-impl<T: Unmark> Unmark for Option<T> {
-    type Unmarked = Option<T::Unmarked>;
+impl<T: Unmark> Unmark for Vec<T> {
+    type Unmarked = Vec<T::Unmarked>;
     fn unmark(self) -> Self::Unmarked {
-        self.map(T::unmark)
-    }
-}
-
-impl<T: Mark, E: Mark> Mark for Result<T, E> {
-    type Unmarked = Result<T::Unmarked, E::Unmarked>;
-    fn mark(unmarked: Self::Unmarked) -> Self {
-        unmarked.map(T::mark).map_err(E::mark)
-    }
-}
-impl<T: Unmark, E: Unmark> Unmark for Result<T, E> {
-    type Unmarked = Result<T::Unmarked, E::Unmarked>;
-    fn unmark(self) -> Self::Unmarked {
-        self.map(T::unmark).map_err(E::unmark)
+        // Should be a no-op due to std's in-place collect optimizations.
+        self.into_iter().map(T::unmark).collect()
     }
 }
 
@@ -367,7 +352,6 @@ mark_noop! {
     Level,
     LineColumn,
     Spacing,
-    Bound<usize>,
 }
 
 rpc_encode_decode!(
@@ -394,6 +378,61 @@ rpc_encode_decode!(
     }
 );
 
+macro_rules! mark_compound {
+    (enum $name:ident <$($T:ident),+> { $($variant:ident $(($field:ident))?),* $(,)? }) => {
+        impl<$($T: Mark),+> Mark for $name <$($T),+> {
+            type Unmarked = $name <$($T::Unmarked),+>;
+            fn mark(unmarked: Self::Unmarked) -> Self {
+                match unmarked {
+                    $($name::$variant $(($field))? => {
+                        $name::$variant $((Mark::mark($field)))?
+                    })*
+                }
+            }
+        }
+
+        impl<$($T: Unmark),+> Unmark for $name <$($T),+> {
+            type Unmarked = $name <$($T::Unmarked),+>;
+            fn unmark(self) -> Self::Unmarked {
+                match self {
+                    $($name::$variant $(($field))? => {
+                        $name::$variant $((Unmark::unmark($field)))?
+                    })*
+                }
+            }
+        }
+    }
+}
+
+macro_rules! compound_traits {
+    ($($t:tt)*) => {
+        rpc_encode_decode!($($t)*);
+        mark_compound!($($t)*);
+    };
+}
+
+compound_traits!(
+    enum Bound<T> {
+        Included(x),
+        Excluded(x),
+        Unbounded,
+    }
+);
+
+compound_traits!(
+    enum Option<T> {
+        Some(t),
+        None,
+    }
+);
+
+compound_traits!(
+    enum Result<T, E> {
+        Ok(t),
+        Err(e),
+    }
+);
+
 #[derive(Clone)]
 pub enum TokenTree<G, P, I, L> {
     Group(G),
     Punct(P),
     Ident(I),
     Literal(L),
 }
 
-impl<G: Mark, P: Mark, I: Mark, L: Mark> Mark for TokenTree<G, P, I, L> {
-    type Unmarked = TokenTree<G::Unmarked, P::Unmarked, I::Unmarked, L::Unmarked>;
-    fn mark(unmarked: Self::Unmarked) -> Self {
-        match unmarked {
-            TokenTree::Group(tt) => TokenTree::Group(G::mark(tt)),
-            TokenTree::Punct(tt) => TokenTree::Punct(P::mark(tt)),
-            TokenTree::Ident(tt) => TokenTree::Ident(I::mark(tt)),
-            TokenTree::Literal(tt) => TokenTree::Literal(L::mark(tt)),
-        }
-    }
-}
-impl<G: Unmark, P: Unmark, I: Unmark, L: Unmark> Unmark for TokenTree<G, P, I, L> {
-    type Unmarked = TokenTree<G::Unmarked, P::Unmarked, I::Unmarked, L::Unmarked>;
-    fn unmark(self) -> Self::Unmarked {
-        match self {
-            TokenTree::Group(tt) => TokenTree::Group(tt.unmark()),
-            TokenTree::Punct(tt) => TokenTree::Punct(tt.unmark()),
-            TokenTree::Ident(tt) => TokenTree::Ident(tt.unmark()),
-            TokenTree::Literal(tt) => TokenTree::Literal(tt.unmark()),
-        }
-    }
-}
-
-rpc_encode_decode!(
+compound_traits!(
     enum TokenTree<G, P, I, L> {
         Group(tt),
         Punct(tt),
diff --git a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/rpc.rs b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/rpc.rs
index f79e016400f..e9d7a46c06f 100644
--- a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/rpc.rs
+++ b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/rpc.rs
@@ -4,7 +4,6 @@ use std::any::Any;
 use std::char;
 use std::io::Write;
 use std::num::NonZeroU32;
-use std::ops::Bound;
 use std::str;
 
 pub(super) type Writer = super::buffer::Buffer;
@@ -43,15 +42,17 @@ macro_rules! rpc_encode_decode {
             }
         }
     };
-    (struct $name:ident { $($field:ident),* $(,)? }) => {
-        impl<S> Encode<S> for $name {
+    (struct $name:ident $(<$($T:ident),+>)? { $($field:ident),* $(,)? }) => {
+        impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
             fn encode(self, w: &mut Writer, s: &mut S) {
                 $(self.$field.encode(w, s);)*
             }
         }
 
-        impl<S> DecodeMut<'_, '_, S> for $name {
-            fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+        impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
+            for $name $(<$($T),+>)?
+        {
+            fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
                 $name {
                     $($field: DecodeMut::decode(r, s)),*
                 }
@@ -184,28 +185,6 @@ impl<'a, S, A: for<'s> DecodeMut<'a, 's, S>, B: for<'s> DecodeMut<'a, 's, S>> De
     }
 }
 
-rpc_encode_decode!(
-    enum Bound<T> {
-        Included(x),
-        Excluded(x),
-        Unbounded,
-    }
-);
-
-rpc_encode_decode!(
-    enum Option<T> {
-        None,
-        Some(x),
-    }
-);
-
-rpc_encode_decode!(
-    enum Result<T, E> {
-        Ok(x),
-        Err(e),
-    }
-);
-
 impl<S> Encode<S> for &[u8] {
     fn encode(self, w: &mut Writer, s: &mut S) {
         self.len().encode(w, s);
@@ -246,6 +225,26 @@ impl<S> DecodeMut<'_, '_, S> for String {
     }
 }
 
+impl<S, T: Encode<S>> Encode<S> for Vec<T> {
+    fn encode(self, w: &mut Writer, s: &mut S) {
+        self.len().encode(w, s);
+        for x in self {
+            x.encode(w, s);
+        }
+    }
+}
+
+impl<'a, S, T: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S> for Vec<T> {
+    fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+        let len = usize::decode(r, s);
+        let mut vec = Vec::with_capacity(len);
+        for _ in 0..len {
+            vec.push(T::decode(r, s));
+        }
+        vec
+    }
+}
+
 /// Simplified version of panic payloads, ignoring
 /// types other than `&'static str` and `String`.
 pub enum PanicMessage {
diff --git a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/server.rs b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/server.rs
index 1662683914d..0fb3c698589 100644
--- a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/server.rs
+++ b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/server.rs
@@ -8,8 +8,6 @@ use super::client::HandleStore;
 pub trait Types {
     type FreeFunctions: 'static;
     type TokenStream: 'static + Clone;
-    type TokenStreamBuilder: 'static;
-    type TokenStreamIter: 'static + Clone;
     type Group: 'static + Clone;
     type Punct: 'static + Copy + Eq + Hash;
     type Ident: 'static + Copy + Eq + Hash;
@@ -275,13 +273,17 @@ fn run_server<
 }
 
 impl client::Client {
-    pub fn run<S: Server>(
+    pub fn run<S>(
         &self,
         strategy: &impl ExecutionStrategy,
         server: S,
         input: S::TokenStream,
         force_show_panics: bool,
-    ) -> Result<S::TokenStream, PanicMessage> {
+    ) -> Result<S::TokenStream, PanicMessage>
+    where
+        S: Server,
+        S::TokenStream: Default,
+    {
         let client::Client { get_handle_counters, run, _marker } = *self;
         run_server(
             strategy,
@@ -291,7 +293,7 @@ impl client::Client {
             run,
             force_show_panics,
         )
-        .map(<MarkedTypes<S> as Types>::TokenStream::unmark)
+        .map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
     }
 }
 
@@ -301,14 +303,18 @@ impl
         super::super::TokenStream,
     >
 {
-    pub fn run<S: Server>(
+    pub fn run<S>(
        &self,
         strategy: &impl ExecutionStrategy,
         server: S,
         input: S::TokenStream,
         input2: S::TokenStream,
         force_show_panics: bool,
-    ) -> Result<S::TokenStream, PanicMessage> {
+    ) -> Result<S::TokenStream, PanicMessage>
+    where
+        S: Server,
+        S::TokenStream: Default,
+    {
         let client::Client { get_handle_counters, run, _marker } = *self;
         run_server(
             strategy,
@@ -321,6 +327,6 @@ impl
             run,
             force_show_panics,
         )
-        .map(<MarkedTypes<S> as Types>::TokenStream::unmark)
+        .map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
     }
 }
diff --git a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs
index 8e5451b73cc..c50a16bf4d1 100644
--- a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs
+++ b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs
@@ -47,7 +47,7 @@ pub fn is_available() -> bool {
 /// This is both the input and output of `#[proc_macro]`, `#[proc_macro_attribute]`
 /// and `#[proc_macro_derive]` definitions.
 #[derive(Clone)]
-pub struct TokenStream(bridge::client::TokenStream);
+pub struct TokenStream(Option<bridge::client::TokenStream>);
 
 /// Error returned from `TokenStream::from_str`.
 #[non_exhaustive]
@@ -78,12 +78,12 @@ impl error::Error for ExpandError {}
 impl TokenStream {
     /// Returns an empty `TokenStream` containing no token trees.
     pub fn new() -> TokenStream {
-        TokenStream(bridge::client::TokenStream::new())
+        TokenStream(None)
     }
 
     /// Checks if this `TokenStream` is empty.
     pub fn is_empty(&self) -> bool {
-        self.0.is_empty()
+        self.0.as_ref().map(|h| h.is_empty()).unwrap_or(true)
     }
 
     /// Parses this `TokenStream` as an expression and attempts to expand any
@@ -97,8 +97,9 @@ impl TokenStream {
     /// specific behavior for any error condition, and what conditions are
     /// considered errors, is unspecified and may change in the future.
     pub fn expand_expr(&self) -> Result<TokenStream, ExpandError> {
-        match bridge::client::TokenStream::expand_expr(&self.0) {
-            Ok(stream) => Ok(TokenStream(stream)),
+        let stream = self.0.as_ref().ok_or(ExpandError)?;
+        match bridge::client::TokenStream::expand_expr(stream) {
+            Ok(stream) => Ok(TokenStream(Some(stream))),
             Err(_) => Err(ExpandError),
         }
     }
@@ -115,7 +116,7 @@ impl FromStr for TokenStream {
     type Err = LexError;
 
     fn from_str(src: &str) -> Result<TokenStream, LexError> {
-        Ok(TokenStream(bridge::client::TokenStream::from_str(src)))
+        Ok(TokenStream(Some(bridge::client::TokenStream::from_str(src))))
     }
 }
 
@@ -144,15 +145,64 @@ impl Default for TokenStream {
 
 pub use quote::{quote, quote_span};
 
+fn tree_to_bridge_tree(
+    tree: TokenTree,
+) -> bridge::TokenTree<
+    bridge::client::Group,
+    bridge::client::Punct,
+    bridge::client::Ident,
+    bridge::client::Literal,
+> {
+    match tree {
+        TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0),
+        TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0),
+        TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0),
+        TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0),
+    }
+}
+
 /// Creates a token stream containing a single token tree.
 impl From<TokenTree> for TokenStream {
     fn from(tree: TokenTree) -> TokenStream {
-        TokenStream(bridge::client::TokenStream::from_token_tree(match tree {
-            TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0),
-            TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0),
-            TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0),
-            TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0),
-        }))
+        TokenStream(Some(bridge::client::TokenStream::from_token_tree(tree_to_bridge_tree(tree))))
+    }
+}
+
+/// Non-generic helper for implementing `FromIterator<TokenStream>` and
+/// `Extend<TokenStream>` with less monomorphization in calling crates.
+struct ConcatStreamsHelper {
+    streams: Vec<bridge::client::TokenStream>,
+}
+
+impl ConcatStreamsHelper {
+    fn new(capacity: usize) -> Self {
+        ConcatStreamsHelper { streams: Vec::with_capacity(capacity) }
+    }
+
+    fn push(&mut self, stream: TokenStream) {
+        if let Some(stream) = stream.0 {
+            self.streams.push(stream);
+        }
+    }
+
+    fn build(mut self) -> TokenStream {
+        if self.streams.len() <= 1 {
+            TokenStream(self.streams.pop())
+        } else {
+            TokenStream(Some(bridge::client::TokenStream::concat_streams(None, self.streams)))
+        }
+    }
+
+    fn append_to(mut self, stream: &mut TokenStream) {
+        if self.streams.is_empty() {
+            return;
+        }
+        let base = stream.0.take();
+        if base.is_none() && self.streams.len() == 1 {
+            stream.0 = self.streams.pop();
+        } else {
+            stream.0 = Some(bridge::client::TokenStream::concat_streams(base, self.streams));
+        }
     }
 }
 
@@ -167,9 +217,10 @@ impl iter::FromIterator<TokenTree> for TokenStream {
 /// from multiple token streams into a single stream.
 impl iter::FromIterator<TokenStream> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
-        let mut builder = bridge::client::TokenStreamBuilder::new();
-        streams.into_iter().for_each(|stream| builder.push(stream.0));
-        TokenStream(builder.build())
+        let iter = streams.into_iter();
+        let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);
+        iter.for_each(|stream| builder.push(stream));
+        builder.build()
     }
 }
 
@@ -194,7 +245,16 @@ pub mod token_stream {
     /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
     /// and returns whole groups as token trees.
     #[derive(Clone)]
-    pub struct IntoIter(bridge::client::TokenStreamIter);
+    pub struct IntoIter(
+        std::vec::IntoIter<
+            bridge::TokenTree<
+                bridge::client::Group,
+                bridge::client::Punct,
+                bridge::client::Ident,
+                bridge::client::Literal,
+            >,
+        >,
+    );
 
     impl Iterator for IntoIter {
         type Item = TokenTree;
@@ -214,7 +274,7 @@ pub mod token_stream {
         type IntoIter = IntoIter;
 
         fn into_iter(self) -> IntoIter {
-            IntoIter(self.0.into_iter())
+            IntoIter(self.0.map(|v| v.into_trees()).unwrap_or_default().into_iter())
         }
     }
 }
@@ -560,7 +620,7 @@ impl Group {
     /// Note that the returned token stream does not include the delimiter
    /// returned above.
     pub fn stream(&self) -> TokenStream {
-        TokenStream(self.0.stream())
+        TokenStream(Some(self.0.stream()))
     }
 
     /// Returns the span for the delimiters of this token stream, spanning the
diff --git a/crates/proc-macro-srv/src/abis/abi_1_63/rustc_server.rs b/crates/proc-macro-srv/src/abis/abi_1_63/rustc_server.rs
index 5c596bc0e62..7fd422ca292 100644
--- a/crates/proc-macro-srv/src/abis/abi_1_63/rustc_server.rs
+++ b/crates/proc-macro-srv/src/abis/abi_1_63/rustc_server.rs
@@ -23,14 +23,14 @@ type Spacing = tt::Spacing;
 type Literal = tt::Literal;
 type Span = tt::TokenId;
 
-#[derive(Debug, Clone)]
+#[derive(Debug, Default, Clone)]
 pub struct TokenStream {
     pub token_trees: Vec<TokenTree>,
 }
 
 impl TokenStream {
     pub fn new() -> Self {
-        TokenStream { token_trees: Default::default() }
+        TokenStream::default()
     }
 
     pub fn with_subtree(subtree: tt::Subtree) -> Self {
@@ -276,8 +276,6 @@ pub struct Rustc {
 impl server::Types for Rustc {
     type FreeFunctions = FreeFunctions;
     type TokenStream = TokenStream;
-    type TokenStreamBuilder = TokenStreamBuilder;
-    type TokenStreamIter = TokenStreamIter;
     type Group = Group;
     type Punct = Punct;
     type Ident = IdentId;
@@ -297,10 +295,6 @@ impl server::FreeFunctions for Rustc {
 }
 
 impl server::TokenStream for Rustc {
-    fn new(&mut self) -> Self::TokenStream {
-        Self::TokenStream::new()
-    }
-
     fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
         stream.is_empty()
     }
@@ -344,41 +338,55 @@ impl server::TokenStream for Rustc {
         }
     }
 
-    fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
-        let trees: Vec<TokenTree> = stream.into_iter().collect();
-        TokenStreamIter { trees: trees.into_iter() }
-    }
-
     fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
         Ok(self_.clone())
     }
-}
-
-impl server::TokenStreamBuilder for Rustc {
-    fn new(&mut self) -> Self::TokenStreamBuilder {
-        Self::TokenStreamBuilder::new()
-    }
-    fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
-        builder.push(stream)
-    }
-    fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
+
+    fn concat_trees(
+        &mut self,
+        base: Option<Self::TokenStream>,
+        trees: Vec<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>>,
+    ) -> Self::TokenStream {
+        let mut builder = TokenStreamBuilder::new();
+        if let Some(base) = base {
+            builder.push(base);
+        }
+        for tree in trees {
+            builder.push(self.from_token_tree(tree));
+        }
         builder.build()
     }
-}
-
-impl server::TokenStreamIter for Rustc {
-    fn next(
-        &mut self,
-        iter: &mut Self::TokenStreamIter,
-    ) -> Option<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
-        iter.trees.next().map(|tree| match tree {
-            TokenTree::Subtree(group) => bridge::TokenTree::Group(group),
-            TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
-                bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
-            }
-            TokenTree::Leaf(tt::Leaf::Literal(literal)) => bridge::TokenTree::Literal(literal),
-            TokenTree::Leaf(tt::Leaf::Punct(punct)) => bridge::TokenTree::Punct(punct),
-        })
+
+    fn concat_streams(
+        &mut self,
+        base: Option<Self::TokenStream>,
+        streams: Vec<Self::TokenStream>,
+    ) -> Self::TokenStream {
+        let mut builder = TokenStreamBuilder::new();
+        if let Some(base) = base {
+            builder.push(base);
+        }
+        for stream in streams {
+            builder.push(stream);
+        }
+        builder.build()
+    }
+
+    fn into_trees(
+        &mut self,
+        stream: Self::TokenStream,
+    ) -> Vec<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
+        stream
+            .into_iter()
+            .map(|tree| match tree {
+                tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+                    bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
+                }
+                tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => bridge::TokenTree::Literal(lit),
+                tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => bridge::TokenTree::Punct(punct),
+                tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(subtree),
+            })
+            .collect()
     }
 }
 
@@ -416,8 +424,15 @@ fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
 }
 
 impl server::Group for Rustc {
-    fn new(&mut self, delimiter: bridge::Delimiter, stream: Self::TokenStream) -> Self::Group {
-        Self::Group { delimiter: delim_to_internal(delimiter), token_trees: stream.token_trees }
+    fn new(
+        &mut self,
+        delimiter: bridge::Delimiter,
+        stream: Option<Self::TokenStream>,
+    ) -> Self::Group {
+        Self::Group {
+            delimiter: delim_to_internal(delimiter),
+            token_trees: stream.unwrap_or_default().token_trees,
+        }
     }
     fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter {
         delim_to_external(group.delimiter)