10066: internal: improve compile times a bit r=matklad a=matklad

I wanted to *quickly* remove `smol_str = {features = "serde"}`, and figured out that the simplest way to do that is to replace our straightforward proc macro serialization with something significantly more obscure. 

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
This commit is contained in:
bors[bot] 2021-08-28 20:38:58 +00:00 committed by GitHub
commit 10e9408d38
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 379 additions and 257 deletions

7
Cargo.lock generated
View File

@ -1064,9 +1064,6 @@ checksum = "acbf547ad0c65e31259204bd90935776d1c693cec2f4ff7abb7a1bbbd40dfe58"
[[package]]
name = "paths"
version = "0.0.0"
dependencies = [
"serde",
]
[[package]]
name = "percent-encoding"
@ -1505,9 +1502,6 @@ name = "smol_str"
version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b203e79e90905594272c1c97c7af701533d42adaab0beb3859018e477d54a3b0"
dependencies = [
"serde",
]
[[package]]
name = "snap"
@ -1574,7 +1568,6 @@ dependencies = [
"rowan",
"rustc-ap-rustc_lexer",
"rustc-hash",
"serde",
"smol_str",
"sourcegen",
"stdx",

View File

@ -9,4 +9,7 @@ edition = "2018"
doctest = false
[dependencies]
serde = "1"
# Adding this dep sadly puts a lot of rust-analyzer crates after the
# serde-derive crate. Even though we don't activate the derive feature here,
# someone else in the crate graph certainly does!
# serde = "1"

View File

@ -66,27 +66,6 @@ fn eq(&self, other: &AbsPath) -> bool {
}
}
impl serde::Serialize for AbsPathBuf {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
self.0.serialize(serializer)
}
}
impl<'de> serde::Deserialize<'de> for AbsPathBuf {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let path = PathBuf::deserialize(deserializer)?;
AbsPathBuf::try_from(path).map_err(|path| {
serde::de::Error::custom(format!("expected absolute path, got {}", path.display()))
})
}
}
impl AbsPathBuf {
/// Wrap the given absolute path in `AbsPathBuf`
///

View File

@ -21,7 +21,9 @@
use crate::process::ProcMacroProcessSrv;
pub use rpc::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask, ProcMacroKind};
pub use rpc::{
flat::FlatTree, ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask, ProcMacroKind,
};
pub use version::{read_dylib_info, RustCInfo};
#[derive(Debug, Clone)]
@ -58,10 +60,10 @@ pub fn expand(
env: Vec<(String, String)>,
) -> Result<Subtree, tt::ExpansionError> {
let task = ExpansionTask {
macro_body: subtree.clone(),
macro_body: FlatTree::new(subtree),
macro_name: self.name.to_string(),
attributes: attr.cloned(),
lib: self.dylib_path.to_path_buf(),
attributes: attr.map(FlatTree::new),
lib: self.dylib_path.to_path_buf().into(),
env,
};
@ -70,7 +72,7 @@ pub fn expand(
.lock()
.unwrap_or_else(|e| e.into_inner())
.send_task(msg::Request::ExpansionMacro(task))?;
Ok(result.expansion)
Ok(result.expansion.to_subtree())
}
}

View File

@ -12,13 +12,13 @@
ExpansionResult, ExpansionTask,
};
#[derive(Debug, Serialize, Deserialize, Clone)]
#[derive(Debug, Serialize, Deserialize)]
pub enum Request {
ListMacro(ListMacrosTask),
ExpansionMacro(ExpansionTask),
}
#[derive(Debug, Serialize, Deserialize, Clone)]
#[derive(Debug, Serialize, Deserialize)]
pub enum Response {
Error(ResponseError),
ListMacro(ListMacrosResult),

View File

@ -39,7 +39,7 @@ pub(crate) fn find_proc_macros(
&mut self,
dylib_path: &AbsPath,
) -> Result<Vec<(String, ProcMacroKind)>, tt::ExpansionError> {
let task = ListMacrosTask { lib: dylib_path.to_path_buf() };
let task = ListMacrosTask { lib: dylib_path.to_path_buf().into() };
let result: ListMacrosResult = self.send_task(Request::ListMacro(task))?;
Ok(result.macros)

View File

@ -5,17 +5,17 @@
//! Although adding `Serialize` and `Deserialize` traits to `tt` directly seems
//! to be much easier, we deliberately duplicate `tt` structs with `#[serde(with = "XXDef")]`
//! for separation of code responsibility.
pub(crate) mod flat;
use std::path::PathBuf;
use paths::AbsPathBuf;
use serde::{Deserialize, Serialize};
use tt::{
Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, SmolStr, Spacing, Subtree, TokenId,
TokenTree,
};
use crate::rpc::flat::FlatTree;
#[derive(Clone, Eq, PartialEq, Debug, Serialize, Deserialize)]
pub struct ListMacrosTask {
pub lib: AbsPathBuf,
pub lib: PathBuf,
}
#[derive(Copy, Clone, Eq, PartialEq, Debug, Serialize, Deserialize)]
@ -30,14 +30,13 @@ pub struct ListMacrosResult {
pub macros: Vec<(String, ProcMacroKind)>,
}
#[derive(Clone, Eq, PartialEq, Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize)]
pub struct ExpansionTask {
/// Argument of macro call.
///
/// In custom derive this will be a struct or enum; in attribute-like macro - underlying
/// item; in function-like macro - the macro body.
#[serde(with = "SubtreeDef")]
pub macro_body: Subtree,
pub macro_body: FlatTree,
/// Name of macro to expand.
///
@ -46,208 +45,23 @@ pub struct ExpansionTask {
pub macro_name: String,
/// Possible attributes for the attribute-like macros.
#[serde(with = "opt_subtree_def")]
pub attributes: Option<Subtree>,
pub attributes: Option<FlatTree>,
pub lib: AbsPathBuf,
pub lib: PathBuf,
/// Environment variables to set during macro expansion.
pub env: Vec<(String, String)>,
}
#[derive(Clone, Eq, PartialEq, Debug, Default, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize)]
pub struct ExpansionResult {
#[serde(with = "SubtreeDef")]
pub expansion: Subtree,
}
#[derive(Serialize, Deserialize)]
#[serde(remote = "DelimiterKind")]
enum DelimiterKindDef {
Parenthesis,
Brace,
Bracket,
}
#[derive(Serialize, Deserialize)]
#[serde(remote = "TokenId")]
struct TokenIdDef(u32);
#[derive(Serialize, Deserialize)]
#[serde(remote = "Delimiter")]
struct DelimiterDef {
#[serde(
with = "TokenIdDef",
default = "tt::TokenId::unspecified",
skip_serializing_if = "token_id_def::skip_if"
)]
id: TokenId,
#[serde(with = "DelimiterKindDef")]
kind: DelimiterKind,
}
#[derive(Serialize, Deserialize)]
#[serde(remote = "Subtree")]
struct SubtreeDef {
#[serde(default, with = "opt_delimiter_def")]
delimiter: Option<Delimiter>,
#[serde(with = "vec_token_tree")]
token_trees: Vec<TokenTree>,
}
#[derive(Serialize, Deserialize)]
#[serde(remote = "TokenTree")]
enum TokenTreeDef {
#[serde(with = "LeafDef")]
Leaf(Leaf),
#[serde(with = "SubtreeDef")]
Subtree(Subtree),
}
#[derive(Serialize, Deserialize)]
#[serde(remote = "Leaf")]
enum LeafDef {
#[serde(with = "LiteralDef")]
Literal(Literal),
#[serde(with = "PunctDef")]
Punct(Punct),
#[serde(with = "IdentDef")]
Ident(Ident),
}
#[derive(Serialize, Deserialize)]
#[serde(remote = "Literal")]
struct LiteralDef {
text: SmolStr,
#[serde(
with = "TokenIdDef",
default = "tt::TokenId::unspecified",
skip_serializing_if = "token_id_def::skip_if"
)]
id: TokenId,
}
#[derive(Serialize, Deserialize)]
#[serde(remote = "Punct")]
struct PunctDef {
char: char,
#[serde(with = "SpacingDef")]
spacing: Spacing,
#[serde(
with = "TokenIdDef",
default = "tt::TokenId::unspecified",
skip_serializing_if = "token_id_def::skip_if"
)]
id: TokenId,
}
#[derive(Serialize, Deserialize)]
#[serde(remote = "Spacing")]
enum SpacingDef {
Alone,
Joint,
}
#[derive(Serialize, Deserialize)]
#[serde(remote = "Ident")]
struct IdentDef {
text: SmolStr,
#[serde(
with = "TokenIdDef",
default = "tt::TokenId::unspecified",
skip_serializing_if = "token_id_def::skip_if"
)]
id: TokenId,
}
mod token_id_def {
pub(super) fn skip_if(value: &tt::TokenId) -> bool {
*value == tt::TokenId::unspecified()
}
}
mod opt_delimiter_def {
use super::{Delimiter, DelimiterDef};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
pub(super) fn serialize<S>(value: &Option<Delimiter>, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
#[derive(Serialize)]
struct Helper<'a>(#[serde(with = "DelimiterDef")] &'a Delimiter);
value.as_ref().map(Helper).serialize(serializer)
}
pub(super) fn deserialize<'de, D>(deserializer: D) -> Result<Option<Delimiter>, D::Error>
where
D: Deserializer<'de>,
{
#[derive(Deserialize)]
struct Helper(#[serde(with = "DelimiterDef")] Delimiter);
let helper = Option::deserialize(deserializer)?;
Ok(helper.map(|Helper(external)| external))
}
}
mod opt_subtree_def {
use super::{Subtree, SubtreeDef};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
pub(super) fn serialize<S>(value: &Option<Subtree>, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
#[derive(Serialize)]
struct Helper<'a>(#[serde(with = "SubtreeDef")] &'a Subtree);
value.as_ref().map(Helper).serialize(serializer)
}
pub(super) fn deserialize<'de, D>(deserializer: D) -> Result<Option<Subtree>, D::Error>
where
D: Deserializer<'de>,
{
#[derive(Deserialize)]
struct Helper(#[serde(with = "SubtreeDef")] Subtree);
let helper = Option::deserialize(deserializer)?;
Ok(helper.map(|Helper(external)| external))
}
}
mod vec_token_tree {
use super::{TokenTree, TokenTreeDef};
use serde::{ser::SerializeSeq, Deserialize, Deserializer, Serialize, Serializer};
pub(super) fn serialize<S>(value: &[TokenTree], serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
#[derive(Serialize)]
struct Helper<'a>(#[serde(with = "TokenTreeDef")] &'a TokenTree);
let items: Vec<_> = value.iter().map(Helper).collect();
let mut seq = serializer.serialize_seq(Some(items.len()))?;
for element in items {
seq.serialize_element(&element)?;
}
seq.end()
}
pub(super) fn deserialize<'de, D>(deserializer: D) -> Result<Vec<TokenTree>, D::Error>
where
D: Deserializer<'de>,
{
#[derive(Deserialize)]
struct Helper(#[serde(with = "TokenTreeDef")] TokenTree);
let helper = Vec::deserialize(deserializer)?;
Ok(helper.into_iter().map(|Helper(external)| external).collect())
}
pub expansion: FlatTree,
}
#[cfg(test)]
mod tests {
use super::*;
use tt::*;
fn fixture_token_tree() -> Subtree {
let mut subtree = Subtree::default();
@ -257,6 +71,15 @@ fn fixture_token_tree() -> Subtree {
subtree
.token_trees
.push(TokenTree::Leaf(Ident { text: "Foo".into(), id: TokenId(1) }.into()));
subtree.token_trees.push(TokenTree::Leaf(Leaf::Literal(Literal {
text: "Foo".into(),
id: TokenId::unspecified(),
})));
subtree.token_trees.push(TokenTree::Leaf(Leaf::Punct(Punct {
char: '@',
id: TokenId::unspecified(),
spacing: Spacing::Joint,
})));
subtree.token_trees.push(TokenTree::Subtree(Subtree {
delimiter: Some(Delimiter { id: TokenId(2), kind: DelimiterKind::Brace }),
token_trees: vec![],
@ -268,22 +91,17 @@ fn fixture_token_tree() -> Subtree {
fn test_proc_macro_rpc_works() {
let tt = fixture_token_tree();
let task = ExpansionTask {
macro_body: tt.clone(),
macro_body: FlatTree::new(&tt),
macro_name: Default::default(),
attributes: None,
lib: AbsPathBuf::assert(std::env::current_dir().unwrap()),
lib: std::env::current_dir().unwrap(),
env: Default::default(),
};
let json = serde_json::to_string(&task).unwrap();
println!("{}", json);
let back: ExpansionTask = serde_json::from_str(&json).unwrap();
assert_eq!(task.macro_body, back.macro_body);
let result = ExpansionResult { expansion: tt };
let json = serde_json::to_string(&result).unwrap();
let back: ExpansionResult = serde_json::from_str(&json).unwrap();
assert_eq!(result, back);
assert_eq!(tt, back.macro_body.to_subtree());
}
}

View File

@ -0,0 +1,328 @@
//! Serialization-friendly representation of `tt::Subtree`.
//!
//! It is possible to serialize `Subtree` as is, as a tree, but using
//! arbitrary-nested trees in JSON is problematic, as they can cause the JSON
//! parser to overflow the stack.
//!
//! Additionally, such implementation would be pretty verbose, and we do care
//! about performance here a bit.
//!
//! So what this module does is dumping a `tt::Subtree` into a bunch of flat
//! array of numbers. See the test in the parent module to get an example
//! output.
//!
//! ```json
//! {
//! // Array of subtrees, each subtree is represented by 4 numbers:
//! // id of delimiter, delimiter kind, index of first child in `token_tree`,
//! // index of last child in `token_tree`
//! "subtree":[4294967295,0,0,5,2,2,5,5],
//! // 2 ints per literal: [token id, index into `text`]
//! "literal":[4294967295,1],
//! // 3 ints per punct: [token id, char, spacing]
//! "punct":[4294967295,64,1],
//! // 2 ints per ident: [token id, index into `text`]
//! "ident": [0,0,1,1],
//! // children of all subtrees, concatenated. Each child is represented as `index << 2 | tag`
//! // where tag denotes one of subtree, literal, punct or ident.
//! "token_tree":[3,7,1,4],
//! // Strings shared by idents and literals
//! "text": ["struct","Foo"]
//! }
//! ```
//!
//! We should probably replace most of the code here with bincode someday, but,
//! as we don't have bincode in Cargo.toml yet, let's stick with serde_json for
//! the time being.
use std::{
collections::{HashMap, VecDeque},
convert::TryInto,
};
use serde::{Deserialize, Serialize};
use tt::TokenId;
/// Serialization-friendly, flattened representation of a `tt::Subtree`.
///
/// Tokens live in parallel `Vec<u32>` arrays rather than a recursive tree,
/// so (de)serialization never recurses and cannot overflow the stack on
/// deeply nested input. See the module docs for the exact encoding.
#[derive(Serialize, Deserialize, Debug)]
pub struct FlatTree {
    // 4 u32s per subtree: [delimiter token id, delimiter kind,
    // first child index, one-past-last child index] (see `SubtreeRepr::write`).
    subtree: Vec<u32>,
    // 2 u32s per literal: [token id, index into `text`].
    literal: Vec<u32>,
    // 3 u32s per punct: [token id, char as u32, spacing].
    punct: Vec<u32>,
    // 2 u32s per ident: [token id, index into `text`].
    ident: Vec<u32>,
    // Children of all subtrees, concatenated; each entry is `index << 2 | tag`.
    token_tree: Vec<u32>,
    // Interned strings shared by idents and literals.
    text: Vec<String>,
}
/// Intermediate per-subtree record: delimiter token id, optional delimiter
/// kind, and the range of this subtree's children within `token_tree`.
struct SubtreeRepr {
    id: tt::TokenId,
    kind: Option<tt::DelimiterKind>,
    // `[first child index, one-past-last child index]` into `token_tree`.
    tt: [u32; 2],
}
/// Intermediate per-literal record: token id plus an index into the shared
/// string table.
struct LiteralRepr {
    id: tt::TokenId,
    // Index into `text` (interned via `Writer::intern`).
    text: u32,
}
/// Intermediate per-punct record: token id, the punctuation character, and
/// its spacing.
struct PunctRepr {
    id: tt::TokenId,
    char: char,
    spacing: tt::Spacing,
}
/// Intermediate per-ident record: token id plus an index into the shared
/// string table.
struct IdentRepr {
    id: tt::TokenId,
    // Index into `text` (interned via `Writer::intern`).
    text: u32,
}
impl FlatTree {
    /// Flattens `subtree` into the parallel-array representation.
    ///
    /// Walks the tree breadth-first via `Writer`, then packs each
    /// intermediate `*Repr` into raw `u32`s.
    pub fn new(subtree: &tt::Subtree) -> FlatTree {
        let mut w = Writer {
            string_table: HashMap::new(),
            work: VecDeque::new(),

            subtree: Vec::new(),
            literal: Vec::new(),
            punct: Vec::new(),
            ident: Vec::new(),
            token_tree: Vec::new(),
            text: Vec::new(),
        };
        w.write(subtree);

        return FlatTree {
            subtree: write_vec(w.subtree, SubtreeRepr::write),
            literal: write_vec(w.literal, LiteralRepr::write),
            punct: write_vec(w.punct, PunctRepr::write),
            ident: write_vec(w.ident, IdentRepr::write),
            token_tree: w.token_tree,
            text: w.text,
        };

        // Flattens `[u32; N]`-producing reprs into one contiguous `Vec<u32>`.
        fn write_vec<T, F: Fn(T) -> [u32; N], const N: usize>(xs: Vec<T>, f: F) -> Vec<u32> {
            xs.into_iter().flat_map(f).collect()
        }
    }

    /// Reconstructs the original `tt::Subtree`; inverse of [`FlatTree::new`].
    ///
    /// Unpacks the raw `u32` arrays back into `*Repr`s and hands them to
    /// `Reader` to rebuild the tree.
    pub fn to_subtree(self) -> tt::Subtree {
        return Reader {
            subtree: read_vec(self.subtree, SubtreeRepr::read),
            literal: read_vec(self.literal, LiteralRepr::read),
            punct: read_vec(self.punct, PunctRepr::read),
            ident: read_vec(self.ident, IdentRepr::read),
            token_tree: self.token_tree,
            text: self.text,
        }
        .read();

        // Splits a flat `Vec<u32>` into exact `N`-sized chunks and decodes
        // each; asserts (panics) if the length is not a multiple of `N`,
        // which would indicate corrupt input.
        fn read_vec<T, F: Fn([u32; N]) -> T, const N: usize>(xs: Vec<u32>, f: F) -> Vec<T> {
            let mut chunks = xs.chunks_exact(N);
            let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap())).collect();
            assert!(chunks.remainder().is_empty());
            res
        }
    }
}
impl SubtreeRepr {
    /// Packs this subtree into four `u32`s:
    /// `[delimiter token id, delimiter kind, first child, one-past-last child]`.
    /// Kind 0 encodes "no delimiter".
    fn write(self) -> [u32; 4] {
        let kind = if let Some(kind) = self.kind {
            match kind {
                tt::DelimiterKind::Parenthesis => 1,
                tt::DelimiterKind::Brace => 2,
                tt::DelimiterKind::Bracket => 3,
            }
        } else {
            0
        };
        let [lo, len] = self.tt;
        [self.id.0, kind, lo, len]
    }

    /// Decodes a quadruple produced by [`SubtreeRepr::write`].
    /// Panics on an unknown delimiter-kind code.
    fn read([id, kind, lo, len]: [u32; 4]) -> SubtreeRepr {
        let kind = match kind {
            0 => None,
            1 => Some(tt::DelimiterKind::Parenthesis),
            2 => Some(tt::DelimiterKind::Brace),
            3 => Some(tt::DelimiterKind::Bracket),
            other => panic!("bad kind {}", other),
        };
        SubtreeRepr { id: TokenId(id), kind, tt: [lo, len] }
    }
}
impl LiteralRepr {
    /// Encodes as `[token id, index into the shared string table]`.
    fn write(self) -> [u32; 2] {
        let LiteralRepr { id, text } = self;
        [id.0, text]
    }

    /// Decodes a pair produced by [`LiteralRepr::write`].
    fn read(raw: [u32; 2]) -> LiteralRepr {
        let [id, text] = raw;
        LiteralRepr { id: TokenId(id), text }
    }
}
impl PunctRepr {
    /// Encodes as `[token id, char as u32, spacing]`
    /// where spacing is 0 for `Alone`, 1 for `Joint`.
    fn write(self) -> [u32; 3] {
        let spacing = if matches!(self.spacing, tt::Spacing::Joint) { 1 } else { 0 };
        [self.id.0, self.char as u32, spacing]
    }

    /// Decodes a triple produced by [`PunctRepr::write`].
    /// Panics on an unknown spacing code or a non-char code point.
    fn read([id, char, spacing]: [u32; 3]) -> PunctRepr {
        let spacing = match spacing {
            0 => tt::Spacing::Alone,
            1 => tt::Spacing::Joint,
            other => panic!("bad spacing {}", other),
        };
        let char = char.try_into().unwrap();
        PunctRepr { id: TokenId(id), char, spacing }
    }
}
impl IdentRepr {
    /// Encodes as `[token id, index into the shared string table]`.
    fn write(self) -> [u32; 2] {
        [self.id.0, self.text]
    }

    /// Decodes a pair produced by [`IdentRepr::write`].
    ///
    /// Destructures the array in the parameter pattern, matching the sibling
    /// `SubtreeRepr::read`/`LiteralRepr::read`/`PunctRepr::read`
    /// implementations (the original indexed `data[0]`/`data[1]`).
    fn read([id, text]: [u32; 2]) -> IdentRepr {
        IdentRepr { id: TokenId(id), text }
    }
}
/// Streaming encoder: walks a `tt::Subtree` breadth-first and accumulates
/// the intermediate reprs plus the shared string table.
struct Writer<'a> {
    // Subtrees whose children still need writing, as
    // (index into `subtree`, subtree) pairs, processed FIFO.
    work: VecDeque<(usize, &'a tt::Subtree)>,
    // Maps already-interned strings to their index in `text`.
    string_table: HashMap<&'a str, u32>,

    subtree: Vec<SubtreeRepr>,
    literal: Vec<LiteralRepr>,
    punct: Vec<PunctRepr>,
    ident: Vec<IdentRepr>,
    // Encoded children of all subtrees: `index << 2 | tag`.
    token_tree: Vec<u32>,
    // Interned string storage shared by idents and literals.
    text: Vec<String>,
}
impl<'a> Writer<'a> {
    /// Serializes `root` and everything below it into the flat arrays.
    ///
    /// Subtrees are processed breadth-first: `enqueue` reserves a repr slot,
    /// and the loop drains the FIFO queue, so parents always get smaller
    /// indices than their children.
    fn write(&mut self, root: &'a tt::Subtree) {
        self.enqueue(root);
        while let Some((idx, subtree)) = self.work.pop_front() {
            self.subtree(idx, subtree);
        }
    }

    /// Writes the children of `subtree` into `token_tree`, recording the
    /// child range in the repr at `idx`.
    fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree) {
        let mut first_tt = self.token_tree.len();
        let n_tt = subtree.token_trees.len();
        // Reserve the contiguous child range up front; `!0` marks slots
        // not yet filled in.
        self.token_tree.resize(first_tt + n_tt, !0);

        self.subtree[idx].tt = [first_tt as u32, (first_tt + n_tt) as u32];

        for child in &subtree.token_trees {
            // Each child is encoded as `index << 2 | tag`; the 2-bit tag
            // selects which array (subtree/literal/punct/ident) the index
            // points into.
            let idx_tag = match child {
                tt::TokenTree::Subtree(it) => {
                    let idx = self.enqueue(it);
                    idx << 2 | 0b00
                }
                tt::TokenTree::Leaf(leaf) => match leaf {
                    tt::Leaf::Literal(lit) => {
                        let idx = self.literal.len() as u32;
                        let text = self.intern(&lit.text);
                        self.literal.push(LiteralRepr { id: lit.id, text });
                        idx << 2 | 0b01
                    }
                    tt::Leaf::Punct(punct) => {
                        let idx = self.punct.len() as u32;
                        self.punct.push(PunctRepr {
                            char: punct.char,
                            spacing: punct.spacing,
                            id: punct.id,
                        });
                        idx << 2 | 0b10
                    }
                    tt::Leaf::Ident(ident) => {
                        let idx = self.ident.len() as u32;
                        let text = self.intern(&ident.text);
                        self.ident.push(IdentRepr { id: ident.id, text });
                        idx << 2 | 0b11
                    }
                },
            };
            self.token_tree[first_tt] = idx_tag;
            first_tt += 1;
        }
    }

    /// Appends a repr for `subtree` (its child range is filled in later by
    /// `Writer::subtree`), queues it for processing, and returns its index.
    fn enqueue(&mut self, subtree: &'a tt::Subtree) -> u32 {
        let idx = self.subtree.len();
        // A missing delimiter is encoded as `TokenId::unspecified()` + `None`.
        let delimiter_id = subtree.delimiter.map(|it| it.id).unwrap_or(TokenId::unspecified());
        let delimiter_kind = subtree.delimiter.map(|it| it.kind);
        self.subtree.push(SubtreeRepr { id: delimiter_id, kind: delimiter_kind, tt: [!0, !0] });
        self.work.push_back((idx, subtree));
        idx as u32
    }

    /// Interns `text`, returning its index in the shared string table;
    /// duplicate strings are stored once.
    pub(crate) fn intern(&mut self, text: &'a str) -> u32 {
        let table = &mut self.text;
        *self.string_table.entry(text).or_insert_with(|| {
            let idx = table.len();
            table.push(text.to_string());
            idx as u32
        })
    }
}
/// Decoder counterpart of `Writer`: holds the decoded intermediate reprs
/// from which the `tt::Subtree` is rebuilt.
struct Reader {
    subtree: Vec<SubtreeRepr>,
    literal: Vec<LiteralRepr>,
    punct: Vec<PunctRepr>,
    ident: Vec<IdentRepr>,
    // Encoded children of all subtrees: `index << 2 | tag`.
    token_tree: Vec<u32>,
    // Interned string storage shared by idents and literals.
    text: Vec<String>,
}
impl Reader {
    /// Rebuilds the `tt::Subtree` from the decoded reprs.
    ///
    /// `Writer` emits subtrees breadth-first, so every child subtree has a
    /// larger index than its parent; iterating indices in reverse therefore
    /// materializes children before their parent consumes them.
    pub(crate) fn read(self) -> tt::Subtree {
        let mut res: Vec<Option<tt::Subtree>> = vec![None; self.subtree.len()];
        for i in (0..self.subtree.len()).rev() {
            let repr = &self.subtree[i];
            let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize];
            let s = tt::Subtree {
                delimiter: repr.kind.map(|kind| tt::Delimiter { id: repr.id, kind }),
                token_trees: token_trees
                    .iter()
                    .copied()
                    .map(|idx_tag| {
                        // Low 2 bits select the array; the rest is the index.
                        let tag = idx_tag & 0b11;
                        let idx = (idx_tag >> 2) as usize;
                        match tag {
                            // XXX: we iterate subtrees in reverse to guarantee
                            // that this unwrap doesn't fire.
                            0b00 => res[idx].take().unwrap().into(),
                            0b01 => {
                                let repr = &self.literal[idx];
                                tt::Leaf::Literal(tt::Literal {
                                    text: self.text[repr.text as usize].as_str().into(),
                                    id: repr.id,
                                })
                                .into()
                            }
                            0b10 => {
                                let repr = &self.punct[idx];
                                tt::Leaf::Punct(tt::Punct {
                                    char: repr.char,
                                    spacing: repr.spacing,
                                    id: repr.id,
                                })
                                .into()
                            }
                            0b11 => {
                                let repr = &self.ident[idx];
                                tt::Leaf::Ident(tt::Ident {
                                    text: self.text[repr.text as usize].as_str().into(),
                                    id: repr.id,
                                })
                                .into()
                            }
                            other => panic!("bad tag: {}", other),
                        }
                    })
                    .collect(),
            };
            res[i] = Some(s.into())
        }
        // Index 0 is the root enqueued first by `Writer::write`.
        res[0].take().unwrap()
    }
}

View File

@ -12,7 +12,7 @@ pub fn run() -> io::Result<()> {
let res = match req {
msg::Request::ListMacro(task) => srv.list_macros(&task).map(msg::Response::ListMacro),
msg::Request::ExpansionMacro(task) => {
srv.expand(&task).map(msg::Response::ExpansionMacro)
srv.expand(task).map(msg::Response::ExpansionMacro)
}
};

View File

@ -15,7 +15,7 @@
mod abis;
use proc_macro_api::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask};
use proc_macro_api::{ExpansionResult, ExpansionTask, FlatTree, ListMacrosResult, ListMacrosTask};
use std::{
collections::{hash_map::Entry, HashMap},
env, fs,
@ -29,7 +29,7 @@ pub(crate) struct ProcMacroSrv {
}
impl ProcMacroSrv {
pub fn expand(&mut self, task: &ExpansionTask) -> Result<ExpansionResult, String> {
pub fn expand(&mut self, task: ExpansionTask) -> Result<ExpansionResult, String> {
let expander = self.expander(task.lib.as_ref())?;
let mut prev_env = HashMap::new();
@ -38,7 +38,11 @@ pub fn expand(&mut self, task: &ExpansionTask) -> Result<ExpansionResult, String
env::set_var(k, v);
}
let result = expander.expand(&task.macro_name, &task.macro_body, task.attributes.as_ref());
let macro_body = task.macro_body.to_subtree();
let attributes = task.attributes.map(|it| it.to_subtree());
let result = expander
.expand(&task.macro_name, &macro_body, attributes.as_ref())
.map(|it| FlatTree::new(&it));
for (k, _) in &task.env {
match &prev_env[k.as_str()] {

View File

@ -3,7 +3,6 @@
use crate::dylib;
use crate::ProcMacroSrv;
use expect_test::Expect;
use paths::AbsPathBuf;
use proc_macro_api::ListMacrosTask;
use std::str::FromStr;
@ -42,8 +41,7 @@ fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect:
}
pub fn list() -> Vec<String> {
let path = AbsPathBuf::assert(fixtures::proc_macro_test_dylib_path());
let task = ListMacrosTask { lib: path };
let task = ListMacrosTask { lib: fixtures::proc_macro_test_dylib_path() };
let mut srv = ProcMacroSrv::default();
let res = srv.list_macros(&task).unwrap();
res.macros.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect()

View File

@ -18,8 +18,7 @@ rustc-hash = "1.1.0"
arrayvec = "0.7"
once_cell = "1.3.1"
indexmap = "1.4.0"
smol_str = { version = "0.1.15", features = ["serde"] }
serde = { version = "1.0.106", features = ["derive"] }
smol_str = "0.1.15"
stdx = { path = "../stdx", version = "0.0.0" }
text_edit = { path = "../text_edit", version = "0.0.0" }

View File

@ -9,8 +9,6 @@ edition = "2018"
doctest = false
[dependencies]
# ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here
# to reduce number of compilations
smol_str = { version = "0.1.15", features = ["serde"] }
smol_str = "0.1"
stdx = { path = "../stdx", version = "0.0.0" }