Try to support pre and post-change metavars

This commit is contained in:
Lukas Wirth 2023-12-19 12:53:10 +01:00
parent 8753ca5360
commit f48ecb6e09
21 changed files with 255 additions and 110 deletions

1
Cargo.lock generated
View File

@ -74,6 +74,7 @@ dependencies = [
"profile",
"rust-analyzer-salsa",
"rustc-hash",
"semver",
"span",
"stdx",
"syntax",

View File

@ -110,6 +110,7 @@ nohash-hasher = "0.2.0"
rayon = "1.8.0"
rust-analyzer-salsa = "0.17.0-pre.4"
rustc-hash = "1.1.0"
semver = "1.0.14"
serde = { version = "1.0.192", features = ["derive"] }
serde_json = "1.0.108"
smallvec = { version = "1.10.0", features = [

View File

@ -16,6 +16,7 @@ la-arena.workspace = true
rust-analyzer-salsa.workspace = true
rustc-hash.workspace = true
triomphe.workspace = true
semver.workspace = true
# local deps
cfg.workspace = true

View File

@ -11,6 +11,7 @@
use cfg::CfgOptions;
use la_arena::{Arena, Idx};
use rustc_hash::{FxHashMap, FxHashSet};
use semver::Version;
use syntax::SmolStr;
use triomphe::Arc;
use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};
@ -258,7 +259,7 @@ pub fn as_str(self) -> &'static str {
pub fn from_str(str: &str) -> Option<Self> {
Some(match str {
"" => ReleaseChannel::Stable,
"" | "stable" => ReleaseChannel::Stable,
"nightly" => ReleaseChannel::Nightly,
_ if str.starts_with("beta") => ReleaseChannel::Beta,
_ => return None,
@ -289,7 +290,7 @@ pub struct CrateData {
// things. This info does need to be somewhat present though as to prevent deduplication from
// happening across different workspaces with different layouts.
pub target_layout: TargetLayoutLoadResult,
pub channel: Option<ReleaseChannel>,
pub toolchain: Option<Version>,
}
impl CrateData {
@ -346,6 +347,10 @@ pub fn eq_ignoring_origin_and_deps(&self, other: &CrateData, ignore_dev_deps: bo
slf_deps.eq(other_deps)
}
pub fn channel(&self) -> Option<ReleaseChannel> {
self.toolchain.as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre))
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
@ -427,7 +432,7 @@ pub fn add_crate_root(
is_proc_macro: bool,
origin: CrateOrigin,
target_layout: Result<Arc<str>, Arc<str>>,
channel: Option<ReleaseChannel>,
toolchain: Option<Version>,
) -> CrateId {
let data = CrateData {
root_file_id,
@ -441,7 +446,7 @@ pub fn add_crate_root(
origin,
target_layout,
is_proc_macro,
channel,
toolchain,
};
self.arena.alloc(data)
}

View File

@ -23,6 +23,8 @@
pub use span::{FilePosition, FileRange};
pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, FileId, VfsPath};
pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
#[macro_export]
macro_rules! impl_intern_key {
($name:ident) => {

View File

@ -1,8 +1,10 @@
//! Defines database & queries for macro expansion.
use std::sync::OnceLock;
use base_db::{
salsa::{self, debug::DebugQueryTable},
CrateId, Edition, FileId, SourceDatabase,
CrateId, Edition, FileId, SourceDatabase, VersionReq,
};
use either::Either;
use limit::Limit;
@ -45,6 +47,9 @@ pub struct DeclarativeMacroExpander {
pub transparency: Transparency,
}
// FIXME: Remove this once we drop support for 1.76
static REQUIREMENT: OnceLock<VersionReq> = OnceLock::new();
impl DeclarativeMacroExpander {
pub fn expand(
&self,
@ -52,6 +57,18 @@ pub fn expand(
tt: tt::Subtree,
call_id: MacroCallId,
) -> ExpandResult<tt::Subtree> {
let toolchain = &db.crate_graph()[db.lookup_intern_macro_call(call_id).def.krate].toolchain;
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
&base_db::Version {
pre: base_db::Prerelease::EMPTY,
build: base_db::BuildMetadata::EMPTY,
major: version.major,
minor: version.minor,
patch: version.patch,
},
)
});
match self.mac.err() {
Some(e) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan::DUMMY),
@ -59,18 +76,39 @@ pub fn expand(
),
None => self
.mac
.expand(&tt, |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency))
.expand(
&tt,
|s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
new_meta_vars,
)
.map_err(Into::into),
}
}
pub fn expand_unhygienic(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
pub fn expand_unhygienic(
&self,
db: &dyn ExpandDatabase,
tt: tt::Subtree,
krate: CrateId,
) -> ExpandResult<tt::Subtree> {
let toolchain = &db.crate_graph()[krate].toolchain;
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
&base_db::Version {
pre: base_db::Prerelease::EMPTY,
build: base_db::BuildMetadata::EMPTY,
major: version.major,
minor: version.minor,
patch: version.patch,
},
)
});
match self.mac.err() {
Some(e) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan::DUMMY),
ExpandError::other(format!("invalid macro definition: {e}")),
),
None => self.mac.expand(&tt, |_| ()).map_err(Into::into),
None => self.mac.expand(&tt, |_| (), new_meta_vars).map_err(Into::into),
}
}
}
@ -278,7 +316,7 @@ pub fn expand_speculative(
expander.expand(db, actual_macro_call, &adt, span_map)
}
MacroDefKind::Declarative(it) => {
db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt)
db.decl_macro_expander(loc.krate, it).expand_unhygienic(db, tt, loc.def.krate)
}
MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
MacroDefKind::BuiltInEager(it, _) => {
@ -525,7 +563,8 @@ fn decl_macro_expander(
def_crate: CrateId,
id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander> {
let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021;
let crate_data = &db.crate_graph()[def_crate];
let is_2021 = crate_data.edition >= Edition::Edition2021;
let (root, map) = parse_with_map(db, id.file_id);
let root = root.syntax_node();
@ -549,13 +588,25 @@ fn decl_macro_expander(
_ => None,
}
};
let toolchain = crate_data.toolchain.as_ref();
let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
&base_db::Version {
pre: base_db::Prerelease::EMPTY,
build: base_db::BuildMetadata::EMPTY,
major: version.major,
minor: version.minor,
patch: version.patch,
},
)
});
let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
ast::Macro::MacroRules(macro_rules) => (
match macro_rules.token_tree() {
Some(arg) => {
let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021, new_meta_vars);
mac
}
None => mbe::DeclarativeMacro::from_err(
@ -569,7 +620,7 @@ fn decl_macro_expander(
match macro_def.body() {
Some(arg) => {
let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021, new_meta_vars);
mac
}
None => mbe::DeclarativeMacro::from_err(

View File

@ -693,7 +693,7 @@ pub(super) fn new(
let krate = scope.krate();
let module = scope.module();
let toolchain = db.crate_graph()[krate.into()].channel;
let toolchain = db.crate_graph()[krate.into()].channel();
// `toolchain == None` means we're in some detached files. Since we have no information on
// the toolchain being used, let's just allow unstable items to be listed.
let is_nightly = matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None);

View File

@ -492,7 +492,7 @@ fn get_doc_base_urls(
let Some(krate) = def.krate(db) else { return Default::default() };
let Some(display_name) = krate.display_name(db) else { return Default::default() };
let crate_data = &db.crate_graph()[krate.into()];
let channel = crate_data.channel.map_or("nightly", ReleaseChannel::as_str);
let channel = crate_data.channel().unwrap_or(ReleaseChannel::Nightly).as_str();
let (web_base, local_base) = match &crate_data.origin {
// std and co do not specify `html_root_url` any longer so we gotta handwrite this ourself.

View File

@ -40,7 +40,7 @@ pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) {
data.is_proc_macro,
data.origin.clone(),
data.target_layout.clone(),
data.channel,
data.toolchain.clone(),
);
new_proc_macros.insert(new_id, proc_macros[&old_id].clone());
map.insert(old_id, new_id);

View File

@ -20,7 +20,10 @@ fn benchmark_parse_macro_rules() {
let rules = macro_rules_fixtures_tt();
let hash: usize = {
let _pt = bench("mbe parse macro rules");
rules.values().map(|it| DeclarativeMacro::parse_macro_rules(it, true).rules.len()).sum()
rules
.values()
.map(|it| DeclarativeMacro::parse_macro_rules(it, true, true).rules.len())
.sum()
};
assert_eq!(hash, 1144);
}
@ -38,7 +41,7 @@ fn benchmark_expand_macro_rules() {
invocations
.into_iter()
.map(|(id, tt)| {
let res = rules[&id].expand(&tt, |_| ());
let res = rules[&id].expand(&tt, |_| (), true);
assert!(res.err.is_none());
res.value.token_trees.len()
})
@ -50,7 +53,7 @@ fn benchmark_expand_macro_rules() {
fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<DummyTestSpanData>> {
macro_rules_fixtures_tt()
.into_iter()
.map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true)))
.map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true, true)))
.collect()
}
@ -105,7 +108,7 @@ fn invocation_fixtures(
for op in rule.lhs.iter() {
collect_from_op(op, &mut subtree, &mut seed);
}
if it.expand(&subtree, |_| ()).err.is_none() {
if it.expand(&subtree, |_| (), true).err.is_none() {
res.push((name.clone(), subtree));
break;
}

View File

@ -16,6 +16,7 @@ pub(crate) fn expand_rules<S: Span>(
input: &tt::Subtree<S>,
marker: impl Fn(&mut S) + Copy,
is_2021: bool,
new_meta_vars: bool,
) -> ExpandResult<tt::Subtree<S>> {
let mut match_: Option<(matcher::Match<S>, &crate::Rule<S>)> = None;
for rule in rules {
@ -26,7 +27,7 @@ pub(crate) fn expand_rules<S: Span>(
// Unconditionally returning the transcription here makes the
// `test_repeat_bad_var` test fail.
let ExpandResult { value, err: transcribe_err } =
transcriber::transcribe(&rule.rhs, &new_match.bindings, marker);
transcriber::transcribe(&rule.rhs, &new_match.bindings, marker, new_meta_vars);
if transcribe_err.is_none() {
return ExpandResult::ok(value);
}
@ -45,7 +46,7 @@ pub(crate) fn expand_rules<S: Span>(
if let Some((match_, rule)) = match_ {
// if we got here, there was no match without errors
let ExpandResult { value, err: transcribe_err } =
transcriber::transcribe(&rule.rhs, &match_.bindings, marker);
transcriber::transcribe(&rule.rhs, &match_.bindings, marker, new_meta_vars);
ExpandResult { value, err: match_.err.or(transcribe_err) }
} else {
ExpandResult::new(

View File

@ -131,8 +131,9 @@ pub(super) fn transcribe<S: Span>(
template: &MetaTemplate<S>,
bindings: &Bindings<S>,
marker: impl Fn(&mut S) + Copy,
new_meta_vars: bool,
) -> ExpandResult<tt::Subtree<S>> {
let mut ctx = ExpandCtx { bindings, nesting: Vec::new() };
let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), new_meta_vars };
let mut arena: Vec<tt::TokenTree<S>> = Vec::new();
expand_subtree(&mut ctx, template, None, &mut arena, marker)
}
@ -152,6 +153,7 @@ struct NestingState {
struct ExpandCtx<'a, S> {
bindings: &'a Bindings<S>,
nesting: Vec<NestingState>,
new_meta_vars: bool,
}
fn expand_subtree<S: Span>(
@ -284,7 +286,13 @@ fn expand_subtree<S: Span>(
}
}
let c = match count(ctx, binding, 0, *depth) {
let res = if ctx.new_meta_vars {
count(ctx, binding, 0, depth.unwrap_or(0))
} else {
count_old(ctx, binding, 0, *depth)
};
let c = match res {
Ok(c) => c,
Err(e) => {
// XXX: It *might* make sense to emit a dummy integer value like `0` here.
@ -548,3 +556,32 @@ fn count<S>(
Binding::Fragment(_) | Binding::Missing(_) => Ok(1),
}
}
/// Counts how many times a metavariable binding repeats, using the
/// pre-1.76 `${count(ident, depth)}` semantics where `depth` is optional
/// (`None` means "sum across all remaining nesting levels").
///
/// This is the legacy twin of `count`: the caller in `expand_subtree`
/// dispatches here only when `ctx.new_meta_vars` is false, i.e. when the
/// workspace toolchain predates 1.76. It can be deleted once support for
/// toolchains older than 1.76 is dropped.
///
/// * `our_depth` — how many `Binding::Nested` layers we have already
///   descended through; `0` means we are still at the top of the binding.
/// * `count_depth` — the user-requested depth argument, if any.
fn count_old<S>(
    ctx: &ExpandCtx<'_, S>,
    binding: &Binding<S>,
    our_depth: usize,
    count_depth: Option<usize>,
) -> Result<usize, CountError> {
    match binding {
        Binding::Nested(bs) => match count_depth {
            // No explicit depth: recurse all the way down, summing leaves.
            None => bs.iter().map(|b| count_old(ctx, b, our_depth + 1, None)).sum(),
            // Depth exhausted: the answer is the number of repetitions here.
            Some(0) => Ok(bs.len()),
            // Still descending: decrement the remaining depth at each level.
            Some(d) => bs.iter().map(|b| count_old(ctx, b, our_depth + 1, Some(d - 1))).sum(),
        },
        Binding::Empty => Ok(0),
        Binding::Fragment(_) | Binding::Missing(_) => {
            if our_depth == 0 {
                // `${count(t)}` is placed inside the innermost repetition. This includes cases
                // where `t` is not a repeated fragment.
                Err(CountError::Misplaced)
            } else if count_depth.is_none() {
                Ok(1)
            } else {
                // We've reached the innermost repeated fragment, but the user wants us to go
                // further!
                Err(CountError::OutOfBounds)
            }
        }
    }
}

View File

@ -147,7 +147,12 @@ pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro<S> {
}
/// The old, `macro_rules! m {}` flavor.
pub fn parse_macro_rules(tt: &tt::Subtree<S>, is_2021: bool) -> DeclarativeMacro<S> {
pub fn parse_macro_rules(
tt: &tt::Subtree<S>,
is_2021: bool,
// FIXME: Remove this once we drop support for rust 1.76 (defaults to true then)
new_meta_vars: bool,
) -> DeclarativeMacro<S> {
// Note: this parsing can be implemented using mbe machinery itself, by
// matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing
// manually seems easier.
@ -156,7 +161,7 @@ pub fn parse_macro_rules(tt: &tt::Subtree<S>, is_2021: bool) -> DeclarativeMacro
let mut err = None;
while src.len() > 0 {
let rule = match Rule::parse(&mut src, true) {
let rule = match Rule::parse(&mut src, true, new_meta_vars) {
Ok(it) => it,
Err(e) => {
err = Some(Box::new(e));
@ -183,7 +188,12 @@ pub fn parse_macro_rules(tt: &tt::Subtree<S>, is_2021: bool) -> DeclarativeMacro
}
/// The new, unstable `macro m {}` flavor.
pub fn parse_macro2(tt: &tt::Subtree<S>, is_2021: bool) -> DeclarativeMacro<S> {
pub fn parse_macro2(
tt: &tt::Subtree<S>,
is_2021: bool,
// FIXME: Remove this once we drop support for rust 1.76 (defaults to true then)
new_meta_vars: bool,
) -> DeclarativeMacro<S> {
let mut src = TtIter::new(tt);
let mut rules = Vec::new();
let mut err = None;
@ -191,7 +201,7 @@ pub fn parse_macro2(tt: &tt::Subtree<S>, is_2021: bool) -> DeclarativeMacro<S> {
if tt::DelimiterKind::Brace == tt.delimiter.kind {
cov_mark::hit!(parse_macro_def_rules);
while src.len() > 0 {
let rule = match Rule::parse(&mut src, true) {
let rule = match Rule::parse(&mut src, true, new_meta_vars) {
Ok(it) => it,
Err(e) => {
err = Some(Box::new(e));
@ -210,7 +220,7 @@ pub fn parse_macro2(tt: &tt::Subtree<S>, is_2021: bool) -> DeclarativeMacro<S> {
}
} else {
cov_mark::hit!(parse_macro_def_simple);
match Rule::parse(&mut src, false) {
match Rule::parse(&mut src, false, new_meta_vars) {
Ok(rule) => {
if src.len() != 0 {
err = Some(Box::new(ParseError::expected("remaining tokens in macro def")));
@ -241,13 +251,18 @@ pub fn expand(
&self,
tt: &tt::Subtree<S>,
marker: impl Fn(&mut S) + Copy,
new_meta_vars: bool,
) -> ExpandResult<tt::Subtree<S>> {
expander::expand_rules(&self.rules, &tt, marker, self.is_2021)
expander::expand_rules(&self.rules, &tt, marker, self.is_2021, new_meta_vars)
}
}
impl<S: Span> Rule<S> {
fn parse(src: &mut TtIter<'_, S>, expect_arrow: bool) -> Result<Self, ParseError> {
fn parse(
src: &mut TtIter<'_, S>,
expect_arrow: bool,
new_meta_vars: bool,
) -> Result<Self, ParseError> {
let lhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?;
if expect_arrow {
src.expect_char('=').map_err(|()| ParseError::expected("expected `=`"))?;
@ -256,7 +271,7 @@ fn parse(src: &mut TtIter<'_, S>, expect_arrow: bool) -> Result<Self, ParseError
let rhs = src.expect_subtree().map_err(|()| ParseError::expected("expected subtree"))?;
let lhs = MetaTemplate::parse_pattern(lhs)?;
let rhs = MetaTemplate::parse_template(rhs)?;
let rhs = MetaTemplate::parse_template(rhs, new_meta_vars)?;
Ok(crate::Rule { lhs, rhs })
}

View File

@ -25,23 +25,26 @@
impl<S: Span> MetaTemplate<S> {
pub(crate) fn parse_pattern(pattern: &tt::Subtree<S>) -> Result<Self, ParseError> {
MetaTemplate::parse(pattern, Mode::Pattern)
MetaTemplate::parse(pattern, Mode::Pattern, false)
}
pub(crate) fn parse_template(template: &tt::Subtree<S>) -> Result<Self, ParseError> {
MetaTemplate::parse(template, Mode::Template)
pub(crate) fn parse_template(
template: &tt::Subtree<S>,
new_meta_vars: bool,
) -> Result<Self, ParseError> {
MetaTemplate::parse(template, Mode::Template, new_meta_vars)
}
pub(crate) fn iter(&self) -> impl Iterator<Item = &Op<S>> {
self.0.iter()
}
fn parse(tt: &tt::Subtree<S>, mode: Mode) -> Result<Self, ParseError> {
fn parse(tt: &tt::Subtree<S>, mode: Mode, new_meta_vars: bool) -> Result<Self, ParseError> {
let mut src = TtIter::new(tt);
let mut res = Vec::new();
while let Some(first) = src.peek_n(0) {
let op = next_op(first, &mut src, mode)?;
let op = next_op(first, &mut src, mode, new_meta_vars)?;
res.push(op);
}
@ -51,13 +54,35 @@ fn parse(tt: &tt::Subtree<S>, mode: Mode) -> Result<Self, ParseError> {
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum Op<S> {
Var { name: SmolStr, kind: Option<MetaVarKind>, id: S },
Ignore { name: SmolStr, id: S },
Index { depth: usize },
Length { depth: usize },
Count { name: SmolStr, depth: usize },
Repeat { tokens: MetaTemplate<S>, kind: RepeatKind, separator: Option<Separator<S>> },
Subtree { tokens: MetaTemplate<S>, delimiter: tt::Delimiter<S> },
Var {
name: SmolStr,
kind: Option<MetaVarKind>,
id: S,
},
Ignore {
name: SmolStr,
id: S,
},
Index {
depth: usize,
},
Length {
depth: usize,
},
Count {
name: SmolStr,
// FIXME: `usize` once we drop support for 1.76
depth: Option<usize>,
},
Repeat {
tokens: MetaTemplate<S>,
kind: RepeatKind,
separator: Option<Separator<S>>,
},
Subtree {
tokens: MetaTemplate<S>,
delimiter: tt::Delimiter<S>,
},
Literal(tt::Literal<S>),
Punct(SmallVec<[tt::Punct<S>; 3]>),
Ident(tt::Ident<S>),
@ -123,6 +148,7 @@ fn next_op<S: Span>(
first_peeked: &tt::TokenTree<S>,
src: &mut TtIter<'_, S>,
mode: Mode,
new_meta_vars: bool,
) -> Result<Op<S>, ParseError> {
let res = match first_peeked {
tt::TokenTree::Leaf(tt::Leaf::Punct(p @ tt::Punct { char: '$', .. })) => {
@ -136,14 +162,14 @@ fn next_op<S: Span>(
tt::TokenTree::Subtree(subtree) => match subtree.delimiter.kind {
tt::DelimiterKind::Parenthesis => {
let (separator, kind) = parse_repeat(src)?;
let tokens = MetaTemplate::parse(subtree, mode)?;
let tokens = MetaTemplate::parse(subtree, mode, new_meta_vars)?;
Op::Repeat { tokens, separator, kind }
}
tt::DelimiterKind::Brace => match mode {
Mode::Template => {
parse_metavar_expr(&mut TtIter::new(subtree)).map_err(|()| {
ParseError::unexpected("invalid metavariable expression")
})?
parse_metavar_expr(new_meta_vars, &mut TtIter::new(subtree)).map_err(
|()| ParseError::unexpected("invalid metavariable expression"),
)?
}
Mode::Pattern => {
return Err(ParseError::unexpected(
@ -207,7 +233,7 @@ fn next_op<S: Span>(
tt::TokenTree::Subtree(subtree) => {
src.next().expect("first token already peeked");
let tokens = MetaTemplate::parse(subtree, mode)?;
let tokens = MetaTemplate::parse(subtree, mode, new_meta_vars)?;
Op::Subtree { tokens, delimiter: subtree.delimiter }
}
};
@ -288,7 +314,7 @@ fn parse_repeat<S: Span>(
Err(ParseError::InvalidRepeat)
}
fn parse_metavar_expr<S: Span>(src: &mut TtIter<'_, S>) -> Result<Op<S>, ()> {
fn parse_metavar_expr<S: Span>(new_meta_vars: bool, src: &mut TtIter<'_, S>) -> Result<Op<S>, ()> {
let func = src.expect_ident()?;
let args = src.expect_subtree()?;
@ -300,16 +326,20 @@ fn parse_metavar_expr<S: Span>(src: &mut TtIter<'_, S>) -> Result<Op<S>, ()> {
let op = match &*func.text {
"ignore" => {
args.expect_dollar()?;
if new_meta_vars {
args.expect_dollar()?;
}
let ident = args.expect_ident()?;
Op::Ignore { name: ident.text.clone(), id: ident.span }
}
"index" => Op::Index { depth: parse_depth(&mut args)? },
"length" => Op::Length { depth: parse_depth(&mut args)? },
"count" => {
args.expect_dollar()?;
if new_meta_vars {
args.expect_dollar()?;
}
let ident = args.expect_ident()?;
let depth = if try_eat_comma(&mut args) { parse_depth(&mut args)? } else { 0 };
let depth = if try_eat_comma(&mut args) { Some(parse_depth(&mut args)?) } else { None };
Op::Count { name: ident.text.clone(), depth }
}
_ => return Err(()),

View File

@ -15,7 +15,7 @@ doctest = false
anyhow.workspace = true
cargo_metadata.workspace = true
rustc-hash.workspace = true
semver = "1.0.14"
semver.workspace = true
serde_json.workspace = true
serde.workspace = true
tracing.workspace = true

View File

@ -7,7 +7,7 @@
use anyhow::{format_err, Context};
use base_db::{
CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, DependencyKind,
Edition, Env, FileId, LangCrateOrigin, ProcMacroPaths, ReleaseChannel, TargetLayoutLoadResult,
Edition, Env, FileId, LangCrateOrigin, ProcMacroPaths, TargetLayoutLoadResult,
};
use cfg::{CfgDiff, CfgOptions};
use paths::{AbsPath, AbsPathBuf};
@ -619,7 +619,7 @@ pub fn to_crate_graph(
sysroot.as_ref().ok(),
extra_env,
Err("rust-project.json projects have no target layout set".into()),
toolchain.as_ref().and_then(|it| ReleaseChannel::from_str(it.pre.as_str())),
toolchain.clone(),
)
}
ProjectWorkspace::Cargo {
@ -644,7 +644,7 @@ pub fn to_crate_graph(
Ok(it) => Ok(Arc::from(it.as_str())),
Err(it) => Err(Arc::from(it.as_str())),
},
toolchain.as_ref().and_then(|it| ReleaseChannel::from_str(it.pre.as_str())),
toolchain.as_ref(),
),
ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => {
detached_files_to_crate_graph(
@ -733,7 +733,7 @@ fn project_json_to_crate_graph(
sysroot: Option<&Sysroot>,
extra_env: &FxHashMap<String, String>,
target_layout: TargetLayoutLoadResult,
channel: Option<ReleaseChannel>,
toolchain: Option<Version>,
) -> (CrateGraph, ProcMacroPaths) {
let mut res = (CrateGraph::default(), ProcMacroPaths::default());
let (crate_graph, proc_macros) = &mut res;
@ -744,7 +744,7 @@ fn project_json_to_crate_graph(
rustc_cfg.clone(),
target_layout.clone(),
load,
channel,
toolchain.as_ref(),
)
});
@ -807,7 +807,7 @@ fn project_json_to_crate_graph(
CrateOrigin::Local { repo: None, name: None }
},
target_layout.clone(),
channel,
toolchain.clone(),
);
if *is_proc_macro {
if let Some(path) = proc_macro_dylib_path.clone() {
@ -853,7 +853,7 @@ fn cargo_to_crate_graph(
forced_cfg: Option<CfgOptions>,
build_scripts: &WorkspaceBuildScripts,
target_layout: TargetLayoutLoadResult,
channel: Option<ReleaseChannel>,
toolchain: Option<&Version>,
) -> (CrateGraph, ProcMacroPaths) {
let _p = profile::span("cargo_to_crate_graph");
let mut res = (CrateGraph::default(), ProcMacroPaths::default());
@ -866,7 +866,7 @@ fn cargo_to_crate_graph(
rustc_cfg.clone(),
target_layout.clone(),
load,
channel,
toolchain,
),
None => (SysrootPublicDeps::default(), None),
};
@ -950,7 +950,7 @@ fn cargo_to_crate_graph(
is_proc_macro,
target_layout.clone(),
false,
channel,
toolchain.cloned(),
);
if kind == TargetKind::Lib {
lib_tgt = Some((crate_id, name.clone()));
@ -1038,7 +1038,7 @@ fn cargo_to_crate_graph(
rustc_build_scripts
},
target_layout,
channel,
toolchain,
);
}
}
@ -1117,7 +1117,7 @@ fn handle_rustc_crates(
override_cfg: &CfgOverrides,
build_scripts: &WorkspaceBuildScripts,
target_layout: TargetLayoutLoadResult,
channel: Option<ReleaseChannel>,
toolchain: Option<&Version>,
) {
let mut rustc_pkg_crates = FxHashMap::default();
// The root package of the rustc-dev component is rustc_driver, so we match that
@ -1172,7 +1172,7 @@ fn handle_rustc_crates(
rustc_workspace[tgt].is_proc_macro,
target_layout.clone(),
true,
channel,
toolchain.cloned(),
);
pkg_to_lib_crate.insert(pkg, crate_id);
// Add dependencies on core / std / alloc for this crate
@ -1248,7 +1248,7 @@ fn add_target_crate_root(
is_proc_macro: bool,
target_layout: TargetLayoutLoadResult,
rustc_crate: bool,
channel: Option<ReleaseChannel>,
toolchain: Option<Version>,
) -> CrateId {
let edition = pkg.edition;
let potential_cfg_options = if pkg.features.is_empty() {
@ -1304,7 +1304,7 @@ fn add_target_crate_root(
CrateOrigin::Library { repo: pkg.repository.clone(), name: pkg.name.clone() }
},
target_layout,
channel,
toolchain,
);
if is_proc_macro {
let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) {
@ -1346,7 +1346,7 @@ fn sysroot_to_crate_graph(
rustc_cfg: Vec<CfgFlag>,
target_layout: TargetLayoutLoadResult,
load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
channel: Option<ReleaseChannel>,
toolchain: Option<&Version>,
) -> (SysrootPublicDeps, Option<CrateId>) {
let _p = profile::span("sysroot_to_crate_graph");
let cfg_options = create_cfg_options(rustc_cfg.clone());
@ -1357,7 +1357,7 @@ fn sysroot_to_crate_graph(
rustc_cfg,
cfg_options,
target_layout,
channel,
toolchain,
crate_graph,
sysroot,
),
@ -1380,7 +1380,7 @@ fn sysroot_to_crate_graph(
false,
CrateOrigin::Lang(LangCrateOrigin::from(&*sysroot[krate].name)),
target_layout.clone(),
channel,
toolchain.cloned(),
);
Some((krate, crate_id))
})
@ -1412,7 +1412,7 @@ fn handle_hack_cargo_workspace(
rustc_cfg: Vec<CfgFlag>,
cfg_options: CfgOptions,
target_layout: Result<Arc<str>, Arc<str>>,
channel: Option<ReleaseChannel>,
toolchain: Option<&Version>,
crate_graph: &mut CrateGraph,
sysroot: &Sysroot,
) -> FxHashMap<SysrootCrate, CrateId> {
@ -1426,7 +1426,7 @@ fn handle_hack_cargo_workspace(
Some(cfg_options),
&WorkspaceBuildScripts::default(),
target_layout,
channel,
toolchain,
);
crate_graph.extend(cg, &mut pm);
for crate_name in ["std", "alloc", "core"] {

View File

@ -62,7 +62,7 @@
target_layout: Err(
"target_data_layout not loaded",
),
channel: None,
toolchain: None,
},
1: CrateData {
root_file_id: FileId(
@ -135,7 +135,7 @@
target_layout: Err(
"target_data_layout not loaded",
),
channel: None,
toolchain: None,
},
2: CrateData {
root_file_id: FileId(
@ -208,7 +208,7 @@
target_layout: Err(
"target_data_layout not loaded",
),
channel: None,
toolchain: None,
},
3: CrateData {
root_file_id: FileId(
@ -281,7 +281,7 @@
target_layout: Err(
"target_data_layout not loaded",
),
channel: None,
toolchain: None,
},
4: CrateData {
root_file_id: FileId(
@ -350,6 +350,6 @@
target_layout: Err(
"target_data_layout not loaded",
),
channel: None,
toolchain: None,
},
}

View File

@ -62,7 +62,7 @@
target_layout: Err(
"target_data_layout not loaded",
),
channel: None,
toolchain: None,
},
1: CrateData {
root_file_id: FileId(
@ -135,7 +135,7 @@
target_layout: Err(
"target_data_layout not loaded",
),
channel: None,
toolchain: None,
},
2: CrateData {
root_file_id: FileId(
@ -208,7 +208,7 @@
target_layout: Err(
"target_data_layout not loaded",
),
channel: None,
toolchain: None,
},
3: CrateData {
root_file_id: FileId(
@ -281,7 +281,7 @@
target_layout: Err(
"target_data_layout not loaded",
),
channel: None,
toolchain: None,
},
4: CrateData {
root_file_id: FileId(
@ -350,6 +350,6 @@
target_layout: Err(
"target_data_layout not loaded",
),
channel: None,
toolchain: None,
},
}

View File

@ -61,7 +61,7 @@
target_layout: Err(
"target_data_layout not loaded",
),
channel: None,
toolchain: None,
},
1: CrateData {
root_file_id: FileId(
@ -133,7 +133,7 @@
target_layout: Err(
"target_data_layout not loaded",
),
channel: None,
toolchain: None,
},
2: CrateData {
root_file_id: FileId(
@ -205,7 +205,7 @@
target_layout: Err(
"target_data_layout not loaded",
),
channel: None,
toolchain: None,
},
3: CrateData {
root_file_id: FileId(
@ -277,7 +277,7 @@
target_layout: Err(
"target_data_layout not loaded",
),
channel: None,
toolchain: None,
},
4: CrateData {
root_file_id: FileId(
@ -346,6 +346,6 @@
target_layout: Err(
"target_data_layout not loaded",
),
channel: None,
toolchain: None,
},
}

View File

@ -39,7 +39,7 @@
target_layout: Err(
"rust-project.json projects have no target layout set",
),
channel: None,
toolchain: None,
},
1: CrateData {
root_file_id: FileId(
@ -72,7 +72,7 @@
target_layout: Err(
"rust-project.json projects have no target layout set",
),
channel: None,
toolchain: None,
},
2: CrateData {
root_file_id: FileId(
@ -105,7 +105,7 @@
target_layout: Err(
"rust-project.json projects have no target layout set",
),
channel: None,
toolchain: None,
},
3: CrateData {
root_file_id: FileId(
@ -138,7 +138,7 @@
target_layout: Err(
"rust-project.json projects have no target layout set",
),
channel: None,
toolchain: None,
},
4: CrateData {
root_file_id: FileId(
@ -188,7 +188,7 @@
target_layout: Err(
"rust-project.json projects have no target layout set",
),
channel: None,
toolchain: None,
},
5: CrateData {
root_file_id: FileId(
@ -221,7 +221,7 @@
target_layout: Err(
"rust-project.json projects have no target layout set",
),
channel: None,
toolchain: None,
},
6: CrateData {
root_file_id: FileId(
@ -319,7 +319,7 @@
target_layout: Err(
"rust-project.json projects have no target layout set",
),
channel: None,
toolchain: None,
},
7: CrateData {
root_file_id: FileId(
@ -352,7 +352,7 @@
target_layout: Err(
"rust-project.json projects have no target layout set",
),
channel: None,
toolchain: None,
},
8: CrateData {
root_file_id: FileId(
@ -385,7 +385,7 @@
target_layout: Err(
"rust-project.json projects have no target layout set",
),
channel: None,
toolchain: None,
},
9: CrateData {
root_file_id: FileId(
@ -418,7 +418,7 @@
target_layout: Err(
"rust-project.json projects have no target layout set",
),
channel: None,
toolchain: None,
},
10: CrateData {
root_file_id: FileId(
@ -495,6 +495,6 @@
target_layout: Err(
"rust-project.json projects have no target layout set",
),
channel: None,
toolchain: None,
},
}

View File

@ -3,8 +3,8 @@
use base_db::{
CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, DependencyKind,
Edition, Env, FileChange, FileSet, LangCrateOrigin, ReleaseChannel, SourceDatabaseExt,
SourceRoot, VfsPath,
Edition, Env, FileChange, FileSet, LangCrateOrigin, SourceDatabaseExt, SourceRoot, Version,
VfsPath,
};
use cfg::CfgOptions;
use hir_expand::{
@ -120,12 +120,10 @@ pub fn parse_with_proc_macros(
) -> ChangeFixture {
let FixtureWithProjectMeta { fixture, mini_core, proc_macro_names, toolchain } =
FixtureWithProjectMeta::parse(ra_fixture);
let toolchain = toolchain
.map(|it| {
ReleaseChannel::from_str(&it)
.unwrap_or_else(|| panic!("unknown release channel found: {it}"))
})
.unwrap_or(ReleaseChannel::Stable);
let toolchain = Some({
let channel = toolchain.as_deref().unwrap_or("stable");
Version::parse(&format!("1.76.0-{channel}")).unwrap()
});
let mut source_change = FileChange::new();
let mut files = Vec::new();
@ -193,7 +191,7 @@ pub fn parse_with_proc_macros(
.as_deref()
.map(From::from)
.ok_or_else(|| "target_data_layout unset".into()),
Some(toolchain),
toolchain.clone(),
);
let prev = crates.insert(crate_name.clone(), crate_id);
assert!(prev.is_none(), "multiple crates with same name: {}", crate_name);
@ -233,7 +231,7 @@ pub fn parse_with_proc_macros(
default_target_data_layout
.map(|it| it.into())
.ok_or_else(|| "target_data_layout unset".into()),
Some(toolchain),
toolchain.clone(),
);
} else {
for (from, to, prelude) in crate_deps {
@ -280,7 +278,7 @@ pub fn parse_with_proc_macros(
false,
CrateOrigin::Lang(LangCrateOrigin::Core),
target_layout.clone(),
Some(toolchain),
toolchain.clone(),
);
for krate in all_crates {
@ -325,7 +323,7 @@ pub fn parse_with_proc_macros(
true,
CrateOrigin::Local { repo: None, name: None },
target_layout,
Some(toolchain),
toolchain,
);
proc_macros.insert(proc_macros_crate, Ok(proc_macro));