Merge branch 'master' of https://github.com/rust-analyzer/rust-analyzer into feature/themes
commit 166636ba77
@@ -1,4 +1,4 @@
[alias]
xtask = "run --package xtask --bin xtask --"

install-ra = "run --package xtask --bin xtask -- install" # for backwards compat
tq = "test -- -q"
Cargo.lock (generated, 62 changes)
@@ -123,7 +123,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "chalk-derive"
version = "0.1.0"
source = "git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05#50f9f636123bd88d0cc1b958749981d6702e4d05"
source = "git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478#a88cad7f0a69e05ba8f40b74c58a1c229c1b2478"
dependencies = [
 "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -133,9 +133,9 @@ dependencies = [
[[package]]
name = "chalk-engine"
version = "0.9.0"
source = "git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05#50f9f636123bd88d0cc1b958749981d6702e4d05"
source = "git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478#a88cad7f0a69e05ba8f40b74c58a1c229c1b2478"
dependencies = [
 "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)",
 "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)",
 "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "stacker 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
]

@@ -143,18 +143,18 @@ dependencies = [
[[package]]
name = "chalk-ir"
version = "0.1.0"
source = "git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05#50f9f636123bd88d0cc1b958749981d6702e4d05"
source = "git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478#a88cad7f0a69e05ba8f40b74c58a1c229c1b2478"
dependencies = [
 "chalk-derive 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)",
 "chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)",
 "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)",
 "chalk-derive 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)",
 "chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)",
 "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)",
 "lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
name = "chalk-macros"
version = "0.1.1"
source = "git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05#50f9f636123bd88d0cc1b958749981d6702e4d05"
source = "git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478#a88cad7f0a69e05ba8f40b74c58a1c229c1b2478"
dependencies = [
 "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]

@@ -162,23 +162,24 @@ dependencies = [
[[package]]
name = "chalk-rust-ir"
version = "0.1.0"
source = "git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05#50f9f636123bd88d0cc1b958749981d6702e4d05"
source = "git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478#a88cad7f0a69e05ba8f40b74c58a1c229c1b2478"
dependencies = [
 "chalk-derive 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)",
 "chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)",
 "chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)",
 "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)",
 "chalk-derive 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)",
 "chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)",
 "chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)",
 "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)",
]

[[package]]
name = "chalk-solve"
version = "0.1.0"
source = "git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05#50f9f636123bd88d0cc1b958749981d6702e4d05"
source = "git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478#a88cad7f0a69e05ba8f40b74c58a1c229c1b2478"
dependencies = [
 "chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)",
 "chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)",
 "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)",
 "chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)",
 "chalk-derive 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)",
 "chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)",
 "chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)",
 "chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)",
 "chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)",
 "ena 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "itertools 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -946,7 +947,6 @@ dependencies = [
 "ra_hir 0.1.0",
 "ra_syntax 0.1.0",
 "ra_text_edit 0.1.0",
 "rustc_lexer 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "test_utils 0.1.0",
]

@@ -1016,9 +1016,9 @@ name = "ra_hir"
version = "0.1.0"
dependencies = [
 "arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)",
 "chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)",
 "chalk-solve 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)",
 "chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)",
 "chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)",
 "chalk-solve 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)",
 "ena 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "insta 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -1180,7 +1180,7 @@ dependencies = [
 "once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "ra_parser 0.1.0",
 "ra_text_edit 0.1.0",
 "rowan 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "rowan 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc_lexer 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "smol_str 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -1445,7 +1445,7 @@ dependencies = [

[[package]]
name = "rowan"
version = "0.6.2"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
 "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -1873,12 +1873,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum cargo_metadata 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8d2d1617e838936c0d2323a65cc151e03ae19a7678dd24f72bccf27119b90a5d"
"checksum cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)" = "aa87058dce70a3ff5621797f1506cb837edd02ac4c0ae642b4542dce802908b8"
"checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
"checksum chalk-derive 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)" = "<none>"
"checksum chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)" = "<none>"
"checksum chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)" = "<none>"
"checksum chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)" = "<none>"
"checksum chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)" = "<none>"
"checksum chalk-solve 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=50f9f636123bd88d0cc1b958749981d6702e4d05)" = "<none>"
"checksum chalk-derive 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)" = "<none>"
"checksum chalk-engine 0.9.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)" = "<none>"
"checksum chalk-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)" = "<none>"
"checksum chalk-macros 0.1.1 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)" = "<none>"
"checksum chalk-rust-ir 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)" = "<none>"
"checksum chalk-solve 0.1.0 (git+https://github.com/rust-lang/chalk.git?rev=a88cad7f0a69e05ba8f40b74c58a1c229c1b2478)" = "<none>"
"checksum chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e8493056968583b0193c1bb04d6f7684586f3726992d6c573261941a895dbd68"
"checksum clicolors-control 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90082ee5dcdd64dc4e9e0d37fbf3ee325419e39c0092191e0393df65518f741e"
"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"

@@ -1993,7 +1993,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum relative-path 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bedde000f40f2921ce439ea165c9c53fd629bfa115140c72e22aceacb4a21954"
"checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e"
"checksum ron 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2ece421e0c4129b90e4a35b6f625e472e96c552136f5093a2f4fa2bbb75a62d5"
"checksum rowan 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dc2b79612dedc9004083a61448eb669d336d56690aab29fbd7249e8c8ab41d8c"
"checksum rowan 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "fc3a6fb2a35518af7cab43ec4e21ca82eb086a8b3bb1739e426dc3923d459607"
"checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783"
"checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8"
"checksum rustc_lexer 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c86aae0c77166108c01305ee1a36a1e77289d7dc6ca0a3cd91ff4992de2d16a5"
@@ -8,6 +8,6 @@ debug = 0

[profile.release]
incremental = true
debug = 1 # only line info
debug = 0 # set this to 1 or 2 to get more useful backtraces in debugger

[patch.'crates-io']
@@ -3,3 +3,6 @@ edition = "2018"
name = "ra_arena"
version = "0.1.0"
authors = ["rust-analyzer developers"]

[lib]
doctest = false

@@ -4,11 +4,13 @@ name = "ra_assists"
version = "0.1.0"
authors = ["rust-analyzer developers"]

[lib]
doctest = false

[dependencies]
format-buf = "1.0.0"
join_to_string = "0.1.3"
itertools = "0.8.0"
rustc_lexer = "0.1.0"

ra_syntax = { path = "../ra_syntax" }
ra_text_edit = { path = "../ra_text_edit" }
@@ -1,6 +1,5 @@
//! This module defines `AssistCtx` -- the API surface that is exposed to assists.

use hir::db::HirDatabase;
use hir::{db::HirDatabase, SourceAnalyzer};
use ra_db::FileRange;
use ra_fmt::{leading_indent, reindent};
use ra_syntax::{

@@ -113,6 +112,13 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
    pub(crate) fn covering_element(&self) -> SyntaxElement {
        find_covering_element(self.source_file.syntax(), self.frange.range)
    }
    pub(crate) fn source_analyzer(
        &self,
        node: &SyntaxNode,
        offset: Option<TextUnit>,
    ) -> SourceAnalyzer {
        SourceAnalyzer::new(self.db, hir::Source::new(self.frange.file_id.into(), node), offset)
    }

    pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement {
        find_covering_element(self.source_file.syntax(), range)
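Note: the new `AssistCtx::source_analyzer` helper above centralizes construction of `SourceAnalyzer`, so the assists in the following hunks shrink their call sites to `ctx.source_analyzer(node, None)`. A minimal self-contained sketch of the pattern; the types here are illustrative stand-ins, not rust-analyzer's real API:

```rust
struct Db;
struct SyntaxNode;
struct SourceAnalyzer<'a> { _db: &'a Db, _node: &'a SyntaxNode }

impl<'a> SourceAnalyzer<'a> {
    fn new(db: &'a Db, node: &'a SyntaxNode, _offset: Option<u32>) -> Self {
        SourceAnalyzer { _db: db, _node: node }
    }
}

struct AssistCtx<'a> { db: &'a Db }

impl<'a> AssistCtx<'a> {
    // Call sites shrink from `SourceAnalyzer::new(ctx.db, ...)` to
    // `ctx.source_analyzer(node, None)`.
    fn source_analyzer(&self, node: &'a SyntaxNode, offset: Option<u32>) -> SourceAnalyzer<'a> {
        SourceAnalyzer::new(self.db, node, offset)
    }
}

fn main() {
    let db = Db;
    let node = SyntaxNode;
    let ctx = AssistCtx { db: &db };
    let _analyzer = ctx.source_analyzer(&node, None);
}
```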
@@ -40,7 +40,7 @@ pub(crate) fn add_explicit_type(ctx: AssistCtx<impl HirDatabase>) -> Option<Assi
    }
    // Infer type
    let db = ctx.db;
    let analyzer = hir::SourceAnalyzer::new(db, ctx.frange.file_id, stmt.syntax(), None);
    let analyzer = ctx.source_analyzer(stmt.syntax(), None);
    let ty = analyzer.type_of(db, &expr)?;
    // Assist not applicable if the type is unknown
    if is_unknown(&ty) {
@@ -100,8 +100,7 @@ fn add_missing_impl_members_inner(
    let impl_item_list = impl_node.item_list()?;

    let trait_def = {
        let file_id = ctx.frange.file_id;
        let analyzer = hir::SourceAnalyzer::new(ctx.db, file_id, impl_node.syntax(), None);
        let analyzer = ctx.source_analyzer(impl_node.syntax(), None);

        resolve_target_trait_def(ctx.db, &analyzer, &impl_node)?
    };
@@ -158,9 +158,12 @@ fn find_struct_impl(
        let same_ty = blk.target_ty(db) == struct_ty;
        let not_trait_impl = blk.target_trait(db).is_none();

        found_new_fn = has_new_fn(impl_blk);
        if !(same_ty && not_trait_impl) {
            return false;
        }

        same_ty && not_trait_impl
        found_new_fn = has_new_fn(impl_blk);
        true
    });

    if found_new_fn {
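Note: this hunk appears to fix a logic bug: `found_new_fn` used to be recorded for every impl block visited, so a `new` in an unrelated or trait impl could wrongly block the assist. Now irrelevant blocks are rejected first, and only a matching inherent impl is checked for an existing `new`. A self-contained sketch of the corrected control flow, with illustrative types:

```rust
#[derive(Clone, Copy)]
struct Impl { same_ty: bool, is_trait_impl: bool, has_new: bool }

fn blocks_assist(impls: &[Impl]) -> bool {
    let mut found_new_fn = false;
    let _block = impls.iter().find(|blk| {
        // First filter out impls of other types and trait impls...
        if !(blk.same_ty && !blk.is_trait_impl) {
            return false;
        }
        // ...and only then check whether they already define `new`.
        found_new_fn = blk.has_new;
        true
    });
    found_new_fn
}

fn main() {
    // A `new` in a trait impl no longer blocks the assist.
    let impls = [Impl { same_ty: true, is_trait_impl: true, has_new: true }];
    assert!(!blocks_assist(&impls));
}
```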
@@ -186,9 +189,10 @@ fn has_new_fn(imp: &ast::ImplBlock) -> bool {

#[cfg(test)]
mod tests {
    use super::*;
    use crate::helpers::{check_assist, check_assist_not_applicable, check_assist_target};

    use super::*;

    #[test]
    #[rustfmt::skip]
    fn test_add_new() {

@@ -345,7 +349,7 @@ struct Foo {<|>}
impl Foo {
    fn new() -> Self {
        Self
    }
}
}",
        );

@@ -357,7 +361,7 @@ struct Foo {<|>}
impl Foo {
    fn New() -> Self {
        Self
    }
}
}",
        );
    }

@@ -376,4 +380,59 @@ struct EvenMoreIrrelevant;
struct Foo<'a, T: Foo<'a>> {}",
        );
    }

    #[test]
    fn test_unrelated_new() {
        check_assist(
            add_new,
            r##"
pub struct AstId<N: AstNode> {
    file_id: HirFileId,
    file_ast_id: FileAstId<N>,
}

impl<N: AstNode> AstId<N> {
    pub fn new(file_id: HirFileId, file_ast_id: FileAstId<N>) -> AstId<N> {
        AstId { file_id, file_ast_id }
    }
}

pub struct Source<T> {
    pub file_id: HirFileId,<|>
    pub ast: T,
}

impl<T> Source<T> {
    pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
        Source { file_id: self.file_id, ast: f(self.ast) }
    }
}
"##,
            r##"
pub struct AstId<N: AstNode> {
    file_id: HirFileId,
    file_ast_id: FileAstId<N>,
}

impl<N: AstNode> AstId<N> {
    pub fn new(file_id: HirFileId, file_ast_id: FileAstId<N>) -> AstId<N> {
        AstId { file_id, file_ast_id }
    }
}

pub struct Source<T> {
    pub file_id: HirFileId,
    pub ast: T,
}

impl<T> Source<T> {
    pub fn new(file_id: HirFileId, ast: T) -> Self { Self { file_id, ast } }<|>

    pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
        Source { file_id: self.file_id, ast: f(self.ast) }
    }
}
"##,
        );
    }
}
@@ -47,8 +47,7 @@ pub(crate) fn fill_match_arms(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist

    let expr = match_expr.expr()?;
    let enum_def = {
        let file_id = ctx.frange.file_id;
        let analyzer = hir::SourceAnalyzer::new(ctx.db, file_id, expr.syntax(), None);
        let analyzer = ctx.source_analyzer(expr.syntax(), None);
        resolve_enum_def(ctx.db, &analyzer, &expr)?
    };
    let variant_list = enum_def.variant_list()?;
@@ -45,7 +45,7 @@ pub(crate) fn inline_local_varialbe(ctx: AssistCtx<impl HirDatabase>) -> Option<
    } else {
        let_stmt.syntax().text_range()
    };
    let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, bind_pat.syntax(), None);
    let analyzer = ctx.source_analyzer(bind_pat.syntax(), None);
    let refs = analyzer.find_all_refs(&bind_pat);

    let mut wrap_in_parens = vec![true; refs.len()];
@@ -1,9 +1,9 @@
use hir::db::HirDatabase;
use ra_syntax::{
    ast, AstToken,
    SyntaxKind::{RAW_STRING, STRING},
    TextRange, TextUnit,
    TextUnit,
};
use rustc_lexer;

use crate::{Assist, AssistCtx, AssistId};

@@ -23,32 +23,16 @@ use crate::{Assist, AssistCtx, AssistId};
// }
// ```
pub(crate) fn make_raw_string(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
    let token = ctx.find_token_at_offset(STRING)?;
    let text = token.text().as_str();
    let usual_string_range = find_usual_string_range(text)?;
    let start_of_inside = usual_string_range.start().to_usize() + 1;
    let end_of_inside = usual_string_range.end().to_usize();
    let inside_str = &text[start_of_inside..end_of_inside];
    let mut unescaped = String::with_capacity(inside_str.len());
    let mut error = Ok(());
    rustc_lexer::unescape::unescape_str(
        inside_str,
        &mut |_, unescaped_char| match unescaped_char {
            Ok(c) => unescaped.push(c),
            Err(_) => error = Err(()),
        },
    );
    if error.is_err() {
        return None;
    }
    let token = ctx.find_token_at_offset(STRING).and_then(ast::String::cast)?;
    let value = token.value()?;
    ctx.add_assist(AssistId("make_raw_string"), "make raw string", |edit| {
        edit.target(token.text_range());
        let max_hash_streak = count_hashes(&unescaped);
        edit.target(token.syntax().text_range());
        let max_hash_streak = count_hashes(&value);
        let mut hashes = String::with_capacity(max_hash_streak + 1);
        for _ in 0..hashes.capacity() {
            hashes.push('#');
        }
        edit.replace(token.text_range(), format!("r{}\"{}\"{}", hashes, unescaped, hashes));
        edit.replace(token.syntax().text_range(), format!("r{}\"{}\"{}", hashes, value, hashes));
    })
}

@@ -68,17 +52,13 @@ pub(crate) fn make_raw_string(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist
// }
// ```
pub(crate) fn make_usual_string(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
    let token = ctx.find_token_at_offset(RAW_STRING)?;
    let text = token.text().as_str();
    let usual_string_range = find_usual_string_range(text)?;
    let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?;
    let value = token.value()?;
    ctx.add_assist(AssistId("make_usual_string"), "make usual string", |edit| {
        edit.target(token.text_range());
        edit.target(token.syntax().text_range());
        // parse inside string to escape `"`
        let start_of_inside = usual_string_range.start().to_usize() + 1;
        let end_of_inside = usual_string_range.end().to_usize();
        let inside_str = &text[start_of_inside..end_of_inside];
        let escaped = inside_str.escape_default().to_string();
        edit.replace(token.text_range(), format!("\"{}\"", escaped));
        let escaped = value.escape_default().to_string();
        edit.replace(token.syntax().text_range(), format!("\"{}\"", escaped));
    })
}

@@ -132,6 +112,7 @@ pub(crate) fn remove_hash(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
        edit.target(token.text_range());
        let result = &text[2..text.len() - 1];
        let result = if result.starts_with('\"') {
            // FIXME: this logic is wrong; not only the last hash has to be handled specially
            // no more hash, escape
            let internal_str = &result[1..result.len() - 1];
            format!("\"{}\"", internal_str.escape_default().to_string())

@@ -154,20 +135,6 @@ fn count_hashes(s: &str) -> usize {
    max_hash_streak
}

fn find_usual_string_range(s: &str) -> Option<TextRange> {
    let left_quote = s.find('"')?;
    let right_quote = s.rfind('"')?;
    if left_quote == right_quote {
        // `s` only contains one quote
        None
    } else {
        Some(TextRange::from_to(
            TextUnit::from(left_quote as u32),
            TextUnit::from(right_quote as u32),
        ))
    }
}

#[cfg(test)]
mod test {
    use super::*;
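Note: the rewrite replaces manual quote-hunting (`find_usual_string_range`) and hand-rolled unescaping via `rustc_lexer` with the typed token API (`ast::String::cast` plus `value()`). The raw-string construction itself still picks one more `#` than the longest hash streak in the value. A hedged, self-contained sketch of that step; the helper bodies here are illustrative, not the file's exact code:

```rust
fn count_hashes(s: &str) -> usize {
    // Longest run of '#' that immediately follows a '"' inside the value.
    let mut max = 0;
    for chunk in s.split('"').skip(1) {
        max = max.max(chunk.chars().take_while(|&c| c == '#').count());
    }
    max
}

fn to_raw_string(value: &str) -> String {
    // One more '#' than the longest streak keeps the raw string from
    // terminating early at a `"#` sequence inside the value.
    let hashes = "#".repeat(count_hashes(value) + 1);
    format!("r{h}\"{v}\"{h}", h = hashes, v = value)
}

fn main() {
    assert_eq!(to_raw_string(r#"a "quoted" bit"#), "r#\"a \"quoted\" bit\"#");
}
```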
@@ -4,6 +4,9 @@ name = "ra_batch"
version = "0.1.0"
authors = ["rust-analyzer developers"]

[lib]
doctest = false

[dependencies]
log = "0.4.5"
rustc-hash = "1.0"

@@ -4,6 +4,9 @@ name = "ra_cfg"
version = "0.1.0"
authors = ["rust-analyzer developers"]

[lib]
doctest = false

[dependencies]
rustc-hash = "1.0.1"
@@ -169,12 +169,12 @@ pub fn run(
    println!(
        "Expressions of unknown type: {} ({}%)",
        num_exprs_unknown,
        (num_exprs_unknown * 100 / num_exprs)
        if num_exprs > 0 { (num_exprs_unknown * 100 / num_exprs) } else { 100 }
    );
    println!(
        "Expressions of partially unknown type: {} ({}%)",
        num_exprs_partially_unknown,
        (num_exprs_partially_unknown * 100 / num_exprs)
        if num_exprs > 0 { (num_exprs_partially_unknown * 100 / num_exprs) } else { 100 }
    );
    println!("Type mismatches: {}", num_type_mismatches);
    println!("Inference: {:?}, {}", inference_time.elapsed(), ra_prof::memory_usage());
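Note: the guards above avoid a divide-by-zero panic when the stats run over a crate containing no expressions; the degenerate case is reported as 100%. The same guard, as a tiny standalone sketch:

```rust
// Percentage with a guard against an empty total, mirroring the diff above.
fn percent(part: usize, total: usize) -> usize {
    if total > 0 { part * 100 / total } else { 100 }
}

fn main() {
    assert_eq!(percent(1, 4), 25);
    assert_eq!(percent(0, 0), 100); // degenerate case reported as 100%
}
```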
@@ -4,6 +4,9 @@ name = "ra_db"
version = "0.1.0"
authors = ["rust-analyzer developers"]

[lib]
doctest = false

[dependencies]
salsa = "0.13.0"
relative-path = "1.0.0"

@@ -5,6 +5,9 @@ version = "0.1.0"
authors = ["rust-analyzer developers"]
publish = false

[lib]
doctest = false

[dependencies]
itertools = "0.8.0"
@@ -4,6 +4,9 @@ name = "ra_hir"
version = "0.1.0"
authors = ["rust-analyzer developers"]

[lib]
doctest = false

[dependencies]
arrayvec = "0.5.1"
log = "0.4.5"

@@ -23,9 +26,9 @@ hir_def = { path = "../ra_hir_def", package = "ra_hir_def" }
test_utils = { path = "../test_utils" }
ra_prof = { path = "../ra_prof" }

chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "50f9f636123bd88d0cc1b958749981d6702e4d05" }
chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "50f9f636123bd88d0cc1b958749981d6702e4d05" }
chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "50f9f636123bd88d0cc1b958749981d6702e4d05" }
chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "a88cad7f0a69e05ba8f40b74c58a1c229c1b2478" }
chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "a88cad7f0a69e05ba8f40b74c58a1c229c1b2478" }
chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "a88cad7f0a69e05ba8f40b74c58a1c229c1b2478" }
lalrpop-intern = "0.15.1"

[dev-dependencies]
@@ -11,7 +11,7 @@ use hir_def::{
    body::scope::ExprScopes,
    builtin_type::BuiltinType,
    type_ref::{Mutability, TypeRef},
    CrateModuleId, LocalEnumVariantId, LocalStructFieldId, ModuleId, UnionId,
    CrateModuleId, ImplId, LocalEnumVariantId, LocalStructFieldId, ModuleId, UnionId,
};
use hir_expand::{
    diagnostics::DiagnosticSink,

@@ -29,7 +29,6 @@ use crate::{
        AstItemDef, ConstId, EnumId, FunctionId, MacroDefId, StaticId, StructId, TraitId,
        TypeAliasId,
    },
    impl_block::ImplBlock,
    resolve::{Resolver, Scope, TypeNs},
    traits::TraitData,
    ty::{InferenceResult, Namespace, TraitRef},

@@ -243,12 +242,8 @@ impl Module {
    }

    pub fn impl_blocks(self, db: &impl DefDatabase) -> Vec<ImplBlock> {
        let module_impl_blocks = db.impls_in_module(self);
        module_impl_blocks
            .impls
            .iter()
            .map(|(impl_id, _)| ImplBlock::from_id(self, impl_id))
            .collect()
        let def_map = db.crate_def_map(self.id.krate);
        def_map[self.id.module_id].impls.iter().copied().map(ImplBlock::from).collect()
    }

    fn with_module_id(self, module_id: CrateModuleId) -> Module {

@@ -693,8 +688,7 @@ impl Function {

    /// The containing impl block, if this is a method.
    pub fn impl_block(self, db: &impl DefDatabase) -> Option<ImplBlock> {
        let module_impls = db.impls_in_module(self.module(db));
        ImplBlock::containing(module_impls, self.into())
        ImplBlock::containing(db, self.into())
    }

    /// The containing trait, if this is a trait method definition.

@@ -759,8 +753,7 @@ impl Const {

    /// The containing impl block, if this is a method.
    pub fn impl_block(self, db: &impl DefDatabase) -> Option<ImplBlock> {
        let module_impls = db.impls_in_module(self.module(db));
        ImplBlock::containing(module_impls, self.into())
        ImplBlock::containing(db, self.into())
    }

    pub fn parent_trait(self, db: &impl DefDatabase) -> Option<Trait> {

@@ -973,8 +966,7 @@ impl TypeAlias {

    /// The containing impl block, if this is a method.
    pub fn impl_block(self, db: &impl DefDatabase) -> Option<ImplBlock> {
        let module_impls = db.impls_in_module(self.module(db));
        ImplBlock::containing(module_impls, self.into())
        ImplBlock::containing(db, self.into())
    }

    /// The containing trait, if this is a trait method definition.

@@ -1137,3 +1129,8 @@ pub struct GenericParam {
    pub(crate) parent: GenericDef,
    pub(crate) idx: u32,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ImplBlock {
    pub(crate) id: ImplId,
}
@ -10,22 +10,23 @@ use crate::{
|
||||
debug::HirDebugDatabase,
|
||||
generics::{GenericDef, GenericParams},
|
||||
ids,
|
||||
impl_block::{ImplBlock, ImplSourceMap, ModuleImplBlocks},
|
||||
lang_item::{LangItemTarget, LangItems},
|
||||
traits::TraitData,
|
||||
ty::{
|
||||
method_resolution::CrateImplBlocks, traits::Impl, CallableDef, FnSig, GenericPredicate,
|
||||
InferenceResult, Namespace, Substs, Ty, TypableDef, TypeCtor,
|
||||
method_resolution::CrateImplBlocks,
|
||||
traits::{AssocTyValue, Impl},
|
||||
CallableDef, FnSig, GenericPredicate, InferenceResult, Namespace, Substs, Ty, TypableDef,
|
||||
TypeCtor,
|
||||
},
|
||||
type_alias::TypeAliasData,
|
||||
Const, ConstData, Crate, DefWithBody, FnData, Function, Module, Static, StructField, Trait,
|
||||
TypeAlias,
|
||||
Const, ConstData, Crate, DefWithBody, FnData, Function, ImplBlock, Module, Static, StructField,
|
||||
Trait, TypeAlias,
|
||||
};
|
||||
|
||||
pub use hir_def::db::{
|
||||
BodyQuery, BodyWithSourceMapQuery, CrateDefMapQuery, DefDatabase2, DefDatabase2Storage,
|
||||
EnumDataQuery, ExprScopesQuery, InternDatabase, InternDatabaseStorage, RawItemsQuery,
|
||||
RawItemsWithSourceMapQuery, StructDataQuery,
|
||||
EnumDataQuery, ExprScopesQuery, ImplDataQuery, InternDatabase, InternDatabaseStorage,
|
||||
RawItemsQuery, RawItemsWithSourceMapQuery, StructDataQuery,
|
||||
};
|
||||
pub use hir_expand::db::{
|
||||
AstDatabase, AstDatabaseStorage, AstIdMapQuery, MacroArgQuery, MacroDefQuery, MacroExpandQuery,
|
||||
@ -42,15 +43,6 @@ pub trait DefDatabase: HirDebugDatabase + DefDatabase2 {
|
||||
#[salsa::invoke(crate::traits::TraitItemsIndex::trait_items_index)]
|
||||
fn trait_items_index(&self, module: Module) -> crate::traits::TraitItemsIndex;
|
||||
|
||||
#[salsa::invoke(ModuleImplBlocks::impls_in_module_with_source_map_query)]
|
||||
fn impls_in_module_with_source_map(
|
||||
&self,
|
||||
module: Module,
|
||||
) -> (Arc<ModuleImplBlocks>, Arc<ImplSourceMap>);
|
||||
|
||||
#[salsa::invoke(ModuleImplBlocks::impls_in_module_query)]
|
||||
fn impls_in_module(&self, module: Module) -> Arc<ModuleImplBlocks>;
|
||||
|
||||
#[salsa::invoke(crate::generics::GenericParams::generic_params_query)]
|
||||
fn generic_params(&self, def: GenericDef) -> Arc<GenericParams>;
|
||||
|
||||
@ -128,27 +120,43 @@ pub trait HirDatabase: DefDatabase + AstDatabase {
|
||||
#[salsa::interned]
|
||||
fn intern_type_ctor(&self, type_ctor: TypeCtor) -> ids::TypeCtorId;
|
||||
#[salsa::interned]
|
||||
fn intern_impl(&self, impl_: Impl) -> ids::GlobalImplId;
|
||||
fn intern_chalk_impl(&self, impl_: Impl) -> ids::GlobalImplId;
|
||||
#[salsa::interned]
|
||||
fn intern_assoc_ty_value(&self, assoc_ty_value: AssocTyValue) -> ids::AssocTyValueId;
|
||||
|
||||
#[salsa::invoke(crate::ty::traits::chalk::associated_ty_data_query)]
|
||||
fn associated_ty_data(&self, id: chalk_ir::TypeId) -> Arc<chalk_rust_ir::AssociatedTyDatum>;
|
||||
fn associated_ty_data(
|
||||
&self,
|
||||
id: chalk_ir::TypeId,
|
||||
) -> Arc<chalk_rust_ir::AssociatedTyDatum<chalk_ir::family::ChalkIr>>;
|
||||
|
||||
#[salsa::invoke(crate::ty::traits::chalk::trait_datum_query)]
|
||||
fn trait_datum(
|
||||
&self,
|
||||
krate: Crate,
|
||||
trait_id: chalk_ir::TraitId,
|
||||
) -> Arc<chalk_rust_ir::TraitDatum>;
|
||||
) -> Arc<chalk_rust_ir::TraitDatum<chalk_ir::family::ChalkIr>>;
|
||||
|
||||
#[salsa::invoke(crate::ty::traits::chalk::struct_datum_query)]
|
||||
fn struct_datum(
|
||||
&self,
|
||||
krate: Crate,
|
||||
struct_id: chalk_ir::StructId,
|
||||
) -> Arc<chalk_rust_ir::StructDatum>;
|
||||
) -> Arc<chalk_rust_ir::StructDatum<chalk_ir::family::ChalkIr>>;
|
||||
|
||||
#[salsa::invoke(crate::ty::traits::chalk::impl_datum_query)]
|
||||
fn impl_datum(&self, krate: Crate, impl_id: chalk_ir::ImplId) -> Arc<chalk_rust_ir::ImplDatum>;
|
||||
fn impl_datum(
|
||||
&self,
|
||||
krate: Crate,
|
||||
impl_id: chalk_ir::ImplId,
|
||||
) -> Arc<chalk_rust_ir::ImplDatum<chalk_ir::family::ChalkIr>>;
|
||||
|
||||
#[salsa::invoke(crate::ty::traits::chalk::associated_ty_value_query)]
|
||||
fn associated_ty_value(
|
||||
&self,
|
||||
krate: Crate,
|
||||
id: chalk_rust_ir::AssociatedTyValueId,
|
||||
) -> Arc<chalk_rust_ir::AssociatedTyValue<chalk_ir::family::ChalkIr>>;
|
||||
|
||||
#[salsa::invoke(crate::ty::traits::trait_solve_query)]
|
||||
fn trait_solve(
|
||||
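Note: the chalk rev bump threads a type-family parameter (`chalk_ir::family::ChalkIr`) through every datum these queries return. A minimal sketch of what such a family parameter looks like; the names are illustrative, not chalk's actual API:

```rust
// Data types become generic over a family marker instead of being concrete.
trait TypeFamily { type InternedTy; }

struct ChalkIr;
impl TypeFamily for ChalkIr { type InternedTy = u32; }

struct TraitDatum<TF: TypeFamily> { self_ty: TF::InternedTy }

fn main() {
    let datum: TraitDatum<ChalkIr> = TraitDatum { self_ty: 42 };
    let _ = datum.self_ty;
}
```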
@@ -3,9 +3,9 @@
//! It's unclear if we need this long-term, but it's definitely useful while we
//! are splitting the hir.

use hir_def::{AdtId, DefWithBodyId, EnumVariantId, ModuleDefId};
use hir_def::{AdtId, AssocItemId, DefWithBodyId, EnumVariantId, ModuleDefId};

use crate::{Adt, DefWithBody, EnumVariant, ModuleDef};
use crate::{Adt, AssocItem, DefWithBody, EnumVariant, ModuleDef};

macro_rules! from_id {
    ($(($id:path, $ty:path)),*) => {$(

@@ -27,6 +27,7 @@ from_id![
    (hir_def::StaticId, crate::Static),
    (hir_def::ConstId, crate::Const),
    (hir_def::FunctionId, crate::Function),
    (hir_def::ImplId, crate::ImplBlock),
    (hir_expand::MacroDefId, crate::MacroDef)
];

@@ -71,3 +72,13 @@ impl From<DefWithBody> for DefWithBodyId {
        }
    }
}

impl From<AssocItemId> for AssocItem {
    fn from(def: AssocItemId) -> Self {
        match def {
            AssocItemId::FunctionId(it) => AssocItem::Function(it.into()),
            AssocItemId::TypeAliasId(it) => AssocItem::TypeAlias(it.into()),
            AssocItemId::ConstId(it) => AssocItem::Const(it.into()),
        }
    }
}
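Note: the added `From<AssocItemId> for AssocItem` mirrors each ID variant into the corresponding code-model variant. The shape of the conversion as a self-contained sketch, with stand-in types rather than hir_def's real interned IDs:

```rust
#[derive(Debug)] struct FunctionId(u32);
#[derive(Debug)] struct ConstId(u32);

#[derive(Debug)]
enum AssocItemId { FunctionId(FunctionId), ConstId(ConstId) }

#[derive(Debug)]
enum AssocItem { Function(FunctionId), Const(ConstId) }

impl From<AssocItemId> for AssocItem {
    fn from(def: AssocItemId) -> Self {
        match def {
            AssocItemId::FunctionId(it) => AssocItem::Function(it),
            AssocItemId::ConstId(it) => AssocItem::Const(it),
        }
    }
}

fn main() {
    let item: AssocItem = AssocItemId::FunctionId(FunctionId(7)).into();
    println!("{item:?}");
}
```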
@@ -1,7 +1,7 @@
//! FIXME: write short doc here

use hir_def::{ModuleId, StructId, StructOrUnionId, UnionId};
use hir_expand::name::AsName;
use hir_expand::{name::AsName, AstId, MacroDefId, MacroDefKind};
use ra_syntax::{
    ast::{self, AstNode, NameOwner},
    match_ast,

@@ -11,8 +11,8 @@ use crate::{
    db::{AstDatabase, DefDatabase, HirDatabase},
    ids::{AstItemDef, LocationCtx},
    Const, DefWithBody, Enum, EnumVariant, FieldSource, Function, HasBody, HasSource, ImplBlock,
    Local, Module, ModuleSource, Source, Static, Struct, StructField, Trait, TypeAlias, Union,
    VariantDef,
    Local, MacroDef, Module, ModuleSource, Source, Static, Struct, StructField, Trait, TypeAlias,
    Union, VariantDef,
};

pub trait FromSource: Sized {

@@ -77,19 +77,28 @@ impl FromSource for TypeAlias {
        Some(TypeAlias { id })
    }
}
// FIXME: add impl FromSource for MacroDef

impl FromSource for MacroDef {
    type Ast = ast::MacroCall;
    fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
        let kind = MacroDefKind::Declarative;

        let module_src = ModuleSource::from_child_node(db, src.as_ref().map(|it| it.syntax()));
        let module = Module::from_definition(db, Source::new(src.file_id, module_src))?;
        let krate = module.krate().crate_id();

        let ast_id = AstId::new(src.file_id, db.ast_id_map(src.file_id).ast_id(&src.ast));

        let id: MacroDefId = MacroDefId { krate, ast_id, kind };
        Some(MacroDef { id })
    }
}

impl FromSource for ImplBlock {
    type Ast = ast::ImplBlock;
    fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
        let module_src = crate::ModuleSource::from_child_node(
            db,
            src.file_id.original_file(db),
            &src.ast.syntax(),
        );
        let module = Module::from_definition(db, Source { file_id: src.file_id, ast: module_src })?;
        let impls = module.impl_blocks(db);
        impls.into_iter().find(|b| b.source(db) == src)
        let id = from_source(db, src)?;
        Some(ImplBlock { id })
    }
}

@@ -202,9 +211,8 @@ where
    N: AstNode,
    DEF: AstItemDef<N>,
{
    let module_src =
        crate::ModuleSource::from_child_node(db, src.file_id.original_file(db), &src.ast.syntax());
    let module = Module::from_definition(db, Source { file_id: src.file_id, ast: module_src })?;
    let module_src = ModuleSource::from_child_node(db, src.as_ref().map(|it| it.syntax()));
    let module = Module::from_definition(db, Source::new(src.file_id, module_src))?;
    let ctx = LocationCtx::new(db, module.id, src.file_id);
    Some(DEF::from_ast(ctx, &src.ast))
}
@@ -37,3 +37,9 @@ impl_intern_key!(TypeCtorId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct GlobalImplId(salsa::InternId);
impl_intern_key!(GlobalImplId);

/// This exists just for Chalk, because it needs a unique ID for each associated
/// type value in an impl (even synthetic ones).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct AssocTyValueId(salsa::InternId);
impl_intern_key!(AssocTyValueId);
@@ -1,88 +1,38 @@
//! FIXME: write short doc here

use rustc_hash::FxHashMap;
use std::sync::Arc;

use hir_def::{attr::Attr, type_ref::TypeRef};
use hir_expand::hygiene::Hygiene;
use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId};
use ra_cfg::CfgOptions;
use ra_syntax::{
    ast::{self, AstNode},
    AstPtr,
};
use hir_def::{type_ref::TypeRef, AstItemDef};
use ra_syntax::ast::{self};

use crate::{
    code_model::{Module, ModuleSource},
    db::{AstDatabase, DefDatabase, HirDatabase},
    generics::HasGenericParams,
    ids::LocationCtx,
    ids::MacroCallLoc,
    resolve::Resolver,
    ty::Ty,
    AssocItem, AstId, Const, Function, HasSource, HirFileId, MacroFileKind, Path, Source, TraitRef,
    TypeAlias,
    AssocItem, Crate, HasSource, ImplBlock, Module, Source, TraitRef,
};

#[derive(Debug, Default, PartialEq, Eq)]
pub struct ImplSourceMap {
    map: ArenaMap<ImplId, Source<AstPtr<ast::ImplBlock>>>,
}

impl ImplSourceMap {
    fn insert(&mut self, impl_id: ImplId, file_id: HirFileId, impl_block: &ast::ImplBlock) {
        let source = Source { file_id, ast: AstPtr::new(impl_block) };
        self.map.insert(impl_id, source)
    }

    pub fn get(&self, db: &impl AstDatabase, impl_id: ImplId) -> Source<ast::ImplBlock> {
        let src = self.map[impl_id];
        let root = src.file_syntax(db);
        src.map(|ptr| ptr.to_node(&root))
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ImplBlock {
    module: Module,
    impl_id: ImplId,
}

impl HasSource for ImplBlock {
    type Ast = ast::ImplBlock;
    fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::ImplBlock> {
        let source_map = db.impls_in_module_with_source_map(self.module).1;
        source_map.get(db, self.impl_id)
        self.id.source(db)
    }
}

impl ImplBlock {
    pub(crate) fn containing(
        module_impl_blocks: Arc<ModuleImplBlocks>,
        item: AssocItem,
    ) -> Option<ImplBlock> {
        let impl_id = *module_impl_blocks.impls_by_def.get(&item)?;
        Some(ImplBlock { module: module_impl_blocks.module, impl_id })
    }

    pub(crate) fn from_id(module: Module, impl_id: ImplId) -> ImplBlock {
        ImplBlock { module, impl_id }
    }

    pub fn id(&self) -> ImplId {
        self.impl_id
    }

    pub fn module(&self) -> Module {
        self.module
    pub(crate) fn containing(db: &impl DefDatabase, item: AssocItem) -> Option<ImplBlock> {
        let module = item.module(db);
        let crate_def_map = db.crate_def_map(module.id.krate);
        crate_def_map[module.id.module_id].impls.iter().copied().map(ImplBlock::from).find(|it| {
            db.impl_data(it.id).items().iter().copied().map(AssocItem::from).any(|it| it == item)
        })
    }

    pub fn target_trait(&self, db: &impl DefDatabase) -> Option<TypeRef> {
        db.impls_in_module(self.module).impls[self.impl_id].target_trait().cloned()
        db.impl_data(self.id).target_trait().cloned()
    }

    pub fn target_type(&self, db: &impl DefDatabase) -> TypeRef {
        db.impls_in_module(self.module).impls[self.impl_id].target_type().clone()
        db.impl_data(self.id).target_type().clone()
    }

    pub fn target_ty(&self, db: &impl HirDatabase) -> Ty {

@@ -95,15 +45,23 @@ impl ImplBlock {
    }

    pub fn items(&self, db: &impl DefDatabase) -> Vec<AssocItem> {
        db.impls_in_module(self.module).impls[self.impl_id].items().to_vec()
        db.impl_data(self.id).items().iter().map(|it| (*it).into()).collect()
    }

    pub fn is_negative(&self, db: &impl DefDatabase) -> bool {
        db.impls_in_module(self.module).impls[self.impl_id].negative
        db.impl_data(self.id).is_negative()
    }

    pub fn module(&self, db: &impl DefDatabase) -> Module {
        self.id.module(db).into()
    }

    pub fn krate(&self, db: &impl DefDatabase) -> Crate {
        Crate { crate_id: self.module(db).id.krate }
    }

    pub(crate) fn resolver(&self, db: &impl DefDatabase) -> Resolver {
        let r = self.module().resolver(db);
        let r = self.module(db).resolver(db);
        // add generic params, if present
        let p = self.generic_params(db);
        let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };

@@ -111,175 +69,3 @@ impl ImplBlock {
        r
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ImplData {
    target_trait: Option<TypeRef>,
    target_type: TypeRef,
    items: Vec<AssocItem>,
    negative: bool,
}

impl ImplData {
    pub(crate) fn from_ast(
        db: &(impl DefDatabase + AstDatabase),
        file_id: HirFileId,
        module: Module,
        node: &ast::ImplBlock,
    ) -> Self {
        let target_trait = node.target_trait().map(TypeRef::from_ast);
        let target_type = TypeRef::from_ast_opt(node.target_type());
        let ctx = LocationCtx::new(db, module.id, file_id);
        let negative = node.is_negative();
        let items = if let Some(item_list) = node.item_list() {
            item_list
                .impl_items()
                .map(|item_node| match item_node {
                    ast::ImplItem::FnDef(it) => Function { id: ctx.to_def(&it) }.into(),
                    ast::ImplItem::ConstDef(it) => Const { id: ctx.to_def(&it) }.into(),
                    ast::ImplItem::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(&it) }.into(),
                })
                .collect()
        } else {
            Vec::new()
        };
        ImplData { target_trait, target_type, items, negative }
    }

    pub fn target_trait(&self) -> Option<&TypeRef> {
        self.target_trait.as_ref()
    }

    pub fn target_type(&self) -> &TypeRef {
        &self.target_type
    }

    pub fn items(&self) -> &[AssocItem] {
        &self.items
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ImplId(pub RawId);
impl_arena_id!(ImplId);

/// The collection of impl blocks is a two-step process: first we collect the
/// blocks per-module; then we build an index of all impl blocks in the crate.
/// This way, we avoid having to do this process for the whole crate whenever
/// a file is changed; as long as the impl blocks in the file don't change,
/// we don't need to do the second step again.
#[derive(Debug, PartialEq, Eq)]
pub struct ModuleImplBlocks {
    pub(crate) module: Module,
    pub(crate) impls: Arena<ImplId, ImplData>,
    impls_by_def: FxHashMap<AssocItem, ImplId>,
}

impl ModuleImplBlocks {
    pub(crate) fn impls_in_module_with_source_map_query(
        db: &(impl DefDatabase + AstDatabase),
        module: Module,
    ) -> (Arc<ModuleImplBlocks>, Arc<ImplSourceMap>) {
        let mut source_map = ImplSourceMap::default();
        let crate_graph = db.crate_graph();
        let cfg_options = crate_graph.cfg_options(module.id.krate);

        let result = ModuleImplBlocks::collect(db, cfg_options, module, &mut source_map);
        (Arc::new(result), Arc::new(source_map))
    }

    pub(crate) fn impls_in_module_query(
        db: &impl DefDatabase,
        module: Module,
    ) -> Arc<ModuleImplBlocks> {
        db.impls_in_module_with_source_map(module).0
    }

    fn collect(
        db: &(impl DefDatabase + AstDatabase),
        cfg_options: &CfgOptions,
        module: Module,
        source_map: &mut ImplSourceMap,
    ) -> Self {
        let mut m = ModuleImplBlocks {
            module,
            impls: Arena::default(),
            impls_by_def: FxHashMap::default(),
        };

        let src = m.module.definition_source(db);
        match &src.ast {
            ModuleSource::SourceFile(node) => {
                m.collect_from_item_owner(db, cfg_options, source_map, node, src.file_id)
            }
            ModuleSource::Module(node) => {
                let item_list = node.item_list().expect("inline module should have item list");
                m.collect_from_item_owner(db, cfg_options, source_map, &item_list, src.file_id)
            }
        };
        m
    }

    fn collect_from_item_owner(
        &mut self,
        db: &(impl DefDatabase + AstDatabase),
        cfg_options: &CfgOptions,
        source_map: &mut ImplSourceMap,
        owner: &dyn ast::ModuleItemOwner,
        file_id: HirFileId,
    ) {
        let hygiene = Hygiene::new(db, file_id);
        for item in owner.items_with_macros() {
            match item {
                ast::ItemOrMacro::Item(ast::ModuleItem::ImplBlock(impl_block_ast)) => {
                    let attrs = Attr::from_attrs_owner(&impl_block_ast, &hygiene);
                    if attrs.map_or(false, |attrs| {
                        attrs.iter().any(|attr| attr.is_cfg_enabled(cfg_options) == Some(false))
                    }) {
                        continue;
                    }

                    let impl_block = ImplData::from_ast(db, file_id, self.module, &impl_block_ast);
                    let id = self.impls.alloc(impl_block);
                    for &impl_item in &self.impls[id].items {
                        self.impls_by_def.insert(impl_item, id);
                    }

                    source_map.insert(id, file_id, &impl_block_ast);
                }
                ast::ItemOrMacro::Item(_) => (),
                ast::ItemOrMacro::Macro(macro_call) => {
                    let attrs = Attr::from_attrs_owner(&macro_call, &hygiene);
                    if attrs.map_or(false, |attrs| {
                        attrs.iter().any(|attr| attr.is_cfg_enabled(cfg_options) == Some(false))
                    }) {
                        continue;
                    }

                    //FIXME: we should really cut down on the boilerplate required to process a macro
                    let ast_id = AstId::new(file_id, db.ast_id_map(file_id).ast_id(&macro_call));
                    if let Some(path) =
                        macro_call.path().and_then(|path| Path::from_src(path, &hygiene))
                    {
                        if let Some(def) = self.module.resolver(db).resolve_path_as_macro(db, &path)
                        {
                            let call_id = db.intern_macro(MacroCallLoc { def: def.id, ast_id });
                            let file_id = call_id.as_file(MacroFileKind::Items);
                            if let Some(item_list) =
                                db.parse_or_expand(file_id).and_then(ast::MacroItems::cast)
                            {
                                self.collect_from_item_owner(
                                    db,
                                    cfg_options,
                                    source_map,
                                    &item_list,
                                    file_id,
                                )
                            }
                        }
                    }
                }
            }
        }
    }
}
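Note: with `ModuleImplBlocks` and `ImplSourceMap` gone, `ImplBlock::containing` now scans the module's impls from the `crate_def_map` and picks the one whose items include the given associated item. A simplified standalone sketch of that lookup, using stand-in types:

```rust
#[derive(Clone, Copy, PartialEq)]
struct Item(u32);

struct ImplData { items: Vec<Item> }

// Returns the index of the impl block containing `item`, if any.
fn containing(impls: &[ImplData], item: Item) -> Option<usize> {
    impls.iter().position(|data| data.items.iter().any(|&it| it == item))
}

fn main() {
    let impls = [
        ImplData { items: vec![Item(1)] },
        ImplData { items: vec![Item(2), Item(3)] },
    ];
    assert_eq!(containing(&impls, Item(3)), Some(1));
}
```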
@@ -25,7 +25,7 @@ impl LangItemTarget {
        Some(match self {
            LangItemTarget::Enum(e) => e.module(db).krate(),
            LangItemTarget::Function(f) => f.module(db).krate(),
            LangItemTarget::ImplBlock(i) => i.module().krate(),
            LangItemTarget::ImplBlock(i) => i.krate(db),
            LangItemTarget::Static(s) => s.module(db).krate(),
            LangItemTarget::Struct(s) => s.module(db).krate(),
            LangItemTarget::Trait(t) => t.module(db).krate(),
@@ -54,12 +54,11 @@ mod test_db;
#[cfg(test)]
mod marks;

use hir_expand::AstId;

use crate::{ids::MacroFileKind, resolve::Resolver};
use crate::resolve::Resolver;

pub use crate::{
    adt::VariantDef,
    code_model::ImplBlock,
    code_model::{
        attrs::{AttrDef, Attrs},
        docs::{DocDef, Docs, Documentation},

@@ -72,7 +71,6 @@ pub use crate::{
    from_source::FromSource,
    generics::GenericDef,
    ids::{HirFileId, MacroCallId, MacroCallLoc, MacroDefId, MacroFile},
    impl_block::ImplBlock,
    resolve::ScopeDef,
    source_binder::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer},
    ty::{
@@ -15,9 +15,8 @@ use crate::{
    db::{DefDatabase, HirDatabase},
    expr::{ExprScopes, PatId, ScopeId},
    generics::GenericParams,
    impl_block::ImplBlock,
    Adt, Const, DefWithBody, Enum, EnumVariant, Function, Local, MacroDef, ModuleDef, PerNs,
    Static, Struct, Trait, TypeAlias,
    Adt, Const, DefWithBody, Enum, EnumVariant, Function, ImplBlock, Local, MacroDef, ModuleDef,
    PerNs, Static, Struct, Trait, TypeAlias,
};

#[derive(Debug, Clone, Default)]
@@ -11,13 +11,12 @@ use hir_def::{
    expr::{ExprId, PatId},
    path::known,
};
use hir_expand::{name::AsName, Source};
use ra_db::FileId;
use hir_expand::{name::AsName, AstId, MacroCallId, MacroCallLoc, MacroFileKind, Source};
use ra_syntax::{
    ast::{self, AstNode},
    match_ast, AstPtr,
    SyntaxKind::*,
    SyntaxNode, SyntaxNodePtr, TextRange, TextUnit,
    SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextUnit,
};

use crate::{

@@ -30,52 +29,45 @@ use crate::{
    HirFileId, Local, MacroDef, Module, Name, Path, Resolver, Static, Struct, Ty,
};

fn try_get_resolver_for_node(
    db: &impl HirDatabase,
    file_id: FileId,
    node: &SyntaxNode,
) -> Option<Resolver> {
fn try_get_resolver_for_node(db: &impl HirDatabase, node: Source<&SyntaxNode>) -> Option<Resolver> {
    match_ast! {
        match node {
        match (node.ast) {
            ast::Module(it) => {
                let src = crate::Source { file_id: file_id.into(), ast: it };
                let src = node.with_ast(it);
                Some(crate::Module::from_declaration(db, src)?.resolver(db))
            },
            ast::SourceFile(it) => {
                let src =
                    crate::Source { file_id: file_id.into(), ast: crate::ModuleSource::SourceFile(it) };
                let src = node.with_ast(crate::ModuleSource::SourceFile(it));
                Some(crate::Module::from_definition(db, src)?.resolver(db))
            },
            ast::StructDef(it) => {
                let src = crate::Source { file_id: file_id.into(), ast: it };
                let src = node.with_ast(it);
                Some(Struct::from_source(db, src)?.resolver(db))
            },
            ast::EnumDef(it) => {
                let src = crate::Source { file_id: file_id.into(), ast: it };
                let src = node.with_ast(it);
                Some(Enum::from_source(db, src)?.resolver(db))
            },
            _ => {
                if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
                    Some(def_with_body_from_child_node(db, file_id, node)?.resolver(db))
                } else {
                    // FIXME add missing cases
                    None
            _ => match node.ast.kind() {
                FN_DEF | CONST_DEF | STATIC_DEF => {
                    Some(def_with_body_from_child_node(db, node)?.resolver(db))
                }
            },
                // FIXME add missing cases
                _ => None
            }
        }
    }
}

fn def_with_body_from_child_node(
    db: &impl HirDatabase,
    file_id: FileId,
    node: &SyntaxNode,
    child: Source<&SyntaxNode>,
) -> Option<DefWithBody> {
    let src = crate::ModuleSource::from_child_node(db, file_id, node);
    let module = Module::from_definition(db, crate::Source { file_id: file_id.into(), ast: src })?;
    let ctx = LocationCtx::new(db, module.id, file_id.into());
    let module_source = crate::ModuleSource::from_child_node(db, child);
    let module = Module::from_definition(db, Source::new(child.file_id, module_source))?;
    let ctx = LocationCtx::new(db, module.id, child.file_id);

    node.ancestors().find_map(|node| {
    child.ast.ancestors().find_map(|node| {
        match_ast! {
            match node {
                ast::FnDef(def) => { Some(Function {id: ctx.to_def(&def) }.into()) },

@@ -91,8 +83,7 @@ fn def_with_body_from_child_node(
/// original source files. It should not be used inside the HIR itself.
#[derive(Debug)]
pub struct SourceAnalyzer {
    // FIXME: this doesn't handle macros at all
    file_id: FileId,
    file_id: HirFileId,
    resolver: Resolver,
    body_owner: Option<DefWithBody>,
    body_source_map: Option<Arc<BodySourceMap>>,

@@ -135,20 +126,38 @@ pub struct ReferenceDescriptor {
    pub name: String,
}

pub struct Expansion {
    macro_call_id: MacroCallId,
}

impl Expansion {
    pub fn map_token_down(
        &self,
        db: &impl HirDatabase,
        token: Source<&SyntaxToken>,
    ) -> Option<Source<SyntaxToken>> {
        let exp_info = self.file_id().expansion_info(db)?;
        exp_info.map_token_down(token)
    }

    fn file_id(&self) -> HirFileId {
        self.macro_call_id.as_file(MacroFileKind::Items)
    }
}

impl SourceAnalyzer {
    pub fn new(
        db: &impl HirDatabase,
        file_id: FileId,
        node: &SyntaxNode,
        node: Source<&SyntaxNode>,
        offset: Option<TextUnit>,
    ) -> SourceAnalyzer {
        let def_with_body = def_with_body_from_child_node(db, file_id, node);
        let def_with_body = def_with_body_from_child_node(db, node);
        if let Some(def) = def_with_body {
            let source_map = def.body_source_map(db);
            let scopes = def.expr_scopes(db);
            let scope = match offset {
                None => scope_for(&scopes, &source_map, file_id.into(), &node),
                Some(offset) => scope_for_offset(&scopes, &source_map, file_id.into(), offset),
                None => scope_for(&scopes, &source_map, node),
                Some(offset) => scope_for_offset(&scopes, &source_map, node.with_ast(offset)),
            };
            let resolver = expr::resolver_for_scope(db, def, scope);
            SourceAnalyzer {

@@ -157,30 +166,31 @@ impl SourceAnalyzer {
                body_source_map: Some(source_map),
                infer: Some(def.infer(db)),
                scopes: Some(scopes),
                file_id,
                file_id: node.file_id,
            }
        } else {
            SourceAnalyzer {
                resolver: node
                    .ast
                    .ancestors()
                    .find_map(|node| try_get_resolver_for_node(db, file_id, &node))
                    .find_map(|it| try_get_resolver_for_node(db, node.with_ast(&it)))
                    .unwrap_or_default(),
                body_owner: None,
                body_source_map: None,
                infer: None,
                scopes: None,
                file_id,
                file_id: node.file_id,
            }
        }
    }

    fn expr_id(&self, expr: &ast::Expr) -> Option<ExprId> {
        let src = Source { file_id: self.file_id.into(), ast: expr };
        let src = Source { file_id: self.file_id, ast: expr };
        self.body_source_map.as_ref()?.node_expr(src)
    }

    fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> {
        let src = Source { file_id: self.file_id.into(), ast: pat };
        let src = Source { file_id: self.file_id, ast: pat };
        self.body_source_map.as_ref()?.node_pat(src)
    }

@@ -288,7 +298,7 @@ impl SourceAnalyzer {
        let name = name_ref.as_name();
        let source_map = self.body_source_map.as_ref()?;
        let scopes = self.scopes.as_ref()?;
        let scope = scope_for(scopes, source_map, self.file_id.into(), name_ref.syntax())?;
        let scope = scope_for(scopes, source_map, Source::new(self.file_id, name_ref.syntax()))?;
        let entry = scopes.resolve_name_in_scope(scope, &name)?;
        Some(ScopeEntryWithSyntax {
            name: entry.name().clone(),

@@ -395,6 +405,13 @@ impl SourceAnalyzer {
        implements_trait(&canonical_ty, db, &self.resolver, krate, std_future_trait)
    }

    pub fn expand(&self, db: &impl HirDatabase, macro_call: &ast::MacroCall) -> Option<Expansion> {
        let def = self.resolve_macro_call(db, macro_call)?.id;
        let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(macro_call));
        let macro_call_loc = MacroCallLoc { def, ast_id };
        Some(Expansion { macro_call_id: db.intern_macro(macro_call_loc) })
    }

    #[cfg(test)]
    pub(crate) fn body_source_map(&self) -> Arc<BodySourceMap> {
        self.body_source_map.clone().unwrap()

@@ -409,20 +426,19 @@ impl SourceAnalyzer {
fn scope_for(
    scopes: &ExprScopes,
    source_map: &BodySourceMap,
    file_id: HirFileId,
    node: &SyntaxNode,
    node: Source<&SyntaxNode>,
) -> Option<ScopeId> {
    node.ancestors()
    node.ast
        .ancestors()
        .filter_map(ast::Expr::cast)
        .filter_map(|it| source_map.node_expr(Source { file_id, ast: &it }))
        .filter_map(|it| source_map.node_expr(Source::new(node.file_id, &it)))
        .find_map(|it| scopes.scope_for(it))
}

fn scope_for_offset(
    scopes: &ExprScopes,
    source_map: &BodySourceMap,
    file_id: HirFileId,
    offset: TextUnit,
    offset: Source<TextUnit>,
) -> Option<ScopeId> {
    scopes
        .scope_by_expr()

@@ -430,7 +446,7 @@ fn scope_for_offset(
        .filter_map(|(id, scope)| {
            let source = source_map.expr_syntax(*id)?;
            // FIXME: correctly handle macro expansion
            if source.file_id != file_id {
            if source.file_id != offset.file_id {
                return None;
            }
            let syntax_node_ptr =

@@ -439,9 +455,14 @@ fn scope_for_offset(
        })
        // find containing scope
        .min_by_key(|(ptr, _scope)| {
            (!(ptr.range().start() <= offset && offset <= ptr.range().end()), ptr.range().len())
            (
                !(ptr.range().start() <= offset.ast && offset.ast <= ptr.range().end()),
                ptr.range().len(),
            )
        })
        .map(|(ptr, scope)| {
            adjust(scopes, source_map, ptr, offset.file_id, offset.ast).unwrap_or(*scope)
        })
        .map(|(ptr, scope)| adjust(scopes, source_map, ptr, file_id, offset).unwrap_or(*scope))
}

// XXX: during completion, cursor might be outside of any particular
|
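The recurring change in the hunks above is that APIs which used to take a bare `FileId` plus a node now take a single `Source<&SyntaxNode>` that carries its own `HirFileId`. A minimal sketch of that pattern follows; the field names (`file_id`, `ast`) and the `new`/`with_ast` constructors mirror the diff, while the `u32`-wrapper `HirFileId` and the `main` driver are simplifying assumptions for illustration only.

```rust
// Sketch of the `Source<T>` pattern: a value paired with the file (or macro
// expansion) it came from, so callers no longer thread a separate file id.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct HirFileId(u32);

#[derive(Clone, Copy, Debug)]
struct Source<T> {
    file_id: HirFileId,
    ast: T,
}

impl<T> Source<T> {
    fn new(file_id: HirFileId, ast: T) -> Source<T> {
        Source { file_id, ast }
    }

    // `with_ast` keeps the file id but swaps the payload, which is how the
    // diff threads an offset or a different node through the same location.
    fn with_ast<U>(&self, ast: U) -> Source<U> {
        Source { file_id: self.file_id, ast }
    }
}

fn main() {
    let node = Source::new(HirFileId(0), "fn foo() {}");
    let offset = node.with_ast(3usize); // same file, different payload
    assert_eq!(node.file_id, offset.file_id);
    println!("node = {:?}, offset = {:?}", node, offset);
}
```
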
@ -79,7 +79,7 @@ pub enum TypeCtor {
///
/// For example the type of `bar` here:
///
/// ```rust
/// ```
/// fn foo() -> i32 { 1 }
/// let bar = foo; // bar: fn() -> i32 {foo}
/// ```
@ -89,7 +89,7 @@ pub enum TypeCtor {
///
/// For example the type of `bar` here:
///
/// ```rust
/// ```
/// fn foo() -> i32 { 1 }
/// let bar: fn() -> i32 = foo;
/// ```
@ -224,8 +224,8 @@ impl TypeWalk for ProjectionTy {
self.parameters.walk(f);
}

fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) {
self.parameters.walk_mut(f);
fn walk_mut_binders(&mut self, f: &mut impl FnMut(&mut Ty, usize), binders: usize) {
self.parameters.walk_mut_binders(f, binders);
}
}

@ -291,6 +291,20 @@ pub enum Ty {
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct Substs(Arc<[Ty]>);

impl TypeWalk for Substs {
fn walk(&self, f: &mut impl FnMut(&Ty)) {
for t in self.0.iter() {
t.walk(f);
}
}

fn walk_mut_binders(&mut self, f: &mut impl FnMut(&mut Ty, usize), binders: usize) {
for t in make_mut_slice(&mut self.0) {
t.walk_mut_binders(f, binders);
}
}
}

impl Substs {
pub fn empty() -> Substs {
Substs(Arc::new([]))
@ -304,18 +318,6 @@ impl Substs {
Substs(self.0[..std::cmp::min(self.0.len(), n)].into())
}

pub fn walk(&self, f: &mut impl FnMut(&Ty)) {
for t in self.0.iter() {
t.walk(f);
}
}

pub fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) {
for t in make_mut_slice(&mut self.0) {
t.walk_mut(f);
}
}

pub fn as_single(&self) -> &Ty {
if self.0.len() != 1 {
panic!("expected substs of len 1, got {:?}", self);
@ -440,8 +442,8 @@ impl TypeWalk for TraitRef {
self.substs.walk(f);
}

fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) {
self.substs.walk_mut(f);
fn walk_mut_binders(&mut self, f: &mut impl FnMut(&mut Ty, usize), binders: usize) {
self.substs.walk_mut_binders(f, binders);
}
}

@ -491,10 +493,12 @@ impl TypeWalk for GenericPredicate {
}
}

fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) {
fn walk_mut_binders(&mut self, f: &mut impl FnMut(&mut Ty, usize), binders: usize) {
match self {
GenericPredicate::Implemented(trait_ref) => trait_ref.walk_mut(f),
GenericPredicate::Projection(projection_pred) => projection_pred.walk_mut(f),
GenericPredicate::Implemented(trait_ref) => trait_ref.walk_mut_binders(f, binders),
GenericPredicate::Projection(projection_pred) => {
projection_pred.walk_mut_binders(f, binders)
}
GenericPredicate::Error => {}
}
}
@ -544,9 +548,9 @@ impl TypeWalk for FnSig {
}
}

fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) {
fn walk_mut_binders(&mut self, f: &mut impl FnMut(&mut Ty, usize), binders: usize) {
for t in make_mut_slice(&mut self.params_and_return) {
t.walk_mut(f);
t.walk_mut_binders(f, binders);
}
}
}
@ -671,7 +675,20 @@ impl Ty {
/// types, similar to Chalk's `Fold` trait.
pub trait TypeWalk {
fn walk(&self, f: &mut impl FnMut(&Ty));
fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty));
fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) {
self.walk_mut_binders(&mut |ty, _binders| f(ty), 0);
}
/// Walk the type, counting entered binders.
///
/// `Ty::Bound` variables use De Bruijn indexing, which means that 0 refers
/// to the innermost binder, 1 to the next, etc. So when we want to
/// substitute a certain bound variable, we can't just walk the whole type
/// and blindly replace each instance of a certain index; when we 'enter'
/// things that introduce new bound variables, we have to keep track of
/// that. Currently, the only things that introduce bound variables on our
/// side are `Ty::Dyn` and `Ty::Opaque`, which each introduce a bound
/// variable for the self type.
fn walk_mut_binders(&mut self, f: &mut impl FnMut(&mut Ty, usize), binders: usize);

fn fold(mut self, f: &mut impl FnMut(Ty) -> Ty) -> Self
where
@ -700,14 +717,22 @@ pub trait TypeWalk {
}

/// Substitutes `Ty::Bound` vars (as opposed to type parameters).
fn subst_bound_vars(self, substs: &Substs) -> Self
fn subst_bound_vars(mut self, substs: &Substs) -> Self
where
Self: Sized,
{
self.fold(&mut |ty| match ty {
Ty::Bound(idx) => substs.get(idx as usize).cloned().unwrap_or_else(|| Ty::Bound(idx)),
ty => ty,
})
self.walk_mut_binders(
&mut |ty, binders| match ty {
&mut Ty::Bound(idx) => {
if idx as usize >= binders && (idx as usize - binders) < substs.len() {
*ty = substs.0[idx as usize - binders].clone();
}
}
_ => {}
},
0,
);
self
}

/// Shifts up `Ty::Bound` vars by `n`.
@ -748,22 +773,22 @@ impl TypeWalk for Ty {
f(self);
}

fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) {
fn walk_mut_binders(&mut self, f: &mut impl FnMut(&mut Ty, usize), binders: usize) {
match self {
Ty::Apply(a_ty) => {
a_ty.parameters.walk_mut(f);
a_ty.parameters.walk_mut_binders(f, binders);
}
Ty::Projection(p_ty) => {
p_ty.parameters.walk_mut(f);
p_ty.parameters.walk_mut_binders(f, binders);
}
Ty::Dyn(predicates) | Ty::Opaque(predicates) => {
for p in make_mut_slice(predicates) {
p.walk_mut(f);
p.walk_mut_binders(f, binders + 1);
}
}
Ty::Param { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {}
}
f(self);
f(self, binders);
}
}

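The `walk_mut_binders` doc comment above describes De Bruijn indexing: index 0 names the innermost binder, so a walk that rewrites bound variables must offset indices by the number of binders it has entered. A toy sketch of that discipline follows; the stand-in `Ty` (only `Bound`, one binder former, and tuples) is an assumption for illustration and is not rust-analyzer's real type.

```rust
// Toy illustration of the binder-counting walk: `Dyn` introduces one binder,
// so inside it the outer variable 0 is spelled as index 1.
#[derive(Debug, Clone, PartialEq)]
enum Ty {
    Bound(u32),   // De Bruijn index: 0 = innermost binder
    Dyn(Box<Ty>), // introduces one binder (like `Ty::Dyn` above)
    Tuple(Vec<Ty>),
}

fn walk_mut_binders(ty: &mut Ty, f: &mut impl FnMut(&mut Ty, usize), binders: usize) {
    match ty {
        Ty::Dyn(inner) => walk_mut_binders(inner, f, binders + 1),
        Ty::Tuple(items) => {
            for t in items {
                walk_mut_binders(t, f, binders);
            }
        }
        Ty::Bound(_) => {}
    }
    f(ty, binders); // callback sees how many binders were entered
}

// Substitute bound var 0 *of the outer scope*; inside `Dyn` the same
// variable is index 1, which is why the walk must track `binders`.
fn subst_outermost(ty: &mut Ty, replacement: &Ty) {
    walk_mut_binders(
        ty,
        &mut |ty, binders| {
            if let Ty::Bound(idx) = ty {
                if *idx as usize == binders {
                    *ty = replacement.clone();
                }
            }
        },
        0,
    );
}

fn main() {
    let mut ty = Ty::Tuple(vec![Ty::Bound(0), Ty::Dyn(Box::new(Ty::Bound(1)))]);
    subst_outermost(&mut ty, &Ty::Tuple(vec![]));
    // Both occurrences named the same outer variable, and both get replaced.
    assert_eq!(
        ty,
        Ty::Tuple(vec![Ty::Tuple(vec![]), Ty::Dyn(Box::new(Ty::Tuple(vec![])))])
    );
}
```
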
@ -134,17 +134,19 @@ where
}

impl<T> Canonicalized<T> {
pub fn decanonicalize_ty(&self, ty: Ty) -> Ty {
ty.fold(&mut |ty| match ty {
Ty::Bound(idx) => {
if (idx as usize) < self.free_vars.len() {
Ty::Infer(self.free_vars[idx as usize])
} else {
Ty::Bound(idx)
pub fn decanonicalize_ty(&self, mut ty: Ty) -> Ty {
ty.walk_mut_binders(
&mut |ty, binders| match ty {
&mut Ty::Bound(idx) => {
if idx as usize >= binders && (idx as usize - binders) < self.free_vars.len() {
*ty = Ty::Infer(self.free_vars[idx as usize - binders]);
}
}
}
ty => ty,
})
_ => {}
},
0,
);
ty
}

pub fn apply_solution(

@ -5,16 +5,14 @@
use std::sync::Arc;

use arrayvec::ArrayVec;
use hir_def::CrateModuleId;
use rustc_hash::FxHashMap;

use crate::{
db::HirDatabase,
impl_block::{ImplBlock, ImplId},
resolve::Resolver,
ty::primitive::{FloatBitness, Uncertain},
ty::{Ty, TypeCtor},
AssocItem, Crate, Function, Module, Mutability, Name, Trait,
AssocItem, Crate, Function, ImplBlock, Module, Mutability, Name, Trait,
};

use super::{autoderef, lower, Canonical, InEnvironment, TraitEnvironment, TraitRef};
@ -39,65 +37,46 @@ impl TyFingerprint {

#[derive(Debug, PartialEq, Eq)]
pub struct CrateImplBlocks {
/// To make sense of the CrateModuleIds, we need the source root.
krate: Crate,
impls: FxHashMap<TyFingerprint, Vec<(CrateModuleId, ImplId)>>,
impls_by_trait: FxHashMap<Trait, Vec<(CrateModuleId, ImplId)>>,
impls: FxHashMap<TyFingerprint, Vec<ImplBlock>>,
impls_by_trait: FxHashMap<Trait, Vec<ImplBlock>>,
}

impl CrateImplBlocks {
pub fn lookup_impl_blocks<'a>(&'a self, ty: &Ty) -> impl Iterator<Item = ImplBlock> + 'a {
pub(crate) fn impls_in_crate_query(
db: &impl HirDatabase,
krate: Crate,
) -> Arc<CrateImplBlocks> {
let mut crate_impl_blocks =
CrateImplBlocks { impls: FxHashMap::default(), impls_by_trait: FxHashMap::default() };
if let Some(module) = krate.root_module(db) {
crate_impl_blocks.collect_recursive(db, module);
}
Arc::new(crate_impl_blocks)
}
pub fn lookup_impl_blocks(&self, ty: &Ty) -> impl Iterator<Item = ImplBlock> + '_ {
let fingerprint = TyFingerprint::for_impl(ty);
fingerprint.and_then(|f| self.impls.get(&f)).into_iter().flat_map(|i| i.iter()).map(
move |(module_id, impl_id)| {
let module = Module::new(self.krate, *module_id);
ImplBlock::from_id(module, *impl_id)
},
)
fingerprint.and_then(|f| self.impls.get(&f)).into_iter().flatten().copied()
}

pub fn lookup_impl_blocks_for_trait<'a>(
&'a self,
tr: Trait,
) -> impl Iterator<Item = ImplBlock> + 'a {
self.impls_by_trait.get(&tr).into_iter().flat_map(|i| i.iter()).map(
move |(module_id, impl_id)| {
let module = Module::new(self.krate, *module_id);
ImplBlock::from_id(module, *impl_id)
},
)
pub fn lookup_impl_blocks_for_trait(&self, tr: Trait) -> impl Iterator<Item = ImplBlock> + '_ {
self.impls_by_trait.get(&tr).into_iter().flatten().copied()
}

pub fn all_impls<'a>(&'a self) -> impl Iterator<Item = ImplBlock> + 'a {
self.impls.values().chain(self.impls_by_trait.values()).flat_map(|i| i.iter()).map(
move |(module_id, impl_id)| {
let module = Module::new(self.krate, *module_id);
ImplBlock::from_id(module, *impl_id)
},
)
self.impls.values().chain(self.impls_by_trait.values()).flatten().copied()
}

fn collect_recursive(&mut self, db: &impl HirDatabase, module: Module) {
let module_impl_blocks = db.impls_in_module(module);

for (impl_id, _) in module_impl_blocks.impls.iter() {
let impl_block = ImplBlock::from_id(module_impl_blocks.module, impl_id);

for impl_block in module.impl_blocks(db) {
let target_ty = impl_block.target_ty(db);

if impl_block.target_trait(db).is_some() {
if let Some(tr) = impl_block.target_trait_ref(db) {
self.impls_by_trait
.entry(tr.trait_)
.or_insert_with(Vec::new)
.push((module.id.module_id, impl_id));
self.impls_by_trait.entry(tr.trait_).or_default().push(impl_block);
}
} else {
if let Some(target_ty_fp) = TyFingerprint::for_impl(&target_ty) {
self.impls
.entry(target_ty_fp)
.or_insert_with(Vec::new)
.push((module.id.module_id, impl_id));
self.impls.entry(target_ty_fp).or_default().push(impl_block);
}
}
}
@ -106,21 +85,6 @@ impl CrateImplBlocks {
self.collect_recursive(db, child);
}
}

pub(crate) fn impls_in_crate_query(
db: &impl HirDatabase,
krate: Crate,
) -> Arc<CrateImplBlocks> {
let mut crate_impl_blocks = CrateImplBlocks {
krate,
impls: FxHashMap::default(),
impls_by_trait: FxHashMap::default(),
};
if let Some(module) = krate.root_module(db) {
crate_impl_blocks.collect_recursive(db, module);
}
Arc::new(crate_impl_blocks)
}
}

fn def_crates(db: &impl HirDatabase, cur_crate: Crate, ty: &Ty) -> Option<ArrayVec<[Crate; 2]>> {
@ -264,14 +228,10 @@ fn iterate_trait_method_candidates<T>(
'traits: for t in traits {
let data = t.trait_data(db);

// FIXME this is a bit of a hack, since Chalk should say the same thing
// anyway, but currently Chalk doesn't implement `dyn/impl Trait` yet
let inherently_implemented = ty.value.inherent_trait() == Some(t);

// we'll be lazy about checking whether the type implements the
// trait, but if we find out it doesn't, we'll skip the rest of the
// iteration
let mut known_implemented = inherently_implemented;
let mut known_implemented = false;
for &item in data.items() {
if !is_valid_candidate(db, name, mode, item) {
continue;

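`CrateImplBlocks` now stores `ImplBlock` values directly, keyed by a `TyFingerprint`, so lookup is a flat `flatten().copied()` over the bucket instead of reconstructing each block from a `(module id, impl id)` pair. A reduced sketch of that map shape follows; the string fingerprint and the id-only `ImplBlock` are assumed simplifications, not the real types.

```rust
use std::collections::HashMap;

// Simplified fingerprint-keyed impl lookup. The real `TyFingerprint` is
// derived from the type constructor; a string stands in for it here.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct ImplBlock(u32);

#[derive(Default)]
struct CrateImplBlocks {
    impls: HashMap<String, Vec<ImplBlock>>,
}

impl CrateImplBlocks {
    fn collect(&mut self, fingerprint: &str, impl_block: ImplBlock) {
        // `or_default` replaces the older `or_insert_with(Vec::new)` in the diff.
        self.impls.entry(fingerprint.to_string()).or_default().push(impl_block);
    }

    // Storing `ImplBlock` values directly makes lookup a flatten + copy,
    // with no module reconstruction step.
    fn lookup(&self, fingerprint: &str) -> impl Iterator<Item = ImplBlock> + '_ {
        self.impls.get(fingerprint).into_iter().flatten().copied()
    }
}

fn main() {
    let mut blocks = CrateImplBlocks::default();
    blocks.collect("Vec", ImplBlock(0));
    blocks.collect("Vec", ImplBlock(1));
    assert_eq!(blocks.lookup("Vec").count(), 2);
    assert_eq!(blocks.lookup("Option").count(), 0);
}
```
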
@ -1,3 +1,6 @@
mod never_type;
mod coercion;

use std::fmt::Write;
use std::sync::Arc;

@ -11,7 +14,7 @@ use ra_syntax::{
use test_utils::covers;

use crate::{
expr::BodySourceMap, test_db::TestDB, ty::display::HirDisplay, ty::InferenceResult,
expr::BodySourceMap, test_db::TestDB, ty::display::HirDisplay, ty::InferenceResult, Source,
SourceAnalyzer,
};

@ -19,9 +22,6 @@ use crate::{
// against snapshots of the expected results using insta. Use cargo-insta to
// update the snapshots.

mod never_type;
mod coercion;

#[test]
fn cfg_impl_block() {
let (db, pos) = TestDB::with_position(
@ -3983,11 +3983,11 @@ fn test(x: impl Trait<u64>, y: &impl Trait<u64>) {
[180; 183) 'bar': fn bar() -> impl Trait<u64>
[180; 185) 'bar()': impl Trait<u64>
[191; 192) 'x': impl Trait<u64>
[191; 198) 'x.foo()': {unknown}
[191; 198) 'x.foo()': u64
[204; 205) 'y': &impl Trait<u64>
[204; 211) 'y.foo()': {unknown}
[204; 211) 'y.foo()': u64
[217; 218) 'z': impl Trait<u64>
[217; 224) 'z.foo()': {unknown}
[217; 224) 'z.foo()': u64
[230; 231) 'x': impl Trait<u64>
[230; 238) 'x.foo2()': i64
[244; 245) 'y': &impl Trait<u64>
@ -4033,11 +4033,11 @@ fn test(x: dyn Trait<u64>, y: &dyn Trait<u64>) {
[177; 180) 'bar': fn bar() -> dyn Trait<u64>
[177; 182) 'bar()': dyn Trait<u64>
[188; 189) 'x': dyn Trait<u64>
[188; 195) 'x.foo()': {unknown}
[188; 195) 'x.foo()': u64
[201; 202) 'y': &dyn Trait<u64>
[201; 208) 'y.foo()': {unknown}
[201; 208) 'y.foo()': u64
[214; 215) 'z': dyn Trait<u64>
[214; 221) 'z.foo()': {unknown}
[214; 221) 'z.foo()': u64
[227; 228) 'x': dyn Trait<u64>
[227; 235) 'x.foo2()': i64
[241; 242) 'y': &dyn Trait<u64>
@ -4184,6 +4184,49 @@ fn test<T: Trait<Type = u32>>(x: T, y: impl Trait<Type = i64>) {
);
}

#[test]
fn impl_trait_assoc_binding_projection_bug() {
let (db, pos) = TestDB::with_position(
r#"
//- /main.rs crate:main deps:std
pub trait Language {
type Kind;
}
pub enum RustLanguage {}
impl Language for RustLanguage {
type Kind = SyntaxKind;
}
struct SyntaxNode<L> {}
fn foo() -> impl Iterator<Item = SyntaxNode<RustLanguage>> {}

trait Clone {
fn clone(&self) -> Self;
}

fn api_walkthrough() {
for node in foo() {
node.clone()<|>;
}
}

//- /std.rs crate:std
#[prelude_import] use iter::*;
mod iter {
trait IntoIterator {
type Item;
}
trait Iterator {
type Item;
}
impl<T: Iterator> IntoIterator for T {
type Item = <T as Iterator>::Item;
}
}
"#,
);
assert_eq!("{unknown}", type_at_pos(&db, pos));
}

#[test]
fn projection_eq_within_chalk() {
// std::env::set_var("CHALK_DEBUG", "1");
@ -4609,7 +4652,8 @@ fn test<T, U>() where T: Trait<U::Item>, U: Trait<T::Item> {
fn type_at_pos(db: &TestDB, pos: FilePosition) -> String {
let file = db.parse(pos.file_id).ok().unwrap();
let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap();
let analyzer = SourceAnalyzer::new(db, pos.file_id, expr.syntax(), Some(pos.offset));
let analyzer =
SourceAnalyzer::new(db, Source::new(pos.file_id.into(), expr.syntax()), Some(pos.offset));
let ty = analyzer.type_of(db, &expr).unwrap();
ty.display(db).to_string()
}
@ -4674,7 +4718,7 @@ fn infer(content: &str) -> String {

for node in source_file.syntax().descendants() {
if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
let analyzer = SourceAnalyzer::new(&db, file_id, &node, None);
let analyzer = SourceAnalyzer::new(&db, Source::new(file_id.into(), &node), None);
infer_def(analyzer.inference_result(), analyzer.body_source_map());
}
}
@ -4715,7 +4759,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
let file = db.parse(pos.file_id).ok().unwrap();
let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
let events = db.log_executed(|| {
SourceAnalyzer::new(&db, pos.file_id, &node, None);
SourceAnalyzer::new(&db, Source::new(pos.file_id.into(), &node), None);
});
assert!(format!("{:?}", events).contains("infer"))
}
@ -4735,7 +4779,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
let file = db.parse(pos.file_id).ok().unwrap();
let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
let events = db.log_executed(|| {
SourceAnalyzer::new(&db, pos.file_id, &node, None);
SourceAnalyzer::new(&db, Source::new(pos.file_id.into(), &node), None);
});
assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
}

@ -8,7 +8,7 @@ use ra_prof::profile;
use rustc_hash::FxHashSet;

use super::{Canonical, GenericPredicate, HirDisplay, ProjectionTy, TraitRef, Ty, TypeWalk};
use crate::{db::HirDatabase, expr::ExprId, Crate, DefWithBody, ImplBlock, Trait};
use crate::{db::HirDatabase, expr::ExprId, Crate, DefWithBody, ImplBlock, Trait, TypeAlias};

use self::chalk::{from_chalk, ToChalk};

@ -17,7 +17,7 @@ pub(crate) mod chalk;
#[derive(Debug, Clone)]
pub struct TraitSolver {
krate: Crate,
inner: Arc<Mutex<chalk_solve::Solver>>,
inner: Arc<Mutex<chalk_solve::Solver<ChalkIr>>>,
}

/// We need eq for salsa
@ -34,7 +34,7 @@ impl TraitSolver {
&self,
db: &impl HirDatabase,
goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal<ChalkIr>>>,
) -> Option<chalk_solve::Solution> {
) -> Option<chalk_solve::Solution<ChalkIr>> {
let context = ChalkContext { db, krate: self.krate };
debug!("solve goal: {:?}", goal);
let mut solver = match self.inner.lock() {
@ -165,9 +165,9 @@ impl TypeWalk for ProjectionPredicate {
self.ty.walk(f);
}

fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) {
self.projection_ty.walk_mut(f);
self.ty.walk_mut(f);
fn walk_mut_binders(&mut self, f: &mut impl FnMut(&mut Ty, usize), binders: usize) {
self.projection_ty.walk_mut_binders(f, binders);
self.ty.walk_mut_binders(f, binders);
}
}

@ -188,6 +188,7 @@ pub(crate) fn trait_solve_query(
}

let canonical = goal.to_chalk(db).cast();

// We currently don't deal with universes (I think / hope they're not yet
// relevant for our use cases?)
let u_canonical = chalk_ir::UCanonical { canonical, universes: 1 };
@ -195,7 +196,10 @@ pub(crate) fn trait_solve_query(
solution.map(|solution| solution_from_chalk(db, solution))
}

fn solution_from_chalk(db: &impl HirDatabase, solution: chalk_solve::Solution) -> Solution {
fn solution_from_chalk(
db: &impl HirDatabase,
solution: chalk_solve::Solution<ChalkIr>,
) -> Solution {
let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<ChalkIr>>| {
let value = subst
.value
@ -300,3 +304,14 @@ pub enum Impl {
/// Closure types implement the Fn traits synthetically.
ClosureFnTraitImpl(ClosureFnTraitImplData),
}

/// An associated type value. Usually this comes from a `type` declaration
/// inside an impl block, but for built-in impls we have to synthesize it.
/// (We only need this because Chalk wants a unique ID for each of these.)
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum AssocTyValue {
/// A normal assoc type value from an impl block.
TypeAlias(TypeAlias),
/// The output type of the Fn trait implementation.
ClosureFnTraitImplOutput(ClosureFnTraitImplData),
}

@ -7,22 +7,19 @@ use chalk_ir::{
cast::Cast, family::ChalkIr, Identifier, ImplId, Parameter, PlaceholderIndex, TypeId,
TypeKindId, TypeName, UniverseIndex,
};
use chalk_rust_ir::{AssociatedTyDatum, ImplDatum, StructDatum, TraitDatum};
use chalk_rust_ir::{AssociatedTyDatum, AssociatedTyValue, ImplDatum, StructDatum, TraitDatum};

use hir_expand::name;

use ra_db::salsa::{InternId, InternKey};

use super::{Canonical, ChalkContext, Impl, Obligation};
use super::{AssocTyValue, Canonical, ChalkContext, Impl, Obligation};
use crate::{
db::HirDatabase,
generics::{GenericDef, HasGenericParams},
ty::display::HirDisplay,
ty::{
ApplicationTy, GenericPredicate, Namespace, ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
TypeWalk,
},
AssocItem, Crate, HasBody, ImplBlock, Trait, TypeAlias,
ty::{ApplicationTy, GenericPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, TypeWalk},
Crate, HasBody, ImplBlock, Trait, TypeAlias,
};

/// This represents a trait whose name we could not resolve.
@ -59,29 +56,36 @@ impl ToChalk for Ty {
}
};
let parameters = apply_ty.parameters.to_chalk(db);
chalk_ir::ApplicationTy { name, parameters }.cast()
chalk_ir::ApplicationTy { name, parameters }.cast().intern()
}
Ty::Projection(proj_ty) => {
let associated_ty_id = proj_ty.associated_ty.to_chalk(db);
let parameters = proj_ty.parameters.to_chalk(db);
chalk_ir::ProjectionTy { associated_ty_id, parameters }.cast()
chalk_ir::ProjectionTy { associated_ty_id, parameters }.cast().intern()
}
Ty::Param { idx, .. } => {
PlaceholderIndex { ui: UniverseIndex::ROOT, idx: idx as usize }.to_ty::<ChalkIr>()
}
Ty::Bound(idx) => chalk_ir::Ty::BoundVar(idx as usize),
Ty::Bound(idx) => chalk_ir::TyData::BoundVar(idx as usize).intern(),
Ty::Infer(_infer_ty) => panic!("uncanonicalized infer ty"),
// FIXME use Chalk's Dyn/Opaque once the bugs with that are fixed
Ty::Unknown | Ty::Dyn(_) | Ty::Opaque(_) => {
Ty::Dyn(predicates) => {
let where_clauses = predicates.iter().cloned().map(|p| p.to_chalk(db)).collect();
chalk_ir::TyData::Dyn(make_binders(where_clauses, 1)).intern()
}
Ty::Opaque(predicates) => {
let where_clauses = predicates.iter().cloned().map(|p| p.to_chalk(db)).collect();
chalk_ir::TyData::Opaque(make_binders(where_clauses, 1)).intern()
}
Ty::Unknown => {
let parameters = Vec::new();
let name = TypeName::Error;
chalk_ir::ApplicationTy { name, parameters }.cast()
chalk_ir::ApplicationTy { name, parameters }.cast().intern()
}
}
}
fn from_chalk(db: &impl HirDatabase, chalk: chalk_ir::Ty<ChalkIr>) -> Self {
match chalk {
chalk_ir::Ty::Apply(apply_ty) => {
match chalk.data().clone() {
chalk_ir::TyData::Apply(apply_ty) => {
// FIXME this is kind of hacky due to the fact that
// TypeName::Placeholder is a Ty::Param on our side
match apply_ty.name {
@ -104,21 +108,21 @@ impl ToChalk for Ty {
}
}
}
chalk_ir::Ty::Projection(proj) => {
chalk_ir::TyData::Projection(proj) => {
let associated_ty = from_chalk(db, proj.associated_ty_id);
let parameters = from_chalk(db, proj.parameters);
Ty::Projection(ProjectionTy { associated_ty, parameters })
}
chalk_ir::Ty::ForAll(_) => unimplemented!(),
chalk_ir::Ty::BoundVar(idx) => Ty::Bound(idx as u32),
chalk_ir::Ty::InferenceVar(_iv) => Ty::Unknown,
chalk_ir::Ty::Dyn(where_clauses) => {
chalk_ir::TyData::ForAll(_) => unimplemented!(),
chalk_ir::TyData::BoundVar(idx) => Ty::Bound(idx as u32),
chalk_ir::TyData::InferenceVar(_iv) => Ty::Unknown,
chalk_ir::TyData::Dyn(where_clauses) => {
assert_eq!(where_clauses.binders.len(), 1);
let predicates =
where_clauses.value.into_iter().map(|c| from_chalk(db, c)).collect();
Ty::Dyn(predicates)
}
chalk_ir::Ty::Opaque(where_clauses) => {
chalk_ir::TyData::Opaque(where_clauses) => {
assert_eq!(where_clauses.binders.len(), 1);
let predicates =
where_clauses.value.into_iter().map(|c| from_chalk(db, c)).collect();
@ -191,11 +195,11 @@ impl ToChalk for Impl {
type Chalk = chalk_ir::ImplId;

fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::ImplId {
db.intern_impl(self).into()
db.intern_chalk_impl(self).into()
}

fn from_chalk(db: &impl HirDatabase, impl_id: chalk_ir::ImplId) -> Impl {
db.lookup_intern_impl(impl_id.into())
db.lookup_intern_chalk_impl(impl_id.into())
}
}

@ -211,6 +215,21 @@ impl ToChalk for TypeAlias {
}
}

impl ToChalk for AssocTyValue {
type Chalk = chalk_rust_ir::AssociatedTyValueId;

fn to_chalk(self, db: &impl HirDatabase) -> chalk_rust_ir::AssociatedTyValueId {
db.intern_assoc_ty_value(self).into()
}

fn from_chalk(
db: &impl HirDatabase,
assoc_ty_value_id: chalk_rust_ir::AssociatedTyValueId,
) -> AssocTyValue {
db.lookup_intern_assoc_ty_value(assoc_ty_value_id.into())
}
}

impl ToChalk for GenericPredicate {
type Chalk = chalk_ir::QuantifiedWhereClause<ChalkIr>;

@ -399,20 +418,20 @@ fn convert_where_clauses(
result
}

impl<'a, DB> chalk_solve::RustIrDatabase for ChalkContext<'a, DB>
impl<'a, DB> chalk_solve::RustIrDatabase<ChalkIr> for ChalkContext<'a, DB>
where
DB: HirDatabase,
{
fn associated_ty_data(&self, id: TypeId) -> Arc<AssociatedTyDatum> {
fn associated_ty_data(&self, id: TypeId) -> Arc<AssociatedTyDatum<ChalkIr>> {
self.db.associated_ty_data(id)
}
fn trait_datum(&self, trait_id: chalk_ir::TraitId) -> Arc<TraitDatum> {
fn trait_datum(&self, trait_id: chalk_ir::TraitId) -> Arc<TraitDatum<ChalkIr>> {
self.db.trait_datum(self.krate, trait_id)
}
fn struct_datum(&self, struct_id: chalk_ir::StructId) -> Arc<StructDatum> {
fn struct_datum(&self, struct_id: chalk_ir::StructId) -> Arc<StructDatum<ChalkIr>> {
self.db.struct_datum(self.krate, struct_id)
}
fn impl_datum(&self, impl_id: ImplId) -> Arc<ImplDatum> {
fn impl_datum(&self, impl_id: ImplId) -> Arc<ImplDatum<ChalkIr>> {
self.db.impl_datum(self.krate, impl_id)
}
fn impls_for_trait(
@ -462,13 +481,11 @@ where
fn type_name(&self, _id: TypeKindId) -> Identifier {
unimplemented!()
}
fn split_projection<'p>(
fn associated_ty_value(
&self,
projection: &'p chalk_ir::ProjectionTy<ChalkIr>,
) -> (Arc<AssociatedTyDatum>, &'p [Parameter<ChalkIr>], &'p [Parameter<ChalkIr>]) {
debug!("split_projection {:?}", projection);
// we don't support GATs, so I think this should always be correct currently
(self.db.associated_ty_data(projection.associated_ty_id), &projection.parameters, &[])
id: chalk_rust_ir::AssociatedTyValueId,
) -> Arc<AssociatedTyValue<ChalkIr>> {
self.db.associated_ty_value(self.krate, id)
}
fn custom_clauses(&self) -> Vec<chalk_ir::ProgramClause<ChalkIr>> {
vec![]
@ -485,7 +502,7 @@ where
pub(crate) fn associated_ty_data_query(
db: &impl HirDatabase,
id: TypeId,
) -> Arc<AssociatedTyDatum> {
) -> Arc<AssociatedTyDatum<ChalkIr>> {
debug!("associated_ty_data {:?}", id);
let type_alias: TypeAlias = from_chalk(db, id);
let trait_ = match type_alias.container(db) {
@ -493,19 +510,16 @@ pub(crate) fn associated_ty_data_query(
_ => panic!("associated type not in trait"),
};
let generic_params = type_alias.generic_params(db);
let parameter_kinds = generic_params
.params_including_parent()
.into_iter()
.map(|p| chalk_ir::ParameterKind::Ty(lalrpop_intern::intern(&p.name.to_string())))
.collect();
let bound_data = chalk_rust_ir::AssociatedTyDatumBound {
// FIXME add bounds and where clauses
bounds: vec![],
where_clauses: vec![],
};
let datum = AssociatedTyDatum {
trait_id: trait_.to_chalk(db),
id,
name: lalrpop_intern::intern(&type_alias.name(db).to_string()),
parameter_kinds,
// FIXME add bounds and where clauses
bounds: vec![],
where_clauses: vec![],
binders: make_binders(bound_data, generic_params.count_params_including_parent()),
};
Arc::new(datum)
}
@ -514,17 +528,10 @@ pub(crate) fn trait_datum_query(
db: &impl HirDatabase,
krate: Crate,
trait_id: chalk_ir::TraitId,
) -> Arc<TraitDatum> {
) -> Arc<TraitDatum<ChalkIr>> {
debug!("trait_datum {:?}", trait_id);
if trait_id == UNKNOWN_TRAIT {
let trait_datum_bound = chalk_rust_ir::TraitDatumBound {
trait_ref: chalk_ir::TraitRef {
trait_id: UNKNOWN_TRAIT,
parameters: vec![chalk_ir::Ty::BoundVar(0).cast()],
},
associated_ty_ids: Vec::new(),
where_clauses: Vec::new(),
};
let trait_datum_bound = chalk_rust_ir::TraitDatumBound { where_clauses: Vec::new() };

let flags = chalk_rust_ir::TraitFlags {
auto: false,
@ -532,18 +539,24 @@ pub(crate) fn trait_datum_query(
upstream: true,
fundamental: false,
non_enumerable: true,
coinductive: false,
};
return Arc::new(TraitDatum { binders: make_binders(trait_datum_bound, 1), flags });
return Arc::new(TraitDatum {
id: trait_id,
binders: make_binders(trait_datum_bound, 1),
flags,
associated_ty_ids: vec![],
});
}
let trait_: Trait = from_chalk(db, trait_id);
debug!("trait {:?} = {:?}", trait_id, trait_.name(db));
let generic_params = trait_.generic_params(db);
let bound_vars = Substs::bound_vars(&generic_params);
let trait_ref = trait_.trait_ref(db).subst(&bound_vars).to_chalk(db);
let flags = chalk_rust_ir::TraitFlags {
auto: trait_.is_auto(db),
upstream: trait_.module(db).krate() != krate,
non_enumerable: true,
coinductive: false, // only relevant for Chalk testing
// FIXME set these flags correctly
marker: false,
fundamental: false,
@ -558,10 +571,13 @@ pub(crate) fn trait_datum_query(
})
.map(|type_alias| type_alias.to_chalk(db))
.collect();
let trait_datum_bound =
chalk_rust_ir::TraitDatumBound { trait_ref, where_clauses, associated_ty_ids };
let trait_datum =
TraitDatum { binders: make_binders(trait_datum_bound, bound_vars.len()), flags };
let trait_datum_bound = chalk_rust_ir::TraitDatumBound { where_clauses };
let trait_datum = TraitDatum {
id: trait_id,
binders: make_binders(trait_datum_bound, bound_vars.len()),
flags,
associated_ty_ids,
};
Arc::new(trait_datum)
}

@ -569,7 +585,7 @@ pub(crate) fn struct_datum_query(
db: &impl HirDatabase,
krate: Crate,
struct_id: chalk_ir::StructId,
) -> Arc<StructDatum> {
) -> Arc<StructDatum<ChalkIr>> {
debug!("struct_datum {:?}", struct_id);
let type_ctor: TypeCtor = from_chalk(db, struct_id);
debug!("struct {:?} = {:?}", struct_id, type_ctor);
@ -588,17 +604,12 @@ pub(crate) fn struct_datum_query(
// FIXME set fundamental flag correctly
fundamental: false,
};
let self_ty = chalk_ir::ApplicationTy {
name: TypeName::TypeKindId(type_ctor.to_chalk(db).into()),
parameters: (0..num_params).map(|i| chalk_ir::Ty::BoundVar(i).cast()).collect(),
};
let struct_datum_bound = chalk_rust_ir::StructDatumBound {
self_ty,
fields: Vec::new(), // FIXME add fields (only relevant for auto traits)
where_clauses,
flags,
};
let struct_datum = StructDatum { binders: make_binders(struct_datum_bound, num_params) };
let struct_datum =
StructDatum { id: struct_id, binders: make_binders(struct_datum_bound, num_params), flags };
Arc::new(struct_datum)
}

@ -606,16 +617,15 @@ pub(crate) fn impl_datum_query(
db: &impl HirDatabase,
krate: Crate,
impl_id: ImplId,
) -> Arc<ImplDatum> {
) -> Arc<ImplDatum<ChalkIr>> {
let _p = ra_prof::profile("impl_datum");
debug!("impl_datum {:?}", impl_id);
let impl_: Impl = from_chalk(db, impl_id);
match impl_ {
Impl::ImplBlock(impl_block) => impl_block_datum(db, krate, impl_id, impl_block),
Impl::ClosureFnTraitImpl(data) => {
closure_fn_trait_impl_datum(db, krate, impl_id, data).unwrap_or_else(invalid_impl_datum)
}
Impl::ClosureFnTraitImpl(data) => closure_fn_trait_impl_datum(db, krate, data),
}
.unwrap_or_else(invalid_impl_datum)
}

fn impl_block_datum(
@ -623,14 +633,12 @@ fn impl_block_datum(
krate: Crate,
impl_id: ImplId,
impl_block: ImplBlock,
) -> Arc<ImplDatum> {
) -> Option<Arc<ImplDatum<ChalkIr>>> {
let generic_params = impl_block.generic_params(db);
let bound_vars = Substs::bound_vars(&generic_params);
let trait_ref = impl_block
.target_trait_ref(db)
.expect("FIXME handle unresolved impl block trait ref")
.subst(&bound_vars);
let impl_type = if impl_block.module().krate() == krate {
let trait_ref = impl_block.target_trait_ref(db)?.subst(&bound_vars);
let trait_ = trait_ref.trait_;
let impl_type = if impl_block.krate(db) == krate {
chalk_rust_ir::ImplType::Local
} else {
chalk_rust_ir::ImplType::External
@ -644,28 +652,7 @@ fn impl_block_datum(
trait_ref.display(db),
where_clauses
);
let trait_ = trait_ref.trait_;
let trait_ref = trait_ref.to_chalk(db);
let associated_ty_values = impl_block
.items(db)
.into_iter()
.filter_map(|item| match item {
AssocItem::TypeAlias(t) => Some(t),
_ => None,
})
.filter_map(|t| {
let assoc_ty = trait_.associated_type_by_name(db, &t.name(db))?;
let ty = db.type_for_def(t.into(), Namespace::Types).subst(&bound_vars);
Some(chalk_rust_ir::AssociatedTyValue {
impl_id,
associated_ty_id: assoc_ty.to_chalk(db),
value: chalk_ir::Binders {
value: chalk_rust_ir::AssociatedTyValueBound { ty: ty.to_chalk(db) },
binders: vec![], // we don't support GATs yet
},
})
})
.collect();

let polarity = if negative {
chalk_rust_ir::Polarity::Negative
@ -673,31 +660,41 @@ fn impl_block_datum(
chalk_rust_ir::Polarity::Positive
};

let impl_datum_bound =
chalk_rust_ir::ImplDatumBound { trait_ref, where_clauses, associated_ty_values };
let impl_datum_bound = chalk_rust_ir::ImplDatumBound { trait_ref, where_clauses };
let associated_ty_value_ids = impl_block
.items(db)
.into_iter()
.filter_map(|item| match item {
crate::AssocItem::TypeAlias(type_alias) => Some(type_alias),
_ => None,
})
.filter(|type_alias| {
// don't include associated types that don't exist in the trait
trait_.associated_type_by_name(db, &type_alias.name(db)).is_some()
})
.map(|type_alias| AssocTyValue::TypeAlias(type_alias).to_chalk(db))
.collect();
debug!("impl_datum: {:?}", impl_datum_bound);
let impl_datum = ImplDatum {
binders: make_binders(impl_datum_bound, bound_vars.len()),
impl_type,
polarity,
associated_ty_value_ids,
};
Arc::new(impl_datum)
Some(Arc::new(impl_datum))
}

fn invalid_impl_datum() -> Arc<ImplDatum> {
fn invalid_impl_datum() -> Arc<ImplDatum<ChalkIr>> {
let trait_ref = chalk_ir::TraitRef {
trait_id: UNKNOWN_TRAIT,
parameters: vec![chalk_ir::Ty::BoundVar(0).cast()],
};
let impl_datum_bound = chalk_rust_ir::ImplDatumBound {
trait_ref,
where_clauses: Vec::new(),
associated_ty_values: Vec::new(),
parameters: vec![chalk_ir::TyData::BoundVar(0).cast().intern().cast()],
};
let impl_datum_bound = chalk_rust_ir::ImplDatumBound { trait_ref, where_clauses: Vec::new() };
let impl_datum = ImplDatum {
binders: make_binders(impl_datum_bound, 1),
impl_type: chalk_rust_ir::ImplType::External,
polarity: chalk_rust_ir::Polarity::Positive,
associated_ty_value_ids: Vec::new(),
};
Arc::new(impl_datum)
}
@ -705,15 +702,19 @@ fn invalid_impl_datum() -> Arc<ImplDatum> {
fn closure_fn_trait_impl_datum(
db: &impl HirDatabase,
krate: Crate,
impl_id: ImplId,
data: super::ClosureFnTraitImplData,
) -> Option<Arc<ImplDatum>> {
) -> Option<Arc<ImplDatum<ChalkIr>>> {
// for some closure |X, Y| -> Z:
// impl<T, U, V> Fn<(T, U)> for closure<fn(T, U) -> V> { Output = V }

let fn_once_trait = get_fn_trait(db, krate, super::FnTrait::FnOnce)?;
let trait_ = get_fn_trait(db, krate, data.fn_trait)?; // get corresponding fn trait

// validate FnOnce trait, since we need it in the assoc ty value definition
// and don't want to return a valid value only to find out later that FnOnce
// is broken
let fn_once_trait = get_fn_trait(db, krate, super::FnTrait::FnOnce)?;
fn_once_trait.associated_type_by_name(db, &name::OUTPUT_TYPE)?;

let num_args: u16 = match &data.def.body(db)[data.expr] {
crate::expr::Expr::Lambda { args, .. } => args.len() as u16,
_ => {
@ -726,7 +727,6 @@ fn closure_fn_trait_impl_datum(
TypeCtor::Tuple { cardinality: num_args },
Substs::builder(num_args as usize).fill_with_bound_vars(0).build(),
);
let output_ty = Ty::Bound(num_args.into());
let sig_ty = Ty::apply(
TypeCtor::FnPtr { num_args },
Substs::builder(num_args as usize + 1).fill_with_bound_vars(0).build(),
@ -739,32 +739,99 @@ fn closure_fn_trait_impl_datum(
substs: Substs::build_for_def(db, trait_).push(self_ty).push(arg_ty).build(),
};

let output_ty_id = fn_once_trait.associated_type_by_name(db, &name::OUTPUT_TYPE)?;

let output_ty_value = chalk_rust_ir::AssociatedTyValue {
associated_ty_id: output_ty_id.to_chalk(db),
impl_id,
value: make_binders(
chalk_rust_ir::AssociatedTyValueBound { ty: output_ty.to_chalk(db) },
0,
),
};
let output_ty_id = AssocTyValue::ClosureFnTraitImplOutput(data.clone()).to_chalk(db);

let impl_type = chalk_rust_ir::ImplType::External;

let impl_datum_bound = chalk_rust_ir::ImplDatumBound {
trait_ref: trait_ref.to_chalk(db),
where_clauses: Vec::new(),
associated_ty_values: vec![output_ty_value],
};
let impl_datum = ImplDatum {
binders: make_binders(impl_datum_bound, num_args as usize + 1),
impl_type,
polarity: chalk_rust_ir::Polarity::Positive,
associated_ty_value_ids: vec![output_ty_id],
};
Some(Arc::new(impl_datum))
}

pub(crate) fn associated_ty_value_query(
db: &impl HirDatabase,
krate: Crate,
id: chalk_rust_ir::AssociatedTyValueId,
) -> Arc<chalk_rust_ir::AssociatedTyValue<ChalkIr>> {
let data: AssocTyValue = from_chalk(db, id);
match data {
AssocTyValue::TypeAlias(type_alias) => {
type_alias_associated_ty_value(db, krate, type_alias)
}
AssocTyValue::ClosureFnTraitImplOutput(data) => {
closure_fn_trait_output_assoc_ty_value(db, krate, data)
}
}
}

fn type_alias_associated_ty_value(
db: &impl HirDatabase,
_krate: Crate,
type_alias: TypeAlias,
) -> Arc<AssociatedTyValue<ChalkIr>> {
let impl_block = type_alias.impl_block(db).expect("assoc ty value should be in impl");
let impl_id = Impl::ImplBlock(impl_block).to_chalk(db);
let trait_ = impl_block
.target_trait_ref(db)
.expect("assoc ty value should not exist") // we don't return any assoc ty values if the impl'd trait can't be resolved
.trait_;
let assoc_ty = trait_
.associated_type_by_name(db, &type_alias.name(db))
.expect("assoc ty value should not exist"); // validated when building the impl data as well
let generic_params = impl_block.generic_params(db);
let bound_vars = Substs::bound_vars(&generic_params);
let ty = db.type_for_def(type_alias.into(), crate::ty::Namespace::Types).subst(&bound_vars);
let value_bound = chalk_rust_ir::AssociatedTyValueBound { ty: ty.to_chalk(db) };
let value = chalk_rust_ir::AssociatedTyValue {
impl_id,
associated_ty_id: assoc_ty.to_chalk(db),
value: make_binders(value_bound, bound_vars.len()),
};
Arc::new(value)
}

fn closure_fn_trait_output_assoc_ty_value(
db: &impl HirDatabase,
krate: Crate,
data: super::ClosureFnTraitImplData,
) -> Arc<AssociatedTyValue<ChalkIr>> {
let impl_id = Impl::ClosureFnTraitImpl(data.clone()).to_chalk(db);

let num_args: u16 = match &data.def.body(db)[data.expr] {
crate::expr::Expr::Lambda { args, .. } => args.len() as u16,
_ => {
log::warn!("closure for closure type {:?} not found", data);
0
}
};

let output_ty = Ty::Bound(num_args.into());

let fn_once_trait =
get_fn_trait(db, krate, super::FnTrait::FnOnce).expect("assoc ty value should not exist");

let output_ty_id = fn_once_trait
.associated_type_by_name(db, &name::OUTPUT_TYPE)
.expect("assoc ty value should not exist");

let value_bound = chalk_rust_ir::AssociatedTyValueBound { ty: output_ty.to_chalk(db) };

let value = chalk_rust_ir::AssociatedTyValue {
associated_ty_id: output_ty_id.to_chalk(db),
impl_id,
value: make_binders(value_bound, num_args as usize + 1),
};
Arc::new(value)
}

fn get_fn_trait(db: &impl HirDatabase, krate: Crate, fn_trait: super::FnTrait) -> Option<Trait> {
let target = db.lang_item(krate, fn_trait.lang_item_name().into())?;
match target {
@ -803,3 +870,15 @@ impl From<crate::ids::GlobalImplId> for chalk_ir::ImplId {
chalk_ir::ImplId(id_to_chalk(impl_id))
}
}

impl From<chalk_rust_ir::AssociatedTyValueId> for crate::ids::AssocTyValueId {
fn from(id: chalk_rust_ir::AssociatedTyValueId) -> Self {
id_from_chalk(id.0)
}
}

impl From<crate::ids::AssocTyValueId> for chalk_rust_ir::AssociatedTyValueId {
fn from(assoc_ty_value_id: crate::ids::AssocTyValueId) -> Self {
chalk_rust_ir::AssociatedTyValueId(id_to_chalk(assoc_ty_value_id))
}
}

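Several of the conversions above (`ToChalk for Impl`, `ToChalk for AssocTyValue`) lean on salsa interning: a value is swapped for a small id on the way into Chalk and looked up again on the way out, so equal values always map to the same id. A toy version of that round trip follows, with a plain `HashMap` standing in for the salsa `intern_assoc_ty_value` / `lookup_intern_assoc_ty_value` queries; the trimmed `AssocTyValue` variants are assumptions for illustration.

```rust
use std::collections::HashMap;

// Toy intern table: forward map for deduplication, backward Vec for lookup.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
enum AssocTyValue {
    TypeAlias(String),
    ClosureFnTraitImplOutput(u32),
}

#[derive(Default)]
struct Interner {
    forward: HashMap<AssocTyValue, u32>,
    backward: Vec<AssocTyValue>,
}

impl Interner {
    fn intern(&mut self, value: AssocTyValue) -> u32 {
        if let Some(&id) = self.forward.get(&value) {
            return id; // already interned: same value, same id
        }
        let id = self.backward.len() as u32;
        self.backward.push(value.clone());
        self.forward.insert(value, id);
        id
    }

    fn lookup(&self, id: u32) -> &AssocTyValue {
        &self.backward[id as usize]
    }
}

fn main() {
    let mut interner = Interner::default();
    let v = AssocTyValue::TypeAlias("Output".to_string());
    let id = interner.intern(v.clone());
    assert_eq!(id, interner.intern(v.clone())); // stable across calls
    assert_eq!(interner.lookup(id), &v);        // round trip recovers the value
}
```
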
@ -4,6 +4,9 @@ name = "ra_hir_def"
version = "0.1.0"
authors = ["rust-analyzer developers"]

[lib]
doctest = false

[dependencies]
log = "0.4.5"
once_cell = "1.0.1"

@ -8,30 +8,32 @@ use ra_syntax::ast;
use crate::{
adt::{EnumData, StructData},
body::{scope::ExprScopes, Body, BodySourceMap},
imp::ImplData,
nameres::{
raw::{ImportSourceMap, RawItems},
CrateDefMap,
},
DefWithBodyId, EnumId, StructOrUnionId,
DefWithBodyId, EnumId, ImplId, ItemLoc, StructOrUnionId,
};

#[salsa::query_group(InternDatabaseStorage)]
pub trait InternDatabase: SourceDatabase {
#[salsa::interned]
fn intern_function(&self, loc: crate::ItemLoc<ast::FnDef>) -> crate::FunctionId;
fn intern_function(&self, loc: ItemLoc<ast::FnDef>) -> crate::FunctionId;
#[salsa::interned]
fn intern_struct_or_union(&self, loc: crate::ItemLoc<ast::StructDef>)
-> crate::StructOrUnionId;
fn intern_struct_or_union(&self, loc: ItemLoc<ast::StructDef>) -> crate::StructOrUnionId;
#[salsa::interned]
fn intern_enum(&self, loc: crate::ItemLoc<ast::EnumDef>) -> crate::EnumId;
fn intern_enum(&self, loc: ItemLoc<ast::EnumDef>) -> crate::EnumId;
#[salsa::interned]
fn intern_const(&self, loc: crate::ItemLoc<ast::ConstDef>) -> crate::ConstId;
fn intern_const(&self, loc: ItemLoc<ast::ConstDef>) -> crate::ConstId;
#[salsa::interned]
fn intern_static(&self, loc: crate::ItemLoc<ast::StaticDef>) -> crate::StaticId;
fn intern_static(&self, loc: ItemLoc<ast::StaticDef>) -> crate::StaticId;
#[salsa::interned]
fn intern_trait(&self, loc: crate::ItemLoc<ast::TraitDef>) -> crate::TraitId;
fn intern_trait(&self, loc: ItemLoc<ast::TraitDef>) -> crate::TraitId;
#[salsa::interned]
fn intern_type_alias(&self, loc: crate::ItemLoc<ast::TypeAliasDef>) -> crate::TypeAliasId;
fn intern_type_alias(&self, loc: ItemLoc<ast::TypeAliasDef>) -> crate::TypeAliasId;
#[salsa::interned]
fn intern_impl(&self, loc: ItemLoc<ast::ImplBlock>) -> crate::ImplId;
}

#[salsa::query_group(DefDatabase2Storage)]
@ -54,6 +56,9 @@ pub trait DefDatabase2: InternDatabase + AstDatabase {
#[salsa::invoke(EnumData::enum_data_query)]
fn enum_data(&self, e: EnumId) -> Arc<EnumData>;

#[salsa::invoke(ImplData::impl_data_query)]
fn impl_data(&self, e: ImplId) -> Arc<ImplData>;

#[salsa::invoke(Body::body_with_source_map_query)]
fn body_with_source_map(&self, def: DefWithBodyId) -> (Arc<Body>, Arc<BodySourceMap>);

71
crates/ra_hir_def/src/imp.rs
Normal file
@ -0,0 +1,71 @@
//! Defines hir-level representation of impls.
//!
//! The handling is similar, but is not quite the same as for other items,
//! because `impl`s don't have names.

use std::sync::Arc;

use ra_syntax::ast;

use crate::{
db::DefDatabase2, type_ref::TypeRef, AssocItemId, AstItemDef, ConstId, FunctionId, ImplId,
LocationCtx, TypeAliasId,
};

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ImplData {
target_trait: Option<TypeRef>,
target_type: TypeRef,
items: Vec<AssocItemId>,
negative: bool,
}

impl ImplData {
pub(crate) fn impl_data_query(db: &impl DefDatabase2, id: ImplId) -> Arc<ImplData> {
let src = id.source(db);
let items = db.ast_id_map(src.file_id);

let target_trait = src.ast.target_trait().map(TypeRef::from_ast);
let target_type = TypeRef::from_ast_opt(src.ast.target_type());
let negative = src.ast.is_negative();

let items = if let Some(item_list) = src.ast.item_list() {
let ctx = LocationCtx::new(db, id.module(db), src.file_id);
item_list
.impl_items()
.map(|item_node| match item_node {
ast::ImplItem::FnDef(it) => {
FunctionId::from_ast_id(ctx, items.ast_id(&it)).into()
}
ast::ImplItem::ConstDef(it) => {
ConstId::from_ast_id(ctx, items.ast_id(&it)).into()
}
ast::ImplItem::TypeAliasDef(it) => {
TypeAliasId::from_ast_id(ctx, items.ast_id(&it)).into()
}
})
.collect()
} else {
Vec::new()
};

let res = ImplData { target_trait, target_type, items, negative };
Arc::new(res)
}

pub fn target_trait(&self) -> Option<&TypeRef> {
self.target_trait.as_ref()
}

pub fn target_type(&self) -> &TypeRef {
&self.target_type
}

pub fn items(&self) -> &[AssocItemId] {
&self.items
}

pub fn is_negative(&self) -> bool {
self.negative
}
}

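The new module's doc comment notes that impls are handled differently because they have no names; the `ImplId` introduced in this commit is therefore interned from the item's AST location rather than from a path. A reduced sketch of location-based interning follows; the flat `ItemLoc` shape and the toy table are assumptions, not the real salsa machinery.

```rust
use std::collections::HashMap;

// A location-derived key: which file the impl is in, plus a position-derived
// ast id. Two queries for the same impl always produce the same key.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct ItemLoc {
    file: u32,
    ast_id: u32,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct ImplId(u32);

#[derive(Default)]
struct InternTable {
    map: HashMap<ItemLoc, ImplId>,
}

impl InternTable {
    fn intern_impl(&mut self, loc: ItemLoc) -> ImplId {
        let next = ImplId(self.map.len() as u32);
        // Existing location: reuse its id; new location: assign the next one.
        *self.map.entry(loc).or_insert(next)
    }
}

fn main() {
    let mut table = InternTable::default();
    let a = table.intern_impl(ItemLoc { file: 0, ast_id: 7 });
    let b = table.intern_impl(ItemLoc { file: 0, ast_id: 7 });
    assert_eq!(a, b); // same location, same id
}
```
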
@ -13,6 +13,7 @@ pub mod path;
pub mod type_ref;
pub mod builtin_type;
pub mod adt;
pub mod imp;
pub mod diagnostics;
pub mod expr;
pub mod body;
@ -77,14 +78,13 @@ impl ModuleSource {
}
}

pub fn from_child_node(
db: &impl db::DefDatabase2,
file_id: FileId,
child: &SyntaxNode,
) -> ModuleSource {
if let Some(m) = child.ancestors().filter_map(ast::Module::cast).find(|it| !it.has_semi()) {
pub fn from_child_node(db: &impl db::DefDatabase2, child: Source<&SyntaxNode>) -> ModuleSource {
if let Some(m) =
child.ast.ancestors().filter_map(ast::Module::cast).find(|it| !it.has_semi())
{
ModuleSource::Module(m)
} else {
let file_id = child.file_id.original_file(db);
let source_file = db.parse(file_id).tree();
ModuleSource::SourceFile(source_file)
}
@ -321,6 +321,18 @@ impl AstItemDef<ast::TypeAliasDef> for TypeAliasId {
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ImplId(salsa::InternId);
impl_intern_key!(ImplId);
impl AstItemDef<ast::ImplBlock> for ImplId {
fn intern(db: &impl InternDatabase, loc: ItemLoc<ast::ImplBlock>) -> Self {
db.intern_impl(loc)
}
fn lookup_intern(self, db: &impl InternDatabase) -> ItemLoc<ast::ImplBlock> {
db.lookup_intern_impl(self)
}
}

macro_rules! impl_froms {
($e:ident: $($v:ident $(($($sv:ident),*))?),*) => {
$(
@ -384,3 +396,15 @@ pub enum DefWithBodyId {
}

impl_froms!(DefWithBodyId: FunctionId, ConstId, StaticId);

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum AssocItemId {
FunctionId(FunctionId),
ConstId(ConstId),
TypeAliasId(TypeAliasId),
}
// FIXME: not every function, ... is actually an assoc item. maybe we should make
// sure that you can only turn actual assoc items into AssocItemIds. This would
// require not implementing From, and instead having some checked way of
// casting them, and somehow making the constructors private, which would be annoying.
impl_froms!(AssocItemId: FunctionId, ConstId, TypeAliasId);

@ -73,7 +73,7 @@ use crate::{
diagnostics::DefDiagnostic, path_resolution::ResolveMode, per_ns::PerNs, raw::ImportId,
},
path::Path,
AstId, CrateModuleId, FunctionId, ModuleDefId, ModuleId, TraitId,
AstId, CrateModuleId, FunctionId, ImplId, ModuleDefId, ModuleId, TraitId,
};

/// Contains all top-level defs from a macro-expanded crate
@ -122,16 +122,17 @@ pub struct ModuleData {
///
/// Note that non-inline modules, by definition, live inside a non-macro file.
pub definition: Option<FileId>,
pub impls: Vec<ImplId>,
}

#[derive(Default, Debug, PartialEq, Eq, Clone)]
#[derive(Default, Debug, PartialEq, Eq)]
pub(crate) struct Declarations {
fns: FxHashMap<FileAstId<ast::FnDef>, FunctionId>,
}

#[derive(Debug, Default, PartialEq, Eq, Clone)]
#[derive(Debug, Default, PartialEq, Eq)]
pub struct ModuleScope {
pub items: FxHashMap<Name, Resolution>,
items: FxHashMap<Name, Resolution>,
/// Macros visible in the current module in legacy textual scope
///
/// For macros invoked by an unqualified identifier like `bar!()`, `legacy_macros` will be searched first.

@ -19,7 +19,7 @@ use crate::{
per_ns::PerNs, raw, CrateDefMap, ModuleData, Resolution, ResolveMode,
},
path::{Path, PathKind},
AdtId, AstId, AstItemDef, ConstId, CrateModuleId, EnumId, EnumVariantId, FunctionId,
AdtId, AstId, AstItemDef, ConstId, CrateModuleId, EnumId, EnumVariantId, FunctionId, ImplId,
LocationCtx, ModuleDefId, ModuleId, StaticId, StructId, StructOrUnionId, TraitId, TypeAliasId,
UnionId,
};
@ -165,7 +165,7 @@ where
/// crate::foo!();
/// ```
///
/// Well, this code compiles, bacause the plain path `foo` in `use` is searched
/// Well, this code compiles, because the plain path `foo` in `use` is searched
/// in the legacy textual scope only.
/// ```rust
/// macro_rules! foo { () => {} }
@ -571,6 +571,15 @@ where
.push((self.module_id, import_id, self.raw_items[import_id].clone())),
raw::RawItemKind::Def(def) => self.define_def(&self.raw_items[def]),
raw::RawItemKind::Macro(mac) => self.collect_macro(&self.raw_items[mac]),
raw::RawItemKind::Impl(imp) => {
let module = ModuleId {
krate: self.def_collector.def_map.krate,
module_id: self.module_id,
};
let ctx = LocationCtx::new(self.def_collector.db, module, self.file_id);
let imp_id = ImplId::from_ast_id(ctx, self.raw_items[imp].ast_id);
self.def_collector.def_map.modules[self.module_id].impls.push(imp_id)
}
}
}
}

@@ -28,6 +28,7 @@ pub struct RawItems {
     imports: Arena<ImportId, ImportData>,
     defs: Arena<Def, DefData>,
     macros: Arena<Macro, MacroData>,
+    impls: Arena<Impl, ImplData>,
     /// items for top-level module
     items: Vec<RawItem>,
 }
@@ -121,6 +122,13 @@ impl Index<Macro> for RawItems {
     }
 }
 
+impl Index<Impl> for RawItems {
+    type Output = ImplData;
+    fn index(&self, idx: Impl) -> &ImplData {
+        &self.impls[idx]
+    }
+}
+
 // Avoid heap allocation on items without attributes.
 type Attrs = Option<Arc<[Attr]>>;
 
@@ -142,6 +150,7 @@ pub(super) enum RawItemKind {
     Import(ImportId),
     Def(Def),
     Macro(Macro),
+    Impl(Impl),
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -203,6 +212,15 @@ pub(super) struct MacroData {
     pub(super) builtin: bool,
 }
 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub(super) struct Impl(RawId);
+impl_arena_id!(Impl);
+
+#[derive(Debug, PartialEq, Eq)]
+pub(super) struct ImplData {
+    pub(super) ast_id: FileAstId<ast::ImplBlock>,
+}
+
 struct RawItemsCollector {
     raw_items: RawItems,
     source_ast_id_map: Arc<AstIdMap>,
@@ -236,8 +254,8 @@ impl RawItemsCollector {
                 self.add_extern_crate_item(current_module, extern_crate);
                 return;
             }
-            ast::ModuleItem::ImplBlock(_) => {
-                // impls don't participate in name resolution
+            ast::ModuleItem::ImplBlock(it) => {
+                self.add_impl(current_module, it);
                 return;
             }
             ast::ModuleItem::StructDef(it) => {
@@ -376,6 +394,13 @@ impl RawItemsCollector {
         self.push_item(current_module, attrs, RawItemKind::Macro(m));
     }
 
+    fn add_impl(&mut self, current_module: Option<Module>, imp: ast::ImplBlock) {
+        let attrs = self.parse_attrs(&imp);
+        let ast_id = self.source_ast_id_map.ast_id(&imp);
+        let imp = self.raw_items.impls.alloc(ImplData { ast_id });
+        self.push_item(current_module, attrs, RawItemKind::Impl(imp))
+    }
+
     fn push_import(
         &mut self,
         current_module: Option<Module>,
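With the new `impls` arena and the `Index<Impl>` impl above, a raw impl block's data is fetched by its arena id. A small sketch under those assumptions (the helper name is hypothetical, not from the diff):

    // `raw_items[imp]` goes through the Index<Impl> impl added in this commit.
    fn impl_ast_id(raw_items: &RawItems, imp: Impl) -> FileAstId<ast::ImplBlock> {
        raw_items[imp].ast_id
    }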
@@ -4,6 +4,9 @@ name = "ra_hir_expand"
 version = "0.1.0"
 authors = ["rust-analyzer developers"]
 
+[lib]
+doctest = false
+
 [dependencies]
 log = "0.4.5"
@@ -32,10 +32,17 @@ impl TokenExpander {
         }
     }
 
-    pub fn shift(&self) -> u32 {
+    pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
         match self {
-            TokenExpander::MacroRules(it) => it.shift(),
-            TokenExpander::Builtin(_) => 0,
+            TokenExpander::MacroRules(it) => it.map_id_down(id),
+            TokenExpander::Builtin(..) => id,
         }
     }
+
+    pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
+        match self {
+            TokenExpander::MacroRules(it) => it.map_id_up(id),
+            TokenExpander::Builtin(..) => (id, mbe::Origin::Def),
+        }
+    }
 }
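The intent of the two mappings, sketched (editor's illustration, not part of the diff; assumes a `MacroRules` expander): ids from the call-site argument are shifted down into the expansion, and `map_id_up` attributes an expansion id back to either the call site or the macro definition.

    fn round_trip(expander: &TokenExpander, id: tt::TokenId) {
        let down = expander.map_id_down(id);
        let (up, origin) = expander.map_id_up(down);
        match origin {
            mbe::Origin::Call => assert_eq!(up, id), // token came from the call-site arg
            mbe::Origin::Def => {}                   // token originated in the macro definition
        }
    }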
@@ -18,8 +18,9 @@ use std::sync::Arc;
 
 use ra_db::{salsa, CrateId, FileId};
 use ra_syntax::{
     algo,
     ast::{self, AstNode},
-    SyntaxNode, TextRange, TextUnit,
+    SyntaxNode, SyntaxToken, TextRange, TextUnit,
 };
 
 use crate::ast_id_map::FileAstId;
@@ -83,14 +84,21 @@ impl HirFileId {
                     loc.def.ast_id.to_node(db).token_tree()?.syntax().text_range().start();
 
                 let macro_def = db.macro_def(loc.def)?;
-                let shift = macro_def.0.shift();
-                let exp_map = db.parse_macro(macro_file)?.1;
+                let (parse, exp_map) = db.parse_macro(macro_file)?;
+                let expanded = Source::new(self, parse.syntax_node());
                 let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
 
                 let arg_start = (loc.ast_id.file_id, arg_start);
                 let def_start = (loc.def.ast_id.file_id, def_start);
 
-                Some(ExpansionInfo { arg_start, def_start, macro_arg, macro_def, exp_map, shift })
+                Some(ExpansionInfo {
+                    expanded,
+                    arg_start,
+                    def_start,
+                    macro_arg,
+                    macro_def,
+                    exp_map,
+                })
             }
         }
     }
@@ -147,26 +155,42 @@ impl MacroCallId {
     }
 }
 
-#[derive(Debug, Clone, PartialEq, Eq)]
+/// ExpansionInfo mainly describes how to map text range between src and expanded macro
+#[derive(Debug, Clone, PartialEq, Eq)]
 pub struct ExpansionInfo {
-    pub(crate) arg_start: (HirFileId, TextUnit),
-    pub(crate) def_start: (HirFileId, TextUnit),
-    pub(crate) shift: u32,
+    expanded: Source<SyntaxNode>,
+    arg_start: (HirFileId, TextUnit),
+    def_start: (HirFileId, TextUnit),
 
-    pub(crate) macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
-    pub(crate) macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
-    pub(crate) exp_map: Arc<mbe::RevTokenMap>,
+    macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
+    macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
+    exp_map: Arc<mbe::RevTokenMap>,
 }
 
 impl ExpansionInfo {
+    pub fn map_token_down(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
+        assert_eq!(token.file_id, self.arg_start.0);
+        let range = token.ast.text_range().checked_sub(self.arg_start.1)?;
+        let token_id = self.macro_arg.1.token_by_range(range)?;
+        let token_id = self.macro_def.0.map_id_down(token_id);
+
+        let range = self.exp_map.range_by_token(token_id)?;
+
+        let token = algo::find_covering_element(&self.expanded.ast, range).into_token()?;
+
+        Some(self.expanded.with_ast(token))
+    }
+
+    // FIXME: a more correct signature would be
+    // `pub fn map_token_up(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>>`
    pub fn find_range(&self, from: TextRange) -> Option<(HirFileId, TextRange)> {
        let token_id = look_in_rev_map(&self.exp_map, from)?;
 
-        let (token_map, (file_id, start_offset), token_id) = if token_id.0 >= self.shift {
-            (&self.macro_arg.1, self.arg_start, tt::TokenId(token_id.0 - self.shift).into())
-        } else {
-            (&self.macro_def.1, self.def_start, token_id)
+        let (token_id, origin) = self.macro_def.0.map_id_up(token_id);
+
+        let (token_map, (file_id, start_offset)) = match origin {
+            mbe::Origin::Call => (&self.macro_arg.1, self.arg_start),
+            mbe::Origin::Def => (&self.macro_def.1, self.def_start),
         };
 
         let range = token_map.relative_range_of(token_id)?;
@@ -223,18 +247,30 @@ impl<N: AstNode> AstId<N> {
     }
 }
 
+/// FIXME: https://github.com/matklad/with ?
 #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
 pub struct Source<T> {
     pub file_id: HirFileId,
+    // FIXME: this stores all kind of things, not only `ast`.
+    // There should be a better name...
     pub ast: T,
 }
 
 impl<T> Source<T> {
+    pub fn new(file_id: HirFileId, ast: T) -> Source<T> {
+        Source { file_id, ast }
+    }
+
+    // Similarly, naming here is stupid...
+    pub fn with_ast<U>(&self, ast: U) -> Source<U> {
+        Source::new(self.file_id, ast)
+    }
+
     pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
-        Source { file_id: self.file_id, ast: f(self.ast) }
+        Source::new(self.file_id, f(self.ast))
    }
    pub fn as_ref(&self) -> Source<&T> {
-        Source { file_id: self.file_id, ast: &self.ast }
+        self.with_ast(&self.ast)
    }
    pub fn file_syntax(&self, db: &impl db::AstDatabase) -> SyntaxNode {
        db.parse_or_expand(self.file_id).expect("source created from invalid file")
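The new `Source` constructors keep the `file_id` attached while the payload changes shape; a short sketch (the `demo` function is hypothetical, types as in the diff):

    fn demo(file_id: HirFileId, node: SyntaxNode) {
        let src = Source::new(file_id, node);            // build
        let borrowed = src.as_ref();                     // Source<&SyntaxNode>, same file_id
        let range = borrowed.map(|it| it.text_range());  // transform payload, keep file_id
        let _ = range;
    }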
@@ -4,6 +4,9 @@ name = "ra_ide_api"
 version = "0.1.0"
 authors = ["rust-analyzer developers"]
 
+[lib]
+doctest = false
+
 [features]
 wasm = []
@@ -19,7 +19,11 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
     let calling_node = FnCallNode::with_node(&syntax, position.offset)?;
     let name_ref = calling_node.name_ref()?;
 
-    let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None);
+    let analyzer = hir::SourceAnalyzer::new(
+        db,
+        hir::Source::new(position.file_id.into(), name_ref.syntax()),
+        None,
+    );
     let (mut call_info, has_self) = match &calling_node {
         FnCallNode::CallExpr(expr) => {
             //FIXME: apply subst
@@ -271,7 +271,6 @@ impl RootDatabase {
         self.query(hir::db::AstIdMapQuery).sweep(sweep);
 
         self.query(hir::db::RawItemsWithSourceMapQuery).sweep(sweep);
-        self.query(hir::db::ImplsInModuleWithSourceMapQuery).sweep(sweep);
         self.query(hir::db::BodyWithSourceMapQuery).sweep(sweep);
 
         self.query(hir::db::ExprScopesQuery).sweep(sweep);
@@ -314,8 +313,6 @@ impl RootDatabase {
             hir::db::RawItemsWithSourceMapQuery
             hir::db::RawItemsQuery
             hir::db::CrateDefMapQuery
-            hir::db::ImplsInModuleWithSourceMapQuery
-            hir::db::ImplsInModuleQuery
             hir::db::GenericParamsQuery
             hir::db::FnDataQuery
             hir::db::TypeAliasDataQuery
@@ -340,6 +337,7 @@ impl RootDatabase {
             hir::db::TraitDatumQuery
             hir::db::StructDatumQuery
             hir::db::ImplDatumQuery
+            hir::db::ImplDataQuery
             hir::db::TraitSolveQuery
         ];
         acc.sort_by_key(|it| std::cmp::Reverse(it.1));
@@ -58,8 +58,11 @@ impl<'a> CompletionContext<'a> {
         );
         let token =
             original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?;
-        let analyzer =
-            hir::SourceAnalyzer::new(db, position.file_id, &token.parent(), Some(position.offset));
+        let analyzer = hir::SourceAnalyzer::new(
+            db,
+            hir::Source::new(position.file_id.into(), &token.parent()),
+            Some(position.offset),
+        );
         let mut ctx = CompletionContext {
             db,
             analyzer,
@@ -1,10 +1,11 @@
 //! FIXME: write short doc here
 
 use ra_db::{FileId, SourceDatabase};
+use std::iter::successors;
 
 use hir::{db::AstDatabase, Source};
 use ra_syntax::{
     algo::find_node_at_offset,
     ast::{self, DocCommentsOwner},
-    match_ast, AstNode, SyntaxNode,
+    match_ast, AstNode, SyntaxNode, SyntaxToken,
 };
 
 use crate::{
@@ -18,17 +19,42 @@ pub(crate) fn goto_definition(
     db: &RootDatabase,
     position: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
-    let parse = db.parse(position.file_id);
-    let syntax = parse.tree().syntax().clone();
-    if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&syntax, position.offset) {
-        let navs = reference_definition(db, position.file_id, &name_ref).to_vec();
-        return Some(RangeInfo::new(name_ref.syntax().text_range(), navs.to_vec()));
-    }
-    if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, position.offset) {
-        let navs = name_definition(db, position.file_id, &name)?;
-        return Some(RangeInfo::new(name.syntax().text_range(), navs));
-    }
-    None
+    let token = descend_into_macros(db, position)?;
+
+    let res = match_ast! {
+        match (token.ast.parent()) {
+            ast::NameRef(name_ref) => {
+                let navs = reference_definition(db, token.with_ast(&name_ref)).to_vec();
+                RangeInfo::new(name_ref.syntax().text_range(), navs.to_vec())
+            },
+            ast::Name(name) => {
+                let navs = name_definition(db, token.with_ast(&name))?;
+                RangeInfo::new(name.syntax().text_range(), navs)
+            },
+            _ => return None,
+        }
+    };
+
+    Some(res)
+}
+
+fn descend_into_macros(db: &RootDatabase, position: FilePosition) -> Option<Source<SyntaxToken>> {
+    let file = db.parse_or_expand(position.file_id.into())?;
+    let token = file.token_at_offset(position.offset).filter(|it| !it.kind().is_trivia()).next()?;
+
+    successors(Some(Source::new(position.file_id.into(), token)), |token| {
+        let macro_call = token.ast.ancestors().find_map(ast::MacroCall::cast)?;
+        let tt = macro_call.token_tree()?;
+        if !token.ast.text_range().is_subrange(&tt.syntax().text_range()) {
+            return None;
+        }
+        let source_analyzer =
+            hir::SourceAnalyzer::new(db, token.with_ast(token.ast.parent()).as_ref(), None);
+        let exp = source_analyzer.expand(db, &macro_call)?;
+        exp.map_token_down(db, token.as_ref())
+    })
+    .last()
 }
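`descend_into_macros` leans on `std::iter::successors`: starting from the token under the cursor it keeps mapping the token down into the macro expansion until that fails, and `.last()` yields the deepest one. The same iteration pattern in miniature (self-contained, unrelated data):

    use std::iter::successors;

    fn main() {
        // Keep halving while even; `.last()` is the deepest step, here 3.
        let deepest = successors(Some(24u32), |n| (n % 2 == 0).then(|| n / 2)).last();
        assert_eq!(deepest, Some(3));
    }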
#[derive(Debug)]
@@ -49,12 +75,11 @@ impl ReferenceResult {
 
 pub(crate) fn reference_definition(
     db: &RootDatabase,
-    file_id: FileId,
-    name_ref: &ast::NameRef,
+    name_ref: Source<&ast::NameRef>,
 ) -> ReferenceResult {
     use self::ReferenceResult::*;
 
-    let name_kind = classify_name_ref(db, file_id, &name_ref).map(|d| d.kind);
+    let name_kind = classify_name_ref(db, name_ref).map(|d| d.kind);
     match name_kind {
         Some(Macro(mac)) => return Exact(mac.to_nav(db)),
         Some(Field(field)) => return Exact(field.to_nav(db)),
@@ -76,7 +101,7 @@ pub(crate) fn reference_definition(
     };
 
     // Fallback index based approach:
-    let navs = crate::symbol_index::index_resolve(db, name_ref)
+    let navs = crate::symbol_index::index_resolve(db, name_ref.ast)
         .into_iter()
         .map(|s| s.to_nav(db))
         .collect();
@@ -85,14 +110,13 @@ pub(crate) fn reference_definition(
 
 pub(crate) fn name_definition(
     db: &RootDatabase,
-    file_id: FileId,
-    name: &ast::Name,
+    name: Source<&ast::Name>,
 ) -> Option<Vec<NavigationTarget>> {
-    let parent = name.syntax().parent()?;
+    let parent = name.ast.syntax().parent()?;
 
     if let Some(module) = ast::Module::cast(parent.clone()) {
         if module.has_semi() {
-            let src = hir::Source { file_id: file_id.into(), ast: module };
+            let src = name.with_ast(module);
             if let Some(child_module) = hir::Module::from_declaration(db, src) {
                 let nav = child_module.to_nav(db);
                 return Some(vec![nav]);
@@ -100,20 +124,20 @@ pub(crate) fn name_definition(
         }
     }
 
-    if let Some(nav) = named_target(db, file_id, &parent) {
+    if let Some(nav) = named_target(db, name.with_ast(&parent)) {
         return Some(vec![nav]);
     }
 
     None
 }
 
-fn named_target(db: &RootDatabase, file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget> {
+fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<NavigationTarget> {
     match_ast! {
-        match node {
+        match (node.ast) {
             ast::StructDef(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    file_id.into(),
+                    node.file_id,
                     &it,
                     it.doc_comment_text(),
                     it.short_label(),
@@ -122,7 +146,7 @@ fn named_target(db: &RootDatabase, file_id: FileId, node: &SyntaxNode) -> Option
             ast::EnumDef(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    file_id.into(),
+                    node.file_id,
                     &it,
                     it.doc_comment_text(),
                     it.short_label(),
@@ -131,7 +155,7 @@ fn named_target(db: &RootDatabase, file_id: FileId, node: &SyntaxNode) -> Option
             ast::EnumVariant(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    file_id.into(),
+                    node.file_id,
                     &it,
                     it.doc_comment_text(),
                     it.short_label(),
@@ -140,7 +164,7 @@ fn named_target(db: &RootDatabase, file_id: FileId, node: &SyntaxNode) -> Option
             ast::FnDef(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    file_id.into(),
+                    node.file_id,
                     &it,
                     it.doc_comment_text(),
                     it.short_label(),
@@ -149,7 +173,7 @@ fn named_target(db: &RootDatabase, file_id: FileId, node: &SyntaxNode) -> Option
             ast::TypeAliasDef(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    file_id.into(),
+                    node.file_id,
                     &it,
                     it.doc_comment_text(),
                     it.short_label(),
@@ -158,7 +182,7 @@ fn named_target(db: &RootDatabase, file_id: FileId, node: &SyntaxNode) -> Option
             ast::ConstDef(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    file_id.into(),
+                    node.file_id,
                     &it,
                     it.doc_comment_text(),
                     it.short_label(),
@@ -167,7 +191,7 @@ fn named_target(db: &RootDatabase, file_id: FileId, node: &SyntaxNode) -> Option
             ast::StaticDef(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    file_id.into(),
+                    node.file_id,
                     &it,
                     it.doc_comment_text(),
                     it.short_label(),
@@ -176,7 +200,7 @@ fn named_target(db: &RootDatabase, file_id: FileId, node: &SyntaxNode) -> Option
             ast::TraitDef(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    file_id.into(),
+                    node.file_id,
                     &it,
                     it.doc_comment_text(),
                     it.short_label(),
@@ -185,7 +209,7 @@ fn named_target(db: &RootDatabase, file_id: FileId, node: &SyntaxNode) -> Option
             ast::RecordFieldDef(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    file_id.into(),
+                    node.file_id,
                     &it,
                     it.doc_comment_text(),
                     it.short_label(),
@@ -194,7 +218,7 @@ fn named_target(db: &RootDatabase, file_id: FileId, node: &SyntaxNode) -> Option
             ast::Module(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    file_id.into(),
+                    node.file_id,
                     &it,
                     it.doc_comment_text(),
                     it.short_label(),
@@ -203,7 +227,7 @@ fn named_target(db: &RootDatabase, file_id: FileId, node: &SyntaxNode) -> Option
             ast::MacroCall(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    file_id.into(),
+                    node.file_id,
                     &it,
                     it.doc_comment_text(),
                     None,
@@ -677,4 +701,23 @@ mod tests {
             "bar MODULE FileId(1) [0; 11) [4; 7)",
         );
     }
+
+    #[test]
+    fn goto_from_macro() {
+        check_goto(
+            "
+            //- /lib.rs
+            macro_rules! id {
+                ($($tt:tt)*) => { $($tt)* }
+            }
+            fn foo() {}
+            id! {
+                fn bar() {
+                    fo<|>o();
+                }
+            }
+            ",
+            "foo FN_DEF FileId(1) [52; 63) [55; 58)",
+        );
+    }
 }
@@ -18,7 +18,8 @@ pub(crate) fn goto_type_definition(
             .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())
     })?;
 
-    let analyzer = hir::SourceAnalyzer::new(db, position.file_id, &node, None);
+    let analyzer =
+        hir::SourceAnalyzer::new(db, hir::Source::new(position.file_id.into(), &node), None);
 
     let ty: hir::Ty = if let Some(ty) =
         ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))
@@ -1,6 +1,6 @@
 //! FIXME: write short doc here
 
-use hir::{Adt, HasSource, HirDisplay};
+use hir::{Adt, HasSource, HirDisplay, Source};
 use ra_db::SourceDatabase;
 use ra_syntax::{
     algo::{ancestors_at_offset, find_covering_element, find_node_at_offset},
@@ -171,7 +171,8 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
         find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset)
     {
         let mut no_fallback = false;
-        if let Some(name_kind) = classify_name_ref(db, position.file_id, &name_ref).map(|d| d.kind)
+        if let Some(name_kind) =
+            classify_name_ref(db, Source::new(position.file_id.into(), &name_ref)).map(|d| d.kind)
         {
             res.extend(hover_text_from_name_kind(db, name_kind, &mut no_fallback))
         }
@@ -230,7 +231,8 @@ pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
         .ancestors()
         .take_while(|it| it.text_range() == leaf_node.text_range())
         .find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?;
-    let analyzer = hir::SourceAnalyzer::new(db, frange.file_id, &node, None);
+    let analyzer =
+        hir::SourceAnalyzer::new(db, hir::Source::new(frange.file_id.into(), &node), None);
     let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))
     {
         ty
@@ -32,6 +32,7 @@ fn get_inlay_hints(
     file_id: FileId,
     node: &SyntaxNode,
 ) -> Option<Vec<InlayHint>> {
+    let analyzer = SourceAnalyzer::new(db, hir::Source::new(file_id.into(), node), None);
     match_ast! {
         match node {
             ast::LetStmt(it) => {
@@ -39,11 +40,9 @@ fn get_inlay_hints(
                     return None;
                 }
                 let pat = it.pat()?;
-                let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
                 Some(get_pat_type_hints(db, &analyzer, pat, false))
             },
             ast::LambdaExpr(it) => {
-                let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
                 it.param_list().map(|param_list| {
                     param_list
                         .params()
@@ -56,21 +55,17 @@ fn get_inlay_hints(
             },
             ast::ForExpr(it) => {
                 let pat = it.pat()?;
-                let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
                 Some(get_pat_type_hints(db, &analyzer, pat, false))
             },
             ast::IfExpr(it) => {
                 let pat = it.condition()?.pat()?;
-                let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
                 Some(get_pat_type_hints(db, &analyzer, pat, true))
             },
             ast::WhileExpr(it) => {
                 let pat = it.condition()?.pat()?;
-                let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
                 Some(get_pat_type_hints(db, &analyzer, pat, true))
             },
             ast::MatchArmList(it) => {
-                let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
                 Some(
                     it
                         .arms()
@@ -14,6 +14,7 @@ mod name_definition;
 mod rename;
 mod search_scope;
 
+use hir::Source;
 use once_cell::unsync::Lazy;
 use ra_db::{SourceDatabase, SourceDatabaseExt};
 use ra_prof::profile;
@@ -114,7 +115,7 @@ fn find_name<'a>(
         return Some(RangeInfo::new(range, (name.text().to_string(), def)));
     }
     let name_ref = find_node_at_offset::<ast::NameRef>(&syntax, position.offset)?;
-    let def = classify_name_ref(db, position.file_id, &name_ref)?;
+    let def = classify_name_ref(db, Source::new(position.file_id.into(), &name_ref))?;
     let range = name_ref.syntax().text_range();
     Some(RangeInfo::new(range, (name_ref.text().to_string(), def)))
 }
@@ -146,7 +147,7 @@ fn process_definition(
                     continue;
                 }
             }
-            if let Some(d) = classify_name_ref(db, file_id, &name_ref) {
+            if let Some(d) = classify_name_ref(db, Source::new(file_id.into(), &name_ref)) {
                 if d == def {
                     refs.push(FileRange { file_id, range });
                 }
@@ -369,6 +370,21 @@ mod tests {
         assert_eq!(refs.len(), 2);
     }
 
+    #[test]
+    fn test_find_all_refs_macro_def() {
+        let code = r#"
+        #[macro_export]
+        macro_rules! m1<|> { () => (()) }
+
+        fn foo() {
+            m1();
+            m1();
+        }"#;
+
+        let refs = get_all_refs(code);
+        assert_eq!(refs.len(), 3);
+    }
+
     fn get_all_refs(text: &str) -> ReferenceSearchResult {
         let (analysis, position) = single_file_with_position(text);
         analysis.find_all_refs(position, None).unwrap().unwrap()
@@ -21,7 +21,6 @@ pub(crate) fn classify_name(
     let parent = name.syntax().parent()?;
     let file_id = file_id.into();
 
-    // FIXME: add ast::MacroCall(it)
     match_ast! {
         match parent {
             ast::BindPat(it) => {
@@ -104,6 +103,19 @@ pub(crate) fn classify_name(
                     Some(from_module_def(db, def.into(), None))
                 }
             },
+            ast::MacroCall(it) => {
+                let src = hir::Source { file_id, ast: it };
+                let def = hir::MacroDef::from_source(db, src.clone())?;
+
+                let module_src = ModuleSource::from_child_node(db, src.as_ref().map(|it| it.syntax()));
+                let module = Module::from_definition(db, Source::new(file_id, module_src))?;
+
+                Some(NameDefinition {
+                    visibility: None,
+                    container: module,
+                    kind: NameKind::Macro(def),
+                })
+            },
             _ => None,
         }
     }
@@ -111,15 +123,12 @@ pub(crate) fn classify_name(
 
 pub(crate) fn classify_name_ref(
     db: &RootDatabase,
-    file_id: FileId,
-    name_ref: &ast::NameRef,
+    name_ref: Source<&ast::NameRef>,
 ) -> Option<NameDefinition> {
-    use PathResolution::*;
-
     let _p = profile("classify_name_ref");
 
-    let parent = name_ref.syntax().parent()?;
-    let analyzer = SourceAnalyzer::new(db, file_id, name_ref.syntax(), None);
+    let parent = name_ref.ast.syntax().parent()?;
+    let analyzer = SourceAnalyzer::new(db, name_ref.map(|it| it.syntax()), None);
 
     if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) {
         tested_by!(goto_definition_works_for_methods);
@@ -139,17 +148,16 @@ pub(crate) fn classify_name_ref(
         tested_by!(goto_definition_works_for_record_fields);
         if let Some(record_lit) = record_field.syntax().ancestors().find_map(ast::RecordLit::cast) {
             let variant_def = analyzer.resolve_record_literal(&record_lit)?;
-            let hir_path = Path::from_name_ref(name_ref);
+            let hir_path = Path::from_name_ref(name_ref.ast);
             let hir_name = hir_path.as_ident()?;
             let field = variant_def.field(db, hir_name)?;
             return Some(from_struct_field(db, field));
         }
     }
 
-    let ast = ModuleSource::from_child_node(db, file_id, &parent);
-    let file_id = file_id.into();
+    let ast = ModuleSource::from_child_node(db, name_ref.with_ast(&parent));
     // FIXME: find correct container and visibility for each case
-    let container = Module::from_definition(db, Source { file_id, ast })?;
+    let container = Module::from_definition(db, name_ref.with_ast(ast))?;
     let visibility = None;
 
     if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) {
@@ -160,29 +168,29 @@ pub(crate) fn classify_name_ref(
         }
     }
 
-    let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?;
+    let path = name_ref.ast.syntax().ancestors().find_map(ast::Path::cast)?;
     let resolved = analyzer.resolve_path(db, &path)?;
     match resolved {
-        Def(def) => Some(from_module_def(db, def, Some(container))),
-        AssocItem(item) => Some(from_assoc_item(db, item)),
-        Local(local) => {
+        PathResolution::Def(def) => Some(from_module_def(db, def, Some(container))),
+        PathResolution::AssocItem(item) => Some(from_assoc_item(db, item)),
+        PathResolution::Local(local) => {
             let container = local.module(db);
             let kind = NameKind::Local(local);
             Some(NameDefinition { kind, container, visibility: None })
         }
-        GenericParam(par) => {
+        PathResolution::GenericParam(par) => {
             // FIXME: get generic param def
             let kind = NameKind::GenericParam(par);
             Some(NameDefinition { kind, container, visibility })
         }
-        Macro(def) => {
+        PathResolution::Macro(def) => {
             let kind = NameKind::Macro(def);
             Some(NameDefinition { kind, container, visibility })
        }
-        SelfType(impl_block) => {
+        PathResolution::SelfType(impl_block) => {
            let ty = impl_block.target_ty(db);
            let kind = NameKind::SelfType(ty);
-            let container = impl_block.module();
+            let container = impl_block.module(db);
            Some(NameDefinition { kind, container, visibility })
        }
    }
@@ -1,5 +1,6 @@
 //! FIXME: write short doc here
 
+use hir::Source;
 use itertools::Itertools;
 use ra_db::SourceDatabase;
 use ra_syntax::{
@@ -65,9 +66,8 @@ fn runnable_mod(db: &RootDatabase, file_id: FileId, module: ast::Module) -> Opti
         return None;
     }
     let range = module.syntax().text_range();
-    let src = hir::ModuleSource::from_child_node(db, file_id, &module.syntax());
-    let module =
-        hir::Module::from_definition(db, hir::Source { file_id: file_id.into(), ast: src })?;
+    let src = hir::ModuleSource::from_child_node(db, Source::new(file_id.into(), &module.syntax()));
+    let module = hir::Module::from_definition(db, Source::new(file_id.into(), src))?;
 
     let path = module.path_to_root(db).into_iter().rev().filter_map(|it| it.name(db)).join("::");
     Some(Runnable { range, kind: RunnableKind::TestMod { path } })
@@ -2,7 +2,7 @@
 
 use rustc_hash::{FxHashMap, FxHashSet};
 
-use hir::{Mutability, Name};
+use hir::{Mutability, Name, Source};
 use ra_db::SourceDatabase;
 use ra_prof::profile;
 use ra_syntax::{ast, AstNode, Direction, SyntaxElement, SyntaxKind, SyntaxKind::*, TextRange, T};
@@ -80,7 +80,8 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
                 }
 
                 let name_ref = node.as_node().cloned().and_then(ast::NameRef::cast).unwrap();
-                let name_kind = classify_name_ref(db, file_id, &name_ref).map(|d| d.kind);
+                let name_kind =
+                    classify_name_ref(db, Source::new(file_id.into(), &name_ref)).map(|d| d.kind);
 
                 if let Some(Local(local)) = &name_kind {
                     if let Some(name) = local.name(db) {
@@ -4,6 +4,9 @@ name = "ra_lsp_server"
 version = "0.1.0"
 authors = ["rust-analyzer developers"]
 
+[lib]
+doctest = false
+
 [dependencies]
 threadpool = "1.7.1"
 relative-path = "1.0.0"
@@ -4,6 +4,9 @@ name = "ra_mbe"
 version = "0.1.0"
 authors = ["rust-analyzer developers"]
 
+[lib]
+doctest = false
+
 [dependencies]
 ra_syntax = { path = "../ra_syntax" }
 ra_parser = { path = "../ra_parser" }
@@ -14,4 +17,3 @@ log = "0.4.5"
 
 [dev-dependencies]
 test_utils = { path = "../test_utils" }
-
@@ -40,47 +40,73 @@ pub use crate::syntax_bridge::{
 /// and `$()*` have special meaning (see `Var` and `Repeat` data structures)
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct MacroRules {
-    pub(crate) rules: Vec<Rule>,
+    rules: Vec<Rule>,
     /// Highest id of the token we have in TokenMap
-    pub(crate) shift: u32,
+    shift: Shift,
 }
 
 #[derive(Clone, Debug, PartialEq, Eq)]
-pub(crate) struct Rule {
-    pub(crate) lhs: tt::Subtree,
-    pub(crate) rhs: tt::Subtree,
+struct Rule {
+    lhs: tt::Subtree,
+    rhs: tt::Subtree,
 }
 
-// Find the max token id inside a subtree
-fn max_id(subtree: &tt::Subtree) -> Option<u32> {
-    subtree
-        .token_trees
-        .iter()
-        .filter_map(|tt| match tt {
-            tt::TokenTree::Subtree(subtree) => max_id(subtree),
-            tt::TokenTree::Leaf(tt::Leaf::Ident(ident))
-                if ident.id != tt::TokenId::unspecified() =>
-            {
-                Some(ident.id.0)
-            }
-            _ => None,
-        })
-        .max()
-}
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+struct Shift(u32);
 
-/// Shift given TokenTree token id
-fn shift_subtree(tt: &mut tt::Subtree, shift: u32) {
-    for t in tt.token_trees.iter_mut() {
-        match t {
-            tt::TokenTree::Leaf(leaf) => match leaf {
-                tt::Leaf::Ident(ident) if ident.id != tt::TokenId::unspecified() => {
-                    ident.id.0 += shift;
-                }
-                _ => (),
-            },
-            tt::TokenTree::Subtree(tt) => shift_subtree(tt, shift),
-        }
-    }
-}
+impl Shift {
+    fn new(tt: &tt::Subtree) -> Shift {
+        // Note that TokenId is started from zero,
+        // We have to add 1 to prevent duplication.
+        let value = max_id(tt).map_or(0, |it| it + 1);
+        return Shift(value);
+
+        // Find the max token id inside a subtree
+        fn max_id(subtree: &tt::Subtree) -> Option<u32> {
+            subtree
+                .token_trees
+                .iter()
+                .filter_map(|tt| match tt {
+                    tt::TokenTree::Subtree(subtree) => max_id(subtree),
+                    tt::TokenTree::Leaf(tt::Leaf::Ident(ident))
+                        if ident.id != tt::TokenId::unspecified() =>
+                    {
+                        Some(ident.id.0)
+                    }
+                    _ => None,
+                })
+                .max()
+        }
+    }
+
+    /// Shift given TokenTree token id
+    fn shift_all(self, tt: &mut tt::Subtree) {
+        for t in tt.token_trees.iter_mut() {
+            match t {
+                tt::TokenTree::Leaf(leaf) => match leaf {
+                    tt::Leaf::Ident(ident) => ident.id = self.shift(ident.id),
+                    _ => (),
+                },
+                tt::TokenTree::Subtree(tt) => self.shift_all(tt),
+            }
+        }
+    }
+
+    fn shift(self, id: tt::TokenId) -> tt::TokenId {
+        if id == tt::TokenId::unspecified() {
+            return id;
+        }
+        tt::TokenId(id.0 + self.0)
+    }
+
+    fn unshift(self, id: tt::TokenId) -> Option<tt::TokenId> {
+        id.0.checked_sub(self.0).map(tt::TokenId)
+    }
+}
+
+pub enum Origin {
+    Def,
+    Call,
+}
 
 impl MacroRules {
@@ -105,21 +131,25 @@ impl MacroRules {
             validate(&rule.lhs)?;
         }
 
-        // Note that TokenId is started from zero,
-        // We have to add 1 to prevent duplication.
-        let shift = max_id(tt).map_or(0, |it| it + 1);
-        Ok(MacroRules { rules, shift })
+        Ok(MacroRules { rules, shift: Shift::new(tt) })
     }
 
     pub fn expand(&self, tt: &tt::Subtree) -> Result<tt::Subtree, ExpandError> {
         // apply shift
         let mut tt = tt.clone();
-        shift_subtree(&mut tt, self.shift);
+        self.shift.shift_all(&mut tt);
         mbe_expander::expand(self, &tt)
     }
 
-    pub fn shift(&self) -> u32 {
-        self.shift
+    pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
+        self.shift.shift(id)
+    }
+
+    pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, Origin) {
+        match self.shift.unshift(id) {
+            Some(id) => (id, Origin::Call),
+            None => (id, Origin::Def),
+        }
    }
 }
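The `Shift` type packages the arithmetic that used to live in free functions: call-site ids are moved above every id used in the macro definition, so `unshift` can later separate the two origins. The arithmetic in isolation (self-contained sketch mirroring the diff; the concrete numbers are made up):

    fn main() {
        #[derive(Clone, Copy, Debug, PartialEq, Eq)]
        struct TokenId(u32);
        #[derive(Clone, Copy)]
        struct Shift(u32);
        impl Shift {
            fn shift(self, id: TokenId) -> TokenId {
                TokenId(id.0 + self.0)
            }
            fn unshift(self, id: TokenId) -> Option<TokenId> {
                id.0.checked_sub(self.0).map(TokenId)
            }
        }
        // Say the macro definition used ids 0..=41, so the shift is max + 1 = 42.
        let shift = Shift(42);
        let call_site = TokenId(7);
        let in_expansion = shift.shift(call_site); // 49, above all definition ids
        assert_eq!(shift.unshift(in_expansion), Some(call_site)); // -> Origin::Call
        assert_eq!(shift.unshift(TokenId(3)), None);              // -> Origin::Def
    }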
@@ -77,8 +77,14 @@ pub fn token_tree_to_syntax_node(
 }
 
 impl TokenMap {
-    pub fn relative_range_of(&self, tt: tt::TokenId) -> Option<TextRange> {
-        let idx = tt.0 as usize;
+    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
+        let (idx, _) =
+            self.tokens.iter().enumerate().find(|(_, range)| **range == relative_range)?;
+        Some(tt::TokenId(idx as u32))
+    }
+
+    pub fn relative_range_of(&self, token_id: tt::TokenId) -> Option<TextRange> {
+        let idx = token_id.0 as usize;
         self.tokens.get(idx).copied()
     }
 
@@ -90,6 +96,11 @@ impl TokenMap {
 }
 
 impl RevTokenMap {
+    pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TextRange> {
+        let &(r, _) = self.ranges.iter().find(|(_, tid)| *tid == token_id)?;
+        Some(r)
+    }
+
     fn add(&mut self, relative_range: TextRange, token_id: tt::TokenId) {
         self.ranges.push((relative_range, token_id.clone()))
     }
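`TokenMap` maps a source-relative `TextRange` to a `TokenId` (a linear scan over `tokens`), and `RevTokenMap` answers the reverse question for the expansion; chaining the two is essentially what `map_token_down` does earlier in this commit. A sketch of that composition (the helper is hypothetical; both maps are assumed to describe the same expansion):

    fn range_through_maps(
        token_map: &TokenMap,
        rev_map: &RevTokenMap,
        arg_range: TextRange,
    ) -> Option<TextRange> {
        let id = token_map.token_by_range(arg_range)?;
        rev_map.range_by_token(id)
    }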
@@ -5,5 +5,8 @@ version = "0.1.0"
 authors = ["rust-analyzer developers"]
 publish = false
 
+[lib]
+doctest = false
+
 [dependencies]
 drop_bomb = "0.1.4"
@@ -290,6 +290,22 @@ fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option<CompletedMarker>,
         let m = lhs.precede(p);
         p.bump(op);
 
+        if is_range {
+            // test postfix_range
+            // fn foo() {
+            //     let x = 1..;
+            //     match 1.. { _ => () };
+            //     match a.b()..S { _ => () };
+            // }
+            let has_trailing_expression =
+                p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(T!['{']));
+            if !has_trailing_expression {
+                // no RHS
+                lhs = m.complete(p, RANGE_EXPR);
+                break;
+            }
+        }
+
         expr_bp(p, r, op_bp + 1);
         lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR });
     }
@@ -330,7 +346,7 @@ fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)>
             if p.at(op) {
                 m = p.start();
                 p.bump(op);
-                if p.at_ts(EXPR_FIRST) {
+                if p.at_ts(EXPR_FIRST) && !(r.forbid_structs && p.at(T!['{'])) {
                     expr_bp(p, r, 2);
                 }
                 return Some((m.complete(p, RANGE_EXPR), BlockLike::NotBlock));
@@ -344,13 +360,7 @@ fn lhs(p: &mut Parser, r: Restrictions) -> Option<(CompletedMarker, BlockLike)>
             // }
             //
             let (lhs, blocklike) = atom::atom_expr(p, r)?;
-            return Some(postfix_expr(
-                p,
-                lhs,
-                blocklike,
-                !(r.prefer_stmt && blocklike.is_block()),
-                r.forbid_structs,
-            ));
+            return Some(postfix_expr(p, lhs, blocklike, !(r.prefer_stmt && blocklike.is_block())));
         }
     };
     // parse the interior of the unary expression
@@ -366,7 +376,6 @@ fn postfix_expr(
     // `while true {break}; ();`
     mut block_like: BlockLike,
     mut allow_calls: bool,
-    forbid_structs: bool,
 ) -> (CompletedMarker, BlockLike) {
     loop {
         lhs = match p.current() {
@@ -380,7 +389,7 @@ fn postfix_expr(
             // }
             T!['('] if allow_calls => call_expr(p, lhs),
             T!['['] if allow_calls => index_expr(p, lhs),
-            T![.] => match postfix_dot_expr(p, lhs, forbid_structs) {
+            T![.] => match postfix_dot_expr(p, lhs) {
                 Ok(it) => it,
                 Err(it) => {
                     lhs = it;
@@ -398,7 +407,6 @@ fn postfix_expr(
 fn postfix_dot_expr(
     p: &mut Parser,
     lhs: CompletedMarker,
-    forbid_structs: bool,
 ) -> Result<CompletedMarker, CompletedMarker> {
     assert!(p.at(T![.]));
     if p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth_at(2, T![::])) {
@@ -418,25 +426,8 @@ fn postfix_dot_expr(
         return Ok(m.complete(p, AWAIT_EXPR));
     }
 
-    // test postfix_range
-    // fn foo() {
-    //     let x = 1..;
-    //     match 1.. { _ => () };
-    //     match a.b()..S { _ => () };
-    // }
-    for &(op, la) in &[(T![..=], 3), (T![..], 2)] {
-        if p.at(op) {
-            let next_token = p.nth(la);
-            let has_trailing_expression =
-                !(forbid_structs && next_token == T!['{']) && EXPR_FIRST.contains(next_token);
-            return if has_trailing_expression {
-                Err(lhs)
-            } else {
-                let m = lhs.precede(p);
-                p.bump(op);
-                Ok(m.complete(p, RANGE_EXPR))
-            };
-        }
+    if p.at(T![..=]) || p.at(T![..]) {
+        return Err(lhs);
     }
 
     Ok(field_expr(p, lhs))
@@ -5,6 +5,9 @@ version = "0.1.0"
 authors = ["rust-analyzer developers"]
 publish = false
 
+[lib]
+doctest = false
+
 [dependencies]
 once_cell = "1.0.1"
 itertools = "0.8.0"
@@ -4,6 +4,9 @@ name = "ra_project_model"
 version = "0.1.0"
 authors = ["rust-analyzer developers"]
 
+[lib]
+doctest = false
+
 [dependencies]
 log = "0.4.5"
 rustc-hash = "1.0"
@@ -7,6 +7,9 @@ license = "MIT OR Apache-2.0"
 description = "Comment and whitespace preserving parser for the Rust language"
 repository = "https://github.com/rust-analyzer/rust-analyzer"
 
+[lib]
+doctest = false
+
 [dependencies]
 itertools = "0.8.0"
 rowan = "0.6.1"
@@ -16,7 +16,7 @@ use crate::{
 };
 
 pub use self::{
-    expr_extensions::{ArrayExprKind, BinOp, ElseBranch, LiteralKind, PrefixOp},
+    expr_extensions::{ArrayExprKind, BinOp, ElseBranch, LiteralKind, PrefixOp, RangeOp},
     extensions::{FieldKind, PathSegmentKind, SelfParamKind, StructKind, TypeBoundKind},
     generated::*,
     tokens::*,
@@ -189,6 +189,52 @@ impl ast::BinExpr {
     }
 }
 
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum RangeOp {
+    /// `..`
+    Exclusive,
+    /// `..=`
+    Inclusive,
+}
+
+impl ast::RangeExpr {
+    fn op_details(&self) -> Option<(usize, SyntaxToken, RangeOp)> {
+        self.syntax().children_with_tokens().enumerate().find_map(|(ix, child)| {
+            let token = child.into_token()?;
+            let bin_op = match token.kind() {
+                T![..] => RangeOp::Exclusive,
+                T![..=] => RangeOp::Inclusive,
+                _ => return None,
+            };
+            Some((ix, token, bin_op))
+        })
+    }
+
+    pub fn op_kind(&self) -> Option<RangeOp> {
+        self.op_details().map(|t| t.2)
+    }
+
+    pub fn op_token(&self) -> Option<SyntaxToken> {
+        self.op_details().map(|t| t.1)
+    }
+
+    pub fn start(&self) -> Option<ast::Expr> {
+        let op_ix = self.op_details()?.0;
+        self.syntax()
+            .children_with_tokens()
+            .take(op_ix)
+            .find_map(|it| ast::Expr::cast(it.into_node()?))
+    }
+
+    pub fn end(&self) -> Option<ast::Expr> {
+        let op_ix = self.op_details()?.0;
+        self.syntax()
+            .children_with_tokens()
+            .skip(op_ix + 1)
+            .find_map(|it| ast::Expr::cast(it.into_node()?))
+    }
+}
+
 impl ast::IndexExpr {
     pub fn base(&self) -> Option<ast::Expr> {
         children(self).nth(0)
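What the new accessors report, sketched for concrete inputs (illustrative only; `range` is assumed to be the `ast::RangeExpr` parsed from `0..10`):

    fn demo(range: ast::RangeExpr) {
        assert_eq!(range.op_kind(), Some(ast::RangeOp::Exclusive)); // `..`
        assert!(range.start().is_some()); // `0`
        assert!(range.end().is_some());   // `10`
        // For `0..=` the op is Inclusive and end() is None, which the
        // validator added later in this commit reports as InclusiveRangeMissingEnd.
    }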
@@ -2,8 +2,8 @@
 
 use crate::{
     ast::AstToken,
-    SyntaxKind::{COMMENT, WHITESPACE},
-    SyntaxToken,
+    SyntaxKind::{COMMENT, RAW_STRING, STRING, WHITESPACE},
+    SyntaxToken, TextRange, TextUnit,
 };
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -11,10 +11,9 @@ pub struct Comment(SyntaxToken);
 
 impl AstToken for Comment {
     fn cast(token: SyntaxToken) -> Option<Self> {
-        if token.kind() == COMMENT {
-            Some(Comment(token))
-        } else {
-            None
+        match token.kind() {
+            COMMENT => Some(Comment(token)),
+            _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxToken {
@@ -94,10 +93,9 @@ pub struct Whitespace(SyntaxToken);
 
 impl AstToken for Whitespace {
     fn cast(token: SyntaxToken) -> Option<Self> {
-        if token.kind() == WHITESPACE {
-            Some(Whitespace(token))
-        } else {
-            None
+        match token.kind() {
+            WHITESPACE => Some(Whitespace(token)),
+            _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxToken {
@@ -111,3 +109,80 @@ impl Whitespace {
         text.find('\n').map_or(false, |idx| text[idx + 1..].contains('\n'))
     }
 }
+
+pub struct String(SyntaxToken);
+
+impl AstToken for String {
+    fn cast(token: SyntaxToken) -> Option<Self> {
+        match token.kind() {
+            STRING => Some(String(token)),
+            _ => None,
+        }
+    }
+    fn syntax(&self) -> &SyntaxToken {
+        &self.0
+    }
+}
+
+impl String {
+    pub fn value(&self) -> Option<std::string::String> {
+        let text = self.text().as_str();
+        let usual_string_range = find_usual_string_range(text)?;
+        let start_of_inside = usual_string_range.start().to_usize() + 1;
+        let end_of_inside = usual_string_range.end().to_usize();
+        let inside_str = &text[start_of_inside..end_of_inside];
+
+        let mut buf = std::string::String::with_capacity(inside_str.len());
+        let mut has_error = false;
+        rustc_lexer::unescape::unescape_str(inside_str, &mut |_, unescaped_char| {
+            match unescaped_char {
+                Ok(c) => buf.push(c),
+                Err(_) => has_error = true,
+            }
+        });
+
+        if has_error {
+            return None;
+        }
+        Some(buf)
+    }
+}
+
+pub struct RawString(SyntaxToken);
+
+impl AstToken for RawString {
+    fn cast(token: SyntaxToken) -> Option<Self> {
+        match token.kind() {
+            RAW_STRING => Some(RawString(token)),
+            _ => None,
+        }
+    }
+    fn syntax(&self) -> &SyntaxToken {
+        &self.0
+    }
+}
+
+impl RawString {
+    pub fn value(&self) -> Option<std::string::String> {
+        let text = self.text().as_str();
+        let usual_string_range = find_usual_string_range(text)?;
+        let start_of_inside = usual_string_range.start().to_usize() + 1;
+        let end_of_inside = usual_string_range.end().to_usize();
+        let inside_str = &text[start_of_inside..end_of_inside];
+        Some(inside_str.to_string())
+    }
+}
+
+fn find_usual_string_range(s: &str) -> Option<TextRange> {
+    let left_quote = s.find('"')?;
+    let right_quote = s.rfind('"')?;
+    if left_quote == right_quote {
+        // `s` only contains one quote
+        None
+    } else {
+        Some(TextRange::from_to(
+            TextUnit::from(left_quote as u32),
+            TextUnit::from(right_quote as u32),
+        ))
+    }
+}
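Expected behaviour of the new `value()` methods, sketched (illustrative, not from the diff; `s` is assumed to be the token for the source literal "a\nb", i.e. a backslash-n in the file):

    fn demo(s: ast::String) {
        // unescape_str turns the two-character escape into a real newline
        assert_eq!(s.value().as_deref(), Some("a\nb"));
    }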
@@ -176,9 +176,11 @@ impl SourceFile {
 /// ```
 #[macro_export]
 macro_rules! match_ast {
-    (match $node:ident {
+    (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
+
+    (match ($node:expr) {
         $( ast::$ast:ident($it:ident) => $res:block, )*
-        _ => $catch_all:expr,
+        _ => $catch_all:expr $(,)?
     }) => {{
         $( if let Some($it) = ast::$ast::cast($node.clone()) $res else )*
         { $catch_all }
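After this change a `match_ast!` scrutinee can be any expression, and the trailing comma after the catch-all is optional; this is the call shape `goto_definition` uses earlier in this commit (sketch, shown as a fragment):

    let res = match_ast! {
        match (token.ast.parent()) {
            ast::NameRef(name_ref) => { /* goto for a reference */ },
            ast::Name(name) => { /* goto for a definition site */ },
            _ => return None,
        }
    };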
@@ -83,6 +83,7 @@ pub enum SyntaxErrorKind {
     InvalidMatchInnerAttr,
     InvalidTupleIndexFormat,
     VisibilityNotAllowed,
+    InclusiveRangeMissingEnd,
 }
 
 impl fmt::Display for SyntaxErrorKind {
@@ -103,6 +104,9 @@ impl fmt::Display for SyntaxErrorKind {
             VisibilityNotAllowed => {
                 write!(f, "unnecessary visibility qualifier")
             }
+            InclusiveRangeMissingEnd => {
+                write!(f, "An inclusive range must have an end expression")
+            }
         }
     }
 }
@@ -103,6 +103,7 @@ pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> {
             ast::FieldExpr(it) => { validate_numeric_name(it.name_ref(), &mut errors) },
             ast::RecordField(it) => { validate_numeric_name(it.name_ref(), &mut errors) },
             ast::Visibility(it) => { validate_visibility(it, &mut errors) },
+            ast::RangeExpr(it) => { validate_range_expr(it, &mut errors) },
             _ => (),
         }
     }
@@ -227,3 +228,12 @@ fn validate_visibility(vis: ast::Visibility, errors: &mut Vec<SyntaxError>) {
         .push(SyntaxError::new(SyntaxErrorKind::VisibilityNotAllowed, vis.syntax.text_range()))
     }
 }
+
+fn validate_range_expr(expr: ast::RangeExpr, errors: &mut Vec<SyntaxError>) {
+    if expr.op_kind() == Some(ast::RangeOp::Inclusive) && expr.end().is_none() {
+        errors.push(SyntaxError::new(
+            SyntaxErrorKind::InclusiveRangeMissingEnd,
+            expr.syntax().text_range(),
+        ));
+    }
+}
@@ -0,0 +1,4 @@
+fn main() {
+    0..=;
+    ..=;
+}
@@ -0,0 +1,30 @@
+SOURCE_FILE@[0; 33)
+  FN_DEF@[0; 32)
+    FN_KW@[0; 2) "fn"
+    WHITESPACE@[2; 3) " "
+    NAME@[3; 7)
+      IDENT@[3; 7) "main"
+    PARAM_LIST@[7; 9)
+      L_PAREN@[7; 8) "("
+      R_PAREN@[8; 9) ")"
+    WHITESPACE@[9; 10) " "
+    BLOCK_EXPR@[10; 32)
+      BLOCK@[10; 32)
+        L_CURLY@[10; 11) "{"
+        WHITESPACE@[11; 16) "\n    "
+        EXPR_STMT@[16; 21)
+          RANGE_EXPR@[16; 20)
+            LITERAL@[16; 17)
+              INT_NUMBER@[16; 17) "0"
+            DOTDOTEQ@[17; 20) "..="
+          SEMI@[20; 21) ";"
+        WHITESPACE@[21; 26) "\n    "
+        EXPR_STMT@[26; 30)
+          RANGE_EXPR@[26; 29)
+            DOTDOTEQ@[26; 29) "..="
+          SEMI@[29; 30) ";"
+        WHITESPACE@[30; 31) "\n"
+        R_CURLY@[31; 32) "}"
+  WHITESPACE@[32; 33) "\n"
+error [16; 20): An inclusive range must have an end expression
+error [26; 29): An inclusive range must have an end expression
crates/ra_syntax/test_data/parser/ok/0060_as_range.rs (new file, 4 lines)
@@ -0,0 +1,4 @@
+fn main() {
+    0 as usize ..;
+    1 + 2 as usize ..;
+}
crates/ra_syntax/test_data/parser/ok/0060_as_range.txt (new file, 56 lines)
@@ -0,0 +1,56 @@
+SOURCE_FILE@[0; 56)
+  FN_DEF@[0; 55)
+    FN_KW@[0; 2) "fn"
+    WHITESPACE@[2; 3) " "
+    NAME@[3; 7)
+      IDENT@[3; 7) "main"
+    PARAM_LIST@[7; 9)
+      L_PAREN@[7; 8) "("
+      R_PAREN@[8; 9) ")"
+    WHITESPACE@[9; 10) " "
+    BLOCK_EXPR@[10; 55)
+      BLOCK@[10; 55)
+        L_CURLY@[10; 11) "{"
+        WHITESPACE@[11; 16) "\n    "
+        EXPR_STMT@[16; 30)
+          RANGE_EXPR@[16; 29)
+            CAST_EXPR@[16; 26)
+              LITERAL@[16; 17)
+                INT_NUMBER@[16; 17) "0"
+              WHITESPACE@[17; 18) " "
+              AS_KW@[18; 20) "as"
+              WHITESPACE@[20; 21) " "
+              PATH_TYPE@[21; 26)
+                PATH@[21; 26)
+                  PATH_SEGMENT@[21; 26)
+                    NAME_REF@[21; 26)
+                      IDENT@[21; 26) "usize"
+            WHITESPACE@[26; 27) " "
+            DOTDOT@[27; 29) ".."
+          SEMI@[29; 30) ";"
+        WHITESPACE@[30; 35) "\n    "
+        EXPR_STMT@[35; 53)
+          RANGE_EXPR@[35; 52)
+            BIN_EXPR@[35; 49)
+              LITERAL@[35; 36)
+                INT_NUMBER@[35; 36) "1"
+              WHITESPACE@[36; 37) " "
+              PLUS@[37; 38) "+"
+              WHITESPACE@[38; 39) " "
+              CAST_EXPR@[39; 49)
+                LITERAL@[39; 40)
+                  INT_NUMBER@[39; 40) "2"
+                WHITESPACE@[40; 41) " "
+                AS_KW@[41; 43) "as"
+                WHITESPACE@[43; 44) " "
+                PATH_TYPE@[44; 49)
+                  PATH@[44; 49)
+                    PATH_SEGMENT@[44; 49)
+                      NAME_REF@[44; 49)
+                        IDENT@[44; 49) "usize"
+            WHITESPACE@[49; 50) " "
+            DOTDOT@[50; 52) ".."
+          SEMI@[52; 53) ";"
+        WHITESPACE@[53; 54) "\n"
+        R_CURLY@[54; 55) "}"
+  WHITESPACE@[55; 56) "\n"
@@ -0,0 +1,4 @@
+fn main() {
+    match .. {
+    }
+}
@@ -0,0 +1,27 @@
+SOURCE_FILE@[0; 35)
+  FN_DEF@[0; 34)
+    FN_KW@[0; 2) "fn"
+    WHITESPACE@[2; 3) " "
+    NAME@[3; 7)
+      IDENT@[3; 7) "main"
+    PARAM_LIST@[7; 9)
+      L_PAREN@[7; 8) "("
+      R_PAREN@[8; 9) ")"
+    WHITESPACE@[9; 10) " "
+    BLOCK_EXPR@[10; 34)
+      BLOCK@[10; 34)
+        L_CURLY@[10; 11) "{"
+        WHITESPACE@[11; 16) "\n    "
+        MATCH_EXPR@[16; 32)
+          MATCH_KW@[16; 21) "match"
+          WHITESPACE@[21; 22) " "
+          RANGE_EXPR@[22; 24)
+            DOTDOT@[22; 24) ".."
+          WHITESPACE@[24; 25) " "
+          MATCH_ARM_LIST@[25; 32)
+            L_CURLY@[25; 26) "{"
+            WHITESPACE@[26; 31) "\n    "
+            R_CURLY@[31; 32) "}"
+        WHITESPACE@[32; 33) "\n"
+        R_CURLY@[33; 34) "}"
+  WHITESPACE@[34; 35) "\n"
@@ -5,6 +5,9 @@ version = "0.1.0"
 authors = ["rust-analyzer developers"]
 publish = false
 
+[lib]
+doctest = false
+
 [dependencies]
 text_unit = "0.1.6"
 
@@ -4,6 +4,9 @@ name = "ra_tt"
 version = "0.1.0"
 authors = ["rust-analyzer developers"]
 
+[lib]
+doctest = false
+
 [dependencies]
 # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here
 # to reduce number of compilations
@@ -4,6 +4,9 @@ name = "ra_vfs_glob"
 version = "0.1.0"
 authors = ["rust-analyzer developers"]
 
+[lib]
+doctest = false
+
 [dependencies]
 ra_vfs = "0.5.0"
 globset = "0.4.4"
@@ -4,6 +4,9 @@ name = "test_utils"
 version = "0.1.0"
 authors = ["rust-analyzer developers"]
 
+[lib]
+doctest = false
+
 [dependencies]
 difference = "2.0.0"
 text_unit = "0.1.2"
@@ -4,7 +4,7 @@
 //! See <https://matklad.github.io/2018/06/18/a-trick-for-test-maintenance.html>
 //! for details, but the TL;DR is that you write your test as
 //!
-//! ```rust,no_run
+//! ```
 //! #[test]
 //! fn test_foo() {
 //!     covers!(test_foo);
@@ -13,7 +13,7 @@
 //!
 //! and in the code under test you write
 //!
-//! ```rust,no_run
+//! ```
 //! # use test_utils::tested_by;
 //! # fn some_condition() -> bool { true }
 //! fn foo() {
@@ -161,7 +161,9 @@ export async function startCargoWatch(
 ): Promise<CargoWatchProvider | undefined> {
     const execPromise = util.promisify(child_process.exec);
 
-    const { stderr } = await execPromise('cargo watch --version').catch(e => e);
+    const { stderr, code = 0 } = await execPromise(
+        'cargo watch --version'
+    ).catch(e => e);
 
     if (stderr.includes('no such subcommand: `watch`')) {
         const msg =
@@ -201,6 +203,11 @@ export async function startCargoWatch(
             );
             return;
         }
+    } else if (code !== 0) {
+        vscode.window.showErrorMessage(
+            `\`cargo watch\` failed with ${code}: ${stderr}`
+        );
+        return;
     }
 
     const provider = await registerCargoWatchProvider(context.subscriptions);