Merge branch 'master' of github.com:rust-analyzer/rust-analyzer

Commit 0a1585075c by Benjamin Coenen, 2020-04-17 10:30:39 +02:00
51 changed files with 1129 additions and 367 deletions

Cargo.lock (generated)
View File

@@ -114,17 +114,18 @@ checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
[[package]]
name = "chalk-derive"
version = "0.1.0"
source = "git+https://github.com/rust-lang/chalk.git?rev=039fc904a05f8cb3d0c682c9a57a63dda7a35356#039fc904a05f8cb3d0c682c9a57a63dda7a35356"
source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8"
dependencies = [
"proc-macro2",
"quote",
"syn",
"synstructure",
]
[[package]]
name = "chalk-engine"
version = "0.9.0"
source = "git+https://github.com/rust-lang/chalk.git?rev=039fc904a05f8cb3d0c682c9a57a63dda7a35356#039fc904a05f8cb3d0c682c9a57a63dda7a35356"
source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8"
dependencies = [
"chalk-macros",
"rustc-hash",
@@ -133,7 +134,7 @@ dependencies = [
[[package]]
name = "chalk-ir"
version = "0.1.0"
source = "git+https://github.com/rust-lang/chalk.git?rev=039fc904a05f8cb3d0c682c9a57a63dda7a35356#039fc904a05f8cb3d0c682c9a57a63dda7a35356"
source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8"
dependencies = [
"chalk-derive",
"chalk-engine",
@@ -143,7 +144,7 @@ dependencies = [
[[package]]
name = "chalk-macros"
version = "0.1.1"
source = "git+https://github.com/rust-lang/chalk.git?rev=039fc904a05f8cb3d0c682c9a57a63dda7a35356#039fc904a05f8cb3d0c682c9a57a63dda7a35356"
source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8"
dependencies = [
"lazy_static",
]
@@ -151,7 +152,7 @@ dependencies = [
[[package]]
name = "chalk-rust-ir"
version = "0.1.0"
source = "git+https://github.com/rust-lang/chalk.git?rev=039fc904a05f8cb3d0c682c9a57a63dda7a35356#039fc904a05f8cb3d0c682c9a57a63dda7a35356"
source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8"
dependencies = [
"chalk-derive",
"chalk-engine",
@@ -162,7 +163,7 @@ dependencies = [
[[package]]
name = "chalk-solve"
version = "0.1.0"
source = "git+https://github.com/rust-lang/chalk.git?rev=039fc904a05f8cb3d0c682c9a57a63dda7a35356#039fc904a05f8cb3d0c682c9a57a63dda7a35356"
source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8"
dependencies = [
"chalk-derive",
"chalk-engine",
@@ -446,9 +447,9 @@ dependencies = [
[[package]]
name = "hermit-abi"
version = "0.1.10"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "725cf19794cf90aa94e65050cb4191ff5d8fa87a498383774c47b332e3af952e"
checksum = "8a0d737e0f947a1864e93d33fdef4af8445a00d1ed8dc0c8ddb73139ea6abf15"
dependencies = [
"libc",
]
@@ -495,9 +496,9 @@ dependencies = [
[[package]]
name = "insta"
version = "0.15.0"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8de3f029212a3fe78a6090f1f2b993877ca245a9ded863f3fcbd6eae084fc1ed"
checksum = "8386e795fb3927131ea4cede203c529a333652eb6dc4ff29616b832b27e9b096"
dependencies = [
"console",
"difference",
@@ -593,15 +594,15 @@ checksum = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f"
[[package]]
name = "libc"
version = "0.2.68"
version = "0.2.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dea0c0405123bba743ee3f91f49b1c7cfb684eef0da0a50110f758ccf24cdff0"
checksum = "99e85c08494b21a9054e7fe1374a732aeadaff3980b6990b94bfd3a70f690005"
[[package]]
name = "libloading"
version = "0.6.0"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c979a19ffb457f0273965c333053f3d586bf759bf7b683fbebc37f9a9ebedc4"
checksum = "3c4f51b790f5bdb65acb4cc94bb81d7b2ee60348a5431ac1467d390b017600b0"
dependencies = [
"winapi 0.3.8",
]
@@ -757,9 +758,9 @@ dependencies = [
[[package]]
name = "num_cpus"
version = "1.12.0"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46203554f085ff89c235cd12f7075f3233af9b11ed7c9e16dfe2560d03313ce6"
checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
dependencies = [
"hermit-abi",
"libc",
@@ -779,9 +780,9 @@ checksum = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063"
[[package]]
name = "parking_lot"
version = "0.10.1"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fdfcb5f20930a79e326f7ec992a9fdb5b7bd809254b1e735bdd5a99f78bee0d"
checksum = "d3a704eb390aafdc107b0e392f56a82b668e3a71366993b5340f5833fd62505e"
dependencies = [
"lock_api",
"parking_lot_core",
@@ -1013,6 +1014,7 @@ dependencies = [
"chalk-solve",
"ena",
"insta",
"itertools",
"log",
"ra_arena",
"ra_db",
@@ -1339,6 +1341,7 @@ dependencies = [
"ra_hir_def",
"ra_hir_ty",
"ra_ide",
"ra_proc_macro_srv",
"ra_prof",
"ra_project_model",
"ra_syntax",
@@ -1564,6 +1567,18 @@ dependencies = [
"unicode-xid",
]
[[package]]
name = "synstructure"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67656ea1dc1b41b1451851562ea232ec2e5a80242139f7e679ceccfb5d61f545"
dependencies = [
"proc-macro2",
"quote",
"syn",
"unicode-xid",
]
[[package]]
name = "tempfile"
version = "3.1.0"

View File

@@ -78,7 +78,7 @@ fn foo() {
}
fn bar(arg: &str, baz: Baz) {
unimplemented!()
todo!()
}
"#####,

View File

@@ -29,7 +29,7 @@ use rustc_hash::{FxHashMap, FxHashSet};
// }
//
// fn bar(arg: &str, baz: Baz) {
// unimplemented!()
// todo!()
// }
//
// ```
@@ -80,7 +80,7 @@ impl FunctionBuilder {
Some(Self { append_fn_at, fn_name, type_params, params })
}
fn render(self) -> Option<FunctionTemplate> {
let placeholder_expr = ast::make::expr_unimplemented();
let placeholder_expr = ast::make::expr_todo();
let fn_body = ast::make::block_expr(vec![], Some(placeholder_expr));
let fn_def = ast::make::fn_def(self.fn_name, self.type_params, self.params, fn_body);
let fn_def = ast::make::add_newlines(2, fn_def);
@@ -225,7 +225,7 @@ fn foo() {
}
fn bar() {
<|>unimplemented!()
<|>todo!()
}
",
)
@@ -252,7 +252,7 @@ impl Foo {
}
fn bar() {
<|>unimplemented!()
<|>todo!()
}
",
)
@@ -276,7 +276,7 @@ fn foo1() {
}
fn bar() {
<|>unimplemented!()
<|>todo!()
}
fn foo2() {}
@@ -302,7 +302,7 @@ mod baz {
}
fn bar() {
<|>unimplemented!()
<|>todo!()
}
}
",
@@ -315,20 +315,20 @@ mod baz {
add_function,
r"
struct Baz;
fn baz() -> Baz { unimplemented!() }
fn baz() -> Baz { todo!() }
fn foo() {
bar<|>(baz());
}
",
r"
struct Baz;
fn baz() -> Baz { unimplemented!() }
fn baz() -> Baz { todo!() }
fn foo() {
bar(baz());
}
fn bar(baz: Baz) {
<|>unimplemented!()
<|>todo!()
}
",
);
@@ -361,7 +361,7 @@ impl Baz {
}
fn bar(baz: Baz) {
<|>unimplemented!()
<|>todo!()
}
",
)
@@ -382,7 +382,7 @@ fn foo() {
}
fn bar(arg: &str) {
<|>unimplemented!()
<|>todo!()
}
"#,
)
@@ -403,7 +403,7 @@ fn foo() {
}
fn bar(arg: char) {
<|>unimplemented!()
<|>todo!()
}
"#,
)
@@ -424,7 +424,7 @@ fn foo() {
}
fn bar(arg: i32) {
<|>unimplemented!()
<|>todo!()
}
",
)
@@ -445,7 +445,7 @@ fn foo() {
}
fn bar(arg: u8) {
<|>unimplemented!()
<|>todo!()
}
",
)
@@ -470,7 +470,7 @@ fn foo() {
}
fn bar(x: u8) {
<|>unimplemented!()
<|>todo!()
}
",
)
@@ -493,7 +493,7 @@ fn foo() {
}
fn bar(worble: ()) {
<|>unimplemented!()
<|>todo!()
}
",
)
@@ -506,7 +506,7 @@ fn bar(worble: ()) {
r"
trait Foo {}
fn foo() -> impl Foo {
unimplemented!()
todo!()
}
fn baz() {
<|>bar(foo())
@@ -515,14 +515,14 @@ fn baz() {
r"
trait Foo {}
fn foo() -> impl Foo {
unimplemented!()
todo!()
}
fn baz() {
bar(foo())
}
fn bar(foo: impl Foo) {
<|>unimplemented!()
<|>todo!()
}
",
)
@@ -556,7 +556,7 @@ mod Foo {
}
fn bar(baz: super::Baz::Bof) {
<|>unimplemented!()
<|>todo!()
}
}
",
@@ -580,7 +580,7 @@ fn foo<T>(t: T) {
}
fn bar<T>(t: T) {
<|>unimplemented!()
<|>todo!()
}
",
)
@@ -611,7 +611,7 @@ fn foo() {
}
fn bar(arg: fn() -> Baz) {
<|>unimplemented!()
<|>todo!()
}
",
)
@@ -636,7 +636,7 @@ fn foo() {
}
fn bar(closure: impl Fn(i64) -> i64) {
<|>unimplemented!()
<|>todo!()
}
",
)
@@ -657,7 +657,7 @@ fn foo() {
}
fn bar(baz: ()) {
<|>unimplemented!()
<|>todo!()
}
",
)
@@ -682,7 +682,7 @@ fn foo() {
}
fn bar(baz_1: Baz, baz_2: Baz) {
<|>unimplemented!()
<|>todo!()
}
",
)
@@ -707,7 +707,7 @@ fn foo() {
}
fn bar(baz_1: Baz, baz_2: Baz, arg_1: &str, arg_2: &str) {
<|>unimplemented!()
<|>todo!()
}
"#,
)
@@ -779,7 +779,7 @@ impl Foo {
self.bar();
}
fn bar(&self) {
unimplemented!();
todo!();
}
}
",

View File

@@ -13,4 +13,4 @@ serde_json = "1.0.48"
jod-thread = "0.1.1"
[dev-dependencies]
insta = "0.15.0"
insta = "0.16.0"

View File

@@ -25,7 +25,7 @@ use hir_ty::{
autoderef, display::HirFormatter, expr::ExprValidator, method_resolution, ApplicationTy,
Canonical, InEnvironment, Substs, TraitEnvironment, Ty, TyDefId, TypeCtor,
};
use ra_db::{CrateId, Edition, FileId};
use ra_db::{CrateId, CrateName, Edition, FileId};
use ra_prof::profile;
use ra_syntax::{
ast::{self, AttrsOwner, NameOwner},
@@ -91,6 +91,10 @@ impl Crate {
db.crate_graph()[self.id].edition
}
pub fn display_name(self, db: &dyn HirDatabase) -> Option<CrateName> {
db.crate_graph()[self.id].display_name.as_ref().cloned()
}
pub fn all(db: &dyn HirDatabase) -> Vec<Crate> {
db.crate_graph().iter().map(|id| Crate { id }).collect()
}
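A hedged sketch of how the new accessor can be used (illustrative only; assumes `Crate` and `HirDatabase` in scope, as in this file):

// Illustrative only: list every crate in the graph by display name,
// falling back for crates that have none.
fn list_crates(db: &dyn HirDatabase) {
    for krate in Crate::all(db) {
        match krate.display_name(db) {
            Some(name) => eprintln!("crate: {}", name),
            None => eprintln!("crate: <unnamed>"),
        }
    }
}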

View File

@@ -28,4 +28,4 @@ ra_cfg = { path = "../ra_cfg" }
tt = { path = "../ra_tt", package = "ra_tt" }
[dev-dependencies]
insta = "0.15.0"
insta = "0.16.0"

View File

@@ -9,13 +9,14 @@ use hir_expand::{
};
use ra_prof::profile;
use ra_syntax::ast::{
self, AstNode, ImplItem, ModuleItemOwner, NameOwner, TypeAscriptionOwner, VisibilityOwner,
self, AstNode, ImplItem, ModuleItemOwner, NameOwner, TypeAscriptionOwner, TypeBoundsOwner,
VisibilityOwner,
};
use crate::{
attr::Attrs,
db::DefDatabase,
path::{path, GenericArgs, Path},
path::{path, AssociatedTypeBinding, GenericArgs, Path},
src::HasSource,
type_ref::{Mutability, TypeBound, TypeRef},
visibility::RawVisibility,
@@ -95,7 +96,11 @@ fn desugar_future_path(orig: TypeRef) -> Path {
let path = path![std::future::Future];
let mut generic_args: Vec<_> = std::iter::repeat(None).take(path.segments.len() - 1).collect();
let mut last = GenericArgs::empty();
last.bindings.push((name![Output], orig));
last.bindings.push(AssociatedTypeBinding {
name: name![Output],
type_ref: Some(orig),
bounds: Vec::new(),
});
generic_args.push(Some(Arc::new(last)));
Path::from_known_path(path, generic_args)
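For orientation, the surface feature being desugared (a sketch, not part of the diff): the future produced by `.await` carries its result type as the `Output` associated type, and that `Output = ...` equality is what this function now builds as a full `AssociatedTypeBinding` instead of a bare `(Name, TypeRef)` pair.

use std::future::Future;

// `async fn f() -> u32` is equivalent to a function returning
// `impl Future<Output = u32>`; `Output = u32` is the binding constructed
// by desugar_future_path.
fn f() -> impl Future<Output = u32> {
    async { 42 }
}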
@@ -106,6 +111,7 @@ pub struct TypeAliasData {
pub name: Name,
pub type_ref: Option<TypeRef>,
pub visibility: RawVisibility,
pub bounds: Vec<TypeBound>,
}
impl TypeAliasData {
@@ -118,9 +124,17 @@ impl TypeAliasData {
let name = node.value.name().map_or_else(Name::missing, |n| n.as_name());
let type_ref = node.value.type_ref().map(TypeRef::from_ast);
let vis_default = RawVisibility::default_for_container(loc.container);
let visibility =
RawVisibility::from_ast_with_default(db, vis_default, node.map(|n| n.visibility()));
Arc::new(TypeAliasData { name, type_ref, visibility })
let visibility = RawVisibility::from_ast_with_default(
db,
vis_default,
node.as_ref().map(|n| n.visibility()),
);
let bounds = if let Some(bound_list) = node.value.type_bound_list() {
bound_list.bounds().map(TypeBound::from_ast).collect()
} else {
Vec::new()
};
Arc::new(TypeAliasData { name, type_ref, visibility, bounds })
}
}

View File

@@ -14,7 +14,10 @@ use hir_expand::{
use ra_db::CrateId;
use ra_syntax::ast;
use crate::{type_ref::TypeRef, InFile};
use crate::{
type_ref::{TypeBound, TypeRef},
InFile,
};
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ModPath {
@@ -111,7 +114,21 @@ pub struct GenericArgs {
/// is left out.
pub has_self_type: bool,
/// Associated type bindings like in `Iterator<Item = T>`.
pub bindings: Vec<(Name, TypeRef)>,
pub bindings: Vec<AssociatedTypeBinding>,
}
/// An associated type binding like in `Iterator<Item = T>`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct AssociatedTypeBinding {
/// The name of the associated type.
pub name: Name,
/// The type bound to this associated type (in `Item = T`, this would be the
/// `T`). This can be `None` if there are bounds instead.
pub type_ref: Option<TypeRef>,
/// Bounds for the associated type, like in `Iterator<Item:
/// SomeOtherTrait>`. (This is the unstable `associated_type_bounds`
/// feature.)
pub bounds: Vec<TypeBound>,
}
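The two surface forms the new struct distinguishes, sketched (not part of the diff; the second form is gated behind the unstable `associated_type_bounds` feature, as the doc comment notes):

// `Item = u32`: name = Item, type_ref = Some(u32), bounds = [].
fn sum_all<I: Iterator<Item = u32>>(iter: I) -> u32 {
    iter.sum()
}

// `Item: Clone`: name = Item, type_ref = None, bounds = [Clone].
// Shown as a comment because it only compiles on nightly with
// #![feature(associated_type_bounds)]:
//
//     fn clone_all<I: Iterator<Item: Clone>>(iter: I) { /* ... */ }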
/// A single generic argument.

View File

@@ -9,11 +9,12 @@ use hir_expand::{
hygiene::Hygiene,
name::{name, AsName},
};
use ra_syntax::ast::{self, AstNode, TypeAscriptionOwner};
use ra_syntax::ast::{self, AstNode, TypeAscriptionOwner, TypeBoundsOwner};
use super::AssociatedTypeBinding;
use crate::{
path::{GenericArg, GenericArgs, ModPath, Path, PathKind},
type_ref::TypeRef,
type_ref::{TypeBound, TypeRef},
};
pub(super) use lower_use::lower_use_tree;
@@ -136,10 +137,16 @@ pub(super) fn lower_generic_args(node: ast::TypeArgList) -> Option<GenericArgs>
// lifetimes ignored for now
let mut bindings = Vec::new();
for assoc_type_arg in node.assoc_type_args() {
let assoc_type_arg: ast::AssocTypeArg = assoc_type_arg;
if let Some(name_ref) = assoc_type_arg.name_ref() {
let name = name_ref.as_name();
let type_ref = TypeRef::from_ast_opt(assoc_type_arg.type_ref());
bindings.push((name, type_ref));
let type_ref = assoc_type_arg.type_ref().map(TypeRef::from_ast);
let bounds = if let Some(l) = assoc_type_arg.type_bound_list() {
l.bounds().map(TypeBound::from_ast).collect()
} else {
Vec::new()
};
bindings.push(AssociatedTypeBinding { name, type_ref, bounds });
}
}
if args.is_empty() && bindings.is_empty() {
@@ -168,7 +175,11 @@ fn lower_generic_args_from_fn_path(
}
if let Some(ret_type) = ret_type {
let type_ref = TypeRef::from_ast_opt(ret_type.type_ref());
bindings.push((name![Output], type_ref))
bindings.push(AssociatedTypeBinding {
name: name![Output],
type_ref: Some(type_ref),
bounds: Vec::new(),
});
}
if args.is_empty() && bindings.is_empty() {
None

View File

@@ -163,8 +163,16 @@ impl TypeRef {
let crate::path::GenericArg::Type(type_ref) = arg;
go(type_ref, f);
}
for (_, type_ref) in &args_and_bindings.bindings {
go(type_ref, f);
for binding in &args_and_bindings.bindings {
if let Some(type_ref) = &binding.type_ref {
go(type_ref, f);
}
for bound in &binding.bounds {
match bound {
TypeBound::Path(path) => go_path(path, f),
TypeBound::Error => (),
}
}
}
}
}

View File

@@ -8,6 +8,7 @@ authors = ["rust-analyzer developers"]
doctest = false
[dependencies]
itertools = "0.9.0"
arrayvec = "0.5.1"
smallvec = "1.2.0"
ena = "0.13.1"
@@ -26,9 +27,9 @@ test_utils = { path = "../test_utils" }
scoped-tls = "1"
chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "039fc904a05f8cb3d0c682c9a57a63dda7a35356" }
chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "039fc904a05f8cb3d0c682c9a57a63dda7a35356" }
chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "039fc904a05f8cb3d0c682c9a57a63dda7a35356" }
chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" }
chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" }
chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" }
[dev-dependencies]
insta = "0.15.0"
insta = "0.16.0"

View File

@@ -194,9 +194,10 @@ use smallvec::{smallvec, SmallVec};
use crate::{
db::HirDatabase,
expr::{Body, Expr, Literal, Pat, PatId},
InferenceResult,
ApplicationTy, InferenceResult, Ty, TypeCtor,
};
use hir_def::{adt::VariantData, EnumVariantId, VariantId};
use hir_def::{adt::VariantData, AdtId, EnumVariantId, VariantId};
use ra_arena::Idx;
#[derive(Debug, Clone, Copy)]
/// Either a pattern from the source code being analyzed, represented as
@@ -512,6 +513,7 @@ pub enum Usefulness {
}
pub struct MatchCheckCtx<'a> {
pub match_expr: Idx<Expr>,
pub body: Arc<Body>,
pub infer: Arc<InferenceResult>,
pub db: &'a dyn HirDatabase,
@@ -530,6 +532,16 @@ pub(crate) fn is_useful(
matrix: &Matrix,
v: &PatStack,
) -> MatchCheckResult<Usefulness> {
// Handle the special case of enums with no variants. In that case, no match
// arm is useful.
if let Ty::Apply(ApplicationTy { ctor: TypeCtor::Adt(AdtId::EnumId(enum_id)), .. }) =
cx.infer[cx.match_expr].strip_references()
{
if cx.db.enum_data(*enum_id).variants.is_empty() {
return Ok(Usefulness::NotUseful);
}
}
if v.is_empty() {
let result = if matrix.is_empty() { Usefulness::Useful } else { Usefulness::NotUseful };
@@ -1618,6 +1630,32 @@ mod tests {
check_no_diagnostic(content);
}
#[test]
fn enum_never() {
let content = r"
enum Never {}
fn test_fn(never: Never) {
match never {}
}
";
check_no_diagnostic(content);
}
#[test]
fn enum_never_ref() {
let content = r"
enum Never {}
fn test_fn(never: &Never) {
match never {}
}
";
check_no_diagnostic(content);
}
}
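The rule these new tests pin down, as a standalone sketch (not part of the diff):

enum Never {}

// A zero-variant enum is matched exhaustively with zero arms, so the
// checker must report nothing here -- and, per the special case above,
// must not consider any additional arm useful.
fn absurd(never: Never) -> u32 {
    match never {}
}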
#[cfg(test)]

View File

@@ -14,7 +14,7 @@ use crate::{
db::HirDatabase,
traits::{InEnvironment, Solution},
utils::generics,
BoundVar, Canonical, DebruijnIndex, Substs, Ty,
BoundVar, Canonical, DebruijnIndex, Obligation, Substs, TraitRef, Ty,
};
const AUTODEREF_RECURSION_LIMIT: usize = 10;
@@ -66,6 +66,20 @@ fn deref_by_trait(
let parameters =
Substs::build_for_generics(&generic_params).push(ty.value.value.clone()).build();
// Check that the type implements Deref at all
let trait_ref = TraitRef { trait_: deref_trait, substs: parameters.clone() };
let implements_goal = super::Canonical {
num_vars: ty.value.num_vars,
value: InEnvironment {
value: Obligation::Trait(trait_ref),
environment: ty.environment.clone(),
},
};
if db.trait_solve(krate, implements_goal).is_none() {
return None;
}
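Why the explicit check matters, sketched with plain `std::ops::Deref` (illustrative, not part of the diff): projecting `<T as Deref>::Target` only makes sense once `T: Deref` is known to hold, so the solver is now asked that question first and the deref chain stops cleanly for types with no `Deref` impl.

use std::ops::Deref;

struct Wrapper(String);

impl Deref for Wrapper {
    type Target = String;
    fn deref(&self) -> &String {
        &self.0
    }
}

fn main() {
    let w = Wrapper(String::from("hi"));
    // Autoderef resolves `w.len()` via `Wrapper: Deref<Target = String>`.
    assert_eq!(w.len(), 2);
}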
// Now do the assoc type projection
let projection = super::traits::ProjectionPredicate {
ty: Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, ty.value.num_vars)),
projection_ty: super::ProjectionTy { associated_ty: target, parameters },
@@ -91,6 +105,11 @@ fn deref_by_trait(
// they're just being 'passed through'. In the 'standard' case where
// we have `impl<T> Deref for Foo<T> { Target = T }`, that should be
// the case.
// FIXME: if the trait solver decides to truncate the type, these
// assumptions will be broken. We would need to properly introduce
// new variables in that case
for i in 1..vars.0.num_vars {
if vars.0.value[i - 1] != Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i - 1))
{

View File

@@ -156,7 +156,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
None => return,
};
let cx = MatchCheckCtx { body, infer: infer.clone(), db };
let cx = MatchCheckCtx { match_expr, body, infer: infer.clone(), db };
let pats = arms.iter().map(|arm| arm.pat);
let mut seen = Matrix::empty();

View File

@@ -32,6 +32,7 @@ where
var_stack: Vec<TypeVarId>,
}
#[derive(Debug)]
pub(super) struct Canonicalized<T> {
pub value: Canonical<T>,
free_vars: Vec<InferTy>,

View File

@@ -680,6 +680,16 @@ impl Ty {
}
}
pub fn strip_references(&self) -> &Ty {
let mut t: &Ty = self;
while let Ty::Apply(ApplicationTy { ctor: TypeCtor::Ref(_mutability), parameters }) = t {
t = parameters.as_single();
}
t
}
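What `strip_references` peels, in surface terms (a sketch, not part of the diff): the empty-enum special case in the match checker should treat a scrutinee of type `&&Never` like `Never` itself.

enum Never {}

// rustc itself insists on dereferencing before an empty match; the checker
// here instead strips `Ty::Ref` layers from the scrutinee type, so it also
// accepts matching the reference directly, as the enum_never_ref test shows.
fn through_refs(never: &&Never) -> u32 {
    match **never {}
}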
pub fn as_adt(&self) -> Option<(AdtId, &Substs)> {
match self {
Ty::Apply(ApplicationTy { ctor: TypeCtor::Adt(adt_def), parameters }) => {

View File

@@ -8,6 +8,8 @@
use std::iter;
use std::sync::Arc;
use smallvec::SmallVec;
use hir_def::{
adt::StructKind,
builtin_type::BuiltinType,
@@ -360,13 +362,23 @@ impl Ty {
},
Some(TypeNs::GenericParam(param_id)) => {
let predicates = ctx.db.generic_predicates_for_param(param_id);
predicates
let mut traits_: Vec<_> = predicates
.iter()
.filter_map(|pred| match &pred.value {
GenericPredicate::Implemented(tr) => Some(tr.trait_),
_ => None,
})
.collect()
.collect();
// Handle `Self::Type` referring to own associated type in trait definitions
if let GenericDefId::TraitId(trait_id) = param_id.parent {
let generics = generics(ctx.db.upcast(), trait_id.into());
if generics.params.types[param_id.local_id].provenance
== TypeParamProvenance::TraitSelf
{
traits_.push(trait_id);
}
}
traits_
}
_ => return Ty::Unknown,
};
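The case this new branch handles, sketched (not part of the diff): inside a trait definition, `Self` is a type parameter with `TraitSelf` provenance and implicitly satisfies the trait being defined, so `Self::Item` must resolve without an explicit `Self: Trait` bound:

trait Container {
    type Item;

    // `Self::Item` resolves because the lowering above adds `Container`
    // itself to the trait candidates for the implicit `Self` parameter.
    fn first(&self) -> Self::Item;
}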
@@ -596,21 +608,35 @@ fn assoc_type_bindings_from_type_bound<'a>(
.into_iter()
.flat_map(|segment| segment.args_and_bindings.into_iter())
.flat_map(|args_and_bindings| args_and_bindings.bindings.iter())
.map(move |(name, type_ref)| {
.flat_map(move |binding| {
let associated_ty = associated_type_by_name_including_super_traits(
ctx.db.upcast(),
trait_ref.trait_,
&name,
&binding.name,
);
let associated_ty = match associated_ty {
None => return GenericPredicate::Error,
None => return SmallVec::<[GenericPredicate; 1]>::new(),
Some(t) => t,
};
let projection_ty =
ProjectionTy { associated_ty, parameters: trait_ref.substs.clone() };
let ty = Ty::from_hir(ctx, type_ref);
let projection_predicate = ProjectionPredicate { projection_ty, ty };
GenericPredicate::Projection(projection_predicate)
let mut preds = SmallVec::with_capacity(
binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),
);
if let Some(type_ref) = &binding.type_ref {
let ty = Ty::from_hir(ctx, type_ref);
let projection_predicate =
ProjectionPredicate { projection_ty: projection_ty.clone(), ty };
preds.push(GenericPredicate::Projection(projection_predicate));
}
for bound in &binding.bounds {
preds.extend(GenericPredicate::from_type_bound(
ctx,
bound,
Ty::Projection(projection_ty.clone()),
));
}
preds
})
}

View File

@@ -451,8 +451,7 @@ pub mod str {
"#,
);
// should be Option<char>, but currently not because of Chalk ambiguity problem
assert_eq!("(Option<{unknown}>, Option<{unknown}>)", super::type_at_pos(&db, pos));
assert_eq!("(Option<char>, Option<char>)", super::type_at_pos(&db, pos));
}
#[test]

View File

@@ -349,7 +349,6 @@ trait Trait: SuperTrait {
#[test]
fn infer_project_associated_type() {
// y, z, a don't yet work because of https://github.com/rust-lang/chalk/issues/234
assert_snapshot!(
infer(r#"
trait Iterable {
@ -368,12 +367,12 @@ fn test<T: Iterable>() {
[108; 261) '{ ...ter; }': ()
[118; 119) 'x': u32
[145; 146) '1': u32
[156; 157) 'y': {unknown}
[183; 192) 'no_matter': {unknown}
[202; 203) 'z': {unknown}
[215; 224) 'no_matter': {unknown}
[234; 235) 'a': {unknown}
[249; 258) 'no_matter': {unknown}
[156; 157) 'y': Iterable::Item<T>
[183; 192) 'no_matter': Iterable::Item<T>
[202; 203) 'z': Iterable::Item<T>
[215; 224) 'no_matter': Iterable::Item<T>
[234; 235) 'a': Iterable::Item<T>
[249; 258) 'no_matter': Iterable::Item<T>
"###
);
}
@@ -433,8 +432,8 @@ fn test<T: Iterable<Item=u32>>() {
"#),
@r###"
[67; 100) '{ ...own; }': ()
[77; 78) 'y': {unknown}
[90; 97) 'unknown': {unknown}
[77; 78) 'y': u32
[90; 97) 'unknown': u32
"###
);
}
@@ -549,7 +548,7 @@ impl std::ops::Index<u32> for Bar {
fn test() {
let a = Bar;
let b = a[1];
let b = a[1u32];
b<|>;
}
@@ -574,7 +573,7 @@ fn infer_ops_index_autoderef() {
//- /main.rs crate:main deps:std
fn test() {
let a = &[1u32, 2, 3];
let b = a[1];
let b = a[1u32];
b<|>;
}
@@ -916,11 +915,7 @@ fn test<T: ApplyL>(t: T) {
}
"#,
);
// FIXME here Chalk doesn't normalize the type to a placeholder. I think we
// need to add a rule like Normalize(<T as ApplyL>::Out -> ApplyL::Out<T>)
// to the trait env ourselves here; probably Chalk can't do this by itself.
// assert_eq!(t, "ApplyL::Out<[missing name]>");
assert_eq!(t, "{unknown}");
assert_eq!(t, "ApplyL::Out<T>");
}
#[test]
@@ -1329,16 +1324,16 @@ fn test<T: Trait<Type = u32>>(x: T, y: impl Trait<Type = i64>) {
[263; 264) 'y': impl Trait<Type = i64>
[290; 398) '{ ...r>); }': ()
[296; 299) 'get': fn get<T>(T) -> <T as Trait>::Type
[296; 302) 'get(x)': {unknown}
[296; 302) 'get(x)': u32
[300; 301) 'x': T
[308; 312) 'get2': fn get2<{unknown}, T>(T) -> {unknown}
[308; 315) 'get2(x)': {unknown}
[308; 312) 'get2': fn get2<u32, T>(T) -> u32
[308; 315) 'get2(x)': u32
[313; 314) 'x': T
[321; 324) 'get': fn get<impl Trait<Type = i64>>(impl Trait<Type = i64>) -> <impl Trait<Type = i64> as Trait>::Type
[321; 327) 'get(y)': {unknown}
[321; 327) 'get(y)': i64
[325; 326) 'y': impl Trait<Type = i64>
[333; 337) 'get2': fn get2<{unknown}, impl Trait<Type = i64>>(impl Trait<Type = i64>) -> {unknown}
[333; 340) 'get2(y)': {unknown}
[333; 337) 'get2': fn get2<i64, impl Trait<Type = i64>>(impl Trait<Type = i64>) -> i64
[333; 340) 'get2(y)': i64
[338; 339) 'y': impl Trait<Type = i64>
[346; 349) 'get': fn get<S<u64>>(S<u64>) -> <S<u64> as Trait>::Type
[346; 357) 'get(set(S))': u64
@@ -1402,7 +1397,6 @@ mod iter {
#[test]
fn projection_eq_within_chalk() {
// std::env::set_var("CHALK_DEBUG", "1");
assert_snapshot!(
infer(r#"
trait Trait1 {
@@ -1422,7 +1416,7 @@ fn test<T: Trait1<Type = u32>>(x: T) {
[164; 165) 'x': T
[170; 186) '{ ...o(); }': ()
[176; 177) 'x': T
[176; 183) 'x.foo()': {unknown}
[176; 183) 'x.foo()': u32
"###
);
}
@@ -1578,7 +1572,7 @@ fn test<F: FnOnce(u32, u64) -> u128>(f: F) {
[150; 151) 'f': F
[156; 184) '{ ...2)); }': ()
[162; 163) 'f': F
[162; 181) 'f.call...1, 2))': {unknown}
[162; 181) 'f.call...1, 2))': u128
[174; 180) '(1, 2)': (u32, u64)
[175; 176) '1': u32
[178; 179) '2': u64
@@ -1803,7 +1797,7 @@ fn test<T, U>() where T::Item: Trait2, T: Trait<U::Item>, U: Trait<()> {
}
#[test]
fn unselected_projection_on_trait_self() {
fn unselected_projection_on_impl_self() {
assert_snapshot!(infer(
r#"
//- /main.rs
@@ -1829,7 +1823,7 @@ impl Trait for S2 {
"#,
), @r###"
[54; 58) 'self': &Self
[60; 61) 'x': {unknown}
[60; 61) 'x': Trait::Item<Self>
[140; 144) 'self': &S
[146; 147) 'x': u32
[161; 175) '{ let y = x; }': ()
@@ -1843,6 +1837,30 @@ impl Trait for S2 {
"###);
}
#[test]
fn unselected_projection_on_trait_self() {
let t = type_at(
r#"
//- /main.rs
trait Trait {
type Item;
fn f(&self) -> Self::Item { loop {} }
}
struct S;
impl Trait for S {
type Item = u32;
}
fn test() {
S.f()<|>;
}
"#,
);
assert_eq!(t, "u32");
}
#[test]
fn trait_impl_self_ty() {
let t = type_at(
@@ -1923,6 +1941,119 @@ fn test<T, U>() where T: Trait<U::Item>, U: Trait<T::Item> {
assert_eq!(t, "{unknown}");
}
#[test]
fn inline_assoc_type_bounds_1() {
let t = type_at(
r#"
//- /main.rs
trait Iterator {
type Item;
}
trait OtherTrait<T> {
fn foo(&self) -> T;
}
// workaround for Chalk assoc type normalization problems
pub struct S<T>;
impl<T: Iterator> Iterator for S<T> {
type Item = <T as Iterator>::Item;
}
fn test<I: Iterator<Item: OtherTrait<u32>>>() {
let x: <S<I> as Iterator>::Item;
x.foo()<|>;
}
"#,
);
assert_eq!(t, "u32");
}
#[test]
fn inline_assoc_type_bounds_2() {
let t = type_at(
r#"
//- /main.rs
trait Iterator {
type Item;
}
fn test<I: Iterator<Item: Iterator<Item = u32>>>() {
let x: <<I as Iterator>::Item as Iterator>::Item;
x<|>;
}
"#,
);
assert_eq!(t, "u32");
}
#[test]
fn proc_macro_server_types() {
assert_snapshot!(
infer_with_mismatches(r#"
macro_rules! with_api {
($S:ident, $self:ident, $m:ident) => {
$m! {
TokenStream {
fn new() -> $S::TokenStream;
},
Group {
},
}
};
}
macro_rules! associated_item {
(type TokenStream) =>
(type TokenStream: 'static + Clone;);
(type Group) =>
(type Group: 'static + Clone;);
($($item:tt)*) => ($($item)*;)
}
macro_rules! declare_server_traits {
($($name:ident {
$(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
}),* $(,)?) => {
pub trait Types {
$(associated_item!(type $name);)*
}
$(pub trait $name: Types {
$(associated_item!(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)?);)*
})*
pub trait Server: Types $(+ $name)* {}
impl<S: Types $(+ $name)*> Server for S {}
}
}
with_api!(Self, self_, declare_server_traits);
struct Group {}
struct TokenStream {}
struct Rustc;
impl Types for Rustc {
type TokenStream = TokenStream;
type Group = Group;
}
fn make<T>() -> T { loop {} }
impl TokenStream for Rustc {
fn new() -> Self::TokenStream {
let group: Self::Group = make();
make()
}
}
"#, true),
@r###"
[1115; 1126) '{ loop {} }': T
[1117; 1124) 'loop {}': !
[1122; 1124) '{}': ()
[1190; 1253) '{ ... }': {unknown}
[1204; 1209) 'group': {unknown}
[1225; 1229) 'make': fn make<{unknown}>() -> {unknown}
[1225; 1231) 'make()': {unknown}
[1241; 1245) 'make': fn make<{unknown}>() -> {unknown}
[1241; 1247) 'make()': {unknown}
"###
);
}
#[test]
fn unify_impl_trait() {
assert_snapshot!(
@@ -2022,6 +2153,33 @@ fn main() {
);
}
#[test]
fn associated_type_bound() {
let t = type_at(
r#"
//- /main.rs
pub trait Trait {
type Item: OtherTrait<u32>;
}
pub trait OtherTrait<T> {
fn foo(&self) -> T;
}
// this is just a workaround for chalk#234
pub struct S<T>;
impl<T: Trait> Trait for S<T> {
type Item = <T as Trait>::Item;
}
fn test<T: Trait>() {
let y: <S<T> as Trait>::Item = no_matter;
y.foo()<|>;
}
"#,
);
assert_eq!(t, "u32");
}
#[test]
fn dyn_trait_through_chalk() {
let t = type_at(

View File

@@ -16,10 +16,12 @@ use self::chalk::{from_chalk, Interner, ToChalk};
pub(crate) mod chalk;
mod builtin;
/// This controls the maximum size of types Chalk considers. If we set this too
/// high, we can run into slow edge cases; if we set it too low, Chalk won't
/// find some solutions.
const CHALK_SOLVER_MAX_SIZE: usize = 10;
// This controls the maximum size of types Chalk considers. If we set this too
// high, we can run into slow edge cases; if we set it too low, Chalk won't
// find some solutions.
// FIXME this is currently hardcoded in the recursive solver
// const CHALK_SOLVER_MAX_SIZE: usize = 10;
/// This controls how much 'time' we give the Chalk solver before giving up.
const CHALK_SOLVER_FUEL: i32 = 100;
@@ -30,8 +32,7 @@ struct ChalkContext<'a> {
}
fn create_chalk_solver() -> chalk_solve::Solver<Interner> {
let solver_choice =
chalk_solve::SolverChoice::SLG { max_size: CHALK_SOLVER_MAX_SIZE, expected_answers: None };
let solver_choice = chalk_solve::SolverChoice::recursive();
solver_choice.into_solver()
}
@@ -194,13 +195,16 @@ fn solve(
}
remaining > 0
};
let mut solve = || solver.solve_limited(&context, goal, should_continue);
let mut solve = || {
let solution = solver.solve_limited(&context, goal, should_continue);
log::debug!("solve({:?}) => {:?}", goal, solution);
solution
};
// don't set the TLS for Chalk unless Chalk debugging is active, to make
// extra sure we only use it for debugging
let solution =
if is_chalk_debug() { chalk::tls::set_current_program(db, solve) } else { solve() };
log::debug!("solve({:?}) => {:?}", goal, solution);
solution
}

View File

@@ -32,6 +32,9 @@ impl chalk_ir::interner::Interner for Interner {
type InternedGoal = Arc<GoalData<Self>>;
type InternedGoals = Vec<Goal<Self>>;
type InternedSubstitution = Vec<Parameter<Self>>;
type InternedProgramClause = chalk_ir::ProgramClauseData<Self>;
type InternedProgramClauses = Vec<chalk_ir::ProgramClause<Self>>;
type InternedQuantifiedWhereClauses = Vec<chalk_ir::QuantifiedWhereClause<Self>>;
type Identifier = TypeAliasId;
type DefId = InternId;
@@ -181,6 +184,48 @@ impl chalk_ir::interner::Interner for Interner {
) -> &'a [Parameter<Self>] {
substitution
}
fn intern_program_clause(
&self,
data: chalk_ir::ProgramClauseData<Self>,
) -> chalk_ir::ProgramClauseData<Self> {
data
}
fn program_clause_data<'a>(
&self,
clause: &'a chalk_ir::ProgramClauseData<Self>,
) -> &'a chalk_ir::ProgramClauseData<Self> {
clause
}
fn intern_program_clauses(
&self,
data: impl IntoIterator<Item = chalk_ir::ProgramClause<Self>>,
) -> Vec<chalk_ir::ProgramClause<Self>> {
data.into_iter().collect()
}
fn program_clauses_data<'a>(
&self,
clauses: &'a Vec<chalk_ir::ProgramClause<Self>>,
) -> &'a [chalk_ir::ProgramClause<Self>] {
clauses
}
fn intern_quantified_where_clauses(
&self,
data: impl IntoIterator<Item = chalk_ir::QuantifiedWhereClause<Self>>,
) -> Self::InternedQuantifiedWhereClauses {
data.into_iter().collect()
}
fn quantified_where_clauses_data<'a>(
&self,
clauses: &'a Self::InternedQuantifiedWhereClauses,
) -> &'a [chalk_ir::QuantifiedWhereClause<Self>] {
clauses
}
}
impl chalk_ir::interner::HasInterner for Interner {
@@ -238,12 +283,10 @@ impl ToChalk for Ty {
Ty::Bound(idx) => chalk_ir::TyData::BoundVar(idx).intern(&Interner),
Ty::Infer(_infer_ty) => panic!("uncanonicalized infer ty"),
Ty::Dyn(predicates) => {
let where_clauses = predicates
.iter()
.filter(|p| !p.is_error())
.cloned()
.map(|p| p.to_chalk(db))
.collect();
let where_clauses = chalk_ir::QuantifiedWhereClauses::from(
&Interner,
predicates.iter().filter(|p| !p.is_error()).cloned().map(|p| p.to_chalk(db)),
);
let bounded_ty = chalk_ir::DynTy { bounds: make_binders(where_clauses, 1) };
chalk_ir::TyData::Dyn(bounded_ty).intern(&Interner)
}
@@ -281,8 +324,12 @@ impl ToChalk for Ty {
chalk_ir::TyData::InferenceVar(_iv) => Ty::Unknown,
chalk_ir::TyData::Dyn(where_clauses) => {
assert_eq!(where_clauses.bounds.binders.len(), 1);
let predicates =
where_clauses.bounds.value.into_iter().map(|c| from_chalk(db, c)).collect();
let predicates = where_clauses
.bounds
.skip_binders()
.iter(&Interner)
.map(|c| from_chalk(db, c.clone()))
.collect();
Ty::Dyn(predicates)
}
}
@@ -426,7 +473,7 @@ impl ToChalk for GenericPredicate {
) -> GenericPredicate {
// we don't produce any where clauses with binders and can't currently deal with them
match where_clause
.value
.skip_binders()
.shifted_out(&Interner)
.expect("unexpected bound vars in where clause")
{
@@ -464,13 +511,13 @@ impl ToChalk for ProjectionTy {
}
impl ToChalk for super::ProjectionPredicate {
type Chalk = chalk_ir::Normalize<Interner>;
type Chalk = chalk_ir::AliasEq<Interner>;
fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Normalize<Interner> {
chalk_ir::Normalize { alias: self.projection_ty.to_chalk(db), ty: self.ty.to_chalk(db) }
fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::AliasEq<Interner> {
chalk_ir::AliasEq { alias: self.projection_ty.to_chalk(db), ty: self.ty.to_chalk(db) }
}
fn from_chalk(_db: &dyn HirDatabase, _normalize: chalk_ir::Normalize<Interner>) -> Self {
fn from_chalk(_db: &dyn HirDatabase, _normalize: chalk_ir::AliasEq<Interner>) -> Self {
unimplemented!()
}
}
@@ -521,7 +568,7 @@ impl ToChalk for Arc<super::TraitEnvironment> {
pred.clone().to_chalk(db).cast(&Interner);
clauses.push(program_clause.into_from_env_clause(&Interner));
}
chalk_ir::Environment::new().add_clauses(clauses)
chalk_ir::Environment::new(&Interner).add_clauses(&Interner, clauses)
}
fn from_chalk(
@@ -603,10 +650,10 @@ impl ToChalk for builtin::BuiltinImplAssocTyValueData {
}
fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T> {
chalk_ir::Binders {
chalk_ir::Binders::new(
std::iter::repeat(chalk_ir::ParameterKind::Ty(())).take(num_vars).collect(),
value,
binders: std::iter::repeat(chalk_ir::ParameterKind::Ty(())).take(num_vars).collect(),
}
)
}
fn convert_where_clauses(
@@ -626,6 +673,55 @@ fn convert_where_clauses(
result
}
fn generic_predicate_to_inline_bound(
db: &dyn HirDatabase,
pred: &GenericPredicate,
self_ty: &Ty,
) -> Option<chalk_rust_ir::InlineBound<Interner>> {
// An InlineBound is like a GenericPredicate, except the self type is left out.
// We don't have a special type for this, but Chalk does.
match pred {
GenericPredicate::Implemented(trait_ref) => {
if &trait_ref.substs[0] != self_ty {
// we can only convert predicates back to type bounds if they
// have the expected self type
return None;
}
let args_no_self = trait_ref.substs[1..]
.iter()
.map(|ty| ty.clone().to_chalk(db).cast(&Interner))
.collect();
let trait_bound =
chalk_rust_ir::TraitBound { trait_id: trait_ref.trait_.to_chalk(db), args_no_self };
Some(chalk_rust_ir::InlineBound::TraitBound(trait_bound))
}
GenericPredicate::Projection(proj) => {
if &proj.projection_ty.parameters[0] != self_ty {
return None;
}
let trait_ = match proj.projection_ty.associated_ty.lookup(db.upcast()).container {
AssocContainerId::TraitId(t) => t,
_ => panic!("associated type not in trait"),
};
let args_no_self = proj.projection_ty.parameters[1..]
.iter()
.map(|ty| ty.clone().to_chalk(db).cast(&Interner))
.collect();
let alias_eq_bound = chalk_rust_ir::AliasEqBound {
value: proj.ty.clone().to_chalk(db),
trait_bound: chalk_rust_ir::TraitBound {
trait_id: trait_.to_chalk(db),
args_no_self,
},
associated_ty_id: proj.projection_ty.associated_ty.to_chalk(db),
parameters: Vec::new(), // FIXME we don't support generic associated types yet
};
Some(chalk_rust_ir::InlineBound::AliasEqBound(alias_eq_bound))
}
GenericPredicate::Error => None,
}
}
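The round trip this function performs, in surface terms (a sketch, not part of the diff): a bound written inline on an associated type is stored internally as a self-qualified predicate, and Chalk wants the inline form back.

trait Container {
    // Written inline: `Item: Clone`.
    type Item: Clone;
}

// Stored roughly as the predicate `<Self as Container>::Item: Clone`;
// generic_predicate_to_inline_bound drops the self type again when
// handing the bound to Chalk.
fn duplicate<C: Container>(item: &C::Item) -> (C::Item, C::Item) {
    (item.clone(), item.clone())
}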
impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
fn associated_ty_data(&self, id: AssocTypeId) -> Arc<AssociatedTyDatum> {
self.db.associated_ty_data(id)
@@ -696,6 +792,13 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
fn interner(&self) -> &Interner {
&Interner
}
fn well_known_trait_id(
&self,
_well_known_trait: chalk_rust_ir::WellKnownTrait,
) -> Option<chalk_ir::TraitId<Interner>> {
// FIXME tell Chalk about well-known traits (here and in trait_datum)
None
}
}
pub(crate) fn associated_ty_data_query(
@@ -708,12 +811,25 @@ pub(crate) fn associated_ty_data_query(
AssocContainerId::TraitId(t) => t,
_ => panic!("associated type not in trait"),
};
// Lower bounds -- we could/should maybe move this to a separate query in `lower`
let type_alias_data = db.type_alias_data(type_alias);
let generic_params = generics(db.upcast(), type_alias.into());
let bound_data = chalk_rust_ir::AssociatedTyDatumBound {
// FIXME add bounds and where clauses
bounds: vec![],
where_clauses: vec![],
};
let bound_vars = Substs::bound_vars(&generic_params);
let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast());
let ctx = crate::TyLoweringContext::new(db, &resolver)
.with_type_param_mode(crate::lower::TypeParamLoweringMode::Variable);
let self_ty = Ty::Bound(crate::BoundVar::new(crate::DebruijnIndex::INNERMOST, 0));
let bounds = type_alias_data
.bounds
.iter()
.flat_map(|bound| GenericPredicate::from_type_bound(&ctx, bound, self_ty.clone()))
.filter_map(|pred| generic_predicate_to_inline_bound(db, &pred, &self_ty))
.map(|bound| make_binders(bound.shifted_in(&Interner), 0))
.collect();
let where_clauses = convert_where_clauses(db, type_alias.into(), &bound_vars);
let bound_data = chalk_rust_ir::AssociatedTyDatumBound { bounds, where_clauses };
let datum = AssociatedTyDatum {
trait_id: trait_.to_chalk(db),
id,

View File

@@ -2,10 +2,11 @@
use std::fmt;
use chalk_ir::{AliasTy, Goal, Goals, Lifetime, Parameter, ProgramClauseImplication, TypeName};
use itertools::Itertools;
use super::{from_chalk, Interner};
use crate::{db::HirDatabase, CallableDef, TypeCtor};
use hir_def::{AdtId, AssocContainerId, Lookup, TypeAliasId};
use hir_def::{AdtId, AssocContainerId, DefWithBodyId, Lookup, TypeAliasId};
pub use unsafe_tls::{set_current_program, with_current_program};
@@ -69,7 +70,27 @@ impl DebugContext<'_> {
write!(f, "{}::{}", trait_name, name)?;
}
TypeCtor::Closure { def, expr } => {
write!(f, "{{closure {:?} in {:?}}}", expr.into_raw(), def)?;
write!(f, "{{closure {:?} in ", expr.into_raw())?;
match def {
DefWithBodyId::FunctionId(func) => {
write!(f, "fn {}", self.0.function_data(func).name)?
}
DefWithBodyId::StaticId(s) => {
if let Some(name) = self.0.static_data(s).name.as_ref() {
write!(f, "body of static {}", name)?;
} else {
write!(f, "body of unnamed static {:?}", s)?;
}
}
DefWithBodyId::ConstId(c) => {
if let Some(name) = self.0.const_data(c).name.as_ref() {
write!(f, "body of const {}", name)?;
} else {
write!(f, "body of unnamed const {:?}", c)?;
}
}
};
write!(f, "}}")?;
}
}
Ok(())
@@ -113,14 +134,15 @@ impl DebugContext<'_> {
};
let trait_data = self.0.trait_data(trait_);
let params = alias.substitution.parameters(&Interner);
write!(
fmt,
"<{:?} as {}<{:?}>>::{}",
&params[0],
trait_data.name,
&params[1..],
type_alias_data.name
)
write!(fmt, "<{:?} as {}", &params[0], trait_data.name,)?;
if params.len() > 1 {
write!(
fmt,
"<{}>",
&params[1..].iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))),
)?;
}
write!(fmt, ">::{}", type_alias_data.name)
}
pub fn debug_ty(

View File

@@ -35,4 +35,4 @@ ra_assists = { path = "../ra_assists" }
hir = { path = "../ra_hir", package = "ra_hir" }
[dev-dependencies]
insta = "0.15.0"
insta = "0.16.0"

View File

@@ -175,7 +175,7 @@ impl ToNav for FileSymbol {
NavigationTarget {
file_id: self.file_id,
name: self.name.clone(),
kind: self.ptr.kind(),
kind: self.kind,
full_range: self.ptr.range(),
focus_range: self.name_range,
container_name: self.container_name.clone(),

View File

@@ -285,7 +285,7 @@ impl Query {
let (start, end) = SymbolIndex::map_value_to_range(indexed_value.value);
for symbol in &symbol_index.symbols[start..end] {
if self.only_types && !is_type(symbol.ptr.kind()) {
if self.only_types && !is_type(symbol.kind) {
continue;
}
if self.exact && symbol.name != self.query {
@@ -312,6 +312,7 @@ fn is_type(kind: SyntaxKind) -> bool {
pub struct FileSymbol {
pub file_id: FileId,
pub name: SmolStr,
pub kind: SyntaxKind,
pub ptr: SyntaxNodePtr,
pub name_range: Option<TextRange>,
pub container_name: Option<SmolStr>,
@@ -377,6 +378,7 @@ fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
fn to_file_symbol(node: &SyntaxNode, file_id: FileId) -> Option<FileSymbol> {
to_symbol(node).map(move |(name, ptr, name_range)| FileSymbol {
name,
kind: node.kind(),
ptr,
file_id,
name_range: Some(name_range),

View File

@@ -607,12 +607,13 @@ impl<'a> TreeSink for TtTreeSink<'a> {
let text: SmolStr = match self.cursor.token_tree() {
Some(tt::TokenTree::Leaf(leaf)) => {
// Mark the range if needed
let id = match leaf {
tt::Leaf::Ident(ident) => ident.id,
tt::Leaf::Punct(punct) => punct.id,
tt::Leaf::Literal(lit) => lit.id,
let (text, id) = match leaf {
tt::Leaf::Ident(ident) => (ident.text.clone(), ident.id),
tt::Leaf::Punct(punct) => {
(SmolStr::new_inline_from_ascii(1, &[punct.char as u8]), punct.id)
}
tt::Leaf::Literal(lit) => (lit.text.clone(), lit.id),
};
let text = SmolStr::new(format!("{}", leaf));
let range = TextRange::offset_len(self.text_pos, TextUnit::of_str(&text));
self.token_map.insert(id, range);
self.cursor = self.cursor.bump();

View File

@@ -12,6 +12,7 @@ pub mod msg;
use process::{ProcMacroProcessSrv, ProcMacroProcessThread};
use ra_tt::{SmolStr, Subtree};
use std::{
ffi::OsStr,
path::{Path, PathBuf},
sync::Arc,
};
@@ -56,8 +57,15 @@ pub struct ProcMacroClient {
}
impl ProcMacroClient {
pub fn extern_process(process_path: &Path) -> Result<ProcMacroClient, std::io::Error> {
let (thread, process) = ProcMacroProcessSrv::run(process_path)?;
pub fn extern_process<I, S>(
process_path: &Path,
args: I,
) -> Result<ProcMacroClient, std::io::Error>
where
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
{
let (thread, process) = ProcMacroProcessSrv::run(process_path, args)?;
Ok(ProcMacroClient {
kind: ProcMacroClientKind::Process { process: Arc::new(process), thread },
})
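A hedged sketch of a call site for the widened signature (the binary path and argument here are assumptions, though the `proc-macro` subcommand itself is added elsewhere in this diff):

use std::path::Path;

// Illustrative only: launch the server binary with the subcommand that
// makes it enter the proc-macro expansion loop.
fn start() -> Result<ProcMacroClient, std::io::Error> {
    ProcMacroClient::extern_process(Path::new("rust-analyzer"), vec!["proc-macro"])
}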

View File

@@ -9,6 +9,7 @@ use crate::rpc::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask};
use io::{BufRead, BufReader};
use std::{
convert::{TryFrom, TryInto},
ffi::OsStr,
io::{self, Write},
path::{Path, PathBuf},
process::{Child, Command, Stdio},
@@ -44,8 +45,13 @@ impl Drop for Process {
}
impl Process {
fn run(process_path: &Path) -> Result<Process, io::Error> {
fn run<I, S>(process_path: &Path, args: I) -> Result<Process, io::Error>
where
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
{
let child = Command::new(process_path.clone())
.args(args)
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.stderr(Stdio::null())
@@ -74,10 +80,15 @@ impl Process {
}
impl ProcMacroProcessSrv {
pub fn run(
pub fn run<I, S>(
process_path: &Path,
) -> Result<(ProcMacroProcessThread, ProcMacroProcessSrv), io::Error> {
let process = Process::run(process_path)?;
args: I,
) -> Result<(ProcMacroProcessThread, ProcMacroProcessSrv), io::Error>
where
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
{
let process = Process::run(process_path, args)?;
let (task_tx, task_rx) = bounded(0);
let handle = jod_thread::spawn(move || {

View File

@@ -1,7 +1,7 @@
//! Driver for proc macro server
use crate::{expand_task, list_macros};
use ra_proc_macro::msg::{self, Message};
use ra_proc_macro_srv::{expand_task, list_macros};
use std::io;
@@ -24,7 +24,8 @@ fn write_response(res: Result<msg::Response, String>) -> Result<(), io::Error> {
let mut stdout = stdout.lock();
msg.write(&mut stdout)
}
fn main() {
pub fn run() {
loop {
let req = match read_request() {
Err(err) => {

View File

@@ -22,7 +22,7 @@ mod dylib;
use proc_macro::bridge::client::TokenStream;
use ra_proc_macro::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask};
pub fn expand_task(task: &ExpansionTask) -> Result<ExpansionResult, String> {
pub(crate) fn expand_task(task: &ExpansionTask) -> Result<ExpansionResult, String> {
let expander = dylib::Expander::new(&task.lib)
.expect(&format!("Cannot expand with provided libraries: ${:?}", &task.lib));
@@ -39,7 +39,7 @@ pub fn expand_task(task: &ExpansionTask) -> Result<ExpansionResult, String> {
}
}
pub fn list_macros(task: &ListMacrosTask) -> Result<ListMacrosResult, String> {
pub(crate) fn list_macros(task: &ListMacrosTask) -> Result<ListMacrosResult, String> {
let expander = dylib::Expander::new(&task.lib)
.expect(&format!("Cannot expand with provided libraries: ${:?}", &task.lib));
@@ -53,5 +53,7 @@ pub fn list_macros(task: &ListMacrosTask) -> Result<ListMacrosResult, String> {
}
}
pub mod cli;
#[cfg(test)]
mod tests;

View File

@@ -5,9 +5,8 @@ mod json_project;
mod sysroot;
use std::{
error::Error,
fs::{read_dir, File, ReadDir},
io::BufReader,
io::{self, BufReader},
path::{Path, PathBuf},
process::Command,
};
@@ -25,25 +24,6 @@ pub use crate::{
};
pub use ra_proc_macro::ProcMacroClient;
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct CargoTomlNotFoundError {
pub searched_at: PathBuf,
pub reason: String,
}
impl std::fmt::Display for CargoTomlNotFoundError {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
fmt,
"can't find Cargo.toml at {}, due to {}",
self.searched_at.display(),
self.reason
)
}
}
impl Error for CargoTomlNotFoundError {}
#[derive(Debug, Clone)]
pub enum ProjectWorkspace {
/// Project workspace was discovered by running `cargo metadata` and `rustc --print sysroot`.
@@ -77,31 +57,119 @@ impl PackageRoot {
}
}
impl ProjectWorkspace {
pub fn discover(path: &Path, cargo_features: &CargoConfig) -> Result<ProjectWorkspace> {
ProjectWorkspace::discover_with_sysroot(path, true, cargo_features)
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ProjectRoot {
ProjectJson(PathBuf),
CargoToml(PathBuf),
}
impl ProjectRoot {
pub fn from_manifest_file(path: PathBuf) -> Result<ProjectRoot> {
if path.ends_with("rust-project.json") {
return Ok(ProjectRoot::ProjectJson(path));
}
if path.ends_with("Cargo.toml") {
return Ok(ProjectRoot::CargoToml(path));
}
bail!("project root must point to Cargo.toml or rust-project.json: {}", path.display())
}
pub fn discover_with_sysroot(
path: &Path,
with_sysroot: bool,
cargo_features: &CargoConfig,
) -> Result<ProjectWorkspace> {
match find_rust_project_json(path) {
Some(json_path) => {
let file = File::open(&json_path)
.with_context(|| format!("Failed to open json file {}", json_path.display()))?;
let reader = BufReader::new(file);
Ok(ProjectWorkspace::Json {
project: from_reader(reader).with_context(|| {
format!("Failed to deserialize json file {}", json_path.display())
})?,
})
pub fn discover_single(path: &Path) -> Result<ProjectRoot> {
let mut candidates = ProjectRoot::discover(path)?;
let res = match candidates.pop() {
None => bail!("no projects"),
Some(it) => it,
};
if !candidates.is_empty() {
bail!("more than one project")
}
Ok(res)
}
pub fn discover(path: &Path) -> io::Result<Vec<ProjectRoot>> {
if let Some(project_json) = find_rust_project_json(path) {
return Ok(vec![ProjectRoot::ProjectJson(project_json)]);
}
return find_cargo_toml(path)
.map(|paths| paths.into_iter().map(ProjectRoot::CargoToml).collect());
fn find_rust_project_json(path: &Path) -> Option<PathBuf> {
if path.ends_with("rust-project.json") {
return Some(path.to_path_buf());
}
None => {
let cargo_toml = find_cargo_toml(path).with_context(|| {
format!("Failed to find Cargo.toml for path {}", path.display())
let mut curr = Some(path);
while let Some(path) = curr {
let candidate = path.join("rust-project.json");
if candidate.exists() {
return Some(candidate);
}
curr = path.parent();
}
None
}
fn find_cargo_toml(path: &Path) -> io::Result<Vec<PathBuf>> {
if path.ends_with("Cargo.toml") {
return Ok(vec![path.to_path_buf()]);
}
if let Some(p) = find_cargo_toml_in_parent_dir(path) {
return Ok(vec![p]);
}
let entities = read_dir(path)?;
Ok(find_cargo_toml_in_child_dir(entities))
}
fn find_cargo_toml_in_parent_dir(path: &Path) -> Option<PathBuf> {
let mut curr = Some(path);
while let Some(path) = curr {
let candidate = path.join("Cargo.toml");
if candidate.exists() {
return Some(candidate);
}
curr = path.parent();
}
None
}
fn find_cargo_toml_in_child_dir(entities: ReadDir) -> Vec<PathBuf> {
// Only one level down to avoid cycles the easy way and stop a runaway scan with large projects
let mut valid_canditates = vec![];
for entity in entities.filter_map(Result::ok) {
let candidate = entity.path().join("Cargo.toml");
if candidate.exists() {
valid_canditates.push(candidate)
}
}
valid_canditates
}
}
}
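The intended flow with the refactored API, sketched (illustrative; `CargoConfig::default()` is an assumption):

use std::path::Path;

// Illustrative only: discovery now yields a typed ProjectRoot, and loading
// the workspace from it is a separate, explicit step.
fn open(path: &Path) -> Result<ProjectWorkspace> {
    let root = ProjectRoot::discover_single(path)?;
    ProjectWorkspace::load(root, &CargoConfig::default(), /* with_sysroot */ true)
}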
impl ProjectWorkspace {
pub fn load(
root: ProjectRoot,
cargo_features: &CargoConfig,
with_sysroot: bool,
) -> Result<ProjectWorkspace> {
let res = match root {
ProjectRoot::ProjectJson(project_json) => {
let file = File::open(&project_json).with_context(|| {
format!("Failed to open json file {}", project_json.display())
})?;
let reader = BufReader::new(file);
ProjectWorkspace::Json {
project: from_reader(reader).with_context(|| {
format!("Failed to deserialize json file {}", project_json.display())
})?,
}
}
ProjectRoot::CargoToml(cargo_toml) => {
let cargo = CargoWorkspace::from_cargo_metadata(&cargo_toml, cargo_features)
.with_context(|| {
format!(
@@ -119,9 +187,11 @@ impl ProjectWorkspace {
} else {
Sysroot::default()
};
Ok(ProjectWorkspace::Cargo { cargo, sysroot })
ProjectWorkspace::Cargo { cargo, sysroot }
}
}
};
Ok(res)
}
/// Returns the roots for the current `ProjectWorkspace`
@@ -469,87 +539,6 @@ impl ProjectWorkspace {
}
}
fn find_rust_project_json(path: &Path) -> Option<PathBuf> {
if path.ends_with("rust-project.json") {
return Some(path.to_path_buf());
}
let mut curr = Some(path);
while let Some(path) = curr {
let candidate = path.join("rust-project.json");
if candidate.exists() {
return Some(candidate);
}
curr = path.parent();
}
None
}
fn find_cargo_toml_in_parent_dir(path: &Path) -> Option<PathBuf> {
let mut curr = Some(path);
while let Some(path) = curr {
let candidate = path.join("Cargo.toml");
if candidate.exists() {
return Some(candidate);
}
curr = path.parent();
}
None
}
fn find_cargo_toml_in_child_dir(entities: ReadDir) -> Vec<PathBuf> {
// Only one level down to avoid cycles the easy way and stop a runaway scan with large projects
let mut valid_canditates = vec![];
for entity in entities.filter_map(Result::ok) {
let candidate = entity.path().join("Cargo.toml");
if candidate.exists() {
valid_canditates.push(candidate)
}
}
valid_canditates
}
fn find_cargo_toml(path: &Path) -> Result<PathBuf> {
if path.ends_with("Cargo.toml") {
return Ok(path.to_path_buf());
}
if let Some(p) = find_cargo_toml_in_parent_dir(path) {
return Ok(p);
}
let entities = match read_dir(path) {
Ok(entities) => entities,
Err(e) => {
return Err(CargoTomlNotFoundError {
searched_at: path.to_path_buf(),
reason: format!("file system error: {}", e),
}
.into());
}
};
let mut valid_canditates = find_cargo_toml_in_child_dir(entities);
match valid_canditates.len() {
1 => Ok(valid_canditates.remove(0)),
0 => Err(CargoTomlNotFoundError {
searched_at: path.to_path_buf(),
reason: "no Cargo.toml file found".to_string(),
}
.into()),
_ => Err(CargoTomlNotFoundError {
searched_at: path.to_path_buf(),
reason: format!(
"multiple equally valid Cargo.toml files found: {:?}",
valid_canditates
),
}
.into()),
}
}
pub fn get_rustc_cfg_options() -> CfgOptions {
let mut cfg_options = CfgOptions::default();

View File

@@ -351,7 +351,7 @@ fn with_children(
// FIXME: use a more elegant way to re-fetch the node (#1185), make
// `range` private afterwards
let mut ptr = SyntaxNodePtr::new(parent);
ptr.range = TextRange::offset_len(ptr.range().start(), len);
ptr.range = TextRange::offset_len(ptr.range.start(), len);
ptr.to_node(&new_root_node)
}

View File

@@ -34,12 +34,8 @@ impl SyntaxNodePtr {
self.range
}
pub fn kind(&self) -> SyntaxKind {
self.kind
}
pub fn cast<N: AstNode>(self) -> Option<AstPtr<N>> {
if !N::can_cast(self.kind()) {
if !N::can_cast(self.kind) {
return None;
}
Some(AstPtr { raw: self, _ty: PhantomData })
@@ -88,7 +84,7 @@ impl<N: AstNode> AstPtr<N> {
}
pub fn cast<U: AstNode>(self) -> Option<AstPtr<U>> {
if !U::can_cast(self.raw.kind()) {
if !U::can_cast(self.raw.kind) {
return None;
}
Some(AstPtr { raw: self.raw, _ty: PhantomData })

View File

@@ -46,7 +46,7 @@ ra_db = { path = "../ra_db" }
hir = { path = "../ra_hir", package = "ra_hir" }
hir_def = { path = "../ra_hir_def", package = "ra_hir_def" }
hir_ty = { path = "../ra_hir_ty", package = "ra_hir_ty" }
ra_proc_macro_srv = { path = "../ra_proc_macro_srv" }
[target.'cfg(windows)'.dependencies]
winapi = "0.3.8"

View File

@@ -29,12 +29,23 @@ pub(crate) enum Command {
with_deps: bool,
path: PathBuf,
load_output_dirs: bool,
with_proc_macro: bool,
},
Bench {
path: PathBuf,
what: BenchWhat,
load_output_dirs: bool,
with_proc_macro: bool,
},
Diagnostics {
path: PathBuf,
load_output_dirs: bool,
with_proc_macro: bool,
/// Include files which are not modules. In rust-analyzer
/// this would include the parser test files.
all: bool,
},
ProcMacro,
RunServer,
Version,
}
@@ -141,6 +152,7 @@ FLAGS:
-h, --help Prints help information
--memory-usage
--load-output-dirs Load OUT_DIR values by running `cargo check` before analysis
--with-proc-macro Use ra-proc-macro-srv for proc-macro expansion
-v, --verbose
-q, --quiet
@@ -158,6 +170,7 @@ ARGS:
let only: Option<String> = matches.opt_value_from_str(["-o", "--only"])?;
let with_deps: bool = matches.contains("--with-deps");
let load_output_dirs = matches.contains("--load-output-dirs");
let with_proc_macro = matches.contains("--with-proc-macro");
let path = {
let mut trailing = matches.free()?;
if trailing.len() != 1 {
@@ -166,7 +179,15 @@ ARGS:
trailing.pop().unwrap().into()
};
Command::Stats { randomize, memory_usage, only, with_deps, path, load_output_dirs }
Command::Stats {
randomize,
memory_usage,
only,
with_deps,
path,
load_output_dirs,
with_proc_macro,
}
}
"analysis-bench" => {
if matches.contains(["-h", "--help"]) {
@ -180,6 +201,7 @@ USAGE:
FLAGS:
-h, --help Prints help information
--load-output-dirs Load OUT_DIR values by running `cargo check` before analysis
--with-proc-macro Use ra-proc-macro-srv for proc-macro expansion
-v, --verbose
OPTIONS:
@@ -207,8 +229,43 @@ ARGS:
),
};
let load_output_dirs = matches.contains("--load-output-dirs");
Command::Bench { path, what, load_output_dirs }
let with_proc_macro = matches.contains("--with-proc-macro");
Command::Bench { path, what, load_output_dirs, with_proc_macro }
}
"diagnostics" => {
if matches.contains(["-h", "--help"]) {
eprintln!(
"\
ra-cli-diagnostics
USAGE:
rust-analyzer diagnostics [FLAGS] [PATH]
FLAGS:
-h, --help Prints help information
--load-output-dirs Load OUT_DIR values by running `cargo check` before analysis
--all Include all files rather than only modules
ARGS:
<PATH>"
);
return Ok(Err(HelpPrinted));
}
let load_output_dirs = matches.contains("--load-output-dirs");
let with_proc_macro = matches.contains("--with-proc-macro");
let all = matches.contains("--all");
let path = {
let mut trailing = matches.free()?;
if trailing.len() != 1 {
bail!("Invalid flags");
}
trailing.pop().unwrap().into()
};
Command::Diagnostics { path, load_output_dirs, with_proc_macro, all }
}
"proc-macro" => Command::ProcMacro,
_ => {
eprintln!(
"\

View File

@@ -25,6 +25,7 @@ fn main() -> Result<()> {
with_deps,
path,
load_output_dirs,
with_proc_macro,
} => cli::analysis_stats(
args.verbosity,
memory_usage,
@@ -33,12 +34,24 @@ fn main() -> Result<()> {
with_deps,
randomize,
load_output_dirs,
with_proc_macro,
)?,
args::Command::Bench { path, what, load_output_dirs } => {
cli::analysis_bench(args.verbosity, path.as_ref(), what, load_output_dirs)?
args::Command::Bench { path, what, load_output_dirs, with_proc_macro } => {
cli::analysis_bench(
args.verbosity,
path.as_ref(),
what,
load_output_dirs,
with_proc_macro,
)?
}
args::Command::Diagnostics { path, load_output_dirs, with_proc_macro, all } => {
cli::diagnostics(path.as_ref(), load_output_dirs, with_proc_macro, all)?
}
args::Command::ProcMacro => run_proc_macro_sv()?,
args::Command::RunServer => run_server()?,
args::Command::Version => println!("rust-analyzer {}", env!("REV")),
}
@@ -52,6 +65,11 @@ fn setup_logging() -> Result<()> {
Ok(())
}
fn run_proc_macro_sv() -> Result<()> {
ra_proc_macro_srv::cli::run();
Ok(())
}
fn run_server() -> Result<()> {
log::info!("lifecycle: server started");

View File

@@ -3,6 +3,7 @@
mod load_cargo;
mod analysis_stats;
mod analysis_bench;
mod diagnostics;
mod progress_report;
use std::io::Read;
@ -12,6 +13,10 @@ use ra_ide::{file_structure, Analysis};
use ra_prof::profile;
use ra_syntax::{AstNode, SourceFile};
pub use analysis_bench::{analysis_bench, BenchWhat, Position};
pub use analysis_stats::analysis_stats;
pub use diagnostics::diagnostics;
#[derive(Clone, Copy)]
pub enum Verbosity {
Spammy,
@ -60,9 +65,6 @@ pub fn highlight(rainbow: bool) -> Result<()> {
Ok(())
}
pub use analysis_bench::{analysis_bench, BenchWhat, Position};
pub use analysis_stats::analysis_stats;
fn file() -> Result<SourceFile> {
let text = read_stdin()?;
Ok(SourceFile::parse(&text).tree())
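For illustration, the same entry point can be exercised directly; a minimal sketch using only the `ra_syntax` imports already listed above:

```rust
use ra_syntax::{AstNode, SourceFile};

fn debug_dump() {
    // Parse a snippet the same way `file()` parses stdin, then print
    // the untyped syntax tree (`syntax()` comes from `AstNode`).
    let tree = SourceFile::parse("fn main() {}").tree();
    println!("{:#?}", tree.syntax());
}
```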
View File
@ -47,12 +47,13 @@ pub fn analysis_bench(
path: &Path,
what: BenchWhat,
load_output_dirs: bool,
with_proc_macro: bool,
) -> Result<()> {
ra_prof::init();
let start = Instant::now();
eprint!("loading: ");
let (mut host, roots) = load_cargo(path, load_output_dirs)?;
let (mut host, roots) = load_cargo(path, load_output_dirs, with_proc_macro)?;
let db = host.raw_database();
eprintln!("{:?}\n", start.elapsed());
View File
@ -25,9 +25,10 @@ pub fn analysis_stats(
with_deps: bool,
randomize: bool,
load_output_dirs: bool,
with_proc_macro: bool,
) -> Result<()> {
let db_load_time = Instant::now();
let (mut host, roots) = load_cargo(path, load_output_dirs)?;
let (mut host, roots) = load_cargo(path, load_output_dirs, with_proc_macro)?;
let db = host.raw_database();
println!("Database loaded, {} roots, {:?}", roots.len(), db_load_time.elapsed());
let analysis_time = Instant::now();
View File
@ -0,0 +1,79 @@
//! Analyze all modules in a project for diagnostics. Exits with a non-zero status
//! code if any errors are found.
use anyhow::anyhow;
use ra_db::SourceDatabaseExt;
use ra_ide::Severity;
use std::{collections::HashSet, path::Path};
use crate::cli::{load_cargo::load_cargo, Result};
use hir::Semantics;
pub fn diagnostics(
path: &Path,
load_output_dirs: bool,
with_proc_macro: bool,
all: bool,
) -> Result<()> {
let (host, roots) = load_cargo(path, load_output_dirs, with_proc_macro)?;
let db = host.raw_database();
let analysis = host.analysis();
let semantics = Semantics::new(db);
let members = roots
.into_iter()
.filter_map(|(source_root_id, project_root)| {
// filter out dependencies
if project_root.is_member() {
Some(source_root_id)
} else {
None
}
})
.collect::<HashSet<_>>();
let mut found_error = false;
let mut visited_files = HashSet::new();
for source_root_id in members {
for file_id in db.source_root(source_root_id).walk() {
// Filter out files which are not actually modules (unless `--all` flag is
// passed). In the rust-analyzer repository this filters out the parser test files.
if (all || semantics.to_module_def(file_id).is_some())
    && visited_files.insert(file_id)
{
    let crate_name = semantics
        .to_module_def(file_id)
        .and_then(|module| module.krate().display_name(db))
        .map(|name| format!("{}", name))
        .unwrap_or_else(|| String::from("unknown"));
println!(
"processing crate: {}, module: {}",
crate_name,
db.file_relative_path(file_id)
);
for diagnostic in analysis.diagnostics(file_id).unwrap() {
if matches!(diagnostic.severity, Severity::Error) {
found_error = true;
}
println!("{:?}", diagnostic);
}
}
}
}
println!();
println!("diagnostic scan complete");
if found_error {
println!();
Err(anyhow!("diagnostic error detected"))
} else {
Ok(())
}
}
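A minimal sketch of driving this pass from code, mirroring the wiring in `main.rs` shown earlier (all flags off; the path argument is whatever project should be scanned):

```rust
use std::path::Path;

fn run_diagnostics_for_cwd() -> anyhow::Result<()> {
    // Returns Err("diagnostic error detected"), and therefore a non-zero
    // exit status once bubbled up from main, if any Severity::Error
    // diagnostic was seen.
    diagnostics(Path::new("."), false, false, false)
}
```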
View File
@ -8,7 +8,7 @@ use crossbeam_channel::{unbounded, Receiver};
use ra_db::{ExternSourceId, FileId, SourceRootId};
use ra_ide::{AnalysisChange, AnalysisHost};
use ra_project_model::{
get_rustc_cfg_options, CargoConfig, PackageRoot, ProcMacroClient, ProjectWorkspace,
get_rustc_cfg_options, CargoConfig, PackageRoot, ProcMacroClient, ProjectRoot, ProjectWorkspace,
};
use ra_vfs::{RootEntry, Vfs, VfsChange, VfsTask, Watch};
use rustc_hash::{FxHashMap, FxHashSet};
@ -25,11 +25,14 @@ fn vfs_root_to_id(r: ra_vfs::VfsRoot) -> SourceRootId {
pub(crate) fn load_cargo(
root: &Path,
load_out_dirs_from_check: bool,
with_proc_macro: bool,
) -> Result<(AnalysisHost, FxHashMap<SourceRootId, PackageRoot>)> {
let root = std::env::current_dir()?.join(root);
let ws = ProjectWorkspace::discover(
root.as_ref(),
let root = ProjectRoot::discover_single(&root)?;
let ws = ProjectWorkspace::load(
root,
&CargoConfig { load_out_dirs_from_check, ..Default::default() },
true,
)?;
let mut extern_dirs = FxHashSet::default();
@ -69,7 +72,14 @@ pub(crate) fn load_cargo(
})
.collect::<FxHashMap<_, _>>();
let proc_macro_client = ProcMacroClient::dummy();
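// Spawn the expander only on request: look for a `rust-analyzer` binary
// next to the current executable and run it with the `proc-macro`
// subcommand.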
let proc_macro_client = if !with_proc_macro {
ProcMacroClient::dummy()
} else {
let mut path = std::env::current_exe()?;
path.pop();
path.push("rust-analyzer");
ProcMacroClient::extern_process(&path, &["proc-macro"]).unwrap()
};
let host = load(&source_roots, ws, &mut vfs, receiver, extern_dirs, &proc_macro_client);
Ok((host, source_roots))
}
@ -175,7 +185,7 @@ mod tests {
#[test]
fn test_loading_rust_analyzer() {
let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap();
let (host, _roots) = load_cargo(path, false).unwrap();
let (host, _roots) = load_cargo(path, false, false).unwrap();
let n_crates = Crate::all(host.raw_database()).len();
// RA has quite a few crates, but the exact count doesn't matter
assert!(n_crates > 20);
View File
@ -20,7 +20,7 @@ pub struct Config {
pub with_sysroot: bool,
pub publish_diagnostics: bool,
pub lru_capacity: Option<usize>,
pub proc_macro_srv: Option<String>,
pub proc_macro_srv: Option<(String, Vec<String>)>,
pub files: FilesConfig,
pub notifications: NotificationsConfig,
@ -131,6 +131,18 @@ impl Config {
set(value, "/cargo/allFeatures", &mut self.cargo.all_features);
set(value, "/cargo/features", &mut self.cargo.features);
set(value, "/cargo/loadOutDirsFromCheck", &mut self.cargo.load_out_dirs_from_check);
match get::<bool>(value, "/procMacro/enabled") {
Some(true) => {
if let Ok(mut path) = std::env::current_exe() {
path.pop();
path.push("rust-analyzer");
self.proc_macro_srv = Some((path.to_string_lossy().to_string(), vec!["proc-macro".to_string()]));
}
}
_ => self.proc_macro_srv = None,
}
match get::<Vec<String>>(value, "/rustfmt/overrideCommand") {
Some(mut args) if !args.is_empty() => {
let command = args.remove(0);
View File
@ -15,6 +15,7 @@ use std::{
};
use crossbeam_channel::{never, select, unbounded, RecvError, Sender};
use itertools::Itertools;
use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response};
use lsp_types::{
NumberOrString, WorkDoneProgress, WorkDoneProgressBegin, WorkDoneProgressCreateParams,
@ -88,37 +89,46 @@ pub fn main_loop(ws_roots: Vec<PathBuf>, config: Config, connection: Connection)
let mut loop_state = LoopState::default();
let mut world_state = {
// FIXME: support dynamic workspace loading.
let workspaces = {
let mut loaded_workspaces = Vec::new();
for ws_root in &ws_roots {
let workspace = ra_project_model::ProjectWorkspace::discover_with_sysroot(
ws_root.as_path(),
config.with_sysroot,
&config.cargo,
);
match workspace {
Ok(workspace) => loaded_workspaces.push(workspace),
Err(e) => {
log::error!("loading workspace failed: {:?}", e);
// FIXME: support dynamic workspace loading.
let mut visited = FxHashSet::default();
let project_roots = ws_roots
.iter()
.filter_map(|it| ra_project_model::ProjectRoot::discover(it).ok())
.flatten()
.filter(|it| visited.insert(it.clone()))
.collect::<Vec<_>>();
if let Some(ra_project_model::CargoTomlNotFoundError { .. }) =
e.downcast_ref()
{
if !config.notifications.cargo_toml_not_found {
continue;
}
}
if project_roots.is_empty() && config.notifications.cargo_toml_not_found {
show_message(
req::MessageType::Error,
format!(
"rust-analyzer failed to discover workspace, no Cargo.toml found, dirs searched: {}",
ws_roots.iter().format_with(", ", |it, f| f(&it.display()))
),
&connection.sender,
);
};
project_roots
.into_iter()
.filter_map(|root| {
ra_project_model::ProjectWorkspace::load(
root,
&config.cargo,
config.with_sysroot,
)
.map_err(|err| {
log::error!("failed to load workspace: {:#}", err);
show_message(
req::MessageType::Error,
format!("rust-analyzer failed to load workspace: {:?}", e),
format!("rust-analyzer failed to load workspace: {:#}", err),
&connection.sender,
);
}
}
}
loaded_workspaces
})
.ok()
})
.collect::<Vec<_>>()
};
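Taken together, the replacement logic splits workspace loading into two stages; a condensed sketch of the new flow, with the user-facing message reporting elided:

```rust
// Stage 1: discover candidate project roots under every workspace
// folder, deduplicating roots shared between folders.
let mut visited = FxHashSet::default();
let project_roots: Vec<_> = ws_roots
    .iter()
    .filter_map(|it| ra_project_model::ProjectRoot::discover(it).ok())
    .flatten()
    .filter(|it| visited.insert(it.clone()))
    .collect();

// Stage 2: load each discovered root, logging and dropping failures.
let workspaces: Vec<_> = project_roots
    .into_iter()
    .filter_map(|root| {
        ra_project_model::ProjectWorkspace::load(root, &config.cargo, config.with_sysroot)
            .map_err(|err| log::error!("failed to load workspace: {:#}", err))
            .ok()
    })
    .collect();
```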
let globs = config
View File
@ -64,6 +64,7 @@ pub struct WorldState {
pub latest_requests: Arc<RwLock<LatestRequests>>,
pub flycheck: Option<Flycheck>,
pub diagnostics: DiagnosticCollection,
pub proc_macro_client: ProcMacroClient,
}
/// An immutable snapshot of the world's state at a point in time.
@ -147,9 +148,9 @@ impl WorldState {
let proc_macro_client = match &config.proc_macro_srv {
None => ProcMacroClient::dummy(),
Some(srv) => {
let path = Path::new(&srv);
match ProcMacroClient::extern_process(path) {
Some((path, args)) => {
let path = std::path::Path::new(path);
match ProcMacroClient::extern_process(path, args) {
Ok(it) => it,
Err(err) => {
log::error!(
@ -192,6 +193,7 @@ impl WorldState {
latest_requests: Default::default(),
flycheck,
diagnostics: Default::default(),
proc_macro_client,
}
}
View File
@ -9,7 +9,7 @@ use lsp_types::{
};
use rust_analyzer::req::{
CodeActionParams, CodeActionRequest, Completion, CompletionParams, DidOpenTextDocument,
Formatting, GotoDefinition, OnEnter, Runnables, RunnablesParams,
Formatting, GotoDefinition, HoverRequest, OnEnter, Runnables, RunnablesParams,
};
use serde_json::json;
use tempfile::TempDir;
@ -625,3 +625,92 @@ fn main() { message(); }
));
assert!(format!("{}", res).contains("hello.rs"));
}
#[test]
fn resolve_proc_macro() {
if skip_slow_tests() {
return;
}
let server = Project::with_fixture(
r###"
//- foo/Cargo.toml
[package]
name = "foo"
version = "0.0.0"
edition = "2018"
[dependencies]
bar = {path = "../bar"}
//- foo/src/main.rs
use bar::Bar;
trait Bar {
fn bar();
}
#[derive(Bar)]
struct Foo {}
fn main() {
Foo::bar();
}
//- bar/Cargo.toml
[package]
name = "bar"
version = "0.0.0"
edition = "2018"
[lib]
proc-macro = true
//- bar/src/lib.rs
extern crate proc_macro;
use proc_macro::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
macro_rules! t {
($n:literal) => {
TokenTree::from(Ident::new($n, Span::call_site()))
};
({}) => {
TokenTree::from(Group::new(Delimiter::Brace, TokenStream::new()))
};
(()) => {
TokenTree::from(Group::new(Delimiter::Parenthesis, TokenStream::new()))
};
}
#[proc_macro_derive(Bar)]
pub fn foo(_input: TokenStream) -> TokenStream {
// We hard-code the output here to avoid pulling in any dependencies
let mut res = TokenStream::new();
// impl Bar for Foo { fn bar() {} }
let mut tokens = vec![t!("impl"), t!("Bar"), t!("for"), t!("Foo")];
let mut fn_stream = TokenStream::new();
fn_stream.extend(vec![t!("fn"), t!("bar"), t!(()), t!({})]);
tokens.push(Group::new(Delimiter::Brace, fn_stream).into());
res.extend(tokens);
res
}
"###,
)
.with_config(|config| {
// FIXME: Use env!("CARGO_BIN_EXE_rust-analyzer") instead once
// https://github.com/rust-lang/cargo/pull/7697 has landed
let macro_srv_path = std::path::Path::new(std::env!("CARGO_MANIFEST_DIR"))
.join("../../target/debug/rust-analyzer")
.to_string_lossy()
.to_string();
config.cargo.load_out_dirs_from_check = true;
config.proc_macro_srv = Some((macro_srv_path, vec!["proc-macro".to_string()]));
})
.root("foo")
.root("bar")
.server();
server.wait_until_workspace_is_loaded();
let res = server.send_request::<HoverRequest>(TextDocumentPositionParams::new(
server.doc_id("foo/src/main.rs"),
Position::new(7, 9),
));
let value = res.get("contents").unwrap().get("value").unwrap().to_string();
assert_eq!(value, r#""```rust\nfoo::Bar\nfn bar()\n```""#)
}
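For reference, the token stream assembled by the derive in the fixture corresponds to this expansion (spelled out from its inline comment), which is what lets `Foo::bar()` resolve and produces the hover contents asserted above:

```rust
impl Bar for Foo {
    fn bar() {}
}
```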
View File
@ -77,7 +77,7 @@ fn foo() {
}
fn bar(arg: &str, baz: Baz) {
unimplemented!()
todo!()
}
```
View File
@ -109,9 +109,9 @@
}
},
"@types/vscode": {
"version": "1.43.0",
"resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.43.0.tgz",
"integrity": "sha512-kIaR9qzd80rJOxePKpCB/mdy00mz8Apt2QA5Y6rdrKFn13QNFNeP3Hzmsf37Bwh/3cS7QjtAeGSK7wSqAU0sYQ==",
"version": "1.44.0",
"resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.44.0.tgz",
"integrity": "sha512-WJZtZlinE3meRdH+I7wTsIhpz/GLhqEQwmPGeh4s1irWLwMzCeTV8WZ+pgPTwrDXoafVUWwo1LiZ9HJVHFlJSQ==",
"dev": true
},
"@typescript-eslint/eslint-plugin": {
@ -1776,32 +1776,32 @@
}
},
"vscode-jsonrpc": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-5.0.1.tgz",
"integrity": "sha512-JvONPptw3GAQGXlVV2utDcHx0BiY34FupW/kI6mZ5x06ER5DdPG/tXWMVHjTNULF5uKPOUUD0SaXg5QaubJL0A=="
"version": "5.1.0-next.1",
"resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-5.1.0-next.1.tgz",
"integrity": "sha512-mwLDojZkbmpizSJSmp690oa9FB9jig18SIDGZeBCvFc2/LYSRvMm/WwWtMBJuJ1MfFh7rZXfQige4Uje5Z9NzA=="
},
"vscode-languageclient": {
"version": "6.1.3",
"resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-6.1.3.tgz",
"integrity": "sha512-YciJxk08iU5LmWu7j5dUt9/1OLjokKET6rME3cI4BRpiF6HZlusm2ZwPt0MYJ0lV5y43sZsQHhyon2xBg4ZJVA==",
"version": "7.0.0-next.1",
"resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-7.0.0-next.1.tgz",
"integrity": "sha512-JrjCUhLpQZxQ5VpWpilOHDMhVsn0fdN5jBh1uFNhSr5c2loJvRdr9Km2EuSQOFfOQsBKx0+xvY8PbsypNEcJ6w==",
"requires": {
"semver": "^6.3.0",
"vscode-languageserver-protocol": "^3.15.3"
"vscode-languageserver-protocol": "3.16.0-next.2"
}
},
"vscode-languageserver-protocol": {
"version": "3.15.3",
"resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.15.3.tgz",
"integrity": "sha512-zrMuwHOAQRhjDSnflWdJG+O2ztMWss8GqUUB8dXLR/FPenwkiBNkMIJJYfSN6sgskvsF0rHAoBowNQfbyZnnvw==",
"version": "3.16.0-next.2",
"resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.16.0-next.2.tgz",
"integrity": "sha512-atmkGT/W6tF0cx4SaWFYtFs2UeSeC28RPiap9myv2YZTaTCFvTBEPNWrU5QRKfkyM0tbgtGo6T3UCQ8tkDpjzA==",
"requires": {
"vscode-jsonrpc": "^5.0.1",
"vscode-languageserver-types": "3.15.1"
"vscode-jsonrpc": "5.1.0-next.1",
"vscode-languageserver-types": "3.16.0-next.1"
}
},
"vscode-languageserver-types": {
"version": "3.15.1",
"resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.15.1.tgz",
"integrity": "sha512-+a9MPUQrNGRrGU630OGbYVQ+11iOIovjCkqxajPa9w57Sd5ruK8WQNsslzpa0x/QJqC8kRc2DUxWjIFwoNm4ZQ=="
"version": "3.16.0-next.1",
"resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.16.0-next.1.tgz",
"integrity": "sha512-tZFUSbyjUcrh+qQf13ALX4QDdOfDX0cVaBFgy7ktJ0VwS7AW/yRKgGPSxVqqP9OCMNPdqP57O5q47w2pEwfaUg=="
},
"which": {
"version": "1.3.1",
View File
@ -34,14 +34,14 @@
"dependencies": {
"jsonc-parser": "^2.2.1",
"node-fetch": "^2.6.0",
"vscode-languageclient": "6.1.3"
"vscode-languageclient": "7.0.0-next.1"
},
"devDependencies": {
"@rollup/plugin-commonjs": "^11.0.2",
"@rollup/plugin-node-resolve": "^7.1.1",
"@types/node": "^12.12.34",
"@types/node-fetch": "^2.5.5",
"@types/vscode": "^1.43.0",
"@types/vscode": "^1.44.0",
"@typescript-eslint/eslint-plugin": "^2.27.0",
"@typescript-eslint/parser": "^2.27.0",
"eslint": "^6.8.0",
@ -154,7 +154,7 @@
"keybindings": [
{
"command": "rust-analyzer.parentModule",
"key": "ctrl+u",
"key": "ctrl+shift+u",
"when": "editorTextFocus && editorLangId == rust"
},
{
@ -388,6 +388,11 @@
"description": "Enable logging of VS Code extensions itself",
"type": "boolean",
"default": false
},
"rust-analyzer.procMacro.enabled": {
"description": "Enable Proc macro support, cargo.loadOutDirsFromCheck must be enabled.",
"type": "boolean",
"default": false
}
}
},
View File
@ -12,6 +12,7 @@ export class Config {
private readonly requiresReloadOpts = [
"serverPath",
"cargo",
"procMacro",
"files",
"highlighting",
"updates.channel",
View File
@ -3,13 +3,13 @@ import * as vscode from 'vscode';
import * as ra from './rust-analyzer-api';
import { Ctx, Disposable } from './ctx';
import { sendRequestWithRetry, isRustDocument, RustDocument, RustEditor } from './util';
import { sendRequestWithRetry, isRustDocument, RustDocument, RustEditor, sleep } from './util';
export function activateInlayHints(ctx: Ctx) {
const maybeUpdater = {
updater: null as null | HintsUpdater,
onConfigChange() {
async onConfigChange() {
if (
!ctx.config.inlayHints.typeHints &&
!ctx.config.inlayHints.parameterHints &&
@ -17,6 +17,7 @@ export function activateInlayHints(ctx: Ctx) {
) {
return this.dispose();
}
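// Let the configuration change settle before re-rendering (assumed
// debounce; the 100 ms below is an arbitrary small delay).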
await sleep(100);
if (this.updater) {
this.updater.syncCacheAndRenderHints();
} else {
View File
@ -26,6 +26,7 @@ fn check_todo(path: &Path, text: &str) {
// Some of our assists generate `todo!()` so those files are whitelisted.
"doc_tests/generated.rs",
"handlers/add_missing_impl_members.rs",
"handlers/add_function.rs",
// To support generating `todo!()` in assists, we have `expr_todo()` in ast::make.
"ast/make.rs",
];