Merge branch 'master' of github.com:rust-analyzer/rust-analyzer

This commit is contained in:
Benjamin Coenen 2020-04-18 17:14:24 +02:00
commit af3c19e85f
30 changed files with 708 additions and 186 deletions

11
Cargo.lock generated
View File

@ -675,6 +675,16 @@ version = "2.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400"
[[package]]
name = "memmap"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6585fd95e7bb50d6cc31e20d4cf9afb4e2ba16c5846fc76793f11218da9c475b"
dependencies = [
"libc",
"winapi 0.3.8",
]
[[package]]
name = "memoffset"
version = "0.5.4"
@ -1112,6 +1122,7 @@ dependencies = [
"difference",
"goblin",
"libloading",
"memmap",
"ra_mbe",
"ra_proc_macro",
"ra_tt",

View File

@ -20,6 +20,7 @@ use rustc_hash::{FxHashMap, FxHashSet};
use crate::{
db::HirDatabase,
diagnostics::Diagnostic,
semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
source_analyzer::{resolve_hir_path, SourceAnalyzer},
AssocItem, Function, HirFileId, ImplDef, InFile, Local, MacroDef, Module, ModuleDef, Name,
@ -126,6 +127,13 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
original_range(self.db, node.as_ref())
}
pub fn diagnostics_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
    // Resolve the diagnostic's stored syntax pointer to a real node, then
    // map that node back to a range in an original (non-macro-expanded) file.
    let source = diagnostics.source();
    let parse_root = self.db.parse_or_expand(source.file_id).unwrap();
    let syntax = source.value.to_node(&parse_root);
    original_range(self.db, source.with_value(&syntax))
}
pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
let node = self.find_file(node);
node.ancestors_with_macros(self.db).map(|it| it.value)

View File

@ -23,7 +23,7 @@ use hir_ty::{
};
use ra_syntax::{
ast::{self, AstNode},
SyntaxNode, SyntaxNodePtr, TextUnit,
SyntaxNode, TextRange, TextUnit,
};
use crate::{
@ -56,7 +56,7 @@ impl SourceAnalyzer {
let scopes = db.expr_scopes(def);
let scope = match offset {
None => scope_for(&scopes, &source_map, node),
Some(offset) => scope_for_offset(&scopes, &source_map, node.with_value(offset)),
Some(offset) => scope_for_offset(db, &scopes, &source_map, node.with_value(offset)),
};
let resolver = resolver_for_scope(db.upcast(), def, scope);
SourceAnalyzer {
@ -304,6 +304,7 @@ fn scope_for(
}
fn scope_for_offset(
db: &dyn HirDatabase,
scopes: &ExprScopes,
source_map: &BodySourceMap,
offset: InFile<TextUnit>,
@ -317,21 +318,63 @@ fn scope_for_offset(
if source.file_id != offset.file_id {
return None;
}
let syntax_node_ptr = source.value.syntax_node_ptr();
Some((syntax_node_ptr, scope))
let root = source.file_syntax(db.upcast());
let node = source.value.to_node(&root);
Some((node.syntax().text_range(), scope))
})
// find containing scope
.min_by_key(|(ptr, _scope)| {
.min_by_key(|(expr_range, _scope)| {
(
!(ptr.range().start() <= offset.value && offset.value <= ptr.range().end()),
ptr.range().len(),
!(expr_range.start() <= offset.value && offset.value <= expr_range.end()),
expr_range.len(),
)
})
.map(|(ptr, scope)| {
adjust(scopes, source_map, ptr, offset.file_id, offset.value).unwrap_or(*scope)
.map(|(expr_range, scope)| {
adjust(db, scopes, source_map, expr_range, offset.file_id, offset.value)
.unwrap_or(*scope)
})
}
// XXX: during completion, cursor might be outside of any particular
// expression. Try to figure out the correct scope...
//
// Given the range of the expression scope picked by `scope_for_offset`,
// look for a better-fitting *child* scope: one whose syntax range lies
// strictly inside `expr_range` and starts at or before `offset`.
fn adjust(
db: &dyn HirDatabase,
scopes: &ExprScopes,
source_map: &BodySourceMap,
expr_range: TextRange,
file_id: HirFileId,
offset: TextUnit,
) -> Option<ScopeId> {
let child_scopes = scopes
.scope_by_expr()
.iter()
.filter_map(|(id, scope)| {
// Map each scope's expression back to its syntax range in `file_id`.
let source = source_map.expr_syntax(*id).ok()?;
// FIXME: correctly handle macro expansion
if source.file_id != file_id {
return None;
}
let root = source.file_syntax(db.upcast());
let node = source.value.to_node(&root);
Some((node.syntax().text_range(), scope))
})
// Keep only proper sub-ranges of `expr_range` that start at or before the cursor.
.filter(|(range, _)| {
range.start() <= offset && range.is_subrange(&expr_range) && *range != expr_range
});
child_scopes
// The comparator treats a range that contains the other as Greater, so
// `max_by` selects the widest matching candidate; disjoint candidates
// tie-break by start position (the later start wins under `max_by`).
.max_by(|(r1, _), (r2, _)| {
if r2.is_subrange(&r1) {
std::cmp::Ordering::Greater
} else if r1.is_subrange(&r2) {
std::cmp::Ordering::Less
} else {
r1.start().cmp(&r2.start())
}
})
.map(|(_ptr, scope)| *scope)
}
pub(crate) fn resolve_hir_path(
db: &dyn HirDatabase,
resolver: &Resolver,
@ -376,41 +419,3 @@ pub(crate) fn resolve_hir_path(
.map(|def| PathResolution::Macro(def.into()))
})
}
// XXX: during completion, cursor might be outside of any particular
// expression. Try to figure out the correct scope...
//
// NOTE(review): this is the pre-refactor variant being removed by the diff
// above; it derives candidate ranges from `SyntaxNodePtr`s rather than from
// re-parsed syntax nodes. Logic otherwise mirrors the new `adjust`.
fn adjust(
scopes: &ExprScopes,
source_map: &BodySourceMap,
ptr: SyntaxNodePtr,
file_id: HirFileId,
offset: TextUnit,
) -> Option<ScopeId> {
let r = ptr.range();
let child_scopes = scopes
.scope_by_expr()
.iter()
.filter_map(|(id, scope)| {
// Map each scope's expression to a syntax pointer in `file_id`.
let source = source_map.expr_syntax(*id).ok()?;
// FIXME: correctly handle macro expansion
if source.file_id != file_id {
return None;
}
let syntax_node_ptr = source.value.syntax_node_ptr();
Some((syntax_node_ptr, scope))
})
.map(|(ptr, scope)| (ptr.range(), scope))
// Keep only proper sub-ranges of `r` that start at or before the cursor.
.filter(|(range, _)| range.start() <= offset && range.is_subrange(&r) && *range != r);
child_scopes
// A range containing the other compares Greater, so `max_by` picks the
// widest matching candidate; disjoint ranges tie-break by start position.
.max_by(|(r1, _), (r2, _)| {
if r2.is_subrange(&r1) {
std::cmp::Ordering::Greater
} else if r1.is_subrange(&r2) {
std::cmp::Ordering::Less
} else {
r1.start().cmp(&r2.start())
}
})
.map(|(_ptr, scope)| *scope)
}

View File

@ -210,7 +210,7 @@ pub struct BodySourceMap {
expr_map_back: ArenaMap<ExprId, Result<ExprSource, SyntheticSyntax>>,
pat_map: FxHashMap<PatSource, PatId>,
pat_map_back: ArenaMap<PatId, Result<PatSource, SyntheticSyntax>>,
field_map: FxHashMap<(ExprId, usize), AstPtr<ast::RecordField>>,
field_map: FxHashMap<(ExprId, usize), InFile<AstPtr<ast::RecordField>>>,
expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, HirFileId>,
}
@ -303,7 +303,7 @@ impl BodySourceMap {
self.pat_map.get(&src).cloned()
}
pub fn field_syntax(&self, expr: ExprId, field: usize) -> AstPtr<ast::RecordField> {
pub fn field_syntax(&self, expr: ExprId, field: usize) -> InFile<AstPtr<ast::RecordField>> {
self.field_map[&(expr, field)].clone()
}
}

View File

@ -320,7 +320,8 @@ impl ExprCollector<'_> {
let res = self.alloc_expr(record_lit, syntax_ptr);
for (i, ptr) in field_ptrs.into_iter().enumerate() {
self.source_map.field_map.insert((res, i), ptr);
let src = self.expander.to_source(ptr);
self.source_map.field_map.insert((res, i), src);
}
res
}
@ -650,6 +651,7 @@ impl ExprCollector<'_> {
ast::Pat::SlicePat(p) => {
let SlicePatComponents { prefix, slice, suffix } = p.components();
// FIXME properly handle `DotDotPat`
Pat::Slice {
prefix: prefix.into_iter().map(|p| self.collect_pat(p)).collect(),
slice: slice.map(|p| self.collect_pat(p)),
@ -666,9 +668,15 @@ impl ExprCollector<'_> {
Pat::Missing
}
}
ast::Pat::DotDotPat(_) => unreachable!(
"`DotDotPat` requires special handling and should not be mapped to a Pat."
),
ast::Pat::DotDotPat(_) => {
// `DotDotPat` requires special handling and should not be mapped
// to a Pat. Here we are using `Pat::Missing` as a fallback for
// when `DotDotPat` is mapped to `Pat`, which can easily happen
// when the source code being analyzed has a malformed pattern
// which includes `..` in a place where it isn't valid.
Pat::Missing
}
// FIXME: implement
ast::Pat::BoxPat(_) | ast::Pat::RangePat(_) | ast::Pat::MacroPat(_) => Pat::Missing,
};

View File

@ -5,7 +5,7 @@
use std::fmt;
use hir_expand::name::{name, Name};
use hir_expand::name::{name, AsName, Name};
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum Signedness {
@ -75,33 +75,39 @@ impl BuiltinType {
];
}
impl fmt::Display for BuiltinType {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let type_name = match self {
BuiltinType::Char => "char",
BuiltinType::Bool => "bool",
BuiltinType::Str => "str",
impl AsName for BuiltinType {
fn as_name(&self) -> Name {
match self {
BuiltinType::Char => name![char],
BuiltinType::Bool => name![bool],
BuiltinType::Str => name![str],
BuiltinType::Int(BuiltinInt { signedness, bitness }) => match (signedness, bitness) {
(Signedness::Signed, IntBitness::Xsize) => "isize",
(Signedness::Signed, IntBitness::X8) => "i8",
(Signedness::Signed, IntBitness::X16) => "i16",
(Signedness::Signed, IntBitness::X32) => "i32",
(Signedness::Signed, IntBitness::X64) => "i64",
(Signedness::Signed, IntBitness::X128) => "i128",
(Signedness::Signed, IntBitness::Xsize) => name![isize],
(Signedness::Signed, IntBitness::X8) => name![i8],
(Signedness::Signed, IntBitness::X16) => name![i16],
(Signedness::Signed, IntBitness::X32) => name![i32],
(Signedness::Signed, IntBitness::X64) => name![i64],
(Signedness::Signed, IntBitness::X128) => name![i128],
(Signedness::Unsigned, IntBitness::Xsize) => "usize",
(Signedness::Unsigned, IntBitness::X8) => "u8",
(Signedness::Unsigned, IntBitness::X16) => "u16",
(Signedness::Unsigned, IntBitness::X32) => "u32",
(Signedness::Unsigned, IntBitness::X64) => "u64",
(Signedness::Unsigned, IntBitness::X128) => "u128",
(Signedness::Unsigned, IntBitness::Xsize) => name![usize],
(Signedness::Unsigned, IntBitness::X8) => name![u8],
(Signedness::Unsigned, IntBitness::X16) => name![u16],
(Signedness::Unsigned, IntBitness::X32) => name![u32],
(Signedness::Unsigned, IntBitness::X64) => name![u64],
(Signedness::Unsigned, IntBitness::X128) => name![u128],
},
BuiltinType::Float(BuiltinFloat { bitness }) => match bitness {
FloatBitness::X32 => "f32",
FloatBitness::X64 => "f64",
FloatBitness::X32 => name![f32],
FloatBitness::X64 => name![f64],
},
};
f.write_str(type_name)
}
}
}
impl fmt::Display for BuiltinType {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let type_name = self.as_name();
type_name.fmt(f)
}
}

View File

@ -20,7 +20,7 @@ impl Diagnostic for UnresolvedModule {
"unresolved module".to_string()
}
fn source(&self) -> InFile<SyntaxNodePtr> {
InFile { file_id: self.file, value: self.decl.clone().into() }
InFile::new(self.file, self.decl.clone().into())
}
fn as_any(&self) -> &(dyn Any + Send + 'static) {
self

View File

@ -7,7 +7,7 @@ use crate::{
visibility::Visibility,
CrateId, ModuleDefId, ModuleId,
};
use hir_expand::name::{known, Name};
use hir_expand::name::{known, AsName, Name};
use test_utils::tested_by;
const MAX_PATH_LEN: usize = 15;
@ -113,6 +113,11 @@ fn find_path_inner(
}
}
// - if the item is a builtin, it's in scope
if let ItemInNs::Types(ModuleDefId::BuiltinType(builtin)) = item {
return Some(ModPath::from_segments(PathKind::Plain, vec![builtin.as_name()]));
}
// Recursive case:
// - if the item is an enum variant, refer to it via the enum
if let Some(ModuleDefId::EnumVariantId(variant)) = item.as_module_def_id() {
@ -523,4 +528,18 @@ mod tests {
"#;
check_found_path(code, "megaalloc::Arc");
}
// Builtin primitive type names (`u8`, `u16`, ...) must be found as plain
// paths, even when one of them is also re-exported from a user module.
#[test]
fn builtins_are_in_scope() {
let code = r#"
//- /main.rs
<|>
pub mod primitive {
pub use u8;
}
"#;
check_found_path(code, "u8");
check_found_path(code, "u16");
}
}

View File

@ -16,16 +16,13 @@
use std::{any::Any, fmt};
use ra_syntax::{SyntaxNode, SyntaxNodePtr, TextRange};
use ra_syntax::{SyntaxNode, SyntaxNodePtr};
use crate::{db::AstDatabase, InFile};
pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static {
fn message(&self) -> String;
fn source(&self) -> InFile<SyntaxNodePtr>;
fn highlight_range(&self) -> TextRange {
self.source().value.range()
}
fn as_any(&self) -> &(dyn Any + Send + 'static);
}

View File

@ -21,7 +21,7 @@ impl Diagnostic for NoSuchField {
}
fn source(&self) -> InFile<SyntaxNodePtr> {
InFile { file_id: self.file, value: self.field.clone().into() }
InFile::new(self.file, self.field.clone().into())
}
fn as_any(&self) -> &(dyn Any + Send + 'static) {

View File

@ -682,10 +682,10 @@ mod diagnostics {
) {
match self {
InferenceDiagnostic::NoSuchField { expr, field } => {
let file = owner.lookup(db.upcast()).source(db.upcast()).file_id;
let source = owner.lookup(db.upcast()).source(db.upcast());
let (_, source_map) = db.body_with_source_map(owner.into());
let field = source_map.field_syntax(*expr, *field);
sink.push(NoSuchField { file, field })
sink.push(NoSuchField { file: source.file_id, field: field.value })
}
}
}

View File

@ -51,7 +51,7 @@ impl<'a> InferenceContext<'a> {
// Trivial cases, this should go after `never` check to
// avoid infer result type to be never
_ => {
if self.table.unify_inner_trivial(&from_ty, &to_ty) {
if self.table.unify_inner_trivial(&from_ty, &to_ty, 0) {
return true;
}
}
@ -175,7 +175,7 @@ impl<'a> InferenceContext<'a> {
return self.table.unify_substs(st1, st2, 0);
}
_ => {
if self.table.unify_inner_trivial(&derefed_ty, &to_ty) {
if self.table.unify_inner_trivial(&derefed_ty, &to_ty, 0) {
return true;
}
}

View File

@ -8,7 +8,8 @@ use test_utils::tested_by;
use super::{InferenceContext, Obligation};
use crate::{
BoundVar, Canonical, DebruijnIndex, InEnvironment, InferTy, Substs, Ty, TypeCtor, TypeWalk,
BoundVar, Canonical, DebruijnIndex, GenericPredicate, InEnvironment, InferTy, Substs, Ty,
TypeCtor, TypeWalk,
};
impl<'a> InferenceContext<'a> {
@ -226,16 +227,26 @@ impl InferenceTable {
(Ty::Apply(a_ty1), Ty::Apply(a_ty2)) if a_ty1.ctor == a_ty2.ctor => {
self.unify_substs(&a_ty1.parameters, &a_ty2.parameters, depth + 1)
}
_ => self.unify_inner_trivial(&ty1, &ty2),
_ => self.unify_inner_trivial(&ty1, &ty2, depth),
}
}
pub(super) fn unify_inner_trivial(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
pub(super) fn unify_inner_trivial(&mut self, ty1: &Ty, ty2: &Ty, depth: usize) -> bool {
match (ty1, ty2) {
(Ty::Unknown, _) | (_, Ty::Unknown) => true,
(Ty::Placeholder(p1), Ty::Placeholder(p2)) if *p1 == *p2 => true,
(Ty::Dyn(dyn1), Ty::Dyn(dyn2)) if dyn1.len() == dyn2.len() => {
for (pred1, pred2) in dyn1.iter().zip(dyn2.iter()) {
if !self.unify_preds(pred1, pred2, depth + 1) {
return false;
}
}
true
}
(Ty::Infer(InferTy::TypeVar(tv1)), Ty::Infer(InferTy::TypeVar(tv2)))
| (Ty::Infer(InferTy::IntVar(tv1)), Ty::Infer(InferTy::IntVar(tv2)))
| (Ty::Infer(InferTy::FloatVar(tv1)), Ty::Infer(InferTy::FloatVar(tv2)))
@ -268,6 +279,31 @@ impl InferenceTable {
}
}
/// Unifies two generic predicates of compatible shape.
///
/// `Implemented` bounds unify only when they name the same trait, by
/// unifying their substitutions; `Projection` bounds unify only when they
/// name the same associated type, by unifying both the projection
/// parameters and the projected types. Any other pairing fails.
/// `depth` is threaded through (incremented) to the recursive unifiers.
fn unify_preds(
&mut self,
pred1: &GenericPredicate,
pred2: &GenericPredicate,
depth: usize,
) -> bool {
match (pred1, pred2) {
(GenericPredicate::Implemented(tr1), GenericPredicate::Implemented(tr2))
if tr1.trait_ == tr2.trait_ =>
{
self.unify_substs(&tr1.substs, &tr2.substs, depth + 1)
}
(GenericPredicate::Projection(proj1), GenericPredicate::Projection(proj2))
if proj1.projection_ty.associated_ty == proj2.projection_ty.associated_ty =>
{
self.unify_substs(
&proj1.projection_ty.parameters,
&proj2.projection_ty.parameters,
depth + 1,
) && self.unify_inner(&proj1.ty, &proj2.ty, depth + 1)
}
_ => false,
}
}
/// If `ty` is a type variable with known type, returns that type;
/// otherwise, return ty.
pub fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> {

View File

@ -396,12 +396,12 @@ impl Substs {
}
/// Return Substs that replace each parameter by a bound variable.
pub(crate) fn bound_vars(generic_params: &Generics) -> Substs {
pub(crate) fn bound_vars(generic_params: &Generics, debruijn: DebruijnIndex) -> Substs {
Substs(
generic_params
.iter()
.enumerate()
.map(|(idx, _)| Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, idx)))
.map(|(idx, _)| Ty::Bound(BoundVar::new(debruijn, idx)))
.collect(),
)
}

View File

@ -39,6 +39,7 @@ use crate::{
pub struct TyLoweringContext<'a> {
pub db: &'a dyn HirDatabase,
pub resolver: &'a Resolver,
in_binders: DebruijnIndex,
/// Note: Conceptually, it's thinkable that we could be in a location where
/// some type params should be represented as placeholders, and others
/// should be converted to variables. I think in practice, this isn't
@ -53,7 +54,27 @@ impl<'a> TyLoweringContext<'a> {
let impl_trait_counter = std::cell::Cell::new(0);
let impl_trait_mode = ImplTraitLoweringMode::Disallowed;
let type_param_mode = TypeParamLoweringMode::Placeholder;
Self { db, resolver, impl_trait_mode, impl_trait_counter, type_param_mode }
let in_binders = DebruijnIndex::INNERMOST;
Self { db, resolver, in_binders, impl_trait_mode, impl_trait_counter, type_param_mode }
}
/// Runs `f` with a copy of this context whose binder depth is shifted in
/// by `debruijn`, then propagates the `impl Trait` counter back to `self`
/// so counting continues correctly after the nested lowering.
pub fn with_shifted_in<T>(
&self,
debruijn: DebruijnIndex,
f: impl FnOnce(&TyLoweringContext) -> T,
) -> T {
let new_ctx = Self {
in_binders: self.in_binders.shifted_in_from(debruijn),
// Cell is not Copy-shared across the two contexts; seed the copy
// with the current count and write the result back below.
impl_trait_counter: std::cell::Cell::new(self.impl_trait_counter.get()),
..*self
};
let result = f(&new_ctx);
self.impl_trait_counter.set(new_ctx.impl_trait_counter.get());
result
}
/// Consumes the context and returns it with its binder depth shifted in
/// by `debruijn`.
pub fn shifted_in(self, debruijn: DebruijnIndex) -> Self {
Self { in_binders: self.in_binders.shifted_in_from(debruijn), ..self }
}
pub fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self {
@ -134,22 +155,26 @@ impl Ty {
}
TypeRef::DynTrait(bounds) => {
let self_ty = Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, 0));
let predicates = bounds
.iter()
.flat_map(|b| GenericPredicate::from_type_bound(ctx, b, self_ty.clone()))
.collect();
let predicates = ctx.with_shifted_in(DebruijnIndex::ONE, |ctx| {
bounds
.iter()
.flat_map(|b| GenericPredicate::from_type_bound(ctx, b, self_ty.clone()))
.collect()
});
Ty::Dyn(predicates)
}
TypeRef::ImplTrait(bounds) => {
match ctx.impl_trait_mode {
ImplTraitLoweringMode::Opaque => {
let self_ty = Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, 0));
let predicates = bounds
.iter()
.flat_map(|b| {
GenericPredicate::from_type_bound(ctx, b, self_ty.clone())
})
.collect();
let predicates = ctx.with_shifted_in(DebruijnIndex::ONE, |ctx| {
bounds
.iter()
.flat_map(|b| {
GenericPredicate::from_type_bound(ctx, b, self_ty.clone())
})
.collect()
});
Ty::Opaque(predicates)
}
ImplTraitLoweringMode::Param => {
@ -180,7 +205,7 @@ impl Ty {
(0, 0, 0, 0)
};
Ty::Bound(BoundVar::new(
DebruijnIndex::INNERMOST,
ctx.in_binders,
idx as usize + parent_params + self_params + list_params,
))
}
@ -293,7 +318,7 @@ impl Ty {
TypeParamLoweringMode::Placeholder => Ty::Placeholder(param_id),
TypeParamLoweringMode::Variable => {
let idx = generics.param_idx(param_id).expect("matching generics");
Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, idx))
Ty::Bound(BoundVar::new(ctx.in_binders, idx))
}
}
}
@ -303,7 +328,9 @@ impl Ty {
TypeParamLoweringMode::Placeholder => {
Substs::type_params_for_generics(&generics)
}
TypeParamLoweringMode::Variable => Substs::bound_vars(&generics),
TypeParamLoweringMode::Variable => {
Substs::bound_vars(&generics, ctx.in_binders)
}
};
ctx.db.impl_self_ty(impl_id).subst(&substs)
}
@ -313,7 +340,9 @@ impl Ty {
TypeParamLoweringMode::Placeholder => {
Substs::type_params_for_generics(&generics)
}
TypeParamLoweringMode::Variable => Substs::bound_vars(&generics),
TypeParamLoweringMode::Variable => {
Substs::bound_vars(&generics, ctx.in_binders)
}
};
ctx.db.ty(adt.into()).subst(&substs)
}
@ -797,7 +826,7 @@ fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
/// function body.
fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> Binders<Ty> {
let generics = generics(db.upcast(), def.into());
let substs = Substs::bound_vars(&generics);
let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST);
Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs))
}
@ -851,7 +880,7 @@ fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Binders<T
return type_for_adt(db, def.into());
}
let generics = generics(db.upcast(), def.into());
let substs = Substs::bound_vars(&generics);
let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST);
Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs))
}
@ -876,13 +905,13 @@ fn type_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -
return type_for_adt(db, def.parent.into());
}
let generics = generics(db.upcast(), def.parent.into());
let substs = Substs::bound_vars(&generics);
let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST);
Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs))
}
fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
let generics = generics(db.upcast(), adt.into());
let substs = Substs::bound_vars(&generics);
let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST);
Binders::new(substs.len(), Ty::apply(TypeCtor::Adt(adt), substs))
}
@ -892,7 +921,7 @@ fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
let type_ref = &db.type_alias_data(t).type_ref;
let substs = Substs::bound_vars(&generics);
let substs = Substs::bound_vars(&generics, DebruijnIndex::INNERMOST);
let inner = Ty::from_hir(&ctx, type_ref.as_ref().unwrap_or(&TypeRef::Error));
Binders::new(substs.len(), inner)
}

View File

@ -484,3 +484,52 @@ fn main() {
assert_eq!("()", super::type_at_pos(&db, pos));
}
// Named for issue 3999 (slice variant): inference over a slice pattern
// containing a `ps @ ..` rest binding must produce the snapshot below
// rather than panicking.
#[test]
fn issue_3999_slice() {
assert_snapshot!(
infer(r#"
fn foo(params: &[usize]) {
match params {
[ps @ .., _] => {}
}
}
"#),
@r###"
[8; 14) 'params': &[usize]
[26; 81) '{ ... } }': ()
[32; 79) 'match ... }': ()
[38; 44) 'params': &[usize]
[55; 67) '[ps @ .., _]': [usize]
[65; 66) '_': usize
[71; 73) '{}': ()
"###
);
}
// Named for issue 3999 (struct variant); companion to `issue_3999_slice`.
#[test]
fn issue_3999_struct() {
// rust-analyzer should not panic on seeing this malformed
// record pattern.
assert_snapshot!(
infer(r#"
struct Bar {
a: bool,
}
fn foo(b: Bar) {
match b {
Bar { a: .. } => {},
}
}
"#),
@r###"
[36; 37) 'b': Bar
[44; 96) '{ ... } }': ()
[50; 94) 'match ... }': ()
[56; 57) 'b': Bar
[68; 81) 'Bar { a: .. }': Bar
[77; 79) '..': bool
[85; 87) '{}': ()
"###
);
}

View File

@ -1210,6 +1210,42 @@ fn test(x: dyn Trait<u64>, y: &dyn Trait<u64>) {
);
}
// Method resolution through a `&dyn Trait<T, U>` returned by an inherent
// method: `s.bar().baz()` must pick up `Trait2`'s impl for the trait
// object and instantiate its type parameters to (u32, i32), per the
// snapshot below.
#[test]
fn dyn_trait_in_impl() {
assert_snapshot!(
infer(r#"
trait Trait<T, U> {
fn foo(&self) -> (T, U);
}
struct S<T, U> {}
impl<T, U> S<T, U> {
fn bar(&self) -> &dyn Trait<T, U> { loop {} }
}
trait Trait2<T, U> {
fn baz(&self) -> (T, U);
}
impl<T, U> Trait2<T, U> for dyn Trait<T, U> { }
fn test(s: S<u32, i32>) {
s.bar().baz();
}
"#),
@r###"
[33; 37) 'self': &Self
[103; 107) 'self': &S<T, U>
[129; 140) '{ loop {} }': &dyn Trait<T, U>
[131; 138) 'loop {}': !
[136; 138) '{}': ()
[176; 180) 'self': &Self
[252; 253) 's': S<u32, i32>
[268; 290) '{ ...z(); }': ()
[274; 275) 's': S<u32, i32>
[274; 281) 's.bar()': &dyn Trait<u32, i32>
[274; 287) 's.bar().baz()': (u32, i32)
"###
);
}
#[test]
fn dyn_trait_bare() {
assert_snapshot!(
@ -2204,3 +2240,201 @@ fn test(x: Box<dyn Trait>) {
);
assert_eq!(t, "()");
}
// `"foo".to_owned()` must resolve through the local `ToOwned` impl for
// `str` and infer `String` via the `Owned` associated type.
#[test]
fn string_to_owned() {
let t = type_at(
r#"
//- /main.rs
struct String {}
pub trait ToOwned {
type Owned;
fn to_owned(&self) -> Self::Owned;
}
impl ToOwned for str {
type Owned = String;
}
fn test() {
"foo".to_owned()<|>;
}
"#,
);
assert_eq!(t, "String");
}
#[test]
fn iterator_chain() {
assert_snapshot!(
infer(r#"
//- /main.rs
#[lang = "fn_once"]
trait FnOnce<Args> {
type Output;
}
#[lang = "fn_mut"]
trait FnMut<Args>: FnOnce<Args> { }
enum Option<T> { Some(T), None }
use Option::*;
pub trait Iterator {
type Item;
fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where
F: FnMut(Self::Item) -> Option<B>,
{ loop {} }
fn for_each<F>(self, f: F)
where
F: FnMut(Self::Item),
{ loop {} }
}
pub trait IntoIterator {
type Item;
type IntoIter: Iterator<Item = Self::Item>;
fn into_iter(self) -> Self::IntoIter;
}
pub struct FilterMap<I, F> { }
impl<B, I: Iterator, F> Iterator for FilterMap<I, F>
where
F: FnMut(I::Item) -> Option<B>,
{
type Item = B;
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator> IntoIterator for I {
type Item = I::Item;
type IntoIter = I;
fn into_iter(self) -> I {
self
}
}
struct Vec<T> {}
impl<T> Vec<T> {
fn new() -> Self { loop {} }
}
impl<T> IntoIterator for Vec<T> {
type Item = T;
type IntoIter = IntoIter<T>;
}
pub struct IntoIter<T> { }
impl<T> Iterator for IntoIter<T> {
type Item = T;
}
fn main() {
Vec::<i32>::new().into_iter()
.filter_map(|x| if x > 0 { Some(x as u32) } else { None })
.for_each(|y| { y; });
}
"#),
@r###"
[240; 244) 'self': Self
[246; 247) 'f': F
[331; 342) '{ loop {} }': FilterMap<Self, F>
[333; 340) 'loop {}': !
[338; 340) '{}': ()
[363; 367) 'self': Self
[369; 370) 'f': F
[419; 430) '{ loop {} }': ()
[421; 428) 'loop {}': !
[426; 428) '{}': ()
[539; 543) 'self': Self
[868; 872) 'self': I
[879; 899) '{ ... }': I
[889; 893) 'self': I
[958; 969) '{ loop {} }': Vec<T>
[960; 967) 'loop {}': !
[965; 967) '{}': ()
[1156; 1287) '{ ... }); }': ()
[1162; 1177) 'Vec::<i32>::new': fn new<i32>() -> Vec<i32>
[1162; 1179) 'Vec::<...:new()': Vec<i32>
[1162; 1191) 'Vec::<...iter()': IntoIter<i32>
[1162; 1256) 'Vec::<...one })': FilterMap<IntoIter<i32>, |i32| -> Option<u32>>
[1162; 1284) 'Vec::<... y; })': ()
[1210; 1255) '|x| if...None }': |i32| -> Option<u32>
[1211; 1212) 'x': i32
[1214; 1255) 'if x >...None }': Option<u32>
[1217; 1218) 'x': i32
[1217; 1222) 'x > 0': bool
[1221; 1222) '0': i32
[1223; 1241) '{ Some...u32) }': Option<u32>
[1225; 1229) 'Some': Some<u32>(u32) -> Option<u32>
[1225; 1239) 'Some(x as u32)': Option<u32>
[1230; 1231) 'x': i32
[1230; 1238) 'x as u32': u32
[1247; 1255) '{ None }': Option<u32>
[1249; 1253) 'None': Option<u32>
[1273; 1283) '|y| { y; }': |u32| -> ()
[1274; 1275) 'y': u32
[1277; 1283) '{ y; }': ()
[1279; 1280) 'y': u32
"###
);
}
// An associated type defined in terms of another trait's associated type
// (`type Output = T::OutputA`) must resolve fully: `Bar::foo()` goes
// through `B::Output` -> `A::OutputA` -> `Foo`.
#[test]
fn nested_assoc() {
let t = type_at(
r#"
//- /main.rs
struct Bar;
struct Foo;
trait A {
type OutputA;
}
impl A for Bar {
type OutputA = Foo;
}
trait B {
type Output;
fn foo() -> Self::Output;
}
impl<T:A> B for T {
type Output = T::OutputA;
fn foo() -> Self::Output { loop {} }
}
fn main() {
Bar::foo()<|>;
}
"#,
);
assert_eq!(t, "Foo");
}
// Passing an already-`&dyn Foo` value where `&dyn Foo` is expected:
// run with mismatch reporting enabled; the snapshot contains only plain
// inference entries (no recorded type mismatches).
#[test]
fn trait_object_no_coercion() {
assert_snapshot!(
infer_with_mismatches(r#"
trait Foo {}
fn foo(x: &dyn Foo) {}
fn test(x: &dyn Foo) {
foo(x);
}
"#, true),
@r###"
[22; 23) 'x': &dyn Foo
[35; 37) '{}': ()
[47; 48) 'x': &dyn Foo
[60; 75) '{ foo(x); }': ()
[66; 69) 'foo': fn foo(&dyn Foo)
[66; 72) 'foo(x)': ()
[70; 71) 'x': &dyn Foo
"###
);
}

View File

@ -17,7 +17,7 @@ use ra_db::{
use super::{builtin, AssocTyValue, Canonical, ChalkContext, Impl, Obligation};
use crate::{
db::HirDatabase, display::HirDisplay, method_resolution::TyFingerprint, utils::generics,
ApplicationTy, GenericPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
ApplicationTy, DebruijnIndex, GenericPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
};
pub(super) mod tls;
@ -815,7 +815,7 @@ pub(crate) fn associated_ty_data_query(
// Lower bounds -- we could/should maybe move this to a separate query in `lower`
let type_alias_data = db.type_alias_data(type_alias);
let generic_params = generics(db.upcast(), type_alias.into());
let bound_vars = Substs::bound_vars(&generic_params);
let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST);
let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast());
let ctx = crate::TyLoweringContext::new(db, &resolver)
.with_type_param_mode(crate::lower::TypeParamLoweringMode::Variable);
@ -849,7 +849,7 @@ pub(crate) fn trait_datum_query(
let trait_data = db.trait_data(trait_);
debug!("trait {:?} = {:?}", trait_id, trait_data.name);
let generic_params = generics(db.upcast(), trait_.into());
let bound_vars = Substs::bound_vars(&generic_params);
let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST);
let flags = chalk_rust_ir::TraitFlags {
auto: trait_data.auto,
upstream: trait_.lookup(db.upcast()).container.module(db.upcast()).krate != krate,
@ -888,7 +888,7 @@ pub(crate) fn struct_datum_query(
.as_generic_def()
.map(|generic_def| {
let generic_params = generics(db.upcast(), generic_def);
let bound_vars = Substs::bound_vars(&generic_params);
let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST);
convert_where_clauses(db, generic_def, &bound_vars)
})
.unwrap_or_else(Vec::new);
@ -934,7 +934,7 @@ fn impl_def_datum(
let impl_data = db.impl_data(impl_id);
let generic_params = generics(db.upcast(), impl_id.into());
let bound_vars = Substs::bound_vars(&generic_params);
let bound_vars = Substs::bound_vars(&generic_params, DebruijnIndex::INNERMOST);
let trait_ = trait_ref.trait_;
let impl_type = if impl_id.lookup(db.upcast()).container.module(db.upcast()).krate == krate {
chalk_rust_ir::ImplType::Local

View File

@ -142,11 +142,11 @@ fn add_function_impl(
CompletionItemKind::Function
};
let snippet = format!("{} {{}}", display);
let snippet = format!("{} {{\n $0\n}}", display);
let range = TextRange::from_to(fn_def_node.text_range().start(), ctx.source_range().end());
builder.text_edit(TextEdit::replace(range, snippet)).kind(completion_kind).add_to(acc);
builder.snippet_edit(TextEdit::replace(range, snippet)).kind(completion_kind).add_to(acc);
}
fn add_type_alias_impl(
@ -217,9 +217,10 @@ fn make_const_compl_syntax(const_: &ast::ConstDef) -> String {
#[cfg(test)]
mod tests {
use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind};
use insta::assert_debug_snapshot;
use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind};
// Test helper: runs `Magic`-kind completion on the fixture and returns
// the raw completion items for snapshot assertions.
fn complete(code: &str) -> Vec<CompletionItem> {
do_completion(code, CompletionKind::Magic)
}
@ -255,7 +256,7 @@ mod tests {
label: "fn test()",
source_range: [209; 210),
delete: [209; 210),
insert: "fn test() {}",
insert: "fn test() {\n $0\n}",
kind: Function,
lookup: "test",
},
@ -313,7 +314,7 @@ mod tests {
label: "fn test()",
source_range: [139; 140),
delete: [139; 140),
insert: "fn test() {}",
insert: "fn test() {\n $0\n}",
kind: Function,
lookup: "test",
},
@ -342,7 +343,7 @@ mod tests {
label: "fn foo()",
source_range: [141; 142),
delete: [138; 142),
insert: "fn foo() {}",
insert: "fn foo() {\n $0\n}",
kind: Function,
lookup: "foo",
},
@ -374,7 +375,7 @@ mod tests {
label: "fn foo_bar()",
source_range: [200; 201),
delete: [197; 201),
insert: "fn foo_bar() {}",
insert: "fn foo_bar() {\n $0\n}",
kind: Function,
lookup: "foo_bar",
},
@ -425,7 +426,7 @@ mod tests {
label: "fn foo()",
source_range: [144; 145),
delete: [141; 145),
insert: "fn foo<T>() {}",
insert: "fn foo<T>() {\n $0\n}",
kind: Function,
lookup: "foo",
},
@ -454,7 +455,7 @@ mod tests {
label: "fn foo()",
source_range: [166; 167),
delete: [163; 167),
insert: "fn foo<T>()\nwhere T: Into<String> {}",
insert: "fn foo<T>()\nwhere T: Into<String> {\n $0\n}",
kind: Function,
lookup: "foo",
},

View File

@ -1,6 +1,10 @@
//! Completion of names from the current scope, e.g. locals and imported items.
use hir::ScopeDef;
use test_utils::tested_by;
use crate::completion::{CompletionContext, Completions};
use ra_syntax::AstNode;
pub(super) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionContext) {
if !ctx.is_trivial_path {
@ -14,12 +18,23 @@ pub(super) fn complete_unqualified_path(acc: &mut Completions, ctx: &CompletionC
return;
}
ctx.scope().process_all_names(&mut |name, res| acc.add_resolution(ctx, name.to_string(), &res));
ctx.scope().process_all_names(&mut |name, res| {
if ctx.use_item_syntax.is_some() {
if let (ScopeDef::Unknown, Some(name_ref)) = (&res, &ctx.name_ref_syntax) {
if name_ref.syntax().text() == name.to_string().as_str() {
tested_by!(self_fulfilling_completion);
return;
}
}
}
acc.add_resolution(ctx, name.to_string(), &res)
});
}
#[cfg(test)]
mod tests {
use insta::assert_debug_snapshot;
use test_utils::covers;
use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind};
@ -27,6 +42,29 @@ mod tests {
do_completion(ra_fixture, CompletionKind::Reference)
}
// Completing `use foo<|>` must not offer `foo` itself — the unresolved
// identifier currently being typed — so only `collections` remains in
// the result list (covers the `self_fulfilling_completion` mark).
#[test]
fn self_fulfilling_completion() {
covers!(self_fulfilling_completion);
assert_debug_snapshot!(
do_reference_completion(
r#"
use foo<|>
use std::collections;
"#,
),
@r###"
[
CompletionItem {
label: "collections",
source_range: [21; 24),
delete: [21; 24),
insert: "collections",
},
]
"###
);
}
#[test]
fn bind_pat_and_path_ignore_at() {
assert_debug_snapshot!(

View File

@ -1,4 +1,8 @@
//! FIXME: write short doc here
//! Collects diagnostics & fixits for a single file.
//!
//! The tricky bit here is that diagnostics are produced by hir in terms of
//! macro-expanded files, but we need to present them to the users in terms of
//! original files. So we need to map the ranges.
use std::cell::RefCell;
@ -46,7 +50,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
let mut sink = DiagnosticSink::new(|d| {
res.borrow_mut().push(Diagnostic {
message: d.message(),
range: d.highlight_range(),
range: sema.diagnostics_range(d).range,
severity: Severity::Error,
fix: None,
})
@ -62,7 +66,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
let create_file = FileSystemEdit::CreateFile { source_root, path };
let fix = SourceChange::file_system_edit("create module", create_file);
res.borrow_mut().push(Diagnostic {
range: d.highlight_range(),
range: sema.diagnostics_range(d).range,
message: d.message(),
severity: Severity::Error,
fix: Some(fix),
@ -95,7 +99,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
};
res.borrow_mut().push(Diagnostic {
range: d.highlight_range(),
range: sema.diagnostics_range(d).range,
message: d.message(),
severity: Severity::Error,
fix,
@ -103,7 +107,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
})
.on::<hir::diagnostics::MissingMatchArms, _>(|d| {
res.borrow_mut().push(Diagnostic {
range: d.highlight_range(),
range: sema.diagnostics_range(d).range,
message: d.message(),
severity: Severity::Error,
fix: None,
@ -115,7 +119,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
let edit = TextEdit::replace(node.syntax().text_range(), replacement);
let fix = SourceChange::source_file_edit_from("wrap with ok", file_id, edit);
res.borrow_mut().push(Diagnostic {
range: d.highlight_range(),
range: sema.diagnostics_range(d).range,
message: d.message(),
severity: Severity::Error,
fix: Some(fix),
@ -621,6 +625,62 @@ mod tests {
"###);
}
// Diagnostics produced inside a macro expansion (the `id![...]` call) must be
// mapped back to a range in the original source file, not the macro-expanded
// file. The snapshot pins both the reported range ([224; 233), pointing at the
// macro call site) and the fix edit, whose delete range is relative to the
// expansion.
#[test]
fn range_mapping_out_of_macros() {
let (analysis, file_id) = single_file(
r"
fn some() {}
fn items() {}
fn here() {}

macro_rules! id {
($($tt:tt)*) => { $($tt)*};
}

fn main() {
let _x = id![Foo { a: 42 }];
}

pub struct Foo {
pub a: i32,
pub b: i32,
}
",
);
// A single "missing field `b`" diagnostic is expected for the struct literal.
let diagnostics = analysis.diagnostics(file_id).unwrap();
assert_debug_snapshot!(diagnostics, @r###"
[
Diagnostic {
message: "Missing structure fields:\n- b",
range: [224; 233),
fix: Some(
SourceChange {
label: "fill struct fields",
source_file_edits: [
SourceFileEdit {
file_id: FileId(
1,
),
edit: TextEdit {
atoms: [
AtomTextEdit {
delete: [3; 9),
insert: "{a:42, b: ()}",
},
],
},
},
],
file_system_edits: [],
cursor_position: None,
},
),
severity: Error,
},
]
"###);
}
#[test]
fn test_check_unnecessary_braces_in_use_statement() {
check_not_applicable(

View File

@ -237,7 +237,8 @@ fn should_show_param_hint(
) -> bool {
if param_name.is_empty()
|| is_argument_similar_to_param(argument, param_name)
|| Some(param_name) == fn_signature.name.as_ref().map(String::as_str)
|| Some(param_name.trim_start_matches('_'))
== fn_signature.name.as_ref().map(|s| s.trim_start_matches('_'))
{
return false;
}
@ -255,6 +256,8 @@ fn should_show_param_hint(
/// Heuristic name match between a call-site argument and its parameter name.
///
/// Leading underscores are stripped from both sides, and the argument's
/// source text (after `remove_ref` — presumably strips reference syntax;
/// see that helper) must start or end with the parameter name for the two
/// to be considered "similar" (in which case an inlay hint is redundant).
fn is_argument_similar_to_param(argument: &ast::Expr, param_name: &str) -> bool {
let wanted = param_name.trim_start_matches('_');
let arg_text = remove_ref(argument.clone()).syntax().to_string();
let arg_text = arg_text.trim_start_matches('_');
arg_text.starts_with(wanted) || arg_text.ends_with(wanted)
}
@ -1094,8 +1097,10 @@ struct Param {}
fn different_order(param: &Param) {}
fn different_order_mut(param: &mut Param) {}
fn has_underscore(_param: bool) {}
fn twiddle(twiddle: bool) {}
fn doo(_doo: bool) {}
fn main() {
let container: TestVarContainer = TestVarContainer { test_var: 42 };
@ -1112,11 +1117,15 @@ fn main() {
test_processed.frob(false);
twiddle(true);
doo(true);
let param_begin: Param = Param {};
different_order(&param_begin);
different_order(&mut param_begin);
let param: bool = true;
has_underscore(param);
let a: f64 = 7.0;
let b: f64 = 4.0;
let _: f64 = a.div_euclid(b);

View File

@ -8,4 +8,5 @@ test_utils::marks!(
test_resolve_parent_module_on_module_decl
search_filters_by_range
dont_insert_macro_call_parens_unncessary
self_fulfilling_completion
);

View File

@ -174,7 +174,8 @@ pub(crate) fn highlight(
}
assert_eq!(res.len(), 1, "after DFS traversal, the stack should only contain a single element");
let res = res.pop().unwrap();
let mut res = res.pop().unwrap();
res.sort_by_key(|range| range.range.start());
// Check that ranges are sorted and disjoint
assert!(res
.iter()

View File

@ -156,3 +156,15 @@ fn main() {
fs::write(dst_file, &actual_html).unwrap();
assert_eq_text!(expected_html, actual_html);
}
// Smoke test: highlighting a file that mixes an attribute, a macro definition
// and a stray closing brace must not panic. The sortedness/disjointness of
// the produced ranges is asserted inside `highlight` itself (see the
// assertions in that function), so completing the call is the whole check.
#[test]
fn ranges_sorted() {
let (analysis, file_id) = single_file(
r#"
#[foo(bar = "bar")]
macro_rules! test {}
}"#
.trim(),
);
let _ = analysis.highlight(file_id).unwrap();
}

View File

@ -14,6 +14,7 @@ ra_mbe = { path = "../ra_mbe" }
ra_proc_macro = { path = "../ra_proc_macro" }
goblin = "0.2.1"
libloading = "0.6.0"
memmap = "0.7"
test_utils = { path = "../test_utils" }
[dev-dependencies]

View File

@ -1,10 +1,12 @@
//! Handles dynamic library loading for proc macro
use crate::{proc_macro::bridge, rustc_server::TokenStream};
use std::fs::File;
use std::path::Path;
use goblin::{mach::Mach, Object};
use libloading::Library;
use memmap::Mmap;
use ra_proc_macro::ProcMacroKind;
use std::io::Error as IoError;
@ -16,53 +18,52 @@ fn invalid_data_err(e: impl Into<Box<dyn std::error::Error + Send + Sync>>) -> I
IoError::new(IoErrorKind::InvalidData, e)
}
/// Collects every symbol name exported by the dynamic library at `file`,
/// dispatching on the detected object format (ELF, PE, or Mach-O).
///
/// Reads the whole file into memory before parsing; parse failures surface
/// as `InvalidData` IO errors. Formats that are not inspected (fat Mach-O,
/// archives, unknown) yield an empty `Vec`.
fn get_symbols_from_lib(file: &Path) -> Result<Vec<String>, IoError> {
let buffer = std::fs::read(file)?;
let object = Object::parse(&buffer).map_err(invalid_data_err)?;
match object {
Object::Elf(elf) => {
// ELF: take every name from the dynamic string table.
let symbols = elf.dynstrtab.to_vec().map_err(invalid_data_err)?;
let names = symbols.iter().map(|s| s.to_string()).collect();
Ok(names)
}
Object::PE(pe) => {
// PE: exported symbols only; unnamed exports drop out via `flat_map`.
let symbol_names =
pe.exports.iter().flat_map(|s| s.name).map(|n| n.to_string()).collect();
Ok(symbol_names)
}
Object::Mach(mach) => match mach {
Mach::Binary(binary) => {
let exports = binary.exports().map_err(invalid_data_err)?;
let names = exports
.into_iter()
.map(|s| {
// Per the macOS dlsym(3) man page:
// https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man3/dlsym.3.html
// unlike other dyld APIs, the symbol name passed to dlsym()
// must NOT be prepended with an underscore — so strip the
// leading one here.
if s.name.starts_with("_") {
s.name[1..].to_string()
} else {
s.name
}
})
.collect();
Ok(names)
}
// Fat (multi-architecture) Mach-O binaries are not inspected.
Mach::Fat(_) => Ok(vec![]),
},
Object::Archive(_) | Object::Unknown(_) => Ok(vec![]),
}
}
/// Returns true when `symbol` names the proc-macro derive registrar entry
/// point, i.e. it contains `NEW_REGISTRAR_SYMBOL` (a constant declared
/// elsewhere in this file, not visible in this chunk).
fn is_derive_registrar_symbol(symbol: &str) -> bool {
symbol.contains(NEW_REGISTRAR_SYMBOL)
}
fn find_registrar_symbol(file: &Path) -> Result<Option<String>, IoError> {
let symbols = get_symbols_from_lib(file)?;
Ok(symbols.into_iter().find(|s| is_derive_registrar_symbol(s)))
let file = File::open(file)?;
let buffer = unsafe { Mmap::map(&file)? };
let object = Object::parse(&buffer).map_err(invalid_data_err)?;
match object {
Object::Elf(elf) => {
let symbols = elf.dynstrtab.to_vec().map_err(invalid_data_err)?;
let name =
symbols.iter().find(|s| is_derive_registrar_symbol(s)).map(|s| s.to_string());
Ok(name)
}
Object::PE(pe) => {
let name = pe
.exports
.iter()
.flat_map(|s| s.name)
.find(|s| is_derive_registrar_symbol(s))
.map(|s| s.to_string());
Ok(name)
}
Object::Mach(Mach::Binary(binary)) => {
let exports = binary.exports().map_err(invalid_data_err)?;
let name = exports
.iter()
.map(|s| {
// In macos doc:
// https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man3/dlsym.3.html
// Unlike other dyld API's, the symbol name passed to dlsym() must NOT be
// prepended with an underscore.
if s.name.starts_with("_") {
&s.name[1..]
} else {
&s.name
}
})
.find(|s| is_derive_registrar_symbol(s))
.map(|s| s.to_string());
Ok(name)
}
_ => Ok(None),
}
}
/// Loads dynamic library in platform dependent manner.

View File

@ -75,9 +75,7 @@ pub(crate) fn load_cargo(
let proc_macro_client = if !with_proc_macro {
ProcMacroClient::dummy()
} else {
let mut path = std::env::current_exe()?;
path.pop();
path.push("rust-analyzer");
let path = std::env::current_exe()?;
ProcMacroClient::extern_process(&path, &["proc-macro"]).unwrap()
};
let host = load(&source_roots, ws, &mut vfs, receiver, extern_dirs, &proc_macro_client);

View File

@ -134,9 +134,7 @@ impl Config {
match get::<bool>(value, "/procMacro/enabled") {
Some(true) => {
if let Ok(mut path) = std::env::current_exe() {
path.pop();
path.push("rust-analyzer");
if let Ok(path) = std::env::current_exe() {
self.proc_macro_srv = Some((path.to_string_lossy().to_string(), vec!["proc-macro".to_string()]));
}
}

View File

@ -35,7 +35,7 @@ fn check_todo(path: &Path, text: &str) {
}
if text.contains("TODO") || text.contains("TOOD") || text.contains("todo!") {
panic!(
"\nTODO markers should not be committed to the master branch,\n\
"\nTODO markers or todo! macros should not be committed to the master branch,\n\
use FIXME instead\n\
{}\n",
path.display(),
@ -47,9 +47,9 @@ fn check_trailing_ws(path: &Path, text: &str) {
if is_exclude_dir(path, &["test_data"]) {
return;
}
for line in text.lines() {
for (line_number, line) in text.lines().enumerate() {
if line.chars().last().map(char::is_whitespace) == Some(true) {
panic!("Trailing whitespace in {}", path.display())
panic!("Trailing whitespace in {} at line {}", path.display(), line_number)
}
}
}