Aleksey Kladov 2021-01-20 01:56:11 +03:00
parent 63f509f492
commit 46b4f89c92
28 changed files with 69 additions and 67 deletions
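In short: the `syntax` crate is pointed at a local rowan checkout whose token `text()` returns `&str` instead of `&SmolStr`, and the rest of the tree is updated to match: `.as_str()` calls disappear, and the few call sites that need an owned string now convert explicitly with `.into()` or `SmolStr::from`. A minimal sketch of the caller-facing pattern, assuming the `smol_str` crate; the `Token` type here is a stand-in for illustration, not code from this commit:

use smol_str::SmolStr;

struct Token {
    text: SmolStr,
}

impl Token {
    // Old shape: fn text(&self) -> &SmolStr { &self.text }
    // New shape: hand out a plain string slice and keep SmolStr an
    // implementation detail of the tree.
    fn text(&self) -> &str {
        &self.text
    }
}

fn main() {
    let token = Token { text: SmolStr::new("derive") };

    // Borrowing callers get simpler: no `.as_str()` needed.
    assert!(token.text().starts_with("der"));

    // Callers that need ownership convert explicitly instead of cloning
    // a SmolStr they happened to be handed.
    let owned: SmolStr = token.text().into();
    assert_eq!(owned, "derive");
}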

Cargo.lock (generated)

@@ -1376,8 +1376,6 @@ checksum = "b5eb417147ba9860a96cfe72a0b93bf88fee1744b5636ec99ab20c1aa9376581"
 [[package]]
 name = "rowan"
 version = "0.11.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b3ae0ae5091cf38acfb834dbb6adcd45bb0d6b6a72ca5798e134195d2fa33574"
 dependencies = [
  "hashbrown",
  "memoffset",


@@ -1,6 +1,9 @@
 use itertools::Itertools;
 use stdx::format_to;
-use syntax::ast::{self, AstNode, AttrsOwner, GenericParamsOwner, NameOwner};
+use syntax::{
+    ast::{self, AstNode, AttrsOwner, GenericParamsOwner, NameOwner},
+    SmolStr,
+};
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -49,16 +52,16 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()
         format_to!(buf, "{}", type_params.syntax());
     }
     buf.push_str(" ");
-    buf.push_str(name.text().as_str());
+    buf.push_str(name.text());
     if let Some(type_params) = type_params {
         let lifetime_params = type_params
             .lifetime_params()
             .filter_map(|it| it.lifetime())
-            .map(|it| it.text().clone());
+            .map(|it| SmolStr::from(it.text()));
         let type_params = type_params
             .type_params()
             .filter_map(|it| it.name())
-            .map(|it| it.text().clone());
+            .map(|it| SmolStr::from(it.text()));
         let generic_params = lifetime_params.chain(type_params).format(", ");
         format_to!(buf, "<{}>", generic_params)


@@ -3,7 +3,7 @@
 use stdx::format_to;
 use syntax::{
     ast::{self, AstNode, GenericParamsOwner, NameOwner, StructKind, VisibilityOwner},
-    T,
+    SmolStr, T,
 };
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -95,14 +95,14 @@ fn generate_impl_text(strukt: &ast::Struct, code: &str) -> String {
         format_to!(buf, "{}", type_params.syntax());
     }
     buf.push_str(" ");
-    buf.push_str(strukt.name().unwrap().text().as_str());
+    buf.push_str(strukt.name().unwrap().text());
     if let Some(type_params) = type_params {
         let lifetime_params = type_params
             .lifetime_params()
             .filter_map(|it| it.lifetime())
-            .map(|it| it.text().clone());
+            .map(|it| SmolStr::from(it.text()));
         let type_params =
-            type_params.type_params().filter_map(|it| it.name()).map(|it| it.text().clone());
+            type_params.type_params().filter_map(|it| it.name()).map(|it| SmolStr::from(it.text()));
         format_to!(buf, "<{}>", lifetime_params.chain(type_params).format(", "))
     }


@@ -138,7 +138,7 @@ pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
         return None;
     }
 
-    let text = token.text().as_str();
+    let text = token.text();
     if !text.starts_with("r#") && text.ends_with('#') {
         return None;
     }


@@ -3,7 +3,7 @@
 use itertools::Itertools;
 use syntax::{
     ast::{self, make, AstNode},
-    Direction, SmolStr,
+    Direction,
     SyntaxKind::{IDENT, WHITESPACE},
     TextSize,
 };
@@ -43,17 +43,18 @@ pub(crate) fn replace_derive_with_manual_impl(
 ) -> Option<()> {
     let attr = ctx.find_node_at_offset::<ast::Attr>()?;
 
-    let attr_name = attr
+    let has_derive = attr
         .syntax()
         .descendants_with_tokens()
         .filter(|t| t.kind() == IDENT)
         .find_map(syntax::NodeOrToken::into_token)
-        .filter(|t| t.text() == "derive")?
-        .text()
-        .clone();
+        .filter(|t| t.text() == "derive")
+        .is_some();
+    if !has_derive {
+        return None;
+    }
 
-    let trait_token =
-        ctx.token_at_offset().find(|t| t.kind() == IDENT && *t.text() != attr_name)?;
+    let trait_token = ctx.token_at_offset().find(|t| t.kind() == IDENT && t.text() != "derive")?;
 
     let trait_path = make::path_unqualified(make::path_segment(make::name_ref(trait_token.text())));
     let annotated_name = attr.syntax().siblings(Direction::Next).find_map(ast::Name::cast)?;
@@ -176,9 +177,9 @@ fn update_attribute(
         .syntax()
         .descendants_with_tokens()
         .filter(|t| t.kind() == IDENT)
-        .filter_map(|t| t.into_token().map(|t| t.text().clone()))
+        .filter_map(|t| t.into_token().map(|t| t.text().to_string()))
         .filter(|t| t != trait_name.text())
-        .collect::<Vec<SmolStr>>();
+        .collect::<Vec<_>>();
     let has_more_derives = !new_attr_input.is_empty();
 
     if has_more_derives {


@@ -223,7 +223,7 @@ fn invert_special_case(expr: &ast::Expr) -> Option<ast::Expr> {
             let method = mce.name_ref()?;
             let arg_list = mce.arg_list()?;
 
-            let method = match method.text().as_str() {
+            let method = match method.text() {
                 "is_some" => "is_none",
                 "is_none" => "is_some",
                 "is_ok" => "is_err",


@@ -102,7 +102,7 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, mbe::ExpandError> {
         debug!("name token not found");
         mbe::ExpandError::ConversionError
     })?;
-    let name_token = tt::Ident { id: name_token_id, text: name.text().clone() };
+    let name_token = tt::Ident { id: name_token_id, text: name.text().into() };
     let type_params = params.map_or(0, |type_param_list| type_param_list.type_params().count());
     Ok(BasicAdtInfo { name: name_token, type_params })
 }


@@ -38,7 +38,7 @@ pub fn new_tuple_field(idx: usize) -> Name {
     }
 
     pub fn new_lifetime(lt: &ast::Lifetime) -> Name {
-        Self::new_text(lt.text().clone())
+        Self::new_text(lt.text().into())
     }
 
     /// Shortcut to create inline plain text name
@@ -47,12 +47,12 @@ const fn new_inline(text: &str) -> Name {
     }
 
     /// Resolve a name from the text of token.
-    fn resolve(raw_text: &SmolStr) -> Name {
+    fn resolve(raw_text: &str) -> Name {
         let raw_start = "r#";
-        if raw_text.as_str().starts_with(raw_start) {
+        if raw_text.starts_with(raw_start) {
             Name::new_text(SmolStr::new(&raw_text[raw_start.len()..]))
         } else {
-            Name::new_text(raw_text.clone())
+            Name::new_text(raw_text.into())
         }
     }
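Taking `&str` here means any caller with string-like data can resolve a name, while `SmolStr` stays the storage type. The same raw-identifier logic as a standalone runnable sketch (returning `SmolStr` directly rather than the crate's `Name` wrapper, and assuming the `smol_str` crate):

use smol_str::SmolStr;

fn resolve(raw_text: &str) -> SmolStr {
    let raw_start = "r#";
    if raw_text.starts_with(raw_start) {
        // Strip the raw-identifier prefix: `r#fn` resolves to `fn`.
        SmolStr::new(&raw_text[raw_start.len()..])
    } else {
        raw_text.into()
    }
}

fn main() {
    assert_eq!(resolve("r#fn"), "fn");
    assert_eq!(resolve("foo"), "foo");
}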


@@ -153,8 +153,7 @@ pub(crate) fn from_named(
         node: InFile<&dyn ast::NameOwner>,
         kind: SymbolKind,
     ) -> NavigationTarget {
-        let name =
-            node.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_"));
+        let name = node.value.name().map(|it| it.text().into()).unwrap_or_else(|| "_".into());
         let focus_range =
             node.value.name().map(|it| node.with_value(it.syntax()).original_file_range(db).range);
         let frange = node.map(|it| it.syntax()).original_file_range(db);


@@ -90,7 +90,7 @@ fn short_label(&self) -> Option<String> {
 impl ShortLabel for ast::ConstParam {
     fn short_label(&self) -> Option<String> {
         let mut buf = "const ".to_owned();
-        buf.push_str(self.name()?.text().as_str());
+        buf.push_str(self.name()?.text());
         if let Some(type_ref) = self.ty() {
             format_to!(buf, ": {}", type_ref.syntax());
         }
@@ -117,6 +117,6 @@ fn short_label_from_node<T>(node: &T, label: &str) -> Option<String>
 {
     let mut buf = node.visibility().map(|v| format!("{} ", v.syntax())).unwrap_or_default();
     buf.push_str(label);
-    buf.push_str(node.name()?.text().as_str());
+    buf.push_str(node.name()?.text());
     Some(buf)
 }


@@ -213,8 +213,8 @@ fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextSize) -> TextRange
     let ws_text = ws.text();
     let suffix = TextRange::new(offset, ws.text_range().end()) - ws.text_range().start();
     let prefix = TextRange::new(ws.text_range().start(), offset) - ws.text_range().start();
-    let ws_suffix = &ws_text.as_str()[suffix];
-    let ws_prefix = &ws_text.as_str()[prefix];
+    let ws_suffix = &ws_text[suffix];
+    let ws_prefix = &ws_text[prefix];
     if ws_text.contains('\n') && !ws_suffix.contains('\n') {
         if let Some(node) = ws.next_sibling_or_token() {
             let start = match ws_prefix.rfind('\n') {


@@ -411,7 +411,7 @@ fn get_string_representation(expr: &ast::Expr) -> Option<String> {
     match expr {
         ast::Expr::MethodCallExpr(method_call_expr) => {
             let name_ref = method_call_expr.name_ref()?;
-            match name_ref.text().as_str() {
+            match name_ref.text() {
                 "clone" => method_call_expr.receiver().map(|rec| rec.to_string()),
                 name_ref => Some(name_ref.to_owned()),
             }


@@ -59,7 +59,7 @@ fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextS
         // The node is either the first or the last in the file
         let suff = &token.text()[TextRange::new(
             offset - token.text_range().start() + TextSize::of('\n'),
-            TextSize::of(token.text().as_str()),
+            TextSize::of(token.text()),
         )];
         let spaces = suff.bytes().take_while(|&b| b == b' ').count();


@@ -30,7 +30,7 @@ fn is_format_string(string: &ast::String) -> Option<()> {
     let parent = string.syntax().parent();
 
     let name = parent.parent().and_then(ast::MacroCall::cast)?.path()?.segment()?.name_ref()?;
-    if !matches!(name.text().as_str(), "format_args" | "format_args_nl") {
+    if !matches!(name.text(), "format_args" | "format_args_nl") {
         return None;
     }


@@ -116,7 +116,7 @@ pub(super) fn doc_comment(hl: &mut Highlights, node: &SyntaxNode) {
             None => (),
         }
 
-        let line: &str = comment.text().as_str();
+        let line: &str = comment.text();
         let range = comment.syntax().text_range();
 
         let mut pos = TextSize::of(comment.prefix());


@@ -343,7 +343,7 @@ pub fn classify(
                     hir::AssocItem::TypeAlias(it) => Some(*it),
                     _ => None,
                 })
-                .find(|alias| alias.name(sema.db).to_string() == **name_ref.text())
+                .find(|alias| &alias.name(sema.db).to_string() == name_ref.text())
                 {
                     return Some(NameRefClass::Definition(Definition::ModuleDef(
                         ModuleDef::TypeAlias(ty),


@@ -507,7 +507,7 @@ fn new(path: &ast::Path) -> ImportGroup {
             PathSegmentKind::SelfKw => ImportGroup::ThisModule,
             PathSegmentKind::SuperKw => ImportGroup::SuperModule,
             PathSegmentKind::CrateKw => ImportGroup::ThisCrate,
-            PathSegmentKind::Name(name) => match name.text().as_str() {
+            PathSegmentKind::Name(name) => match name.text() {
                 "std" => ImportGroup::Std,
                 "core" => ImportGroup::Std,
                 _ => ImportGroup::ExternCrate,


@@ -209,7 +209,7 @@ pub fn crate_symbols(db: &RootDatabase, krate: CrateId, query: Query) -> Vec<Fil
     query.search(&buf)
 }
 
-pub fn index_resolve(db: &RootDatabase, name: &SmolStr) -> Vec<FileSymbol> {
+pub fn index_resolve(db: &RootDatabase, name: &str) -> Vec<FileSymbol> {
     let mut query = Query::new(name.to_string());
     query.exact();
     query.limit(4);
@@ -409,7 +409,7 @@ fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
     fn decl<N: NameOwner>(node: N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
         let name = node.name()?;
         let name_range = name.syntax().text_range();
-        let name = name.text().clone();
+        let name = name.text().into();
         let ptr = SyntaxNodePtr::new(node.syntax());
 
         Some((name, ptr, name_range))


@@ -507,7 +507,7 @@ fn to_char(&self) -> Option<char> {
         }
     }
 
     fn to_text(&self) -> SmolStr {
-        self.token().text().clone()
+        self.token().text().into()
     }
 }


@@ -10,8 +10,11 @@
 use ide_db::base_db::FileRange;
 use rustc_hash::FxHashMap;
 use std::{cell::Cell, iter::Peekable};
-use syntax::ast::{AstNode, AstToken};
 use syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken};
+use syntax::{
+    ast::{AstNode, AstToken},
+    SmolStr,
+};
 use test_utils::mark;
 
 // Creates a match error. If we're currently attempting to match some code that we thought we were
@@ -398,11 +401,11 @@ fn attempt_match_record_field_list(
         code: &SyntaxNode,
     ) -> Result<(), MatchFailed> {
         // Build a map keyed by field name.
-        let mut fields_by_name = FxHashMap::default();
+        let mut fields_by_name: FxHashMap<SmolStr, SyntaxNode> = FxHashMap::default();
        for child in code.children() {
             if let Some(record) = ast::RecordExprField::cast(child.clone()) {
                 if let Some(name) = record.field_name() {
-                    fields_by_name.insert(name.text().clone(), child.clone());
+                    fields_by_name.insert(name.text().into(), child.clone());
                 }
             }
         }
@@ -473,9 +476,9 @@ fn attempt_match_token_tree(
             }
             SyntaxElement::Node(n) => {
                 if let Some(first_token) = n.first_token() {
-                    if Some(first_token.text().as_str())
-                        == next_pattern_token.as_deref()
-                    {
+                    if Some(first_token.text()) == next_pattern_token.as_deref() {
                         if let Some(SyntaxElement::Node(p)) = pattern.next() {
                             // We have a subtree that starts with the next token in our pattern.
                             self.attempt_match_token_tree(phase, &p, &n)?;
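The explicit `FxHashMap<SmolStr, SyntaxNode>` annotation in the `fields_by_name` hunk above is what lets `name.text().into()` compile: with `text()` now returning `&str`, the target type of `.into()` is no longer implied by the value being inserted. A small sketch of the same inference issue using std's HashMap (illustrative only, not code from this commit):

use smol_str::SmolStr;
use std::collections::HashMap;

fn main() {
    let text: &str = "field";

    // With an unannotated map, `text.into()` would be ambiguous:
    // the compiler cannot pick a key type for the conversion.
    let mut map: HashMap<SmolStr, i32> = HashMap::new();
    map.insert(text.into(), 1);

    // Lookup by &str still works because SmolStr: Borrow<str>.
    assert_eq!(map.get("field"), Some(&1));
}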


@@ -173,7 +173,7 @@ fn render_token(&mut self, token: &SyntaxToken) {
                 );
             }
         } else {
-            self.out.push_str(token.text().as_str());
+            self.out.push_str(token.text());
         }
     }


@@ -228,7 +228,7 @@ fn resolve_path(&self, path: &ast::Path) -> Option<hir::PathResolution> {
                 None,
                 |_ty, assoc_item| {
                     let item_name = assoc_item.name(self.scope.db)?;
-                    if item_name.to_string().as_str() == name.text().as_str() {
+                    if item_name.to_string().as_str() == name.text() {
                         Some(hir::PathResolution::AssocItem(assoc_item))
                     } else {
                         None


@@ -12,7 +12,7 @@ doctest = false
 [dependencies]
 itertools = "0.10.0"
-rowan = "0.11"
+rowan = { path="../../../rowan" }
 rustc_lexer = { version = "697.0.0", package = "rustc-ap-rustc_lexer" }
 rustc-hash = "1.1.0"
 arrayvec = "0.5.1"


@@ -12,7 +12,7 @@
 use crate::{
     syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
-    SmolStr, SyntaxKind,
+    SyntaxKind,
 };
 
 pub use self::{
@@ -54,7 +54,7 @@ fn cast(syntax: SyntaxToken) -> Option<Self>
     fn syntax(&self) -> &SyntaxToken;
 
-    fn text(&self) -> &SmolStr {
+    fn text(&self) -> &str {
         self.syntax().text()
     }
 }
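This hunk is the pivot of the change: `text()` here is a provided trait method, so every AST token type picks up the `&str` return for free once the underlying `SyntaxToken::text()` (from the patched rowan) returns `&str`. A minimal sketch of that shape, with a stand-in `SyntaxToken` in place of rowan's:

// Stand-in for rowan's token type after the change.
struct SyntaxToken(String);

impl SyntaxToken {
    fn text(&self) -> &str {
        &self.0
    }
}

trait AstToken {
    fn syntax(&self) -> &SyntaxToken;

    // Provided method: the borrowed &str is tied to &self by lifetime
    // elision, so implementors only need to supply syntax().
    fn text(&self) -> &str {
        self.syntax().text()
    }
}

struct Comment(SyntaxToken);

impl AstToken for Comment {
    fn syntax(&self) -> &SyntaxToken {
        &self.0
    }
}

fn main() {
    let c = Comment(SyntaxToken("// a doc comment".to_string()));
    assert_eq!(c.text(), "// a doc comment");
}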


@@ -495,7 +495,7 @@ pub fn single_space() -> SyntaxToken {
         .syntax()
         .descendants_with_tokens()
         .filter_map(|it| it.into_token())
-        .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ")
+        .find(|it| it.kind() == WHITESPACE && it.text() == " ")
         .unwrap()
 }
@@ -523,7 +523,7 @@ pub fn single_newline() -> SyntaxToken {
         .syntax()
         .descendants_with_tokens()
         .filter_map(|it| it.into_token())
-        .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n")
+        .find(|it| it.kind() == WHITESPACE && it.text() == "\n")
         .unwrap()
 }
@@ -533,7 +533,7 @@ pub fn blank_line() -> SyntaxToken {
         .syntax()
         .descendants_with_tokens()
         .filter_map(|it| it.into_token())
-        .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n\n")
+        .find(|it| it.kind() == WHITESPACE && it.text() == "\n\n")
         .unwrap()
 }


@@ -13,19 +13,19 @@
 };
 
 impl ast::Lifetime {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 }
 
 impl ast::Name {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 }
 
 impl ast::NameRef {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
@@ -34,7 +34,7 @@ pub fn as_tuple_field(&self) -> Option<usize> {
     }
 }
 
-fn text_of_first_token(node: &SyntaxNode) -> &SmolStr {
+fn text_of_first_token(node: &SyntaxNode) -> &str {
     node.green().children().next().and_then(|it| it.into_token()).unwrap().text()
 }
@@ -121,7 +121,7 @@ pub fn as_simple_key_value(&self) -> Option<(SmolStr, SmolStr)> {
     pub fn simple_name(&self) -> Option<SmolStr> {
         let path = self.path()?;
         match (path.segment(), path.qualifier()) {
-            (Some(segment), None) => Some(segment.syntax().first_token()?.text().clone()),
+            (Some(segment), None) => Some(segment.syntax().first_token()?.text().into()),
             _ => None,
         }
     }


@@ -41,7 +41,7 @@ pub fn doc_comment(&self) -> Option<&str> {
         match kind {
             CommentKind { shape, doc: Some(_) } => {
                 let prefix = kind.prefix();
-                let text = &self.text().as_str()[prefix.len()..];
+                let text = &self.text()[prefix.len()..];
                 let ws = text.chars().next().filter(|c| c.is_whitespace());
                 let text = ws.map_or(text, |ws| &text[ws.len_utf8()..]);
                 match shape {
@@ -156,13 +156,13 @@ pub fn map_range_up(&self, range: TextRange) -> Option<TextRange> {
     pub fn value(&self) -> Option<Cow<'_, str>> {
         if self.is_raw() {
-            let text = self.text().as_str();
+            let text = self.text();
             let text =
                 &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
             return Some(Cow::Borrowed(text));
         }
 
-        let text = self.text().as_str();
+        let text = self.text();
         let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
 
         let mut buf = String::new();
@@ -190,7 +190,7 @@ pub fn value(&self) -> Option<Cow<'_, str>> {
     }
 
     pub fn quote_offsets(&self) -> Option<QuoteOffsets> {
-        let text = self.text().as_str();
+        let text = self.text();
         let offsets = QuoteOffsets::new(text)?;
         let o = self.syntax().text_range().start();
         let offsets = QuoteOffsets {
@@ -560,7 +560,7 @@ impl HasFormatSpecifier for ast::String {
     fn char_ranges(
         &self,
     ) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>> {
-        let text = self.text().as_str();
+        let text = self.text();
         let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
 
         let offset = self.text_range_between_quotes()?.start() - self.syntax().text_range().start();
@@ -590,7 +590,7 @@ pub fn radix(&self) -> Radix {
     pub fn value(&self) -> Option<u128> {
         let token = self.syntax();
 
-        let mut text = token.text().as_str();
+        let mut text = token.text();
         if let Some(suffix) = self.suffix() {
             text = &text[..text.len() - suffix.len()]
         }


@@ -116,7 +116,7 @@ fn unquote(text: &str, prefix_len: usize, end_delimiter: char) -> Option<&str> {
     }
 
     let token = literal.token();
-    let text = token.text().as_str();
+    let text = token.text();
 
     // FIXME: lift this lambda refactor to `fn` (https://github.com/rust-analyzer/rust-analyzer/pull/2834#discussion_r366199205)
     let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| {