Auto merge of #14630 - Veykril:arc, r=Veykril

internal: `Arc<String>` -> `Arc<str>`
bors 2023-04-22 07:58:13 +00:00
commit 1379b5fac7
18 changed files with 48 additions and 38 deletions
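For context, a minimal sketch (not part of this commit) of the difference the change exploits: `Arc<String>` adds a second indirection because the `String` owns its own heap buffer, while `Arc<str>` stores the text in the same allocation as the reference counts. Conversion goes through `Arc::from`, dereferencing yields a plain `&str`, and the diff replaces `unwrap_or_default()` with an explicit empty `Arc<str>`, presumably because `Arc<str>: Default` was not available there.

```rust
use std::sync::Arc;

fn main() {
    // `Arc<String>`: the Arc allocation stores a String, which in turn owns a
    // separate heap buffer, so every read goes through two pointers.
    let boxed: Arc<String> = Arc::new(String::from("fn main() {}"));

    // `Arc<str>`: the bytes live directly behind the Arc, one allocation total.
    // `From<String>` and `From<&str>` both exist, so `Arc::from` covers the
    // call sites rewritten in this diff.
    let shared: Arc<str> = Arc::from(boxed.as_str());

    // Dereferencing gives a plain `&str`, which is why `text.as_str()` becomes
    // `&*text` in the search/SSR changes below.
    let text: &str = &*shared;
    assert_eq!(text, "fn main() {}");

    // Spelling out an empty default, matching the `unwrap_or_else` change.
    let empty: Arc<str> = Arc::from("");
    assert!(empty.is_empty());
}
```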

View File

@@ -12,7 +12,7 @@
 #[derive(Default)]
 pub struct Change {
     pub roots: Option<Vec<SourceRoot>>,
-    pub files_changed: Vec<(FileId, Option<Arc<String>>)>,
+    pub files_changed: Vec<(FileId, Option<Arc<str>>)>,
     pub crate_graph: Option<CrateGraph>,
     pub proc_macros: Option<ProcMacros>,
 }
@@ -42,7 +42,7 @@ pub fn set_roots(&mut self, roots: Vec<SourceRoot>) {
         self.roots = Some(roots);
     }

-    pub fn change_file(&mut self, file_id: FileId, new_text: Option<Arc<String>>) {
+    pub fn change_file(&mut self, file_id: FileId, new_text: Option<Arc<str>>) {
         self.files_changed.push((file_id, new_text))
     }
@@ -72,7 +72,7 @@ pub fn apply(self, db: &mut dyn SourceDatabaseExt) {
             let source_root = db.source_root(source_root_id);
             let durability = durability(&source_root);
             // XXX: can't actually remove the file, just reset the text
-            let text = text.unwrap_or_default();
+            let text = text.unwrap_or_else(|| Arc::from(""));
             db.set_file_text_with_durability(file_id, text, durability)
         }
         if let Some(crate_graph) = self.crate_graph {

View File

@@ -190,7 +190,7 @@ pub fn parse_with_proc_macros(
             default_target_data_layout = meta.target_data_layout;
         }
-        change.change_file(file_id, Some(Arc::new(text)));
+        change.change_file(file_id, Some(Arc::from(text)));
         let path = VfsPath::new_virtual_path(meta.path);
         file_set.insert(file_id, path);
         files.push(file_id);
@@ -240,7 +240,7 @@ pub fn parse_with_proc_macros(
         fs.insert(core_file, VfsPath::new_virtual_path("/sysroot/core/lib.rs".to_string()));
         roots.push(SourceRoot::new_library(fs));
-        change.change_file(core_file, Some(Arc::new(mini_core.source_code())));
+        change.change_file(core_file, Some(Arc::from(mini_core.source_code())));
         let all_crates = crate_graph.crates_in_topological_order();
@@ -279,7 +279,7 @@ pub fn parse_with_proc_macros(
         );
         roots.push(SourceRoot::new_library(fs));
-        change.change_file(proc_lib_file, Some(Arc::new(source)));
+        change.change_file(proc_lib_file, Some(Arc::from(source)));
         let all_crates = crate_graph.crates_in_topological_order();

View File

@@ -57,7 +57,7 @@ pub struct FileRange {
 pub trait FileLoader {
     /// Text of the file.
-    fn file_text(&self, file_id: FileId) -> Arc<String>;
+    fn file_text(&self, file_id: FileId) -> Arc<str>;
     fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>;
     fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>>;
 }
@@ -90,7 +90,7 @@ fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFil
 #[salsa::query_group(SourceDatabaseExtStorage)]
 pub trait SourceDatabaseExt: SourceDatabase {
     #[salsa::input]
-    fn file_text(&self, file_id: FileId) -> Arc<String>;
+    fn file_text(&self, file_id: FileId) -> Arc<str>;
     /// Path to a file, relative to the root of its source root.
     /// Source root of the file.
     #[salsa::input]
@@ -118,7 +118,7 @@ fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<FxHas
 pub struct FileLoaderDelegate<T>(pub T);

 impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
-    fn file_text(&self, file_id: FileId) -> Arc<String> {
+    fn file_text(&self, file_id: FileId) -> Arc<str> {
         SourceDatabaseExt::file_text(self.0, file_id)
     }
     fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {

View File

@@ -15,7 +15,7 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change:
         });
         assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
     }
-    db.set_file_text(pos.file_id, Arc::new(ra_fixture_change.to_string()));
+    db.set_file_text(pos.file_id, Arc::from(ra_fixture_change));
     {
         let events = db.log_executed(|| {
@@ -96,7 +96,7 @@ fn f() {
         });
         assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
     }
-    db.set_file_text(pos.file_id, Arc::new("m!(Y);".to_string()));
+    db.set_file_text(pos.file_id, Arc::from("m!(Y);"));
     {
         let events = db.log_executed(|| {
@@ -150,7 +150,7 @@ fn quux() { 92 }
 m!(Y);
 m!(Z);
 "#;
-    db.set_file_text(pos.file_id, Arc::new(new_text.to_string()));
+    db.set_file_text(pos.file_id, Arc::from(new_text));
     {
         let events = db.log_executed(|| {

View File

@@ -71,7 +71,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 impl panic::RefUnwindSafe for TestDB {}

 impl FileLoader for TestDB {
-    fn file_text(&self, file_id: FileId) -> Arc<String> {
+    fn file_text(&self, file_id: FileId) -> Arc<str> {
         FileLoaderDelegate(self).file_text(file_id)
     }
     fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {

View File

@@ -76,7 +76,7 @@ fn snapshot(&self) -> salsa::Snapshot<TestDB> {
 impl panic::RefUnwindSafe for TestDB {}

 impl FileLoader for TestDB {
-    fn file_text(&self, file_id: FileId) -> Arc<String> {
+    fn file_text(&self, file_id: FileId) -> Arc<str> {
         FileLoaderDelegate(self).file_text(file_id)
     }
     fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {

View File

@@ -572,10 +572,9 @@ fn main() {
     let x = 1;
     x.push(1);
 }
-"
-    .to_string();
-    db.set_file_text(pos.file_id, Arc::new(new_text));
+";
+    db.set_file_text(pos.file_id, Arc::from(new_text));
     let module = db.module_for_file(pos.file_id);
     let crate_def_map = module.def_map(&db);

View File

@@ -33,10 +33,9 @@ fn foo() -> i32 {
     +
     1
 }
-"
-    .to_string();
-    db.set_file_text(pos.file_id, Arc::new(new_text));
+";
+    db.set_file_text(pos.file_id, Arc::from(new_text));
     {
         let events = db.log_executed(|| {

View File

@@ -113,7 +113,7 @@ fn upcast(&self) -> &(dyn HirDatabase + 'static) {
 }

 impl FileLoader for RootDatabase {
-    fn file_text(&self, file_id: FileId) -> Arc<String> {
+    fn file_text(&self, file_id: FileId) -> Arc<str> {
         FileLoaderDelegate(self).file_text(file_id)
     }
     fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {

View File

@@ -438,11 +438,11 @@ fn match_indices<'a>(
 fn scope_files<'a>(
     sema: &'a Semantics<'_, RootDatabase>,
     scope: &'a SearchScope,
-) -> impl Iterator<Item = (Arc<String>, FileId, TextRange)> + 'a {
+) -> impl Iterator<Item = (Arc<str>, FileId, TextRange)> + 'a {
     scope.entries.iter().map(|(&file_id, &search_range)| {
         let text = sema.db.file_text(file_id);
         let search_range =
-            search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(text.as_str())));
+            search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text)));
         (text, file_id, search_range)
     })
@@ -553,7 +553,7 @@ fn scope_files<'a>(
         let text = sema.db.file_text(file_id);
         let search_range =
-            search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(text.as_str())));
+            search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text)));
         let tree = Lazy::new(|| sema.parse(file_id).syntax().clone());
         let finder = &Finder::new("self");

View File

@@ -224,7 +224,7 @@ pub fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec<Matc
         let file = self.sema.parse(file_id);
         let mut res = Vec::new();
         let file_text = self.sema.db.file_text(file_id);
-        let mut remaining_text = file_text.as_str();
+        let mut remaining_text = &*file_text;
         let mut base = 0;
         let len = snippet.len() as u32;
         while let Some(offset) = remaining_text.find(snippet) {

View File

@@ -244,7 +244,7 @@ pub fn from_single_file(text: String) -> (Analysis, FileId) {
             Err("Analysis::from_single_file has no target layout".into()),
             None,
         );
-        change.change_file(file_id, Some(Arc::new(text)));
+        change.change_file(file_id, Some(Arc::from(text)));
         change.set_crate_graph(crate_graph);
         host.apply_change(change);
         (host.analysis(), file_id)
@@ -263,7 +263,7 @@ pub fn parallel_prime_caches<F>(&self, num_worker_threads: u8, cb: F) -> Cancell
     }

     /// Gets the text of the source file.
-    pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<String>> {
+    pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<str>> {
         self.with_db(|db| db.file_text(file_id))
     }

View File

@@ -164,8 +164,8 @@ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
     }
 }

-impl StatCollect<FileId, Arc<String>> for FilesStats {
-    fn collect_entry(&mut self, _: FileId, value: Option<Arc<String>>) {
+impl StatCollect<FileId, Arc<str>> for FilesStats {
+    fn collect_entry(&mut self, _: FileId, value: Option<Arc<str>>) {
         self.total += 1;
         self.size += value.unwrap().len();
     }

View File

@@ -162,9 +162,9 @@ fn load_crate_graph(
     let changes = vfs.take_changes();
     for file in changes {
         if file.exists() {
-            let contents = vfs.file_contents(file.file_id).to_vec();
-            if let Ok(text) = String::from_utf8(contents) {
-                analysis_change.change_file(file.file_id, Some(Arc::new(text)))
+            let contents = vfs.file_contents(file.file_id);
+            if let Ok(text) = std::str::from_utf8(contents) {
+                analysis_change.change_file(file.file_id, Some(Arc::from(text)))
             }
         }
     }

View File

@@ -269,7 +269,7 @@ pub(crate) fn process_changes(&mut self) -> bool {
                     String::from_utf8(bytes).ok().and_then(|text| {
                         let (text, line_endings) = LineEndings::normalize(text);
                         line_endings_map.insert(file.file_id, line_endings);
-                        Some(Arc::new(text))
+                        Some(Arc::from(text))
                     })
                 } else {
                     None
@@ -440,6 +440,10 @@ pub(crate) fn cargo_target_for_crate_root(
             ProjectWorkspace::DetachedFiles { .. } => None,
         })
     }
+
+    pub(crate) fn vfs_memory_usage(&self) -> usize {
+        self.vfs.read().0.memory_usage()
+    }
 }

 pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url {

View File

@@ -103,6 +103,7 @@ pub(crate) fn handle_analyzer_status(
             .collect::<Vec<&AbsPath>>()
         );
     }
+    format_to!(buf, "\nVfs memory usage: {}\n", snap.vfs_memory_usage());
     buf.push_str("\nAnalysis:\n");
     buf.push_str(
         &snap

View File

@@ -65,7 +65,7 @@ fn integrated_highlighting_benchmark() {
         let mut text = host.analysis().file_text(file_id).unwrap().to_string();
         text.push_str("\npub fn _dummy() {}\n");
         let mut change = Change::new();
-        change.change_file(file_id, Some(Arc::new(text)));
+        change.change_file(file_id, Some(Arc::from(text)));
         host.apply_change(change);
     }
@@ -121,7 +121,7 @@ fn integrated_completion_benchmark() {
             patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
                 + "sel".len();
         let mut change = Change::new();
-        change.change_file(file_id, Some(Arc::new(text)));
+        change.change_file(file_id, Some(Arc::from(text)));
         host.apply_change(change);
         completion_offset
     };
@@ -160,7 +160,7 @@ fn integrated_completion_benchmark() {
             patch(&mut text, "sel;\ndb.struct_data(self.id)", "self.;\ndb.struct_data(self.id)")
                 + "self.".len();
         let mut change = Change::new();
-        change.change_file(file_id, Some(Arc::new(text)));
+        change.change_file(file_id, Some(Arc::from(text)));
         host.apply_change(change);
         completion_offset
     };

View File

@@ -139,6 +139,11 @@ pub fn file_contents(&self, file_id: FileId) -> &[u8] {
         self.get(file_id).as_deref().unwrap()
     }

+    /// Returns the overall memory usage for the stored files.
+    pub fn memory_usage(&self) -> usize {
+        self.data.iter().flatten().map(|d| d.capacity()).sum()
+    }
+
     /// Returns an iterator over the stored ids and their corresponding paths.
     ///
     /// This will skip deleted files.
@@ -158,7 +163,7 @@ pub fn iter(&self) -> impl Iterator<Item = (FileId, &VfsPath)> + '_ {
     ///
     /// If the path does not currently exists in the `Vfs`, allocates a new
     /// [`FileId`] for it.
-    pub fn set_file_contents(&mut self, path: VfsPath, contents: Option<Vec<u8>>) -> bool {
+    pub fn set_file_contents(&mut self, path: VfsPath, mut contents: Option<Vec<u8>>) -> bool {
         let file_id = self.alloc_file_id(path);
         let change_kind = match (self.get(file_id), &contents) {
             (None, None) => return false,
@@ -167,7 +172,9 @@ pub fn set_file_contents(&mut self, path: VfsPath, contents: Option<Vec<u8>>) ->
             (Some(_), None) => ChangeKind::Delete,
             (Some(_), Some(_)) => ChangeKind::Modify,
         };
+        if let Some(contents) = &mut contents {
+            contents.shrink_to_fit();
+        }
         *self.get_mut(file_id) = contents;
         self.changes.push(ChangedFile { file_id, change_kind });
         true