//! Project loading & configuration updates.
//!
//! This is quite tricky. The main problem is time and changes -- there's no
//! fixed "project" rust-analyzer is working with, "current project" is itself
//! mutable state. For example, when the user edits `Cargo.toml` by adding a new
//! dependency, project model changes. What's more, switching project model is
//! not instantaneous -- it takes time to run `cargo metadata` and (for proc
//! macros) `cargo check`.
//!
//! The main guiding principle here is, as elsewhere in rust-analyzer,
//! robustness. We try not to assume that the project model exists or is
//! correct. Instead, we try to provide a best-effort service. Even if the
//! project is currently loading and we don't have a full project model, we
//! still want to respond to various requests.
// FIXME: This is a mess that needs some untangling work
2023-07-03 10:40:31 -05:00
use std ::{ iter , mem } ;
2020-06-25 16:44:58 -05:00
2020-09-15 20:51:57 -05:00
use flycheck ::{ FlycheckConfig , FlycheckHandle } ;
2023-12-18 05:09:54 -06:00
use hir ::{ db ::DefDatabase , Change , ProcMacros } ;
2024-02-16 08:47:25 -06:00
use ide ::CrateId ;
2023-02-28 05:08:23 -06:00
use ide_db ::{
2024-02-16 09:28:17 -06:00
base_db ::{ salsa ::Durability , CrateGraph , ProcMacroPaths , Version } ,
2023-02-28 05:08:23 -06:00
FxHashMap ,
2021-08-22 06:05:12 -05:00
} ;
2023-11-21 10:54:34 -06:00
use itertools ::Itertools ;
2023-07-03 10:40:31 -05:00
use load_cargo ::{ load_proc_macro , ProjectFolders } ;
use proc_macro_api ::ProcMacroServer ;
2024-02-14 08:13:45 -06:00
use project_model ::{ ProjectWorkspace , WorkspaceBuildScripts } ;
2023-06-13 05:25:04 -05:00
use rustc_hash ::FxHashSet ;
2023-05-26 12:18:17 -05:00
use stdx ::{ format_to , thread ::ThreadIntent } ;
2023-05-02 09:12:22 -05:00
use triomphe ::Arc ;
2024-02-16 08:47:25 -06:00
use vfs ::{ AbsPath , AbsPathBuf , ChangeKind } ;
2020-06-25 16:44:58 -05:00
use crate ::{
config ::{ Config , FilesWatcher , LinkedProject } ,
2021-04-06 06:16:35 -05:00
global_state ::GlobalState ,
2020-07-10 08:27:34 -05:00
lsp_ext ,
2020-07-02 09:47:42 -05:00
main_loop ::Task ,
2022-04-16 07:16:58 -05:00
op_queue ::Cause ,
2020-06-25 16:44:58 -05:00
} ;
2021-01-07 11:08:46 -06:00
#[ derive(Debug) ]
pub ( crate ) enum ProjectWorkspaceProgress {
Begin ,
Report ( String ) ,
2023-06-13 05:25:04 -05:00
End ( Vec < anyhow ::Result < ProjectWorkspace > > , bool ) ,
2021-01-07 11:08:46 -06:00
}
2021-01-28 09:33:02 -06:00
#[ derive(Debug) ]
pub ( crate ) enum BuildDataProgress {
Begin ,
Report ( String ) ,
2021-07-18 05:13:03 -05:00
End ( ( Arc < Vec < ProjectWorkspace > > , Vec < anyhow ::Result < WorkspaceBuildScripts > > ) ) ,
2021-01-28 09:33:02 -06:00
}
2023-03-25 12:06:06 -05:00
#[ derive(Debug) ]
pub ( crate ) enum ProcMacroProgress {
Begin ,
Report ( String ) ,
End ( ProcMacros ) ,
}
2020-06-25 16:44:58 -05:00
impl GlobalState {
2021-04-06 06:16:35 -05:00
pub ( crate ) fn is_quiescent ( & self ) -> bool {
2023-03-10 03:26:02 -06:00
! ( self . last_reported_status . is_none ( )
| | self . fetch_workspaces_queue . op_in_progress ( )
2021-04-06 06:16:35 -05:00
| | self . fetch_build_data_queue . op_in_progress ( )
2023-03-26 01:39:28 -05:00
| | self . fetch_proc_macros_queue . op_in_progress ( )
2021-04-06 06:16:35 -05:00
| | self . vfs_progress_config_version < self . vfs_config_version
| | self . vfs_progress_n_done < self . vfs_progress_n_total )
}
2020-06-26 10:28:04 -05:00
pub ( crate ) fn update_configuration ( & mut self , config : Config ) {
2024-01-17 20:27:38 -06:00
let _p =
tracing ::span! ( tracing ::Level ::INFO , " GlobalState::update_configuration " ) . entered ( ) ;
2021-01-06 06:46:31 -06:00
let old_config = mem ::replace ( & mut self . config , Arc ::new ( config ) ) ;
2023-03-25 17:03:22 -05:00
if self . config . lru_parse_query_capacity ( ) ! = old_config . lru_parse_query_capacity ( ) {
self . analysis_host . update_lru_capacity ( self . config . lru_parse_query_capacity ( ) ) ;
}
if self . config . lru_query_capacities ( ) ! = old_config . lru_query_capacities ( ) {
self . analysis_host . update_lru_capacities (
& self . config . lru_query_capacities ( ) . cloned ( ) . unwrap_or_default ( ) ,
) ;
2020-06-26 10:28:04 -05:00
}
2023-12-18 08:38:21 -06:00
if self . config . linked_or_discovered_projects ( ) ! = old_config . linked_or_discovered_projects ( )
{
2024-01-04 03:19:25 -06:00
self . fetch_workspaces_queue . request_op ( " discovered projects changed " . to_owned ( ) , false )
2021-01-06 04:54:28 -06:00
} else if self . config . flycheck ( ) ! = old_config . flycheck ( ) {
2020-06-25 16:44:58 -05:00
self . reload_flycheck ( ) ;
}
2021-06-03 09:11:20 -05:00
2023-03-29 12:13:40 -05:00
if self . analysis_host . raw_database ( ) . expand_proc_attr_macros ( )
2022-05-06 01:46:02 -05:00
! = self . config . expand_proc_attr_macros ( )
{
2023-03-29 12:13:40 -05:00
self . analysis_host . raw_database_mut ( ) . set_expand_proc_attr_macros_with_durability (
self . config . expand_proc_attr_macros ( ) ,
Durability ::HIGH ,
) ;
2022-05-06 01:46:02 -05:00
}
2020-06-25 16:44:58 -05:00
}
2021-08-30 11:35:49 -05:00
pub ( crate ) fn current_status ( & self ) -> lsp_ext ::ServerStatusParams {
2021-04-06 06:16:35 -05:00
let mut status = lsp_ext ::ServerStatusParams {
health : lsp_ext ::Health ::Ok ,
quiescent : self . is_quiescent ( ) ,
message : None ,
} ;
2023-03-15 05:28:37 -05:00
let mut message = String ::new ( ) ;
2021-04-06 10:08:05 -05:00
2024-01-04 03:19:25 -06:00
if self . build_deps_changed {
2021-10-20 08:29:50 -05:00
status . health = lsp_ext ::Health ::Warning ;
2024-01-04 03:19:25 -06:00
message . push_str (
" Proc-macros and/or build scripts have changed and need to be rebuilt. \n \n " ,
) ;
2021-10-20 08:29:50 -05:00
}
2024-01-19 09:51:08 -06:00
if self . fetch_build_data_error ( ) . is_err ( ) {
2021-04-06 10:08:05 -05:00
status . health = lsp_ext ::Health ::Warning ;
2023-03-15 05:28:37 -05:00
message . push_str ( " Failed to run build scripts of some packages. \n \n " ) ;
2021-04-06 10:08:05 -05:00
}
2023-04-04 12:24:50 -05:00
if self . proc_macro_clients . iter ( ) . any ( | it | it . is_err ( ) ) {
status . health = lsp_ext ::Health ::Warning ;
message . push_str ( " Failed to spawn one or more proc-macro servers. \n \n " ) ;
2023-07-30 07:35:20 -05:00
for err in self . proc_macro_clients . iter ( ) {
if let Err ( err ) = err {
format_to! ( message , " - {err} \n " ) ;
}
}
2023-04-04 12:24:50 -05:00
}
2021-04-06 06:16:35 -05:00
if ! self . config . cargo_autoreload ( )
& & self . is_quiescent ( )
& & self . fetch_workspaces_queue . op_requested ( )
{
status . health = lsp_ext ::Health ::Warning ;
2023-03-15 05:35:34 -05:00
message . push_str ( " Auto-reloading is disabled and the workspace has changed, a manual workspace reload is required. \n \n " ) ;
2021-04-06 06:16:35 -05:00
}
2023-12-18 08:38:21 -06:00
if self . config . linked_or_discovered_projects ( ) . is_empty ( )
2022-11-11 06:00:22 -06:00
& & self . config . detached_files ( ) . is_empty ( )
& & self . config . notifications ( ) . cargo_toml_not_found
{
status . health = lsp_ext ::Health ::Warning ;
2023-03-16 10:26:19 -05:00
message . push_str ( " Failed to discover workspace. \n " ) ;
message . push_str ( " Consider adding the `Cargo.toml` of the workspace to the [`linkedProjects`](https://rust-analyzer.github.io/manual.html#rust-analyzer.linkedProjects) setting. \n \n " ) ;
2022-11-11 06:00:22 -06:00
}
2023-05-26 08:21:00 -05:00
if let Some ( err ) = & self . config_errors {
status . health = lsp_ext ::Health ::Warning ;
format_to! ( message , " {err} \n " ) ;
}
2023-05-26 08:37:41 -05:00
if let Some ( err ) = & self . last_flycheck_error {
status . health = lsp_ext ::Health ::Warning ;
message . push_str ( err ) ;
message . push ( '\n' ) ;
}
2023-03-10 02:49:37 -06:00
2023-03-15 05:35:34 -05:00
for ws in self . workspaces . iter ( ) {
let ( ProjectWorkspace ::Cargo { sysroot , .. }
| ProjectWorkspace ::Json { sysroot , .. }
| ProjectWorkspace ::DetachedFiles { sysroot , .. } ) = ws ;
2023-04-13 01:40:14 -05:00
match sysroot {
Err ( None ) = > ( ) ,
Err ( Some ( e ) ) = > {
status . health = lsp_ext ::Health ::Warning ;
message . push_str ( e ) ;
message . push_str ( " \n \n " ) ;
}
Ok ( s ) = > {
if let Some ( e ) = s . loading_warning ( ) {
status . health = lsp_ext ::Health ::Warning ;
message . push_str ( & e ) ;
message . push_str ( " \n \n " ) ;
}
}
2023-03-15 05:35:34 -05:00
}
if let ProjectWorkspace ::Cargo { rustc : Err ( Some ( e ) ) , .. } = ws {
status . health = lsp_ext ::Health ::Warning ;
message . push_str ( e ) ;
message . push_str ( " \n \n " ) ;
}
}
2024-01-19 09:51:08 -06:00
if self . fetch_workspace_error ( ) . is_err ( ) {
2023-03-15 05:28:37 -05:00
status . health = lsp_ext ::Health ::Error ;
2023-12-18 08:38:21 -06:00
message . push_str ( " Failed to load workspaces. " ) ;
if self . config . has_linked_projects ( ) {
message . push_str (
" `rust-analyzer.linkedProjects` have been specified, which may be incorrect. Specified project paths: \n " ,
) ;
message . push_str ( & format! (
" {} " ,
self . config . linked_manifests ( ) . map ( | it | it . display ( ) ) . format ( " \n " )
) ) ;
if self . config . has_linked_project_jsons ( ) {
message . push_str ( " \n Additionally, one or more project jsons are specified " )
}
}
message . push_str ( " \n \n " ) ;
2023-03-15 05:28:37 -05:00
}
if ! message . is_empty ( ) {
status . message = Some ( message . trim_end ( ) . to_owned ( ) ) ;
}
2021-08-30 11:35:49 -05:00
status
2020-07-10 08:27:34 -05:00
}
2021-01-10 09:02:02 -06:00
2023-06-13 05:25:04 -05:00
pub ( crate ) fn fetch_workspaces ( & mut self , cause : Cause , force_crate_graph_reload : bool ) {
2022-04-16 07:16:58 -05:00
tracing ::info! ( % cause , " will fetch workspaces " ) ;
2021-01-07 11:08:46 -06:00
2023-06-04 02:30:21 -05:00
self . task_pool . handle . spawn_with_sender ( ThreadIntent ::Worker , {
2023-12-18 08:38:21 -06:00
let linked_projects = self . config . linked_or_discovered_projects ( ) ;
2021-05-23 12:56:54 -05:00
let detached_files = self . config . detached_files ( ) . to_vec ( ) ;
2021-01-06 04:54:28 -06:00
let cargo_config = self . config . cargo ( ) ;
2021-01-07 11:08:46 -06:00
move | sender | {
let progress = {
let sender = sender . clone ( ) ;
move | msg | {
sender
. send ( Task ::FetchWorkspace ( ProjectWorkspaceProgress ::Report ( msg ) ) )
. unwrap ( )
}
} ;
sender . send ( Task ::FetchWorkspace ( ProjectWorkspaceProgress ::Begin ) ) . unwrap ( ) ;
2021-05-23 12:56:54 -05:00
let mut workspaces = linked_projects
2020-07-02 09:47:42 -05:00
. iter ( )
. map ( | project | match project {
LinkedProject ::ProjectManifest ( manifest ) = > {
2021-01-07 11:08:46 -06:00
project_model ::ProjectWorkspace ::load (
manifest . clone ( ) ,
& cargo_config ,
& progress ,
)
2020-07-02 09:47:42 -05:00
}
LinkedProject ::InlineJsonProject ( it ) = > {
2023-01-27 06:49:28 -06:00
Ok ( project_model ::ProjectWorkspace ::load_inline (
2021-01-18 05:52:12 -06:00
it . clone ( ) ,
cargo_config . target . as_deref ( ) ,
2022-09-19 10:31:08 -05:00
& cargo_config . extra_env ,
2023-01-27 06:49:28 -06:00
) )
2020-07-02 09:47:42 -05:00
}
2020-06-26 09:33:57 -05:00
} )
2020-07-02 09:47:42 -05:00
. collect ::< Vec < _ > > ( ) ;
2021-01-07 11:08:46 -06:00
2023-04-18 07:27:01 -05:00
let mut i = 0 ;
while i < workspaces . len ( ) {
if let Ok ( w ) = & workspaces [ i ] {
2023-11-21 10:54:34 -06:00
let dupes : Vec < _ > = workspaces [ i + 1 .. ]
2023-04-18 07:27:01 -05:00
. iter ( )
2023-11-21 10:54:34 -06:00
. positions ( | it | it . as_ref ( ) . is_ok_and ( | ws | ws . eq_ignore_build_data ( w ) ) )
2023-04-25 03:47:33 -05:00
. collect ( ) ;
dupes . into_iter ( ) . rev ( ) . for_each ( | d | {
2023-11-21 10:54:34 -06:00
_ = workspaces . remove ( d + i + 1 ) ;
2023-04-25 03:47:33 -05:00
} ) ;
2023-04-18 07:27:01 -05:00
}
i + = 1 ;
}
2021-05-23 12:56:54 -05:00
if ! detached_files . is_empty ( ) {
2022-11-24 03:21:19 -06:00
workspaces . push ( project_model ::ProjectWorkspace ::load_detached_files (
detached_files ,
& cargo_config ,
) ) ;
2021-05-23 12:56:54 -05:00
}
2021-08-15 07:46:13 -05:00
tracing ::info! ( " did fetch workspaces {:?} " , workspaces ) ;
2021-01-10 09:02:02 -06:00
sender
2023-06-13 05:25:04 -05:00
. send ( Task ::FetchWorkspace ( ProjectWorkspaceProgress ::End (
workspaces ,
force_crate_graph_reload ,
) ) )
2021-01-10 09:02:02 -06:00
. unwrap ( ) ;
2020-07-02 09:47:42 -05:00
}
} ) ;
}
2021-07-18 03:29:22 -05:00
2022-04-16 07:16:58 -05:00
pub ( crate ) fn fetch_build_data ( & mut self , cause : Cause ) {
2022-06-15 07:29:13 -05:00
tracing ::info! ( % cause , " will fetch build data " ) ;
2021-07-18 03:29:22 -05:00
let workspaces = Arc ::clone ( & self . workspaces ) ;
let config = self . config . cargo ( ) ;
2024-01-11 23:35:53 -06:00
let root_path = self . config . root_path ( ) . clone ( ) ;
2023-06-04 02:30:21 -05:00
self . task_pool . handle . spawn_with_sender ( ThreadIntent ::Worker , move | sender | {
sender . send ( Task ::FetchBuildData ( BuildDataProgress ::Begin ) ) . unwrap ( ) ;
2021-04-06 04:35:40 -05:00
2023-06-04 02:30:21 -05:00
let progress = {
let sender = sender . clone ( ) ;
move | msg | {
sender . send ( Task ::FetchBuildData ( BuildDataProgress ::Report ( msg ) ) ) . unwrap ( )
}
} ;
2024-01-11 23:35:53 -06:00
let res = ProjectWorkspace ::run_all_build_scripts (
& workspaces ,
& config ,
& progress ,
& root_path ,
) ;
2022-08-27 11:28:09 -05:00
2023-06-04 02:30:21 -05:00
sender . send ( Task ::FetchBuildData ( BuildDataProgress ::End ( ( workspaces , res ) ) ) ) . unwrap ( ) ;
} ) ;
2021-04-06 04:35:40 -05:00
}
2023-03-26 01:39:28 -05:00
pub ( crate ) fn fetch_proc_macros ( & mut self , cause : Cause , paths : Vec < ProcMacroPaths > ) {
tracing ::info! ( % cause , " will load proc macros " ) ;
2023-11-17 08:25:20 -06:00
let ignored_proc_macros = self . config . ignored_proc_macros ( ) . clone ( ) ;
2023-03-25 12:06:06 -05:00
let proc_macro_clients = self . proc_macro_clients . clone ( ) ;
2023-06-04 02:30:21 -05:00
self . task_pool . handle . spawn_with_sender ( ThreadIntent ::Worker , move | sender | {
sender . send ( Task ::LoadProcMacros ( ProcMacroProgress ::Begin ) ) . unwrap ( ) ;
2023-03-25 12:06:06 -05:00
2023-11-17 08:25:20 -06:00
let ignored_proc_macros = & ignored_proc_macros ;
2023-06-04 02:30:21 -05:00
let progress = {
let sender = sender . clone ( ) ;
& move | msg | {
sender . send ( Task ::LoadProcMacros ( ProcMacroProgress ::Report ( msg ) ) ) . unwrap ( )
2023-03-25 12:06:06 -05:00
}
2023-06-04 02:30:21 -05:00
} ;
let mut res = FxHashMap ::default ( ) ;
let chain = proc_macro_clients
. iter ( )
. map ( | res | res . as_ref ( ) . map_err ( | e | e . to_string ( ) ) )
. chain ( iter ::repeat_with ( | | Err ( " Proc macros servers are not running " . into ( ) ) ) ) ;
for ( client , paths ) in chain . zip ( paths ) {
res . extend ( paths . into_iter ( ) . map ( move | ( crate_id , res ) | {
(
crate_id ,
res . map_or_else (
| _ | Err ( " proc macro crate is missing dylib " . to_owned ( ) ) ,
| ( crate_name , path ) | {
2023-06-19 07:01:47 -05:00
progress ( path . to_string ( ) ) ;
2023-06-04 02:30:21 -05:00
client . as_ref ( ) . map_err ( Clone ::clone ) . and_then ( | client | {
load_proc_macro (
client ,
& path ,
crate_name
. as_deref ( )
. and_then ( | crate_name | {
2024-02-12 06:34:38 -06:00
ignored_proc_macros . iter ( ) . find_map (
| ( name , macros ) | {
eq_ignore_underscore ( name , crate_name )
. then_some ( & * * macros )
} ,
)
2023-06-04 02:30:21 -05:00
} )
. unwrap_or_default ( ) ,
)
} )
} ,
) ,
)
} ) ) ;
}
2023-03-25 12:06:06 -05:00
2023-06-04 02:30:21 -05:00
sender . send ( Task ::LoadProcMacros ( ProcMacroProgress ::End ( res ) ) ) . unwrap ( ) ;
} ) ;
2023-03-25 12:06:06 -05:00
}
pub ( crate ) fn set_proc_macros ( & mut self , proc_macros : ProcMacros ) {
let mut change = Change ::new ( ) ;
change . set_proc_macros ( proc_macros ) ;
self . analysis_host . apply_change ( change ) ;
}
2022-04-18 01:26:00 -05:00
pub ( crate ) fn switch_workspaces ( & mut self , cause : Cause ) {
2024-01-17 20:27:38 -06:00
let _p = tracing ::span! ( tracing ::Level ::INFO , " GlobalState::switch_workspaces " ) . entered ( ) ;
2022-04-18 01:26:00 -05:00
tracing ::info! ( % cause , " will switch workspaces " ) ;
2021-04-06 06:16:35 -05:00
2023-07-03 13:34:09 -05:00
let Some ( ( workspaces , force_reload_crate_graph ) ) =
self . fetch_workspaces_queue . last_op_result ( )
else {
return ;
} ;
2023-06-13 05:25:04 -05:00
2024-01-19 09:51:08 -06:00
if self . fetch_workspace_error ( ) . is_err ( ) & & ! self . workspaces . is_empty ( ) {
if * force_reload_crate_graph {
self . recreate_crate_graph ( cause ) ;
2021-04-06 06:16:35 -05:00
}
2024-01-19 09:51:08 -06:00
// It only makes sense to switch to a partially broken workspace
// if we don't have any workspace at all yet.
return ;
2021-04-06 06:16:35 -05:00
}
2020-07-10 08:27:34 -05:00
2022-11-11 07:36:27 -06:00
let workspaces =
workspaces . iter ( ) . filter_map ( | res | res . as_ref ( ) . ok ( ) . cloned ( ) ) . collect ::< Vec < _ > > ( ) ;
2020-06-25 16:44:58 -05:00
2023-03-29 14:29:32 -05:00
let same_workspaces = workspaces . len ( ) = = self . workspaces . len ( )
& & workspaces
. iter ( )
. zip ( self . workspaces . iter ( ) )
. all ( | ( l , r ) | l . eq_ignore_build_data ( r ) ) ;
2021-01-28 09:33:02 -06:00
2021-07-18 05:13:03 -05:00
if same_workspaces {
let ( workspaces , build_scripts ) = self . fetch_build_data_queue . last_op_result ( ) ;
2021-10-16 06:32:55 -05:00
if Arc ::ptr_eq ( workspaces , & self . workspaces ) {
2022-04-18 01:26:00 -05:00
tracing ::debug! ( " set build scripts to workspaces " ) ;
2021-07-18 05:13:03 -05:00
let workspaces = workspaces
. iter ( )
. cloned ( )
. zip ( build_scripts )
. map ( | ( mut ws , bs ) | {
ws . set_build_scripts ( bs . as_ref ( ) . ok ( ) . cloned ( ) . unwrap_or_default ( ) ) ;
ws
} )
. collect ::< Vec < _ > > ( ) ;
// Workspaces are the same, but we've updated build data.
self . workspaces = Arc ::new ( workspaces ) ;
} else {
2022-04-29 00:59:32 -05:00
tracing ::info! ( " build scripts do not match the version of the active workspace " ) ;
2023-06-13 05:25:04 -05:00
if * force_reload_crate_graph {
self . recreate_crate_graph ( cause ) ;
}
2024-02-20 08:55:17 -06:00
2021-07-18 05:13:03 -05:00
// Current build scripts do not match the version of the active
// workspace, so there's nothing for us to update.
return ;
}
} else {
2022-04-18 01:26:00 -05:00
tracing ::debug! ( " abandon build scripts for workspaces " ) ;
2021-07-18 05:13:03 -05:00
// Here, we completely changed the workspace (Cargo.toml edit), so
// we don't care about build-script results, they are stale.
2024-02-20 08:55:17 -06:00
// FIXME: can we abort the build scripts here if they are already running?
2023-01-27 06:49:28 -06:00
self . workspaces = Arc ::new ( workspaces ) ;
2024-01-04 03:19:25 -06:00
if self . config . run_build_scripts ( ) {
self . build_deps_changed = false ;
self . fetch_build_data_queue . request_op ( " workspace updated " . to_owned ( ) , ( ) ) ;
}
2020-07-10 08:27:34 -05:00
}
2021-01-06 04:54:28 -06:00
if let FilesWatcher ::Client = self . config . files ( ) . watcher {
2022-01-01 08:26:54 -06:00
let registration_options = lsp_types ::DidChangeWatchedFilesRegistrationOptions {
watchers : self
. workspaces
. iter ( )
. flat_map ( | ws | ws . to_roots ( ) )
. filter ( | it | it . is_local )
. flat_map ( | root | {
root . include . into_iter ( ) . flat_map ( | it | {
[
2023-06-19 07:01:47 -05:00
format! ( " {it} /**/*.rs " ) ,
format! ( " {it} /**/Cargo.toml " ) ,
format! ( " {it} /**/Cargo.lock " ) ,
2022-01-01 08:26:54 -06:00
]
2021-01-10 13:38:35 -06:00
} )
2022-01-01 08:26:54 -06:00
} )
2023-02-14 07:59:50 -06:00
. map ( | glob_pattern | lsp_types ::FileSystemWatcher {
glob_pattern : lsp_types ::GlobPattern ::String ( glob_pattern ) ,
kind : None ,
} )
2022-01-01 08:26:54 -06:00
. collect ( ) ,
} ;
let registration = lsp_types ::Registration {
2024-02-09 09:46:08 -06:00
id : " workspace/didChangeWatchedFiles " . to_owned ( ) ,
method : " workspace/didChangeWatchedFiles " . to_owned ( ) ,
2022-01-01 08:26:54 -06:00
register_options : Some ( serde_json ::to_value ( registration_options ) . unwrap ( ) ) ,
} ;
self . send_request ::< lsp_types ::request ::RegisterCapability > (
lsp_types ::RegistrationParams { registrations : vec ! [ registration ] } ,
| _ , _ | ( ) ,
) ;
2020-06-25 16:44:58 -05:00
}
2021-01-26 07:18:01 -06:00
let files_config = self . config . files ( ) ;
2021-07-18 05:13:03 -05:00
let project_folders = ProjectFolders ::new ( & self . workspaces , & files_config . exclude ) ;
2020-06-25 16:44:58 -05:00
2024-01-19 06:22:32 -06:00
if ( self . proc_macro_clients . is_empty ( ) | | ! same_workspaces )
& & self . config . expand_proc_macros ( )
{
tracing ::info! ( " Spawning proc-macro servers " ) ;
self . proc_macro_clients = Arc ::from_iter ( self . workspaces . iter ( ) . map ( | ws | {
let path = match self . config . proc_macro_srv ( ) {
Some ( path ) = > path ,
None = > ws . find_sysroot_proc_macro_srv ( ) ? ,
} ;
2024-02-14 08:13:45 -06:00
let env =
match ws {
ProjectWorkspace ::Cargo { cargo_config_extra_env , sysroot , .. } = > {
cargo_config_extra_env
. iter ( )
. chain ( self . config . extra_env ( ) )
. map ( | ( a , b ) | ( a . clone ( ) , b . clone ( ) ) )
. chain ( sysroot . as_ref ( ) . map ( | it | {
( " RUSTUP_TOOLCHAIN " . to_owned ( ) , it . root ( ) . to_string ( ) )
} ) )
. collect ( )
}
_ = > Default ::default ( ) ,
} ;
2024-01-19 06:22:32 -06:00
tracing ::info! ( " Using proc-macro server at {path} " ) ;
2024-02-13 12:42:03 -06:00
ProcMacroServer ::spawn ( path . clone ( ) , & env ) . map_err ( | err | {
2024-01-19 06:22:32 -06:00
tracing ::error! (
" Failed to run proc-macro server from path {path}, error: {err:?} " ,
) ;
anyhow ::format_err! (
" Failed to run proc-macro server from path {path}, error: {err:?} " ,
)
} )
} ) )
2021-02-01 14:55:17 -06:00
}
2020-07-10 16:39:25 -05:00
2021-01-26 07:18:01 -06:00
let watch = match files_config . watcher {
2020-06-25 16:44:58 -05:00
FilesWatcher ::Client = > vec! [ ] ,
2022-07-18 10:50:56 -05:00
FilesWatcher ::Server = > project_folders . watch ,
2020-06-25 16:44:58 -05:00
} ;
2021-02-12 08:58:29 -06:00
self . vfs_config_version + = 1 ;
self . loader . handle . set_config ( vfs ::loader ::Config {
load : project_folders . load ,
watch ,
version : self . vfs_config_version ,
} ) ;
2023-03-25 12:06:06 -05:00
self . source_root_config = project_folders . source_root_config ;
2020-06-25 16:44:58 -05:00
2023-06-13 05:25:04 -05:00
self . recreate_crate_graph ( cause ) ;
tracing ::info! ( " did switch workspaces " ) ;
}
fn recreate_crate_graph ( & mut self , cause : String ) {
2024-02-23 03:10:19 -06:00
// crate graph construction relies on these paths, record them so when one of them gets
// deleted or created we trigger a reconstruction of the crate graph
let mut crate_graph_file_dependencies = FxHashSet ::default ( ) ;
let ( crate_graph , proc_macro_paths , layouts , toolchains ) = {
2024-02-16 07:48:25 -06:00
// Create crate graph from all the workspaces
2020-06-25 16:44:58 -05:00
let vfs = & mut self . vfs . write ( ) . 0 ;
let loader = & mut self . loader ;
2023-06-13 05:25:04 -05:00
2024-02-16 08:47:25 -06:00
let load = | path : & AbsPath | {
2024-02-08 03:40:42 -06:00
let _p = tracing ::span! ( tracing ::Level ::DEBUG , " switch_workspaces::load " ) . entered ( ) ;
2020-09-18 14:15:44 -05:00
let vfs_path = vfs ::VfsPath ::from ( path . to_path_buf ( ) ) ;
2023-06-13 05:25:04 -05:00
crate_graph_file_dependencies . insert ( vfs_path . clone ( ) ) ;
2023-03-31 02:10:18 -05:00
match vfs . file_id ( & vfs_path ) {
Some ( file_id ) = > Some ( file_id ) ,
None = > {
2024-01-07 13:31:56 -06:00
// FIXME: Consider not loading this here?
let contents = loader . handle . load_sync ( path ) ;
vfs . set_file_contents ( vfs_path . clone ( ) , contents ) ;
2023-03-31 02:10:18 -05:00
vfs . file_id ( & vfs_path )
}
2020-11-17 05:17:24 -06:00
}
2020-06-25 16:44:58 -05:00
} ;
2021-08-22 05:32:00 -05:00
2024-02-23 03:10:19 -06:00
ws_to_crate_graph ( & self . workspaces , self . config . extra_env ( ) , load )
} ;
let mut change = Change ::new ( ) ;
if self . config . expand_proc_macros ( ) {
change . set_proc_macros (
crate_graph
. iter ( )
. map ( | id | ( id , Err ( " Proc-macros have not been built yet " . to_owned ( ) ) ) )
. collect ( ) ,
) ;
self . fetch_proc_macros_queue . request_op ( cause , proc_macro_paths ) ;
2023-03-26 01:39:28 -05:00
}
2024-02-23 03:10:19 -06:00
change . set_crate_graph ( crate_graph ) ;
change . set_target_data_layouts ( layouts ) ;
change . set_toolchains ( toolchains ) ;
self . analysis_host . apply_change ( change ) ;
self . crate_graph_file_dependencies = crate_graph_file_dependencies ;
2023-03-25 12:20:42 -05:00
self . process_changes ( ) ;
2020-06-25 16:44:58 -05:00
self . reload_flycheck ( ) ;
}
2023-03-10 02:49:37 -06:00
pub ( super ) fn fetch_workspace_error ( & self ) -> Result < ( ) , String > {
2021-04-06 10:08:05 -05:00
let mut buf = String ::new ( ) ;
2021-04-06 06:16:35 -05:00
2023-06-19 06:32:04 -05:00
let Some ( ( last_op_result , _ ) ) = self . fetch_workspaces_queue . last_op_result ( ) else {
2023-07-03 13:34:09 -05:00
return Ok ( ( ) ) ;
2023-06-19 06:32:04 -05:00
} ;
2022-11-11 06:00:22 -06:00
if last_op_result . is_empty ( ) {
stdx ::format_to! ( buf , " rust-analyzer failed to discover workspace " ) ;
} else {
for ws in last_op_result {
if let Err ( err ) = ws {
stdx ::format_to! ( buf , " rust-analyzer failed to load workspace: {:#} \n " , err ) ;
}
2021-04-06 06:16:35 -05:00
}
}
2021-04-06 10:08:05 -05:00
if buf . is_empty ( ) {
2022-04-11 07:38:30 -05:00
return Ok ( ( ) ) ;
2021-04-06 06:16:35 -05:00
}
2022-04-11 07:38:30 -05:00
Err ( buf )
2021-04-06 10:08:05 -05:00
}
2023-03-10 02:49:37 -06:00
pub ( super ) fn fetch_build_data_error ( & self ) -> Result < ( ) , String > {
2022-04-11 07:38:30 -05:00
let mut buf = String ::new ( ) ;
2021-07-18 03:29:22 -05:00
2021-07-18 05:13:03 -05:00
for ws in & self . fetch_build_data_queue . last_op_result ( ) . 1 {
2021-08-25 10:56:39 -05:00
match ws {
2024-01-19 11:31:15 -06:00
Ok ( data ) = > {
if let Some ( stderr ) = data . error ( ) {
stdx ::format_to! ( buf , " {:#} \n " , stderr )
}
}
2022-04-11 07:38:30 -05:00
// io errors
Err ( err ) = > stdx ::format_to! ( buf , " {:#} \n " , err ) ,
2021-04-06 10:08:05 -05:00
}
}
2021-07-18 03:29:22 -05:00
2022-04-11 07:38:30 -05:00
if buf . is_empty ( ) {
Ok ( ( ) )
2021-08-25 10:56:39 -05:00
} else {
2022-04-11 07:38:30 -05:00
Err ( buf )
2021-07-18 03:29:22 -05:00
}
2021-04-06 06:16:35 -05:00
}
2020-06-25 16:44:58 -05:00
fn reload_flycheck ( & mut self ) {
2024-01-17 20:27:38 -06:00
let _p = tracing ::span! ( tracing ::Level ::INFO , " GlobalState::reload_flycheck " ) . entered ( ) ;
2022-12-17 16:26:54 -06:00
let config = self . config . flycheck ( ) ;
2020-07-15 07:37:44 -05:00
let sender = self . flycheck_sender . clone ( ) ;
2022-10-22 16:02:59 -05:00
let invocation_strategy = match config {
FlycheckConfig ::CargoCommand { .. } = > flycheck ::InvocationStrategy ::PerWorkspace ,
FlycheckConfig ::CustomCommand { invocation_strategy , .. } = > invocation_strategy ,
} ;
2022-09-15 06:28:09 -05:00
self . flycheck = match invocation_strategy {
2022-10-19 16:34:36 -05:00
flycheck ::InvocationStrategy ::Once = > vec! [ FlycheckHandle ::spawn (
2022-09-15 06:28:09 -05:00
0 ,
Box ::new ( move | msg | sender . send ( msg ) . unwrap ( ) ) ,
2022-12-23 01:08:08 -06:00
config ,
2024-02-14 08:13:45 -06:00
None ,
2022-09-15 06:28:09 -05:00
self . config . root_path ( ) . clone ( ) ,
) ] ,
2022-09-26 08:58:55 -05:00
flycheck ::InvocationStrategy ::PerWorkspace = > {
2022-09-15 06:28:09 -05:00
self . workspaces
. iter ( )
. enumerate ( )
. filter_map ( | ( id , w ) | match w {
2024-02-12 05:08:18 -06:00
ProjectWorkspace ::Cargo { cargo , sysroot , .. } = > Some ( (
id ,
cargo . workspace_root ( ) ,
2024-02-14 08:13:45 -06:00
sysroot . as_ref ( ) . ok ( ) . map ( | sysroot | sysroot . root ( ) . to_owned ( ) ) ,
2024-02-12 05:08:18 -06:00
) ) ,
ProjectWorkspace ::Json { project , sysroot , .. } = > {
2022-09-15 06:28:09 -05:00
// Enable flychecks for json projects if a custom flycheck command was supplied
// in the workspace configuration.
match config {
2024-02-12 05:08:18 -06:00
FlycheckConfig ::CustomCommand { .. } = > Some ( (
id ,
project . path ( ) ,
2024-02-14 08:13:45 -06:00
sysroot . as_ref ( ) . ok ( ) . map ( | sysroot | sysroot . root ( ) . to_owned ( ) ) ,
2024-02-12 05:08:18 -06:00
) ) ,
2022-09-15 06:28:09 -05:00
_ = > None ,
}
}
ProjectWorkspace ::DetachedFiles { .. } = > None ,
} )
2024-02-14 08:13:45 -06:00
. map ( | ( id , root , sysroot_root ) | {
2022-09-15 06:28:09 -05:00
let sender = sender . clone ( ) ;
FlycheckHandle ::spawn (
id ,
Box ::new ( move | msg | sender . send ( msg ) . unwrap ( ) ) ,
config . clone ( ) ,
2024-02-14 08:13:45 -06:00
sysroot_root ,
2022-09-15 06:28:09 -05:00
root . to_path_buf ( ) ,
)
} )
. collect ( )
}
2022-10-20 12:28:28 -05:00
}
. into ( ) ;
2020-06-25 16:44:58 -05:00
}
}
2024-02-16 08:47:25 -06:00
// FIXME: Move this into load-cargo?
pub fn ws_to_crate_graph (
workspaces : & [ ProjectWorkspace ] ,
extra_env : & FxHashMap < String , String > ,
mut load : impl FnMut ( & AbsPath ) -> Option < vfs ::FileId > ,
) -> (
CrateGraph ,
Vec < FxHashMap < CrateId , Result < ( Option < String > , AbsPathBuf ) , String > > > ,
Vec < Result < Arc < str > , Arc < str > > > ,
Vec < Option < Version > > ,
) {
let mut crate_graph = CrateGraph ::default ( ) ;
let mut proc_macro_paths = Vec ::default ( ) ;
let mut layouts = Vec ::default ( ) ;
let mut toolchains = Vec ::default ( ) ;
let e = Err ( Arc ::from ( " missing layout " ) ) ;
for ws in workspaces {
let ( other , mut crate_proc_macros ) = ws . to_crate_graph ( & mut load , extra_env ) ;
let num_layouts = layouts . len ( ) ;
let num_toolchains = toolchains . len ( ) ;
let ( toolchain , layout ) = match ws {
ProjectWorkspace ::Cargo { toolchain , target_layout , .. }
| ProjectWorkspace ::Json { toolchain , target_layout , .. } = > {
( toolchain . clone ( ) , target_layout . clone ( ) )
}
ProjectWorkspace ::DetachedFiles { .. } = > {
( None , Err ( " detached files have no layout " . into ( ) ) )
}
} ;
let mapping = crate_graph . extend (
other ,
& mut crate_proc_macros ,
2024-02-16 09:28:17 -06:00
| ( cg_id , cg_data ) , ( _o_id , o_data ) | {
2024-02-16 08:47:25 -06:00
// if the newly created crate graph's layout is equal to the crate of the merged graph, then
// we can merge the crates.
let id = cg_id . into_raw ( ) . into_u32 ( ) as usize ;
2024-02-16 09:28:17 -06:00
layouts [ id ] = = layout & & toolchains [ id ] = = toolchain & & cg_data = = o_data
2024-02-16 08:47:25 -06:00
} ,
) ;
// Populate the side tables for the newly merged crates
mapping . values ( ) . for_each ( | val | {
let idx = val . into_raw ( ) . into_u32 ( ) as usize ;
// we only need to consider crates that were not merged and remapped, as the
// ones that were remapped already have the correct layout and toolchain
if idx > = num_layouts {
if layouts . len ( ) < = idx {
layouts . resize ( idx + 1 , e . clone ( ) ) ;
}
layouts [ idx ] = layout . clone ( ) ;
}
if idx > = num_toolchains {
if toolchains . len ( ) < = idx {
toolchains . resize ( idx + 1 , None ) ;
}
toolchains [ idx ] = toolchain . clone ( ) ;
}
} ) ;
proc_macro_paths . push ( crate_proc_macros ) ;
}
( crate_graph , proc_macro_paths , layouts , toolchains )
}
2021-09-13 12:58:09 -05:00
pub ( crate ) fn should_refresh_for_change ( path : & AbsPath , change_kind : ChangeKind ) -> bool {
const IMPLICIT_TARGET_FILES : & [ & str ] = & [ " build.rs " , " src/main.rs " , " src/lib.rs " ] ;
const IMPLICIT_TARGET_DIRS : & [ & str ] = & [ " src/bin " , " examples " , " tests " , " benches " ] ;
2022-08-05 05:06:31 -05:00
let file_name = match path . file_name ( ) . unwrap_or_default ( ) . to_str ( ) {
Some ( it ) = > it ,
None = > return false ,
} ;
if let " Cargo.toml " | " Cargo.lock " = file_name {
2021-09-13 12:58:09 -05:00
return true ;
}
if change_kind = = ChangeKind ::Modify {
return false ;
}
2022-08-05 05:06:31 -05:00
// .cargo/config{.toml}
2021-09-13 12:58:09 -05:00
if path . extension ( ) . unwrap_or_default ( ) ! = " rs " {
2022-08-05 05:06:31 -05:00
let is_cargo_config = matches! ( file_name , " config.toml " | " config " )
& & path . parent ( ) . map ( | parent | parent . as_ref ( ) . ends_with ( " .cargo " ) ) . unwrap_or ( false ) ;
return is_cargo_config ;
2021-09-13 12:58:09 -05:00
}
2022-08-05 05:06:31 -05:00
2021-09-13 12:58:09 -05:00
if IMPLICIT_TARGET_FILES . iter ( ) . any ( | it | path . as_ref ( ) . ends_with ( it ) ) {
return true ;
}
let parent = match path . parent ( ) {
Some ( it ) = > it ,
None = > return false ,
} ;
if IMPLICIT_TARGET_DIRS . iter ( ) . any ( | it | parent . as_ref ( ) . ends_with ( it ) ) {
return true ;
}
if file_name = = " main.rs " {
let grand_parent = match parent . parent ( ) {
Some ( it ) = > it ,
None = > return false ,
} ;
if IMPLICIT_TARGET_DIRS . iter ( ) . any ( | it | grand_parent . as_ref ( ) . ends_with ( it ) ) {
return true ;
}
}
false
}
2023-11-17 08:25:20 -06:00
/// Similar to [`str::eq_ignore_ascii_case`] but instead of ignoring
/// case, we say that `-` and `_` are equal.
fn eq_ignore_underscore ( s1 : & str , s2 : & str ) -> bool {
if s1 . len ( ) ! = s2 . len ( ) {
return false ;
}
s1 . as_bytes ( ) . iter ( ) . zip ( s2 . as_bytes ( ) ) . all ( | ( c1 , c2 ) | {
let c1_underscore = c1 = = & b '_' | | c1 = = & b '-' ;
let c2_underscore = c2 = = & b '_' | | c2 = = & b '-' ;
c1 = = c2 | | ( c1_underscore & & c2_underscore )
} )
}