2022-05-21 07:11:05 -05:00
//! Project loading & configuration updates.
//!
//! This is quite tricky. The main problem is time and changes -- there's no
//! fixed "project" rust-analyzer is working with, "current project" is itself
//! mutable state. For example, when the user edits `Cargo.toml` by adding a new
//! dependency, the project model changes. What's more, switching project model
//! is not instantaneous -- it takes time to run `cargo metadata` and (for proc
//! macros) `cargo check`.
//!
//! The main guiding principle here is, as elsewhere in rust-analyzer,
//! robustness. We try not to assume that the project model exists or is
//! correct. Instead, we try to provide a best-effort service. Even if the
//! project is currently loading and we don't have a full project model, we
//! still want to respond to various requests.
2023-03-25 12:06:06 -05:00
use std ::{ collections ::hash_map ::Entry , iter , mem , sync ::Arc } ;
2020-06-25 16:44:58 -05:00
2020-09-15 20:51:57 -05:00
use flycheck ::{ FlycheckConfig , FlycheckHandle } ;
2021-06-03 09:11:20 -05:00
use hir ::db ::DefDatabase ;
2020-10-02 08:45:09 -05:00
use ide ::Change ;
2023-02-28 05:08:23 -06:00
use ide_db ::{
base_db ::{
CrateGraph , Env , ProcMacro , ProcMacroExpander , ProcMacroExpansionError , ProcMacroKind ,
2023-03-25 12:06:06 -05:00
ProcMacroLoadResult , ProcMacroPaths , ProcMacros , SourceRoot , VfsPath ,
2023-02-28 05:08:23 -06:00
} ,
FxHashMap ,
2021-08-22 06:05:12 -05:00
} ;
2023-02-28 05:08:23 -06:00
use itertools ::Itertools ;
internal: cleanup proc macro server error handling
When dealing with proc macros, there are two very different kinds of
errors:
* first, usual errors of "proc macro panicked on this particular input"
* second, the proc macro server might die if the user, e.g., kills it
The first kind of errors are expected and are a normal output, while the
second kind are genuine IO-errors.
For this reason, we use a curious nested result here: the `Result<Result<T,
E1>, E2>` pattern, which is 100% inspired by http://sled.rs/errors.html
2021-08-31 11:01:39 -05:00
use proc_macro_api ::{ MacroDylib , ProcMacroServer } ;
2023-02-28 05:08:23 -06:00
use project_model ::{ PackageRoot , ProjectWorkspace , WorkspaceBuildScripts } ;
2022-01-04 13:40:16 -06:00
use syntax ::SmolStr ;
2020-07-10 11:48:39 -05:00
use vfs ::{ file_set ::FileSetConfig , AbsPath , AbsPathBuf , ChangeKind } ;
2020-06-25 16:44:58 -05:00
use crate ::{
config ::{ Config , FilesWatcher , LinkedProject } ,
2021-04-06 06:16:35 -05:00
global_state ::GlobalState ,
2020-07-10 08:27:34 -05:00
lsp_ext ,
2020-07-02 09:47:42 -05:00
main_loop ::Task ,
2022-04-16 07:16:58 -05:00
op_queue ::Cause ,
2020-06-25 16:44:58 -05:00
} ;
2023-01-31 04:49:49 -06:00
use ::tt ::token_id as tt ;
2021-01-07 11:08:46 -06:00
#[ derive(Debug) ]
pub ( crate ) enum ProjectWorkspaceProgress {
Begin ,
Report ( String ) ,
2021-01-10 09:02:02 -06:00
End ( Vec < anyhow ::Result < ProjectWorkspace > > ) ,
2021-01-07 11:08:46 -06:00
}
2021-01-28 09:33:02 -06:00
#[ derive(Debug) ]
pub ( crate ) enum BuildDataProgress {
Begin ,
Report ( String ) ,
2021-07-18 05:13:03 -05:00
End ( ( Arc < Vec < ProjectWorkspace > > , Vec < anyhow ::Result < WorkspaceBuildScripts > > ) ) ,
2021-01-28 09:33:02 -06:00
}
2023-03-25 12:06:06 -05:00
/// Progress events reported while proc-macro dylibs are being loaded
/// (see `GlobalState::fetch_proc_macros`).
#[derive(Debug)]
pub(crate) enum ProcMacroProgress {
    Begin,
    /// Free-form progress message forwarded to the client.
    Report(String),
    /// Loading finished with the resolved proc-macro map.
    End(ProcMacros),
}
2020-06-25 16:44:58 -05:00
impl GlobalState {
2021-04-06 06:16:35 -05:00
pub ( crate ) fn is_quiescent ( & self ) -> bool {
2023-03-10 03:26:02 -06:00
! ( self . last_reported_status . is_none ( )
| | self . fetch_workspaces_queue . op_in_progress ( )
2021-04-06 06:16:35 -05:00
| | self . fetch_build_data_queue . op_in_progress ( )
2023-03-26 01:39:28 -05:00
| | self . fetch_proc_macros_queue . op_in_progress ( )
2021-04-06 06:16:35 -05:00
| | self . vfs_progress_config_version < self . vfs_config_version
| | self . vfs_progress_n_done < self . vfs_progress_n_total )
}
2020-06-26 10:28:04 -05:00
pub ( crate ) fn update_configuration ( & mut self , config : Config ) {
2020-08-12 09:32:36 -05:00
let _p = profile ::span ( " GlobalState::update_configuration " ) ;
2021-01-06 06:46:31 -06:00
let old_config = mem ::replace ( & mut self . config , Arc ::new ( config ) ) ;
2023-03-25 17:03:22 -05:00
if self . config . lru_parse_query_capacity ( ) ! = old_config . lru_parse_query_capacity ( ) {
self . analysis_host . update_lru_capacity ( self . config . lru_parse_query_capacity ( ) ) ;
}
if self . config . lru_query_capacities ( ) ! = old_config . lru_query_capacities ( ) {
self . analysis_host . update_lru_capacities (
& self . config . lru_query_capacities ( ) . cloned ( ) . unwrap_or_default ( ) ,
) ;
2020-06-26 10:28:04 -05:00
}
2021-01-06 04:54:28 -06:00
if self . config . linked_projects ( ) ! = old_config . linked_projects ( ) {
2023-03-26 01:39:28 -05:00
self . fetch_workspaces_queue . request_op ( " linked projects changed " . to_string ( ) , ( ) )
2021-01-06 04:54:28 -06:00
} else if self . config . flycheck ( ) ! = old_config . flycheck ( ) {
2020-06-25 16:44:58 -05:00
self . reload_flycheck ( ) ;
}
2021-06-03 09:11:20 -05:00
2022-05-06 01:46:02 -05:00
if self . analysis_host . raw_database ( ) . enable_proc_attr_macros ( )
! = self . config . expand_proc_attr_macros ( )
{
self . analysis_host
. raw_database_mut ( )
. set_enable_proc_attr_macros ( self . config . expand_proc_attr_macros ( ) ) ;
}
2020-06-25 16:44:58 -05:00
}
2021-08-30 11:35:49 -05:00
pub ( crate ) fn current_status ( & self ) -> lsp_ext ::ServerStatusParams {
2021-04-06 06:16:35 -05:00
let mut status = lsp_ext ::ServerStatusParams {
health : lsp_ext ::Health ::Ok ,
quiescent : self . is_quiescent ( ) ,
message : None ,
} ;
2023-03-15 05:28:37 -05:00
let mut message = String ::new ( ) ;
2021-04-06 10:08:05 -05:00
2021-10-20 08:29:50 -05:00
if self . proc_macro_changed {
status . health = lsp_ext ::Health ::Warning ;
2023-03-26 01:39:28 -05:00
message . push_str ( " Proc-macros have changed and need to be rebuild. \n \n " ) ;
2021-10-20 08:29:50 -05:00
}
2022-04-11 07:38:30 -05:00
if let Err ( _ ) = self . fetch_build_data_error ( ) {
2021-04-06 10:08:05 -05:00
status . health = lsp_ext ::Health ::Warning ;
2023-03-15 05:28:37 -05:00
message . push_str ( " Failed to run build scripts of some packages. \n \n " ) ;
2021-04-06 10:08:05 -05:00
}
2021-04-06 06:16:35 -05:00
if ! self . config . cargo_autoreload ( )
& & self . is_quiescent ( )
& & self . fetch_workspaces_queue . op_requested ( )
{
status . health = lsp_ext ::Health ::Warning ;
2023-03-15 05:35:34 -05:00
message . push_str ( " Auto-reloading is disabled and the workspace has changed, a manual workspace reload is required. \n \n " ) ;
2021-04-06 06:16:35 -05:00
}
2022-11-11 06:00:22 -06:00
if self . config . linked_projects ( ) . is_empty ( )
& & self . config . detached_files ( ) . is_empty ( )
& & self . config . notifications ( ) . cargo_toml_not_found
{
status . health = lsp_ext ::Health ::Warning ;
2023-03-16 10:26:19 -05:00
message . push_str ( " Failed to discover workspace. \n " ) ;
message . push_str ( " Consider adding the `Cargo.toml` of the workspace to the [`linkedProjects`](https://rust-analyzer.github.io/manual.html#rust-analyzer.linkedProjects) setting. \n \n " ) ;
2022-11-11 06:00:22 -06:00
}
2023-03-10 02:49:37 -06:00
2023-03-15 05:35:34 -05:00
for ws in self . workspaces . iter ( ) {
let ( ProjectWorkspace ::Cargo { sysroot , .. }
| ProjectWorkspace ::Json { sysroot , .. }
| ProjectWorkspace ::DetachedFiles { sysroot , .. } ) = ws ;
if let Err ( Some ( e ) ) = sysroot {
status . health = lsp_ext ::Health ::Warning ;
message . push_str ( e ) ;
message . push_str ( " \n \n " ) ;
}
if let ProjectWorkspace ::Cargo { rustc : Err ( Some ( e ) ) , .. } = ws {
status . health = lsp_ext ::Health ::Warning ;
message . push_str ( e ) ;
message . push_str ( " \n \n " ) ;
}
}
2023-03-15 05:28:37 -05:00
if let Err ( _ ) = self . fetch_workspace_error ( ) {
status . health = lsp_ext ::Health ::Error ;
2023-03-15 05:35:34 -05:00
message . push_str ( " Failed to load workspaces. \n \n " ) ;
2023-03-15 05:28:37 -05:00
}
if ! message . is_empty ( ) {
status . message = Some ( message . trim_end ( ) . to_owned ( ) ) ;
}
2021-08-30 11:35:49 -05:00
status
2020-07-10 08:27:34 -05:00
}
2021-01-10 09:02:02 -06:00
2022-04-16 07:16:58 -05:00
pub ( crate ) fn fetch_workspaces ( & mut self , cause : Cause ) {
tracing ::info! ( % cause , " will fetch workspaces " ) ;
2021-01-07 11:08:46 -06:00
self . task_pool . handle . spawn_with_sender ( {
2021-01-06 04:54:28 -06:00
let linked_projects = self . config . linked_projects ( ) ;
2021-05-23 12:56:54 -05:00
let detached_files = self . config . detached_files ( ) . to_vec ( ) ;
2021-01-06 04:54:28 -06:00
let cargo_config = self . config . cargo ( ) ;
2021-01-07 11:08:46 -06:00
move | sender | {
let progress = {
let sender = sender . clone ( ) ;
move | msg | {
sender
. send ( Task ::FetchWorkspace ( ProjectWorkspaceProgress ::Report ( msg ) ) )
. unwrap ( )
}
} ;
sender . send ( Task ::FetchWorkspace ( ProjectWorkspaceProgress ::Begin ) ) . unwrap ( ) ;
2021-05-23 12:56:54 -05:00
let mut workspaces = linked_projects
2020-07-02 09:47:42 -05:00
. iter ( )
. map ( | project | match project {
LinkedProject ::ProjectManifest ( manifest ) = > {
2021-01-07 11:08:46 -06:00
project_model ::ProjectWorkspace ::load (
manifest . clone ( ) ,
& cargo_config ,
& progress ,
)
2020-07-02 09:47:42 -05:00
}
LinkedProject ::InlineJsonProject ( it ) = > {
2023-01-27 06:49:28 -06:00
Ok ( project_model ::ProjectWorkspace ::load_inline (
2021-01-18 05:52:12 -06:00
it . clone ( ) ,
cargo_config . target . as_deref ( ) ,
2022-09-19 10:31:08 -05:00
& cargo_config . extra_env ,
2023-01-27 06:49:28 -06:00
) )
2020-07-02 09:47:42 -05:00
}
2020-06-26 09:33:57 -05:00
} )
2020-07-02 09:47:42 -05:00
. collect ::< Vec < _ > > ( ) ;
2021-01-07 11:08:46 -06:00
2021-05-23 12:56:54 -05:00
if ! detached_files . is_empty ( ) {
2022-11-24 03:21:19 -06:00
workspaces . push ( project_model ::ProjectWorkspace ::load_detached_files (
detached_files ,
& cargo_config ,
) ) ;
2021-05-23 12:56:54 -05:00
}
2021-08-15 07:46:13 -05:00
tracing ::info! ( " did fetch workspaces {:?} " , workspaces ) ;
2021-01-10 09:02:02 -06:00
sender
. send ( Task ::FetchWorkspace ( ProjectWorkspaceProgress ::End ( workspaces ) ) )
. unwrap ( ) ;
2020-07-02 09:47:42 -05:00
}
} ) ;
}
2021-07-18 03:29:22 -05:00
2022-04-16 07:16:58 -05:00
pub ( crate ) fn fetch_build_data ( & mut self , cause : Cause ) {
2022-06-15 07:29:13 -05:00
tracing ::info! ( % cause , " will fetch build data " ) ;
2021-07-18 03:29:22 -05:00
let workspaces = Arc ::clone ( & self . workspaces ) ;
let config = self . config . cargo ( ) ;
2021-04-06 04:35:40 -05:00
self . task_pool . handle . spawn_with_sender ( move | sender | {
sender . send ( Task ::FetchBuildData ( BuildDataProgress ::Begin ) ) . unwrap ( ) ;
let progress = {
let sender = sender . clone ( ) ;
move | msg | {
sender . send ( Task ::FetchBuildData ( BuildDataProgress ::Report ( msg ) ) ) . unwrap ( )
}
} ;
2022-08-27 11:28:09 -05:00
let res = ProjectWorkspace ::run_all_build_scripts ( & workspaces , & config , & progress ) ;
2021-07-18 05:13:03 -05:00
sender . send ( Task ::FetchBuildData ( BuildDataProgress ::End ( ( workspaces , res ) ) ) ) . unwrap ( ) ;
2021-04-06 04:35:40 -05:00
} ) ;
}
2023-03-26 01:39:28 -05:00
pub ( crate ) fn fetch_proc_macros ( & mut self , cause : Cause , paths : Vec < ProcMacroPaths > ) {
tracing ::info! ( % cause , " will load proc macros " ) ;
2023-03-25 12:06:06 -05:00
let dummy_replacements = self . config . dummy_replacements ( ) . clone ( ) ;
let proc_macro_clients = self . proc_macro_clients . clone ( ) ;
self . task_pool . handle . spawn_with_sender ( move | sender | {
sender . send ( Task ::LoadProcMacros ( ProcMacroProgress ::Begin ) ) . unwrap ( ) ;
let dummy_replacements = & dummy_replacements ;
let progress = {
let sender = sender . clone ( ) ;
& move | msg | {
sender . send ( Task ::LoadProcMacros ( ProcMacroProgress ::Report ( msg ) ) ) . unwrap ( )
}
} ;
let mut res = FxHashMap ::default ( ) ;
2023-03-26 01:39:28 -05:00
let chain = proc_macro_clients
2023-03-25 12:06:06 -05:00
. iter ( )
. map ( | res | res . as_ref ( ) . map_err ( | e | & * * e ) )
2023-03-26 01:39:28 -05:00
. chain ( iter ::repeat_with ( | | Err ( " Proc macros servers are not running " ) ) ) ;
for ( client , paths ) in chain . zip ( paths ) {
2023-03-25 12:06:06 -05:00
res . extend ( paths . into_iter ( ) . map ( move | ( crate_id , res ) | {
(
crate_id ,
2023-03-26 01:39:28 -05:00
res . map_or_else (
2023-03-29 14:29:32 -05:00
| _ | Err ( " proc macro crate is missing dylib " . to_owned ( ) ) ,
2023-03-26 01:39:28 -05:00
| ( crate_name , path ) | {
progress ( path . display ( ) . to_string ( ) ) ;
load_proc_macro (
client ,
& path ,
crate_name
. as_deref ( )
. and_then ( | crate_name | {
dummy_replacements . get ( crate_name ) . map ( | v | & * * v )
} )
. unwrap_or_default ( ) ,
)
} ,
) ,
2023-03-25 12:06:06 -05:00
)
} ) ) ;
}
sender . send ( Task ::LoadProcMacros ( ProcMacroProgress ::End ( res ) ) ) . unwrap ( ) ;
} ) ;
}
/// Applies freshly loaded proc-macros to the analysis database.
pub(crate) fn set_proc_macros(&mut self, proc_macros: ProcMacros) {
    let mut change = Change::new();
    change.set_proc_macros(proc_macros);
    self.analysis_host.apply_change(change);
}
2022-04-18 01:26:00 -05:00
pub ( crate ) fn switch_workspaces ( & mut self , cause : Cause ) {
2020-08-12 09:32:36 -05:00
let _p = profile ::span ( " GlobalState::switch_workspaces " ) ;
2022-04-18 01:26:00 -05:00
tracing ::info! ( % cause , " will switch workspaces " ) ;
2021-04-06 06:16:35 -05:00
2023-03-10 02:49:37 -06:00
if let Err ( _ ) = self . fetch_workspace_error ( ) {
2021-04-06 06:16:35 -05:00
if ! self . workspaces . is_empty ( ) {
2021-07-18 05:13:03 -05:00
// It only makes sense to switch to a partially broken workspace
// if we don't have any workspace at all yet.
2021-04-06 06:16:35 -05:00
return ;
}
}
2020-07-10 08:27:34 -05:00
2022-11-11 07:36:27 -06:00
let Some ( workspaces ) = self . fetch_workspaces_queue . last_op_result ( ) else { return ; } ;
let workspaces =
workspaces . iter ( ) . filter_map ( | res | res . as_ref ( ) . ok ( ) . cloned ( ) ) . collect ::< Vec < _ > > ( ) ;
2020-06-25 16:44:58 -05:00
2023-03-29 14:29:32 -05:00
let same_workspaces = workspaces . len ( ) = = self . workspaces . len ( )
& & workspaces
. iter ( )
. zip ( self . workspaces . iter ( ) )
. all ( | ( l , r ) | l . eq_ignore_build_data ( r ) ) ;
2021-01-28 09:33:02 -06:00
2021-07-18 05:13:03 -05:00
if same_workspaces {
let ( workspaces , build_scripts ) = self . fetch_build_data_queue . last_op_result ( ) ;
2021-10-16 06:32:55 -05:00
if Arc ::ptr_eq ( workspaces , & self . workspaces ) {
2022-04-18 01:26:00 -05:00
tracing ::debug! ( " set build scripts to workspaces " ) ;
2021-07-18 05:13:03 -05:00
let workspaces = workspaces
. iter ( )
. cloned ( )
. zip ( build_scripts )
. map ( | ( mut ws , bs ) | {
ws . set_build_scripts ( bs . as_ref ( ) . ok ( ) . cloned ( ) . unwrap_or_default ( ) ) ;
ws
} )
. collect ::< Vec < _ > > ( ) ;
// Workspaces are the same, but we've updated build data.
self . workspaces = Arc ::new ( workspaces ) ;
} else {
2022-04-29 00:59:32 -05:00
tracing ::info! ( " build scripts do not match the version of the active workspace " ) ;
2021-07-18 05:13:03 -05:00
// Current build scripts do not match the version of the active
// workspace, so there's nothing for us to update.
return ;
}
} else {
2022-04-18 01:26:00 -05:00
tracing ::debug! ( " abandon build scripts for workspaces " ) ;
2021-07-18 05:13:03 -05:00
// Here, we completely changed the workspace (Cargo.toml edit), so
// we don't care about build-script results, they are stale.
2023-01-27 06:49:28 -06:00
// FIXME: can we abort the build scripts here?
self . workspaces = Arc ::new ( workspaces ) ;
2020-07-10 08:27:34 -05:00
}
2021-01-06 04:54:28 -06:00
if let FilesWatcher ::Client = self . config . files ( ) . watcher {
2022-01-01 08:26:54 -06:00
let registration_options = lsp_types ::DidChangeWatchedFilesRegistrationOptions {
watchers : self
. workspaces
. iter ( )
. flat_map ( | ws | ws . to_roots ( ) )
. filter ( | it | it . is_local )
. flat_map ( | root | {
root . include . into_iter ( ) . flat_map ( | it | {
[
format! ( " {} /**/*.rs " , it . display ( ) ) ,
format! ( " {} /**/Cargo.toml " , it . display ( ) ) ,
format! ( " {} /**/Cargo.lock " , it . display ( ) ) ,
]
2021-01-10 13:38:35 -06:00
} )
2022-01-01 08:26:54 -06:00
} )
2023-02-14 07:59:50 -06:00
. map ( | glob_pattern | lsp_types ::FileSystemWatcher {
glob_pattern : lsp_types ::GlobPattern ::String ( glob_pattern ) ,
kind : None ,
} )
2022-01-01 08:26:54 -06:00
. collect ( ) ,
} ;
let registration = lsp_types ::Registration {
id : " workspace/didChangeWatchedFiles " . to_string ( ) ,
method : " workspace/didChangeWatchedFiles " . to_string ( ) ,
register_options : Some ( serde_json ::to_value ( registration_options ) . unwrap ( ) ) ,
} ;
self . send_request ::< lsp_types ::request ::RegisterCapability > (
lsp_types ::RegistrationParams { registrations : vec ! [ registration ] } ,
| _ , _ | ( ) ,
) ;
2020-06-25 16:44:58 -05:00
}
2021-01-26 07:18:01 -06:00
let files_config = self . config . files ( ) ;
2021-07-18 05:13:03 -05:00
let project_folders = ProjectFolders ::new ( & self . workspaces , & files_config . exclude ) ;
2020-06-25 16:44:58 -05:00
2023-03-29 14:29:32 -05:00
if self . proc_macro_clients . is_empty ( ) | | ! same_workspaces {
2022-10-01 12:50:34 -05:00
if let Some ( ( path , path_manually_set ) ) = self . config . proc_macro_srv ( ) {
2022-08-05 05:06:31 -05:00
tracing ::info! ( " Spawning proc-macro servers " ) ;
2023-03-28 08:56:01 -05:00
self . proc_macro_clients = self
. workspaces
2022-07-25 09:07:41 -05:00
. iter ( )
. map ( | ws | {
2022-11-18 16:32:26 -06:00
let ( path , args ) : ( _ , & [ _ ] ) = if path_manually_set {
2022-10-01 12:50:34 -05:00
tracing ::debug! (
" Pro-macro server path explicitly set: {} " ,
path . display ( )
) ;
2022-11-18 16:32:26 -06:00
( path . clone ( ) , & [ ] )
2022-10-01 12:50:34 -05:00
} else {
2022-11-18 16:32:26 -06:00
match ws . find_sysroot_proc_macro_srv ( ) {
Some ( server_path ) = > ( server_path , & [ ] ) ,
None = > ( path . clone ( ) , & [ " proc-macro " ] ) ,
2022-07-25 09:07:41 -05:00
}
2022-10-01 12:50:34 -05:00
} ;
2022-07-25 09:07:41 -05:00
2022-08-05 05:06:31 -05:00
tracing ::info! ( ? args , " Using proc-macro server at {} " , path . display ( ) , ) ;
2022-10-01 12:50:34 -05:00
ProcMacroServer ::spawn ( path . clone ( ) , args ) . map_err ( | err | {
2022-07-23 13:24:01 -05:00
let error = format! (
2022-08-05 05:06:31 -05:00
" Failed to run proc-macro server from path {}, error: {:?} " ,
2022-07-23 13:10:10 -05:00
path . display ( ) ,
err
) ;
2022-07-23 13:24:01 -05:00
tracing ::error! ( error ) ;
error
} )
2022-07-23 13:10:10 -05:00
} )
2022-08-28 05:31:31 -05:00
. collect ( )
} ;
2021-02-01 14:55:17 -06:00
}
2020-07-10 16:39:25 -05:00
2021-01-26 07:18:01 -06:00
let watch = match files_config . watcher {
2020-06-25 16:44:58 -05:00
FilesWatcher ::Client = > vec! [ ] ,
2022-07-18 10:50:56 -05:00
FilesWatcher ::Server = > project_folders . watch ,
2020-06-25 16:44:58 -05:00
} ;
2021-02-12 08:58:29 -06:00
self . vfs_config_version + = 1 ;
self . loader . handle . set_config ( vfs ::loader ::Config {
load : project_folders . load ,
watch ,
version : self . vfs_config_version ,
} ) ;
2023-03-25 12:06:06 -05:00
self . source_root_config = project_folders . source_root_config ;
2020-06-25 16:44:58 -05:00
// Create crate graph from all the workspaces
2023-03-25 12:06:06 -05:00
let ( crate_graph , proc_macro_paths ) = {
2020-06-25 16:44:58 -05:00
let vfs = & mut self . vfs . write ( ) . 0 ;
let loader = & mut self . loader ;
2020-09-18 14:15:44 -05:00
let mem_docs = & self . mem_docs ;
2021-08-22 05:32:00 -05:00
let mut load = move | path : & AbsPath | {
2023-01-25 07:46:06 -06:00
let _p = profile ::span ( " switch_workspaces::load " ) ;
2020-09-18 14:15:44 -05:00
let vfs_path = vfs ::VfsPath ::from ( path . to_path_buf ( ) ) ;
2021-07-26 12:16:47 -05:00
if ! mem_docs . contains ( & vfs_path ) {
2020-09-18 14:15:44 -05:00
let contents = loader . handle . load_sync ( path ) ;
vfs . set_file_contents ( vfs_path . clone ( ) , contents ) ;
}
2020-11-17 05:17:24 -06:00
let res = vfs . file_id ( & vfs_path ) ;
if res . is_none ( ) {
2021-08-15 07:46:13 -05:00
tracing ::warn! ( " failed to load {} " , path . display ( ) )
2020-11-17 05:17:24 -06:00
}
res
2020-06-25 16:44:58 -05:00
} ;
2021-08-22 05:32:00 -05:00
let mut crate_graph = CrateGraph ::default ( ) ;
2023-03-25 12:06:06 -05:00
let mut proc_macros = Vec ::default ( ) ;
for ws in & * * self . workspaces {
let ( other , mut crate_proc_macros ) =
ws . to_crate_graph ( & mut load , & self . config . cargo ( ) . extra_env ) ;
crate_graph . extend ( other , & mut crate_proc_macros ) ;
proc_macros . push ( crate_proc_macros ) ;
2020-06-25 16:44:58 -05:00
}
2023-03-25 10:42:52 -05:00
( crate_graph , proc_macros )
2020-06-25 16:44:58 -05:00
} ;
2023-03-25 12:06:06 -05:00
let mut change = Change ::new ( ) ;
2023-03-26 01:39:28 -05:00
2023-03-29 14:29:32 -05:00
if self . config . expand_proc_macros ( ) {
self . fetch_proc_macros_queue . request_op ( cause , proc_macro_paths ) ;
2023-03-26 01:39:28 -05:00
}
2020-06-25 16:44:58 -05:00
change . set_crate_graph ( crate_graph ) ;
self . analysis_host . apply_change ( change ) ;
2023-03-25 12:20:42 -05:00
self . process_changes ( ) ;
2023-03-25 12:06:06 -05:00
2020-06-25 16:44:58 -05:00
self . reload_flycheck ( ) ;
2023-03-25 12:20:42 -05:00
2021-08-15 07:46:13 -05:00
tracing ::info! ( " did switch workspaces " ) ;
2020-06-25 16:44:58 -05:00
}
2023-03-10 02:49:37 -06:00
pub ( super ) fn fetch_workspace_error ( & self ) -> Result < ( ) , String > {
2021-04-06 10:08:05 -05:00
let mut buf = String ::new ( ) ;
2021-04-06 06:16:35 -05:00
2022-11-11 07:36:27 -06:00
let Some ( last_op_result ) = self . fetch_workspaces_queue . last_op_result ( ) else { return Ok ( ( ) ) } ;
2022-11-11 06:00:22 -06:00
if last_op_result . is_empty ( ) {
stdx ::format_to! ( buf , " rust-analyzer failed to discover workspace " ) ;
} else {
for ws in last_op_result {
if let Err ( err ) = ws {
stdx ::format_to! ( buf , " rust-analyzer failed to load workspace: {:#} \n " , err ) ;
}
2021-04-06 06:16:35 -05:00
}
}
2021-04-06 10:08:05 -05:00
if buf . is_empty ( ) {
2022-04-11 07:38:30 -05:00
return Ok ( ( ) ) ;
2021-04-06 06:16:35 -05:00
}
2022-04-11 07:38:30 -05:00
Err ( buf )
2021-04-06 10:08:05 -05:00
}
2023-03-10 02:49:37 -06:00
pub ( super ) fn fetch_build_data_error ( & self ) -> Result < ( ) , String > {
2022-04-11 07:38:30 -05:00
let mut buf = String ::new ( ) ;
2021-07-18 03:29:22 -05:00
2021-07-18 05:13:03 -05:00
for ws in & self . fetch_build_data_queue . last_op_result ( ) . 1 {
2021-08-25 10:56:39 -05:00
match ws {
2022-04-11 07:38:30 -05:00
Ok ( data ) = > match data . error ( ) {
Some ( stderr ) = > stdx ::format_to! ( buf , " {:#} \n " , stderr ) ,
_ = > ( ) ,
} ,
// io errors
Err ( err ) = > stdx ::format_to! ( buf , " {:#} \n " , err ) ,
2021-04-06 10:08:05 -05:00
}
}
2021-07-18 03:29:22 -05:00
2022-04-11 07:38:30 -05:00
if buf . is_empty ( ) {
Ok ( ( ) )
2021-08-25 10:56:39 -05:00
} else {
2022-04-11 07:38:30 -05:00
Err ( buf )
2021-07-18 03:29:22 -05:00
}
2021-04-06 06:16:35 -05:00
}
2020-06-25 16:44:58 -05:00
fn reload_flycheck ( & mut self ) {
2021-01-18 04:25:57 -06:00
let _p = profile ::span ( " GlobalState::reload_flycheck " ) ;
2022-12-17 16:26:54 -06:00
let config = self . config . flycheck ( ) ;
2020-07-15 07:37:44 -05:00
let sender = self . flycheck_sender . clone ( ) ;
2022-10-22 16:02:59 -05:00
let invocation_strategy = match config {
FlycheckConfig ::CargoCommand { .. } = > flycheck ::InvocationStrategy ::PerWorkspace ,
FlycheckConfig ::CustomCommand { invocation_strategy , .. } = > invocation_strategy ,
} ;
2022-09-15 06:28:09 -05:00
self . flycheck = match invocation_strategy {
2022-10-19 16:34:36 -05:00
flycheck ::InvocationStrategy ::Once = > vec! [ FlycheckHandle ::spawn (
2022-09-15 06:28:09 -05:00
0 ,
Box ::new ( move | msg | sender . send ( msg ) . unwrap ( ) ) ,
2022-12-23 01:08:08 -06:00
config ,
2022-09-15 06:28:09 -05:00
self . config . root_path ( ) . clone ( ) ,
) ] ,
2022-09-26 08:58:55 -05:00
flycheck ::InvocationStrategy ::PerWorkspace = > {
2022-09-15 06:28:09 -05:00
self . workspaces
. iter ( )
. enumerate ( )
. filter_map ( | ( id , w ) | match w {
ProjectWorkspace ::Cargo { cargo , .. } = > Some ( ( id , cargo . workspace_root ( ) ) ) ,
ProjectWorkspace ::Json { project , .. } = > {
// Enable flychecks for json projects if a custom flycheck command was supplied
// in the workspace configuration.
match config {
FlycheckConfig ::CustomCommand { .. } = > Some ( ( id , project . path ( ) ) ) ,
_ = > None ,
}
}
ProjectWorkspace ::DetachedFiles { .. } = > None ,
} )
. map ( | ( id , root ) | {
let sender = sender . clone ( ) ;
FlycheckHandle ::spawn (
id ,
Box ::new ( move | msg | sender . send ( msg ) . unwrap ( ) ) ,
config . clone ( ) ,
root . to_path_buf ( ) ,
)
} )
. collect ( )
}
2022-10-20 12:28:28 -05:00
}
. into ( ) ;
2020-06-25 16:44:58 -05:00
}
}
/// The VFS configuration derived from a set of workspaces: which directory
/// entries to load, which of them to watch, and how files partition into
/// source roots.
#[derive(Default)]
pub(crate) struct ProjectFolders {
    pub(crate) load: Vec<vfs::loader::Entry>,
    // Indices into `load` that should be watched for changes.
    pub(crate) watch: Vec<usize>,
    pub(crate) source_root_config: SourceRootConfig,
}
impl ProjectFolders {
2021-01-26 07:18:01 -06:00
pub ( crate ) fn new (
workspaces : & [ ProjectWorkspace ] ,
global_excludes : & [ AbsPathBuf ] ,
) -> ProjectFolders {
2020-06-25 16:44:58 -05:00
let mut res = ProjectFolders ::default ( ) ;
let mut fsc = FileSetConfig ::builder ( ) ;
let mut local_filesets = vec! [ ] ;
2023-02-28 05:08:23 -06:00
// Dedup source roots
// Depending on the project setup, we can have duplicated source roots, or for example in
// the case of the rustc workspace, we can end up with two source roots that are almost the
// same but not quite, like:
// PackageRoot { is_local: false, include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri")], exclude: [] }
// PackageRoot {
// is_local: true,
// include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri"), AbsPathBuf(".../rust/build/x86_64-pc-windows-msvc/stage0-tools/x86_64-pc-windows-msvc/release/build/cargo-miri-85801cd3d2d1dae4/out")],
// exclude: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri/.git"), AbsPathBuf(".../rust/src/tools/miri/cargo-miri/target")]
// }
//
// The first one comes from the explicit rustc workspace which points to the rustc workspace itself
// The second comes from the rustc workspace that we load as the actual project workspace
// These `is_local` differing in this kind of way gives us problems, especially when trying to filter diagnostics as we don't report diagnostics for external libraries.
// So we need to deduplicate these, usually it would be enough to deduplicate by `include`, but as the rustc example shows here that doesn't work,
// so we need to also coalesce the includes if they overlap.
let mut roots : Vec < _ > = workspaces
. iter ( )
. flat_map ( | ws | ws . to_roots ( ) )
. update ( | root | root . include . sort ( ) )
. sorted_by ( | a , b | a . include . cmp ( & b . include ) )
. collect ( ) ;
// map that tracks indices of overlapping roots
let mut overlap_map = FxHashMap ::< _ , Vec < _ > > ::default ( ) ;
let mut done = false ;
while ! mem ::replace ( & mut done , true ) {
// maps include paths to indices of the corresponding root
let mut include_to_idx = FxHashMap ::default ( ) ;
// Find and note down the indices of overlapping roots
2023-03-10 19:01:21 -06:00
for ( idx , root ) in roots . iter ( ) . enumerate ( ) . filter ( | ( _ , it ) | ! it . include . is_empty ( ) ) {
2023-02-28 05:08:23 -06:00
for include in & root . include {
match include_to_idx . entry ( include ) {
Entry ::Occupied ( e ) = > {
overlap_map . entry ( * e . get ( ) ) . or_default ( ) . push ( idx ) ;
}
Entry ::Vacant ( e ) = > {
e . insert ( idx ) ;
}
}
}
}
for ( k , v ) in overlap_map . drain ( ) {
done = false ;
for v in v {
let r = mem ::replace (
& mut roots [ v ] ,
PackageRoot { is_local : false , include : vec ! [ ] , exclude : vec ! [ ] } ,
) ;
roots [ k ] . is_local | = r . is_local ;
roots [ k ] . include . extend ( r . include ) ;
roots [ k ] . exclude . extend ( r . exclude ) ;
}
roots [ k ] . include . sort ( ) ;
roots [ k ] . exclude . sort ( ) ;
roots [ k ] . include . dedup ( ) ;
roots [ k ] . exclude . dedup ( ) ;
}
}
for root in roots . into_iter ( ) . filter ( | it | ! it . include . is_empty ( ) ) {
2020-07-21 05:52:51 -05:00
let file_set_roots : Vec < VfsPath > =
root . include . iter ( ) . cloned ( ) . map ( VfsPath ::from ) . collect ( ) ;
2020-06-25 16:44:58 -05:00
2020-07-21 05:52:51 -05:00
let entry = {
let mut dirs = vfs ::loader ::Directories ::default ( ) ;
dirs . extensions . push ( " rs " . into ( ) ) ;
dirs . include . extend ( root . include ) ;
dirs . exclude . extend ( root . exclude ) ;
2021-01-26 07:18:01 -06:00
for excl in global_excludes {
2021-05-25 10:07:01 -05:00
if dirs
. include
. iter ( )
. any ( | incl | incl . starts_with ( excl ) | | excl . starts_with ( incl ) )
{
2021-01-26 07:18:01 -06:00
dirs . exclude . push ( excl . clone ( ) ) ;
}
}
2020-07-21 05:52:51 -05:00
vfs ::loader ::Entry ::Directories ( dirs )
2020-06-25 16:44:58 -05:00
} ;
2021-09-07 10:29:58 -05:00
if root . is_local {
2020-07-21 05:52:51 -05:00
res . watch . push ( res . load . len ( ) ) ;
2020-06-25 16:44:58 -05:00
}
2020-07-21 05:52:51 -05:00
res . load . push ( entry ) ;
2020-06-25 16:44:58 -05:00
2021-09-07 10:29:58 -05:00
if root . is_local {
2020-06-25 16:44:58 -05:00
local_filesets . push ( fsc . len ( ) ) ;
}
fsc . add_file_set ( file_set_roots )
}
let fsc = fsc . build ( ) ;
res . source_root_config = SourceRootConfig { fsc , local_filesets } ;
res
}
}
/// How VFS files partition into source roots, and which of those roots are
/// local (workspace members) as opposed to library code.
#[derive(Default, Debug)]
pub(crate) struct SourceRootConfig {
    pub(crate) fsc: FileSetConfig,
    // Indices of file sets that belong to the local workspace.
    pub(crate) local_filesets: Vec<usize>,
}
impl SourceRootConfig {
pub ( crate ) fn partition ( & self , vfs : & vfs ::Vfs ) -> Vec < SourceRoot > {
2020-08-12 09:32:36 -05:00
let _p = profile ::span ( " SourceRootConfig::partition " ) ;
2020-06-25 16:44:58 -05:00
self . fsc
. partition ( vfs )
. into_iter ( )
. enumerate ( )
. map ( | ( idx , file_set ) | {
let is_local = self . local_filesets . contains ( & idx ) ;
if is_local {
SourceRoot ::new_local ( file_set )
} else {
SourceRoot ::new_library ( file_set )
}
} )
. collect ( )
}
}
2021-08-22 06:05:12 -05:00
2022-01-05 12:35:48 -06:00
/// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace`
/// with an identity dummy expander.
2022-01-04 13:40:16 -06:00
pub ( crate ) fn load_proc_macro (
2022-07-26 09:30:45 -05:00
server : Result < & ProcMacroServer , & str > ,
2022-01-04 13:40:16 -06:00
path : & AbsPath ,
dummy_replace : & [ Box < str > ] ,
2022-06-15 10:33:55 -05:00
) -> ProcMacroLoadResult {
2023-01-25 07:46:06 -06:00
let server = server . map_err ( ToOwned ::to_owned ) ? ;
2022-06-28 03:41:10 -05:00
let res : Result < Vec < _ > , String > = ( | | {
2023-03-25 09:43:58 -05:00
let dylib = MacroDylib ::new ( path . to_path_buf ( ) ) ;
2022-06-28 03:41:10 -05:00
let vec = server . load_dylib ( dylib ) . map_err ( | e | format! ( " {e} " ) ) ? ;
if vec . is_empty ( ) {
return Err ( " proc macro library returned no proc macros " . to_string ( ) ) ;
}
Ok ( vec
. into_iter ( )
. map ( | expander | expander_to_proc_macro ( expander , dummy_replace ) )
. collect ( ) )
2022-06-15 10:33:55 -05:00
} ) ( ) ;
return match res {
Ok ( proc_macros ) = > {
tracing ::info! (
" Loaded proc-macros for {}: {:?} " ,
path . display ( ) ,
proc_macros . iter ( ) . map ( | it | it . name . clone ( ) ) . collect ::< Vec < _ > > ( )
) ;
Ok ( proc_macros )
}
Err ( e ) = > {
tracing ::warn! ( " proc-macro loading for {} failed: {e} " , path . display ( ) ) ;
Err ( e )
internal: cleanup proc macro server error handlig
When dealing with proc macros, there are two very different kinds of
errors:
* first, usual errors of "proc macro panicked on this particular input"
* second, the proc macro server might day if the user, eg, kills it
First kind of errors are expected and are a normal output, while the
second kind are genuine IO-errors.
For this reason, we use a curious nested result here: `Result<Result<T,
E1>, E2>` pattern, which is 100% inspired by http://sled.rs/errors.html
2021-08-31 11:01:39 -05:00
}
} ;
2022-01-04 13:40:16 -06:00
fn expander_to_proc_macro (
expander : proc_macro_api ::ProcMacro ,
dummy_replace : & [ Box < str > ] ,
) -> ProcMacro {
let name = SmolStr ::from ( expander . name ( ) ) ;
2021-08-22 06:05:12 -05:00
let kind = match expander . kind ( ) {
proc_macro_api ::ProcMacroKind ::CustomDerive = > ProcMacroKind ::CustomDerive ,
proc_macro_api ::ProcMacroKind ::FuncLike = > ProcMacroKind ::FuncLike ,
proc_macro_api ::ProcMacroKind ::Attr = > ProcMacroKind ::Attr ,
} ;
2022-01-04 13:40:16 -06:00
let expander : Arc < dyn ProcMacroExpander > =
if dummy_replace . iter ( ) . any ( | replace | & * * replace = = name ) {
2022-08-03 11:10:15 -05:00
match kind {
ProcMacroKind ::Attr = > Arc ::new ( IdentityExpander ) ,
_ = > Arc ::new ( EmptyExpander ) ,
}
2022-01-04 13:40:16 -06:00
} else {
Arc ::new ( Expander ( expander ) )
} ;
2021-08-22 06:05:12 -05:00
ProcMacro { name , kind , expander }
}
#[ derive(Debug) ]
2021-08-31 07:44:43 -05:00
struct Expander ( proc_macro_api ::ProcMacro ) ;
2021-08-22 06:05:12 -05:00
impl ProcMacroExpander for Expander {
fn expand (
& self ,
subtree : & tt ::Subtree ,
attrs : Option < & tt ::Subtree > ,
env : & Env ,
2021-08-31 11:14:33 -05:00
) -> Result < tt ::Subtree , ProcMacroExpansionError > {
2021-08-22 06:05:12 -05:00
let env = env . iter ( ) . map ( | ( k , v ) | ( k . to_string ( ) , v . to_string ( ) ) ) . collect ( ) ;
internal: cleanup proc macro server error handlig
When dealing with proc macros, there are two very different kinds of
errors:
* first, usual errors of "proc macro panicked on this particular input"
* second, the proc macro server might day if the user, eg, kills it
First kind of errors are expected and are a normal output, while the
second kind are genuine IO-errors.
For this reason, we use a curious nested result here: `Result<Result<T,
E1>, E2>` pattern, which is 100% inspired by http://sled.rs/errors.html
2021-08-31 11:01:39 -05:00
match self . 0. expand ( subtree , attrs , env ) {
Ok ( Ok ( subtree ) ) = > Ok ( subtree ) ,
2021-08-31 11:14:33 -05:00
Ok ( Err ( err ) ) = > Err ( ProcMacroExpansionError ::Panic ( err . 0 ) ) ,
Err ( err ) = > Err ( ProcMacroExpansionError ::System ( err . to_string ( ) ) ) ,
internal: cleanup proc macro server error handlig
When dealing with proc macros, there are two very different kinds of
errors:
* first, usual errors of "proc macro panicked on this particular input"
* second, the proc macro server might day if the user, eg, kills it
First kind of errors are expected and are a normal output, while the
second kind are genuine IO-errors.
For this reason, we use a curious nested result here: `Result<Result<T,
E1>, E2>` pattern, which is 100% inspired by http://sled.rs/errors.html
2021-08-31 11:01:39 -05:00
}
2021-08-22 06:05:12 -05:00
}
}
2022-01-04 13:40:16 -06:00
2022-08-03 11:10:15 -05:00
/// Dummy identity expander, used for attribute proc-macros that are deliberately ignored by the user.
2022-01-04 13:40:16 -06:00
#[ derive(Debug) ]
2022-08-03 11:10:15 -05:00
struct IdentityExpander ;
2022-01-04 13:40:16 -06:00
2022-08-03 11:10:15 -05:00
impl ProcMacroExpander for IdentityExpander {
2022-01-04 13:40:16 -06:00
fn expand (
& self ,
subtree : & tt ::Subtree ,
_ : Option < & tt ::Subtree > ,
_ : & Env ,
) -> Result < tt ::Subtree , ProcMacroExpansionError > {
Ok ( subtree . clone ( ) )
}
}
2022-08-03 11:10:15 -05:00
/// Empty expander, used for proc-macros that are deliberately ignored by the user.
#[ derive(Debug) ]
struct EmptyExpander ;
impl ProcMacroExpander for EmptyExpander {
fn expand (
& self ,
_ : & tt ::Subtree ,
_ : Option < & tt ::Subtree > ,
_ : & Env ,
) -> Result < tt ::Subtree , ProcMacroExpansionError > {
2023-01-31 04:49:49 -06:00
Ok ( tt ::Subtree ::empty ( ) )
2022-08-03 11:10:15 -05:00
}
}
2021-08-22 06:05:12 -05:00
}
2021-09-13 12:58:09 -05:00
pub ( crate ) fn should_refresh_for_change ( path : & AbsPath , change_kind : ChangeKind ) -> bool {
const IMPLICIT_TARGET_FILES : & [ & str ] = & [ " build.rs " , " src/main.rs " , " src/lib.rs " ] ;
const IMPLICIT_TARGET_DIRS : & [ & str ] = & [ " src/bin " , " examples " , " tests " , " benches " ] ;
2022-08-05 05:06:31 -05:00
let file_name = match path . file_name ( ) . unwrap_or_default ( ) . to_str ( ) {
Some ( it ) = > it ,
None = > return false ,
} ;
if let " Cargo.toml " | " Cargo.lock " = file_name {
2021-09-13 12:58:09 -05:00
return true ;
}
if change_kind = = ChangeKind ::Modify {
return false ;
}
2022-08-05 05:06:31 -05:00
// .cargo/config{.toml}
2021-09-13 12:58:09 -05:00
if path . extension ( ) . unwrap_or_default ( ) ! = " rs " {
2022-08-05 05:06:31 -05:00
let is_cargo_config = matches! ( file_name , " config.toml " | " config " )
& & path . parent ( ) . map ( | parent | parent . as_ref ( ) . ends_with ( " .cargo " ) ) . unwrap_or ( false ) ;
return is_cargo_config ;
2021-09-13 12:58:09 -05:00
}
2022-08-05 05:06:31 -05:00
2021-09-13 12:58:09 -05:00
if IMPLICIT_TARGET_FILES . iter ( ) . any ( | it | path . as_ref ( ) . ends_with ( it ) ) {
return true ;
}
let parent = match path . parent ( ) {
Some ( it ) = > it ,
None = > return false ,
} ;
if IMPLICIT_TARGET_DIRS . iter ( ) . any ( | it | parent . as_ref ( ) . ends_with ( it ) ) {
return true ;
}
if file_name = = " main.rs " {
let grand_parent = match parent . parent ( ) {
Some ( it ) = > it ,
None = > return false ,
} ;
if IMPLICIT_TARGET_DIRS . iter ( ) . any ( | it | grand_parent . as_ref ( ) . ends_with ( it ) ) {
return true ;
}
}
false
}