//! Project loading & configuration updates.
//!
//! This is quite tricky. The main problem is time and changes -- there's no
//! fixed "project" rust-analyzer is working with, "current project" is itself
//! mutable state. For example, when the user edits `Cargo.toml` by adding a new
//! dependency, the project model changes. What's more, switching the project model
//! is not instantaneous -- it takes time to run `cargo metadata` and (for proc
//! macros) `cargo check`.
//!
//! The main guiding principle here is, as elsewhere in rust-analyzer,
//! robustness. We try not to assume that the project model exists or is
//! correct. Instead, we try to provide a best-effort service. Even if the
//! project is currently loading and we don't have a full project model, we
//! still want to respond to various requests.
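//!
//! Concretely, this module drives that loop: `fetch_workspaces`, `fetch_build_data`
//! and `fetch_proc_macros` gather project data on the task pool and report progress
//! back to the main loop, while `switch_workspaces` applies the latest results to the
//! analysis host and (re)configures the VFS, proc-macro servers and flycheck.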
use std::{collections::hash_map::Entry, iter, mem, sync};

use flycheck::{FlycheckConfig, FlycheckHandle};
use hir::db::DefDatabase;
use ide::Change;
use ide_db::{
    base_db::{
        salsa::Durability, CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
        ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, ProcMacros, SourceRoot, VfsPath,
    },
    FxHashMap,
};
use itertools::Itertools;
use proc_macro_api::{MacroDylib, ProcMacroServer};
use project_model::{PackageRoot, ProjectWorkspace, WorkspaceBuildScripts};
use rustc_hash::FxHashSet;
use stdx::{format_to, thread::ThreadIntent};
use syntax::SmolStr;
use triomphe::Arc;
use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};

use crate::{
    config::{Config, FilesWatcher, LinkedProject},
    global_state::GlobalState,
    lsp_ext,
    main_loop::Task,
    op_queue::Cause,
};

use ::tt::token_id as tt;

#[derive(Debug)]
pub(crate) enum ProjectWorkspaceProgress {
    Begin,
    Report(String),
    End(Vec<anyhow::Result<ProjectWorkspace>>, bool),
}

#[derive(Debug)]
pub(crate) enum BuildDataProgress {
    Begin,
    Report(String),
    End((Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)),
}

#[derive(Debug)]
pub(crate) enum ProcMacroProgress {
    Begin,
    Report(String),
    End(ProcMacros),
}

impl GlobalState {
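    /// Returns `true` once an initial status has been reported, no workspace,
    /// build-script or proc-macro fetch is in flight, and the VFS has caught up
    /// with the latest loader configuration.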
    pub(crate) fn is_quiescent(&self) -> bool {
        !(self.last_reported_status.is_none()
            || self.fetch_workspaces_queue.op_in_progress()
            || self.fetch_build_data_queue.op_in_progress()
            || self.fetch_proc_macros_queue.op_in_progress()
            || self.vfs_progress_config_version < self.vfs_config_version
            || self.vfs_progress_n_done < self.vfs_progress_n_total)
    }

    pub(crate) fn update_configuration(&mut self, config: Config) {
        let _p = profile::span("GlobalState::update_configuration");
        let old_config = mem::replace(&mut self.config, Arc::new(config));
        if self.config.lru_parse_query_capacity() != old_config.lru_parse_query_capacity() {
            self.analysis_host.update_lru_capacity(self.config.lru_parse_query_capacity());
        }
        if self.config.lru_query_capacities() != old_config.lru_query_capacities() {
            self.analysis_host.update_lru_capacities(
                &self.config.lru_query_capacities().cloned().unwrap_or_default(),
            );
        }
        if self.config.linked_projects() != old_config.linked_projects() {
            self.fetch_workspaces_queue.request_op("linked projects changed".to_string(), false)
        } else if self.config.flycheck() != old_config.flycheck() {
            self.reload_flycheck();
        }

        if self.analysis_host.raw_database().expand_proc_attr_macros()
            != self.config.expand_proc_attr_macros()
        {
            self.analysis_host.raw_database_mut().set_expand_proc_attr_macros_with_durability(
                self.config.expand_proc_attr_macros(),
                Durability::HIGH,
            );
        }
    }

    pub(crate) fn current_status(&self) -> lsp_ext::ServerStatusParams {
        let mut status = lsp_ext::ServerStatusParams {
            health: lsp_ext::Health::Ok,
            quiescent: self.is_quiescent(),
            message: None,
        };
        let mut message = String::new();

        if self.proc_macro_changed {
            status.health = lsp_ext::Health::Warning;
            message.push_str("Proc-macros have changed and need to be rebuilt.\n\n");
        }
        if self.fetch_build_data_error().is_err() {
            status.health = lsp_ext::Health::Warning;
            message.push_str("Failed to run build scripts of some packages.\n\n");
        }
        if self.proc_macro_clients.iter().any(|it| it.is_err()) {
            status.health = lsp_ext::Health::Warning;
            message.push_str("Failed to spawn one or more proc-macro servers.\n\n");
        }
        if !self.config.cargo_autoreload()
            && self.is_quiescent()
            && self.fetch_workspaces_queue.op_requested()
        {
            status.health = lsp_ext::Health::Warning;
            message.push_str("Auto-reloading is disabled and the workspace has changed; a manual workspace reload is required.\n\n");
        }
        if self.config.linked_projects().is_empty()
            && self.config.detached_files().is_empty()
            && self.config.notifications().cargo_toml_not_found
        {
            status.health = lsp_ext::Health::Warning;
            message.push_str("Failed to discover workspace.\n");
            message.push_str("Consider adding the `Cargo.toml` of the workspace to the [`linkedProjects`](https://rust-analyzer.github.io/manual.html#rust-analyzer.linkedProjects) setting.\n\n");
        }
        if let Some(err) = &self.config_errors {
            status.health = lsp_ext::Health::Warning;
            format_to!(message, "{err}\n");
        }
        if let Some(err) = &self.last_flycheck_error {
            status.health = lsp_ext::Health::Warning;
            message.push_str(err);
            message.push('\n');
        }

        for ws in self.workspaces.iter() {
            let (ProjectWorkspace::Cargo { sysroot, .. }
            | ProjectWorkspace::Json { sysroot, .. }
            | ProjectWorkspace::DetachedFiles { sysroot, .. }) = ws;
            match sysroot {
                Err(None) => (),
                Err(Some(e)) => {
                    status.health = lsp_ext::Health::Warning;
                    message.push_str(e);
                    message.push_str("\n\n");
                }
                Ok(s) => {
                    if let Some(e) = s.loading_warning() {
                        status.health = lsp_ext::Health::Warning;
                        message.push_str(&e);
                        message.push_str("\n\n");
                    }
                }
            }
            if let ProjectWorkspace::Cargo { rustc: Err(Some(e)), .. } = ws {
                status.health = lsp_ext::Health::Warning;
                message.push_str(e);
                message.push_str("\n\n");
            }
        }

        if self.fetch_workspace_error().is_err() {
            status.health = lsp_ext::Health::Error;
            message.push_str("Failed to load workspaces.\n\n");
        }

        if !message.is_empty() {
            status.message = Some(message.trim_end().to_owned());
        }
        status
    }

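    /// Kicks off workspace discovery (loading the linked Cargo and JSON projects)
    /// on the task pool; progress and the final result come back to the main loop
    /// as `Task::FetchWorkspace` messages.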
    pub(crate) fn fetch_workspaces(&mut self, cause: Cause, force_crate_graph_reload: bool) {
        tracing::info!(%cause, "will fetch workspaces");

        self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, {
            let linked_projects = self.config.linked_projects();
            let detached_files = self.config.detached_files().to_vec();
            let cargo_config = self.config.cargo();

            move |sender| {
                let progress = {
                    let sender = sender.clone();
                    move |msg| {
                        sender
                            .send(Task::FetchWorkspace(ProjectWorkspaceProgress::Report(msg)))
                            .unwrap()
                    }
                };

                sender.send(Task::FetchWorkspace(ProjectWorkspaceProgress::Begin)).unwrap();

                let mut workspaces = linked_projects
                    .iter()
                    .map(|project| match project {
                        LinkedProject::ProjectManifest(manifest) => {
                            project_model::ProjectWorkspace::load(
                                manifest.clone(),
                                &cargo_config,
                                &progress,
                            )
                        }
                        LinkedProject::InlineJsonProject(it) => {
                            Ok(project_model::ProjectWorkspace::load_inline(
                                it.clone(),
                                cargo_config.target.as_deref(),
                                &cargo_config.extra_env,
                                None,
                            ))
                        }
                    })
                    .collect::<Vec<_>>();

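                // Drop linked projects that resolved to the same workspace as an
                // earlier entry (ignoring build-script data), keeping the first one.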
                let mut i = 0;
                while i < workspaces.len() {
                    if let Ok(w) = &workspaces[i] {
                        let dupes: Vec<_> = workspaces
                            .iter()
                            .enumerate()
                            .skip(i + 1)
                            .filter_map(|(i, it)| {
                                it.as_ref().ok().filter(|ws| ws.eq_ignore_build_data(w)).map(|_| i)
                            })
                            .collect();
                        dupes.into_iter().rev().for_each(|d| {
                            _ = workspaces.remove(d);
                        });
                    }
                    i += 1;
                }

                if !detached_files.is_empty() {
                    workspaces.push(project_model::ProjectWorkspace::load_detached_files(
                        detached_files,
                        &cargo_config,
                    ));
                }

                tracing::info!("did fetch workspaces {:?}", workspaces);
                sender
                    .send(Task::FetchWorkspace(ProjectWorkspaceProgress::End(
                        workspaces,
                        force_crate_graph_reload,
                    )))
                    .unwrap();
            }
        });
    }

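    /// Runs the build scripts of all current workspaces on the task pool, reporting
    /// progress and results back to the main loop as `Task::FetchBuildData` messages.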
    pub(crate) fn fetch_build_data(&mut self, cause: Cause) {
        tracing::info!(%cause, "will fetch build data");
        let workspaces = Arc::clone(&self.workspaces);
        let config = self.config.cargo();
        self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, move |sender| {
            sender.send(Task::FetchBuildData(BuildDataProgress::Begin)).unwrap();

            let progress = {
                let sender = sender.clone();
                move |msg| {
                    sender.send(Task::FetchBuildData(BuildDataProgress::Report(msg))).unwrap()
                }
            };
            let res = ProjectWorkspace::run_all_build_scripts(&workspaces, &config, &progress);

            sender.send(Task::FetchBuildData(BuildDataProgress::End((workspaces, res)))).unwrap();
        });
    }

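    /// Loads the proc-macro dylibs at the given paths via the spawned proc-macro
    /// servers, substituting dummy expanders for the names listed in the
    /// `dummy_replacements` config, and reports the result back to the main loop
    /// as `Task::LoadProcMacros` messages.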
    pub(crate) fn fetch_proc_macros(&mut self, cause: Cause, paths: Vec<ProcMacroPaths>) {
        tracing::info!(%cause, "will load proc macros");
        let dummy_replacements = self.config.dummy_replacements().clone();
        let proc_macro_clients = self.proc_macro_clients.clone();

        self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, move |sender| {
            sender.send(Task::LoadProcMacros(ProcMacroProgress::Begin)).unwrap();

            let dummy_replacements = &dummy_replacements;
            let progress = {
                let sender = sender.clone();
                &move |msg| {
                    sender.send(Task::LoadProcMacros(ProcMacroProgress::Report(msg))).unwrap()
                }
            };

            let mut res = FxHashMap::default();
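            // Pad the client list with errors so that the `zip` below never drops
            // workspaces for which no proc-macro server is running.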
            let chain = proc_macro_clients
                .iter()
                .map(|res| res.as_ref().map_err(|e| e.to_string()))
                .chain(iter::repeat_with(|| Err("Proc-macro servers are not running".into())));
            for (client, paths) in chain.zip(paths) {
                res.extend(paths.into_iter().map(move |(crate_id, res)| {
                    (
                        crate_id,
                        res.map_or_else(
                            |_| Err("proc macro crate is missing dylib".to_owned()),
                            |(crate_name, path)| {
                                progress(path.display().to_string());
                                client.as_ref().map_err(Clone::clone).and_then(|client| {
                                    load_proc_macro(
                                        client,
                                        &path,
                                        crate_name
                                            .as_deref()
                                            .and_then(|crate_name| {
                                                dummy_replacements.get(crate_name).map(|v| &**v)
                                            })
                                            .unwrap_or_default(),
                                    )
                                })
                            },
                        ),
                    )
                }));
            }

            sender.send(Task::LoadProcMacros(ProcMacroProgress::End(res))).unwrap();
        });
    }

    pub(crate) fn set_proc_macros(&mut self, proc_macros: ProcMacros) {
        let mut change = Change::new();
        change.set_proc_macros(proc_macros);
        self.analysis_host.apply_change(change);
    }

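    /// Applies the result of the most recent workspace fetch: updates `self.workspaces`,
    /// (re)spawns proc-macro servers if needed, reconfigures file watching and the VFS
    /// loader, and rebuilds the crate graph.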
    pub(crate) fn switch_workspaces(&mut self, cause: Cause) {
        let _p = profile::span("GlobalState::switch_workspaces");
        tracing::info!(%cause, "will switch workspaces");

        let Some((workspaces, force_reload_crate_graph)) = self.fetch_workspaces_queue.last_op_result() else { return; };

        if self.fetch_workspace_error().is_err() {
            if !self.workspaces.is_empty() {
                if *force_reload_crate_graph {
                    self.recreate_crate_graph(cause);
                }
                // It only makes sense to switch to a partially broken workspace
                // if we don't have any workspace at all yet.
                return;
            }
        }

        let workspaces =
            workspaces.iter().filter_map(|res| res.as_ref().ok().cloned()).collect::<Vec<_>>();

        let same_workspaces = workspaces.len() == self.workspaces.len()
            && workspaces
                .iter()
                .zip(self.workspaces.iter())
                .all(|(l, r)| l.eq_ignore_build_data(r));

        if same_workspaces {
            let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result();
            if Arc::ptr_eq(workspaces, &self.workspaces) {
                tracing::debug!("set build scripts to workspaces");

                let workspaces = workspaces
                    .iter()
                    .cloned()
                    .zip(build_scripts)
                    .map(|(mut ws, bs)| {
                        ws.set_build_scripts(bs.as_ref().ok().cloned().unwrap_or_default());
                        ws
                    })
                    .collect::<Vec<_>>();

                // Workspaces are the same, but we've updated build data.
                self.workspaces = Arc::new(workspaces);
            } else {
                tracing::info!("build scripts do not match the version of the active workspace");
                if *force_reload_crate_graph {
                    self.recreate_crate_graph(cause);
                }
                // Current build scripts do not match the version of the active
                // workspace, so there's nothing for us to update.
                return;
            }
        } else {
            tracing::debug!("abandon build scripts for workspaces");

            // Here, we completely changed the workspace (Cargo.toml edit), so
            // we don't care about build-script results, they are stale.
            // FIXME: can we abort the build scripts here?
            self.workspaces = Arc::new(workspaces);
        }

        if let FilesWatcher::Client = self.config.files().watcher {
            let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
                watchers: self
                    .workspaces
                    .iter()
                    .flat_map(|ws| ws.to_roots())
                    .filter(|it| it.is_local)
                    .flat_map(|root| {
                        root.include.into_iter().flat_map(|it| {
                            [
                                format!("{}/**/*.rs", it.display()),
                                format!("{}/**/Cargo.toml", it.display()),
                                format!("{}/**/Cargo.lock", it.display()),
                            ]
                        })
                    })
                    .map(|glob_pattern| lsp_types::FileSystemWatcher {
                        glob_pattern: lsp_types::GlobPattern::String(glob_pattern),
                        kind: None,
                    })
                    .collect(),
            };
            let registration = lsp_types::Registration {
                id: "workspace/didChangeWatchedFiles".to_string(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: Some(serde_json::to_value(registration_options).unwrap()),
            };
            self.send_request::<lsp_types::request::RegisterCapability>(
                lsp_types::RegistrationParams { registrations: vec![registration] },
                |_, _| (),
            );
        }

        let files_config = self.config.files();
        let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude);

        if self.proc_macro_clients.is_empty() || !same_workspaces {
            if self.config.expand_proc_macros() {
                tracing::info!("Spawning proc-macro servers");

                // FIXME: use `Arc::from_iter` when it becomes available
                self.proc_macro_clients = Arc::from(
                    self.workspaces
                        .iter()
                        .map(|ws| {
                            let path = match self.config.proc_macro_srv() {
                                Some(path) => path,
                                None => ws.find_sysroot_proc_macro_srv()?,
                            };

                            tracing::info!("Using proc-macro server at {}", path.display());
                            ProcMacroServer::spawn(path.clone()).map_err(|err| {
                                tracing::error!(
                                    "Failed to run proc-macro server from path {}, error: {:?}",
                                    path.display(),
                                    err
                                );
                                anyhow::anyhow!(
                                    "Failed to run proc-macro server from path {}, error: {:?}",
                                    path.display(),
                                    err
                                )
                            })
                        })
                        .collect::<Vec<_>>(),
                )
            };
        }

        let watch = match files_config.watcher {
            FilesWatcher::Client => vec![],
            FilesWatcher::Server => project_folders.watch,
        };
        self.vfs_config_version += 1;
        self.loader.handle.set_config(vfs::loader::Config {
            load: project_folders.load,
            watch,
            version: self.vfs_config_version,
        });
        self.source_root_config = project_folders.source_root_config;

        self.recreate_crate_graph(cause);

        tracing::info!("did switch workspaces");
    }

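    /// Rebuilds the crate graph from the current workspaces, records which files the
    /// graph construction depended on (so the graph can be recreated when they appear
    /// or disappear), applies the new graph to the analysis host, requests a proc-macro
    /// reload if enabled, and reloads flycheck.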
    fn recreate_crate_graph(&mut self, cause: String) {
        // Create crate graph from all the workspaces
        let (crate_graph, proc_macro_paths, crate_graph_file_dependencies) = {
            let vfs = &mut self.vfs.write().0;
            let loader = &mut self.loader;
            // Crate graph construction relies on these paths; record them so that when
            // one of them is deleted or created we trigger a reconstruction of the crate graph.
            let mut crate_graph_file_dependencies = FxHashSet::default();

            let mut load = |path: &AbsPath| {
                let _p = profile::span("switch_workspaces::load");
                let vfs_path = vfs::VfsPath::from(path.to_path_buf());
                crate_graph_file_dependencies.insert(vfs_path.clone());
                match vfs.file_id(&vfs_path) {
                    Some(file_id) => Some(file_id),
                    None => {
                        if !self.mem_docs.contains(&vfs_path) {
                            let contents = loader.handle.load_sync(path);
                            vfs.set_file_contents(vfs_path.clone(), contents);
                        }
                        vfs.file_id(&vfs_path)
                    }
                }
            };

            let mut crate_graph = CrateGraph::default();
            let mut proc_macros = Vec::default();
            for ws in &**self.workspaces {
                let (other, mut crate_proc_macros) =
                    ws.to_crate_graph(&mut load, &self.config.extra_env());
                crate_graph.extend(other, &mut crate_proc_macros);
                proc_macros.push(crate_proc_macros);
            }
            (crate_graph, proc_macros, crate_graph_file_dependencies)
        };

        if self.config.expand_proc_macros() {
            self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths);
        }
        let mut change = Change::new();
        change.set_crate_graph(crate_graph);
        self.analysis_host.apply_change(change);
        self.crate_graph_file_dependencies = crate_graph_file_dependencies;
        self.process_changes();

        self.reload_flycheck();
    }

    pub(super) fn fetch_workspace_error(&self) -> Result<(), String> {
        let mut buf = String::new();

        let Some((last_op_result, _)) = self.fetch_workspaces_queue.last_op_result() else { return Ok(()) };
        if last_op_result.is_empty() {
            stdx::format_to!(buf, "rust-analyzer failed to discover workspace");
        } else {
            for ws in last_op_result {
                if let Err(err) = ws {
                    stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err);
                }
            }
        }

        if buf.is_empty() {
            return Ok(());
        }

        Err(buf)
    }

    pub(super) fn fetch_build_data_error(&self) -> Result<(), String> {
        let mut buf = String::new();

        for ws in &self.fetch_build_data_queue.last_op_result().1 {
            match ws {
                Ok(data) => match data.error() {
                    Some(stderr) => stdx::format_to!(buf, "{:#}\n", stderr),
                    _ => (),
                },
                // io errors
                Err(err) => stdx::format_to!(buf, "{:#}\n", err),
            }
        }

        if buf.is_empty() {
            Ok(())
        } else {
            Err(buf)
        }
    }

    fn reload_flycheck(&mut self) {
        let _p = profile::span("GlobalState::reload_flycheck");
        let config = self.config.flycheck();
        let sender = self.flycheck_sender.clone();
        let invocation_strategy = match config {
            FlycheckConfig::CargoCommand { .. } => flycheck::InvocationStrategy::PerWorkspace,
            FlycheckConfig::CustomCommand { invocation_strategy, .. } => invocation_strategy,
        };

        self.flycheck = match invocation_strategy {
            flycheck::InvocationStrategy::Once => vec![FlycheckHandle::spawn(
                0,
                Box::new(move |msg| sender.send(msg).unwrap()),
                config,
                self.config.root_path().clone(),
            )],
            flycheck::InvocationStrategy::PerWorkspace => {
                self.workspaces
                    .iter()
                    .enumerate()
                    .filter_map(|(id, w)| match w {
                        ProjectWorkspace::Cargo { cargo, .. } => Some((id, cargo.workspace_root())),
                        ProjectWorkspace::Json { project, .. } => {
                            // Enable flychecks for json projects if a custom flycheck command was supplied
                            // in the workspace configuration.
                            match config {
                                FlycheckConfig::CustomCommand { .. } => Some((id, project.path())),
                                _ => None,
                            }
                        }
                        ProjectWorkspace::DetachedFiles { .. } => None,
                    })
                    .map(|(id, root)| {
                        let sender = sender.clone();
                        FlycheckHandle::spawn(
                            id,
                            Box::new(move |msg| sender.send(msg).unwrap()),
                            config.clone(),
                            root.to_path_buf(),
                        )
                    })
                    .collect()
            }
        }
        .into();
    }
}

#[derive(Default)]
pub(crate) struct ProjectFolders {
    pub(crate) load: Vec<vfs::loader::Entry>,
    pub(crate) watch: Vec<usize>,
    pub(crate) source_root_config: SourceRootConfig,
}

impl ProjectFolders {
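    /// Computes the VFS loader entries, watch list and source-root partition for the
    /// given workspaces, coalescing overlapping package roots (see the comment below).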
    pub(crate) fn new(
        workspaces: &[ProjectWorkspace],
        global_excludes: &[AbsPathBuf],
    ) -> ProjectFolders {
        let mut res = ProjectFolders::default();
        let mut fsc = FileSetConfig::builder();
        let mut local_filesets = vec![];

        // Dedup source roots
        // Depending on the project setup, we can have duplicated source roots, or for example in
        // the case of the rustc workspace, we can end up with two source roots that are almost the
        // same but not quite, like:
        // PackageRoot { is_local: false, include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri")], exclude: [] }
        // PackageRoot {
        //     is_local: true,
        //     include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri"), AbsPathBuf(".../rust/build/x86_64-pc-windows-msvc/stage0-tools/x86_64-pc-windows-msvc/release/build/cargo-miri-85801cd3d2d1dae4/out")],
        //     exclude: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri/.git"), AbsPathBuf(".../rust/src/tools/miri/cargo-miri/target")]
        // }
        //
        // The first one comes from the explicit rustc workspace, which points to the rustc workspace itself.
        // The second comes from the rustc workspace that we load as the actual project workspace.
        // Roots whose `is_local` flags differ like this cause problems, especially when filtering
        // diagnostics, as we don't report diagnostics for external libraries. So we need to
        // deduplicate the roots. Usually deduplicating by `include` would be enough, but as the
        // rustc example shows, that doesn't always work, so we also coalesce roots whose includes overlap.

        let mut roots: Vec<_> = workspaces
            .iter()
            .flat_map(|ws| ws.to_roots())
            .update(|root| root.include.sort())
            .sorted_by(|a, b| a.include.cmp(&b.include))
            .collect();

        // map that tracks indices of overlapping roots
        let mut overlap_map = FxHashMap::<_, Vec<_>>::default();
        let mut done = false;

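        // Merge overlapping roots into the first occurrence until a fixpoint is reached;
        // roots that were merged away are left empty and filtered out below.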
        while !mem::replace(&mut done, true) {
            // maps include paths to indices of the corresponding root
            let mut include_to_idx = FxHashMap::default();
            // Find and note down the indices of overlapping roots
            for (idx, root) in roots.iter().enumerate().filter(|(_, it)| !it.include.is_empty()) {
                for include in &root.include {
                    match include_to_idx.entry(include) {
                        Entry::Occupied(e) => {
                            overlap_map.entry(*e.get()).or_default().push(idx);
                        }
                        Entry::Vacant(e) => {
                            e.insert(idx);
                        }
                    }
                }
            }
            for (k, v) in overlap_map.drain() {
                done = false;
                for v in v {
                    let r = mem::replace(
                        &mut roots[v],
                        PackageRoot { is_local: false, include: vec![], exclude: vec![] },
                    );
                    roots[k].is_local |= r.is_local;
                    roots[k].include.extend(r.include);
                    roots[k].exclude.extend(r.exclude);
                }
                roots[k].include.sort();
                roots[k].exclude.sort();
                roots[k].include.dedup();
                roots[k].exclude.dedup();
            }
        }

        for root in roots.into_iter().filter(|it| !it.include.is_empty()) {
            let file_set_roots: Vec<VfsPath> =
                root.include.iter().cloned().map(VfsPath::from).collect();

            let entry = {
                let mut dirs = vfs::loader::Directories::default();
                dirs.extensions.push("rs".into());
                dirs.include.extend(root.include);
                dirs.exclude.extend(root.exclude);
                for excl in global_excludes {
                    if dirs
                        .include
                        .iter()
                        .any(|incl| incl.starts_with(excl) || excl.starts_with(incl))
                    {
                        dirs.exclude.push(excl.clone());
                    }
                }

                vfs::loader::Entry::Directories(dirs)
            };

            if root.is_local {
                res.watch.push(res.load.len());
            }
            res.load.push(entry);

            if root.is_local {
                local_filesets.push(fsc.len());
            }
            fsc.add_file_set(file_set_roots)
        }

        let fsc = fsc.build();
        res.source_root_config = SourceRootConfig { fsc, local_filesets };

        res
    }
}

#[derive(Default, Debug)]
pub(crate) struct SourceRootConfig {
    pub(crate) fsc: FileSetConfig,
    pub(crate) local_filesets: Vec<usize>,
}

impl SourceRootConfig {
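    /// Partitions the VFS into `SourceRoot`s, marking the file sets that belong to
    /// local workspace members as local and the remaining file sets as library.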
    pub(crate) fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> {
        let _p = profile::span("SourceRootConfig::partition");
        self.fsc
            .partition(vfs)
            .into_iter()
            .enumerate()
            .map(|(idx, file_set)| {
                let is_local = self.local_filesets.contains(&idx);
                if is_local {
                    SourceRoot::new_local(file_set)
                } else {
                    SourceRoot::new_library(file_set)
                }
            })
            .collect()
    }
}

/// Load the proc-macros for the given lib path, replacing all expanders whose names are in
/// `dummy_replace` with a dummy expander (identity for attribute macros, empty otherwise).
pub(crate) fn load_proc_macro(
    server: &ProcMacroServer,
    path: &AbsPath,
    dummy_replace: &[Box<str>],
) -> ProcMacroLoadResult {
    let res: Result<Vec<_>, String> = (|| {
        let dylib = MacroDylib::new(path.to_path_buf());
        let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
        if vec.is_empty() {
            return Err("proc macro library returned no proc macros".to_string());
        }
        Ok(vec
            .into_iter()
            .map(|expander| expander_to_proc_macro(expander, dummy_replace))
            .collect())
    })();
    return match res {
        Ok(proc_macros) => {
            tracing::info!(
                "Loaded proc-macros for {}: {:?}",
                path.display(),
                proc_macros.iter().map(|it| it.name.clone()).collect::<Vec<_>>()
            );
            Ok(proc_macros)
        }
        Err(e) => {
            tracing::warn!("proc-macro loading for {} failed: {e}", path.display());
            Err(e)
        }
    };

    fn expander_to_proc_macro(
        expander: proc_macro_api::ProcMacro,
        dummy_replace: &[Box<str>],
    ) -> ProcMacro {
        let name = SmolStr::from(expander.name());
        let kind = match expander.kind() {
            proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive,
            proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike,
            proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr,
        };
        let expander: sync::Arc<dyn ProcMacroExpander> =
            if dummy_replace.iter().any(|replace| &**replace == name) {
                match kind {
                    ProcMacroKind::Attr => sync::Arc::new(IdentityExpander),
                    _ => sync::Arc::new(EmptyExpander),
                }
            } else {
                sync::Arc::new(Expander(expander))
            };
        ProcMacro { name, kind, expander }
    }

    #[derive(Debug)]
    struct Expander(proc_macro_api::ProcMacro);

    impl ProcMacroExpander for Expander {
        fn expand(
            &self,
            subtree: &tt::Subtree,
            attrs: Option<&tt::Subtree>,
            env: &Env,
        ) -> Result<tt::Subtree, ProcMacroExpansionError> {
            let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
            match self.0.expand(subtree, attrs, env) {
                Ok(Ok(subtree)) => Ok(subtree),
                Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
                Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
            }
        }
    }

    /// Dummy identity expander, used for attribute proc-macros that are deliberately ignored by the user.
    #[derive(Debug)]
    struct IdentityExpander;

    impl ProcMacroExpander for IdentityExpander {
        fn expand(
            &self,
            subtree: &tt::Subtree,
            _: Option<&tt::Subtree>,
            _: &Env,
        ) -> Result<tt::Subtree, ProcMacroExpansionError> {
            Ok(subtree.clone())
        }
    }

    /// Empty expander, used for proc-macros that are deliberately ignored by the user.
    #[derive(Debug)]
    struct EmptyExpander;

    impl ProcMacroExpander for EmptyExpander {
        fn expand(
            &self,
            _: &tt::Subtree,
            _: Option<&tt::Subtree>,
            _: &Env,
        ) -> Result<tt::Subtree, ProcMacroExpansionError> {
            Ok(tt::Subtree::empty())
        }
    }
}

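/// Decides whether a change to `path` warrants reloading the project workspace.
///
/// Roughly: any change to `Cargo.toml`/`Cargo.lock` does; creating or deleting
/// `.cargo/config{.toml}` or an implicit Cargo target (`build.rs`, `src/main.rs`,
/// `src/lib.rs`, files under `src/bin`, `examples`, `tests`, `benches`) does;
/// plain modifications to other `.rs` files do not. A sketch of the intent
/// (illustrative paths, not compiled; assumes `AbsPath::assert` from the `paths` crate):
///
/// ```ignore
/// let p = |s: &str| AbsPath::assert(s.as_ref());
/// assert!(should_refresh_for_change(p("/proj/Cargo.toml"), ChangeKind::Modify));
/// assert!(should_refresh_for_change(p("/proj/src/main.rs"), ChangeKind::Create));
/// assert!(!should_refresh_for_change(p("/proj/src/util.rs"), ChangeKind::Modify));
/// ```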
pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
    const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
    const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];

    let file_name = match path.file_name().unwrap_or_default().to_str() {
        Some(it) => it,
        None => return false,
    };

    if let "Cargo.toml" | "Cargo.lock" = file_name {
        return true;
    }
    if change_kind == ChangeKind::Modify {
        return false;
    }

    // .cargo/config{.toml}
    if path.extension().unwrap_or_default() != "rs" {
        let is_cargo_config = matches!(file_name, "config.toml" | "config")
            && path.parent().map(|parent| parent.as_ref().ends_with(".cargo")).unwrap_or(false);
        return is_cargo_config;
    }

    if IMPLICIT_TARGET_FILES.iter().any(|it| path.as_ref().ends_with(it)) {
        return true;
    }
    let parent = match path.parent() {
        Some(it) => it,
        None => return false,
    };
    if IMPLICIT_TARGET_DIRS.iter().any(|it| parent.as_ref().ends_with(it)) {
        return true;
    }
    if file_name == "main.rs" {
        let grand_parent = match parent.parent() {
            Some(it) => it,
            None => return false,
        };
        if IMPLICIT_TARGET_DIRS.iter().any(|it| grand_parent.as_ref().ends_with(it)) {
            return true;
        }
    }
    false
}