rust-analyzer (mirror of https://github.com/rust-lang/rust-analyzer.git)

fix: potential bugs when build scripts do not match the current project

Parent: f4de2ece0d    Commit: b8b166e674
4 changed files with 173 additions and 80 deletions
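In short: build-script results now live on the workspace itself. `ProjectWorkspace::Cargo` gains a `build_scripts` field plus `run_build_scripts`/`set_build_scripts` methods, `to_roots`/`to_crate_graph` stop taking build scripts as a separate argument, and the build-data fetch queue now returns the `Arc<Vec<ProjectWorkspace>>` snapshot the scripts were built against, so stale results can be detected in `switch_workspaces`. The four changed files appear to be the `ProjectWorkspace` definition in `project_model`, the CLI's `load_workspace` helper, `GlobalState`, and the reload logic in the `rust-analyzer` crate.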
@@ -10,6 +10,7 @@ use cfg::{CfgDiff, CfgOptions};
 use paths::{AbsPath, AbsPathBuf};
 use proc_macro_api::ProcMacroClient;
 use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::always;

 use crate::{
     build_scripts::BuildScriptOutput,
@@ -39,6 +40,7 @@ pub enum ProjectWorkspace {
     /// Project workspace was discovered by running `cargo metadata` and `rustc --print sysroot`.
     Cargo {
         cargo: CargoWorkspace,
+        build_scripts: WorkspaceBuildScripts,
         sysroot: Sysroot,
         rustc: Option<CargoWorkspace>,
         /// Holds cfg flags for the current target. We get those by running
@@ -69,7 +71,14 @@ impl fmt::Debug for ProjectWorkspace {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         // Make sure this isn't too verbose.
         match self {
-            ProjectWorkspace::Cargo { cargo, sysroot, rustc, rustc_cfg, cfg_overrides } => f
+            ProjectWorkspace::Cargo {
+                cargo,
+                build_scripts: _,
+                sysroot,
+                rustc,
+                rustc_cfg,
+                cfg_overrides,
+            } => f
                 .debug_struct("Cargo")
                 .field("root", &cargo.workspace_root().file_name())
                 .field("n_packages", &cargo.packages().len())
@@ -169,7 +178,14 @@ impl ProjectWorkspace {
                 let rustc_cfg = rustc_cfg::get(Some(&cargo_toml), config.target.as_deref());

                 let cfg_overrides = config.cfg_overrides();
-                ProjectWorkspace::Cargo { cargo, sysroot, rustc, rustc_cfg, cfg_overrides }
+                ProjectWorkspace::Cargo {
+                    cargo,
+                    build_scripts: WorkspaceBuildScripts::default(),
+                    sysroot,
+                    rustc,
+                    rustc_cfg,
+                    cfg_overrides,
+                }
             }
         };

@@ -196,10 +212,34 @@ impl ProjectWorkspace {
         Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg })
     }

+    pub fn run_build_scripts(
+        &self,
+        config: &CargoConfig,
+        progress: &dyn Fn(String),
+    ) -> Result<WorkspaceBuildScripts> {
+        match self {
+            ProjectWorkspace::Cargo { cargo, .. } => {
+                WorkspaceBuildScripts::run(config, cargo, progress)
+            }
+            ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFiles { .. } => {
+                Ok(WorkspaceBuildScripts::default())
+            }
+        }
+    }
+
+    pub fn set_build_scripts(&mut self, bs: WorkspaceBuildScripts) {
+        match self {
+            ProjectWorkspace::Cargo { build_scripts, .. } => *build_scripts = bs,
+            _ => {
+                always!(bs == WorkspaceBuildScripts::default());
+            }
+        }
+    }
+
     /// Returns the roots for the current `ProjectWorkspace`
     /// The return type contains the path and whether or not
     /// the root is a member of the current workspace
-    pub fn to_roots(&self, build_scripts: &WorkspaceBuildScripts) -> Vec<PackageRoot> {
+    pub fn to_roots(&self) -> Vec<PackageRoot> {
         match self {
             ProjectWorkspace::Json { project, sysroot, rustc_cfg: _ } => project
                 .crates()
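The two methods added above split build-script handling into a compute step (`run_build_scripts`, which may be slow and may fail, so it returns a `Result` and leaves the workspace untouched) and a store step (`set_build_scripts`). A minimal standalone sketch of that run-then-set pattern follows; the types and names are illustrative stand-ins, not rust-analyzer's real `CargoConfig`/`WorkspaceBuildScripts`:

// Stand-in types; the real ones live in the project_model crate.
#[derive(Clone, Debug, Default, PartialEq)]
struct BuildScriptOutputs {
    out_dirs: Vec<String>,
}

#[derive(Debug)]
enum Workspace {
    Cargo { manifest: String, build_scripts: BuildScriptOutputs },
    Detached,
}

impl Workspace {
    // "Compute" step: may be slow or fail, so it returns a Result and does not touch `self`.
    fn run_build_scripts(&self, progress: &dyn Fn(String)) -> Result<BuildScriptOutputs, String> {
        match self {
            Workspace::Cargo { manifest, .. } => {
                progress(format!("running build scripts for {manifest}"));
                Ok(BuildScriptOutputs { out_dirs: vec!["target/debug/build/foo-out".into()] })
            }
            // Non-cargo workspaces have no build scripts.
            Workspace::Detached => Ok(BuildScriptOutputs::default()),
        }
    }

    // "Store" step: attach previously computed outputs to the workspace they belong to.
    fn set_build_scripts(&mut self, bs: BuildScriptOutputs) {
        match self {
            Workspace::Cargo { build_scripts, .. } => *build_scripts = bs,
            Workspace::Detached => assert_eq!(bs, BuildScriptOutputs::default()),
        }
    }
}

fn main() -> Result<(), String> {
    let mut ws = Workspace::Cargo {
        manifest: "Cargo.toml".into(),
        build_scripts: BuildScriptOutputs::default(),
    };
    let bs = ws.run_build_scripts(&|msg| eprintln!("{msg}"))?;
    ws.set_build_scripts(bs); // same run-then-set sequence as the load_workspace hunk below
    println!("{ws:?}");
    Ok(())
}

The `load_workspace` hunk further down in this diff uses exactly this sequence.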
@@ -218,7 +258,14 @@ impl ProjectWorkspace {
                     })
                 }))
                 .collect::<Vec<_>>(),
-            ProjectWorkspace::Cargo { cargo, sysroot, rustc, rustc_cfg: _, cfg_overrides: _ } => {
+            ProjectWorkspace::Cargo {
+                cargo,
+                sysroot,
+                rustc,
+                rustc_cfg: _,
+                cfg_overrides: _,
+                build_scripts,
+            } => {
                 cargo
                     .packages()
                     .map(|pkg| {
@@ -302,7 +349,6 @@ impl ProjectWorkspace {

     pub fn to_crate_graph(
         &self,
-        build_scripts: &WorkspaceBuildScripts,
         proc_macro_client: Option<&ProcMacroClient>,
         load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
     ) -> CrateGraph {
@@ -320,8 +366,14 @@ impl ProjectWorkspace {
                 project,
                 sysroot,
             ),
-            ProjectWorkspace::Cargo { cargo, sysroot, rustc, rustc_cfg, cfg_overrides } => {
-                cargo_to_crate_graph(
+            ProjectWorkspace::Cargo {
+                cargo,
+                sysroot,
+                rustc,
+                rustc_cfg,
+                cfg_overrides,
+                build_scripts,
+            } => cargo_to_crate_graph(
                 rustc_cfg.clone(),
                 cfg_overrides,
                 &proc_macro_loader,
@@ -330,8 +382,7 @@ impl ProjectWorkspace {
                 build_scripts,
                 sysroot,
                 rustc,
-                )
-            }
+            ),
             ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => {
                 detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot)
             }

(next changed file)
@@ -36,7 +36,7 @@ pub(crate) fn load_workspace_at(
 }

 fn load_workspace(
-    ws: ProjectWorkspace,
+    mut ws: ProjectWorkspace,
     cargo_config: &CargoConfig,
     load_config: &LoadCargoConfig,
     progress: &dyn Fn(String),
@@ -56,22 +56,20 @@ fn load_workspace(
         None
     };

-    let build_scripts = match &ws {
-        ProjectWorkspace::Cargo { cargo, .. } if load_config.load_out_dirs_from_check => {
-            WorkspaceBuildScripts::run(cargo_config, cargo, progress)?
-        }
-        _ => WorkspaceBuildScripts::default(),
-    };
+    ws.set_build_scripts(if load_config.load_out_dirs_from_check {
+        ws.run_build_scripts(cargo_config, progress)?
+    } else {
+        WorkspaceBuildScripts::default()
+    });

-    let crate_graph =
-        ws.to_crate_graph(&build_scripts, proc_macro_client.as_ref(), &mut |path: &AbsPath| {
+    let crate_graph = ws.to_crate_graph(proc_macro_client.as_ref(), &mut |path: &AbsPath| {
         let contents = loader.load_sync(path);
         let path = vfs::VfsPath::from(path.to_path_buf());
         vfs.set_file_contents(path.clone(), contents);
         vfs.file_id(&path)
     });

-    let project_folders = ProjectFolders::new(&[ws], &[build_scripts], &[]);
+    let project_folders = ProjectFolders::new(&[ws], &[]);
     loader.set_config(vfs::loader::Config {
         load: project_folders.load,
         watch: vec![],

(next changed file)
@@ -74,18 +74,35 @@ pub(crate) struct GlobalState {
     pub(crate) vfs_progress_n_total: usize,
     pub(crate) vfs_progress_n_done: usize,

-    /// For both `workspaces` and `workspace_build_data`, the field stores the
-    /// data we actually use, while the `OpQueue` stores the result of the last
-    /// fetch.
+    /// `workspaces` field stores the data we actually use, while the `OpQueue`
+    /// stores the result of the last fetch.
     ///
-    /// If the fetch (partially) fails, we do not update the values.
+    /// If the fetch (partially) fails, we do not update the current value.
     ///
-    /// Invariant: workspaces.len() == workspace_build_data
+    /// The handling of build data is subtle. We fetch workspace in two phases:
+    ///
+    /// *First*, we run `cargo metadata`, which gives us fast results for
+    /// initial analysis.
+    ///
+    /// *Second*, we run `cargo check` which runs build scripts and compiles
+    /// proc macros.
+    ///
+    /// We need both for the precise analysis, but we want rust-analyzer to be
+    /// at least partially available just after the first phase. That's because
+    /// first phase is much faster, and is much less likely to fail.
+    ///
+    /// This creates a complication -- by the time the second phase completes,
+    /// the results of the first phase could be invalid. That is, while we run
+    /// `cargo check`, the user edits `Cargo.toml`, we notice this, and the new
+    /// `cargo metadata` completes before `cargo check`.
+    ///
+    /// An additional complication is that we want to avoid needless work. When
+    /// the user just adds comments or whitespace to Cargo.toml, we do not want
+    /// to invalidate any salsa caches.
     pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
     pub(crate) fetch_workspaces_queue: OpQueue<Vec<anyhow::Result<ProjectWorkspace>>>,
-    pub(crate) workspace_build_data: Vec<WorkspaceBuildScripts>,
-    pub(crate) fetch_build_data_queue: OpQueue<Vec<anyhow::Result<WorkspaceBuildScripts>>>,
+    pub(crate) fetch_build_data_queue:
+        OpQueue<(Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)>,

     pub(crate) prime_caches_queue: OpQueue<()>,

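The doc comment above explains the two-phase fetch; the key mechanical change is that `fetch_build_data_queue` now stores the `Arc<Vec<ProjectWorkspace>>` snapshot the build data was computed against, alongside the results. Below is a small self-contained sketch of why that pairing matters, using simplified stand-in types rather than the real `OpQueue`/`ProjectWorkspace`:

use std::sync::Arc;

// Stand-ins: a workspace and the data produced for it by "phase two".
#[derive(Clone, Debug, PartialEq)]
struct Workspace(String);
#[derive(Clone, Debug, Default)]
struct BuildData(Vec<String>);

struct State {
    // Phase one result ("cargo metadata"): the workspaces we actually analyse.
    workspaces: Arc<Vec<Workspace>>,
    // Phase two result ("cargo check"): build data plus the snapshot it was built from.
    last_build_data: Option<(Arc<Vec<Workspace>>, Vec<BuildData>)>,
}

impl State {
    fn apply_build_data(&mut self, result: (Arc<Vec<Workspace>>, Vec<BuildData>)) {
        let (snapshot, data) = result;
        // Phase two is slow; by the time it finishes the user may have edited
        // Cargo.toml and phase one may have replaced `self.workspaces`.
        // Comparing the Arc pointers detects exactly that staleness.
        if Arc::ptr_eq(&snapshot, &self.workspaces) {
            println!("build data is fresh, applying {} entries", data.len());
            self.last_build_data = Some((snapshot, data));
        } else {
            println!("build data was computed for an old workspace set, ignoring");
        }
    }
}

fn main() {
    let ws = Arc::new(vec![Workspace("foo".into())]);
    let mut state = State { workspaces: Arc::clone(&ws), last_build_data: None };

    // Fresh: the result carries the snapshot that is still active.
    state.apply_build_data((Arc::clone(&ws), vec![BuildData::default()]));

    // Stale: the workspaces changed while "cargo check" was running.
    state.workspaces = Arc::new(vec![Workspace("bar".into())]);
    state.apply_build_data((ws, vec![BuildData::default()]));
}

Pointer equality is deliberately conservative: if `self.workspaces` was replaced at all, even by an equal value, the old build data is treated as stale.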
@@ -149,7 +166,6 @@ impl GlobalState {

             workspaces: Arc::new(Vec::new()),
             fetch_workspaces_queue: OpQueue::default(),
-            workspace_build_data: Vec::new(),
             prime_caches_queue: OpQueue::default(),

             fetch_build_data_queue: OpQueue::default(),

(next changed file)
@@ -1,7 +1,6 @@
 //! Project loading & configuration updates
 use std::{mem, sync::Arc};

-use always_assert::always;
 use flycheck::{FlycheckConfig, FlycheckHandle};
 use hir::db::DefDatabase;
 use ide::Change;
@@ -27,7 +26,7 @@ pub(crate) enum ProjectWorkspaceProgress {
 pub(crate) enum BuildDataProgress {
     Begin,
     Report(String),
-    End(Vec<anyhow::Result<WorkspaceBuildScripts>>),
+    End((Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)),
 }

 impl GlobalState {
@@ -114,7 +113,7 @@ impl GlobalState {
             message: None,
         };

-        if let Some(error) = self.build_data_error() {
+        if let Some(error) = self.fetch_build_data_error() {
             status.health = lsp_ext::Health::Warning;
             status.message = Some(error)
         }
@@ -229,6 +228,7 @@ impl GlobalState {
             };
             let mut res = Vec::new();
             for ws in workspaces.iter() {
+                res.push(ws.run_build_scripts(&config, &progress));
                 let ws = match ws {
                     ProjectWorkspace::Cargo { cargo, .. } => cargo,
                     ProjectWorkspace::DetachedFiles { .. } | ProjectWorkspace::Json { .. } => {
@@ -238,12 +238,12 @@ impl GlobalState {
                 };
                 res.push(WorkspaceBuildScripts::run(&config, ws, &progress))
             }
-            sender.send(Task::FetchBuildData(BuildDataProgress::End(res))).unwrap();
+            sender.send(Task::FetchBuildData(BuildDataProgress::End((workspaces, res)))).unwrap();
         });
     }
     pub(crate) fn fetch_build_data_completed(
         &mut self,
-        build_data: Vec<anyhow::Result<WorkspaceBuildScripts>>,
+        build_data: (Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>),
     ) {
         self.fetch_build_data_queue.op_completed(build_data)
     }
@@ -255,11 +255,13 @@ impl GlobalState {
         if let Some(error_message) = self.fetch_workspace_error() {
             log::error!("failed to switch workspaces: {}", error_message);
             if !self.workspaces.is_empty() {
+                // It only makes sense to switch to a partially broken workspace
+                // if we don't have any workspace at all yet.
                 return;
             }
         }

-        if let Some(error_message) = self.build_data_error() {
+        if let Some(error_message) = self.fetch_build_data_error() {
             log::error!("failed to switch build data: {}", error_message);
         }

@@ -270,32 +272,67 @@ impl GlobalState {
             .filter_map(|res| res.as_ref().ok().cloned())
             .collect::<Vec<_>>();

-        let mut build_scripts = self
-            .fetch_build_data_queue
-            .last_op_result()
-            .iter()
-            .map(|res| res.as_ref().ok().cloned().unwrap_or_default())
-            .collect::<Vec<_>>();
-
-        // FIXME: This is not even remotely correct. I do hope that this is
-        // eventually consistent though. We need to figure a better way to map
-        // `cargo metadata` to `cargo check` in the future.
-        //
-        // I *think* what we need here is an extra field on `ProjectWorkspace`,
-        // and a workflow to set it, once build data is ready.
-        build_scripts.resize_with(workspaces.len(), WorkspaceBuildScripts::default);
-
-        if *self.workspaces == workspaces && self.workspace_build_data == build_scripts {
-            return;
-        }
+        fn eq_ignore_build_data<'a>(
+            left: &'a ProjectWorkspace,
+            right: &'a ProjectWorkspace,
+        ) -> bool {
+            let key = |p: &'a ProjectWorkspace| match p {
+                ProjectWorkspace::Cargo {
+                    cargo,
+                    sysroot,
+                    rustc,
+                    rustc_cfg,
+                    cfg_overrides,
+
+                    build_scripts: _,
+                } => Some((cargo, sysroot, rustc, rustc_cfg, cfg_overrides)),
+                _ => None,
+            };
+            match (key(left), key(right)) {
+                (Some(lk), Some(rk)) => lk == rk,
+                _ => left == right,
+            }
+        }
+
+        let same_workspaces = workspaces.len() == self.workspaces.len()
+            && workspaces
+                .iter()
+                .zip(self.workspaces.iter())
+                .all(|(l, r)| eq_ignore_build_data(l, r));
+
+        if same_workspaces {
+            let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result();
+            if Arc::ptr_eq(&workspaces, &self.workspaces) {
+                let workspaces = workspaces
+                    .iter()
+                    .cloned()
+                    .zip(build_scripts)
+                    .map(|(mut ws, bs)| {
+                        ws.set_build_scripts(bs.as_ref().ok().cloned().unwrap_or_default());
+                        ws
+                    })
+                    .collect::<Vec<_>>();
+
+                // Workspaces are the same, but we've updated build data.
+                self.workspaces = Arc::new(workspaces);
+            } else {
+                // Current build scripts do not match the version of the active
+                // workspace, so there's nothing for us to update.
+                return;
+            }
+        } else {
+            // Here, we completely changed the workspace (Cargo.toml edit), so
+            // we don't care about build-script results, they are stale.
+            self.workspaces = Arc::new(workspaces)
+        }

         if let FilesWatcher::Client = self.config.files().watcher {
             if self.config.did_change_watched_files_dynamic_registration() {
                 let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
-                    watchers: workspaces
+                    watchers: self
+                        .workspaces
                         .iter()
-                        .zip(&build_scripts)
-                        .flat_map(|(ws, bs)| ws.to_roots(bs))
+                        .flat_map(|ws| ws.to_roots())
                         .filter(|it| it.is_member)
                         .flat_map(|root| {
                             root.include.into_iter().flat_map(|it| {
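Two details in the hunk above carry the fix: `eq_ignore_build_data` compares workspaces field by field while ignoring `build_scripts`, so a pure build-data refresh (or a comment/whitespace-only `Cargo.toml` change that yields identical metadata) does not count as a workspace switch. `Arc::ptr_eq` against the snapshot stored in the queue then tells whether the finished `cargo check` run still belongs to the workspaces currently in use; if not, the stale results are dropped.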
@@ -327,8 +364,7 @@ impl GlobalState {
         let mut change = Change::new();

         let files_config = self.config.files();
-        let project_folders =
-            ProjectFolders::new(&workspaces, &build_scripts, &files_config.exclude);
+        let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude);

         if self.proc_macro_client.is_none() {
             self.proc_macro_client = match self.config.proc_macro_srv() {
@@ -377,12 +413,8 @@ impl GlobalState {
                 }
                 res
             };
-            for (ws, bs) in workspaces.iter().zip(&build_scripts) {
-                crate_graph.extend(ws.to_crate_graph(
-                    bs,
-                    self.proc_macro_client.as_ref(),
-                    &mut load,
-                ));
+            for ws in self.workspaces.iter() {
+                crate_graph.extend(ws.to_crate_graph(self.proc_macro_client.as_ref(), &mut load));
             }

             crate_graph
|
@ -390,8 +422,6 @@ impl GlobalState {
|
||||||
change.set_crate_graph(crate_graph);
|
change.set_crate_graph(crate_graph);
|
||||||
|
|
||||||
self.source_root_config = project_folders.source_root_config;
|
self.source_root_config = project_folders.source_root_config;
|
||||||
self.workspaces = Arc::new(workspaces);
|
|
||||||
self.workspace_build_data = build_scripts;
|
|
||||||
|
|
||||||
self.analysis_host.apply_change(change);
|
self.analysis_host.apply_change(change);
|
||||||
self.process_changes();
|
self.process_changes();
|
||||||
|
@ -415,10 +445,10 @@ impl GlobalState {
|
||||||
Some(buf)
|
Some(buf)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_data_error(&self) -> Option<String> {
|
fn fetch_build_data_error(&self) -> Option<String> {
|
||||||
let mut buf = String::new();
|
let mut buf = String::new();
|
||||||
|
|
||||||
for ws in self.fetch_build_data_queue.last_op_result() {
|
for ws in &self.fetch_build_data_queue.last_op_result().1 {
|
||||||
if let Err(err) = ws {
|
if let Err(err) = ws {
|
||||||
stdx::format_to!(buf, "rust-analyzer failed to run custom build: {:#}\n", err);
|
stdx::format_to!(buf, "rust-analyzer failed to run custom build: {:#}\n", err);
|
||||||
}
|
}
|
||||||
|
@@ -481,15 +511,13 @@ pub(crate) struct ProjectFolders {
 impl ProjectFolders {
     pub(crate) fn new(
         workspaces: &[ProjectWorkspace],
-        build_scripts: &[WorkspaceBuildScripts],
         global_excludes: &[AbsPathBuf],
     ) -> ProjectFolders {
-        always!(workspaces.len() == build_scripts.len());
         let mut res = ProjectFolders::default();
         let mut fsc = FileSetConfig::builder();
         let mut local_filesets = vec![];

-        for root in workspaces.iter().zip(build_scripts).flat_map(|(ws, bs)| ws.to_roots(bs)) {
+        for root in workspaces.iter().flat_map(|ws| ws.to_roots()) {
             let file_set_roots: Vec<VfsPath> =
                 root.include.iter().cloned().map(VfsPath::from).collect();
