mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-10-02 22:54:58 +00:00
Move dedup-dev-deps tests into rust-analyzer crate
This commit is contained in:
parent b1404d387a
commit 0ccb3b8731
8 changed files with 233 additions and 152 deletions
@@ -47,7 +47,9 @@ mod integrated_benchmarks;
 
 use serde::de::DeserializeOwned;
 
-pub use crate::{caps::server_capabilities, main_loop::main_loop, version::version};
+pub use crate::{
+    caps::server_capabilities, main_loop::main_loop, reload::ws_to_crate_graph, version::version,
+};
 
 pub fn from_json<T: DeserializeOwned>(
     what: &'static str,
@@ -17,8 +17,9 @@ use std::{iter, mem};
 
 use flycheck::{FlycheckConfig, FlycheckHandle};
 use hir::{db::DefDatabase, Change, ProcMacros};
+use ide::CrateId;
 use ide_db::{
-    base_db::{salsa::Durability, CrateGraph, ProcMacroPaths},
+    base_db::{salsa::Durability, CrateGraph, CrateOrigin, ProcMacroPaths, Version},
     FxHashMap,
 };
 use itertools::Itertools;
@@ -28,7 +29,7 @@ use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
 use rustc_hash::FxHashSet;
 use stdx::{format_to, thread::ThreadIntent};
 use triomphe::Arc;
-use vfs::{AbsPath, ChangeKind};
+use vfs::{AbsPath, AbsPathBuf, ChangeKind};
 
 use crate::{
     config::{Config, FilesWatcher, LinkedProject},
@@ -532,7 +533,7 @@ impl GlobalState {
         // deleted or created we trigger a reconstruction of the crate graph
         let mut crate_graph_file_dependencies = FxHashSet::default();
 
-        let mut load = |path: &AbsPath| {
+        let load = |path: &AbsPath| {
             let _p = tracing::span!(tracing::Level::DEBUG, "switch_workspaces::load").entered();
             let vfs_path = vfs::VfsPath::from(path.to_path_buf());
             crate_graph_file_dependencies.insert(vfs_path.clone());
@@ -547,56 +548,8 @@ impl GlobalState {
             }
         };
 
-        let mut crate_graph = CrateGraph::default();
-        let mut proc_macro_paths = Vec::default();
-        let mut layouts = Vec::default();
-        let mut toolchains = Vec::default();
-        let e = Err(Arc::from("missing layout"));
-        for ws in &**self.workspaces {
-            let (other, mut crate_proc_macros) =
-                ws.to_crate_graph(&mut load, self.config.extra_env());
-            let num_layouts = layouts.len();
-            let num_toolchains = toolchains.len();
-            let (toolchain, layout) = match ws {
-                ProjectWorkspace::Cargo { toolchain, target_layout, .. }
-                | ProjectWorkspace::Json { toolchain, target_layout, .. } => {
-                    (toolchain.clone(), target_layout.clone())
-                }
-                ProjectWorkspace::DetachedFiles { .. } => {
-                    (None, Err("detached files have no layout".into()))
-                }
-            };
-
-            let mapping = crate_graph.extend(
-                other,
-                &mut crate_proc_macros,
-                |(cg_id, _cg_data), (_o_id, _o_data)| {
-                    // if the newly created crate graph's layout is equal to the crate of the merged graph, then
-                    // we can merge the crates.
-                    layouts[cg_id.into_raw().into_u32() as usize] == layout
-                        && toolchains[cg_id.into_raw().into_u32() as usize] == toolchain
-                },
-            );
-            // Populate the side tables for the newly merged crates
-            mapping.values().for_each(|val| {
-                let idx = val.into_raw().into_u32() as usize;
-                // we only need to consider crates that were not merged and remapped, as the
-                // ones that were remapped already have the correct layout and toolchain
-                if idx >= num_layouts {
-                    if layouts.len() <= idx {
-                        layouts.resize(idx + 1, e.clone());
-                    }
-                    layouts[idx] = layout.clone();
-                }
-                if idx >= num_toolchains {
-                    if toolchains.len() <= idx {
-                        toolchains.resize(idx + 1, None);
-                    }
-                    toolchains[idx] = toolchain.clone();
-                }
-            });
-            proc_macro_paths.push(crate_proc_macros);
-        }
+        let (crate_graph, proc_macro_paths, layouts, toolchains) =
+            ws_to_crate_graph(&self.workspaces, self.config.extra_env(), load);
 
         let mut change = Change::new();
         if self.config.expand_proc_macros() {
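Both the inline code removed here and the extracted `ws_to_crate_graph` below keep two side tables, `layouts` and `toolchains`, indexed by each crate's raw `CrateId`; entries are grown on demand with a placeholder and then overwritten. A minimal sketch of that grow-then-set pattern, using a plain `Vec` and a `u32` index instead of the real `CrateId` (the `set_entry` helper and the example layout string are illustrative, not rust-analyzer API):

// Minimal sketch of the grow-then-set pattern used for the `layouts` and
// `toolchains` side tables; `set_entry` and the example layout string are
// assumptions for illustration only.
fn set_entry<T: Clone>(table: &mut Vec<T>, idx: u32, placeholder: T, value: T) {
    let idx = idx as usize;
    if table.len() <= idx {
        // Fill any gap up to `idx` with the placeholder, mirroring
        // `layouts.resize(idx + 1, e.clone())` in the diff above.
        table.resize(idx + 1, placeholder);
    }
    table[idx] = value;
}

fn main() {
    let mut layouts: Vec<Result<String, String>> = Vec::new();
    let missing: Result<String, String> = Err("missing layout".to_string());
    // Crate with id 2 gets a real layout; ids 0 and 1 keep the placeholder.
    set_entry(&mut layouts, 2, missing.clone(), Ok("e-m:e-i64:64-n8:16:32:64-S128".to_string()));
    assert_eq!(layouts.len(), 3);
    assert_eq!(layouts[0], missing);
    assert!(layouts[2].is_ok());
}

Resizing with a placeholder keeps the table dense, so a later crate can always be written at the slot matching its id without going out of bounds.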
@@ -609,6 +562,8 @@ impl GlobalState {
             self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths);
         }
         change.set_crate_graph(crate_graph);
+        change.set_target_data_layouts(layouts);
+        change.set_toolchains(toolchains);
         self.analysis_host.apply_change(change);
         self.crate_graph_file_dependencies = crate_graph_file_dependencies;
     }
@@ -719,6 +674,97 @@ impl GlobalState {
     }
 }
 
+// FIXME: Move this into load-cargo?
+pub fn ws_to_crate_graph(
+    workspaces: &[ProjectWorkspace],
+    extra_env: &FxHashMap<String, String>,
+    mut load: impl FnMut(&AbsPath) -> Option<vfs::FileId>,
+) -> (
+    CrateGraph,
+    Vec<FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>>,
+    Vec<Result<Arc<str>, Arc<str>>>,
+    Vec<Option<Version>>,
+) {
+    let mut crate_graph = CrateGraph::default();
+    let mut proc_macro_paths = Vec::default();
+    let mut layouts = Vec::default();
+    let mut toolchains = Vec::default();
+    let e = Err(Arc::from("missing layout"));
+    for ws in workspaces {
+        let (other, mut crate_proc_macros) = ws.to_crate_graph(&mut load, extra_env);
+        let num_layouts = layouts.len();
+        let num_toolchains = toolchains.len();
+        let (toolchain, layout) = match ws {
+            ProjectWorkspace::Cargo { toolchain, target_layout, .. }
+            | ProjectWorkspace::Json { toolchain, target_layout, .. } => {
+                (toolchain.clone(), target_layout.clone())
+            }
+            ProjectWorkspace::DetachedFiles { .. } => {
+                (None, Err("detached files have no layout".into()))
+            }
+        };
+
+        let mapping = crate_graph.extend(
+            other,
+            &mut crate_proc_macros,
+            |(cg_id, _cg_data), (_o_id, _o_data)| {
+                // if the newly created crate graph's layout is equal to the crate of the merged graph, then
+                // we can merge the crates.
+                let id = cg_id.into_raw().into_u32() as usize;
+                if layouts[id] == layout && toolchains[id] == toolchain {
+                    let (res, update) = match (&_cg_data.origin, &_o_data.origin) {
+                        (a, b)
+                            if a == b && _cg_data.eq_ignoring_origin_and_deps(_o_data, false) =>
+                        {
+                            (true, false)
+                        }
+                        (a @ CrateOrigin::Local { .. }, CrateOrigin::Library { .. })
+                        | (a @ CrateOrigin::Library { .. }, CrateOrigin::Local { .. })
+                            if _cg_data.eq_ignoring_origin_and_deps(_o_data, true) =>
+                        {
+                            // If the origins differ, check if the two crates are equal without
+                            // considering the dev dependencies, if they are, they most likely are in
+                            // different loaded workspaces which may cause issues. We keep the local
+                            // version and discard the library one as the local version may have
+                            // dev-dependencies that we want to keep resolving. See #15656 for more
+                            // information.
+                            (true, !a.is_local())
+                        }
+                        (_, _) => (false, false),
+                    };
+                    if res && update {
+                        _cg_data.origin = _o_data.origin.clone();
+                        _cg_data.dependencies = _o_data.dependencies.clone();
+                    }
+                    res
+                } else {
+                    false
+                }
+            },
+        );
+        // Populate the side tables for the newly merged crates
+        mapping.values().for_each(|val| {
+            let idx = val.into_raw().into_u32() as usize;
+            // we only need to consider crates that were not merged and remapped, as the
+            // ones that were remapped already have the correct layout and toolchain
+            if idx >= num_layouts {
+                if layouts.len() <= idx {
+                    layouts.resize(idx + 1, e.clone());
+                }
+                layouts[idx] = layout.clone();
+            }
+            if idx >= num_toolchains {
+                if toolchains.len() <= idx {
+                    toolchains.resize(idx + 1, None);
+                }
+                toolchains[idx] = toolchain.clone();
+            }
+        });
+        proc_macro_paths.push(crate_proc_macros);
+    }
+    (crate_graph, proc_macro_paths, layouts, toolchains)
+}
+
 pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
     const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
     const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];
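The interesting part of `ws_to_crate_graph` is the dedup closure passed to `crate_graph.extend`: two crates are merged when their layout and toolchain match and they are either fully equal, or they differ only in origin (one `Local`, one `Library`) and compare equal once dev-dependencies are ignored, in which case the local side's origin and dependencies win. A standalone sketch of that decision, using simplified stand-ins (`Origin`, `CrateData`, `dedup_decision`) rather than rust-analyzer's real types:

// A minimal, self-contained model of the dedup decision above. The types and
// the `dedup_decision` helper are simplified assumptions, not the real API.
#[derive(Clone, Debug, PartialEq, Eq)]
enum Origin {
    Local,
    Library,
}

#[derive(Clone, Debug, PartialEq, Eq)]
struct CrateData {
    name: String,
    origin: Origin,
    deps: Vec<String>,     // normal dependencies
    dev_deps: Vec<String>, // dev-dependencies (only resolved for local crates)
}

impl CrateData {
    /// Compare two crates while ignoring their origin; if `ignore_dev_deps`
    /// is set, dev-dependencies are ignored as well.
    fn eq_ignoring_origin_and_deps(&self, other: &Self, ignore_dev_deps: bool) -> bool {
        self.name == other.name
            && self.deps == other.deps
            && (ignore_dev_deps || self.dev_deps == other.dev_deps)
    }
}

/// Returns `(merge, prefer_other)`: whether the two crates should be
/// deduplicated, and whether the other (local) side's origin and dependencies
/// should replace the existing ones.
fn dedup_decision(existing: &CrateData, other: &CrateData) -> (bool, bool) {
    match (&existing.origin, &other.origin) {
        // Same origin and fully equal: merge, nothing to update.
        (a, b) if a == b && existing.eq_ignoring_origin_and_deps(other, false) => (true, false),
        // Local vs. library copy that only differ in dev-dependencies:
        // merge, and take over the local side if the existing one is not local.
        (Origin::Local, Origin::Library) | (Origin::Library, Origin::Local)
            if existing.eq_ignoring_origin_and_deps(other, true) =>
        {
            (true, existing.origin != Origin::Local)
        }
        _ => (false, false),
    }
}

fn main() {
    let local = CrateData {
        name: "p2".into(),
        origin: Origin::Local,
        deps: vec![],
        dev_deps: vec!["p1".into()],
    };
    let library = CrateData { origin: Origin::Library, dev_deps: vec![], ..local.clone() };
    // Library copy meets an already-inserted local copy: merge, keep local.
    assert_eq!(dedup_decision(&local, &library), (true, false));
    // Local copy meets an already-inserted library copy: merge and take over
    // the local origin and dependencies.
    assert_eq!(dedup_decision(&library, &local), (true, true));
    println!("dedup decisions behave as expected");
}

Keeping the local copy matters because only local crates have their dev-dependencies resolved; letting the library copy win would silently drop those edges, which is the scenario the tests below exercise.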
crates/rust-analyzer/tests/crate_graph.rs (new file, +118)
@@ -0,0 +1,118 @@
use std::path::PathBuf;

use project_model::{CargoWorkspace, ProjectWorkspace, Sysroot, WorkspaceBuildScripts};
use rust_analyzer::ws_to_crate_graph;
use rustc_hash::FxHashMap;
use serde::de::DeserializeOwned;
use vfs::{AbsPathBuf, FileId};

fn load_cargo_with_fake_sysroot(file: &str) -> ProjectWorkspace {
    let meta = get_test_json_file(file);
    let cargo_workspace = CargoWorkspace::new(meta);
    ProjectWorkspace::Cargo {
        cargo: cargo_workspace,
        build_scripts: WorkspaceBuildScripts::default(),
        sysroot: Ok(get_fake_sysroot()),
        rustc: Err(None),
        rustc_cfg: Vec::new(),
        cfg_overrides: Default::default(),
        toolchain: None,
        target_layout: Err("target_data_layout not loaded".into()),
        cargo_config_extra_env: Default::default(),
    }
}

fn get_test_json_file<T: DeserializeOwned>(file: &str) -> T {
    let base = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    let file = base.join("tests/test_data").join(file);
    let data = std::fs::read_to_string(file).unwrap();
    let mut json = data.parse::<serde_json::Value>().unwrap();
    fixup_paths(&mut json);
    return serde_json::from_value(json).unwrap();

    fn fixup_paths(val: &mut serde_json::Value) {
        match val {
            serde_json::Value::String(s) => replace_root(s, true),
            serde_json::Value::Array(vals) => vals.iter_mut().for_each(fixup_paths),
            serde_json::Value::Object(kvals) => kvals.values_mut().for_each(fixup_paths),
            serde_json::Value::Null | serde_json::Value::Bool(_) | serde_json::Value::Number(_) => {
            }
        }
    }
}

fn replace_root(s: &mut String, direction: bool) {
    if direction {
        let root = if cfg!(windows) { r#"C:\\ROOT\"# } else { "/ROOT/" };
        *s = s.replace("$ROOT$", root)
    } else {
        let root = if cfg!(windows) { r#"C:\\\\ROOT\\"# } else { "/ROOT/" };
        *s = s.replace(root, "$ROOT$")
    }
}

fn get_fake_sysroot_path() -> PathBuf {
    let base = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    base.join("../project-model/test_data/fake-sysroot")
}

fn get_fake_sysroot() -> Sysroot {
    let sysroot_path = get_fake_sysroot_path();
    // there's no `libexec/` directory with a `proc-macro-srv` binary in that
    // fake sysroot, so we give them both the same path:
    let sysroot_dir = AbsPathBuf::assert(sysroot_path);
    let sysroot_src_dir = sysroot_dir.clone();
    Sysroot::load(sysroot_dir, Some(Ok(sysroot_src_dir)), false)
}

#[test]
fn test_deduplicate_origin_dev() {
    let path_map = &mut FxHashMap::default();
    let ws = load_cargo_with_fake_sysroot("deduplication_crate_graph_A.json");
    let ws2 = load_cargo_with_fake_sysroot("deduplication_crate_graph_B.json");

    let (crate_graph, ..) = ws_to_crate_graph(&[ws, ws2], &Default::default(), |path| {
        let len = path_map.len();
        Some(*path_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32)))
    });

    let mut crates_named_p2 = vec![];
    for id in crate_graph.iter() {
        let krate = &crate_graph[id];
        if let Some(name) = krate.display_name.as_ref() {
            if name.to_string() == "p2" {
                crates_named_p2.push(krate);
            }
        }
    }

    assert!(crates_named_p2.len() == 1);
    let p2 = crates_named_p2[0];
    assert!(p2.origin.is_local());
}

#[test]
fn test_deduplicate_origin_dev_rev() {
    let path_map = &mut FxHashMap::default();
    let ws = load_cargo_with_fake_sysroot("deduplication_crate_graph_B.json");
    let ws2 = load_cargo_with_fake_sysroot("deduplication_crate_graph_A.json");

    let (crate_graph, ..) = ws_to_crate_graph(&[ws, ws2], &Default::default(), |path| {
        let len = path_map.len();
        Some(*path_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32)))
    });

    let mut crates_named_p2 = vec![];
    for id in crate_graph.iter() {
        let krate = &crate_graph[id];
        if let Some(name) = krate.display_name.as_ref() {
            if name.to_string() == "p2" {
                crates_named_p2.push(krate);
            }
        }
    }

    assert!(crates_named_p2.len() == 1);
    let p2 = crates_named_p2[0];
    assert!(p2.origin.is_local());
}
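The tests drive `ws_to_crate_graph` without a real VFS: the `load` closure simply interns every requested path into an `FxHashMap`, handing out sequential `FileId`s. The same pattern with plain std types (the `PathInterner` name and the `u32` ids are illustrative simplifications):

use std::collections::HashMap;
use std::path::{Path, PathBuf};

/// Interns paths to dense numeric ids, mimicking the tests' `load` closure
/// (which uses `FxHashMap` and `vfs::FileId`). Illustrative only.
#[derive(Default)]
struct PathInterner {
    map: HashMap<PathBuf, u32>,
}

impl PathInterner {
    fn intern(&mut self, path: &Path) -> u32 {
        // The candidate id is the current map size; it is only used if the
        // path has not been seen before, exactly like the closure above.
        let next = self.map.len() as u32;
        *self.map.entry(path.to_path_buf()).or_insert(next)
    }
}

fn main() {
    let mut interner = PathInterner::default();
    let a = interner.intern(Path::new("/ROOT/example_project/p1/src/lib.rs"));
    let b = interner.intern(Path::new("/ROOT/example_project/p2/src/lib.rs"));
    // Interning an already-seen path returns the id it got the first time.
    assert_eq!(a, interner.intern(Path::new("/ROOT/example_project/p1/src/lib.rs")));
    assert_ne!(a, b);
    println!("p1 lib -> {a}, p2 lib -> {b}");
}

With the two fixtures loaded in either order, the resulting crate graph should contain a single crate named `p2` whose origin is local, which is what both tests assert; the suite should be runnable with something like `cargo test -p rust-analyzer --test crate_graph`.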
@@ -0,0 +1,140 @@
{
  "packages": [
    {
      "name": "p1",
      "version": "0.1.0",
      "id": "p1 0.1.0 (path+file:///example_project/p1)",
      "license": null,
      "license_file": null,
      "description": null,
      "source": null,
      "dependencies": [
        {
          "name": "p2",
          "source": null,
          "req": "*",
          "kind": null,
          "rename": null,
          "optional": false,
          "uses_default_features": true,
          "features": [],
          "target": null,
          "registry": null,
          "path": "$ROOT$example_project/p2"
        }
      ],
      "targets": [
        {
          "kind": [
            "lib"
          ],
          "crate_types": [
            "lib"
          ],
          "name": "p1",
          "src_path": "$ROOT$example_project/p1/src/lib.rs",
          "edition": "2021",
          "doc": true,
          "doctest": true,
          "test": true
        }
      ],
      "features": {},
      "manifest_path": "$ROOT$example_project/p1/Cargo.toml",
      "metadata": null,
      "publish": null,
      "authors": [],
      "categories": [],
      "keywords": [],
      "readme": null,
      "repository": null,
      "homepage": null,
      "documentation": null,
      "edition": "2021",
      "links": null,
      "default_run": null,
      "rust_version": null
    },
    {
      "name": "p2",
      "version": "0.1.0",
      "id": "p2 0.1.0 (path+file:///example_project/p2)",
      "license": null,
      "license_file": null,
      "description": null,
      "source": null,
      "dependencies": [],
      "targets": [
        {
          "kind": [
            "lib"
          ],
          "crate_types": [
            "lib"
          ],
          "name": "p2",
          "src_path": "$ROOT$example_project/p2/src/lib.rs",
          "edition": "2021",
          "doc": true,
          "doctest": true,
          "test": true
        }
      ],
      "features": {},
      "manifest_path": "$ROOT$example_project/p2/Cargo.toml",
      "metadata": null,
      "publish": null,
      "authors": [],
      "categories": [],
      "keywords": [],
      "readme": null,
      "repository": null,
      "homepage": null,
      "documentation": null,
      "edition": "2021",
      "links": null,
      "default_run": null,
      "rust_version": null
    }
  ],
  "workspace_members": [
    "p1 0.1.0 (path+file:///example_project/p1)"
  ],
  "workspace_default_members": [
    "p1 0.1.0 (path+file:///example_project/p1)"
  ],
  "resolve": {
    "nodes": [
      {
        "id": "p1 0.1.0 (path+file:///example_project/p1)",
        "dependencies": [
          "p2 0.1.0 (path+file:///example_project/p2)"
        ],
        "deps": [
          {
            "name": "p2",
            "pkg": "p2 0.1.0 (path+file:///example_project/p2)",
            "dep_kinds": [
              {
                "kind": null,
                "target": null
              }
            ]
          }
        ],
        "features": []
      },
      {
        "id": "p2 0.1.0 (path+file:///example_project/p2)",
        "dependencies": [],
        "deps": [],
        "features": []
      }
    ],
    "root": "p1 0.1.0 (path+file:///example_project/p1)"
  },
  "target_directory": "$ROOT$example_project/p1/target",
  "version": 1,
  "workspace_root": "$ROOT$example_project/p1",
  "metadata": null
}
@@ -0,0 +1,66 @@
{
  "packages": [
    {
      "name": "p2",
      "version": "0.1.0",
      "id": "p2 0.1.0 (path+file:///example_project/p2)",
      "license": null,
      "license_file": null,
      "description": null,
      "source": null,
      "dependencies": [],
      "targets": [
        {
          "kind": [
            "lib"
          ],
          "crate_types": [
            "lib"
          ],
          "name": "p2",
          "src_path": "$ROOT$example_project/p2/src/lib.rs",
          "edition": "2021",
          "doc": true,
          "doctest": true,
          "test": true
        }
      ],
      "features": {},
      "manifest_path": "$ROOT$example_project/p2/Cargo.toml",
      "metadata": null,
      "publish": null,
      "authors": [],
      "categories": [],
      "keywords": [],
      "readme": null,
      "repository": null,
      "homepage": null,
      "documentation": null,
      "edition": "2021",
      "links": null,
      "default_run": null,
      "rust_version": null
    }
  ],
  "workspace_members": [
    "p2 0.1.0 (path+file:///example_project/p2)"
  ],
  "workspace_default_members": [
    "p2 0.1.0 (path+file:///example_project/p2)"
  ],
  "resolve": {
    "nodes": [
      {
        "id": "p2 0.1.0 (path+file:///example_project/p2)",
        "dependencies": [],
        "deps": [],
        "features": []
      }
    ],
    "root": "p2 0.1.0 (path+file:///example_project/p2)"
  },
  "target_directory": "$ROOT$example_project/p2/target",
  "version": 1,
  "workspace_root": "$ROOT$example_project/p2",
  "metadata": null
}