Lock all packages in workspace (#4016)

When creating a lockfile, lock the combined dependencies for all
packages in the workspace. This makes the lockfile independent of where you
are in the workspace.

Fixes #3983
konsti 2024-06-06 21:09:44 +02:00 committed by GitHub
parent e9fc99e622
commit a6f53e2aa4
13 changed files with 239 additions and 70 deletions
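In outline, locking now discovers the workspace once, seeds the resolution with every member as an editable path requirement, and writes `uv.lock` at the workspace root. Below is a minimal sketch of that flow; `Workspace::discover`, `members_as_requirements`, and `root` are the APIs introduced in this change, while the wrapper function name and the use of `anyhow` are placeholders for brevity, not the actual `do_lock` signature.

```rust
// A sketch only: it mirrors the shape of `do_lock` further below, with the
// resolver, environment, and error types replaced by `anyhow` for brevity.
use uv_distribution::Workspace;

async fn lock_workspace_sketch() -> anyhow::Result<()> {
    // Discovery now returns the whole workspace, not just the current project.
    let workspace = Workspace::discover(&std::env::current_dir()?, None).await?;

    // Every member becomes an editable path requirement, so a single resolution
    // covers the combined dependencies of all packages in the workspace.
    let requirements = workspace.members_as_requirements();

    // The lockfile always lives at the workspace root, regardless of which
    // member directory the command was invoked from.
    let lockfile_path = workspace.root().join("uv.lock");
    println!(
        "locking {} workspace member(s) into {}",
        requirements.len(),
        lockfile_path.display()
    );
    Ok(())
}
```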

Cargo.lock (generated)

@@ -4910,6 +4910,7 @@ dependencies = [
"requirements-txt",
"rkyv",
"rustc-hash",
"same-file",
"schemars",
"serde",
"textwrap",


@@ -222,17 +222,12 @@ fn path_source(
editable: bool,
) -> Result<RequirementSource, LoweringError> {
let url = VerbatimUrl::parse_path(path.as_ref(), project_dir)?
.with_given(path.as_ref().to_string_lossy().to_string());
.with_given(path.as_ref().to_string_lossy());
let path_buf = path.as_ref().to_path_buf();
let path_buf = path_buf
.absolutize_from(project_dir)
.map_err(|err| LoweringError::Absolutize(path.as_ref().to_path_buf(), err))?
.to_path_buf();
//if !editable {
// // TODO(konsti): Support this. Currently we support `{ workspace = true }`, but we don't
// // support `{ workspace = true, editable = false }` since we only collect editables.
// return Err(LoweringError::NonEditableWorkspaceDependency);
//}
Ok(RequirementSource::Path {
path: path_buf,
url,


@@ -63,7 +63,7 @@ pub struct ToolUv {
pub dev_dependencies: Option<Vec<pep508_rs::Requirement<VerbatimParsedUrl>>>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct ToolUvWorkspace {
pub members: Option<Vec<SerdePattern>>,


@@ -98,14 +98,22 @@ impl Workspace {
let (workspace_root, workspace_definition, workspace_pyproject_toml) =
if let Some(workspace) = explicit_root {
// We have found the explicit root immediately.
workspace
} else if pyproject_toml.project.is_none() {
// Without a project, it can't be an implicit root
return Err(WorkspaceError::MissingProject(project_path));
} else if let Some(workspace) = find_workspace(&project_path, stop_discovery_at).await?
{
// We have found an explicit root above.
workspace
} else {
return Err(WorkspaceError::MissingWorkspace(project_path));
// Support implicit single project workspaces.
(
project_path.clone(),
ToolUvWorkspace::default(),
pyproject_toml.clone(),
)
};
debug!(
@@ -145,6 +153,47 @@ impl Workspace {
})
}
/// Returns the set of requirements that include all packages in the workspace.
pub fn members_as_requirements(&self) -> Vec<Requirement> {
self.packages
.values()
.filter_map(|member| {
let project = member.pyproject_toml.project.as_ref()?;
// Extract the extras available in the project.
let extras = project
.optional_dependencies
.as_ref()
.map(|optional_dependencies| {
// It's a `BTreeMap` so the keys are sorted.
optional_dependencies.keys().cloned().collect::<Vec<_>>()
})
.unwrap_or_default();
let url = VerbatimUrl::from_path(&member.root)
.expect("path is valid URL")
.with_given(member.root.to_string_lossy());
Some(Requirement {
name: project.name.clone(),
extras,
marker: None,
source: RequirementSource::Path {
path: member.root.clone(),
editable: true,
url,
},
origin: None,
})
})
.collect()
}
/// If there is a package at the workspace root, return it.
pub fn root_member(&self) -> Option<&WorkspaceMember> {
self.packages
.values()
.find(|package| package.root == self.root)
}
/// The path to the workspace root, the directory containing the top-level `pyproject.toml` with
/// the `[tool.uv.workspace]` table, or the `pyproject.toml` of an implicit single-project workspace.
pub fn root(&self) -> &PathBuf {
@@ -490,30 +539,6 @@ impl ProjectWorkspace {
&self.workspace().packages[&self.project_name]
}
/// Return the [`Requirement`] entries for the project, which is the current project as
/// editable.
pub fn requirements(&self) -> Vec<Requirement> {
vec![Requirement {
name: self.project_name.clone(),
extras: self.workspace().packages[&self.project_name]
.pyproject_toml
.project
.as_ref()
.and_then(|project| project.optional_dependencies.as_ref())
.map(|optional_dependencies| {
optional_dependencies.keys().cloned().collect::<Vec<_>>()
})
.unwrap_or_default(),
marker: None,
source: RequirementSource::Path {
path: self.project_root.clone(),
editable: true,
url: VerbatimUrl::from_path(&self.project_root).expect("path is valid URL"),
},
origin: None,
}]
}
/// Find the workspace for a project.
pub async fn from_project(
project_path: &Path,
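One behavioral consequence of the discovery fallback in the first hunk of this file: a lone `pyproject.toml` with a `[project]` table but no `[tool.uv.workspace]` table and no enclosing workspace is now treated as a workspace containing only itself. A hedged usage sketch follows; the directory path and function name are hypothetical, while `Workspace::discover`, `root()`, `members_as_requirements()`, and `WorkspaceError::MissingWorkspace` come from the hunks above.

```rust
use std::path::Path;

use uv_distribution::Workspace;

async fn implicit_workspace_sketch() -> anyhow::Result<()> {
    // A standalone project: a `pyproject.toml` with a `[project]` table but no
    // workspace table anywhere up the directory tree.
    let project_dir = Path::new("/home/user/standalone");

    // Previously this failed with `WorkspaceError::MissingWorkspace`; now the project
    // directory itself becomes the workspace root with a default workspace definition.
    let workspace = Workspace::discover(project_dir, None).await?;
    assert_eq!(workspace.root(), project_dir);

    // The project is the sole workspace member.
    assert_eq!(workspace.members_as_requirements().len(), 1);
    Ok(())
}
```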


@@ -6,7 +6,7 @@ use anyhow::Result;
use requirements_txt::RequirementsTxt;
use uv_client::{BaseClientBuilder, Connectivity};
use uv_configuration::Upgrade;
use uv_distribution::ProjectWorkspace;
use uv_distribution::Workspace;
use uv_git::ResolvedRepositoryReference;
use uv_resolver::{Lock, Preference, PreferenceError};
@@ -64,17 +64,14 @@ pub async fn read_requirements_txt(
}
/// Load the preferred requirements from an existing lockfile, applying the upgrade strategy.
pub async fn read_lockfile(
project: &ProjectWorkspace,
upgrade: &Upgrade,
) -> Result<LockedRequirements> {
pub async fn read_lockfile(workspace: &Workspace, upgrade: &Upgrade) -> Result<LockedRequirements> {
// As an optimization, skip reading the lockfile if we're upgrading all packages anyway.
if upgrade.is_all() {
return Ok(LockedRequirements::default());
}
// If an existing lockfile exists, build up a set of preferences.
let lockfile = project.workspace().root().join("uv.lock");
let lockfile = workspace.root().join("uv.lock");
let lock = match fs_err::tokio::read_to_string(&lockfile).await {
Ok(encoded) => match toml::from_str::<Lock>(&encoded) {
Ok(lock) => lock,


@@ -46,6 +46,7 @@ petgraph = { workspace = true }
pubgrub = { workspace = true }
rkyv = { workspace = true }
rustc-hash = { workspace = true }
same-file = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true }
textwrap = { workspace = true }


@@ -4,12 +4,13 @@ use either::Either;
use itertools::Itertools;
use pubgrub::range::Range;
use rustc_hash::FxHashSet;
use same_file::is_same_file;
use tracing::warn;
use distribution_types::Verbatim;
use pep440_rs::Version;
use pep508_rs::MarkerEnvironment;
use pypi_types::{Requirement, RequirementSource};
use pypi_types::{ParsedUrl, Requirement, RequirementSource, VerbatimParsedUrl};
use uv_configuration::{Constraints, Overrides};
use uv_git::GitResolver;
use uv_normalize::{ExtraName, GroupName, PackageName};
@@ -308,7 +309,7 @@ impl PubGrubRequirement {
version: Range::full(),
})
}
RequirementSource::Path { url, .. } => {
RequirementSource::Path { url, path, .. } => {
let Some(expected) = urls.get(&requirement.name) else {
return Err(ResolveError::DisallowedUrl(
requirement.name.clone(),
@@ -316,7 +317,22 @@
));
};
if !Urls::is_allowed(&expected.verbatim, url, git) {
let mut is_allowed = Urls::is_allowed(&expected.verbatim, url, git);
if !is_allowed {
if let VerbatimParsedUrl {
parsed_url: ParsedUrl::Path(previous_path),
..
} = &expected
{
// On Windows, we can have two versions of the same path, e.g.
// `C:\Users\KONSTA~1` and `C:\Users\Konstantin`.
if is_same_file(path, &previous_path.path).unwrap_or(false) {
is_allowed = true;
}
}
}
if !is_allowed {
return Err(ResolveError::ConflictingUrlsTransitive(
requirement.name.clone(),
expected.verbatim.verbatim().to_string(),


@@ -1,4 +1,5 @@ use rustc_hash::FxHashMap;
use rustc_hash::FxHashMap;
use same_file::is_same_file;
use tracing::debug;
use url::Url;
@@ -66,6 +67,18 @@ impl Urls {
verbatim: url.clone(),
};
if let Some(previous) = urls.insert(requirement.name.clone(), url.clone()) {
if let VerbatimParsedUrl {
parsed_url: ParsedUrl::Path(previous_path),
..
} = &previous
{
// On Windows, we can have two versions of the same path, e.g.
// `C:\Users\KONSTA~1` and `C:\Users\Konstantin`.
if is_same_file(path, &previous_path.path).unwrap_or(false) {
continue;
}
}
if !is_equal(&previous.verbatim, &url.verbatim) {
return Err(ResolveError::ConflictingUrlsDirect(
requirement.name.clone(),


@@ -12,9 +12,10 @@ use uv_configuration::{
SetupPyStrategy, Upgrade,
};
use uv_dispatch::BuildDispatch;
use uv_distribution::{ProjectWorkspace, DEV_DEPENDENCIES};
use uv_distribution::{Workspace, DEV_DEPENDENCIES};
use uv_git::GitResolver;
use uv_interpreter::PythonEnvironment;
use uv_normalize::PackageName;
use uv_requirements::upgrade::{read_lockfile, LockedRequirements};
use uv_resolver::{ExcludeNewer, FlatIndex, InMemoryIndex, Lock, OptionsBuilder};
use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy, InFlight};
@@ -39,14 +40,22 @@ pub(crate) async fn lock(
}
// Find the project requirements.
let project = ProjectWorkspace::discover(&std::env::current_dir()?, None).await?;
let workspace = Workspace::discover(&std::env::current_dir()?, None).await?;
// Discover or create the virtual environment.
let venv = project::init_environment(&project, preview, cache, printer)?;
let venv = project::init_environment(&workspace, preview, cache, printer)?;
// Perform the lock operation.
let root_project_name = workspace.root_member().and_then(|member| {
member
.pyproject_toml()
.project
.as_ref()
.map(|project| project.name.clone())
});
match do_lock(
&project,
root_project_name,
&workspace,
&venv,
&index_locations,
upgrade,
@@ -73,7 +82,8 @@ pub(crate) async fn lock(
/// Lock the project requirements into a lockfile.
#[allow(clippy::too_many_arguments)]
pub(super) async fn do_lock(
project: &ProjectWorkspace,
root_project_name: Option<PackageName>,
workspace: &Workspace,
venv: &PythonEnvironment,
index_locations: &IndexLocations,
upgrade: Upgrade,
@@ -83,32 +93,39 @@ pub(super) async fn do_lock(
printer: Printer,
) -> Result<Lock, ProjectError> {
// When locking, include the project itself (as editable).
let requirements = project
.requirements()
let requirements = workspace
.members_as_requirements()
.into_iter()
.map(UnresolvedRequirementSpecification::from)
.collect::<Vec<_>>();
.collect();
let constraints = vec![];
let overrides = vec![];
let dev = vec![DEV_DEPENDENCIES.clone()];
let source_trees = vec![];
let project_name = project.project_name().clone();
// Determine the supported Python range. If no range is defined, warn and default to the
// current minor version.
let project = root_project_name
.as_ref()
.and_then(|name| workspace.packages().get(name));
let requires_python = if let Some(requires_python) =
project.current_project().project().requires_python.as_ref()
project.and_then(|root_project| root_project.project().requires_python.as_ref())
{
Cow::Borrowed(requires_python)
} else {
let requires_python = VersionSpecifiers::from(
VersionSpecifier::greater_than_equal_version(venv.interpreter().python_minor_version()),
);
warn_user!(
"No `requires-python` field found in `{}`. Defaulting to `{requires_python}`.",
project.current_project().project().name,
);
if let Some(root_project_name) = root_project_name.as_ref() {
warn_user!(
"No `requires-python` field found in `{root_project_name}`. Defaulting to `{requires_python}`.",
);
} else {
warn_user!(
"No `requires-python` field found in workspace. Defaulting to `{requires_python}`.",
);
}
Cow::Owned(requires_python)
};
@@ -143,7 +160,7 @@ pub(super) async fn do_lock(
let options = OptionsBuilder::new().exclude_newer(exclude_newer).build();
// If an existing lockfile exists, build up a set of preferences.
let LockedRequirements { preferences, git } = read_lockfile(project, &upgrade).await?;
let LockedRequirements { preferences, git } = read_lockfile(workspace, &upgrade).await?;
// Create the Git resolver.
let git = GitResolver::from_refs(git);
@@ -175,7 +192,7 @@ pub(super) async fn do_lock(
overrides,
dev,
source_trees,
Some(project_name),
root_project_name,
&extras,
preferences,
EmptyInstalledPackages,
@@ -203,11 +220,7 @@ pub(super) async fn do_lock(
// Write the lockfile to disk.
let lock = Lock::from_resolution_graph(&resolution)?;
let encoded = lock.to_toml()?;
fs_err::tokio::write(
project.workspace().root().join("uv.lock"),
encoded.as_bytes(),
)
.await?;
fs_err::tokio::write(workspace.root().join("uv.lock"), encoded.as_bytes()).await?;
Ok(lock)
}
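The `requires-python` fallback above is small but easy to miss: when no root project declares a range, the lower bound is pinned to the interpreter's minor version. A sketch of just that step, reusing the `pep440_rs` constructors from the hunk; the function name is hypothetical.

```rust
use pep440_rs::{Version, VersionSpecifier, VersionSpecifiers};

// For a Python 3.12 interpreter this yields the specifier set `>=3.12`.
fn default_requires_python(python_minor_version: Version) -> VersionSpecifiers {
    VersionSpecifiers::from(VersionSpecifier::greater_than_equal_version(
        python_minor_version,
    ))
}
```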


@@ -15,7 +15,7 @@ use uv_configuration::{
SetupPyStrategy, Upgrade,
};
use uv_dispatch::BuildDispatch;
use uv_distribution::ProjectWorkspace;
use uv_distribution::Workspace;
use uv_fs::Simplified;
use uv_git::GitResolver;
use uv_installer::{SatisfiesResult, SitePackages};
@@ -66,12 +66,12 @@ pub(crate) enum ProjectError {
/// Initialize a virtual environment for the current project.
pub(crate) fn init_environment(
project: &ProjectWorkspace,
workspace: &Workspace,
preview: PreviewMode,
cache: &Cache,
printer: Printer,
) -> Result<PythonEnvironment, ProjectError> {
let venv = project.workspace().root().join(".venv");
let venv = workspace.root().join(".venv");
// Discover or create the virtual environment.
// TODO(charlie): If the environment isn't compatible with `--python`, recreate it.


@@ -61,11 +61,18 @@ pub(crate) async fn run(
} else {
ProjectWorkspace::discover(&std::env::current_dir()?, None).await?
};
let venv = project::init_environment(&project, preview, cache, printer)?;
let venv = project::init_environment(project.workspace(), preview, cache, printer)?;
// Lock and sync the environment.
let root_project_name = project
.current_project()
.pyproject_toml()
.project
.as_ref()
.map(|project| project.name.clone());
let lock = project::lock::do_lock(
&project,
root_project_name,
project.workspace(),
&venv,
&index_locations,
upgrade,


@@ -40,7 +40,7 @@ pub(crate) async fn sync(
let project = ProjectWorkspace::discover(&std::env::current_dir()?, None).await?;
// Discover or create the virtual environment.
let venv = project::init_environment(&project, preview, cache, printer)?;
let venv = project::init_environment(project.workspace(), preview, cache, printer)?;
// Read the lockfile.
let lock: Lock = {


@@ -3,6 +3,8 @@ use std::path::PathBuf;
use std::process::Command;
use anyhow::Result;
use assert_cmd::assert::OutputAssertExt;
use url::Url;
use crate::common::{copy_dir_ignore, get_bin, uv_snapshot, TestContext, EXCLUDE_NEWER};
@@ -59,6 +61,28 @@ fn run_workspace(context: &TestContext) -> Command {
command
}
/// A `uv lock` command.
fn lock_workspace(context: &TestContext) -> Command {
let mut command = Command::new(get_bin());
command
.arg("lock")
.arg("--preview")
.arg("--cache-dir")
.arg(context.cache_dir.path())
.arg("--python")
.arg(context.interpreter())
.arg("--exclude-newer")
.arg(EXCLUDE_NEWER)
.env("UV_NO_WRAP", "1");
if cfg!(all(windows, debug_assertions)) {
// TODO(konstin): Reduce stack usage in debug mode enough that the tests pass with the
// default Windows stack of 1 MB
command.env("UV_STACK_SIZE", (4 * 1024 * 1024).to_string());
}
command
}
fn workspaces_dir() -> PathBuf {
env::current_dir()
.unwrap()
@@ -405,7 +429,7 @@ fn test_uv_run_with_package_virtual_workspace() -> Result<()> {
Success
----- stderr -----
Resolved 7 packages in [TIME]
Resolved 10 packages in [TIME]
Downloaded 5 packages in [TIME]
Installed 5 packages in [TIME]
+ anyio==4.3.0
@@ -473,7 +497,7 @@ fn test_uv_run_with_package_root_workspace() -> Result<()> {
Success
----- stderr -----
Resolved 7 packages in [TIME]
Resolved 10 packages in [TIME]
Downloaded 5 packages in [TIME]
Installed 5 packages in [TIME]
+ anyio==4.3.0
@@ -505,3 +529,80 @@ fn test_uv_run_with_package_root_workspace() -> Result<()> {
Ok(())
}
/// Check that the resolution is the same no matter where in the workspace we are.
fn workspace_lock_idempotence(workspace: &str, subdirectories: &[&str]) -> Result<()> {
let mut shared_lock = None;
for dir in subdirectories {
let context = TestContext::new("3.12");
let work_dir = context.temp_dir.join(workspace);
copy_dir_ignore(workspaces_dir().join(workspace), &work_dir)?;
// TODO(konsti): `--python` is being ignored atm, so we need to create the correct venv
// ourselves and add the output filters.
let venv = work_dir.join(".venv");
assert_cmd::Command::new(get_bin())
.arg("venv")
.arg("-p")
.arg(context.interpreter())
.arg(&venv)
.assert();
lock_workspace(&context)
.current_dir(&work_dir.join(dir))
.assert()
.success();
let raw_lock = fs_err::read_to_string(work_dir.join("uv.lock"))?;
// Remove temp paths from lock.
// TODO(konsti): There shouldn't be absolute paths in the lock to begin with.
let redacted_lock = raw_lock
.replace(
Url::from_directory_path(&context.temp_dir)
.unwrap()
.as_str(),
"file:///tmp",
)
.replace(
Url::from_directory_path(fs_err::canonicalize(&context.temp_dir)?)
.unwrap()
.as_str(),
"file:///tmp",
);
// Check the lockfile is the same for all resolutions.
if let Some(shared_lock) = &shared_lock {
assert_eq!(shared_lock, &redacted_lock);
} else {
shared_lock = Some(redacted_lock);
}
}
Ok(())
}
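The redaction step above normalizes two spellings of the temp directory (the raw path and its canonicalized form, which differ where the temp dir sits behind a symlink, e.g. `/var` vs. `/private/var` on macOS) so lockfiles produced from different working directories compare byte-for-byte. A toy illustration with made-up paths; the real prefixes come from `Url::from_directory_path(&context.temp_dir)` in the helper above.

```rust
fn main() {
    // Hypothetical URL from a lockfile written inside a temp directory.
    let raw = "file:///var/folders/t1/work/albatross-root-workspace/packages/bird-feeder/";
    // Replace both the raw and the canonicalized temp-dir prefixes with a stable one.
    let redacted = raw
        .replace("file:///var/folders/t1/work/", "file:///tmp/")
        .replace("file:///private/var/folders/t1/work/", "file:///tmp/");
    assert_eq!(
        redacted,
        "file:///tmp/albatross-root-workspace/packages/bird-feeder/"
    );
}
```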
/// Check that the resolution is the same no matter where in the workspace we are.
#[test]
fn workspace_lock_idempotence_root_workspace() -> Result<()> {
workspace_lock_idempotence(
"albatross-root-workspace",
&[".", "packages/bird-feeder", "packages/seeds"],
)?;
Ok(())
}
/// Check that the resolution is the same no matter where in the workspace we are, and that locking
/// works even if there is no root project.
#[test]
fn workspace_lock_idempotence_virtual_workspace() -> Result<()> {
workspace_lock_idempotence(
"albatross-virtual-workspace",
&[
".",
"packages/albatross",
"packages/bird-feeder",
"packages/seeds",
],
)?;
Ok(())
}