Support workspace to workspace path dependencies (#4833)

Add support for path dependencies from a package in one workspace to a
package in another workspace, which itself has workspace dependencies.

Say we have a main workspace with packages `a` and `b`, and a second
workspace with `c` and `d`. We have `a -> b`, `b -> c`, `c -> d`. This
previously led to a mangled path for `d`; that is now fixed.
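Concretely, the new `workspace_to_workspace_paths_dependencies` test added in this diff sets this up by pointing `b` (a member of the main workspace) at `c` (a member of the other workspace) through a path source; the layout and path below are taken from that test:

```toml
# main-workspace/packages/b/pyproject.toml (excerpt)
[project]
name = "b"
dependencies = ["c"]

[tool.uv.sources]
# `c` is a member of the other workspace, so the path crosses both workspace roots.
c = { path = "../../../other-workspace/packages/c", editable = true }
```

Within the other workspace, `c` then depends on `d` through an ordinary `d = { workspace = true }` source.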

Like distribution paths, we split workspace paths into an absolute
install path and a relative (or absolute, if the user provided an
absolute path) lock path.
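For the layout above, the lock file records members of the main workspace relative to its root and members of the other workspace relative to the main workspace as well, reaching across with `../`. This is the name-to-source mapping asserted by the new test's snapshot (presented here as plain key/value pairs; the surrounding `[[distribution]]` lock layout is omitted for brevity):

```toml
# Source entries in main-workspace/uv.lock, as captured by the test snapshot.
a = { editable = "packages/a" }
b = { editable = "packages/b" }
c = { editable = "../other-workspace/packages/c" }
d = { editable = "../other-workspace/packages/d" }
```

The absolute install path is what the builder uses (see the `build_distribution` calls below), while the relative lock path is what lands in `uv.lock`.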

Part of https://github.com/astral-sh/uv/issues/3943
konsti 2024-07-16 22:38:46 +02:00 committed by GitHub
parent b5ec859273
commit abb6ac5127
13 changed files with 410 additions and 88 deletions


@ -166,7 +166,8 @@ impl<'a> From<&'a PathSourceDist> for PathSourceUrl<'a> {
#[derive(Debug, Clone)]
pub struct DirectorySourceUrl<'a> {
pub url: &'a Url,
pub path: Cow<'a, Path>,
pub install_path: Cow<'a, Path>,
pub lock_path: Cow<'a, Path>,
pub editable: bool,
}
@ -180,7 +181,8 @@ impl<'a> From<&'a DirectorySourceDist> for DirectorySourceUrl<'a> {
fn from(dist: &'a DirectorySourceDist) -> Self {
Self {
url: &dist.url,
path: Cow::Borrowed(&dist.install_path),
install_path: Cow::Borrowed(&dist.install_path),
lock_path: Cow::Borrowed(&dist.lock_path),
editable: dist.editable,
}
}


@ -8,7 +8,7 @@ use regex::Regex;
use thiserror::Error;
use url::{ParseError, Url};
use uv_fs::{normalize_path, normalize_url_path};
use uv_fs::{normalize_absolute_path, normalize_url_path};
use crate::Pep508Url;
@ -42,7 +42,7 @@ impl VerbatimUrl {
let path = path.as_ref();
// Normalize the path.
let path = normalize_path(path)
let path = normalize_absolute_path(path)
.map_err(|err| VerbatimUrlError::Normalization(path.to_path_buf(), err))?;
// Extract the fragment, if it exists.
@ -83,7 +83,7 @@ impl VerbatimUrl {
};
// Normalize the path.
let path = normalize_path(&path)
let path = normalize_absolute_path(&path)
.map_err(|err| VerbatimUrlError::Normalization(path.clone(), err))?;
// Extract the fragment, if it exists.
@ -113,7 +113,7 @@ impl VerbatimUrl {
};
// Normalize the path.
let Ok(path) = normalize_path(&path) else {
let Ok(path) = normalize_absolute_path(&path) else {
return Err(VerbatimUrlError::WorkingDirectory(path));
};


@ -201,19 +201,37 @@ pub(crate) fn lower_requirement(
if matches!(requirement.version_or_url, Some(VersionOrUrl::Url(_))) {
return Err(LoweringError::ConflictingUrls);
}
let path = workspace
let member = workspace
.packages()
.get(&requirement.name)
.ok_or(LoweringError::UndeclaredWorkspacePackage)?
.clone();
// The lockfile is relative to the workspace root.
let relative_to_workspace = relative_to(path.root(), workspace.install_path())
// Say we have:
// ```
// root
// ├── main_workspace <- We want the path from here ...
// │   ├── pyproject.toml
// │   └── uv.lock
// └── current_workspace
//     └── packages
//         └── current_project <- ... to here.
//             └── pyproject.toml
// ```
// The path we need in the lockfile: `../current_workspace/packages/current_project`
// member root: `/root/current_workspace/packages/current_project`
// workspace install root: `/root/current_workspace`
// relative to workspace: `packages/current_project`
// workspace lock root: `../current_workspace`
// relative to main workspace: `../current_workspace/packages/current_project`
let relative_to_workspace = relative_to(member.root(), workspace.install_path())
.map_err(LoweringError::RelativeTo)?;
let url = VerbatimUrl::parse_absolute_path(path.root())?
.with_given(relative_to_workspace.to_string_lossy());
let relative_to_main_workspace = workspace.lock_path().join(relative_to_workspace);
let url = VerbatimUrl::parse_absolute_path(member.root())?
.with_given(relative_to_main_workspace.to_string_lossy());
RequirementSource::Directory {
install_path: path.root().clone(),
lock_path: relative_to_workspace,
install_path: member.root().clone(),
lock_path: relative_to_main_workspace,
url,
editable: editable.unwrap_or(true),
}


@ -57,7 +57,8 @@ impl Metadata {
/// dependencies.
pub async fn from_workspace(
metadata: Metadata23,
project_root: &Path,
install_path: &Path,
lock_path: &Path,
preview_mode: PreviewMode,
) -> Result<Self, MetadataError> {
// Lower the requirements.
@ -66,13 +67,14 @@ impl Metadata {
requires_dist,
provides_extras,
dev_dependencies,
} = RequiresDist::from_workspace(
} = RequiresDist::from_project_maybe_workspace(
pypi_types::RequiresDist {
name: metadata.name,
requires_dist: metadata.requires_dist,
provides_extras: metadata.provides_extras,
},
project_root,
install_path,
lock_path,
preview_mode,
)
.await?;


@ -42,15 +42,16 @@ impl RequiresDist {
/// Lower by considering `tool.uv` in `pyproject.toml` if present, used for Git and directory
/// dependencies.
pub async fn from_workspace(
pub async fn from_project_maybe_workspace(
metadata: pypi_types::RequiresDist,
project_root: &Path,
install_path: &Path,
lock_path: &Path,
preview_mode: PreviewMode,
) -> Result<Self, MetadataError> {
// TODO(konsti): Limit discovery for Git checkouts to Git root.
// TODO(konsti): Cache workspace discovery.
let Some(project_workspace) =
ProjectWorkspace::from_maybe_project_root(project_root, None).await?
ProjectWorkspace::from_maybe_project_root(install_path, lock_path, None).await?
else {
return Ok(Self::from_metadata23(metadata));
};
@ -58,7 +59,7 @@ impl RequiresDist {
Self::from_project_workspace(metadata, &project_workspace, preview_mode)
}
pub fn from_project_workspace(
fn from_project_workspace(
metadata: pypi_types::RequiresDist,
project_workspace: &ProjectWorkspace,
preview_mode: PreviewMode,
@ -159,6 +160,7 @@ mod test {
let pyproject_toml = PyProjectToml::from_string(contents.to_string())?;
let path = Path::new("pyproject.toml");
let project_workspace = ProjectWorkspace::from_project(
path,
path,
pyproject_toml
.project


@ -423,8 +423,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
/// Return the [`RequiresDist`] from a `pyproject.toml`, if it can be statically extracted.
pub(crate) async fn requires_dist(&self, project_root: &Path) -> Result<RequiresDist, Error> {
let requires_dist = read_requires_dist(project_root).await?;
let requires_dist =
RequiresDist::from_workspace(requires_dist, project_root, self.preview_mode).await?;
let requires_dist = RequiresDist::from_project_maybe_workspace(
requires_dist,
project_root,
project_root,
self.preview_mode,
)
.await?;
Ok(requires_dist)
}
@ -916,7 +921,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map(|reporter| reporter.on_build_start(source));
let (disk_filename, filename, metadata) = self
.build_distribution(source, &resource.path, None, &cache_shard)
.build_distribution(source, &resource.install_path, None, &cache_shard)
.await?;
if let Some(task) = task {
@ -978,14 +983,19 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
if let Some(metadata) = read_cached_metadata(&metadata_entry).await? {
debug!("Using cached metadata for: {source}");
return Ok(ArchiveMetadata::from(
Metadata::from_workspace(metadata, resource.path.as_ref(), self.preview_mode)
.await?,
Metadata::from_workspace(
metadata,
resource.install_path.as_ref(),
resource.lock_path.as_ref(),
self.preview_mode,
)
.await?,
));
}
// If the backend supports `prepare_metadata_for_build_wheel`, use it.
if let Some(metadata) = self
.build_metadata(source, &resource.path, None)
.build_metadata(source, &resource.install_path, None)
.boxed_local()
.await?
{
@ -998,8 +1008,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map_err(Error::CacheWrite)?;
return Ok(ArchiveMetadata::from(
Metadata::from_workspace(metadata, resource.path.as_ref(), self.preview_mode)
.await?,
Metadata::from_workspace(
metadata,
resource.install_path.as_ref(),
resource.lock_path.as_ref(),
self.preview_mode,
)
.await?,
));
}
@ -1010,7 +1025,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map(|reporter| reporter.on_build_start(source));
let (_disk_filename, _filename, metadata) = self
.build_distribution(source, &resource.path, None, &cache_shard)
.build_distribution(source, &resource.install_path, None, &cache_shard)
.await?;
if let Some(task) = task {
@ -1025,7 +1040,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map_err(Error::CacheWrite)?;
Ok(ArchiveMetadata::from(
Metadata::from_workspace(metadata, resource.path.as_ref(), self.preview_mode).await?,
Metadata::from_workspace(
metadata,
resource.install_path.as_ref(),
resource.lock_path.as_ref(),
self.preview_mode,
)
.await?,
))
}
@ -1036,15 +1057,17 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
cache_shard: &CacheShard,
) -> Result<Revision, Error> {
// Verify that the source tree exists.
if !resource.path.is_dir() {
if !resource.install_path.is_dir() {
return Err(Error::NotFound(resource.url.clone()));
}
// Determine the last-modified time of the source distribution.
let Some(modified) =
ArchiveTimestamp::from_source_tree(&resource.path).map_err(Error::CacheRead)?
ArchiveTimestamp::from_source_tree(&resource.install_path).map_err(Error::CacheRead)?
else {
return Err(Error::DirWithoutEntrypoint(resource.path.to_path_buf()));
return Err(Error::DirWithoutEntrypoint(
resource.install_path.to_path_buf(),
));
};
// Read the existing metadata from the cache. We treat source trees as if `--refresh` is
@ -1225,7 +1248,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
if let Some(metadata) = read_cached_metadata(&metadata_entry).await? {
debug!("Using cached metadata for: {source}");
return Ok(ArchiveMetadata::from(
Metadata::from_workspace(metadata, fetch.path(), self.preview_mode).await?,
Metadata::from_workspace(
metadata,
fetch.path(),
fetch.path(),
self.preview_mode,
)
.await?,
));
}
}
@ -1245,7 +1274,8 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map_err(Error::CacheWrite)?;
return Ok(ArchiveMetadata::from(
Metadata::from_workspace(metadata, fetch.path(), self.preview_mode).await?,
Metadata::from_workspace(metadata, fetch.path(), fetch.path(), self.preview_mode)
.await?,
));
}
@ -1271,7 +1301,8 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map_err(Error::CacheWrite)?;
Ok(ArchiveMetadata::from(
Metadata::from_workspace(metadata, fetch.path(), self.preview_mode).await?,
Metadata::from_workspace(metadata, fetch.path(), fetch.path(), self.preview_mode)
.await?,
))
}


@ -10,7 +10,7 @@ use tracing::{debug, trace};
use pep508_rs::{RequirementOrigin, VerbatimUrl};
use pypi_types::{Requirement, RequirementSource};
use uv_fs::{absolutize_path, Simplified};
use uv_fs::{absolutize_path, normalize_path, relative_to, Simplified};
use uv_normalize::PackageName;
use uv_warnings::warn_user;
@ -46,20 +46,29 @@ pub enum WorkspaceError {
#[derive(Debug, Clone)]
#[cfg_attr(test, derive(serde::Serialize))]
pub struct Workspace {
/// The path to the workspace root, the directory containing the top level `pyproject.toml` with
/// The path to the workspace root.
///
/// The workspace root is the directory containing the top level `pyproject.toml` with
/// the `uv.tool.workspace`, or the `pyproject.toml` in an implicit single workspace project.
install_path: PathBuf,
/// The same path as `install_path`, but relative to the main workspace.
///
/// We use this value to compute relative paths for workspace-to-workspace dependencies. It's an
/// empty path for the main workspace.
lock_path: PathBuf,
/// The members of the workspace.
packages: BTreeMap<PackageName, WorkspaceMember>,
/// The sources table from the workspace `pyproject.toml`. It is overridden by the project
/// sources.
/// The sources table from the workspace `pyproject.toml`.
///
/// This table is overridden by the project sources.
sources: BTreeMap<PackageName, Source>,
}
impl Workspace {
/// Find the workspace containing the given path.
///
/// Unlike the [`ProjectWorkspace`] discovery, this does not require a current project.
/// Unlike the [`ProjectWorkspace`] discovery, this does not require a current project. It also
/// always uses absolute paths, i.e., it only supports discovering the main workspace.
///
/// Steps of workspace discovery: Start by looking at the closest `pyproject.toml`:
/// * If it's an explicit workspace root: Collect workspace from this root, we're done.
@ -71,20 +80,21 @@ impl Workspace {
path: &Path,
stop_discovery_at: Option<&Path>,
) -> Result<Workspace, WorkspaceError> {
let project_root = path
let path = absolutize_path(path)
.map_err(WorkspaceError::Normalize)?
.to_path_buf();
let project_path = path
.ancestors()
.find(|path| path.join("pyproject.toml").is_file())
.ok_or(WorkspaceError::MissingPyprojectToml)?;
.ok_or(WorkspaceError::MissingPyprojectToml)?
.to_path_buf();
let pyproject_path = project_root.join("pyproject.toml");
let pyproject_path = project_path.join("pyproject.toml");
let contents = fs_err::tokio::read_to_string(&pyproject_path).await?;
let pyproject_toml = PyProjectToml::from_string(contents)
.map_err(|err| WorkspaceError::Toml(pyproject_path.clone(), Box::new(err)))?;
let project_path = absolutize_path(project_root)
.map_err(WorkspaceError::Normalize)?
.to_path_buf();
// Check if the project is explicitly marked as unmanaged.
if pyproject_toml
.tool
@ -150,6 +160,8 @@ impl Workspace {
pyproject_toml,
});
Self::collect_members(
workspace_root.clone(),
// This method supports only absolute paths.
workspace_root,
workspace_definition,
workspace_pyproject_toml,
@ -248,6 +260,12 @@ impl Workspace {
&self.install_path
}
/// The same path as `install_path()`, but relative to the main workspace. We use this value
/// to compute relative paths for workspace-to-workspace dependencies.
pub fn lock_path(&self) -> &PathBuf {
&self.lock_path
}
/// The path to the workspace virtual environment.
pub fn venv(&self) -> PathBuf {
self.install_path.join(".venv")
@ -266,6 +284,7 @@ impl Workspace {
/// Collect the workspace member projects from the `members` and `excludes` entries.
async fn collect_members(
workspace_root: PathBuf,
lock_path: PathBuf,
workspace_definition: ToolUvWorkspace,
workspace_pyproject_toml: PyProjectToml,
current_project: Option<WorkspaceMember>,
@ -387,6 +406,7 @@ impl Workspace {
Ok(Workspace {
install_path: workspace_root,
lock_path,
packages: workspace_members,
sources: workspace_sources,
})
@ -541,7 +561,7 @@ impl ProjectWorkspace {
}
/// Discover the workspace starting from the directory containing the `pyproject.toml`.
pub async fn from_project_root(
async fn from_project_root(
project_root: &Path,
stop_discovery_at: Option<&Path>,
) -> Result<Self, WorkspaceError> {
@ -557,17 +577,25 @@ impl ProjectWorkspace {
.clone()
.ok_or_else(|| WorkspaceError::MissingProject(pyproject_path.clone()))?;
Self::from_project(project_root, &project, &pyproject_toml, stop_discovery_at).await
Self::from_project(
project_root,
Path::new(""),
&project,
&pyproject_toml,
stop_discovery_at,
)
.await
}
/// If the current directory contains a `pyproject.toml` with a `project` table, discover the
/// workspace and return it, otherwise it is a dynamic path dependency and we return `Ok(None)`.
pub async fn from_maybe_project_root(
project_root: &Path,
install_path: &Path,
lock_path: &Path,
stop_discovery_at: Option<&Path>,
) -> Result<Option<Self>, WorkspaceError> {
// Read the `pyproject.toml`.
let pyproject_path = project_root.join("pyproject.toml");
let pyproject_path = install_path.join("pyproject.toml");
let Ok(contents) = fs_err::tokio::read_to_string(&pyproject_path).await else {
// No `pyproject.toml`, but there may still be a `setup.py` or `setup.cfg`.
return Ok(None);
@ -582,7 +610,14 @@ impl ProjectWorkspace {
};
Ok(Some(
Self::from_project(project_root, &project, &pyproject_toml, stop_discovery_at).await?,
Self::from_project(
install_path,
lock_path,
&project,
&pyproject_toml,
stop_discovery_at,
)
.await?,
))
}
@ -609,12 +644,13 @@ impl ProjectWorkspace {
/// Find the workspace for a project.
pub async fn from_project(
project_path: &Path,
install_path: &Path,
lock_path: &Path,
project: &Project,
project_pyproject_toml: &PyProjectToml,
stop_discovery_at: Option<&Path>,
) -> Result<Self, WorkspaceError> {
let project_path = absolutize_path(project_path)
let project_path = absolutize_path(install_path)
.map_err(WorkspaceError::Normalize)?
.to_path_buf();
@ -669,6 +705,8 @@ impl ProjectWorkspace {
project_name: project.name.clone(),
workspace: Workspace {
install_path: project_path.clone(),
// The workspace and the project are the same, so the relative path is, too.
lock_path: lock_path.to_path_buf(),
packages: current_project_as_members,
// There may be package sources, but we don't need to duplicate them into the
// workspace sources.
@ -682,8 +720,28 @@ impl ProjectWorkspace {
workspace_root.simplified_display()
);
// Say we have:
// ```
// root
// ├── main_workspace <- The reference point
// │   ├── pyproject.toml
// │   └── uv.lock
// └── current_workspace <- We want this relative to the main workspace
//     └── packages
//         └── current_project <- We have this relative to the main workspace
//             └── pyproject.toml
// ```
// The lock path we need: `../current_workspace`
// workspace root: `/root/current_workspace`
// project path: `/root/current_workspace/packages/current_project`
// relative to workspace: `../..`
// lock path: `../current_workspace`
let up_to_root = relative_to(&workspace_root, &project_path)?;
let lock_path = normalize_path(&lock_path.join(up_to_root));
let workspace = Workspace::collect_members(
workspace_root,
lock_path,
workspace_definition,
workspace_pyproject_toml,
Some(current_project),
@ -901,10 +959,17 @@ impl VirtualProject {
///
/// Similar to calling [`ProjectWorkspace::discover`] with a fallback to [`Workspace::discover`],
/// but avoids rereading the `pyproject.toml` (and relying on error-handling as control flow).
///
/// This method requires an absolute path and panics otherwise, i.e., it only supports
/// discovering the main workspace.
pub async fn discover(
path: &Path,
stop_discovery_at: Option<&Path>,
) -> Result<Self, WorkspaceError> {
assert!(
path.is_absolute(),
"virtual project discovery with relative path"
);
let project_root = path
.ancestors()
.take_while(|path| {
@ -931,6 +996,7 @@ impl VirtualProject {
// If the `pyproject.toml` contains a `[project]` table, it's a project.
let project = ProjectWorkspace::from_project(
project_root,
Path::new(""),
project,
&pyproject_toml,
stop_discovery_at,
@ -950,6 +1016,7 @@ impl VirtualProject {
let workspace = Workspace::collect_members(
project_path,
PathBuf::new(),
workspace.clone(),
pyproject_toml,
None,
@ -1032,6 +1099,7 @@ mod tests {
"project_name": "bird-feeder",
"workspace": {
"install_path": "[ROOT]/albatross-in-example/examples/bird-feeder",
"lock_path": "",
"packages": {
"bird-feeder": {
"root": "[ROOT]/albatross-in-example/examples/bird-feeder",
@ -1067,6 +1135,7 @@ mod tests {
"project_name": "bird-feeder",
"workspace": {
"install_path": "[ROOT]/albatross-project-in-excluded/excluded/bird-feeder",
"lock_path": "",
"packages": {
"bird-feeder": {
"root": "[ROOT]/albatross-project-in-excluded/excluded/bird-feeder",
@ -1101,6 +1170,7 @@ mod tests {
"project_name": "albatross",
"workspace": {
"install_path": "[ROOT]/albatross-root-workspace",
"lock_path": "",
"packages": {
"albatross": {
"root": "[ROOT]/albatross-root-workspace",
@ -1159,6 +1229,7 @@ mod tests {
"project_name": "albatross",
"workspace": {
"install_path": "[ROOT]/albatross-virtual-workspace",
"lock_path": "../..",
"packages": {
"albatross": {
"root": "[ROOT]/albatross-virtual-workspace/packages/albatross",
@ -1211,6 +1282,7 @@ mod tests {
"project_name": "albatross",
"workspace": {
"install_path": "[ROOT]/albatross-just-project",
"lock_path": "",
"packages": {
"albatross": {
"root": "[ROOT]/albatross-just-project",


@ -160,7 +160,7 @@ pub fn normalize_url_path(path: &str) -> Cow<'_, str> {
///
/// When a relative path is provided with `..` components that extend beyond the base directory.
/// For example, `./a/../../b` cannot be normalized because it escapes the base directory.
pub fn normalize_path(path: &Path) -> Result<PathBuf, std::io::Error> {
pub fn normalize_absolute_path(path: &Path) -> Result<PathBuf, std::io::Error> {
let mut components = path.components().peekable();
let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().copied() {
components.next();
@ -180,7 +180,10 @@ pub fn normalize_path(path: &Path) -> Result<PathBuf, std::io::Error> {
if !ret.pop() {
return Err(std::io::Error::new(
std::io::ErrorKind::InvalidInput,
"cannot normalize a relative path beyond the base directory",
format!(
"cannot normalize a relative path beyond the base directory: {}",
path.display()
),
));
}
}
@ -192,6 +195,52 @@ pub fn normalize_path(path: &Path) -> Result<PathBuf, std::io::Error> {
Ok(ret)
}
/// Normalize a path, removing things like `.` and `..`.
///
/// Unlike [`normalize_absolute_path`], this works with relative paths and never errors.
///
/// Note that we can theoretically go beyond the root dir here (e.g. `/usr/../../foo` becomes
/// `/../foo`), but that's not a correctness problem: we will fail later with a file-not-found
/// error for a path computed from the user's input.
///
/// # Examples
///
/// In: `../../workspace-git-path-dep-test/packages/c/../../packages/d`
/// Out: `../../workspace-git-path-dep-test/packages/d`
///
/// In: `workspace-git-path-dep-test/packages/c/../../packages/d`
/// Out: `workspace-git-path-dep-test/packages/d`
///
/// In: `./a/../../b`
/// Out: `../b`
pub fn normalize_path(path: &Path) -> PathBuf {
let mut normalized = PathBuf::new();
for component in path.components() {
match component {
Component::Prefix(_) | Component::RootDir | Component::Normal(_) => {
// Preserve filesystem roots and regular path components.
normalized.push(component);
}
Component::ParentDir => {
match normalized.components().last() {
None | Some(Component::ParentDir | Component::RootDir) => {
// Preserve leading and above-root `..`
normalized.push(component);
}
Some(Component::Normal(_) | Component::Prefix(_) | Component::CurDir) => {
// Remove inner `..`
normalized.pop();
}
}
}
Component::CurDir => {
// Remove `.`
}
}
}
normalized
}
/// Convert a path to an absolute path, relative to the current working directory.
///
/// Unlike [`std::fs::canonicalize`], this function does not resolve symlinks and does not require
@ -402,16 +451,16 @@ mod tests {
#[test]
fn test_normalize_path() {
let path = Path::new("/a/b/../c/./d");
let normalized = normalize_path(path).unwrap();
let normalized = normalize_absolute_path(path).unwrap();
assert_eq!(normalized, Path::new("/a/c/d"));
let path = Path::new("/a/../c/./d");
let normalized = normalize_path(path).unwrap();
let normalized = normalize_absolute_path(path).unwrap();
assert_eq!(normalized, Path::new("/c/d"));
// This should be an error.
let path = Path::new("/a/../../c/./d");
let err = normalize_path(path).unwrap_err();
let err = normalize_absolute_path(path).unwrap_err();
assert_eq!(err.kind(), std::io::ErrorKind::InvalidInput);
}
@ -442,4 +491,23 @@ mod tests {
Path::new("../../../bin/foo_launcher")
);
}
#[test]
fn test_normalize_relative() {
let cases = [
(
"../../workspace-git-path-dep-test/packages/c/../../packages/d",
"../../workspace-git-path-dep-test/packages/d",
),
(
"workspace-git-path-dep-test/packages/c/../../packages/d",
"workspace-git-path-dep-test/packages/d",
),
("./a/../../b", "../b"),
("/usr/../../foo", "/../foo"),
];
for (input, expected) in cases {
assert_eq!(normalize_path(Path::new(input)), Path::new(expected));
}
}
}


@ -172,7 +172,8 @@ impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> {
};
let source = SourceUrl::Directory(DirectorySourceUrl {
url: &url,
path: Cow::Borrowed(source_tree),
install_path: Cow::Borrowed(source_tree),
lock_path: Cow::Borrowed(source_tree),
editable: false,
});


@ -255,7 +255,8 @@ impl<'a, Context: BuildContext> NamedRequirementsResolver<'a, Context> {
SourceUrl::Directory(DirectorySourceUrl {
url: &requirement.url.verbatim,
path: Cow::Borrowed(&parsed_directory_url.install_path),
install_path: Cow::Borrowed(&parsed_directory_url.install_path),
lock_path: Cow::Borrowed(&parsed_directory_url.lock_path),
editable: parsed_directory_url.editable,
})
}


@ -1,36 +1,13 @@
use std::env;
use std::path::Path;
use anyhow::Result;
use indoc::{formatdoc, indoc};
use indoc::indoc;
use insta::assert_snapshot;
use crate::common::{uv_snapshot, TestContext};
use crate::common::{make_project, uv_snapshot, TestContext};
mod common;
/// Create a stub package `name` in `dir` with the given `pyproject.toml` body.
fn make_project(dir: &Path, name: &str, body: &str) -> Result<()> {
let pyproject_toml = formatdoc! {r#"
[project]
name = "{name}"
version = "0.1.0"
description = "Test package for direct URLs in branches"
requires-python = ">=3.11,<3.13"
{body}
[build-system]
requires = ["flit_core>=3.8,<4"]
build-backend = "flit_core.buildapi"
"#
};
fs_err::create_dir_all(dir)?;
fs_err::write(dir.join("pyproject.toml"), pyproject_toml)?;
fs_err::create_dir(dir.join(name))?;
fs_err::write(dir.join(name).join("__init__.py"), "")?;
Ok(())
}
/// The root package has diverging URLs for disjoint markers:
/// ```toml
/// dependencies = [


@ -12,6 +12,7 @@ use std::str::FromStr;
use assert_cmd::assert::{Assert, OutputAssertExt};
use assert_fs::assert::PathAssert;
use assert_fs::fixture::{ChildPath, PathChild, PathCreateDir, SymlinkToFile};
use indoc::formatdoc;
use predicates::prelude::predicate;
use regex::Regex;
@ -929,6 +930,28 @@ pub fn copy_dir_ignore(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> anyhow::
Ok(())
}
/// Create a stub package `name` in `dir` with the given `pyproject.toml` body.
pub fn make_project(dir: &Path, name: &str, body: &str) -> anyhow::Result<()> {
let pyproject_toml = formatdoc! {r#"
[project]
name = "{name}"
version = "0.1.0"
description = "Test package for direct URLs in branches"
requires-python = ">=3.11,<3.13"
{body}
[build-system]
requires = ["flit_core>=3.8,<4"]
build-backend = "flit_core.buildapi"
"#
};
fs_err::create_dir_all(dir)?;
fs_err::write(dir.join("pyproject.toml"), pyproject_toml)?;
fs_err::create_dir(dir.join(name))?;
fs_err::write(dir.join(name).join("__init__.py"), "")?;
Ok(())
}
/// Utility macro to return the name of the current function.
///
/// https://stackoverflow.com/a/40234666/3549270


@ -1,11 +1,16 @@
use std::collections::BTreeMap;
use std::env;
use std::path::{Path, PathBuf};
use std::process::Command;
use anyhow::Result;
use assert_cmd::assert::OutputAssertExt;
use assert_fs::fixture::{FileWriteStr, PathChild};
use indoc::indoc;
use insta::assert_json_snapshot;
use serde::{Deserialize, Serialize};
use crate::common::{copy_dir_ignore, uv_snapshot, TestContext};
use crate::common::{copy_dir_ignore, make_project, uv_snapshot, TestContext};
mod common;
@ -565,3 +570,123 @@ fn workspace_lock_idempotence_virtual_workspace() -> Result<()> {
)?;
Ok(())
}
/// Extract just the sources from the lock file, to test path resolution.
#[derive(Deserialize, Serialize)]
struct SourceLock {
distribution: Vec<Distribution>,
}
impl SourceLock {
fn sources(self) -> BTreeMap<String, toml::Value> {
self.distribution
.into_iter()
.map(|distribution| (distribution.name, distribution.source))
.collect()
}
}
#[derive(Deserialize, Serialize)]
struct Distribution {
name: String,
source: toml::Value,
}
/// Test path dependencies from one workspace into another.
///
/// We have a main workspace with packages `a` and `b`, and a second workspace with `c`, `d` and
/// `e`. We have `a -> b`, `b -> c`, `c -> d`. `e` should not be installed.
#[test]
fn workspace_to_workspace_paths_dependencies() -> Result<()> {
let context = TestContext::new("3.12");
// Build the main workspace ...
let main_workspace = context.temp_dir.child("main-workspace");
main_workspace
.child("pyproject.toml")
.write_str(indoc! {r#"
[tool.uv.workspace]
members = ["packages/*"]
"#})?;
// ... with a ...
let deps = indoc! {r#"
dependencies = ["b"]
[tool.uv.sources]
b = { workspace = true }
"#};
make_project(&main_workspace.join("packages").join("a"), "a", deps)?;
// ... and b.
let deps = indoc! {r#"
dependencies = ["c"]
[tool.uv.sources]
c = { path = "../../../other-workspace/packages/c", editable = true }
"#};
make_project(&main_workspace.join("packages").join("b"), "b", deps)?;
// Build the second workspace ...
let other_workspace = context.temp_dir.child("other-workspace");
other_workspace
.child("pyproject.toml")
.write_str(indoc! {r#"
[tool.uv.workspace]
members = ["packages/*"]
"#})?;
// ... with c ...
let deps = indoc! {r#"
dependencies = ["d"]
[tool.uv.sources]
d = { workspace = true }
"#};
make_project(&other_workspace.join("packages").join("c"), "c", deps)?;
// ... and d ...
let deps = indoc! {r"
dependencies = []
"};
make_project(&other_workspace.join("packages").join("d"), "d", deps)?;
// ... and e.
let deps = indoc! {r#"
dependencies = ["numpy>=2.0.0,<3"]
"#};
make_project(&other_workspace.join("packages").join("e"), "e", deps)?;
uv_snapshot!(context.filters(), context.lock().arg("--preview").current_dir(&main_workspace), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Using Python 3.12.[X] interpreter at: [PYTHON-3.12]
Resolved 4 packages in [TIME]
"###
);
let lock: SourceLock =
toml::from_str(&fs_err::read_to_string(main_workspace.join("uv.lock"))?)?;
assert_json_snapshot!(lock.sources(), @r###"
{
"a": {
"editable": "packages/a"
},
"b": {
"editable": "packages/b"
},
"c": {
"editable": "../other-workspace/packages/c"
},
"d": {
"editable": "../other-workspace/packages/d"
}
}
"###);
Ok(())
}