Change the definition of --locked to require satisfaction check (#6102)

## Summary

This PR changes the definition of `--locked` from:

> Produces the same `Lock`

To:

> Passes `Lock::satisfies`

This is a subtle but important difference. Previously, if
`Lock::satisfies` failed, we would run a full resolution and then check
`existing_lock == lock`. If the two weren't equal and `--locked` was
specified, we'd throw an error.

The equality check is hard to get right. For example, it means that we
can't ship #6076 without changing our marker representation, since the
deserialized lockfile "loses" some of the internal marker state that
gets accumulated during resolution.

The downside of this change is that there could be scenarios in which
`uv lock --locked` fails even though the lockfile would actually work
and the exact TOML would be unchanged. But... I think it's ok if
`--locked` fails after the user modifies something?
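
To make the difference concrete, here's a minimal standalone sketch of the two behaviors. `Lock`, `satisfies`, and `resolve` below are hypothetical stand-ins with simplified signatures, not uv's actual API:

```rust
// Hypothetical stand-ins; uv's real types and signatures differ.
#[derive(Clone, Debug, PartialEq, Eq)]
struct Lock {
    toml: String,
}

impl Lock {
    /// Stand-in for `Lock::satisfies`: a cheap check that the lockfile still
    /// matches the workspace requirements.
    fn satisfies(&self) -> bool {
        // Elided: compare the recorded manifest against the workspace.
        false
    }
}

/// Stand-in for a full resolution that produces a fresh lockfile.
fn resolve() -> Lock {
    Lock { toml: "[package]".to_string() }
}

/// Old `--locked` semantics: if the satisfaction check fails, re-resolve and
/// require the fresh lock to be *equal* to the existing one.
fn check_locked_old(existing: &Lock) -> Result<(), String> {
    if existing.satisfies() {
        return Ok(());
    }
    let fresh = resolve();
    if fresh == *existing {
        Ok(())
    } else {
        Err("The lockfile is out of date".to_string())
    }
}

/// New `--locked` semantics: the satisfaction check alone decides; a failed
/// check is an error even if re-resolving would reproduce the same TOML.
fn check_locked_new(existing: &Lock) -> Result<(), String> {
    if existing.satisfies() {
        Ok(())
    } else {
        Err("The lockfile is out of date".to_string())
    }
}

fn main() {
    let existing = Lock { toml: "[package]".to_string() };
    // With the stubbed `satisfies` returning false, the old check still passes
    // (the re-resolved lock happens to be identical), while the new check
    // errors -- the trade-off described above.
    println!("old: {:?}", check_locked_old(&existing));
    println!("new: {:?}", check_locked_new(&existing));
}
```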
Charlie Marsh authored on 2024-08-15 08:17:28 -04:00 · committed by GitHub
commit 6333823236 · parent 7551097a17
9 changed files with 298 additions and 584 deletions


@ -40,7 +40,7 @@ use crate::{ExcludeNewer, PrereleaseMode, RequiresPython, ResolutionGraph, Resol
/// The current version of the lockfile format.
const VERSION: u32 = 1;
#[derive(Clone, Debug, serde::Deserialize, PartialEq, Eq)]
#[derive(Clone, Debug, serde::Deserialize)]
#[serde(try_from = "LockWire")]
pub struct Lock {
version: u32,


@ -441,7 +441,7 @@ pub(crate) async fn add(
)
.await
{
Ok(lock) => lock,
Ok(result) => result.into_lock(),
Err(ProjectError::Operation(pip::operations::Error::Resolve(
uv_resolver::ResolveError::NoSolution(err),
))) => {
@ -463,8 +463,8 @@ pub(crate) async fn add(
if !raw_sources {
// Extract the minimum-supported version for each dependency.
let mut minimum_version =
FxHashMap::with_capacity_and_hasher(lock.lock.packages().len(), FxBuildHasher);
for dist in lock.lock.packages() {
FxHashMap::with_capacity_and_hasher(lock.packages().len(), FxBuildHasher);
for dist in lock.packages() {
let name = dist.name();
let version = dist.version();
match minimum_version.entry(name) {
@ -563,7 +563,7 @@ pub(crate) async fn add(
project::sync::do_sync(
&project,
&venv,
&lock.lock,
&lock,
&extras,
dev,
Modifications::Sufficient,


@ -9,13 +9,16 @@ use rustc_hash::{FxBuildHasher, FxHashMap};
use tracing::debug;
use distribution_types::{
FlatIndexLocation, IndexUrl, UnresolvedRequirementSpecification, UrlString,
FlatIndexLocation, IndexLocations, IndexUrl, UnresolvedRequirementSpecification, UrlString,
};
use pep440_rs::Version;
use pypi_types::Requirement;
use uv_auth::store_credentials_from_url;
use uv_cache::Cache;
use uv_client::{Connectivity, FlatIndexClient, RegistryClientBuilder};
use uv_configuration::{Concurrency, ExtrasSpecification, PreviewMode, Reinstall, SetupPyStrategy};
use uv_configuration::{
Concurrency, ExtrasSpecification, PreviewMode, Reinstall, SetupPyStrategy, Upgrade,
};
use uv_dispatch::BuildDispatch;
use uv_distribution::DistributionDatabase;
use uv_fs::CWD;
@ -25,10 +28,10 @@ use uv_python::{Interpreter, PythonDownloads, PythonEnvironment, PythonPreferenc
use uv_requirements::upgrade::{read_lock_requirements, LockedRequirements};
use uv_requirements::NamedRequirementsResolver;
use uv_resolver::{
FlatIndex, Lock, OptionsBuilder, PythonRequirement, RequiresPython, ResolverManifest,
FlatIndex, Lock, Options, OptionsBuilder, PythonRequirement, RequiresPython, ResolverManifest,
ResolverMarkers,
};
use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy};
use uv_types::{BuildContext, BuildIsolation, EmptyInstalledPackages, HashStrategy};
use uv_warnings::{warn_user, warn_user_once};
use uv_workspace::{DiscoveryOptions, Workspace};
@ -41,11 +44,27 @@ use crate::settings::{ResolverSettings, ResolverSettingsRef};
/// The result of running a lock operation.
#[derive(Debug, Clone)]
pub(crate) struct LockResult {
/// The previous lock, if any.
pub(crate) previous: Option<Lock>,
/// The updated lock.
pub(crate) lock: Lock,
pub(crate) enum LockResult {
/// The lock was unchanged.
Unchanged(Lock),
/// The lock was changed.
Changed(Option<Lock>, Lock),
}
impl LockResult {
pub(crate) fn lock(&self) -> &Lock {
match self {
LockResult::Unchanged(lock) => lock,
LockResult::Changed(_, lock) => lock,
}
}
pub(crate) fn into_lock(self) -> Lock {
match self {
LockResult::Unchanged(lock) => lock,
LockResult::Changed(_, lock) => lock,
}
}
}
/// Resolve the project requirements into a lockfile.
@ -102,8 +121,8 @@ pub(crate) async fn lock(
.await
{
Ok(lock) => {
if let Some(previous) = lock.previous.as_ref() {
report_upgrades(previous, &lock.lock, printer)?;
if let LockResult::Changed(Some(previous), lock) = &lock {
report_upgrades(previous, lock, printer)?;
}
Ok(ExitStatus::Success)
}
@ -149,10 +168,7 @@ pub(super) async fn do_safe_lock(
let existing = read(workspace)
.await?
.ok_or_else(|| ProjectError::MissingLockfile)?;
Ok(LockResult {
previous: None,
lock: existing,
})
Ok(LockResult::Unchanged(existing))
} else if locked {
// Read the existing lockfile.
let existing = read(workspace)
@ -160,10 +176,10 @@ pub(super) async fn do_safe_lock(
.ok_or_else(|| ProjectError::MissingLockfile)?;
// Perform the lock operation, but don't write the lockfile to disk.
let lock = do_lock(
let result = do_lock(
workspace,
interpreter,
Some(&existing),
Some(existing),
settings,
&state,
logger,
@ -176,24 +192,21 @@ pub(super) async fn do_safe_lock(
)
.await?;
// If the locks disagree, return an error.
if lock != existing {
// If the lockfile changed, return an error.
if matches!(result, LockResult::Changed(_, _)) {
return Err(ProjectError::LockMismatch);
}
Ok(LockResult {
previous: Some(existing),
lock,
})
Ok(result)
} else {
// Read the existing lockfile.
let existing = read(workspace).await?;
// Perform the lock operation.
let lock = do_lock(
let result = do_lock(
workspace,
interpreter,
existing.as_ref(),
existing,
settings,
&state,
logger,
@ -206,14 +219,12 @@ pub(super) async fn do_safe_lock(
)
.await?;
if !existing.as_ref().is_some_and(|existing| *existing == lock) {
commit(&lock, workspace).await?;
// If the lockfile changed, write it to disk.
if let LockResult::Changed(_, lock) = &result {
commit(lock, workspace).await?;
}
Ok(LockResult {
previous: existing,
lock,
})
Ok(result)
}
}
@ -221,7 +232,7 @@ pub(super) async fn do_safe_lock(
async fn do_lock(
workspace: &Workspace,
interpreter: &Interpreter,
existing_lock: Option<&Lock>,
existing_lock: Option<Lock>,
settings: ResolverSettingsRef<'_>,
state: &SharedState,
logger: Box<dyn ResolveLogger>,
@ -231,7 +242,9 @@ async fn do_lock(
native_tls: bool,
cache: &Cache,
printer: Printer,
) -> Result<Lock, ProjectError> {
) -> Result<LockResult, ProjectError> {
let start = std::time::Instant::now();
// Extract the project settings.
let ResolverSettingsRef {
index_locations,
@ -392,198 +405,74 @@ async fn do_lock(
.await?;
// If any of the resolution-determining settings changed, invalidate the lock.
let existing_lock = existing_lock.filter(|lock| {
if lock.resolution_mode() != options.resolution_mode {
let _ = writeln!(
printer.stderr(),
"Ignoring existing lockfile due to change in resolution mode: `{}` vs. `{}`",
lock.resolution_mode().cyan(),
options.resolution_mode.cyan()
);
return false;
}
if lock.prerelease_mode() != options.prerelease_mode {
let _ = writeln!(
printer.stderr(),
"Ignoring existing lockfile due to change in pre-release mode: `{}` vs. `{}`",
lock.prerelease_mode().cyan(),
options.prerelease_mode.cyan()
);
return false;
}
match (lock.exclude_newer(), options.exclude_newer) {
(None, None) => (),
(Some(existing), Some(provided)) if existing == provided => (),
(Some(existing), Some(provided)) => {
let _ = writeln!(
printer.stderr(),
"Ignoring existing lockfile due to change in timestamp cutoff: `{}` vs. `{}`",
existing.cyan(),
provided.cyan()
);
return false;
}
(Some(existing), None) => {
let _ = writeln!(
printer.stderr(),
"Ignoring existing lockfile due to removal of timestamp cutoff: `{}`",
existing.cyan(),
);
return false;
}
(None, Some(provided)) => {
let _ = writeln!(
printer.stderr(),
"Ignoring existing lockfile due to addition of timestamp cutoff: `{}`",
provided.cyan()
);
return false;
}
}
true
});
// If an existing lockfile exists, build up a set of preferences.
let LockedRequirements { preferences, git } = existing_lock
.as_ref()
.map(|lock| read_lock_requirements(lock, upgrade))
.unwrap_or_default();
// Populate the Git resolver.
for ResolvedRepositoryReference { reference, sha } in git {
debug!("Inserting Git reference into resolver: `{reference:?}` at `{sha}`");
state.git.insert(reference, sha);
}
let start = std::time::Instant::now();
let existing_lock = existing_lock.filter(|lock| {
match (lock.requires_python(), requires_python) {
// If the Requires-Python bound in the lockfile is weaker or equivalent to the
// Requires-Python bound in the workspace, we should have the necessary wheels to perform
// a locked resolution.
(None, _) => true,
(Some(locked), specified) => {
if locked.bound() == specified.bound() {
true
} else {
// On the other hand, if the bound in the lockfile is stricter, meaning the
// bound has since been weakened, we have to perform a clean resolution to ensure
// we fetch the necessary wheels.
debug!("Ignoring existing lockfile due to change in `requires-python`");
false
}
}
}
});
// When we run the same resolution from the lockfile again, we could get a different result the
// second time due to the preferences causing us to skip a fork point (see
// "preferences-dependent-forking" packse scenario). To avoid this, we store the forks in the
// lockfile. We read those after all the lockfile filters, to allow the forks to change when
// the environment changed, e.g. the python bound check above can lead to different forking.
let resolver_markers = ResolverMarkers::universal(if upgrade.is_all() {
// We're discarding all preferences, so we're also discarding the existing forks.
vec![]
} else {
existing_lock
.map(|lock| lock.fork_markers().to_vec())
.unwrap_or_default()
});
// If any upgrades are specified, don't use the existing lockfile.
let existing_lock = existing_lock.filter(|_| {
debug!("Ignoring existing lockfile due to `--upgrade`");
upgrade.is_none()
});
// If the user provided at least one index URL (from the command line, or from a configuration
// file), don't use the existing lockfile if it references any registries that are no longer
// included in the current configuration.
let existing_lock = existing_lock.filter(|lock| {
// If _no_ indexes were provided, we assume that the user wants to reuse the existing
// distributions, even though a failure to reuse the lockfile will result in re-resolving
// against PyPI by default.
if settings.index_locations.is_none() {
return true;
}
// Collect the set of available indexes (both `--index-url` and `--find-links` entries).
let indexes = settings
.index_locations
.indexes()
.map(IndexUrl::redacted)
.chain(
settings
.index_locations
.flat_index()
.map(FlatIndexLocation::redacted),
let existing_lock = if let Some(existing_lock) = existing_lock {
Some(
ValidatedLock::validate(
existing_lock,
workspace,
&members,
&constraints,
&overrides,
interpreter,
&requires_python,
index_locations,
upgrade,
&options,
&database,
printer,
)
.map(UrlString::from)
.collect::<BTreeSet<_>>();
// Find any packages in the lockfile that reference a registry that is no longer included in
// the current configuration.
for package in lock.packages() {
let Some(index) = package.index() else {
continue;
};
if !indexes.contains(index) {
let _ = writeln!(
printer.stderr(),
"Ignoring existing lockfile due to removal of referenced registry: {index}"
);
return false;
}
}
true
});
let existing_lock = match existing_lock {
None => None,
// Try to resolve using metadata in the lockfile.
//
// When resolving from the lockfile we can still download and install new distributions,
// but we rely on the lockfile for the metadata of any existing distributions. If we have
// any outdated metadata we fall back to a clean resolve.
Some(lock) => {
if lock
.satisfies(
workspace,
&members,
&constraints,
&overrides,
interpreter.tags()?,
&database,
)
.await?
{
debug!("Existing `uv.lock` satisfies workspace requirements");
Some(lock)
} else {
debug!("Existing `uv.lock` does not satisfy workspace requirements; ignoring...");
None
}
}
.await?,
)
} else {
None
};
match existing_lock {
// Resolution from the lockfile succeeded.
Some(lock) => {
Some(ValidatedLock::Satisfies(lock)) => {
// Print the success message after completing resolution.
logger.on_complete(lock.len(), start, printer)?;
// TODO(charlie): Avoid cloning here.
Ok(lock.clone())
Ok(LockResult::Unchanged(lock))
}
// The lockfile did not contain enough information to obtain a resolution, fallback
// to a fresh resolve.
None => {
_ => {
debug!("Starting clean resolution");
// If an existing lockfile exists, build up a set of preferences.
let LockedRequirements { preferences, git } = existing_lock
.as_ref()
.and_then(|lock| match &lock {
ValidatedLock::Preferable(lock) => Some(lock),
ValidatedLock::Satisfies(lock) => Some(lock),
ValidatedLock::Unusable(_) => None,
})
.map(|lock| read_lock_requirements(lock, upgrade))
.unwrap_or_default();
// Populate the Git resolver.
for ResolvedRepositoryReference { reference, sha } in git {
debug!("Inserting Git reference into resolver: `{reference:?}` at `{sha}`");
state.git.insert(reference, sha);
}
// When we run the same resolution from the lockfile again, we could get a different result the
// second time due to the preferences causing us to skip a fork point (see
// "preferences-dependent-forking" packse scenario). To avoid this, we store the forks in the
// lockfile. We read those after all the lockfile filters, to allow the forks to change when
// the environment changed, e.g. the python bound check above can lead to different forking.
let resolver_markers = ResolverMarkers::universal(if upgrade.is_all() {
// We're discarding all preferences, so we're also discarding the existing forks.
vec![]
} else {
existing_lock
.as_ref()
.map(|existing_lock| existing_lock.lock().fork_markers().to_vec())
.unwrap_or_default()
});
// Resolve the requirements.
let resolution = pip::operations::resolve(
requirements,
@ -624,13 +513,187 @@ async fn do_lock(
// Notify the user of any resolution diagnostics.
pip::operations::diagnose_resolution(resolution.diagnostics(), printer)?;
Ok(
Lock::from_resolution_graph(&resolution)?.with_manifest(ResolverManifest::new(
members,
constraints,
overrides,
)),
let previous = existing_lock.map(ValidatedLock::into_lock);
let lock = Lock::from_resolution_graph(&resolution)?
.with_manifest(ResolverManifest::new(members, constraints, overrides));
Ok(LockResult::Changed(previous, lock))
}
}
}
#[derive(Debug)]
enum ValidatedLock {
/// An existing lockfile was provided, but its contents should be ignored.
Unusable(Lock),
/// An existing lockfile was provided, and it satisfies the workspace requirements.
Satisfies(Lock),
/// An existing lockfile was provided, and the locked versions should be preferred if possible,
/// even though the lockfile does not satisfy the workspace requirements.
Preferable(Lock),
}
impl ValidatedLock {
/// Validate a [`Lock`] against the workspace requirements.
async fn validate<Context: BuildContext>(
lock: Lock,
workspace: &Workspace,
members: &[PackageName],
constraints: &[Requirement],
overrides: &[Requirement],
interpreter: &Interpreter,
requires_python: &RequiresPython,
index_locations: &IndexLocations,
upgrade: &Upgrade,
options: &Options,
database: &DistributionDatabase<'_, Context>,
printer: Printer,
) -> Result<Self, ProjectError> {
// Start with the most severe condition: a fundamental option changed between resolutions.
if lock.resolution_mode() != options.resolution_mode {
let _ = writeln!(
printer.stderr(),
"Ignoring existing lockfile due to change in resolution mode: `{}` vs. `{}`",
lock.resolution_mode().cyan(),
options.resolution_mode.cyan()
);
return Ok(Self::Unusable(lock));
}
if lock.prerelease_mode() != options.prerelease_mode {
let _ = writeln!(
printer.stderr(),
"Ignoring existing lockfile due to change in pre-release mode: `{}` vs. `{}`",
lock.prerelease_mode().cyan(),
options.prerelease_mode.cyan()
);
return Ok(Self::Unusable(lock));
}
match (lock.exclude_newer(), options.exclude_newer) {
(None, None) => (),
(Some(existing), Some(provided)) if existing == provided => (),
(Some(existing), Some(provided)) => {
let _ = writeln!(
printer.stderr(),
"Ignoring existing lockfile due to change in timestamp cutoff: `{}` vs. `{}`",
existing.cyan(),
provided.cyan()
);
return Ok(Self::Unusable(lock));
}
(Some(existing), None) => {
let _ = writeln!(
printer.stderr(),
"Ignoring existing lockfile due to removal of timestamp cutoff: `{}`",
existing.cyan(),
);
return Ok(Self::Unusable(lock));
}
(None, Some(provided)) => {
let _ = writeln!(
printer.stderr(),
"Ignoring existing lockfile due to addition of timestamp cutoff: `{}`",
provided.cyan()
);
return Ok(Self::Unusable(lock));
}
}
// If the user specified `--upgrade`, then at best we can prefer some of the existing
// versions.
if !upgrade.is_none() {
debug!("Ignoring existing lockfile due to `--upgrade`");
return Ok(Self::Preferable(lock));
}
// If the Requires-Python bound in the lockfile is weaker or equivalent to the
// Requires-Python bound in the workspace, we should have the necessary wheels to perform
// a locked resolution.
if let Some(locked) = lock.requires_python() {
if locked.bound() != requires_python.bound() {
// On the other hand, if the bound in the lockfile is stricter, meaning the
// bound has since been weakened, we have to perform a clean resolution to ensure
// we fetch the necessary wheels.
debug!("Ignoring existing lockfile due to change in `requires-python`");
// It's fine to prefer the existing versions, though.
return Ok(Self::Preferable(lock));
}
}
// If the user provided at least one index URL (from the command line, or from a configuration
// file), don't use the existing lockfile if it references any registries that are no longer
// included in the current configuration.
//
// However, if _no_ indexes were provided, we assume that the user wants to reuse the existing
// distributions, even though a failure to reuse the lockfile will result in re-resolving
// against PyPI by default.
if !index_locations.is_none() {
// Collect the set of available indexes (both `--index-url` and `--find-links` entries).
let indexes = index_locations
.indexes()
.map(IndexUrl::redacted)
.chain(
index_locations
.flat_index()
.map(FlatIndexLocation::redacted),
)
.map(UrlString::from)
.collect::<BTreeSet<_>>();
// Find any packages in the lockfile that reference a registry that is no longer included in
// the current configuration.
for package in lock.packages() {
let Some(index) = package.index() else {
continue;
};
if !indexes.contains(index) {
let _ = writeln!(
printer.stderr(),
"Ignoring existing lockfile due to removal of referenced registry: {index}"
);
// It's fine to prefer the existing versions, though.
return Ok(Self::Preferable(lock));
}
}
}
// Determine whether the lockfile satisfies the workspace requirements.
if lock
.satisfies(
workspace,
members,
constraints,
overrides,
interpreter.tags()?,
database,
)
.await?
{
debug!("Existing `uv.lock` satisfies workspace requirements");
Ok(Self::Satisfies(lock))
} else {
debug!("Existing `uv.lock` does not satisfy workspace requirements; ignoring...");
Ok(Self::Preferable(lock))
}
}
/// Return the inner [`Lock`].
fn lock(&self) -> &Lock {
match self {
ValidatedLock::Unusable(lock) => lock,
ValidatedLock::Satisfies(lock) => lock,
ValidatedLock::Preferable(lock) => lock,
}
}
/// Convert the [`ValidatedLock`] into a [`Lock`].
#[must_use]
fn into_lock(self) -> Lock {
match self {
ValidatedLock::Unusable(lock) => lock,
ValidatedLock::Satisfies(lock) => lock,
ValidatedLock::Preferable(lock) => lock,
}
}
}


@ -174,7 +174,8 @@ pub(crate) async fn remove(
cache,
printer,
)
.await?;
.await?
.into_lock();
if no_sync {
return Ok(ExitStatus::Success);
@ -191,7 +192,7 @@ pub(crate) async fn remove(
project::sync::do_sync(
&project,
&venv,
&lock.lock,
&lock,
&extras,
dev,
Modifications::Exact,


@ -36,7 +36,7 @@ use crate::commands::pip::operations::Modifications;
use crate::commands::project::environment::CachedEnvironment;
use crate::commands::project::{ProjectError, WorkspacePython};
use crate::commands::reporters::PythonDownloadReporter;
use crate::commands::{pip, project, ExitStatus, SharedState};
use crate::commands::{project, ExitStatus, SharedState};
use crate::printer::Printer;
use crate::settings::ResolverInstallerSettings;
@ -387,7 +387,7 @@ pub(crate) async fn run(
.await?
};
let lock = match project::lock::do_safe_lock(
let result = match project::lock::do_safe_lock(
locked,
frozen,
project.workspace(),
@ -407,8 +407,8 @@ pub(crate) async fn run(
)
.await
{
Ok(lock) => lock,
Err(ProjectError::Operation(pip::operations::Error::Resolve(
Ok(result) => result,
Err(ProjectError::Operation(operations::Error::Resolve(
uv_resolver::ResolveError::NoSolution(err),
))) => {
let report = miette::Report::msg(format!("{err}")).context(err.header());
@ -421,7 +421,7 @@ pub(crate) async fn run(
project::sync::do_sync(
&project,
&venv,
&lock.lock,
result.lock(),
&extras,
dev,
Modifications::Sufficient,


@ -89,7 +89,7 @@ pub(crate) async fn sync(
)
.await
{
Ok(lock) => lock,
Ok(result) => result.into_lock(),
Err(ProjectError::Operation(pip::operations::Error::Resolve(
uv_resolver::ResolveError::NoSolution(err),
))) => {
@ -107,7 +107,7 @@ pub(crate) async fn sync(
do_sync(
&project,
&venv,
&lock.lock,
&lock,
&extras,
dev,
modifications,


@ -79,7 +79,8 @@ pub(crate) async fn tree(
cache,
printer,
)
.await?;
.await?
.into_lock();
// Apply the platform tags to the markers.
let markers = match (python_platform, python_version) {
@ -93,7 +94,7 @@ pub(crate) async fn tree(
// Render the tree.
let tree = TreeDisplay::new(
&lock.lock,
&lock,
(!universal).then(|| markers.as_ref()),
depth.into(),
prune,


@ -128,19 +128,6 @@ fn fork_allows_non_conflicting_non_overlapping_dependencies() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -254,19 +241,6 @@ fn fork_allows_non_conflicting_repeated_dependencies() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -388,19 +362,6 @@ fn fork_basic() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -741,19 +702,6 @@ fn fork_filter_sibling_dependencies() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -869,19 +817,6 @@ fn fork_upgrade() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -1038,19 +973,6 @@ fn fork_incomplete_markers() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -1186,19 +1108,6 @@ fn fork_marker_accrue() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -1444,19 +1353,6 @@ fn fork_marker_inherit_combined_allowed() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -1626,19 +1522,6 @@ fn fork_marker_inherit_combined_disallowed() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -1809,19 +1692,6 @@ fn fork_marker_inherit_combined() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -1965,19 +1835,6 @@ fn fork_marker_inherit_isolated() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -2139,19 +1996,6 @@ fn fork_marker_inherit_transitive() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -2285,19 +2129,6 @@ fn fork_marker_inherit() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -2460,19 +2291,6 @@ fn fork_marker_limited_inherit() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -2617,19 +2435,6 @@ fn fork_marker_selection() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -2798,19 +2603,6 @@ fn fork_marker_track() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -2945,19 +2737,6 @@ fn fork_non_fork_marker_transitive() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -3241,19 +3020,6 @@ fn fork_overlapping_markers_basic() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -3495,19 +3261,6 @@ fn preferences_dependent_forking_bistable() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -3936,19 +3689,6 @@ fn preferences_dependent_forking_tristable() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -4151,19 +3891,6 @@ fn preferences_dependent_forking() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -4347,19 +4074,6 @@ fn fork_remaining_universe_partitioning() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -4445,19 +4159,6 @@ fn fork_requires_python_full_prerelease() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -4543,19 +4244,6 @@ fn fork_requires_python_full() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -4657,19 +4345,6 @@ fn fork_requires_python_patch_overlap() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
@ -4752,18 +4427,5 @@ fn fork_requires_python() -> Result<()> {
.assert()
.success();
// Assert the idempotence of `uv lock` when resolving with the lockfile preferences,
// by upgrading an irrelevant package.
context
.lock()
.arg("--locked")
.arg("--upgrade-package")
.arg("packse")
.env_remove("UV_EXCLUDE_NEWER")
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}