Rene Leonhardt 2025-07-05 09:14:38 +03:00 committed by GitHub
commit 90dc2b1dc0
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
20 changed files with 1640 additions and 36 deletions

47
Cargo.lock generated
View file

@ -1032,6 +1032,16 @@ dependencies = [
"dirs-sys",
]
[[package]]
name = "dirs-next"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1"
dependencies = [
"cfg-if",
"dirs-sys-next",
]
[[package]]
name = "dirs-sys"
version = "0.4.1"
@ -1044,6 +1054,17 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "dirs-sys-next"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d"
dependencies = [
"libc",
"redox_users",
"winapi",
]
[[package]]
name = "displaydoc"
version = "0.2.5"
@ -2747,6 +2768,20 @@ dependencies = [
"yansi",
]
[[package]]
name = "prettytable-rs"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eea25e07510aa6ab6547308ebe3c036016d162b8da920dbb079e3ba8acf3d95a"
dependencies = [
"csv",
"encode_unicode",
"is-terminal",
"lazy_static",
"term",
"unicode-width 0.1.14",
]
[[package]]
name = "priority-queue"
version = "2.3.1"
@ -3917,6 +3952,17 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "term"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f"
dependencies = [
"dirs-next",
"rustversion",
"winapi",
]
[[package]]
name = "terminal_size"
version = "0.4.1"
@ -4634,6 +4680,7 @@ dependencies = [
"owo-colors",
"petgraph",
"predicates",
"prettytable-rs",
"regex",
"reqwest",
"rkyv",

View file

@ -1,12 +1,11 @@
use std::ffi::OsString;
use std::ops::{Deref, DerefMut};
use std::path::PathBuf;
use std::str::FromStr;
use anyhow::{Result, anyhow};
use clap::builder::Styles;
use clap::builder::styling::{AnsiColor, Effects, Style};
use clap::{Args, Parser, Subcommand};
use std::ffi::OsString;
use std::ops::{Deref, DerefMut};
use std::path::PathBuf;
use std::str::FromStr;
use uv_cache::CacheArgs;
use uv_configuration::{
@ -22,6 +21,7 @@ use uv_redacted::DisplaySafeUrl;
use uv_resolver::{AnnotationStyle, ExcludeNewer, ForkStrategy, PrereleaseMode, ResolutionMode};
use uv_static::EnvVars;
use uv_torch::TorchMode;
use uv_workspace::pyproject::DependencyType;
use uv_workspace::pyproject_mut::AddBoundsKind;
pub mod comma;
@ -608,6 +608,62 @@ pub struct VersionArgs {
pub python: Option<Maybe<String>>,
}
#[derive(Args)]
pub struct UpgradeProjectArgs {
/// Run without performing the upgrades.
#[arg(long)]
pub dry_run: bool,
/// Search recursively for pyproject.toml files.
#[arg(long, env = EnvVars::UV_UPGRADE_RECURSIVE)]
pub recursive: bool,
/// Only search specific tables in pyproject.toml: `prod,dev,optional,groups`.
#[arg(
long,
env = EnvVars::UV_UPGRADE_TYPES,
value_delimiter = ',',
value_parser = parse_dependency_type,
)]
pub types: Vec<Maybe<DependencyType>>,
    /// Allow only some version digits to change; others will be skipped:
/// `1,2,3,4` (major, minor, patch, build number).
#[arg(
long,
        env = EnvVars::UV_UPGRADE_ALLOW,
value_delimiter = ',',
value_parser = parse_version_digit,
)]
pub allow: Vec<Maybe<usize>>,
#[command(flatten)]
pub refresh: RefreshArgs,
/// The Python interpreter to use during resolution (overrides pyproject.toml).
///
/// A Python interpreter is required for building source distributions to determine package
    /// metadata when there are no wheels.
///
/// The interpreter is also used as the fallback value for the minimum Python version if
/// `requires-python` is not set.
///
/// See `uv help python` for details on Python discovery and supported request formats.
#[arg(
long,
short,
env = EnvVars::UV_PYTHON,
verbatim_doc_comment,
help_heading = "Python options",
value_parser = parse_maybe_string,
)]
pub python: Option<Maybe<String>>,
    /// Upgrade only the given requirements (e.g. `uv<0.5`) instead of pyproject.toml files.
#[arg(required = false, value_parser = parse_requirement)]
pub requirements: Vec<Maybe<Requirement>>,
}
#[derive(Debug, Copy, Clone, PartialEq, clap::ValueEnum)]
pub enum VersionBump {
/// Increase the major version (1.2.3 => 2.0.0)
@ -871,6 +927,8 @@ pub enum ProjectCommand {
Remove(RemoveArgs),
/// Read or update the project's version.
Version(VersionArgs),
/// Upgrade the project's dependency constraints.
Upgrade(UpgradeProjectArgs),
/// Update the project's environment.
///
/// Syncing ensures that all project dependencies are installed and up-to-date with the
@ -1053,6 +1111,45 @@ fn parse_insecure_host(input: &str) -> Result<Maybe<TrustedHost>, String> {
}
}
/// Parse a string into a [`DependencyType`], mapping the empty string to `None`.
fn parse_dependency_type(input: &str) -> Result<Maybe<DependencyType>, String> {
if input.is_empty() {
Ok(Maybe::None)
} else {
match DependencyType::from_str(input) {
Ok(table) => Ok(Maybe::Some(table)),
Err(err) => Err(err.to_string()),
}
}
}
/// Parse a string like `uv<0.5` into a [`Requirement`], mapping the empty string to `None`.
fn parse_requirement(input: &str) -> Result<Maybe<Requirement>, String> {
if input.is_empty() {
Ok(Maybe::None)
} else {
match Requirement::from_str(input) {
            Ok(requirement) => Ok(Maybe::Some(requirement)),
Err(err) => Err(err.to_string()),
}
}
}
/// Parse a string into a [`usize`], mapping the empty string or unknown digits to `None`.
///
/// Allowed: 1, 2, 3 or 4.
fn parse_version_digit(input: &str) -> Result<Maybe<usize>, String> {
if input.is_empty() {
Ok(Maybe::None)
} else {
match usize::from_str(input) {
Ok(digit) if (1..=4).contains(&digit) => Ok(Maybe::Some(digit)),
Ok(_) => Ok(Maybe::None),
Err(err) => Err(err.to_string()),
}
}
}
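As a rough sketch of the intended mapping (a hypothetical unit test in this module; the helpers above are private):

```
#[test]
fn parse_version_digit_sketch() {
    // The empty string and digits outside 1..=4 both collapse to `Maybe::None`.
    assert!(matches!(parse_version_digit(""), Ok(Maybe::None)));
    assert!(matches!(parse_version_digit("7"), Ok(Maybe::None)));
    // Digits 1 through 4 are kept.
    assert!(matches!(parse_version_digit("3"), Ok(Maybe::Some(3))));
    // Non-numeric input surfaces the underlying parse error.
    assert!(parse_version_digit("major").is_err());
}
```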
/// Parse a string into a [`PathBuf`]. The string can represent a file, either as a path or a
/// `file://` URL.
fn parse_file_path(input: &str) -> Result<PathBuf, String> {

View file

@ -419,7 +419,7 @@ impl<'a> IndexLocations {
}
/// Clone the index locations into a [`IndexUrls`] instance.
pub fn index_urls(&'a self) -> IndexUrls {
pub fn index_urls(&self) -> IndexUrls {
IndexUrls {
indexes: self.indexes.clone(),
no_index: self.no_index,

View file

@ -1,11 +1,11 @@
use std::collections::Bound;
use std::str::FromStr;
use version_ranges::Ranges;
use uv_distribution_filename::WheelFilename;
use uv_pep440::{
LowerBound, UpperBound, Version, VersionSpecifier, VersionSpecifiers,
release_specifiers_to_ranges,
VersionSpecifiersParseError, release_specifiers_to_ranges,
};
use uv_pep508::{MarkerExpression, MarkerTree, MarkerValueVersion};
use uv_platform_tags::{AbiTag, LanguageTag};
@ -502,6 +502,19 @@ impl RequiresPython {
}
})
}
/// Remove trailing zeroes from all specifiers
pub fn remove_zeroes(&self) -> String {
self.specifiers().remove_zeroes().to_string()
}
}
impl FromStr for RequiresPython {
type Err = VersionSpecifiersParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
VersionSpecifiers::from_str(s).map(|v| Self::from_specifiers(&v))
}
}
impl std::fmt::Display for RequiresPython {

View file

@ -359,7 +359,7 @@ pub async fn persist_with_retry(
// So every time we fail, we need to reset the `NamedTempFile` to try again.
//
// Every time we (re)try we call this outer closure (`let persist = ...`), so it needs to
// be at least a `FnMut` (as opposed to `Fnonce`). However the closure needs to return a
// be at least a `FnMut` (as opposed to `FnOnce`). However the closure needs to return a
// totally owned `Future` (so effectively it returns a `FnOnce`).
//
// But if the `Future` is totally owned it *necessarily* can't write back the `NamedTempFile`

View file

@ -30,7 +30,7 @@ pub use version_ranges::{
pub use {
version::{
LocalSegment, LocalVersion, LocalVersionSlice, MIN_VERSION, Operator, OperatorParseError,
Prerelease, PrereleaseKind, Version, VersionParseError, VersionPattern,
Prerelease, PrereleaseKind, Version, VersionDigit, VersionParseError, VersionPattern,
VersionPatternParseError,
},
version_specifier::{

View file

@ -1,10 +1,12 @@
use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
use std::fmt::Formatter;
use std::cmp::min;
use std::fmt::{Display, Formatter};
use std::num::NonZero;
use std::ops::Deref;
use std::sync::LazyLock;
use std::{
borrow::Borrow,
cmp,
cmp::Ordering,
hash::{Hash, Hasher},
str::FromStr,
@ -742,6 +744,93 @@ impl Version {
// release is equal, so compare the other parts
sortable_tuple(self).cmp(&sortable_tuple(other))
}
/// Bump to `latest` version so it is contained in the same operator.
///
/// Returns (bumped, upgraded): `upgraded == false` means downgraded.
///
/// | old | latest | new |
/// |-|-|-|
/// | <0.1 | 0.1.2 | <0.2 |
/// | ==0.0.* | 0.1.2 | ==0.1.* |
/// | ==0.2.* | 0.1.2 | ==0.1.* |
/// | <1.0 | 1.2.3 | <1.3 |
/// | <1.0 | 2.0.0 | <2.1 |
/// | <1.2.3 | 1.2.3 | <1.3.0 |
/// | <=1.2.2 | 1.2.3 | <=1.2.3 |
/// | <=1.2.0 | 1.2.3 | <=1.3.0 |
/// | >=2.0.0 | 1.2.3 | >=1.2.0 |
/// | >=2.0.2 | 1.2.3 | >=1.2.3 |
/// | <1.2.3.4 | 1.2.3.4 | <1.2.3.5 |
pub(crate) fn bump_to(&mut self, latest: &Self, operator: Operator) -> (bool, bool) {
let op_equal = operator == Operator::Equal
|| operator == Operator::EqualStar
|| operator == Operator::ExactEqual;
let ordering = self.cmp_slow(latest);
// Special case: Correct if > or >= and value > latest (downgrade)
let downgrade = ordering == Ordering::Greater
&& (operator == Operator::GreaterThan
|| operator == Operator::GreaterThanEqual
|| operator == Operator::Equal
|| operator == Operator::EqualStar)
|| ordering == Ordering::Equal && operator == Operator::GreaterThan;
let new = &*latest.release();
let old = &*self.release();
let last = *old.last().unwrap();
let zero = last == 0;
let opgt = operator == Operator::GreaterThan;
let opeq = operator == Operator::Equal;
let oplt = operator == Operator::LessThan;
let ople = operator == Operator::LessThanEqual;
let orle = ordering != Ordering::Greater;
let enough_len = old.len() >= new.len();
let subtract = downgrade && zero && enough_len && !opeq;
// Note: Even if there is no unyanked 1.0.0 downgrade, the constraint will find `latest` 1.0.1
let delta = if downgrade {
// set version to 0 (negative delta) or don't change it
if subtract { *new.last().unwrap() } else { 0 }
} else {
u64::from(!(op_equal || ople && orle && enough_len || opgt)) // add 1 if operator needs
};
let addsub = |v: u64| if subtract { v - delta } else { v + delta };
if downgrade || orle {
let minor = oplt || ople && zero;
let upgrade = match min(old.len(), new.len()) {
// <1 to <=1.0.0 : <2
1 => Version::new([addsub(new[0])]),
// <1.2 to <=1.2.3 : <1.3
2 => Version::new([new[0], addsub(new[1])]),
// <1.2.3 to <=1.2.3 : <1.2.4
3 => Version::new([
new[0],
new[1] + u64::from(minor),
if minor { 0 } else { addsub(new[2]) },
]),
// <1.2.3.4 to <=1.2.3.4 : <1.2.3.5
4 => Version::new([new[0], new[1], new[2], addsub(new[3])]),
_ => latest.clone(),
};
self.inner = upgrade.inner;
return (true, !downgrade);
}
(false, false)
}
/// Remove trailing zeroes from the release
#[must_use]
pub fn remove_zeroes(&self) -> Self {
let mut r = vec![];
let mut found = false;
for d in self.release().to_vec().iter().rev() {
if !found && *d == 0 {
continue;
}
found = true;
r.push(*d);
}
Self::new(r.iter().rev())
}
}
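To make the table above concrete, here is a minimal hedged sketch of `bump_to` and `remove_zeroes` (they are crate-internal, so this assumes a unit test inside `uv-pep440` with `Version` and `Operator` in scope):

```
#[test]
fn bump_and_trim_sketch() {
    use std::str::FromStr;

    // First row of the table: `<1.0` with latest 1.2.3 becomes `<1.3` (an upgrade).
    let mut constraint = Version::from_str("1.0").unwrap();
    let latest = Version::from_str("1.2.3").unwrap();
    let (bumped, upgraded) = constraint.bump_to(&latest, Operator::LessThan);
    assert!(bumped && upgraded);
    assert_eq!(&*constraint.release(), &[1, 3]);

    // Trailing zeroes are trimmed from the release segment only.
    let trimmed = Version::from_str("1.2.0.0").unwrap().remove_zeroes();
    assert_eq!(&*trimmed.release(), &[1, 2]);
}
```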
impl<'de> Deserialize<'de> for Version {
@ -1455,6 +1544,68 @@ pub struct Release<'a> {
inner: ReleaseInner<'a>,
}
impl Release<'_> {
/// Which digit changed: `1`=major, `2`=minor, `3`=patch, `4`=build number, `None`=undetermined/equal
pub(crate) fn semver_change(&self, right: &Self, op: Operator) -> Option<usize> {
let l = self.len();
let mut me: Vec<_> = self[..].to_vec();
// decrease the last digit != 0 to determine major/minor upgrade:
// <1 to <1.0.1 is major
// <1.2.0 to <1.2.1 is minor
if op == Operator::LessThan {
for i in (0..l).rev() {
if me[i] != 0 {
me[i] -= 1;
break;
}
}
}
if l < right.len() && (op == Operator::LessThan || op == Operator::GreaterThan) {
let lhs = &me[..l];
let mut non_zero = 0;
for (i, v) in lhs.iter().enumerate() {
if *v != 0 {
non_zero = i + 1;
}
}
return Some(non_zero);
}
let l = cmp::min(self.len(), right.len());
// Slice to the loop iteration range to enable bound check
// elimination in the compiler
let lhs = &me[..l];
let rhs = &right[..l];
for (i, v) in lhs.iter().enumerate() {
match v.cmp(&rhs[i]) {
Ordering::Equal => (),
_ => return Some(i + 1),
}
}
let longer = if self.len() > right.len() {
&me[..]
} else {
&right[..]
};
if self.cmp(right) == Ordering::Equal {
if op == Operator::GreaterThanEqual {
return Some(l); // Operator downgrade
}
for (i, v) in longer.iter().enumerate() {
if *v != 0 {
return Some(i + 1);
}
}
}
None
}
}
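A quick, hedged illustration of the digit classification (again assuming a unit test in this module, since `semver_change` is `pub(crate)`):

```
#[test]
fn semver_change_sketch() {
    use std::str::FromStr;

    let old = Version::from_str("1.2.0").unwrap();
    let new = Version::from_str("1.3.0").unwrap();
    // The minor digit is the first one that differs, so the change is classified as `2`.
    assert_eq!(
        old.release().semver_change(&new.release(), Operator::LessThanEqual),
        Some(2)
    );
}
```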
enum ReleaseInner<'a> {
// The small versions unpacked into larger u64 values.
// We're storing at most 4 u64 plus determinant for the duration of the release call on the
@ -2501,6 +2652,59 @@ impl From<VersionParseError> for VersionPatternParseError {
}
}
/// Store digits for a small version
#[derive(Default)]
pub struct VersionDigit {
major: usize,
minor: usize,
patch: usize,
build: usize,
}
impl VersionDigit {
/// Increase a digit
pub fn add(&mut self, digit: usize) -> bool {
match digit {
1 => self.major += 1,
2 => self.minor += 1,
3 => self.patch += 1,
4 => self.build += 1,
_ => return false,
}
true
}
/// Increase all digits from `other`
pub fn add_other(&mut self, other: &Self) {
self.major += other.major;
self.minor += other.minor;
self.patch += other.patch;
        self.build += other.build;
}
/// Are all digits empty?
pub fn is_empty(&self) -> bool {
self.major == 0 && self.minor == 0 && self.patch == 0 && self.build == 0
}
/// Format all digits as 1.2.3.4 if not empty.
pub fn format(&self, prefix: &str, suffix: &str) -> String {
if self.is_empty() {
return String::new();
}
format!(
"{prefix}{}.{}.{}.{}{suffix}",
self.major, self.minor, self.patch, self.build
)
}
}
impl Display for VersionDigit {
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
write!(f, "{}", self.format("", ""))
}
}
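A brief sketch of the intended accumulation and formatting of the skipped-digit counters (hypothetical assertions, assuming `VersionDigit` is in scope):

```
#[test]
fn version_digit_sketch() {
    // Two skipped minor bumps and one skipped build-number bump.
    let mut skipped = VersionDigit::default();
    skipped.add(2);
    skipped.add(2);
    skipped.add(4);
    assert!(!skipped.is_empty());
    // `Display` renders the counters positionally as major.minor.patch.build.
    assert_eq!(skipped.to_string(), "0.2.0.1");
    // `format` wraps the same output in a prefix/suffix, or returns "" when empty.
    assert_eq!(skipped.format("(skipped ", ")"), "(skipped 0.2.0.1)");
    assert_eq!(VersionDigit::default().format("(", ")"), "");
}
```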
/// Compare the release parts of two versions, e.g. `4.3.1` > `4.2`, `1.1.0` ==
/// `1.1` and `1.16` < `1.19`
pub(crate) fn compare_release(this: &[u64], other: &[u64]) -> Ordering {

View file

@ -1,9 +1,10 @@
use std::borrow::Cow;
use std::cmp::Ordering;
use std::fmt::Formatter;
use std::ops::Bound;
use std::ops::{Bound, DerefMut};
use std::str::FromStr;
use crate::version::VersionDigit;
use crate::{
Operator, OperatorParseError, Version, VersionPattern, VersionPatternParseError, version,
};
@ -42,6 +43,12 @@ impl std::ops::Deref for VersionSpecifiers {
}
}
impl DerefMut for VersionSpecifiers {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl VersionSpecifiers {
/// Matches all versions.
pub fn empty() -> Self {
@ -122,6 +129,57 @@ impl VersionSpecifiers {
Self::from_unsorted(specifiers)
}
/// Bump last constraint if it doesn't contain `latest`
pub fn bump_last(
&mut self,
latest: &Version,
allow: &[usize],
skipped: &mut VersionDigit,
) -> (bool, bool, Option<usize>, bool) {
match self.last_mut().filter(|last| !last.contains(latest)) {
Some(last) => {
let last_copy = last.clone();
let mut last_new = last.clone();
let (mut bumped, mut upgraded) = last_new.version.bump_to(latest, last.operator);
// Special case: unresolvable requirement has been downgraded to latest
// We need to change >2.0 to >=1.2 for latest 1.2.3 (EqualStar and >= stay untouched)
if bumped && last.operator == Operator::GreaterThan {
last_new.operator = Operator::GreaterThanEqual; // Operator downgrade
upgraded = false;
}
let old = last_copy.version.release();
let new = last_new.version.release();
let mut semver = bumped
.then(|| old.semver_change(&new, last_new.operator))
.flatten();
let mut skip = false;
if matches!(semver, Some(i) if i > 0 && allow.contains(&i)) {
*last = last_new;
} else if bumped {
bumped = false;
upgraded = false;
skip = skipped.add(semver.unwrap_or(0)); // Skip forbidden digits
semver = None;
}
(bumped, upgraded, semver, skip)
}
_ => (false, false, None, false),
}
}
/// Remove trailing zeroes from all specifiers
#[must_use]
pub fn remove_zeroes(&self) -> Self {
Self::from_unsorted(
self.iter()
.map(|v| {
VersionSpecifier::from_version(*v.operator(), v.version().remove_zeroes())
.unwrap()
})
.collect(),
)
}
}
impl FromIterator<VersionSpecifier> for VersionSpecifiers {
@ -974,9 +1032,8 @@ impl std::fmt::Display for TildeVersionSpecifier<'_> {
#[cfg(test)]
mod tests {
use std::{cmp::Ordering, str::FromStr};
use indoc::indoc;
use std::{cmp::Ordering, str::FromStr};
use crate::LocalSegment;
@ -1985,4 +2042,151 @@ Failed to parse version: Unexpected end of version specifier, expected operator.
"The ~= operator requires at least two segments in the release version"
);
}
#[test]
fn test_bump_last() {
let success = [
(1, "1.2.3", "<1", "<2", 1), // upgrades
(0, "1.2.3", "<2", "", 0),
(0, "1.2.3", "<2.0", "", 0),
(0, "1.2.3", "<2.0.0", "", 0),
(1, "1.2.3", "<1.0", "<1.3", 1),
(2, "1.2.3", "<1.2", "<1.3", 1),
(1, "2.0.0", "<1.0", "<2.1", 1),
(0, "1.2.3", "<1.3", "", 0),
(1, "1.2.3", "<1.0.0", "<1.3.0", 1),
(2, "1.2.3", "<1.2.0", "<1.3.0", 1),
(2, "1.2.3", "<1.2.3", "<1.3.0", 1),
(0, "1.2.3", "<1.2.4", "", 0),
(0, "1.2.3", "<1.3.0", "", 0),
(4, "1.2.3.4", "<1.2.3.4", "<1.2.3.5", 1),
(1, "1.2.3", "<=1", "<=2", 1),
(2, "1.2.3", "<=1.0", "<=1.3", 1),
(2, "1.2.3", "<=1.2", "<=1.3", 1),
(2, "1.2.3", "<=1.0.0", "<=1.3.0", 1),
(2, "1.2.3", "<=1.2.0", "<=1.3.0", 1),
(3, "1.2.3", "<=1.2.2", "<=1.2.3", 1),
(4, "1.2.3.4", "<=1.2.3.3", "<=1.2.3.4", 1),
(1, "1.2.3", "==0.*", "==1.*", 1),
(2, "1.2.3", "==1.0.*", "==1.2.*", 1),
(2, "1.2.3", "==1.1.*", "==1.2.*", 1),
(1, "1.2.3", "==0.1.0", "==1.2.3", 1),
(1, "1.2.3", "==0.1.2", "==1.2.3", 1),
(4, "1.2.3.4", "==1.2.3.3", "==1.2.3.4", 1),
(0, "1.2.3", "<=1.2.3", "", 0),
(0, "1.2.3", "<=1.2.4", "", 0),
(1, "1.2.3", ">2", ">=1", 2), // downgrades
(1, "1.2.3", ">2.0", ">=1.2", 2),
(1, "1.2.3", ">2.0.0", ">=1.2.0", 2),
(1, "1.2.3", ">2.0.1", ">=1.2.3", 2),
(2, "1.2.3", ">1.3", ">=1.2", 2),
(2, "1.2.3", ">1.3.0", ">=1.2.0", 2),
(2, "1.2.3", ">1.3.1", ">=1.2.3", 2),
(3, "1.2.3", ">1.2.3", ">=1.2.3", 2), // operator "downgrade"
(3, "1.2.3", ">1.2.4", ">=1.2.3", 2),
(4, "1.2.3.4", ">1.2.3.4", ">=1.2.3.4", 2), // operator "downgrade"
(1, "1.2.3", ">=2", ">=1", 2),
(1, "1.2.3", ">=2.0", ">=1.2", 2),
(1, "1.2.3", ">=2.0.0", ">=1.2.0", 2),
(1, "1.2.3", ">=2.0.1", ">=1.2.3", 2),
(2, "1.2.3", ">=1.3", ">=1.2", 2),
(2, "1.2.3", ">=1.3.0", ">=1.2.0", 2),
(2, "1.2.3", ">=1.3.1", ">=1.2.3", 2),
(3, "1.2.3", ">=1.2.4", ">=1.2.3", 2),
(4, "1.2.3.4", ">=1.2.3.5", ">=1.2.3.4", 2),
(1, "1.2.3", "==2.*", "==1.*", 2),
(1, "1.2.3", "==2.0.*", "==1.2.*", 2),
(1, "1.2.3", "==2.1.*", "==1.2.*", 2),
(2, "1.2.3", "==1.3.*", "==1.2.*", 2),
(2, "1.2.3", "==1.3.0", "==1.2.3", 2),
(2, "1.2.3", "==1.3.2", "==1.2.3", 2),
(4, "1.2.3.4", "==1.2.3.5", "==1.2.3.4", 2),
(0, "1.2.3", ">1", "", 0), // unchanged
(0, "1.2.3", ">1.2", "", 0),
(0, "1.2.3", ">1.2.0", "", 0),
(0, "1.2.3", ">1.2.2", "", 0),
(0, "1.2.3.4", ">1.2.3.3", "", 0),
(0, "1.2.3", ">=1", "", 0),
(0, "1.2.3", ">=1.2", "", 0),
(0, "1.2.3", ">=1.2.0", "", 0),
(0, "1.2.3", ">=1.2.3", "", 0),
(0, "1.2.3.4", ">=1.2.3.4", "", 0),
(0, "1.2.3", "==1.*", "", 0),
(0, "1.2.3", "==1.2.*", "", 0),
(0, "1.2.3", "==1.2.3", "", 0),
(0, "1.2.3.4", "==1.2.3.*", "", 0),
(0, "1.2.3.4", "==1.2.3.4", "", 0),
(2, "0.1", "<0.1.0", "<0.2", 1), // irregular versions (i.e. pydantic 0.1)
(2, "0.1", ">0.1", ">=0.1", 2), // irregular versions (i.e. pydantic 0.1)
];
for (i, tuple) in success.iter().enumerate() {
#[allow(clippy::cast_possible_truncation)]
let current_line = line!() - (success.len() - i + 3) as u32;
let (semver, latest, old, new, upgrade) = tuple;
// if i != 0 { continue }
let mut modified = VersionSpecifiers::from_str(old).unwrap();
let old = VersionSpecifiers::from_str(old).unwrap();
let new = VersionSpecifiers::from_str(new).unwrap();
let latest = Version::from_str(latest).unwrap();
let mut skipped = VersionDigit::default();
let (bumped, upgraded, semver_change, _skip) =
modified.bump_last(&latest, &[1, 2, 3, 4], &mut skipped);
let should_bump = !new.to_string().is_empty();
let not = if should_bump { "not " } else { "" };
let semver_changed = semver_change.unwrap_or(0);
let testcase = format!("\nline {current_line}: {tuple:?}");
assert_eq!(
old.contains(&latest),
!bumped && !should_bump,
"[{i}]: test data incorrect: old {old} contains {latest} -> should not bump{testcase}"
);
assert!(
new.contains(&latest),
"[{i}]: test data incorrect: new {new} doesn't contain {latest} -> should bump{testcase}"
);
assert_eq!(
should_bump,
*upgrade != 0,
"[{i}]: {} and expected 0 (unchanged) but test data = {upgrade}{testcase}",
if should_bump {
"should bump"
} else {
"should not bump"
}
);
assert_eq!(
bumped, should_bump,
"[{i}]: did {not}bump {old} to {modified} for {latest}{testcase}"
);
assert!(
skipped.is_empty(),
"[{i}]: skipped {skipped}: did not bump {old} to {modified} {latest}{testcase}",
);
assert_eq!(
upgraded,
should_bump && *upgrade == 1,
"[{i}]: did not {} {old} to {modified} for {latest}{testcase}",
if *upgrade == 1 {
"upgrade"
} else {
"downgrade"
}
);
if !should_bump {
assert_eq!(
modified, old,
"[{i}]: expected unchanged {old} but got {modified}{testcase}"
);
continue;
}
assert_eq!(
modified, new,
"[{i}]: expected {old} to become {new} but got {modified}{testcase}"
);
assert_eq!(
*semver, semver_changed,
"[{i}]: expected {old} -> {new} = semver {semver} but got {semver_changed}{testcase}"
);
}
}
}

View file

@ -222,10 +222,10 @@ impl<'de, T: Pep508Url> Deserialize<'de> for Requirement<T> {
{
struct RequirementVisitor<T>(std::marker::PhantomData<T>);
impl<T: Pep508Url> serde::de::Visitor<'_> for RequirementVisitor<T> {
impl<T: Pep508Url> de::Visitor<'_> for RequirementVisitor<T> {
type Value = Requirement<T>;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {
formatter.write_str("a string containing a PEP 508 requirement")
}

View file

@ -713,6 +713,27 @@ impl EnvVars {
#[attr_hidden]
pub const UV_RUN_MAX_RECURSION_DEPTH: &'static str = "UV_RUN_MAX_RECURSION_DEPTH";
    /// Whether `uv upgrade` should search recursively for pyproject.toml files.
#[attr_hidden]
pub const UV_UPGRADE_RECURSIVE: &'static str = "UV_UPGRADE_RECURSIVE";
/// Which pyproject.toml tables should `uv upgrade` search?
///
/// Default `prod,dev,optional,groups`.
///
/// * prod: `project.dependencies`
/// * dev: `tool.uv.dev-dependencies`
/// * optional: `project.optional-dependencies`
/// * groups: `dependency-groups`
#[attr_hidden]
pub const UV_UPGRADE_TYPES: &'static str = "UV_UPGRADE_TYPES";
/// Which version digits are allowed to change? Others will be skipped.
///
/// Default `1,2,3,4` (major, minor, patch, build number).
#[attr_hidden]
pub const UV_UPGRADE_ALLOW: &'static str = "UV_UPGRADE_ALLOW";
/// Overrides terminal width used for wrapping. This variable is not read by uv directly.
///
/// This is a quasi-standard variable, described, e.g., in `ncurses(3x)`.

View file

@ -1687,6 +1687,40 @@ pub enum DependencyType {
Group(GroupName),
}
impl DependencyType {
pub fn iter() -> [Self; 4] {
[
Self::Production,
Self::Dev,
Self::Optional(ExtraName::from_str("e").ok().unwrap()),
Self::Group(GroupName::from_str("g").ok().unwrap()),
]
}
}
impl FromStr for DependencyType {
type Err = DependencyTypeError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
// case-insensitive, allow abbreviations
match s.to_lowercase().as_str() {
"prod" | "prd" | "p" => Ok(Self::Production),
"dev" | "d" => Ok(Self::Dev),
"optional" | "opt" | "o" | "extra" | "e" => {
Ok(Self::Optional(ExtraName::from_str("e").ok().unwrap()))
}
"groups" | "group" | "g" => Ok(Self::Group(GroupName::from_str("g").ok().unwrap())),
_ => Err(DependencyTypeError::Unknown(s.to_string())),
}
}
}
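A short, hedged sketch of the accepted spellings (hypothetical assertions; the `Optional`/`Group` variants carry the same placeholder names as `iter`):

```
#[test]
fn dependency_type_from_str_sketch() {
    use std::str::FromStr;

    // Matching is case-insensitive and accepts abbreviations.
    assert!(matches!(DependencyType::from_str("prod"), Ok(DependencyType::Production)));
    assert!(matches!(DependencyType::from_str("D"), Ok(DependencyType::Dev)));
    assert!(matches!(DependencyType::from_str("extra"), Ok(DependencyType::Optional(_))));
    assert!(matches!(DependencyType::from_str("g"), Ok(DependencyType::Group(_))));
    // Anything else is rejected with `DependencyTypeError::Unknown`.
    assert!(DependencyType::from_str("test").is_err());
}
```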
#[derive(Debug, Error)]
pub enum DependencyTypeError {
    #[error("unknown dependency type: `{0}` (allowed: `prod,dev,optional,groups`)")]
Unknown(String),
}
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(test, derive(Serialize))]
pub struct BuildBackendSettingsSchema;

View file

@ -1,19 +1,21 @@
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fmt::{Display, Formatter};
use std::path::Path;
use std::str::FromStr;
use std::{fmt, iter, mem};
use thiserror::Error;
use toml_edit::{
Array, ArrayOfTables, DocumentMut, Formatted, Item, RawString, Table, TomlError, Value,
Array, ArrayOfTables, DocumentMut, Formatted, Item, RawString, Table, TableLike, TomlError,
Value,
};
use uv_cache_key::CanonicalUrl;
use uv_distribution_types::Index;
use uv_distribution_types::{Index, RequiresPython};
use uv_fs::PortablePath;
use uv_normalize::GroupName;
use uv_pep440::{Version, VersionParseError, VersionSpecifier, VersionSpecifiers};
use uv_pep440::{Version, VersionDigit, VersionParseError, VersionSpecifier, VersionSpecifiers};
use uv_pep508::{ExtraName, MarkerTree, PackageName, Requirement, VersionOrUrl};
use uv_redacted::DisplaySafeUrl;
@ -258,6 +260,39 @@ pub enum DependencyTarget {
PyProjectToml,
}
type UpgradeResult = (
usize,
String,
Requirement,
Requirement,
Version,
bool,
DependencyType,
Option<usize>,
Option<RequiresPython>,
);
type VersionedPackages = FxHashMap<Option<RequiresPython>, FxHashSet<PackageName>>;
#[derive(Debug, Default, Clone, Eq, PartialEq)]
pub struct PackageVersions {
versions: HashMap<PackageName, HashMap<Option<RequiresPython>, Version>>,
}
impl PackageVersions {
pub fn insert(&mut self, name: PackageName, version: Version, python: Option<RequiresPython>) {
self.versions
.entry(name)
.or_default()
.insert(python, version);
}
#[allow(clippy::ref_option)]
fn find(&self, name: &PackageName, python: &Option<RequiresPython>) -> Option<&Version> {
self.versions.get(name).and_then(|v| v.get(python))
}
}
impl PyProjectTomlMut {
/// Initialize a [`PyProjectTomlMut`] from a [`str`].
pub fn from_toml(raw: &str, target: DependencyTarget) -> Result<Self, Error> {
@ -330,6 +365,13 @@ impl PyProjectTomlMut {
Ok(doc)
}
pub fn get_requires_python(&self) -> Option<&str> {
self.doc
.get("project")
.and_then(|project| project.get("requires-python"))
.and_then(|item| item.as_str())
}
/// Adds a dependency to `project.dependencies`.
///
/// Returns `true` if the dependency was added, `false` if it was updated.
@ -1034,8 +1076,8 @@ impl PyProjectTomlMut {
/// Returns all the places in this `pyproject.toml` that contain a dependency with the given
/// name.
///
/// This method searches `project.dependencies`, `tool.uv.dev-dependencies`, and
/// `tool.uv.optional-dependencies`.
/// This method searches `project.dependencies`, `project.optional-dependencies`,
/// `dependency-groups` and `tool.uv.dev-dependencies`.
pub fn find_dependency(
&self,
name: &PackageName,
@ -1105,6 +1147,364 @@ impl PyProjectTomlMut {
types
}
    /// Returns the package names of all dependencies that carry version constraints, grouped by
    /// their effective `requires-python`.
///
/// This method searches `project.dependencies`, `project.optional-dependencies`,
/// `dependency-groups` and `tool.uv.dev-dependencies`.
pub fn find_versioned_dependencies(&self) -> VersionedPackages {
let mut versioned_dependencies = FxHashMap::default();
if let Some(project) = self.doc.get("project").and_then(Item::as_table) {
// Check `project.dependencies`.
if let Some(dependencies) = project.get("dependencies").and_then(Item::as_array) {
Self::extract_names_if_versioned(&mut versioned_dependencies, dependencies, None);
}
// Check `project.optional-dependencies`.
if let Some(extras) = project
.get("optional-dependencies")
.and_then(Item::as_table)
{
for (extra, dependencies) in extras {
let Some(dependencies) = dependencies.as_array() else {
continue;
};
let Ok(_extra) = ExtraName::from_str(extra) else {
continue;
};
Self::extract_names_if_versioned(
&mut versioned_dependencies,
dependencies,
None,
);
}
}
}
// Check `dependency-groups`.
if let Some(groups) = self.doc.get("dependency-groups").and_then(Item::as_table) {
let group_requires_python = self.get_uv_tool_dep_groups_requires_python();
for (group, dependencies) in groups {
let Ok(_group) = GroupName::from_str(group) else {
continue;
};
let Some(dependencies) = dependencies.as_array() else {
continue;
};
let requires_python = group_requires_python.get(&_group.to_string());
Self::extract_names_if_versioned(
&mut versioned_dependencies,
dependencies,
requires_python,
);
}
}
// Check `tool.uv.dev-dependencies`.
if let Some(dependencies) = self
.doc
.get("tool")
.and_then(Item::as_table)
.and_then(|tool| tool.get("uv"))
.and_then(Item::as_table)
.and_then(|uv| uv.get("dev-dependencies"))
.and_then(Item::as_array)
{
Self::extract_names_if_versioned(&mut versioned_dependencies, dependencies, None);
}
versioned_dependencies
}
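To make the grouping concrete, a hedged sketch (assuming a test with `PyProjectTomlMut` and `DependencyTarget` in scope; the key is `None` here because no per-group `requires-python` is declared):

```
#[test]
fn find_versioned_dependencies_sketch() {
    let toml = PyProjectTomlMut::from_toml(
        r#"
[project]
dependencies = ["requests<3", "rich"]

[dependency-groups]
lint = ["ruff>=0.4"]
"#,
        DependencyTarget::PyProjectToml,
    )
    .unwrap();
    // `rich` has no version constraint and is skipped; `requests` and `ruff`
    // both land under the single `None` requires-python key.
    let versioned = toml.find_versioned_dependencies();
    assert_eq!(versioned.len(), 1);
    assert_eq!(versioned.values().map(|set| set.len()).sum::<usize>(), 2);
}
```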
fn extract_names_if_versioned(
packages: &mut VersionedPackages,
dependencies: &Array,
group_requires: Option<&RequiresPython>,
) {
let mut found = FxHashMap::default();
for dep in dependencies {
let Some(req) = dep.as_str().and_then(try_parse_requirement) else {
continue;
};
// Skip requirements without version constraints
if req.version_or_url.is_none() {
continue;
}
found
.entry(group_requires)
.or_insert_with(FxHashSet::default)
.insert(req.name);
}
for (requires, names) in found {
packages.entry(requires.cloned()).or_default().extend(names);
}
}
    /// Upgrades all dependencies in this `pyproject.toml`, returning the applied upgrades together
    /// with how many dependencies were found and how many upgrades were skipped.
///
/// This method searches `project.dependencies`, `project.optional-dependencies`,
/// `dependency-groups` and `tool.uv.dev-dependencies`.
pub fn upgrade_all_dependencies(
&mut self,
latest_versions: &PackageVersions,
tables: &[DependencyType],
allow: &[usize],
skipped: &mut VersionDigit,
requires_python: &Option<RequiresPython>,
) -> (Vec<UpgradeResult>, usize, usize) {
let mut all_upgrades = Vec::new();
let mut all_found = 0;
let mut all_skipped = 0;
// Check `project.dependencies`
if let Some(item) = tables
.contains(&DependencyType::Production)
.then(|| {
self.project_mut()
.ok()
.flatten()
.and_then(|p| p.get_mut("dependencies"))
})
.flatten()
{
let (found, count_skipped) = Self::replace_dependencies(
latest_versions,
&mut all_upgrades,
item,
&DependencyType::Production,
allow,
skipped,
requires_python,
);
all_found += found;
all_skipped += count_skipped;
}
// Check `project.optional-dependencies`
if let Some(groups) = tables
.iter()
.find(|t| matches!(t, DependencyType::Optional(_)))
.and_then(|_| {
self.project_mut()
.ok()
.flatten()
.and_then(|p| p.get_mut("optional-dependencies"))
.and_then(Item::as_table_like_mut)
})
{
for (extra, item) in groups
.iter_mut()
.map(|(key, value)| (ExtraName::from_str(key.get()).ok(), value))
{
if let Some(_extra) = extra {
let (found, count_skipped) = Self::replace_dependencies(
latest_versions,
&mut all_upgrades,
item,
&DependencyType::Optional(_extra),
allow,
skipped,
requires_python,
);
all_found += found;
all_skipped += count_skipped;
}
}
}
// Check `dependency-groups`.
if tables.iter().any(|t| matches!(t, DependencyType::Group(_))) {
let group_requires_python = self.get_uv_tool_dep_groups_requires_python();
let dep_groups = self
.doc
.get_mut("dependency-groups")
.and_then(Item::as_table_like_mut);
if let Some(groups) = dep_groups {
for (group, item) in groups
.iter_mut()
.map(|(key, value)| (GroupName::from_str(key.get()).ok(), value))
{
if let Some(_group) = group {
let python = group_requires_python
.get(&_group.to_string())
.or(requires_python.as_ref());
let (found, count_skipped) = Self::replace_dependencies(
latest_versions,
&mut all_upgrades,
item,
&DependencyType::Group(_group),
allow,
skipped,
&python.cloned(),
);
all_found += found;
all_skipped += count_skipped;
}
}
}
}
// Check `tool.uv.dev-dependencies`
if let Some(item) = tables
.contains(&DependencyType::Dev)
.then(|| {
self.doc
.get_mut("tool")
.and_then(Item::as_table_mut)
.and_then(|tool| tool.get_mut("uv"))
.and_then(Item::as_table_mut)
.and_then(|uv| uv.get_mut("dev-dependencies"))
})
.flatten()
{
let (found, count_skipped) = Self::replace_dependencies(
latest_versions,
&mut all_upgrades,
item,
&DependencyType::Dev,
allow,
skipped,
requires_python,
);
all_found += found;
all_skipped += count_skipped;
}
(all_upgrades, all_found, all_skipped)
}
fn get_uv_tool_dep_groups_requires_python(&self) -> FxHashMap<String, RequiresPython> {
self.doc
.get("tool")
.and_then(Item::as_table_like)
.and_then(|tool| tool.get("uv").and_then(Item::as_table_like))
.and_then(|uv| uv.get("dependency-groups").and_then(Item::as_table_like))
.map(Self::map_requires_python)
.unwrap_or_default()
}
fn map_requires_python(groups: &dyn TableLike) -> FxHashMap<String, RequiresPython> {
groups
.get_values()
.iter()
.filter_map(|(keys, value)| {
value
.as_inline_table()
.and_then(|i| i.get("requires-python"))
.and_then(|requires| {
requires
.as_str()
.and_then(|v| VersionSpecifiers::from_str(v).ok())
})
.map(|specifiers| {
(
keys.iter().join("."),
RequiresPython::from_specifiers(&specifiers),
)
})
})
.collect()
}
#[allow(clippy::ref_option)]
fn replace_dependencies(
latest_versions: &PackageVersions,
all_upgrades: &mut Vec<UpgradeResult>,
item: &mut Item,
dependency_type: &DependencyType,
allow: &[usize],
skipped: &mut VersionDigit,
requires_python: &Option<RequiresPython>,
) -> (usize, usize) {
if let Some(dependencies) = item.as_array_mut().filter(|d| !d.is_empty()) {
return Self::replace_upgrades(
latest_versions,
all_upgrades,
dependencies,
dependency_type,
allow,
skipped,
requires_python,
);
}
(0, 0)
}
#[allow(clippy::ref_option)]
fn find_upgrades(
latest_versions: &PackageVersions,
dependencies: &mut Array,
dependency_type: &DependencyType,
allow: &[usize],
skipped: &mut VersionDigit,
requires_python: &Option<RequiresPython>,
) -> (Vec<UpgradeResult>, usize, usize) {
let mut upgrades = Vec::new();
let mut count_skipped = 0;
let mut found = 0;
for (i, dep) in dependencies.iter().enumerate() {
let Some(mut req) = dep.as_str().and_then(try_parse_requirement) else {
continue;
};
found += 1;
let old = req.clone();
// Skip requirements without version constraints
let Some(VersionOrUrl::VersionSpecifier(mut version_specifiers)) = req.version_or_url
else {
continue;
};
if let Some(upgrade) = latest_versions.find(&old.name, requires_python) {
let (bumped, upgraded, semver, skip) =
version_specifiers.bump_last(upgrade, allow, skipped);
if bumped {
req.version_or_url = Some(VersionOrUrl::VersionSpecifier(version_specifiers));
upgrades.push((
i,
dep.as_str().unwrap().to_string(),
old,
req,
upgrade.clone(),
upgraded,
dependency_type.clone(),
semver,
requires_python.clone(),
));
} else if skip {
count_skipped += 1;
}
} else {
panic!("Error: No latest found for {}", old.name)
}
}
(upgrades, found, count_skipped)
}
#[allow(clippy::ref_option)]
fn replace_upgrades(
latest_versions: &PackageVersions,
all_upgrades: &mut Vec<UpgradeResult>,
dependencies: &mut Array,
dependency_type: &DependencyType,
allow: &[usize],
skipped: &mut VersionDigit,
requires_python: &Option<RequiresPython>,
) -> (usize, usize) {
let (upgrades, found, count_skipped) = Self::find_upgrades(
latest_versions,
dependencies,
dependency_type,
allow,
skipped,
requires_python,
);
for (i, _dep, _old, new, _upgrade, _upgraded, _, _, _) in &upgrades {
let string = new.to_string();
dependencies.replace(*i, toml_edit::Value::from(string));
}
all_upgrades.extend(upgrades);
(found, count_skipped)
}
pub fn version(&mut self) -> Result<Version, Error> {
let version = self
.doc
@ -1468,7 +1868,7 @@ fn remove_dependency(name: &PackageName, deps: &mut Array) -> Vec<Requirement> {
removed
}
/// Returns a `Vec` containing the all dependencies with the given name, along with their positions
/// Returns a `Vec` containing all dependencies with the given name, along with their positions
/// in the array.
fn find_dependencies(
name: &PackageName,

View file

@ -83,6 +83,7 @@ jiff = { workspace = true }
miette = { workspace = true, features = ["fancy-no-backtrace"] }
owo-colors = { workspace = true }
petgraph = { workspace = true }
prettytable-rs = "0.10.0"
regex = { workspace = true }
reqwest = { workspace = true }
rkyv = { workspace = true }

View file

@ -29,6 +29,7 @@ pub(crate) use project::remove::remove;
pub(crate) use project::run::{RunCommand, run};
pub(crate) use project::sync::sync;
pub(crate) use project::tree::tree;
pub(crate) use project::update::upgrade_project_dependencies;
pub(crate) use project::version::{project_version, self_version};
pub(crate) use publish::publish;
pub(crate) use python::dir::dir as python_dir;

View file

@ -68,6 +68,7 @@ pub(crate) mod remove;
pub(crate) mod run;
pub(crate) mod sync;
pub(crate) mod tree;
pub(crate) mod update;
pub(crate) mod version;
#[derive(thiserror::Error, Debug)]

View file

@ -383,7 +383,7 @@ pub(crate) async fn remove(
Ok(ExitStatus::Success)
}
/// Represents the destination where dependencies are added, either to a project or a script.
/// Represents the destination where dependencies are removed, either from a project or a script.
#[derive(Debug)]
enum RemoveTarget {
/// A PEP 723 script, with inline metadata.

View file

@ -0,0 +1,480 @@
use crate::commands::ExitStatus;
use crate::commands::pip::latest::LatestClient;
use crate::printer::Printer;
use anyhow::Result;
use futures::StreamExt;
use itertools::Itertools;
use owo_colors::OwoColorize;
use prettytable::format::FormatBuilder;
use prettytable::row;
use rustc_hash::{FxHashMap, FxHashSet};
use std::cmp::min;
use std::collections::BTreeMap;
use std::ffi::OsStr;
use std::fmt::Write;
use std::io::ErrorKind;
use std::path::Path;
use std::str::FromStr;
use std::sync::LazyLock;
use tokio::sync::Semaphore;
use uv_cache::Cache;
use uv_cli::{Maybe, UpgradeProjectArgs};
use uv_client::{BaseClientBuilder, RegistryClient, RegistryClientBuilder};
use uv_configuration::Concurrency;
use uv_distribution_filename::DistFilename;
use uv_distribution_types::{IndexCapabilities, IndexLocations, RequiresPython};
use uv_pep440::{Version, VersionDigit};
use uv_pep508::{PackageName, Requirement};
use uv_resolver::PrereleaseMode;
use uv_warnings::warn_user;
use uv_workspace::pyproject::DependencyType;
use uv_workspace::pyproject_mut::{DependencyTarget, PackageVersions, PyProjectTomlMut};
use walkdir::WalkDir;
/// Upgrade all dependencies in the project requirements (pyproject.toml).
///
/// This doesn't read or modify `uv.lock`; only constraints like `<1.0` are bumped.
pub(crate) async fn upgrade_project_dependencies(
args: UpgradeProjectArgs,
cache: Cache,
) -> Result<ExitStatus> {
let tables: Vec<_> = match args
.types
.iter()
.filter_map(|t| t.clone().into_option())
.collect::<Vec<_>>()
{
tables if !tables.is_empty() => tables,
_ => DependencyType::iter().to_vec(),
};
let allow: Vec<_> = match args
.allow
.iter()
.filter_map(|t| t.clone().into_option())
.collect::<Vec<_>>()
{
allow if !allow.is_empty() => allow,
_ => vec![1, 2, 3, 4],
};
let only_packages = !args.requirements.is_empty();
let tomls = if only_packages {
vec![String::new()]
} else {
match args
.recursive
.then(|| search_pyproject_tomls(Path::new(".")))
{
None => vec![String::new()], // recursive=false or no pyproject.toml files found
Some(Ok(tomls)) => tomls,
Some(Err(err)) => return Err(err), // error searching pyproject.toml files
}
};
let printer = Printer::Default;
let info = format!("{}{}", "info".cyan().bold(), ":".bold());
let uv_sync = format!("{}", "`uv sync -U`".green().bold());
let capabilities = IndexCapabilities::default();
let client_builder = BaseClientBuilder::new();
// Initialize the registry client.
let client = RegistryClientBuilder::try_from(client_builder)?
.cache(cache)
.index_locations(&IndexLocations::default())
.build();
let concurrency = Concurrency::default();
let mut item_written = false;
let mut all_found = 0;
let mut all_bumped = 0;
let mut files_bumped = 0;
let mut all_count_skipped = 0;
let mut all_skipped = VersionDigit::default();
// 1. args (override) 2. group (tool.uv.dependency-groups) 3. toml (project.requires-python)
let python_args = args
.python
.clone()
.and_then(Maybe::into_option)
.and_then(|v| RequiresPython::from_str(&v).ok());
let mut all_versioned = FxHashMap::default();
let mut toml_contents = BTreeMap::default();
let packages: Vec<_> = args
.requirements
.iter()
.filter_map(|r| {
let requirement = r.clone().into_option().expect("no req");
requirement.version_or_url.as_ref()?; // Skip unversioned requirements
Some(format!("\"{requirement}\""))
})
.collect();
for toml_dir in &tomls {
let toml = if only_packages {
if packages.is_empty() {
warn_user!("No versioned dependencies found in packages");
return Ok(ExitStatus::Error);
}
let content = format!("[project]\ndependencies = [\n{}\n]", packages.join(",\n"));
match PyProjectTomlMut::from_toml(&content, DependencyTarget::PyProjectToml) {
Ok(p) => p,
Err(err) => {
warn_user!("Couldn't parse packages: {}", err.to_string());
return Ok(ExitStatus::Error);
}
}
} else {
let pyproject_toml = Path::new(toml_dir).join("pyproject.toml");
read_pyproject_toml(&pyproject_toml).await?
};
let versioned = toml.find_versioned_dependencies();
if versioned.is_empty() {
continue; // Skip pyproject.toml without versioned dependencies
}
let python_toml = get_requires_python(&toml);
for (python_group, packages) in versioned {
let python = python_args.clone().or(python_group).or(python_toml.clone());
all_versioned
.entry(python)
.or_insert_with(FxHashSet::default)
.extend(packages);
}
toml_contents.insert(toml_dir, toml);
}
let mut package_versions = PackageVersions::default();
for (requires_python, packages) in all_versioned {
let latest_versions = find_latest(
&client,
&capabilities,
requires_python.clone(),
&packages,
concurrency.downloads,
)
.await;
// A package can be downloaded multiple times (one time per requires_python)
for (name, version) in latest_versions {
package_versions.insert(name.clone(), version, requires_python.clone());
}
}
for (toml_dir, toml) in &mut toml_contents {
let pyproject_toml = Path::new(*toml_dir).join("pyproject.toml");
let relative = if toml_dir.is_empty() || *toml_dir == "." {
String::new()
} else {
format!("{}/", &toml_dir[2..])
};
let subpath = format!("{relative}pyproject.toml");
let mut skipped = VersionDigit::default();
let python_toml = get_requires_python(toml);
let requires_python = python_args.clone().or(python_toml);
let (upgrades, found, count_skipped) = toml.upgrade_all_dependencies(
&package_versions,
&tables,
&allow,
&mut skipped,
&requires_python,
);
all_skipped.add_other(&skipped);
all_count_skipped += count_skipped;
let bumped = upgrades.len();
all_found += found;
all_bumped += bumped;
files_bumped += min(bumped, 1);
if upgrades.is_empty() {
if args.recursive && bumped == 0 {
if !skipped.is_empty() {
writeln!(
printer.stderr(),
"{info} Skipped {skipped} ({count_skipped} upgrades) of {} in {subpath}",
plural(found, "dependency"),
)?;
}
continue; // Skip intermediate messages if nothing was changed
}
if found == 0 {
writeln!(
printer.stderr(),
"{info} No dependencies found in {subpath}"
)?;
} else {
writeln!(
printer.stderr(),
"{info} No upgrades found for {} in {subpath}, check manually if not committed yet{}",
plural(found, "dependency"),
skipped.format(
" (skipped ",
&format!(" of {})", plural(count_skipped, "upgrade"))
)
)?;
}
continue;
}
if item_written {
            writeln!(printer.stderr())?;
}
item_written = false;
let mut table = prettytable::Table::new();
table.set_format(FormatBuilder::new().column_separator(' ').build());
let dry_run = format!(
"{} {subpath}",
if args.dry_run { "dry-run" } else { "upgraded" }
);
table.add_row(
row![r->"#", rb->"name", Fr->"-old", bFg->"+new", "latest", "S", "type", "py", dry_run],
); // diff-like
let remove_spaces = |v: &Requirement| {
v.clone()
.version_or_url
.unwrap()
.to_string()
.replace(' ', "")
};
upgrades
.iter()
.enumerate()
.for_each(|(i, (_, _dep, old, new, version, upgraded, dependency_type, semver_change, python))| {
let from = remove_spaces(old);
let to = remove_spaces(new);
let upordown = if *upgraded { "✅ up" } else { "❌ down" };
let _type = match dependency_type {
DependencyType::Production => "prod".into(),
DependencyType::Dev => "dev".into(),
DependencyType::Optional(extra) => format!("{extra} [extra]"),
DependencyType::Group(group) => format!("{group} [group]"),
};
let semver = semver_change.map_or(String::new(), |s| s.to_string());
let _python = format_requires_python(python.clone());
table.add_row(
row![r->i + 1, rb->old.name, Fr->from, bFg->to, version.to_string(), semver, _type, _python, upordown],
);
});
table.printstd();
if only_packages {
writeln!(
printer.stderr(),
"{info} Upgraded {bumped} of {} 🚀{}",
plural(found, "package"),
skipped.format(
" (skipped ",
&format!(" of {})", plural(count_skipped, "upgrade"))
)
)?;
} else if !args.dry_run {
if let Err(err) = fs_err::tokio::write(pyproject_toml, toml.to_string()).await {
return Err(err.into());
}
writeln!(
printer.stderr(),
"{info} Upgraded {bumped}/{found} in {subpath} 🚀 Check manually, update {uv_sync} and run tests{}",
skipped.format(
" (skipped ",
&format!(" of {})", plural(count_skipped, "upgrade"))
)
)?;
} else if !skipped.is_empty() {
writeln!(
printer.stderr(),
"{info} Skipped {skipped} ({}), upgraded {bumped} of {} in {subpath}",
plural(count_skipped, "upgrade"),
plural(found, "dependency"),
)?;
}
        item_written = true;
}
let files = plural(tomls.len(), "file");
if args.recursive && files_bumped != 1 {
if tomls.is_empty() {
warn_user!("No pyproject.toml files found recursively");
return Ok(ExitStatus::Error);
} else if all_bumped == 0 {
if all_found == 0 {
writeln!(
printer.stderr(),
"{info} No dependencies in {files} found recursively"
)?;
} else if !all_skipped.is_empty() {
writeln!(
printer.stderr(),
"{info} Skipped {all_skipped} ({}), {} in {files} not upgraded for --allow={}",
plural(all_count_skipped, "upgrade"),
plural(all_found, "dependency"),
format_allow(&allow)
)?;
} else {
writeln!(
printer.stderr(),
"{info} No upgrades in {} and {files} found, check manually if not committed yet",
plural(all_found, "dependency"),
)?;
}
} else if !all_skipped.is_empty() {
writeln!(
printer.stderr(),
"{info} Total: Skipped {all_skipped} ({}), upgraded {all_bumped} of {} for --allow={}",
plural(all_count_skipped, "upgrade"),
plural(all_found, "dependency"),
format_allow(&allow)
)?;
} else {
writeln!(
printer.stderr(),
"{info} Total: Upgraded {all_bumped}/{} in {files} 🚀 Check manually, update {uv_sync} and run tests{}",
plural(all_found, "dependency"),
all_skipped.format(
" (skipped ",
&format!(" of {})", plural(all_count_skipped, "upgrade"))
)
)?;
}
}
Ok(ExitStatus::Success)
}
fn plural(count: usize, word: &str) -> String {
if count != 1 && word.ends_with('y') {
format!("{count} {}ies", &word[..word.len() - 1])
} else {
format!("{count} {word}{}", if count == 1 { "" } else { "s" })
}
}
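For reference, the `y` → `ies` rule only applies to counts other than one (a small sketch, assuming a test next to this helper):

```
#[test]
fn plural_sketch() {
    assert_eq!(plural(1, "dependency"), "1 dependency");
    assert_eq!(plural(3, "dependency"), "3 dependencies");
    assert_eq!(plural(0, "file"), "0 files");
    assert_eq!(plural(1, "upgrade"), "1 upgrade");
}
```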
fn get_requires_python(toml: &PyProjectTomlMut) -> Option<RequiresPython> {
toml.get_requires_python()
.map(RequiresPython::from_str)
.transpose()
.ok()
.flatten()
}
fn format_requires_python(python: Option<RequiresPython>) -> String {
match python.map(|r| r.remove_zeroes()) {
Some(s) if s == ">4" => String::new(), // hide default value
Some(s) => s,
_ => String::new(),
}
}
fn format_allow(allow: &[usize]) -> String {
allow
.iter()
.sorted()
.map(std::string::ToString::to_string)
.collect::<Vec<_>>()
.join(",")
}
async fn read_pyproject_toml(pyproject_toml: &Path) -> Result<PyProjectTomlMut, anyhow::Error> {
let content = match fs_err::tokio::read_to_string(pyproject_toml.to_path_buf()).await {
Ok(content) => content,
Err(err) => {
if err.kind() == ErrorKind::NotFound {
warn_user!(
"Could not find {}",
pyproject_toml.to_str().expect("path not UTF-8")
);
} else {
warn_user!(
"Could not read {}",
pyproject_toml.to_str().expect("path not UTF-8")
);
}
return Err(anyhow::Error::from(err));
}
};
let toml = match PyProjectTomlMut::from_toml(&content, DependencyTarget::PyProjectToml) {
Ok(toml) => toml,
Err(err) => {
warn_user!("Could not parse pyproject.toml: {}", err);
return Err(anyhow::Error::from(err));
}
};
Ok(toml)
}
async fn find_latest<'a>(
client: &RegistryClient,
capabilities: &IndexCapabilities,
requires_python: Option<RequiresPython>,
names: &'a FxHashSet<PackageName>,
downloads: usize,
) -> FxHashMap<&'a PackageName, Version> {
static DEFAULT_PYTHON: LazyLock<RequiresPython> =
LazyLock::new(|| RequiresPython::from_str(">4").ok().unwrap());
let latest_client = LatestClient {
client,
capabilities,
prerelease: PrereleaseMode::Disallow,
exclude_newer: None,
tags: None,
requires_python: requires_python.as_ref().unwrap_or_else(|| &*DEFAULT_PYTHON),
};
let download_concurrency = Semaphore::new(downloads);
let mut fetches = futures::stream::iter(names)
.map(async |package| {
let latest = latest_client
.find_latest(package, None, &download_concurrency)
.await?;
Ok::<(&PackageName, Option<DistFilename>), uv_client::Error>((package, latest))
})
.buffer_unordered(downloads);
let mut map = FxHashMap::default();
while let Ok(Some((package, version))) = fetches.next().await.transpose() {
if let Some(version) = version.as_ref() {
map.insert(package, version.clone().into_version());
}
}
map
}
/// Recursively search for pyproject.toml files.
fn search_pyproject_tomls(root: &Path) -> Result<Vec<String>, anyhow::Error> {
let metadata = match fs_err::symlink_metadata(root) {
Ok(metadata) => metadata,
Err(err) if err.kind() == ErrorKind::NotFound => return Ok(vec![]),
Err(err) => return Err(anyhow::Error::from(err)),
};
if !metadata.is_dir() {
return Ok(vec![]);
}
// Hint: Doesn't skip special folders like `build`, `dist` or `target`
let is_hidden_or_not_pyproject = |path: &Path| {
path.file_name().and_then(OsStr::to_str).is_some_and(|s| {
s.starts_with('.') || s.starts_with('_') || path.is_file() && s != "pyproject.toml"
})
};
let mut matches: Vec<_> = WalkDir::new(root)
.sort_by_file_name()
.into_iter()
.filter_entry(|entry| {
// TODO(konsti): This should be prettier.
let relative = entry
.path()
.strip_prefix(root)
.expect("walkdir starts with root");
let hidden = is_hidden_or_not_pyproject(relative);
!hidden
})
.filter_map(|entry| {
let path = entry.as_ref().unwrap().path();
if path.is_dir() {
None
} else {
Some(path.parent().unwrap().to_str().unwrap().to_string())
}
})
.collect();
matches.sort();
Ok(matches)
}

View file

@ -1,15 +1,3 @@
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::ffi::OsString;
use std::fmt::Write;
use std::io::stdout;
#[cfg(feature = "self-update")]
use std::ops::Bound;
use std::path::Path;
use std::process::ExitCode;
use std::str::FromStr;
use std::sync::atomic::Ordering;
use anstream::eprintln;
use anyhow::{Context, Result, bail};
use clap::error::{ContextKind, ContextValue};
@ -17,6 +5,19 @@ use clap::{CommandFactory, Parser};
use futures::FutureExt;
use owo_colors::OwoColorize;
use settings::PipTreeSettings;
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::ffi::OsString;
use std::fmt::Write;
use std::io::stdout;
#[cfg(feature = "self-update")]
use std::ops::Bound;
use std::ops::Sub;
use std::path::Path;
use std::process::ExitCode;
use std::str::FromStr;
use std::sync::atomic::Ordering;
use std::time::{Duration, SystemTime};
use tokio::task::spawn_blocking;
use tracing::{debug, instrument};
@ -2071,6 +2072,23 @@ async fn run_project(
))
.await
}
ProjectCommand::Upgrade(args) => {
// --refresh -> now() -> uncached
// --no-refresh -> cached (overrides --refresh)
// otherwise: cache subsequent runs for 10 minutes
let timestamp = Timestamp::from(SystemTime::now().sub(Duration::from_secs(60 * 10)));
let refresh_package = args.refresh.refresh_package.clone();
            let refresh = if args.refresh.refresh || args.refresh.no_refresh {
Refresh::from_args(Some(!args.refresh.no_refresh), refresh_package)
} else if refresh_package.is_empty() {
Refresh::All(timestamp) // user didn't pass flags or package
} else {
Refresh::Packages(refresh_package, vec![], timestamp)
};
            let cache = cache.init()?.with_refresh(refresh);
Box::pin(commands::upgrade_project_dependencies(args, cache)).await
}
ProjectCommand::Tree(args) => {
// Resolve the settings from the command-line arguments and workspace configuration.
let args = settings::TreeSettings::resolve(args, filesystem);

View file

@ -21,6 +21,7 @@ fn help() {
add Add dependencies to the project
remove Remove dependencies from the project
version Read or update the project's version
upgrade Upgrade the project's dependency constraints
sync Update the project's environment
lock Update the project's lockfile
export Export the project's lockfile to an alternate format
@ -101,6 +102,7 @@ fn help_flag() {
add Add dependencies to the project
remove Remove dependencies from the project
version Read or update the project's version
upgrade Upgrade the project's dependency constraints
sync Update the project's environment
lock Update the project's lockfile
export Export the project's lockfile to an alternate format
@ -179,6 +181,7 @@ fn help_short_flag() {
add Add dependencies to the project
remove Remove dependencies from the project
version Read or update the project's version
upgrade Upgrade the project's dependency constraints
sync Update the project's environment
lock Update the project's lockfile
export Export the project's lockfile to an alternate format
@ -863,6 +866,7 @@ fn help_unknown_subcommand() {
add
remove
version
upgrade
sync
lock
export
@ -890,6 +894,7 @@ fn help_unknown_subcommand() {
add
remove
version
upgrade
sync
lock
export
@ -945,6 +950,7 @@ fn help_with_global_option() {
add Add dependencies to the project
remove Remove dependencies from the project
version Read or update the project's version
upgrade Upgrade the project's dependency constraints
sync Update the project's environment
lock Update the project's lockfile
export Export the project's lockfile to an alternate format
@ -1066,6 +1072,7 @@ fn help_with_no_pager() {
add Add dependencies to the project
remove Remove dependencies from the project
version Read or update the project's version
upgrade Upgrade the project's dependency constraints
sync Update the project's environment
lock Update the project's lockfile
export Export the project's lockfile to an alternate format

View file

@ -17,6 +17,7 @@ uv [OPTIONS] <COMMAND>
<dt><a href="#uv-add"><code>uv add</code></a></dt><dd><p>Add dependencies to the project</p></dd>
<dt><a href="#uv-remove"><code>uv remove</code></a></dt><dd><p>Remove dependencies from the project</p></dd>
<dt><a href="#uv-version"><code>uv version</code></a></dt><dd><p>Read or update the project's version</p></dd>
<dt><a href="#uv-upgrade"><code>uv upgrade</code></a></dt><dd><p>Upgrade the project's dependency constraints</p></dd>
<dt><a href="#uv-sync"><code>uv sync</code></a></dt><dd><p>Update the project's environment</p></dd>
<dt><a href="#uv-lock"><code>uv lock</code></a></dt><dd><p>Update the project's lockfile</p></dd>
<dt><a href="#uv-export"><code>uv export</code></a></dt><dd><p>Export the project's lockfile to an alternate format</p></dd>
@ -934,6 +935,81 @@ uv version [OPTIONS] [VALUE]
<p>You can configure fine-grained logging using the <code>RUST_LOG</code> environment variable. (<a href="https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives">https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives</a>)</p>
</dd></dl>
## uv upgrade
Upgrade the project's dependency constraints
<h3 class="cli-reference">Usage</h3>
```
uv upgrade [OPTIONS] [REQUIREMENTS]...
```
<h3 class="cli-reference">Arguments</h3>
<dl class="cli-reference"><dt id="uv-upgrade--requirements"><a href="#uv-upgrade--requirements"><code>REQUIREMENTS</code></a></dt><dd><p>Upgrade only the given requirements (e.g. <code>uv&lt;0.5</code>) instead of pyproject.toml files</p>
</dd></dl>
<h3 class="cli-reference">Options</h3>
<dl class="cli-reference"><dt id="uv-upgrade--allow"><a href="#uv-upgrade--allow"><code>--allow</code></a> <i>allow</i></dt><dd><p>Allow only some version digits to change; others will be skipped: <code>1,2,3,4</code> (major, minor, patch, build number)</p>
<p>May also be set with the <code>UV_UPGRADE_ALLOW</code> environment variable.</p></dd><dt id="uv-upgrade--allow-insecure-host"><a href="#uv-upgrade--allow-insecure-host"><code>--allow-insecure-host</code></a>, <code>--trusted-host</code> <i>allow-insecure-host</i></dt><dd><p>Allow insecure connections to a host.</p>
<p>Can be provided multiple times.</p>
<p>Expects to receive either a hostname (e.g., <code>localhost</code>), a host-port pair (e.g., <code>localhost:8080</code>), or a URL (e.g., <code>https://localhost</code>).</p>
<p>WARNING: Hosts included in this list will not be verified against the system's certificate store. Only use <code>--allow-insecure-host</code> in a secure network with verified sources, as it bypasses SSL verification and could expose you to MITM attacks.</p>
<p>May also be set with the <code>UV_INSECURE_HOST</code> environment variable.</p></dd><dt id="uv-upgrade--cache-dir"><a href="#uv-upgrade--cache-dir"><code>--cache-dir</code></a> <i>cache-dir</i></dt><dd><p>Path to the cache directory.</p>
<p>Defaults to <code>$XDG_CACHE_HOME/uv</code> or <code>$HOME/.cache/uv</code> on macOS and Linux, and <code>%LOCALAPPDATA%\uv\cache</code> on Windows.</p>
<p>To view the location of the cache directory, run <code>uv cache dir</code>.</p>
<p>May also be set with the <code>UV_CACHE_DIR</code> environment variable.</p></dd><dt id="uv-upgrade--color"><a href="#uv-upgrade--color"><code>--color</code></a> <i>color-choice</i></dt><dd><p>Control the use of color in output.</p>
<p>By default, uv will automatically detect support for colors when writing to a terminal.</p>
<p>Possible values:</p>
<ul>
<li><code>auto</code>: Enables colored output only when the output is going to a terminal or TTY with support</li>
<li><code>always</code>: Enables colored output regardless of the detected environment</li>
<li><code>never</code>: Disables colored output</li>
</ul></dd><dt id="uv-upgrade--config-file"><a href="#uv-upgrade--config-file"><code>--config-file</code></a> <i>config-file</i></dt><dd><p>The path to a <code>uv.toml</code> file to use for configuration.</p>
<p>While uv configuration can be included in a <code>pyproject.toml</code> file, it is not allowed in this context.</p>
<p>May also be set with the <code>UV_CONFIG_FILE</code> environment variable.</p></dd><dt id="uv-upgrade--directory"><a href="#uv-upgrade--directory"><code>--directory</code></a> <i>directory</i></dt><dd><p>Change to the given directory prior to running the command.</p>
<p>Relative paths are resolved with the given directory as the base.</p>
<p>See <code>--project</code> to only change the project root directory.</p>
</dd><dt id="uv-upgrade--dry-run"><a href="#uv-upgrade--dry-run"><code>--dry-run</code></a></dt><dd><p>Run without performing the upgrades</p>
</dd><dt id="uv-upgrade--help"><a href="#uv-upgrade--help"><code>--help</code></a>, <code>-h</code></dt><dd><p>Display the concise help for this command</p>
</dd><dt id="uv-upgrade--managed-python"><a href="#uv-upgrade--managed-python"><code>--managed-python</code></a></dt><dd><p>Require use of uv-managed Python versions.</p>
<p>By default, uv prefers using Python versions it manages. However, it will use system Python versions if a uv-managed Python is not installed. This option disables use of system Python versions.</p>
<p>May also be set with the <code>UV_MANAGED_PYTHON</code> environment variable.</p></dd><dt id="uv-upgrade--native-tls"><a href="#uv-upgrade--native-tls"><code>--native-tls</code></a></dt><dd><p>Whether to load TLS certificates from the platform's native certificate store.</p>
<p>By default, uv loads certificates from the bundled <code>webpki-roots</code> crate. The <code>webpki-roots</code> are a reliable set of trust roots from Mozilla, and including them in uv improves portability and performance (especially on macOS).</p>
<p>However, in some cases, you may want to use the platform's native certificate store, especially if you're relying on a corporate trust root (e.g., for a mandatory proxy) that's included in your system's certificate store.</p>
<p>May also be set with the <code>UV_NATIVE_TLS</code> environment variable.</p></dd><dt id="uv-upgrade--no-cache"><a href="#uv-upgrade--no-cache"><code>--no-cache</code></a>, <code>--no-cache-dir</code>, <code>-n</code></dt><dd><p>Avoid reading from or writing to the cache, instead using a temporary directory for the duration of the operation</p>
<p>May also be set with the <code>UV_NO_CACHE</code> environment variable.</p></dd><dt id="uv-upgrade--no-config"><a href="#uv-upgrade--no-config"><code>--no-config</code></a></dt><dd><p>Avoid discovering configuration files (<code>pyproject.toml</code>, <code>uv.toml</code>).</p>
<p>Normally, configuration files are discovered in the current directory, parent directories, or user configuration directories.</p>
<p>May also be set with the <code>UV_NO_CONFIG</code> environment variable.</p></dd><dt id="uv-upgrade--no-managed-python"><a href="#uv-upgrade--no-managed-python"><code>--no-managed-python</code></a></dt><dd><p>Disable use of uv-managed Python versions.</p>
<p>Instead, uv will search for a suitable Python version on the system.</p>
<p>May also be set with the <code>UV_NO_MANAGED_PYTHON</code> environment variable.</p></dd><dt id="uv-upgrade--no-progress"><a href="#uv-upgrade--no-progress"><code>--no-progress</code></a></dt><dd><p>Hide all progress outputs.</p>
<p>For example, spinners or progress bars.</p>
<p>May also be set with the <code>UV_NO_PROGRESS</code> environment variable.</p></dd><dt id="uv-upgrade--no-python-downloads"><a href="#uv-upgrade--no-python-downloads"><code>--no-python-downloads</code></a></dt><dd><p>Disable automatic downloads of Python.</p>
</dd><dt id="uv-upgrade--offline"><a href="#uv-upgrade--offline"><code>--offline</code></a></dt><dd><p>Disable network access.</p>
<p>When disabled, uv will only use locally cached data and locally available files.</p>
<p>May also be set with the <code>UV_OFFLINE</code> environment variable.</p></dd><dt id="uv-upgrade--project"><a href="#uv-upgrade--project"><code>--project</code></a> <i>project</i></dt><dd><p>Run the command within the given project directory.</p>
<p>All <code>pyproject.toml</code>, <code>uv.toml</code>, and <code>.python-version</code> files will be discovered by walking up the directory tree from the project root, as will the project's virtual environment (<code>.venv</code>).</p>
<p>Other command-line arguments (such as relative paths) will be resolved relative to the current working directory.</p>
<p>See <code>--directory</code> to change the working directory entirely.</p>
<p>This setting has no effect when used in the <code>uv pip</code> interface.</p>
<p>May also be set with the <code>UV_PROJECT</code> environment variable.</p></dd><dt id="uv-upgrade--python"><a href="#uv-upgrade--python"><code>--python</code></a>, <code>-p</code> <i>python</i></dt><dd><p>The Python interpreter to use during resolution (overrides pyproject.toml).</p>
<p>A Python interpreter is required for building source distributions to determine package
metadata when there are no wheels.</p>
<p>The interpreter is also used as the fallback value for the minimum Python version if
<code>requires-python</code> is not set.</p>
<p>See <a href="#uv-python">uv python</a> for details on Python discovery and supported request formats.</p>
<p>May also be set with the <code>UV_PYTHON</code> environment variable.</p></dd><dt id="uv-upgrade--quiet"><a href="#uv-upgrade--quiet"><code>--quiet</code></a>, <code>-q</code></dt><dd><p>Use quiet output.</p>
<p>Repeating this option, e.g., <code>-qq</code>, will enable a silent mode in which uv will write no output to stdout.</p>
</dd><dt id="uv-upgrade--recursive"><a href="#uv-upgrade--recursive"><code>--recursive</code></a></dt><dd><p>Search recursively for pyproject.toml files</p>
<p>May also be set with the <code>UV_UPGRADE_RECURSIVE</code> environment variable.</p></dd><dt id="uv-upgrade--refresh"><a href="#uv-upgrade--refresh"><code>--refresh</code></a></dt><dd><p>Refresh all cached data</p>
</dd><dt id="uv-upgrade--refresh-package"><a href="#uv-upgrade--refresh-package"><code>--refresh-package</code></a> <i>refresh-package</i></dt><dd><p>Refresh cached data for a specific package</p>
</dd><dt id="uv-upgrade--types"><a href="#uv-upgrade--types"><code>--types</code></a> <i>types</i></dt><dd><p>Only search specific tables in pyproject.toml: <code>prod,dev,optional,groups</code></p>
<p>May also be set with the <code>UV_UPGRADE_TYPES</code> environment variable.</p></dd><dt id="uv-upgrade--verbose"><a href="#uv-upgrade--verbose"><code>--verbose</code></a>, <code>-v</code></dt><dd><p>Use verbose output.</p>
<p>You can configure fine-grained logging using the <code>RUST_LOG</code> environment variable. (<a href="https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives">https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives</a>)</p>
</dd></dl>
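As a worked example of the digit and table filters described above (illustrative only, assuming the digit positions map to major, minor, patch, and build number as documented): with <code>--allow 2,3</code>, only the minor and patch digits may change, so a constraint at <code>1.2.3</code> could presumably move to <code>1.4.0</code> while a jump to <code>2.0.0</code> would be skipped; <code>--types prod,dev</code> restricts the search to the production and dev dependency tables.

```
# Allow only minor and patch digits to change (major and build-number bumps are skipped).
uv upgrade --allow 2,3

# Upgrade only the production and dev dependency tables, previewing the result first.
uv upgrade --types prod,dev --dry-run
```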
## uv sync
Update the project's environment.