Fetch latest versions in parallel

This commit is contained in:
Rene Leonhardt 2025-06-13 12:01:23 +02:00
parent 2365e79aef
commit 15978e285b
No known key found for this signature in database
GPG key ID: 8C95C84F75AB1E8E
2 changed files with 184 additions and 87 deletions
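
The change in a nutshell: instead of awaiting one registry lookup per requirement while rewriting `pyproject.toml` (and reusing earlier hits cached in `all_upgrades`), the versioned package names are collected up front and their latest versions are fetched in a single concurrent pass, bounded by the download concurrency, before any table is edited. Below is a minimal, self-contained sketch of that fan-out/collect pattern with `buffer_unordered`; the crate setup (`tokio`, `futures`) and the names `fetch_latest`/`fetch_all` are illustrative assumptions, not the uv client API used in the diff.

use std::collections::HashMap;

use futures::StreamExt;

// Hypothetical stand-in for a registry query; the real code goes through the
// registry client with a semaphore-bounded download budget.
async fn fetch_latest(name: &str) -> Option<String> {
    Some(format!("{name} 1.0.0"))
}

// Fetch the latest version of every package concurrently, keeping at most
// `limit` lookups in flight, and collect the successful results into a map.
async fn fetch_all(names: &[&str], limit: usize) -> HashMap<String, String> {
    let mut fetches = futures::stream::iter(names.iter().copied())
        .map(|name| async move { (name, fetch_latest(name).await) })
        .buffer_unordered(limit);

    let mut latest = HashMap::new();
    while let Some((name, version)) = fetches.next().await {
        if let Some(version) = version {
            latest.insert(name.to_string(), version);
        }
    }
    latest
}

#[tokio::main]
async fn main() {
    // Results arrive in completion order; the collected map is what callers use.
    let latest = fetch_all(&["ruff", "pydantic", "httpx"], 8).await;
    println!("{latest:?}");
}

`buffer_unordered(limit)` keeps at most `limit` lookups in flight and yields results as they complete, which is why downstream code consumes the collected map rather than relying on stream order.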

View file

@@ -1,4 +1,5 @@
 use itertools::Itertools;
+use rustc_hash::{FxHashMap, FxHashSet};
 use serde::{Deserialize, Serialize};
 use std::fmt::{Display, Formatter};
 use std::path::Path;
@@ -1122,16 +1123,88 @@ impl PyProjectTomlMut {
         types
     }
 
+    /// Returns package names of all dependencies with version constraints.
+    ///
+    /// This method searches `project.dependencies`, `project.optional-dependencies`,
+    /// `dependency-groups` and `tool.uv.dev-dependencies`.
+    pub fn find_versioned_dependencies(&self) -> FxHashSet<PackageName> {
+        let mut versioned_dependencies = FxHashSet::default();
+
+        if let Some(project) = self.doc.get("project").and_then(Item::as_table) {
+            // Check `project.dependencies`.
+            if let Some(dependencies) = project.get("dependencies").and_then(Item::as_array) {
+                Self::extract_names_if_versioned(&mut versioned_dependencies, dependencies);
+            }
+
+            // Check `project.optional-dependencies`.
+            if let Some(extras) = project
+                .get("optional-dependencies")
+                .and_then(Item::as_table)
+            {
+                for (extra, dependencies) in extras {
+                    let Some(dependencies) = dependencies.as_array() else {
+                        continue;
+                    };
+                    let Ok(_extra) = ExtraName::from_str(extra) else {
+                        continue;
+                    };
+                    Self::extract_names_if_versioned(&mut versioned_dependencies, dependencies);
+                }
+            }
+        }
+
+        // Check `dependency-groups`.
+        if let Some(groups) = self.doc.get("dependency-groups").and_then(Item::as_table) {
+            for (group, dependencies) in groups {
+                let Ok(_group) = GroupName::from_str(group) else {
+                    continue;
+                };
+                let Some(dependencies) = dependencies.as_array() else {
+                    continue;
+                };
+                Self::extract_names_if_versioned(&mut versioned_dependencies, dependencies);
+            }
+        }
+
+        // Check `tool.uv.dev-dependencies`.
+        if let Some(dependencies) = self
+            .doc
+            .get("tool")
+            .and_then(Item::as_table)
+            .and_then(|tool| tool.get("uv"))
+            .and_then(Item::as_table)
+            .and_then(|uv| uv.get("dev-dependencies"))
+            .and_then(Item::as_array)
+        {
+            Self::extract_names_if_versioned(&mut versioned_dependencies, dependencies);
+        }
+
+        versioned_dependencies
+    }
+
+    fn extract_names_if_versioned(types: &mut FxHashSet<PackageName>, dependencies: &Array) {
+        for dep in dependencies {
+            let Some(req) = dep.as_str().and_then(try_parse_requirement) else {
+                continue;
+            };
+            let name = req.name;
+            if types.contains(&name) {
+                continue;
+            }
+            // Skip requirements without version constraints
+            if let Some(VersionOrUrl::VersionSpecifier(_)) = req.version_or_url {
+                types.insert(name);
+            }
+        }
+    }
+
     /// Returns all dependencies in this `pyproject.toml`.
     ///
     /// This method searches `project.dependencies`, `project.optional-dependencies`,
     /// `dependency-groups` and `tool.uv.dev-dependencies`.
-    pub async fn upgrade_all_dependencies<
-        F: Fn(String) -> Fut,
-        Fut: Future<Output = Option<Version>>,
-    >(
+    pub fn upgrade_all_dependencies(
         &mut self,
-        find_latest: &F,
+        latest_versions: &FxHashMap<PackageName, Version>,
         tables: &[DependencyType],
         allow: &[usize],
         skipped: &mut VersionDigit,
@@ -1152,14 +1225,13 @@ impl PyProjectTomlMut {
         {
             found += item.as_array().map_or(0, Array::len);
             Self::replace_dependencies(
-                find_latest,
+                latest_versions,
                 &mut all_upgrades,
                 item,
                 &DependencyType::Production,
                 allow,
                 skipped,
-            )
-            .await;
+            );
         }
 
         // Check `project.optional-dependencies`
@@ -1181,14 +1253,13 @@ impl PyProjectTomlMut {
                 if let Some(_extra) = extra {
                     found += item.as_array().map_or(0, Array::len);
                     Self::replace_dependencies(
-                        find_latest,
+                        latest_versions,
                         &mut all_upgrades,
                         item,
                         &DependencyType::Optional(_extra),
                         allow,
                         skipped,
-                    )
-                    .await;
+                    );
                 }
             }
         }
@@ -1210,14 +1281,13 @@ impl PyProjectTomlMut {
                 if let Some(_group) = group {
                     found += item.as_array().map_or(0, Array::len);
                     Self::replace_dependencies(
-                        find_latest,
+                        latest_versions,
                         &mut all_upgrades,
                         item,
                         &DependencyType::Group(_group),
                         allow,
                         skipped,
-                    )
-                    .await;
+                    );
                 }
             }
         }
@@ -1237,21 +1307,20 @@ impl PyProjectTomlMut {
         {
             found += item.as_array().map_or(0, Array::len);
             Self::replace_dependencies(
-                find_latest,
+                latest_versions,
                 &mut all_upgrades,
                 item,
                 &DependencyType::Dev,
                 allow,
                 skipped,
-            )
-            .await;
+            );
         }
 
         (found, all_upgrades)
     }
 
-    async fn replace_dependencies<Fut: Future<Output = Option<Version>>, F: Fn(String) -> Fut>(
-        find_latest: &F,
+    fn replace_dependencies(
+        latest_versions: &FxHashMap<PackageName, Version>,
         all_upgrades: &mut Vec<UpgradeResult>,
         item: &mut Item,
         dependency_type: &DependencyType,
@@ -1260,21 +1329,19 @@ impl PyProjectTomlMut {
     ) {
         if let Some(dependencies) = item.as_array_mut().filter(|d| !d.is_empty()) {
             Self::replace_upgrades(
-                find_latest,
+                latest_versions,
                 all_upgrades,
                 dependencies,
                 dependency_type,
                 allow,
                 skipped,
-            )
-            .await;
+            );
         }
     }
 
-    async fn find_upgrades<Fut: Future<Output = Option<Version>>, F: Fn(String) -> Fut>(
-        find_latest: F,
+    fn find_upgrades(
+        latest_versions: &FxHashMap<PackageName, Version>,
         dependencies: &mut Array,
-        all_upgrades: &[UpgradeResult],
         dependency_type: &DependencyType,
         allow: &[usize],
         skipped: &mut VersionDigit,
@@ -1290,17 +1357,9 @@ impl PyProjectTomlMut {
             else {
                 continue;
             };
-            if let Some(upgrade) = match all_upgrades
-                .iter()
-                .find(|(_, _, _, r, _, _, _, _)| r.name == req.name)
-            {
-                Some((_, _, _, _, v, _, _, _)) => Some(v.clone()), // reuse cached upgrade
-                _ => find_latest(req.name.to_string())
-                    .await
-                    .filter(|latest| !version_specifiers.contains(latest)),
-            } {
+            if let Some(upgrade) = latest_versions.get(&old.name) {
                 let (bumped, upgraded, semver) =
-                    version_specifiers.bump_last(&upgrade, allow, skipped);
+                    version_specifiers.bump_last(upgrade, allow, skipped);
                 if bumped {
                     req.version_or_url = Some(VersionOrUrl::VersionSpecifier(version_specifiers));
                     upgrades.push((
@@ -1308,7 +1367,7 @@ impl PyProjectTomlMut {
                         dep.as_str().unwrap().to_string(),
                         old,
                         req,
-                        upgrade,
+                        upgrade.clone(),
                         upgraded,
                         dependency_type.clone(),
                         semver,
@@ -1319,8 +1378,8 @@ impl PyProjectTomlMut {
         upgrades
     }
 
-    async fn replace_upgrades<Fut: Future<Output = Option<Version>>, F: Fn(String) -> Fut>(
-        find_latest: F,
+    fn replace_upgrades(
+        latest_versions: &FxHashMap<PackageName, Version>,
         all_upgrades: &mut Vec<UpgradeResult>,
         dependencies: &mut Array,
         dependency_type: &DependencyType,
@@ -1328,14 +1387,12 @@ impl PyProjectTomlMut {
         skipped: &mut VersionDigit,
     ) {
         let upgrades = Self::find_upgrades(
-            find_latest,
+            latest_versions,
             dependencies,
-            all_upgrades,
             dependency_type,
             allow,
             skipped,
-        )
-        .await;
+        );
         for (i, _dep, _old, new, _upgrade, _upgraded, _, _) in &upgrades {
             let string = new.to_string();
             dependencies.replace(*i, toml_edit::Value::from(string));

View file

@@ -9,10 +9,12 @@ use crate::commands::ExitStatus;
 use crate::commands::pip::latest::LatestClient;
 use crate::printer::Printer;
 use anyhow::Result;
+use futures::StreamExt;
 use itertools::Itertools;
 use owo_colors::OwoColorize;
 use prettytable::format::FormatBuilder;
 use prettytable::row;
+use rustc_hash::{FxHashMap, FxHashSet};
 use tokio::sync::Semaphore;
 use uv_cache::{Cache, Refresh};
 use uv_cache_info::Timestamp;
@@ -62,11 +64,28 @@ pub(crate) async fn upgrade_project_dependencies(args: UpgradeProjectArgs) -> Re
     let printer = Printer::Default;
     let info = format!("{}{}", "info".cyan().bold(), ":".bold());
 
+    #[allow(deprecated)]
+    let cache_dir = env::home_dir().unwrap().join(".cache/uv");
+    let cache = Cache::from_settings(false, Some(cache_dir))?.init()?;
+    let capabilities = IndexCapabilities::default();
+    let client_builder = BaseClientBuilder::new();
+
+    // Initialize the registry client.
+    let client = RegistryClientBuilder::try_from(client_builder)?
+        .cache(cache.clone().with_refresh(Refresh::All(Timestamp::now())))
+        .index_locations(&IndexLocations::default())
+        .build();
+    let concurrency = Concurrency::default();
+    let download_concurrency = Semaphore::new(concurrency.downloads);
+
     let (mut item_written, mut all_found, mut all_bumped, mut all_skipped) =
         (false, 0, 0, VersionDigit::default());
-    for toml_dir in tomls {
-        let pyproject_toml = Path::new(&toml_dir).join("pyproject.toml");
+    let mut all_latest_versions = FxHashMap::default();
+
+    for toml_dir in &tomls {
+        let pyproject_toml = Path::new(toml_dir).join("pyproject.toml");
         let content = match fs_err::tokio::read_to_string(pyproject_toml.clone()).await {
             Ok(content) => content,
             Err(err) => {
@@ -86,19 +105,6 @@ pub(crate) async fn upgrade_project_dependencies(args: UpgradeProjectArgs) -> Re
             }
         };
 
-        #[allow(deprecated)]
-        let cache_dir = env::home_dir().unwrap().join(".cache/uv");
-        let cache = Cache::from_settings(false, Some(cache_dir))?.init()?;
-        let capabilities = IndexCapabilities::default();
-        let client_builder = BaseClientBuilder::new();
-
-        // Initialize the registry client.
-        let client = RegistryClientBuilder::try_from(client_builder)?
-            .cache(cache.clone().with_refresh(Refresh::All(Timestamp::now())))
-            .index_locations(&IndexLocations::default())
-            .build();
-        let download_concurrency = Semaphore::new(Concurrency::default().downloads);
-
         let python = args
             .python
             .clone()
@@ -122,17 +128,23 @@ pub(crate) async fn upgrade_project_dependencies(args: UpgradeProjectArgs) -> Re
             requires_python: &requires_python,
         };
 
-        let find_latest = async |name: String| {
-            client
-                .find_latest(
-                    &PackageName::from_str(name.as_str()).unwrap(),
-                    None,
-                    &download_concurrency,
-                )
-                .await
-                .ok()
-                .flatten()
-                .map(DistFilename::into_version)
+        let find_latest = async |names: &FxHashSet<PackageName>| {
+            let mut fetches = futures::stream::iter(names.iter())
+                .map(async |name| {
+                    let latest = client
+                        .find_latest(name, None, &download_concurrency)
+                        .await?;
+                    Ok::<(&PackageName, Option<DistFilename>), uv_client::Error>((name, latest))
+                })
+                .buffer_unordered(concurrency.downloads);
+
+            let mut map = FxHashMap::default();
+            while let Ok(Some((package, version))) = fetches.next().await.transpose() {
+                if let Some(version) = version.as_ref() {
+                    map.insert(package.clone(), version.clone().into_version());
+                }
+            }
+            map
         };
 
         let relative = if toml_dir == "." {
@@ -142,9 +154,17 @@ pub(crate) async fn upgrade_project_dependencies(args: UpgradeProjectArgs) -> Re
         };
         let subpath = format!("{relative}pyproject.toml");
         let mut skipped = VersionDigit::default();
-        let (found, upgrades) = toml
-            .upgrade_all_dependencies(&find_latest, &tables, &allow, &mut skipped)
-            .await;
+
+        let versioned = toml.find_versioned_dependencies();
+        let query_versions = versioned
+            .into_iter()
+            .filter(|p| !all_latest_versions.contains_key(p))
+            .collect();
+        let latest_versions = find_latest(&query_versions).await;
+        all_latest_versions.extend(latest_versions.clone());
+
+        let (found, upgrades) =
+            toml.upgrade_all_dependencies(&latest_versions, &tables, &allow, &mut skipped);
         all_skipped.add_other(&skipped);
         let bumped = upgrades.len();
         all_found += found;
@@ -215,7 +235,7 @@ pub(crate) async fn upgrade_project_dependencies(args: UpgradeProjectArgs) -> Re
             }
             writeln!(
                 printer.stderr(),
-                "{info} Upgraded {subpath} 🚀 Check manually, update lock + venv {} and run tests{}",
+                "{info} Upgraded {subpath} 🚀 Check manually, update {} and run tests{}",
                 "`uv sync -U`".green().bold(),
                 skipped.format(" (skipped ", ")")
             )?;
@@ -226,27 +246,47 @@ pub(crate) async fn upgrade_project_dependencies(args: UpgradeProjectArgs) -> Re
             item_written = true;
         }
     }
 
-    if args.recursive && all_bumped == 0 {
-        if all_found == 0 {
-            writeln!(printer.stderr(), "{info} No dependencies found recursively")?;
+    let files = format!(
+        "{} file{}",
+        tomls.len(),
+        if tomls.len() == 1 { "" } else { "s" }
+    );
+    if args.recursive {
+        if tomls.is_empty() {
+            warn_user!("No pyproject.toml files found recursively");
+            return Ok(ExitStatus::Error);
+        } else if all_bumped == 0 {
+            if all_found == 0 {
+                writeln!(
+                    printer.stderr(),
+                    "{info} No dependencies in {files} found recursively"
+                )?;
+            } else {
+                writeln!(
+                    printer.stderr(),
+                    "{info} No upgrades in {all_found} dependencies and {files} found, check manually if not committed yet{}",
+                    all_skipped.format(" (skipped ", ")")
+                )?;
+            }
+        } else if !all_skipped.is_empty() {
+            writeln!(
+                printer.stderr(),
+                "{info} Skipped {all_skipped} in {all_bumped} upgrades for --allow={}",
+                allow
+                    .iter()
+                    .sorted()
+                    .map(std::string::ToString::to_string)
+                    .collect::<Vec<_>>()
+                    .join(",")
+            )?;
         } else {
             writeln!(
                 printer.stderr(),
-                "{info} No upgrades found recursively, check manually if not committed yet{}",
+                "{info} Upgraded {all_bumped} dependencies in {files} 🚀 Check manually, update {} and run tests{}",
+                "`uv sync -U`".green().bold(),
                 all_skipped.format(" (skipped ", ")")
             )?;
         }
-    } else if args.recursive && !all_skipped.is_empty() {
-        writeln!(
-            printer.stderr(),
-            "{info} Skipped {all_skipped} in {all_bumped} upgrades for --allow={}",
-            allow
-                .iter()
-                .sorted()
-                .map(std::string::ToString::to_string)
-                .collect::<Vec<_>>()
-                .join(",")
-        )?;
     }
     Ok(ExitStatus::Success)