Upgrade minimum Rust version to 1.83 (#9815)

This reverts commit 6cc7a560f7 to reapply
#9511 since we've disabled ppc64le-musl per #9793
This commit is contained in:
Zanie Blue 2024-12-11 10:06:19 -06:00 committed by GitHub
parent f64da9b763
commit ae25c2f4db
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
31 changed files with 72 additions and 65 deletions

View file

@@ -13,7 +13,7 @@ resolver = "2"
[workspace.package] [workspace.package]
edition = "2021" edition = "2021"
rust-version = "1.81" rust-version = "1.83"
homepage = "https://pypi.org/project/uv/" homepage = "https://pypi.org/project/uv/"
documentation = "https://pypi.org/project/uv/" documentation = "https://pypi.org/project/uv/"
repository = "https://github.com/astral-sh/uv" repository = "https://github.com/astral-sh/uv"

View file

@@ -23,7 +23,7 @@ pub enum WheelCache<'a> {
Git(&'a Url, &'a str), Git(&'a Url, &'a str),
} }
impl<'a> WheelCache<'a> { impl WheelCache<'_> {
/// The root directory for a cache bucket. /// The root directory for a cache bucket.
pub fn root(&self) -> PathBuf { pub fn root(&self) -> PathBuf {
match self { match self {

View file

@@ -992,6 +992,7 @@ impl ArchivedCachePolicy {
/// This dictates what the caller should do next by indicating whether the /// This dictates what the caller should do next by indicating whether the
/// cached response is stale or not. /// cached response is stale or not.
#[derive(Debug)] #[derive(Debug)]
#[allow(clippy::large_enum_variant)]
pub enum BeforeRequest { pub enum BeforeRequest {
/// The cached response is still fresh, and the caller may return the /// The cached response is still fresh, and the caller may return the
/// cached response without issuing an HTTP requests. /// cached response without issuing an HTTP requests.

View file

@@ -85,7 +85,7 @@ pub(crate) async fn wheel_metadata_from_remote_zip(
// The zip archive uses as BufReader which reads in chunks of 8192. To ensure we prefetch // The zip archive uses as BufReader which reads in chunks of 8192. To ensure we prefetch
// enough data we round the size up to the nearest multiple of the buffer size. // enough data we round the size up to the nearest multiple of the buffer size.
let buffer_size = 8192; let buffer_size = 8192;
let size = ((size + buffer_size - 1) / buffer_size) * buffer_size; let size = size.div_ceil(buffer_size) * buffer_size;
// Fetch the bytes from the zip archive that contain the requested file. // Fetch the bytes from the zip archive that contain the requested file.
reader reader

View file

@@ -31,7 +31,7 @@ impl<'de> serde::Deserialize<'de> for PackageNameSpecifier {
{ {
struct Visitor; struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor { impl serde::de::Visitor<'_> for Visitor {
type Value = PackageNameSpecifier; type Value = PackageNameSpecifier;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {

View file

@@ -93,7 +93,7 @@ pub enum SourceUrl<'a> {
Directory(DirectorySourceUrl<'a>), Directory(DirectorySourceUrl<'a>),
} }
impl<'a> SourceUrl<'a> { impl SourceUrl<'_> {
/// Return the [`Url`] of the source. /// Return the [`Url`] of the source.
pub fn url(&self) -> &Url { pub fn url(&self) -> &Url {
match self { match self {

View file

@@ -11,7 +11,7 @@ pub enum HashPolicy<'a> {
Validate(&'a [HashDigest]), Validate(&'a [HashDigest]),
} }
impl<'a> HashPolicy<'a> { impl HashPolicy<'_> {
/// Returns `true` if the hash policy is `None`. /// Returns `true` if the hash policy is `None`.
pub fn is_none(&self) -> bool { pub fn is_none(&self) -> bool {
matches!(self, Self::None) matches!(self, Self::None)

View file

@@ -103,7 +103,7 @@ pub enum VersionOrUrlRef<'a, T: Pep508Url = VerbatimUrl> {
Url(&'a T), Url(&'a T),
} }
impl<'a, T: Pep508Url> VersionOrUrlRef<'a, T> { impl<T: Pep508Url> VersionOrUrlRef<'_, T> {
/// If it is a URL, return its value. /// If it is a URL, return its value.
pub fn url(&self) -> Option<&T> { pub fn url(&self) -> Option<&T> {
match self { match self {
@@ -140,7 +140,7 @@ pub enum InstalledVersion<'a> {
Url(&'a Url, &'a Version), Url(&'a Url, &'a Version),
} }
impl<'a> InstalledVersion<'a> { impl InstalledVersion<'_> {
/// If it is a URL, return its value. /// If it is a URL, return its value.
pub fn url(&self) -> Option<&Url> { pub fn url(&self) -> Option<&Url> {
match self { match self {

View file

@@ -84,7 +84,7 @@ impl LoweredRequirement {
if workspace.packages().contains_key(&requirement.name) { if workspace.packages().contains_key(&requirement.name) {
// And it's not a recursive self-inclusion (extras that activate other extras), e.g. // And it's not a recursive self-inclusion (extras that activate other extras), e.g.
// `framework[machine_learning]` depends on `framework[cuda]`. // `framework[machine_learning]` depends on `framework[cuda]`.
if !project_name.is_some_and(|project_name| *project_name == requirement.name) { if project_name.is_none_or(|project_name| *project_name != requirement.name) {
// It must be declared as a workspace source. // It must be declared as a workspace source.
let Some(sources) = sources.as_ref() else { let Some(sources) = sources.as_ref() else {
// No sources were declared for the workspace package. // No sources were declared for the workspace package.
@@ -141,7 +141,7 @@ impl LoweredRequirement {
// Support recursive editable inclusions. // Support recursive editable inclusions.
if has_sources if has_sources
&& requirement.version_or_url.is_none() && requirement.version_or_url.is_none()
&& !project_name.is_some_and(|project_name| *project_name == requirement.name) && project_name.is_none_or(|project_name| *project_name != requirement.name)
{ {
warn_user_once!( warn_user_once!(
"Missing version constraint (e.g., a lower bound) for `{}`", "Missing version constraint (e.g., a lower bound) for `{}`",

View file

@@ -1755,7 +1755,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map_err(Error::CacheWrite)?; .map_err(Error::CacheWrite)?;
if let Err(err) = rename_with_retry(extracted, target).await { if let Err(err) = rename_with_retry(extracted, target).await {
// If the directory already exists, accept it. // If the directory already exists, accept it.
if target.is_dir() { if err.kind() == std::io::ErrorKind::AlreadyExists {
warn!("Directory already exists: {}", target.display()); warn!("Directory already exists: {}", target.display());
} else { } else {
return Err(Error::CacheWrite(err)); return Err(Error::CacheWrite(err));
@@ -1816,7 +1816,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
.map_err(Error::CacheWrite)?; .map_err(Error::CacheWrite)?;
if let Err(err) = rename_with_retry(extracted, target).await { if let Err(err) = rename_with_retry(extracted, target).await {
// If the directory already exists, accept it. // If the directory already exists, accept it.
if target.is_dir() { if err.kind() == std::io::ErrorKind::AlreadyExists {
warn!("Directory already exists: {}", target.display()); warn!("Directory already exists: {}", target.display());
} else { } else {
return Err(Error::CacheWrite(err)); return Err(Error::CacheWrite(err));

View file

@@ -80,7 +80,7 @@ where
} }
} }
impl<'a, R> tokio::io::AsyncRead for HashReader<'a, R> impl<R> tokio::io::AsyncRead for HashReader<'_, R>
where where
R: tokio::io::AsyncRead + Unpin, R: tokio::io::AsyncRead + Unpin,
{ {

View file

@@ -387,7 +387,7 @@ fn clone_recursive(
match attempt { match attempt {
Attempt::Initial => { Attempt::Initial => {
if let Err(err) = reflink::reflink(&from, &to) { if let Err(err) = reflink::reflink(&from, &to) {
if matches!(err.kind(), std::io::ErrorKind::AlreadyExists) { if err.kind() == std::io::ErrorKind::AlreadyExists {
// If cloning/copying fails and the directory exists already, it must be merged recursively. // If cloning/copying fails and the directory exists already, it must be merged recursively.
if entry.file_type()?.is_dir() { if entry.file_type()?.is_dir() {
for entry in fs::read_dir(from)? { for entry in fs::read_dir(from)? {
@@ -423,7 +423,7 @@ fn clone_recursive(
} }
Attempt::Subsequent => { Attempt::Subsequent => {
if let Err(err) = reflink::reflink(&from, &to) { if let Err(err) = reflink::reflink(&from, &to) {
if matches!(err.kind(), std::io::ErrorKind::AlreadyExists) { if err.kind() == std::io::ErrorKind::AlreadyExists {
// If cloning/copying fails and the directory exists already, it must be merged recursively. // If cloning/copying fails and the directory exists already, it must be merged recursively.
if entry.file_type()?.is_dir() { if entry.file_type()?.is_dir() {
for entry in fs::read_dir(from)? { for entry in fs::read_dir(from)? {

View file

@@ -198,7 +198,7 @@ struct SerializeVisitor<'a> {
entries: &'a mut BTreeMap<String, OptionField>, entries: &'a mut BTreeMap<String, OptionField>,
} }
impl<'a> Visit for SerializeVisitor<'a> { impl Visit for SerializeVisitor<'_> {
fn record_set(&mut self, name: &str, set: OptionSet) { fn record_set(&mut self, name: &str, set: OptionSet) {
// Collect the entries of the set. // Collect the entries of the set.
let mut entries = BTreeMap::new(); let mut entries = BTreeMap::new();

View file

@@ -3926,7 +3926,7 @@ mod tests {
/// assertion failure messages. /// assertion failure messages.
struct VersionBloatedDebug<'a>(&'a Version); struct VersionBloatedDebug<'a>(&'a Version);
impl<'a> std::fmt::Debug for VersionBloatedDebug<'a> { impl std::fmt::Debug for VersionBloatedDebug<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Version") f.debug_struct("Version")
.field("epoch", &self.0.epoch()) .field("epoch", &self.0.epoch())

View file

@@ -1295,7 +1295,7 @@ pub struct MarkerTreeDebugGraph<'a> {
marker: &'a MarkerTree, marker: &'a MarkerTree,
} }
impl<'a> fmt::Debug for MarkerTreeDebugGraph<'a> { impl fmt::Debug for MarkerTreeDebugGraph<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.marker.fmt_graph(f, 0) self.marker.fmt_graph(f, 0)
} }
@@ -1312,7 +1312,7 @@ pub struct MarkerTreeDebugRaw<'a> {
marker: &'a MarkerTree, marker: &'a MarkerTree,
} }
impl<'a> fmt::Debug for MarkerTreeDebugRaw<'a> { impl fmt::Debug for MarkerTreeDebugRaw<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let node = INTERNER.shared.node(self.marker.0); let node = INTERNER.shared.node(self.marker.0);
f.debug_tuple("MarkerTreeDebugRaw").field(node).finish() f.debug_tuple("MarkerTreeDebugRaw").field(node).finish()

View file

@@ -243,7 +243,7 @@ impl<'a> From<(&'a PackageName, &'a GroupName)> for ConflictItemRef<'a> {
} }
} }
impl<'a> hashbrown::Equivalent<ConflictItem> for ConflictItemRef<'a> { impl hashbrown::Equivalent<ConflictItem> for ConflictItemRef<'_> {
fn equivalent(&self, key: &ConflictItem) -> bool { fn equivalent(&self, key: &ConflictItem) -> bool {
key.as_ref() == *self key.as_ref() == *self
} }
@@ -335,7 +335,7 @@ impl<'a> From<&'a GroupName> for ConflictPackageRef<'a> {
} }
} }
impl<'a> PartialEq<ConflictPackage> for ConflictPackageRef<'a> { impl PartialEq<ConflictPackage> for ConflictPackageRef<'_> {
fn eq(&self, other: &ConflictPackage) -> bool { fn eq(&self, other: &ConflictPackage) -> bool {
other.as_ref() == *self other.as_ref() == *self
} }
@@ -347,7 +347,7 @@ impl<'a> PartialEq<ConflictPackageRef<'a>> for ConflictPackage {
} }
} }
impl<'a> hashbrown::Equivalent<ConflictPackage> for ConflictPackageRef<'a> { impl hashbrown::Equivalent<ConflictPackage> for ConflictPackageRef<'_> {
fn equivalent(&self, key: &ConflictPackage) -> bool { fn equivalent(&self, key: &ConflictPackage) -> bool {
key.as_ref() == *self key.as_ref() == *self
} }

View file

@@ -211,7 +211,7 @@ impl Metadata23 {
writer.push_str(&format!("{}{}\n", " ".repeat(key.len() + 2), line)); writer.push_str(&format!("{}{}\n", " ".repeat(key.len() + 2), line));
} }
} }
fn write_opt_str(writer: &mut String, key: &str, value: &Option<impl Display>) { fn write_opt_str(writer: &mut String, key: &str, value: Option<&impl Display>) {
if let Some(value) = value { if let Some(value) = value {
write_str(writer, key, value); write_str(writer, key, value);
} }
@@ -233,28 +233,40 @@ impl Metadata23 {
write_all(&mut writer, "Platform", &self.platforms); write_all(&mut writer, "Platform", &self.platforms);
write_all(&mut writer, "Supported-Platform", &self.supported_platforms); write_all(&mut writer, "Supported-Platform", &self.supported_platforms);
write_all(&mut writer, "Summary", &self.summary); write_all(&mut writer, "Summary", &self.summary);
write_opt_str(&mut writer, "Keywords", &self.keywords); write_opt_str(&mut writer, "Keywords", self.keywords.as_ref());
write_opt_str(&mut writer, "Home-Page", &self.home_page); write_opt_str(&mut writer, "Home-Page", self.home_page.as_ref());
write_opt_str(&mut writer, "Download-URL", &self.download_url); write_opt_str(&mut writer, "Download-URL", self.download_url.as_ref());
write_opt_str(&mut writer, "Author", &self.author); write_opt_str(&mut writer, "Author", self.author.as_ref());
write_opt_str(&mut writer, "Author-email", &self.author_email); write_opt_str(&mut writer, "Author-email", self.author_email.as_ref());
write_opt_str(&mut writer, "License", &self.license); write_opt_str(&mut writer, "License", self.license.as_ref());
write_opt_str(&mut writer, "License-Expression", &self.license_expression); write_opt_str(
&mut writer,
"License-Expression",
self.license_expression.as_ref(),
);
write_all(&mut writer, "License-File", &self.license_files); write_all(&mut writer, "License-File", &self.license_files);
write_all(&mut writer, "Classifier", &self.classifiers); write_all(&mut writer, "Classifier", &self.classifiers);
write_all(&mut writer, "Requires-Dist", &self.requires_dist); write_all(&mut writer, "Requires-Dist", &self.requires_dist);
write_all(&mut writer, "Provides-Dist", &self.provides_dist); write_all(&mut writer, "Provides-Dist", &self.provides_dist);
write_all(&mut writer, "Obsoletes-Dist", &self.obsoletes_dist); write_all(&mut writer, "Obsoletes-Dist", &self.obsoletes_dist);
write_opt_str(&mut writer, "Maintainer", &self.maintainer); write_opt_str(&mut writer, "Maintainer", self.maintainer.as_ref());
write_opt_str(&mut writer, "Maintainer-email", &self.maintainer_email); write_opt_str(
write_opt_str(&mut writer, "Requires-Python", &self.requires_python); &mut writer,
"Maintainer-email",
self.maintainer_email.as_ref(),
);
write_opt_str(
&mut writer,
"Requires-Python",
self.requires_python.as_ref(),
);
write_all(&mut writer, "Requires-External", &self.requires_external); write_all(&mut writer, "Requires-External", &self.requires_external);
write_all(&mut writer, "Project-URL", &self.project_urls); write_all(&mut writer, "Project-URL", &self.project_urls);
write_all(&mut writer, "Provides-Extra", &self.provides_extras); write_all(&mut writer, "Provides-Extra", &self.provides_extras);
write_opt_str( write_opt_str(
&mut writer, &mut writer,
"Description-Content-Type", "Description-Content-Type",
&self.description_content_type, self.description_content_type.as_ref(),
); );
write_all(&mut writer, "Dynamic", &self.dynamic); write_all(&mut writer, "Dynamic", &self.dynamic);

View file

@@ -1193,17 +1193,17 @@ pub(crate) fn is_windows_store_shim(path: &Path) -> bool {
} }
// Ex) `WindowsApps` // Ex) `WindowsApps`
if !components if components
.next() .next()
.is_some_and(|component| component.as_os_str() == "WindowsApps") .is_none_or(|component| component.as_os_str() != "WindowsApps")
{ {
return false; return false;
} }
// Ex) `Microsoft` // Ex) `Microsoft`
if !components if components
.next() .next()
.is_some_and(|component| component.as_os_str() == "Microsoft") .is_none_or(|component| component.as_os_str() != "Microsoft")
{ {
return false; return false;
} }

View file

@@ -89,7 +89,7 @@ impl PythonInstallation {
python_install_mirror: Option<&str>, python_install_mirror: Option<&str>,
pypy_install_mirror: Option<&str>, pypy_install_mirror: Option<&str>,
) -> Result<Self, Error> { ) -> Result<Self, Error> {
let request = request.unwrap_or_else(|| &PythonRequest::Default); let request = request.unwrap_or(&PythonRequest::Default);
// Search for the installation // Search for the installation
match Self::find(request, environments, preference, cache) { match Self::find(request, environments, preference, cache) {

View file

@@ -135,7 +135,7 @@ impl CandidateSelector {
is_excluded: bool, is_excluded: bool,
index: Option<&'a IndexUrl>, index: Option<&'a IndexUrl>,
env: &ResolverEnvironment, env: &ResolverEnvironment,
) -> Option<Candidate> { ) -> Option<Candidate<'a>> {
// In the branches, we "sort" the preferences by marker-matching through an iterator that // In the branches, we "sort" the preferences by marker-matching through an iterator that
// first has the matching half and then the mismatching half. // first has the matching half and then the mismatching half.
let preferences_match = preferences let preferences_match = preferences
@@ -282,7 +282,7 @@ impl CandidateSelector {
range: &Range<Version>, range: &Range<Version>,
version_maps: &'a [VersionMap], version_maps: &'a [VersionMap],
env: &ResolverEnvironment, env: &ResolverEnvironment,
) -> Option<Candidate> { ) -> Option<Candidate<'a>> {
trace!( trace!(
"Selecting candidate for {package_name} with range {range} with {} remote versions", "Selecting candidate for {package_name} with range {range} with {} remote versions",
version_maps.iter().map(VersionMap::len).sum::<usize>(), version_maps.iter().map(VersionMap::len).sum::<usize>(),

View file

@@ -787,7 +787,7 @@ impl<'range> From<&'range Range<Version>> for SentinelRange<'range> {
} }
} }
impl<'range> SentinelRange<'range> { impl SentinelRange<'_> {
/// Returns `true` if the range appears to be, e.g., `>1.0.0, <1.0.0+[max]`. /// Returns `true` if the range appears to be, e.g., `>1.0.0, <1.0.0+[max]`.
pub fn is_sentinel(&self) -> bool { pub fn is_sentinel(&self) -> bool {
self.0.iter().all(|(lower, upper)| { self.0.iter().all(|(lower, upper)| {

View file

@@ -2752,7 +2752,7 @@ impl<'de> serde::de::Deserialize<'de> for RegistrySource {
{ {
struct Visitor; struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor { impl serde::de::Visitor<'_> for Visitor {
type Value = RegistrySource; type Value = RegistrySource;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
@@ -2973,7 +2973,7 @@ impl SourceDist {
) -> Result<Option<SourceDist>, LockError> { ) -> Result<Option<SourceDist>, LockError> {
// Reject distributions from registries that don't match the index URL, as can occur with // Reject distributions from registries that don't match the index URL, as can occur with
// `--find-links`. // `--find-links`.
if !index.is_some_and(|index| *index == reg_dist.index) { if index.is_none_or(|index| *index != reg_dist.index) {
return Ok(None); return Ok(None);
} }

View file

@@ -111,7 +111,7 @@ impl PubGrubDependency {
// Detect self-dependencies. // Detect self-dependencies.
if dev.is_none() { if dev.is_none() {
debug_assert!( debug_assert!(
!source_name.is_some_and(|source_name| source_name == name), source_name.is_none_or(|source_name| source_name != name),
"extras not flattened for {name}" "extras not flattened for {name}"
); );
} }
@@ -125,7 +125,7 @@ impl PubGrubDependency {
// Detect self-dependencies. // Detect self-dependencies.
if dev.is_none() { if dev.is_none() {
debug_assert!( debug_assert!(
!source_name.is_some_and(|source_name| source_name == name), source_name.is_none_or(|source_name| source_name != name),
"group not flattened for {name}" "group not flattened for {name}"
); );
} }

View file

@@ -350,7 +350,7 @@ impl ScriptTag {
let mut lines = contents.lines(); let mut lines = contents.lines();
// Ensure that the first line is exactly `# /// script`. // Ensure that the first line is exactly `# /// script`.
if !lines.next().is_some_and(|line| line == "# /// script") { if lines.next().is_none_or(|line| line != "# /// script") {
return Ok(None); return Ok(None);
} }

View file

@@ -50,14 +50,7 @@ impl FilesystemOptions {
Ok(Some(Self(options))) Ok(Some(Self(options)))
} }
Err(Error::Io(err)) if err.kind() == std::io::ErrorKind::NotFound => Ok(None), Err(Error::Io(err)) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
Err(_) if !dir.is_dir() => { Err(Error::Io(err)) if err.kind() == std::io::ErrorKind::NotADirectory => Ok(None),
// Ex) `XDG_CONFIG_HOME=/dev/null`
tracing::debug!(
"User configuration directory `{}` does not exist or is not a directory",
dir.display()
);
Ok(None)
}
Err(err) => Err(err), Err(err) => Err(err),
} }
} }

View file

@@ -10,7 +10,7 @@ pub enum BuildIsolation<'a> {
SharedPackage(&'a PythonEnvironment, &'a [PackageName]), SharedPackage(&'a PythonEnvironment, &'a [PackageName]),
} }
impl<'a> BuildIsolation<'a> { impl BuildIsolation<'_> {
/// Returns `true` if build isolation is enforced for the given package name. /// Returns `true` if build isolation is enforced for the given package name.
pub fn is_isolated(&self, package: Option<&PackageName>) -> bool { pub fn is_isolated(&self, package: Option<&PackageName>) -> bool {
match self { match self {

View file

@@ -277,10 +277,10 @@ impl PyProjectTomlMut {
// If necessary, update the name. // If necessary, update the name.
if let Some(index) = index.name.as_deref() { if let Some(index) = index.name.as_deref() {
if !table if table
.get("name") .get("name")
.and_then(|name| name.as_str()) .and_then(|name| name.as_str())
.is_some_and(|name| name == index) .is_none_or(|name| name != index)
{ {
let mut formatted = Formatted::new(index.to_string()); let mut formatted = Formatted::new(index.to_string());
if let Some(value) = table.get("name").and_then(Item::as_value) { if let Some(value) = table.get("name").and_then(Item::as_value) {
@@ -296,11 +296,11 @@ impl PyProjectTomlMut {
} }
// If necessary, update the URL. // If necessary, update the URL.
if !table if table
.get("url") .get("url")
.and_then(|item| item.as_str()) .and_then(|item| item.as_str())
.and_then(|url| Url::parse(url).ok()) .and_then(|url| Url::parse(url).ok())
.is_some_and(|url| CanonicalUrl::new(&url) == CanonicalUrl::new(index.url.url())) .is_none_or(|url| CanonicalUrl::new(&url) != CanonicalUrl::new(index.url.url()))
{ {
let mut formatted = Formatted::new(index.url.to_string()); let mut formatted = Formatted::new(index.url.to_string());
if let Some(value) = table.get("url").and_then(Item::as_value) { if let Some(value) = table.get("url").and_then(Item::as_value) {

View file

@@ -773,12 +773,12 @@ impl Workspace {
member_glob.to_string(), member_glob.to_string(),
)); ));
} }
// If the entry is _not_ a directory, skip it. Err(err) if err.kind() == std::io::ErrorKind::NotADirectory => {
Err(_) if !member_root.is_dir() => {
warn!( warn!(
"Ignoring non-directory workspace member: `{}`", "Ignoring non-directory workspace member: `{}`",
member_root.simplified_display() member_root.simplified_display()
); );
continue; continue;
} }
Err(err) => return Err(err.into()), Err(err) => return Err(err.into()),
@@ -1032,7 +1032,7 @@ impl ProjectWorkspace {
let project = pyproject_toml let project = pyproject_toml
.project .project
.clone() .clone()
.ok_or_else(|| WorkspaceError::MissingProject(pyproject_path))?; .ok_or(WorkspaceError::MissingProject(pyproject_path))?;
Self::from_project(project_root, &project, &pyproject_toml, options).await Self::from_project(project_root, &project, &pyproject_toml, options).await
} }

View file

@@ -1109,7 +1109,7 @@ enum Source<'a> {
Directory(Cow<'a, Path>), Directory(Cow<'a, Path>),
} }
impl<'a> Source<'a> { impl Source<'_> {
fn path(&self) -> &Path { fn path(&self) -> &Path {
match self { match self {
Self::File(path) => path.as_ref(), Self::File(path) => path.as_ref(),

View file

@@ -82,6 +82,7 @@ impl IgnoreCurrentlyBeingDeleted for Result<(), std::io::Error> {
fn ignore_currently_being_deleted(self) -> Self { fn ignore_currently_being_deleted(self) -> Self {
match self { match self {
Ok(()) => Ok(()), Ok(()) => Ok(()),
Err(err) if err.kind() == std::io::ErrorKind::DirectoryNotEmpty => Ok(()),
Err(err) if err.is_in_process_of_being_deleted() => Ok(()), Err(err) if err.is_in_process_of_being_deleted() => Ok(()),
Err(err) => Err(err), Err(err) => Err(err),
} }

View file

@@ -1,2 +1,2 @@
[toolchain] [toolchain]
channel = "1.81" channel = "1.83"