Enable workspace lint configuration in remaining crates (#4329)

## Summary

We didn't have Clippy enabled (to match our workspace settings) in a few
crates.
This commit is contained in:
Charlie Marsh 2024-06-17 23:02:28 -04:00 committed by GitHub
parent b8c0391667
commit c996e8e3f3
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
28 changed files with 249 additions and 204 deletions

View file

@ -4,7 +4,6 @@ version = "0.6.0"
description = "A library for python version numbers and specifiers, implementing PEP 440"
license = "Apache-2.0 OR BSD-2-Clause"
include = ["/src", "Changelog.md", "License-Apache", "License-BSD", "Readme.md", "pyproject.toml"]
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
@ -16,6 +15,9 @@ authors = { workspace = true }
name = "pep440_rs"
crate-type = ["rlib", "cdylib"]
[lints]
workspace = true
[dependencies]
once_cell = { workspace = true }
pyo3 = { workspace = true, optional = true, features = ["extension-module", "abi3-py37"] }

View file

@ -67,9 +67,9 @@ impl Operator {
/// specifiers [spec].
///
/// [spec]: https://packaging.python.org/en/latest/specifications/version-specifiers/
pub(crate) fn is_local_compatible(&self) -> bool {
pub(crate) fn is_local_compatible(self) -> bool {
!matches!(
*self,
self,
Self::GreaterThan
| Self::GreaterThanEqual
| Self::LessThan
@ -149,10 +149,12 @@ impl std::fmt::Display for Operator {
#[cfg(feature = "pyo3")]
#[pymethods]
impl Operator {
#[allow(clippy::trivially_copy_pass_by_ref)]
fn __str__(&self) -> String {
self.to_string()
}
#[allow(clippy::trivially_copy_pass_by_ref)]
fn __repr__(&self) -> String {
self.to_string()
}
@ -412,6 +414,7 @@ impl Version {
///
/// When the iterator yields no elements.
#[inline]
#[must_use]
pub fn with_release<I, R>(mut self, release_numbers: I) -> Self
where
I: IntoIterator<Item = R>,
@ -456,6 +459,7 @@ impl Version {
/// Set the epoch and return the updated version.
#[inline]
#[must_use]
pub fn with_epoch(mut self, value: u64) -> Self {
if let VersionInner::Small { ref mut small } = Arc::make_mut(&mut self.inner) {
if small.set_epoch(value) {
@ -468,6 +472,7 @@ impl Version {
/// Set the pre-release component and return the updated version.
#[inline]
#[must_use]
pub fn with_pre(mut self, value: Option<PreRelease>) -> Self {
if let VersionInner::Small { ref mut small } = Arc::make_mut(&mut self.inner) {
if small.set_pre(value) {
@ -480,6 +485,7 @@ impl Version {
/// Set the post-release component and return the updated version.
#[inline]
#[must_use]
pub fn with_post(mut self, value: Option<u64>) -> Self {
if let VersionInner::Small { ref mut small } = Arc::make_mut(&mut self.inner) {
if small.set_post(value) {
@ -492,6 +498,7 @@ impl Version {
/// Set the dev-release component and return the updated version.
#[inline]
#[must_use]
pub fn with_dev(mut self, value: Option<u64>) -> Self {
if let VersionInner::Small { ref mut small } = Arc::make_mut(&mut self.inner) {
if small.set_dev(value) {
@ -504,6 +511,7 @@ impl Version {
/// Set the local segments and return the updated version.
#[inline]
#[must_use]
pub fn with_local(mut self, value: Vec<LocalSegment>) -> Self {
if value.is_empty() {
self.without_local()
@ -518,6 +526,7 @@ impl Version {
/// and local version labels MUST be ignored entirely when checking if
/// candidate versions match a given version specifier."
#[inline]
#[must_use]
pub fn without_local(mut self) -> Self {
// A "small" version is already guaranteed not to have a local
// component, so we only need to do anything if we have a "full"
@ -534,6 +543,7 @@ impl Version {
/// The version `1.0min0` is smaller than all other `1.0` versions,
/// like `1.0a1`, `1.0dev0`, etc.
#[inline]
#[must_use]
pub fn with_min(mut self, value: Option<u64>) -> Self {
debug_assert!(!self.is_pre(), "min is not allowed on pre-release versions");
debug_assert!(!self.is_dev(), "min is not allowed on dev versions");
@ -552,6 +562,7 @@ impl Version {
/// The version `1.0max0` is larger than all other `1.0` versions,
/// like `1.0.post1`, `1.0+local`, etc.
#[inline]
#[must_use]
pub fn with_max(mut self, value: Option<u64>) -> Self {
debug_assert!(
!self.is_post(),
@ -680,7 +691,7 @@ impl std::fmt::Display for Version {
"+{}",
self.local()
.iter()
.map(std::string::ToString::to_string)
.map(ToString::to_string)
.collect::<Vec<String>>()
.join(".")
)
@ -779,7 +790,7 @@ impl FromStr for Version {
/// calendar versions, like `2023.03`, to be represented.)
/// * There is *at most* one of the following components: pre, dev or post.
/// * If there is a pre segment, then its numeric value is less than 64.
/// * If there is a dev or post segment, then its value is less than u8::MAX.
/// * If there is a dev or post segment, then its value is less than `u8::MAX`.
/// * There are zero "local" segments.
///
/// The above constraints were chosen as a balancing point between being able
@ -871,23 +882,25 @@ impl VersionSmall {
const SUFFIX_NONE: u64 = 5;
const SUFFIX_POST: u64 = 6;
const SUFFIX_MAX: u64 = 7;
const SUFFIX_MAX_VERSION: u64 = 0x1FFFFF;
const SUFFIX_MAX_VERSION: u64 = 0x001F_FFFF;
#[inline]
fn new() -> Self {
Self {
repr: 0x00000000_00A00000,
repr: 0x0000_0000_00A0_0000,
release: [0, 0, 0, 0],
len: 0,
}
}
#[inline]
#[allow(clippy::unused_self)]
fn epoch(&self) -> u64 {
0
}
#[inline]
#[allow(clippy::unused_self)]
fn set_epoch(&mut self, value: u64) -> bool {
if value != 0 {
return false;
@ -902,7 +915,7 @@ impl VersionSmall {
#[inline]
fn clear_release(&mut self) {
self.repr &= !0xFFFFFFFF_FF000000;
self.repr &= !0xFFFF_FFFF_FF00_0000;
self.release = [0, 0, 0, 0];
self.len = 0;
}
@ -1122,6 +1135,7 @@ impl VersionSmall {
}
#[inline]
#[allow(clippy::unused_self)]
fn local(&self) -> &[LocalSegment] {
// A "small" version is never used if the version has a non-zero number
// of local segments.
@ -1138,7 +1152,7 @@ impl VersionSmall {
#[inline]
fn set_suffix_kind(&mut self, kind: u64) {
debug_assert!(kind <= Self::SUFFIX_MAX);
self.repr &= !0xE00000;
self.repr &= !0x00E0_0000;
self.repr |= kind << 21;
if kind == Self::SUFFIX_NONE {
self.set_suffix_version(0);
@ -1147,13 +1161,13 @@ impl VersionSmall {
#[inline]
fn suffix_version(&self) -> u64 {
self.repr & 0x1FFFFF
self.repr & 0x001F_FFFF
}
#[inline]
fn set_suffix_version(&mut self, value: u64) {
debug_assert!(value <= 0x1FFFFF);
self.repr &= !0x1FFFFF;
debug_assert!(value <= 0x001F_FFFF);
self.repr &= !0x001F_FFFF;
self.repr |= value;
}
}
@ -1196,8 +1210,8 @@ struct VersionFull {
/// if any
dev: Option<u64>,
/// A [local version
/// identifier](https://peps.python.org/pep-0440/#local-version-identif
/// iers) such as `+deadbeef` in `1.2.3+deadbeef`
/// identifier](https://peps.python.org/pep-0440/#local-version-identifiers)
/// such as `+deadbeef` in `1.2.3+deadbeef`
///
/// > They consist of a normal public version identifier (as defined
/// > in the previous section), along with an arbitrary “local version
@ -1460,10 +1474,10 @@ impl<'a> Parser<'a> {
fn parse(self) -> Result<Version, VersionParseError> {
match self.parse_pattern() {
Ok(vpat) => {
if !vpat.is_wildcard() {
Ok(vpat.into_version())
} else {
if vpat.is_wildcard() {
Err(ErrorKind::Wildcard.into())
} else {
Ok(vpat.into_version())
}
}
// If we get an error when parsing a version pattern, then
@ -2456,7 +2470,7 @@ fn starts_with_ignore_ascii_case(needle: &[u8], haystack: &[u8]) -> bool {
/// # Motivation
///
/// We hand-write this for a couple reasons. Firstly, the standard library's
/// FromStr impl for parsing integers requires UTF-8 validation first. We
/// `FromStr` impl for parsing integers requires UTF-8 validation first. We
/// don't need that for version parsing since we stay in the realm of ASCII.
/// Secondly, std's version is a little more flexible because it supports
/// signed integers. So for example, it permits a leading `+` before the actual
@ -2647,19 +2661,19 @@ mod tests {
),
(
"1.2+123456",
Version::new([1, 2]).with_local(vec![LocalSegment::Number(123456)]),
Version::new([1, 2]).with_local(vec![LocalSegment::Number(123_456)]),
),
(
"1.2.r32+123456",
Version::new([1, 2])
.with_post(Some(32))
.with_local(vec![LocalSegment::Number(123456)]),
.with_local(vec![LocalSegment::Number(123_456)]),
),
(
"1.2.rev33+123456",
Version::new([1, 2])
.with_post(Some(33))
.with_local(vec![LocalSegment::Number(123456)]),
.with_local(vec![LocalSegment::Number(123_456)]),
),
// Explicit epoch of 1
(
@ -2848,28 +2862,28 @@ mod tests {
"1!1.2+123456",
Version::new([1, 2])
.with_epoch(1)
.with_local(vec![LocalSegment::Number(123456)]),
.with_local(vec![LocalSegment::Number(123_456)]),
),
(
"1!1.2.r32+123456",
Version::new([1, 2])
.with_epoch(1)
.with_post(Some(32))
.with_local(vec![LocalSegment::Number(123456)]),
.with_local(vec![LocalSegment::Number(123_456)]),
),
(
"1!1.2.rev33+123456",
Version::new([1, 2])
.with_epoch(1)
.with_post(Some(33))
.with_local(vec![LocalSegment::Number(123456)]),
.with_local(vec![LocalSegment::Number(123_456)]),
),
(
"98765!1.2.rev33+123456",
Version::new([1, 2])
.with_epoch(98765)
.with_post(Some(33))
.with_local(vec![LocalSegment::Number(123456)]),
.with_local(vec![LocalSegment::Number(123_456)]),
),
];
for (string, structured) in versions {
@ -3397,7 +3411,7 @@ mod tests {
assert_eq!(
p("5+18446744073709551615.abc"),
Version::new([5]).with_local(vec![
LocalSegment::Number(18446744073709551615),
LocalSegment::Number(18_446_744_073_709_551_615),
LocalSegment::String("abc".to_string()),
])
);
@ -3496,7 +3510,7 @@ mod tests {
assert_eq!(p(" \n5\n \t"), Version::new([5]));
// min tests
assert!(Parser::new("1.min0".as_bytes()).parse().is_err())
assert!(Parser::new("1.min0".as_bytes()).parse().is_err());
}
// Tests the error cases of our version parser.
@ -3626,7 +3640,7 @@ mod tests {
"1.1.dev1",
];
for (i, v1) in versions.iter().enumerate() {
for v2 in versions[i + 1..].iter() {
for v2 in &versions[i + 1..] {
let less = v1.parse::<Version>().unwrap();
let greater = v2.parse::<Version>().unwrap();
assert_eq!(
@ -3668,7 +3682,7 @@ mod tests {
"1.1.dev1",
];
for greater in versions.iter() {
for greater in versions {
let greater = greater.parse::<Version>().unwrap();
assert_eq!(
less.cmp(&greater),
@ -3707,7 +3721,7 @@ mod tests {
"1.0",
];
for less in versions.iter() {
for less in versions {
let less = less.parse::<Version>().unwrap();
assert_eq!(
less.cmp(&greater),
@ -3728,7 +3742,7 @@ mod tests {
let versions = &["1.0a1", "1.0a1+local", "1.0a1.post1"];
for less in versions.iter() {
for less in versions {
let less = less.parse::<Version>().unwrap();
assert_eq!(
less.cmp(&greater),
@ -3749,7 +3763,7 @@ mod tests {
let versions = &["1.0b1", "1.0b1+local", "1.0b1.post1", "1.0"];
for greater in versions.iter() {
for greater in versions {
let greater = greater.parse::<Version>().unwrap();
assert_eq!(
less.cmp(&greater),
@ -3771,9 +3785,12 @@ mod tests {
assert_eq!(p("01"), Ok(1));
assert_eq!(p("9"), Ok(9));
assert_eq!(p("10"), Ok(10));
assert_eq!(p("18446744073709551615"), Ok(18446744073709551615));
assert_eq!(p("018446744073709551615"), Ok(18446744073709551615));
assert_eq!(p("000000018446744073709551615"), Ok(18446744073709551615));
assert_eq!(p("18446744073709551615"), Ok(18_446_744_073_709_551_615));
assert_eq!(p("018446744073709551615"), Ok(18_446_744_073_709_551_615));
assert_eq!(
p("000000018446744073709551615"),
Ok(18_446_744_073_709_551_615)
);
assert_eq!(p("10a"), Err(ErrorKind::InvalidDigit { got: b'a' }.into()));
assert_eq!(p("10["), Err(ErrorKind::InvalidDigit { got: b'[' }.into()));

View file

@ -480,11 +480,11 @@ impl VersionSpecifier {
// "Except where specifically noted below, local version identifiers MUST NOT be permitted
// in version specifiers, and local version labels MUST be ignored entirely when checking
// if candidate versions match a given version specifier."
let (this, other) = if !self.version.local().is_empty() {
(self.version.clone(), version.clone())
} else {
let (this, other) = if self.version.local().is_empty() {
// self is already without local
(self.version.clone(), version.clone().without_local())
} else {
(self.version.clone(), version.clone())
};
match self.operator {
@ -650,7 +650,7 @@ impl std::fmt::Display for VersionSpecifierBuildError {
let local = version
.local()
.iter()
.map(|segment| segment.to_string())
.map(ToString::to_string)
.collect::<Vec<String>>()
.join(".");
write!(

View file

@ -16,13 +16,13 @@ authors = { workspace = true }
name = "pep508_rs"
crate-type = ["cdylib", "rlib"]
[dependencies]
pep440_rs = { workspace = true }
uv-fs = { workspace = true }
uv-normalize = { workspace = true }
[lints]
workspace = true
[dependencies]
derivative = { workspace = true }
once_cell = { workspace = true }
pep440_rs = { workspace = true }
pyo3 = { workspace = true, optional = true, features = ["abi3", "extension-module"] }
pyo3-log = { workspace = true, optional = true }
regex = { workspace = true }
@ -32,6 +32,8 @@ thiserror = { workspace = true }
tracing = { workspace = true, optional = true }
unicode-width = { workspace = true }
url = { workspace = true, features = ["serde"] }
uv-fs = { workspace = true }
uv-normalize = { workspace = true }
[dev-dependencies]
insta = { version = "1.36.1" }

View file

@ -35,7 +35,6 @@ use pyo3::{
};
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
use thiserror::Error;
use unicode_width::UnicodeWidthChar;
use url::Url;
use cursor::Cursor;
@ -95,7 +94,7 @@ impl<T: Pep508Url> Display for Pep508Error<T> {
// We can use char indices here since it's a Vec<char>
let start_offset = self.input[..self.start]
.chars()
.flat_map(|c| c.width())
.filter_map(unicode_width::UnicodeWidthChar::width)
.sum::<usize>();
let underline_len = if self.start == self.input.len() {
// We also allow 0 here for convenience
@ -108,7 +107,7 @@ impl<T: Pep508Url> Display for Pep508Error<T> {
} else {
self.input[self.start..self.start + self.len]
.chars()
.flat_map(|c| c.width())
.filter_map(unicode_width::UnicodeWidthChar::width)
.sum::<usize>()
};
write!(
@ -247,7 +246,7 @@ impl PyRequirement {
/// `requests [security,tests] >= 2.8.1, == 2.8.* ; python_version > "3.8"`
#[getter]
pub fn marker(&self) -> Option<String> {
self.marker.as_ref().map(std::string::ToString::to_string)
self.marker.as_ref().map(ToString::to_string)
}
/// Parses a PEP 440 string
@ -405,6 +404,7 @@ impl<T: Pep508Url> Requirement<T> {
///
/// For example, given `flask >= 2.0.2`, calling `with_extra_marker("dotenv")` would return
/// `flask >= 2.0.2 ; extra == "dotenv"`.
#[must_use]
pub fn with_extra_marker(self, extra: &ExtraName) -> Self {
let marker = match self.marker {
Some(expression) => MarkerTree::And(vec![
@ -463,7 +463,7 @@ where
F: FnMut(MarkerWarningKind, String),
{
fn report(&mut self, kind: MarkerWarningKind, warning: String) {
(self)(kind, warning)
(self)(kind, warning);
}
}
@ -471,10 +471,11 @@ where
pub struct TracingReporter;
impl Reporter for TracingReporter {
fn report(&mut self, _kind: MarkerWarningKind, _message: String) {
#[allow(unused_variables)]
fn report(&mut self, _kind: MarkerWarningKind, message: String) {
#[cfg(feature = "tracing")]
{
tracing::warn!("{}", _message);
tracing::warn!("{message}");
}
}
}

View file

@ -274,8 +274,7 @@ impl FromStr for MarkerOperator {
// ends with in
.and_then(|space_in| space_in.strip_suffix("in"))
// and has only whitespace in between
.map(|space| !space.is_empty() && space.trim().is_empty())
.unwrap_or_default() =>
.is_some_and(|space| !space.is_empty() && space.trim().is_empty()) =>
{
Self::NotIn
}
@ -547,6 +546,7 @@ impl MarkerEnvironment {
///
/// See also [`MarkerEnvironment::implementation_name`].
#[inline]
#[must_use]
pub fn with_implementation_name(mut self, value: impl Into<String>) -> MarkerEnvironment {
Arc::make_mut(&mut self.inner).implementation_name = value.into();
self
@ -556,6 +556,7 @@ impl MarkerEnvironment {
///
/// See also [`MarkerEnvironment::implementation_version`].
#[inline]
#[must_use]
pub fn with_implementation_version(
mut self,
value: impl Into<StringVersion>,
@ -568,6 +569,7 @@ impl MarkerEnvironment {
///
/// See also [`MarkerEnvironment::os_name`].
#[inline]
#[must_use]
pub fn with_os_name(mut self, value: impl Into<String>) -> MarkerEnvironment {
Arc::make_mut(&mut self.inner).os_name = value.into();
self
@ -577,6 +579,7 @@ impl MarkerEnvironment {
///
/// See also [`MarkerEnvironment::platform_machine`].
#[inline]
#[must_use]
pub fn with_platform_machine(mut self, value: impl Into<String>) -> MarkerEnvironment {
Arc::make_mut(&mut self.inner).platform_machine = value.into();
self
@ -587,6 +590,7 @@ impl MarkerEnvironment {
///
/// See also [`MarkerEnvironment::platform_python_implementation`].
#[inline]
#[must_use]
pub fn with_platform_python_implementation(
mut self,
value: impl Into<String>,
@ -599,6 +603,7 @@ impl MarkerEnvironment {
///
/// See also [`MarkerEnvironment::platform_release`].
#[inline]
#[must_use]
pub fn with_platform_release(mut self, value: impl Into<String>) -> MarkerEnvironment {
Arc::make_mut(&mut self.inner).platform_release = value.into();
self
@ -608,6 +613,7 @@ impl MarkerEnvironment {
///
/// See also [`MarkerEnvironment::platform_system`].
#[inline]
#[must_use]
pub fn with_platform_system(mut self, value: impl Into<String>) -> MarkerEnvironment {
Arc::make_mut(&mut self.inner).platform_system = value.into();
self
@ -617,6 +623,7 @@ impl MarkerEnvironment {
///
/// See also [`MarkerEnvironment::platform_version`].
#[inline]
#[must_use]
pub fn with_platform_version(mut self, value: impl Into<String>) -> MarkerEnvironment {
Arc::make_mut(&mut self.inner).platform_version = value.into();
self
@ -626,6 +633,7 @@ impl MarkerEnvironment {
///
/// See also [`MarkerEnvironment::python_full_version`].
#[inline]
#[must_use]
pub fn with_python_full_version(
mut self,
value: impl Into<StringVersion>,
@ -638,6 +646,7 @@ impl MarkerEnvironment {
///
/// See also [`MarkerEnvironment::python_full_version`].
#[inline]
#[must_use]
pub fn with_python_version(mut self, value: impl Into<StringVersion>) -> MarkerEnvironment {
Arc::make_mut(&mut self.inner).python_version = value.into();
self
@ -647,6 +656,7 @@ impl MarkerEnvironment {
///
/// See also [`MarkerEnvironment::sys_platform`].
#[inline]
#[must_use]
pub fn with_sys_platform(mut self, value: impl Into<String>) -> MarkerEnvironment {
Arc::make_mut(&mut self.inner).sys_platform = value.into();
self
@ -1004,9 +1014,8 @@ impl MarkerExpression {
reporter.report(
MarkerWarningKind::Pep440Error,
format!(
"Expected double quoted PEP 440 version to compare with {}, found {},
will evaluate to false",
key, r_value
"Expected double quoted PEP 440 version to compare with {key}, found {r_value},
will evaluate to false"
),
);
@ -1166,8 +1175,7 @@ impl MarkerExpression {
reporter.report(
MarkerWarningKind::Pep440Error,
format!(
"Expected PEP 440 version to compare with {}, found {}, will evaluate to false: {}",
key, value, err
"Expected PEP 440 version to compare with {key}, found {value}, will evaluate to false: {err}"
),
);
@ -1219,8 +1227,7 @@ impl MarkerExpression {
reporter.report(
MarkerWarningKind::Pep440Error,
format!(
"Expected PEP 440 version to compare with {}, found {}, will evaluate to false: {}",
key, value, err
"Expected PEP 440 version to compare with {key}, found {value}, will evaluate to false: {err}"
),
);
@ -1232,9 +1239,8 @@ impl MarkerExpression {
reporter.report(
MarkerWarningKind::Pep440Error,
format!(
"Expected PEP 440 version operator to compare {} with '{}',
found '{}', will evaluate to false",
key, version, marker_operator
"Expected PEP 440 version operator to compare {key} with '{version}',
found '{marker_operator}', will evaluate to false"
),
);
@ -1267,9 +1273,9 @@ impl MarkerExpression {
}
};
match ExtraOperator::from_marker_operator(operator) {
Some(operator) => Some(MarkerExpression::Extra { operator, name }),
None => {
if let Some(operator) = ExtraOperator::from_marker_operator(operator) {
Some(MarkerExpression::Extra { operator, name })
} else {
reporter.report(
MarkerWarningKind::ExtraInvalidComparison,
"Comparing extra with something other than a quoted string is wrong,
@ -1279,7 +1285,6 @@ impl MarkerExpression {
None
}
}
}
/// Evaluate a <`marker_value`> <`marker_op`> <`marker_value`> expression
///
@ -1324,7 +1329,7 @@ impl MarkerExpression {
} => env
.map(|env| {
let l_string = env.get_string(key);
self.compare_strings(l_string, operator, value, reporter)
Self::compare_strings(l_string, *operator, value, reporter)
})
.unwrap_or(true),
MarkerExpression::StringInverted {
@ -1334,7 +1339,7 @@ impl MarkerExpression {
} => env
.map(|env| {
let r_string = env.get_string(key);
self.compare_strings(value, operator, r_string, reporter)
Self::compare_strings(value, *operator, r_string, reporter)
})
.unwrap_or(true),
MarkerExpression::Extra {
@ -1424,9 +1429,8 @@ impl MarkerExpression {
/// Compare strings by PEP 508 logic, with warnings
fn compare_strings(
&self,
l_string: &str,
operator: &MarkerOperator,
operator: MarkerOperator,
r_string: &str,
reporter: &mut impl Reporter,
) -> bool {
@ -1857,7 +1861,7 @@ impl MarkerTree {
let this = std::mem::replace(self, MarkerTree::And(vec![]));
*self = MarkerTree::And(vec![this]);
}
_ => {}
MarkerTree::And(_) => {}
}
if let MarkerTree::And(ref mut exprs) = *self {
if let MarkerTree::And(tree) = tree {
@ -1879,7 +1883,7 @@ impl MarkerTree {
let this = std::mem::replace(self, MarkerTree::And(vec![]));
*self = MarkerTree::Or(vec![this]);
}
_ => {}
MarkerTree::Or(_) => {}
}
if let MarkerTree::Or(ref mut exprs) = *self {
if let MarkerTree::Or(tree) = tree {
@ -1928,7 +1932,7 @@ impl Display for MarkerTree {
fn parse_marker_operator<T: Pep508Url>(
cursor: &mut Cursor,
) -> Result<MarkerOperator, Pep508Error<T>> {
let (start, len) = if cursor.peek_char().is_some_and(|c| c.is_alphabetic()) {
let (start, len) = if cursor.peek_char().is_some_and(char::is_alphabetic) {
// "in" or "not"
cursor.take_while(|char| !char.is_whitespace() && char != '\'' && char != '"')
} else {
@ -2301,7 +2305,7 @@ mod test {
assert_eq!(captured_logs[0].level, log::Level::Warn);
assert_eq!(captured_logs.len(), 1);
});
let string_string = MarkerTree::from_str(r#"os.name == 'posix' and platform.machine == 'x86_64' and platform.python_implementation == 'CPython' and 'Ubuntu' in platform.version and sys.platform == 'linux'"#).unwrap();
let string_string = MarkerTree::from_str(r"os.name == 'posix' and platform.machine == 'x86_64' and platform.python_implementation == 'CPython' and 'Ubuntu' in platform.version and sys.platform == 'linux'").unwrap();
string_string.evaluate(&env37, &[]);
testing_logger::validate(|captured_logs| {
let messages: Vec<_> = captured_logs

View file

@ -137,7 +137,7 @@ impl<Url: UnnamedRequirementUrl> Display for UnnamedRequirement<Url> {
)?;
}
if let Some(marker) = &self.marker {
write!(f, " ; {}", marker)?;
write!(f, " ; {marker}")?;
}
Ok(())
}

View file

@ -50,7 +50,7 @@ impl VerbatimUrl {
// Convert to a URL.
let mut url = Url::from_file_path(path.clone())
.map_err(|_| VerbatimUrlError::UrlConversion(path.to_path_buf()))?;
.map_err(|()| VerbatimUrlError::UrlConversion(path.to_path_buf()))?;
// Set the fragment, if it exists.
if let Some(fragment) = fragment {
@ -84,14 +84,14 @@ impl VerbatimUrl {
// Normalize the path.
let path = normalize_path(&path)
.map_err(|err| VerbatimUrlError::Normalization(path.to_path_buf(), err))?;
.map_err(|err| VerbatimUrlError::Normalization(path.clone(), err))?;
// Extract the fragment, if it exists.
let (path, fragment) = split_fragment(&path);
// Convert to a URL.
let mut url = Url::from_file_path(path.clone())
.map_err(|_| VerbatimUrlError::UrlConversion(path.to_path_buf()))?;
.map_err(|()| VerbatimUrlError::UrlConversion(path.to_path_buf()))?;
// Set the fragment, if it exists.
if let Some(fragment) = fragment {
@ -122,7 +122,7 @@ impl VerbatimUrl {
// Convert to a URL.
let mut url = Url::from_file_path(path.clone())
.unwrap_or_else(|_| panic!("path is absolute: {}", path.display()));
.unwrap_or_else(|()| panic!("path is absolute: {}", path.display()));
// Set the fragment, if it exists.
if let Some(fragment) = fragment {
@ -160,7 +160,7 @@ impl VerbatimUrl {
pub fn as_path(&self) -> Result<PathBuf, VerbatimUrlError> {
self.url
.to_file_path()
.map_err(|_| VerbatimUrlError::UrlConversion(self.url.to_file_path().unwrap()))
.map_err(|()| VerbatimUrlError::UrlConversion(self.url.to_file_path().unwrap()))
}
}

View file

@ -3,6 +3,9 @@ name = "uv-auth"
version = "0.0.1"
edition = "2021"
[lints]
workspace = true
[dependencies]
anyhow = { workspace = true }
async-trait = { workspace = true }

View file

@ -64,7 +64,7 @@ impl CredentialsCache {
/// Note we do not cache per username, but if a username is passed we will confirm that the
/// cached credentials have a username equal to the provided one — otherwise `None` is returned.
/// If multiple usernames are used per URL, the realm cache should be queried instead.
pub(crate) fn get_url(&self, url: &Url, username: Username) -> Option<Arc<Credentials>> {
pub(crate) fn get_url(&self, url: &Url, username: &Username) -> Option<Arc<Credentials>> {
let urls = self.urls.read().unwrap();
let credentials = urls.get(url);
if let Some(credentials) = credentials {
@ -93,15 +93,15 @@ impl CredentialsCache {
let username = credentials.to_username();
if username.is_some() {
let realm = (Realm::from(url), username.clone());
self.insert_realm(realm, credentials.clone());
self.insert_realm(realm, &credentials);
}
// Insert an entry for requests with no username
self.insert_realm((Realm::from(url), Username::none()), credentials.clone());
self.insert_realm((Realm::from(url), Username::none()), &credentials);
// Insert an entry for the URL
let mut urls = self.urls.write().unwrap();
urls.insert(url.clone(), credentials.clone());
urls.insert(url, credentials);
}
/// Private interface to update a realm cache entry.
@ -110,7 +110,7 @@ impl CredentialsCache {
fn insert_realm(
&self,
key: (Realm, Username),
credentials: Arc<Credentials>,
credentials: &Arc<Credentials>,
) -> Option<Arc<Credentials>> {
// Do not cache empty credentials
if credentials.is_empty() {
@ -169,9 +169,9 @@ impl UrlTrie {
self.states[state].value.as_ref()
}
fn insert(&mut self, url: Url, value: Arc<Credentials>) {
fn insert(&mut self, url: &Url, value: Arc<Credentials>) {
let mut state = 0;
let realm = Realm::from(&url).to_string();
let realm = Realm::from(url).to_string();
for component in [realm.as_str()]
.into_iter()
.chain(url.path_segments().unwrap().filter(|item| !item.is_empty()))
@ -234,19 +234,19 @@ mod tests {
let mut trie = UrlTrie::new();
trie.insert(
Url::parse("https://burntsushi.net").unwrap(),
&Url::parse("https://burntsushi.net").unwrap(),
credentials1.clone(),
);
trie.insert(
Url::parse("https://astral.sh").unwrap(),
&Url::parse("https://astral.sh").unwrap(),
credentials2.clone(),
);
trie.insert(
Url::parse("https://example.com/foo").unwrap(),
&Url::parse("https://example.com/foo").unwrap(),
credentials3.clone(),
);
trie.insert(
Url::parse("https://example.com/bar").unwrap(),
&Url::parse("https://example.com/bar").unwrap(),
credentials4.clone(),
);

View file

@ -24,7 +24,7 @@ impl Username {
/// Create a new username.
///
    /// Unlike `reqwest`, empty usernames are encoded as `None` instead of an empty string.
pub fn new(value: Option<String>) -> Self {
pub(crate) fn new(value: Option<String>) -> Self {
// Ensure empty strings are `None`
if let Some(value) = value {
if value.is_empty() {
@ -37,19 +37,19 @@ impl Username {
}
}
pub fn none() -> Self {
pub(crate) fn none() -> Self {
Self::new(None)
}
pub fn is_none(&self) -> bool {
pub(crate) fn is_none(&self) -> bool {
self.0.is_none()
}
pub fn is_some(&self) -> bool {
pub(crate) fn is_some(&self) -> bool {
self.0.is_some()
}
pub fn as_deref(&self) -> Option<&str> {
pub(crate) fn as_deref(&self) -> Option<&str> {
self.0.as_deref()
}
}
@ -67,33 +67,33 @@ impl From<Option<String>> for Username {
}
impl Credentials {
pub fn new(username: Option<String>, password: Option<String>) -> Self {
pub(crate) fn new(username: Option<String>, password: Option<String>) -> Self {
Self {
username: Username::new(username),
password,
}
}
pub fn username(&self) -> Option<&str> {
pub(crate) fn username(&self) -> Option<&str> {
self.username.as_deref()
}
pub fn to_username(&self) -> Username {
pub(crate) fn to_username(&self) -> Username {
self.username.clone()
}
pub fn password(&self) -> Option<&str> {
pub(crate) fn password(&self) -> Option<&str> {
self.password.as_deref()
}
pub fn is_empty(&self) -> bool {
pub(crate) fn is_empty(&self) -> bool {
self.password.is_none() && self.username.is_none()
}
/// Return [`Credentials`] for a [`Url`] from a [`Netrc`] file, if any.
///
/// If a username is provided, it must match the login in the netrc file or [`None`] is returned.
pub fn from_netrc(netrc: &Netrc, url: &Url, username: Option<&str>) -> Option<Self> {
pub(crate) fn from_netrc(netrc: &Netrc, url: &Url, username: Option<&str>) -> Option<Self> {
let host = url.host_str()?;
let entry = netrc
.hosts
@ -114,7 +114,7 @@ impl Credentials {
/// Parse [`Credentials`] from a URL, if any.
///
/// Returns [`None`] if both [`Url::username`] and [`Url::password`] are not populated.
pub fn from_url(url: &Url) -> Option<Self> {
pub(crate) fn from_url(url: &Url) -> Option<Self> {
if url.username().is_empty() && url.password().is_none() {
return None;
}
@ -142,7 +142,7 @@ impl Credentials {
/// Parse [`Credentials`] from an HTTP request, if any.
///
/// Only HTTP Basic Authentication is supported.
pub fn from_request(request: &Request) -> Option<Self> {
pub(crate) fn from_request(request: &Request) -> Option<Self> {
// First, attempt to retrieve the credentials from the URL
Self::from_url(request.url()).or(
// Then, attempt to pull the credentials from the headers
@ -195,7 +195,7 @@ impl Credentials {
write!(encoder, "{}:", self.username().unwrap_or_default())
.expect("Write to base64 encoder should succeed");
if let Some(password) = self.password() {
write!(encoder, "{}", password).expect("Write to base64 encoder should succeed");
write!(encoder, "{password}").expect("Write to base64 encoder should succeed");
}
}
let mut header = HeaderValue::from_bytes(&buf).expect("base64 is always valid HeaderValue");
@ -207,7 +207,7 @@ impl Credentials {
///
/// Any existing credentials will be overridden.
#[must_use]
pub fn authenticate(&self, mut request: reqwest::Request) -> reqwest::Request {
pub(crate) fn authenticate(&self, mut request: reqwest::Request) -> reqwest::Request {
request
.headers_mut()
.insert(reqwest::header::AUTHORIZATION, Self::to_header_value(self));

View file

@ -15,7 +15,7 @@ pub struct KeyringProvider {
}
#[derive(Debug)]
pub enum KeyringProviderBackend {
pub(crate) enum KeyringProviderBackend {
/// Use the `keyring` command to fetch credentials.
Subprocess,
#[cfg(test)]
@ -59,7 +59,7 @@ impl KeyringProvider {
}
#[cfg(test)]
KeyringProviderBackend::Dummy(ref store) => {
self.fetch_dummy(store, url.as_str(), username)
Self::fetch_dummy(store, url.as_str(), username)
}
};
// And fallback to a check for the host
@ -74,7 +74,7 @@ impl KeyringProvider {
KeyringProviderBackend::Subprocess => self.fetch_subprocess(&host, username).await,
#[cfg(test)]
KeyringProviderBackend::Dummy(ref store) => {
self.fetch_dummy(store, &host, username)
Self::fetch_dummy(store, &host, username)
}
};
}
@ -116,14 +116,13 @@ impl KeyringProvider {
#[cfg(test)]
fn fetch_dummy(
&self,
store: &std::collections::HashMap<(String, &'static str), &'static str>,
service_name: &str,
username: &str,
) -> Option<String> {
store
.get(&(service_name.to_string(), username))
.map(|password| password.to_string())
.map(|password| (*password).to_string())
}
/// Create a new provider with [`KeyringProviderBackend::Dummy`].
@ -131,13 +130,12 @@ impl KeyringProvider {
pub fn dummy<S: Into<String>, T: IntoIterator<Item = ((S, &'static str), &'static str)>>(
iter: T,
) -> Self {
use std::collections::HashMap;
Self {
backend: KeyringProviderBackend::Dummy(HashMap::from_iter(
backend: KeyringProviderBackend::Dummy(
iter.into_iter()
.map(|((service, username), password)| ((service.into(), username), password)),
)),
.map(|((service, username), password)| ((service.into(), username), password))
.collect(),
),
}
}

View file

@ -180,7 +180,7 @@ impl Middleware for AuthMiddleware {
trace!("Request for {url} is unauthenticated, checking cache");
// Check the cache for a URL match
let credentials = self.cache().get_url(request.url(), Username::none());
let credentials = self.cache().get_url(request.url(), &Username::none());
if let Some(credentials) = credentials.as_ref() {
request = credentials.authenticate(request);
if credentials.password().is_some() {
@ -287,7 +287,7 @@ impl AuthMiddleware {
.is_ok_and(|response| response.error_for_status_ref().is_ok())
{
trace!("Updating cached credentials for {url} to {credentials:?}");
self.cache().insert(&url, credentials)
self.cache().insert(&url, credentials);
};
result
@ -346,16 +346,15 @@ impl AuthMiddleware {
// implementation returns different credentials for different URLs in the
// same realm we will use the wrong credentials.
} else if let Some(credentials) = match self.keyring {
Some(ref keyring) => match credentials.and_then(|credentials| credentials.username()) {
Some(username) => {
Some(ref keyring) => {
if let Some(username) = credentials.and_then(|credentials| credentials.username()) {
debug!("Checking keyring for credentials for {username}@{url}");
keyring.fetch(url, username).await
}
None => {
} else {
debug!("Skipping keyring lookup for {url} with no username");
None
}
},
}
None => None,
} {
debug!("Found credentials in keyring for {url}");
@ -1065,12 +1064,12 @@ mod tests {
);
assert_eq!(
client.get(format!("{}/foo", url_1)).send().await?.status(),
client.get(format!("{url_1}/foo")).send().await?.status(),
200,
"Requests can be to different paths in the same realm"
);
assert_eq!(
client.get(format!("{}/foo", url_2)).send().await?.status(),
client.get(format!("{url_2}/foo")).send().await?.status(),
200,
"Requests can be to different paths in the same realm"
);

View file

@ -3,6 +3,9 @@ name = "uv-client"
version = "0.0.1"
edition = "2021"
[lints]
workspace = true
[dependencies]
cache-key = { workspace = true }
distribution-filename = { workspace = true }

View file

@ -113,7 +113,7 @@ impl<'a> BaseClientBuilder<'a> {
if let Some(markers) = self.markers {
let linehaul = LineHaul::new(markers, self.platform);
if let Ok(output) = serde_json::to_string(&linehaul) {
user_agent_string += &format!(" {}", output);
user_agent_string += &format!(" {output}");
}
}
@ -267,7 +267,7 @@ impl RetryableStrategy for LoggingRetryableStrategy {
.join("\n");
debug!(
"Transient request failure for {}, retrying: {err}\n{context}",
err.url().map(|url| url.as_str()).unwrap_or("unknown URL")
err.url().map(reqwest::Url::as_str).unwrap_or("unknown URL")
);
}
}

View file

@ -50,7 +50,7 @@ pub trait Cacheable: Sized {
/// implement `Cacheable`.
#[derive(Debug, Deserialize, Serialize)]
#[serde(transparent)]
pub struct SerdeCacheable<T> {
pub(crate) struct SerdeCacheable<T> {
inner: T,
}
@ -228,20 +228,17 @@ impl CachedClient {
CallbackReturn: Future<Output = Result<Payload, CallBackError>>,
{
let fresh_req = req.try_clone().expect("HTTP request must be cloneable");
let cached_response = match Self::read_cache(cache_entry).await {
Some(cached) => {
let cached_response = if let Some(cached) = Self::read_cache(cache_entry).await {
self.send_cached(req, cache_control, cached)
.boxed_local()
.await?
}
None => {
} else {
debug!("No cache entry for: {}", req.url());
let (response, cache_policy) = self.fresh_request(req).await?;
CachedResponse::ModifiedOrNew {
response,
cache_policy,
}
}
};
match cached_response {
CachedResponse::FreshCache(cached) => match Payload::from_aligned_bytes(cached.data) {

View file

@ -929,7 +929,8 @@ mod tests {
}
/// Test for AWS Code Artifact
/// From https://github.com/astral-sh/uv/issues/1388#issuecomment-1947659088
///
/// See: <https://github.com/astral-sh/uv/issues/1388#issuecomment-1947659088>
#[test]
fn parse_code_artifact_index_html() {
let text = r#"

View file

@ -22,43 +22,44 @@ use crate::rkyvutil::OwnedArchive;
)]
#[archive(check_bytes)]
#[archive_attr(derive(Debug))]
#[allow(clippy::struct_excessive_bools)]
pub struct CacheControl {
// directives for requests and responses
/// * https://www.rfc-editor.org/rfc/rfc9111.html#name-max-age
/// * https://www.rfc-editor.org/rfc/rfc9111.html#name-max-age-2
/// * <https://www.rfc-editor.org/rfc/rfc9111.html#name-max-age>
/// * <https://www.rfc-editor.org/rfc/rfc9111.html#name-max-age-2>
pub max_age_seconds: Option<u64>,
/// * https://www.rfc-editor.org/rfc/rfc9111.html#name-no-cache
/// * https://www.rfc-editor.org/rfc/rfc9111.html#name-no-cache-2
/// * <https://www.rfc-editor.org/rfc/rfc9111.html#name-no-cache>
/// * <https://www.rfc-editor.org/rfc/rfc9111.html#name-no-cache-2>
pub no_cache: bool,
/// * https://www.rfc-editor.org/rfc/rfc9111.html#name-no-store
/// * https://www.rfc-editor.org/rfc/rfc9111.html#name-no-store-2
/// * <https://www.rfc-editor.org/rfc/rfc9111.html#name-no-store>
/// * <https://www.rfc-editor.org/rfc/rfc9111.html#name-no-store-2>
pub no_store: bool,
/// * https://www.rfc-editor.org/rfc/rfc9111.html#name-no-transform
/// * https://www.rfc-editor.org/rfc/rfc9111.html#name-no-transform-2
/// * <https://www.rfc-editor.org/rfc/rfc9111.html#name-no-transform>
/// * <https://www.rfc-editor.org/rfc/rfc9111.html#name-no-transform-2>
pub no_transform: bool,
// request-only directives
/// https://www.rfc-editor.org/rfc/rfc9111.html#name-max-stale
/// <https://www.rfc-editor.org/rfc/rfc9111.html#name-max-stale>
pub max_stale_seconds: Option<u64>,
/// https://www.rfc-editor.org/rfc/rfc9111.html#name-min-fresh
/// <https://www.rfc-editor.org/rfc/rfc9111.html#name-min-fresh>
pub min_fresh_seconds: Option<u64>,
// response-only directives
/// https://www.rfc-editor.org/rfc/rfc9111.html#name-only-if-cached
/// <https://www.rfc-editor.org/rfc/rfc9111.html#name-only-if-cached>
pub only_if_cached: bool,
/// https://www.rfc-editor.org/rfc/rfc9111.html#name-must-revalidate
/// <https://www.rfc-editor.org/rfc/rfc9111.html#name-must-revalidate>
pub must_revalidate: bool,
/// https://www.rfc-editor.org/rfc/rfc9111.html#name-must-understand
/// <https://www.rfc-editor.org/rfc/rfc9111.html#name-must-understand>
pub must_understand: bool,
/// https://www.rfc-editor.org/rfc/rfc9111.html#name-private
/// <https://www.rfc-editor.org/rfc/rfc9111.html#name-private>
pub private: bool,
/// https://www.rfc-editor.org/rfc/rfc9111.html#name-proxy-revalidate
/// <https://www.rfc-editor.org/rfc/rfc9111.html#name-proxy-revalidate>
pub proxy_revalidate: bool,
/// https://www.rfc-editor.org/rfc/rfc9111.html#name-public
/// <https://www.rfc-editor.org/rfc/rfc9111.html#name-public>
pub public: bool,
/// https://www.rfc-editor.org/rfc/rfc9111.html#name-s-maxage
/// <https://www.rfc-editor.org/rfc/rfc9111.html#name-s-maxage>
pub s_maxage_seconds: Option<u64>,
/// https://httpwg.org/specs/rfc8246.html
/// <https://httpwg.org/specs/rfc8246.html>
pub immutable: bool,
}
@ -72,7 +73,7 @@ impl CacheControl {
impl<'b, B: 'b + ?Sized + AsRef<[u8]>> FromIterator<&'b B> for CacheControl {
fn from_iter<T: IntoIterator<Item = &'b B>>(it: T) -> Self {
Self::from_iter(CacheControlParser::new(it))
CacheControlParser::new(it).collect()
}
}
@ -182,7 +183,10 @@ impl<'b, B: 'b + ?Sized + AsRef<[u8]>, I: Iterator<Item = &'b B>> CacheControlPa
/// given iterator should yield elements that satisfy `AsRef<[u8]>`.
fn new<II: IntoIterator<IntoIter = I>>(headers: II) -> CacheControlParser<'b, I> {
let mut directives = headers.into_iter();
let cur = directives.next().map(|h| h.as_ref()).unwrap_or(b"");
let cur = directives
.next()
.map(std::convert::AsRef::as_ref)
.unwrap_or(b"");
CacheControlParser {
cur,
directives,
@ -262,7 +266,7 @@ impl<'b, B: 'b + ?Sized + AsRef<[u8]>, I: Iterator<Item = &'b B>> CacheControlPa
self.cur = &self.cur[1..];
self.parse_quoted_string()
} else {
self.parse_token().map(|s| s.into_bytes())
self.parse_token().map(std::string::String::into_bytes)
}
}
@ -368,7 +372,7 @@ impl<'b, B: 'b + ?Sized + AsRef<[u8]>, I: Iterator<Item = &'b B>> Iterator
fn next(&mut self) -> Option<CacheControlDirective> {
loop {
if self.cur.is_empty() {
self.cur = self.directives.next().map(|h| h.as_ref())?;
self.cur = self.directives.next().map(std::convert::AsRef::as_ref)?;
}
while !self.cur.is_empty() {
self.skip_whitespace();

View file

@ -122,17 +122,17 @@ actually need to make an HTTP request).
# Additional reading
* Short introduction to `Cache-Control`: https://csswizardry.com/2019/03/cache-control-for-civilians/
* Caching best practcies: https://jakearchibald.com/2016/caching-best-practices/
* Overview of HTTP caching: https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching
* MDN docs for `Cache-Control`: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control
* THe 1997 RFC for HTTP 1.1: https://www.rfc-editor.org/rfc/rfc2068#section-13
* The 1999 update to HTTP 1.1: https://www.rfc-editor.org/rfc/rfc2616.html#section-13
* The "stale content" cache-control extension: https://httpwg.org/specs/rfc5861.html
* HTTP 1.1 caching (superseded by RFC 9111): https://httpwg.org/specs/rfc7234.html
* The "immutable" cache-control extension: https://httpwg.org/specs/rfc8246.html
* HTTP semantics (If-None-Match, etc.): https://www.rfc-editor.org/rfc/rfc9110#section-8.8.3
* HTTP caching (obsoletes RFC 7234): https://www.rfc-editor.org/rfc/rfc9111.html
* Short introduction to `Cache-Control`: <https://csswizardry.com/2019/03/cache-control-for-civilians/>
* Caching best practices: <https://jakearchibald.com/2016/caching-best-practices/>
* Overview of HTTP caching: <https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching>
* MDN docs for `Cache-Control`: <https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control>
* The 1997 RFC for HTTP 1.1: <https://www.rfc-editor.org/rfc/rfc2068#section-13>
* The 1999 update to HTTP 1.1: <https://www.rfc-editor.org/rfc/rfc2616.html#section-13>
* The "stale content" cache-control extension: <https://httpwg.org/specs/rfc5861.html>
* HTTP 1.1 caching (superseded by RFC 9111): <https://httpwg.org/specs/rfc7234.html>
* The "immutable" cache-control extension: <https://httpwg.org/specs/rfc8246.html>
* HTTP semantics (If-None-Match, etc.): <https://www.rfc-editor.org/rfc/rfc9110#section-8.8.3>
* HTTP caching (obsoletes RFC 7234): <https://www.rfc-editor.org/rfc/rfc9111.html>
*/
use std::time::{Duration, SystemTime};
@ -1193,7 +1193,7 @@ impl<'a> From<&'a http::HeaderMap> for ResponseHeaders {
#[archive(check_bytes)]
#[archive_attr(derive(Debug))]
struct ETag {
/// The actual ETag validator value.
/// The actual `ETag` validator value.
///
/// This is received in the response, recorded as part of the cache policy
/// and then sent back in a re-validation request. This is the "best"
@ -1219,7 +1219,7 @@ struct ETag {
}
impl ETag {
/// Parses an ETag from a header value.
/// Parses an `ETag` from a header value.
///
/// We are a little permissive here and allow arbitrary bytes,
/// where as [RFC 9110 S8.8.3] is a bit more restrictive.

View file

@ -68,7 +68,7 @@ impl LineHaul {
.iter()
.find_map(|&var_name| env::var(var_name).ok().map(|_| true));
let libc = match platform.map(|platform| platform.os()) {
let libc = match platform.map(platform_tags::Platform::os) {
Some(Os::Manylinux { major, minor }) => Some(Libc {
lib: Some("glibc".to_string()),
version: Some(format!("{major}.{minor}")),
@ -94,7 +94,7 @@ impl LineHaul {
libc,
})
} else if cfg!(target_os = "macos") {
let version = match platform.map(|platform| platform.os()) {
let version = match platform.map(platform_tags::Platform::os) {
Some(Os::Macos { major, minor }) => Some(format!("{major}.{minor}")),
_ => None,
};

View file

@ -13,7 +13,7 @@ pub(crate) struct OfflineError {
impl OfflineError {
/// Returns the URL that caused the error.
pub fn url(&self) -> &Url {
pub(crate) fn url(&self) -> &Url {
&self.url
}
}

View file

@ -130,15 +130,15 @@ impl<'a> RegistryClientBuilder<'a> {
let mut builder = BaseClientBuilder::new();
if let Some(client) = self.client {
builder = builder.client(client)
builder = builder.client(client);
}
if let Some(markers) = self.markers {
builder = builder.markers(markers)
builder = builder.markers(markers);
}
if let Some(platform) = self.platform {
builder = builder.platform(platform)
builder = builder.platform(platform);
}
let client = builder
@ -380,7 +380,7 @@ impl RegistryClient {
) -> Result<OwnedArchive<SimpleMetadata>, Error> {
let path = url
.to_file_path()
.map_err(|_| ErrorKind::NonFileUrl(url.clone()))?
.map_err(|()| ErrorKind::NonFileUrl(url.clone()))?
.join("index.html");
let text = fs_err::tokio::read_to_string(&path)
.await
@ -416,7 +416,7 @@ impl RegistryClient {
if url.scheme() == "file" {
let path = url
.to_file_path()
.map_err(|_| ErrorKind::NonFileUrl(url.clone()))?;
.map_err(|()| ErrorKind::NonFileUrl(url.clone()))?;
WheelLocation::Path(path)
} else {
WheelLocation::Url(url)
@ -427,7 +427,7 @@ impl RegistryClient {
if url.scheme() == "file" {
let path = url
.to_file_path()
.map_err(|_| ErrorKind::NonFileUrl(url.clone()))?;
.map_err(|()| ErrorKind::NonFileUrl(url.clone()))?;
WheelLocation::Path(path)
} else {
WheelLocation::Url(url)
@ -769,7 +769,7 @@ impl VersionFiles {
match filename {
DistFilename::WheelFilename(name) => self.wheels.push(VersionWheel { name, file }),
DistFilename::SourceDistFilename(name) => {
self.source_dists.push(VersionSourceDist { name, file })
self.source_dists.push(VersionSourceDist { name, file });
}
}
}
@ -990,7 +990,8 @@ mod tests {
}
/// Test for AWS Code Artifact registry
/// Regression coverage of https://github.com/astral-sh/uv/issues/1388
///
/// See: <https://github.com/astral-sh/uv/issues/1388>
#[test]
fn relative_urls_code_artifact() -> Result<(), JoinRelativeError> {
let text = r#"
@ -1021,7 +1022,7 @@ mod tests {
.iter()
.map(|file| pypi_types::base_url_join_relative(base.as_url().as_str(), &file.url))
.collect::<Result<Vec<_>, JoinRelativeError>>()?;
let urls = urls.iter().map(|url| url.as_str()).collect::<Vec<_>>();
let urls = urls.iter().map(reqwest::Url::as_str).collect::<Vec<_>>();
insta::assert_debug_snapshot!(urls, @r###"
[
"https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/0.1/Flask-0.1.tar.gz#sha256=9da884457e910bf0847d396cb4b778ad9f3c3d17db1c5997cb861937bd284237",

View file

@ -172,7 +172,10 @@ where
// archive for SimpleMetadata in the constructor, so we can skip
// validation here. Since we don't mutate the buffer, this conversion
// is guaranteed to be correct.
unsafe { rkyv::archived_root::<A>(&self.raw) }
#[allow(unsafe_code)]
unsafe {
rkyv::archived_root::<A>(&self.raw)
}
}
}
@ -230,6 +233,7 @@ impl<const N: usize> rkyv::ser::Serializer for Serializer<N> {
}
#[inline]
#[allow(unsafe_code)]
unsafe fn resolve_aligned<T: Archive + ?Sized>(
&mut self,
value: &T,
@ -241,6 +245,7 @@ impl<const N: usize> rkyv::ser::Serializer for Serializer<N> {
}
#[inline]
#[allow(unsafe_code)]
unsafe fn resolve_unsized_aligned<T: ArchiveUnsized + ?Sized>(
&mut self,
value: &T,
@ -255,6 +260,7 @@ impl<const N: usize> rkyv::ser::Serializer for Serializer<N> {
impl<const N: usize> rkyv::ser::ScratchSpace for Serializer<N> {
#[inline]
#[allow(unsafe_code)]
unsafe fn push_scratch(
&mut self,
layout: std::alloc::Layout,
@ -265,6 +271,7 @@ impl<const N: usize> rkyv::ser::ScratchSpace for Serializer<N> {
}
#[inline]
#[allow(unsafe_code)]
unsafe fn pop_scratch(
&mut self,
ptr: std::ptr::NonNull<u8>,
@ -325,7 +332,7 @@ impl std::error::Error for SerializerError {
///
/// > Regular serializers dont support the custom error handling needed for
/// > this type by default. To use this wrapper, a custom serializer with an
/// > error type satisfying <S as Fallible>::Error: From<AsStringError> must be
/// > error type satisfying `<S as Fallible>::Error: From<AsStringError>` must be
/// > provided.
///
/// If we didn't need to use `rkyv::with::AsString` (which we do for

View file

@ -31,7 +31,7 @@ async fn test_user_agent_has_version() -> Result<()> {
.headers()
.get(USER_AGENT)
.and_then(|v| v.to_str().ok())
.map(|s| s.to_string())
.map(ToString::to_string)
.unwrap_or_default(); // Empty Default
future::ok::<_, hyper::Error>(Response::new(Full::new(Bytes::from(user_agent))))
});
@ -89,7 +89,7 @@ async fn test_user_agent_has_linehaul() -> Result<()> {
.headers()
.get(USER_AGENT)
.and_then(|v| v.to_str().ok())
.map(|s| s.to_string())
.map(ToString::to_string)
.unwrap_or_default(); // Empty Default
future::ok::<_, hyper::Error>(Response::new(Full::new(Bytes::from(user_agent))))
});

View file

@ -6,6 +6,9 @@ edition = "2021"
[lib]
proc-macro = true
[lints]
workspace = true
[dependencies]
quote = { workspace = true }
syn = { workspace = true }

View file

@ -4,6 +4,9 @@ version = "0.0.1"
edition = "2021"
description = "Normalization for distribution, package and extra names"
[lints]
workspace = true
[dependencies]
rkyv = { workspace = true }
schemars = { workspace = true, optional = true }

View file

@ -9,6 +9,9 @@ repository.workspace = true
authors.workspace = true
license.workspace = true
[lints]
workspace = true
[dependencies]
cache-key = { workspace = true }
distribution-filename = { workspace = true }
@ -39,6 +42,3 @@ thiserror = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true }
url = { workspace = true }
[lints]
workspace = true

View file

@ -189,7 +189,7 @@ impl std::fmt::Display for DisplayResolutionGraph<'_> {
let deps = edges
.iter()
.map(|dependency| format!("{}", dependency.name()))
.chain(source.iter().map(std::string::ToString::to_string))
.chain(source.iter().map(ToString::to_string))
.collect::<Vec<_>>()
.join(", ");
let comment = format!("# via {deps}").green().to_string();
@ -214,7 +214,7 @@ impl std::fmt::Display for DisplayResolutionGraph<'_> {
let separator = "\n";
let deps = source
.iter()
.map(std::string::ToString::to_string)
.map(ToString::to_string)
.chain(
edges
.iter()