Mirror of https://github.com/astral-sh/uv.git (synced 2025-07-07 21:35:00 +00:00)
Enable workspace lint configuration in remaining crates (#4329)
## Summary

A few crates didn't have Clippy enabled to match our workspace settings. This change opts the remaining crates into the shared workspace lint configuration and fixes the Clippy warnings that surfaced as a result.
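For context, this is the Cargo mechanism the diff below relies on: the workspace root declares shared lint levels once, and each member crate opts in with a two-line `[lints]` table. A minimal sketch follows — the specific lint names and levels in the root table are illustrative assumptions, not uv's actual configuration; only the per-crate `[lints] workspace = true` opt-in is what this commit adds.

```toml
# Workspace root Cargo.toml (lint levels below are illustrative assumptions).
[workspace]
members = ["crates/*"]

[workspace.lints.clippy]
# A lint group, demoted in priority so individual lints below can override it.
pedantic = { level = "warn", priority = -1 }
unused_self = "warn"

[workspace.lints.rust]
unsafe_code = "warn"

# A member crate's Cargo.toml: inherit the workspace lint table.
# This is the opt-in the commit adds to the remaining crates.
[lints]
workspace = true
```

Once a crate opts in, `cargo clippy` enforces the shared levels there as well, which is why the diff also adds targeted `#[allow(...)]` and `#[must_use]` attributes where the newly enabled lints fire.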
This commit is contained in: parent b8c0391667 · commit c996e8e3f3

28 changed files with 249 additions and 204 deletions
@@ -4,7 +4,6 @@ version = "0.6.0"
 description = "A library for python version numbers and specifiers, implementing PEP 440"
 license = "Apache-2.0 OR BSD-2-Clause"
 include = ["/src", "Changelog.md", "License-Apache", "License-BSD", "Readme.md", "pyproject.toml"]
-
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
@@ -16,6 +15,9 @@ authors = { workspace = true }
 name = "pep440_rs"
 crate-type = ["rlib", "cdylib"]
 
+[lints]
+workspace = true
+
 [dependencies]
 once_cell = { workspace = true }
 pyo3 = { workspace = true, optional = true, features = ["extension-module", "abi3-py37"] }
@@ -67,9 +67,9 @@ impl Operator {
 /// specifiers [spec].
 ///
 /// [spec]: https://packaging.python.org/en/latest/specifications/version-specifiers/
-pub(crate) fn is_local_compatible(&self) -> bool {
+pub(crate) fn is_local_compatible(self) -> bool {
 !matches!(
-*self,
+self,
 Self::GreaterThan
 | Self::GreaterThanEqual
 | Self::LessThan
@@ -149,10 +149,12 @@ impl std::fmt::Display for Operator {
 #[cfg(feature = "pyo3")]
 #[pymethods]
 impl Operator {
+#[allow(clippy::trivially_copy_pass_by_ref)]
 fn __str__(&self) -> String {
 self.to_string()
 }
 
+#[allow(clippy::trivially_copy_pass_by_ref)]
 fn __repr__(&self) -> String {
 self.to_string()
 }
@@ -412,6 +414,7 @@ impl Version {
 ///
 /// When the iterator yields no elements.
 #[inline]
+#[must_use]
 pub fn with_release<I, R>(mut self, release_numbers: I) -> Self
 where
 I: IntoIterator<Item = R>,
@@ -456,6 +459,7 @@ impl Version {
 
 /// Set the epoch and return the updated version.
 #[inline]
+#[must_use]
 pub fn with_epoch(mut self, value: u64) -> Self {
 if let VersionInner::Small { ref mut small } = Arc::make_mut(&mut self.inner) {
 if small.set_epoch(value) {
@@ -468,6 +472,7 @@ impl Version {
 
 /// Set the pre-release component and return the updated version.
 #[inline]
+#[must_use]
 pub fn with_pre(mut self, value: Option<PreRelease>) -> Self {
 if let VersionInner::Small { ref mut small } = Arc::make_mut(&mut self.inner) {
 if small.set_pre(value) {
@@ -480,6 +485,7 @@ impl Version {
 
 /// Set the post-release component and return the updated version.
 #[inline]
+#[must_use]
 pub fn with_post(mut self, value: Option<u64>) -> Self {
 if let VersionInner::Small { ref mut small } = Arc::make_mut(&mut self.inner) {
 if small.set_post(value) {
@@ -492,6 +498,7 @@ impl Version {
 
 /// Set the dev-release component and return the updated version.
 #[inline]
+#[must_use]
 pub fn with_dev(mut self, value: Option<u64>) -> Self {
 if let VersionInner::Small { ref mut small } = Arc::make_mut(&mut self.inner) {
 if small.set_dev(value) {
@@ -504,6 +511,7 @@ impl Version {
 
 /// Set the local segments and return the updated version.
 #[inline]
+#[must_use]
 pub fn with_local(mut self, value: Vec<LocalSegment>) -> Self {
 if value.is_empty() {
 self.without_local()
@@ -518,6 +526,7 @@ impl Version {
 /// and local version labels MUST be ignored entirely when checking if
 /// candidate versions match a given version specifier."
 #[inline]
+#[must_use]
 pub fn without_local(mut self) -> Self {
 // A "small" version is already guaranteed not to have a local
 // component, so we only need to do anything if we have a "full"
@@ -534,6 +543,7 @@ impl Version {
 /// The version `1.0min0` is smaller than all other `1.0` versions,
 /// like `1.0a1`, `1.0dev0`, etc.
 #[inline]
+#[must_use]
 pub fn with_min(mut self, value: Option<u64>) -> Self {
 debug_assert!(!self.is_pre(), "min is not allowed on pre-release versions");
 debug_assert!(!self.is_dev(), "min is not allowed on dev versions");
@@ -552,6 +562,7 @@ impl Version {
 /// The version `1.0max0` is larger than all other `1.0` versions,
 /// like `1.0.post1`, `1.0+local`, etc.
 #[inline]
+#[must_use]
 pub fn with_max(mut self, value: Option<u64>) -> Self {
 debug_assert!(
 !self.is_post(),
@@ -680,7 +691,7 @@ impl std::fmt::Display for Version {
 "+{}",
 self.local()
 .iter()
-.map(std::string::ToString::to_string)
+.map(ToString::to_string)
 .collect::<Vec<String>>()
 .join(".")
 )
@@ -779,7 +790,7 @@ impl FromStr for Version {
 /// calendar versions, like `2023.03`, to be represented.)
 /// * There is *at most* one of the following components: pre, dev or post.
 /// * If there is a pre segment, then its numeric value is less than 64.
-/// * If there is a dev or post segment, then its value is less than u8::MAX.
+/// * If there is a dev or post segment, then its value is less than `u8::MAX`.
 /// * There are zero "local" segments.
 ///
 /// The above constraints were chosen as a balancing point between being able
@@ -871,23 +882,25 @@ impl VersionSmall {
 const SUFFIX_NONE: u64 = 5;
 const SUFFIX_POST: u64 = 6;
 const SUFFIX_MAX: u64 = 7;
-const SUFFIX_MAX_VERSION: u64 = 0x1FFFFF;
+const SUFFIX_MAX_VERSION: u64 = 0x001F_FFFF;
 
 #[inline]
 fn new() -> Self {
 Self {
-repr: 0x00000000_00A00000,
+repr: 0x0000_0000_00A0_0000,
 release: [0, 0, 0, 0],
 len: 0,
 }
 }
 
 #[inline]
+#[allow(clippy::unused_self)]
 fn epoch(&self) -> u64 {
 0
 }
 
 #[inline]
+#[allow(clippy::unused_self)]
 fn set_epoch(&mut self, value: u64) -> bool {
 if value != 0 {
 return false;
@@ -902,7 +915,7 @@ impl VersionSmall {
 
 #[inline]
 fn clear_release(&mut self) {
-self.repr &= !0xFFFFFFFF_FF000000;
+self.repr &= !0xFFFF_FFFF_FF00_0000;
 self.release = [0, 0, 0, 0];
 self.len = 0;
 }
@@ -1122,6 +1135,7 @@ impl VersionSmall {
 }
 
 #[inline]
+#[allow(clippy::unused_self)]
 fn local(&self) -> &[LocalSegment] {
 // A "small" version is never used if the version has a non-zero number
 // of local segments.
@@ -1138,7 +1152,7 @@ impl VersionSmall {
 #[inline]
 fn set_suffix_kind(&mut self, kind: u64) {
 debug_assert!(kind <= Self::SUFFIX_MAX);
-self.repr &= !0xE00000;
+self.repr &= !0x00E0_0000;
 self.repr |= kind << 21;
 if kind == Self::SUFFIX_NONE {
 self.set_suffix_version(0);
@@ -1147,13 +1161,13 @@ impl VersionSmall {
 
 #[inline]
 fn suffix_version(&self) -> u64 {
-self.repr & 0x1FFFFF
+self.repr & 0x001F_FFFF
 }
 
 #[inline]
 fn set_suffix_version(&mut self, value: u64) {
-debug_assert!(value <= 0x1FFFFF);
-self.repr &= !0x1FFFFF;
+debug_assert!(value <= 0x001F_FFFF);
+self.repr &= !0x001F_FFFF;
 self.repr |= value;
 }
 }
@@ -1196,8 +1210,8 @@ struct VersionFull {
 /// if any
 dev: Option<u64>,
 /// A [local version
-/// identifier](https://peps.python.org/pep-0440/#local-version-identif
-/// iers) such as `+deadbeef` in `1.2.3+deadbeef`
+/// identifier](https://peps.python.org/pep-0440/#local-version-identifiers)
+/// such as `+deadbeef` in `1.2.3+deadbeef`
 ///
 /// > They consist of a normal public version identifier (as defined
 /// > in the previous section), along with an arbitrary “local version
@@ -1460,10 +1474,10 @@ impl<'a> Parser<'a> {
 fn parse(self) -> Result<Version, VersionParseError> {
 match self.parse_pattern() {
 Ok(vpat) => {
-if !vpat.is_wildcard() {
-Ok(vpat.into_version())
-} else {
+if vpat.is_wildcard() {
 Err(ErrorKind::Wildcard.into())
+} else {
+Ok(vpat.into_version())
 }
 }
 // If we get an error when parsing a version pattern, then
@@ -2456,7 +2470,7 @@ fn starts_with_ignore_ascii_case(needle: &[u8], haystack: &[u8]) -> bool {
 /// # Motivation
 ///
 /// We hand-write this for a couple reasons. Firstly, the standard library's
-/// FromStr impl for parsing integers requires UTF-8 validation first. We
+/// `FromStr` impl for parsing integers requires UTF-8 validation first. We
 /// don't need that for version parsing since we stay in the realm of ASCII.
 /// Secondly, std's version is a little more flexible because it supports
 /// signed integers. So for example, it permits a leading `+` before the actual
@@ -2647,19 +2661,19 @@ mod tests {
 ),
 (
 "1.2+123456",
-Version::new([1, 2]).with_local(vec![LocalSegment::Number(123456)]),
+Version::new([1, 2]).with_local(vec![LocalSegment::Number(123_456)]),
 ),
 (
 "1.2.r32+123456",
 Version::new([1, 2])
 .with_post(Some(32))
-.with_local(vec![LocalSegment::Number(123456)]),
+.with_local(vec![LocalSegment::Number(123_456)]),
 ),
 (
 "1.2.rev33+123456",
 Version::new([1, 2])
 .with_post(Some(33))
-.with_local(vec![LocalSegment::Number(123456)]),
+.with_local(vec![LocalSegment::Number(123_456)]),
 ),
 // Explicit epoch of 1
 (
@@ -2848,28 +2862,28 @@ mod tests {
 "1!1.2+123456",
 Version::new([1, 2])
 .with_epoch(1)
-.with_local(vec![LocalSegment::Number(123456)]),
+.with_local(vec![LocalSegment::Number(123_456)]),
 ),
 (
 "1!1.2.r32+123456",
 Version::new([1, 2])
 .with_epoch(1)
 .with_post(Some(32))
-.with_local(vec![LocalSegment::Number(123456)]),
+.with_local(vec![LocalSegment::Number(123_456)]),
 ),
 (
 "1!1.2.rev33+123456",
 Version::new([1, 2])
 .with_epoch(1)
 .with_post(Some(33))
-.with_local(vec![LocalSegment::Number(123456)]),
+.with_local(vec![LocalSegment::Number(123_456)]),
 ),
 (
 "98765!1.2.rev33+123456",
 Version::new([1, 2])
 .with_epoch(98765)
 .with_post(Some(33))
-.with_local(vec![LocalSegment::Number(123456)]),
+.with_local(vec![LocalSegment::Number(123_456)]),
 ),
 ];
 for (string, structured) in versions {
@@ -3397,7 +3411,7 @@ mod tests {
 assert_eq!(
 p("5+18446744073709551615.abc"),
 Version::new([5]).with_local(vec![
-LocalSegment::Number(18446744073709551615),
+LocalSegment::Number(18_446_744_073_709_551_615),
 LocalSegment::String("abc".to_string()),
 ])
 );
@@ -3496,7 +3510,7 @@ mod tests {
 assert_eq!(p(" \n5\n \t"), Version::new([5]));
 
 // min tests
-assert!(Parser::new("1.min0".as_bytes()).parse().is_err())
+assert!(Parser::new("1.min0".as_bytes()).parse().is_err());
 }
 
 // Tests the error cases of our version parser.
@@ -3626,7 +3640,7 @@ mod tests {
 "1.1.dev1",
 ];
 for (i, v1) in versions.iter().enumerate() {
-for v2 in versions[i + 1..].iter() {
+for v2 in &versions[i + 1..] {
 let less = v1.parse::<Version>().unwrap();
 let greater = v2.parse::<Version>().unwrap();
 assert_eq!(
@@ -3668,7 +3682,7 @@ mod tests {
 "1.1.dev1",
 ];
 
-for greater in versions.iter() {
+for greater in versions {
 let greater = greater.parse::<Version>().unwrap();
 assert_eq!(
 less.cmp(&greater),
@@ -3707,7 +3721,7 @@ mod tests {
 "1.0",
 ];
 
-for less in versions.iter() {
+for less in versions {
 let less = less.parse::<Version>().unwrap();
 assert_eq!(
 less.cmp(&greater),
@@ -3728,7 +3742,7 @@ mod tests {
 
 let versions = &["1.0a1", "1.0a1+local", "1.0a1.post1"];
 
-for less in versions.iter() {
+for less in versions {
 let less = less.parse::<Version>().unwrap();
 assert_eq!(
 less.cmp(&greater),
@@ -3749,7 +3763,7 @@ mod tests {
 
 let versions = &["1.0b1", "1.0b1+local", "1.0b1.post1", "1.0"];
 
-for greater in versions.iter() {
+for greater in versions {
 let greater = greater.parse::<Version>().unwrap();
 assert_eq!(
 less.cmp(&greater),
@@ -3771,9 +3785,12 @@ mod tests {
 assert_eq!(p("01"), Ok(1));
 assert_eq!(p("9"), Ok(9));
 assert_eq!(p("10"), Ok(10));
-assert_eq!(p("18446744073709551615"), Ok(18446744073709551615));
-assert_eq!(p("018446744073709551615"), Ok(18446744073709551615));
-assert_eq!(p("000000018446744073709551615"), Ok(18446744073709551615));
+assert_eq!(p("18446744073709551615"), Ok(18_446_744_073_709_551_615));
+assert_eq!(p("018446744073709551615"), Ok(18_446_744_073_709_551_615));
+assert_eq!(
+p("000000018446744073709551615"),
+Ok(18_446_744_073_709_551_615)
+);
 
 assert_eq!(p("10a"), Err(ErrorKind::InvalidDigit { got: b'a' }.into()));
 assert_eq!(p("10["), Err(ErrorKind::InvalidDigit { got: b'[' }.into()));
@@ -480,11 +480,11 @@ impl VersionSpecifier {
 // "Except where specifically noted below, local version identifiers MUST NOT be permitted
 // in version specifiers, and local version labels MUST be ignored entirely when checking
 // if candidate versions match a given version specifier."
-let (this, other) = if !self.version.local().is_empty() {
-(self.version.clone(), version.clone())
-} else {
+let (this, other) = if self.version.local().is_empty() {
 // self is already without local
 (self.version.clone(), version.clone().without_local())
+} else {
+(self.version.clone(), version.clone())
 };
 
 match self.operator {
@@ -650,7 +650,7 @@ impl std::fmt::Display for VersionSpecifierBuildError {
 let local = version
 .local()
 .iter()
-.map(|segment| segment.to_string())
+.map(ToString::to_string)
 .collect::<Vec<String>>()
 .join(".");
 write!(
@@ -16,13 +16,13 @@ authors = { workspace = true }
 name = "pep508_rs"
 crate-type = ["cdylib", "rlib"]
 
-[dependencies]
-pep440_rs = { workspace = true }
-uv-fs = { workspace = true }
-uv-normalize = { workspace = true }
+[lints]
+workspace = true
 
+[dependencies]
 derivative = { workspace = true }
 once_cell = { workspace = true }
+pep440_rs = { workspace = true }
 pyo3 = { workspace = true, optional = true, features = ["abi3", "extension-module"] }
 pyo3-log = { workspace = true, optional = true }
 regex = { workspace = true }
@@ -32,6 +32,8 @@ thiserror = { workspace = true }
 tracing = { workspace = true, optional = true }
 unicode-width = { workspace = true }
 url = { workspace = true, features = ["serde"] }
+uv-fs = { workspace = true }
+uv-normalize = { workspace = true }
 
 [dev-dependencies]
 insta = { version = "1.36.1" }
@@ -35,7 +35,6 @@ use pyo3::{
 };
 use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
 use thiserror::Error;
-use unicode_width::UnicodeWidthChar;
 use url::Url;
 
 use cursor::Cursor;
@@ -95,7 +94,7 @@ impl<T: Pep508Url> Display for Pep508Error<T> {
 // We can use char indices here since it's a Vec<char>
 let start_offset = self.input[..self.start]
 .chars()
-.flat_map(|c| c.width())
+.filter_map(unicode_width::UnicodeWidthChar::width)
 .sum::<usize>();
 let underline_len = if self.start == self.input.len() {
 // We also allow 0 here for convenience
@@ -108,7 +107,7 @@ impl<T: Pep508Url> Display for Pep508Error<T> {
 } else {
 self.input[self.start..self.start + self.len]
 .chars()
-.flat_map(|c| c.width())
+.filter_map(unicode_width::UnicodeWidthChar::width)
 .sum::<usize>()
 };
 write!(
@@ -247,7 +246,7 @@ impl PyRequirement {
 /// `requests [security,tests] >= 2.8.1, == 2.8.* ; python_version > "3.8"`
 #[getter]
 pub fn marker(&self) -> Option<String> {
-self.marker.as_ref().map(std::string::ToString::to_string)
+self.marker.as_ref().map(ToString::to_string)
 }
 
 /// Parses a PEP 440 string
@@ -405,6 +404,7 @@ impl<T: Pep508Url> Requirement<T> {
 ///
 /// For example, given `flask >= 2.0.2`, calling `with_extra_marker("dotenv")` would return
 /// `flask >= 2.0.2 ; extra == "dotenv"`.
+#[must_use]
 pub fn with_extra_marker(self, extra: &ExtraName) -> Self {
 let marker = match self.marker {
 Some(expression) => MarkerTree::And(vec![
@@ -463,7 +463,7 @@ where
 F: FnMut(MarkerWarningKind, String),
 {
 fn report(&mut self, kind: MarkerWarningKind, warning: String) {
-(self)(kind, warning)
+(self)(kind, warning);
 }
 }
 
@@ -471,10 +471,11 @@ where
 pub struct TracingReporter;
 
 impl Reporter for TracingReporter {
-fn report(&mut self, _kind: MarkerWarningKind, _message: String) {
+#[allow(unused_variables)]
+fn report(&mut self, _kind: MarkerWarningKind, message: String) {
 #[cfg(feature = "tracing")]
 {
-tracing::warn!("{}", _message);
+tracing::warn!("{message}");
 }
 }
 }
@@ -274,8 +274,7 @@ impl FromStr for MarkerOperator {
 // ends with in
 .and_then(|space_in| space_in.strip_suffix("in"))
 // and has only whitespace in between
-.map(|space| !space.is_empty() && space.trim().is_empty())
-.unwrap_or_default() =>
+.is_some_and(|space| !space.is_empty() && space.trim().is_empty()) =>
 {
 Self::NotIn
 }
@@ -547,6 +546,7 @@ impl MarkerEnvironment {
 ///
 /// See also [`MarkerEnvironment::implementation_name`].
 #[inline]
+#[must_use]
 pub fn with_implementation_name(mut self, value: impl Into<String>) -> MarkerEnvironment {
 Arc::make_mut(&mut self.inner).implementation_name = value.into();
 self
@@ -556,6 +556,7 @@ impl MarkerEnvironment {
 ///
 /// See also [`MarkerEnvironment::implementation_version`].
 #[inline]
+#[must_use]
 pub fn with_implementation_version(
 mut self,
 value: impl Into<StringVersion>,
@@ -568,6 +569,7 @@ impl MarkerEnvironment {
 ///
 /// See also [`MarkerEnvironment::os_name`].
 #[inline]
+#[must_use]
 pub fn with_os_name(mut self, value: impl Into<String>) -> MarkerEnvironment {
 Arc::make_mut(&mut self.inner).os_name = value.into();
 self
@@ -577,6 +579,7 @@ impl MarkerEnvironment {
 ///
 /// See also [`MarkerEnvironment::platform_machine`].
 #[inline]
+#[must_use]
 pub fn with_platform_machine(mut self, value: impl Into<String>) -> MarkerEnvironment {
 Arc::make_mut(&mut self.inner).platform_machine = value.into();
 self
@@ -587,6 +590,7 @@ impl MarkerEnvironment {
 ///
 /// See also [`MarkerEnvironment::platform_python_implementation`].
 #[inline]
+#[must_use]
 pub fn with_platform_python_implementation(
 mut self,
 value: impl Into<String>,
@@ -599,6 +603,7 @@ impl MarkerEnvironment {
 ///
 /// See also [`MarkerEnvironment::platform_release`].
 #[inline]
+#[must_use]
 pub fn with_platform_release(mut self, value: impl Into<String>) -> MarkerEnvironment {
 Arc::make_mut(&mut self.inner).platform_release = value.into();
 self
@@ -608,6 +613,7 @@ impl MarkerEnvironment {
 ///
 /// See also [`MarkerEnvironment::platform_system`].
 #[inline]
+#[must_use]
 pub fn with_platform_system(mut self, value: impl Into<String>) -> MarkerEnvironment {
 Arc::make_mut(&mut self.inner).platform_system = value.into();
 self
@@ -617,6 +623,7 @@ impl MarkerEnvironment {
 ///
 /// See also [`MarkerEnvironment::platform_version`].
 #[inline]
+#[must_use]
 pub fn with_platform_version(mut self, value: impl Into<String>) -> MarkerEnvironment {
 Arc::make_mut(&mut self.inner).platform_version = value.into();
 self
@@ -626,6 +633,7 @@ impl MarkerEnvironment {
 ///
 /// See also [`MarkerEnvironment::python_full_version`].
 #[inline]
+#[must_use]
 pub fn with_python_full_version(
 mut self,
 value: impl Into<StringVersion>,
@@ -638,6 +646,7 @@ impl MarkerEnvironment {
 ///
 /// See also [`MarkerEnvironment::python_full_version`].
 #[inline]
+#[must_use]
 pub fn with_python_version(mut self, value: impl Into<StringVersion>) -> MarkerEnvironment {
 Arc::make_mut(&mut self.inner).python_version = value.into();
 self
@@ -647,6 +656,7 @@ impl MarkerEnvironment {
 ///
 /// See also [`MarkerEnvironment::sys_platform`].
 #[inline]
+#[must_use]
 pub fn with_sys_platform(mut self, value: impl Into<String>) -> MarkerEnvironment {
 Arc::make_mut(&mut self.inner).sys_platform = value.into();
 self
@@ -1004,9 +1014,8 @@ impl MarkerExpression {
 reporter.report(
 MarkerWarningKind::Pep440Error,
 format!(
-"Expected double quoted PEP 440 version to compare with {}, found {},
-will evaluate to false",
-key, r_value
+"Expected double quoted PEP 440 version to compare with {key}, found {r_value},
+will evaluate to false"
 ),
 );
 
@@ -1166,8 +1175,7 @@ impl MarkerExpression {
 reporter.report(
 MarkerWarningKind::Pep440Error,
 format!(
-"Expected PEP 440 version to compare with {}, found {}, will evaluate to false: {}",
-key, value, err
+"Expected PEP 440 version to compare with {key}, found {value}, will evaluate to false: {err}"
 ),
 );
 
@@ -1219,8 +1227,7 @@ impl MarkerExpression {
 reporter.report(
 MarkerWarningKind::Pep440Error,
 format!(
-"Expected PEP 440 version to compare with {}, found {}, will evaluate to false: {}",
-key, value, err
+"Expected PEP 440 version to compare with {key}, found {value}, will evaluate to false: {err}"
 ),
 );
 
@@ -1232,9 +1239,8 @@ impl MarkerExpression {
 reporter.report(
 MarkerWarningKind::Pep440Error,
 format!(
-"Expected PEP 440 version operator to compare {} with '{}',
-found '{}', will evaluate to false",
-key, version, marker_operator
+"Expected PEP 440 version operator to compare {key} with '{version}',
+found '{marker_operator}', will evaluate to false"
 ),
 );
 
@@ -1267,17 +1273,16 @@ impl MarkerExpression {
 }
 };
 
-match ExtraOperator::from_marker_operator(operator) {
-Some(operator) => Some(MarkerExpression::Extra { operator, name }),
-None => {
+if let Some(operator) = ExtraOperator::from_marker_operator(operator) {
+Some(MarkerExpression::Extra { operator, name })
+} else {
 reporter.report(
 MarkerWarningKind::ExtraInvalidComparison,
 "Comparing extra with something other than a quoted string is wrong,
 will evaluate to false"
 .to_string(),
 );
 None
-}
 }
 }
 
@@ -1324,7 +1329,7 @@ impl MarkerExpression {
 } => env
 .map(|env| {
 let l_string = env.get_string(key);
-self.compare_strings(l_string, operator, value, reporter)
+Self::compare_strings(l_string, *operator, value, reporter)
 })
 .unwrap_or(true),
 MarkerExpression::StringInverted {
@@ -1334,7 +1339,7 @@ impl MarkerExpression {
 } => env
 .map(|env| {
 let r_string = env.get_string(key);
-self.compare_strings(value, operator, r_string, reporter)
+Self::compare_strings(value, *operator, r_string, reporter)
 })
 .unwrap_or(true),
 MarkerExpression::Extra {
@@ -1424,9 +1429,8 @@ impl MarkerExpression {
 
 /// Compare strings by PEP 508 logic, with warnings
 fn compare_strings(
-&self,
 l_string: &str,
-operator: &MarkerOperator,
+operator: MarkerOperator,
 r_string: &str,
 reporter: &mut impl Reporter,
 ) -> bool {
@@ -1857,7 +1861,7 @@ impl MarkerTree {
 let this = std::mem::replace(self, MarkerTree::And(vec![]));
 *self = MarkerTree::And(vec![this]);
 }
-_ => {}
+MarkerTree::And(_) => {}
 }
 if let MarkerTree::And(ref mut exprs) = *self {
 if let MarkerTree::And(tree) = tree {
@@ -1879,7 +1883,7 @@ impl MarkerTree {
 let this = std::mem::replace(self, MarkerTree::And(vec![]));
 *self = MarkerTree::Or(vec![this]);
 }
-_ => {}
+MarkerTree::Or(_) => {}
 }
 if let MarkerTree::Or(ref mut exprs) = *self {
 if let MarkerTree::Or(tree) = tree {
@@ -1928,7 +1932,7 @@ impl Display for MarkerTree {
 fn parse_marker_operator<T: Pep508Url>(
 cursor: &mut Cursor,
 ) -> Result<MarkerOperator, Pep508Error<T>> {
-let (start, len) = if cursor.peek_char().is_some_and(|c| c.is_alphabetic()) {
+let (start, len) = if cursor.peek_char().is_some_and(char::is_alphabetic) {
 // "in" or "not"
 cursor.take_while(|char| !char.is_whitespace() && char != '\'' && char != '"')
 } else {
@@ -2301,7 +2305,7 @@ mod test {
 assert_eq!(captured_logs[0].level, log::Level::Warn);
 assert_eq!(captured_logs.len(), 1);
 });
-let string_string = MarkerTree::from_str(r#"os.name == 'posix' and platform.machine == 'x86_64' and platform.python_implementation == 'CPython' and 'Ubuntu' in platform.version and sys.platform == 'linux'"#).unwrap();
+let string_string = MarkerTree::from_str(r"os.name == 'posix' and platform.machine == 'x86_64' and platform.python_implementation == 'CPython' and 'Ubuntu' in platform.version and sys.platform == 'linux'").unwrap();
 string_string.evaluate(&env37, &[]);
 testing_logger::validate(|captured_logs| {
 let messages: Vec<_> = captured_logs
@@ -137,7 +137,7 @@ impl<Url: UnnamedRequirementUrl> Display for UnnamedRequirement<Url> {
 )?;
 }
 if let Some(marker) = &self.marker {
-write!(f, " ; {}", marker)?;
+write!(f, " ; {marker}")?;
 }
 Ok(())
 }
@@ -50,7 +50,7 @@ impl VerbatimUrl {
 
 // Convert to a URL.
 let mut url = Url::from_file_path(path.clone())
-.map_err(|_| VerbatimUrlError::UrlConversion(path.to_path_buf()))?;
+.map_err(|()| VerbatimUrlError::UrlConversion(path.to_path_buf()))?;
 
 // Set the fragment, if it exists.
 if let Some(fragment) = fragment {
@@ -84,14 +84,14 @@ impl VerbatimUrl {
 
 // Normalize the path.
 let path = normalize_path(&path)
-.map_err(|err| VerbatimUrlError::Normalization(path.to_path_buf(), err))?;
+.map_err(|err| VerbatimUrlError::Normalization(path.clone(), err))?;
 
 // Extract the fragment, if it exists.
 let (path, fragment) = split_fragment(&path);
 
 // Convert to a URL.
 let mut url = Url::from_file_path(path.clone())
-.map_err(|_| VerbatimUrlError::UrlConversion(path.to_path_buf()))?;
+.map_err(|()| VerbatimUrlError::UrlConversion(path.to_path_buf()))?;
 
 // Set the fragment, if it exists.
 if let Some(fragment) = fragment {
@@ -122,7 +122,7 @@ impl VerbatimUrl {
 
 // Convert to a URL.
 let mut url = Url::from_file_path(path.clone())
-.unwrap_or_else(|_| panic!("path is absolute: {}", path.display()));
+.unwrap_or_else(|()| panic!("path is absolute: {}", path.display()));
 
 // Set the fragment, if it exists.
 if let Some(fragment) = fragment {
@@ -160,7 +160,7 @@ impl VerbatimUrl {
 pub fn as_path(&self) -> Result<PathBuf, VerbatimUrlError> {
 self.url
 .to_file_path()
-.map_err(|_| VerbatimUrlError::UrlConversion(self.url.to_file_path().unwrap()))
+.map_err(|()| VerbatimUrlError::UrlConversion(self.url.to_file_path().unwrap()))
 }
 }
 
@@ -3,6 +3,9 @@ name = "uv-auth"
 version = "0.0.1"
 edition = "2021"
 
+[lints]
+workspace = true
+
 [dependencies]
 anyhow = { workspace = true }
 async-trait = { workspace = true }
@@ -64,7 +64,7 @@ impl CredentialsCache {
 /// Note we do not cache per username, but if a username is passed we will confirm that the
 /// cached credentials have a username equal to the provided one — otherwise `None` is returned.
 /// If multiple usernames are used per URL, the realm cache should be queried instead.
-pub(crate) fn get_url(&self, url: &Url, username: Username) -> Option<Arc<Credentials>> {
+pub(crate) fn get_url(&self, url: &Url, username: &Username) -> Option<Arc<Credentials>> {
 let urls = self.urls.read().unwrap();
 let credentials = urls.get(url);
 if let Some(credentials) = credentials {
@@ -93,15 +93,15 @@ impl CredentialsCache {
 let username = credentials.to_username();
 if username.is_some() {
 let realm = (Realm::from(url), username.clone());
-self.insert_realm(realm, credentials.clone());
+self.insert_realm(realm, &credentials);
 }
 
 // Insert an entry for requests with no username
-self.insert_realm((Realm::from(url), Username::none()), credentials.clone());
+self.insert_realm((Realm::from(url), Username::none()), &credentials);
 
 // Insert an entry for the URL
 let mut urls = self.urls.write().unwrap();
-urls.insert(url.clone(), credentials.clone());
+urls.insert(url, credentials);
 }
 
 /// Private interface to update a realm cache entry.
@@ -110,7 +110,7 @@ impl CredentialsCache {
 fn insert_realm(
 &self,
 key: (Realm, Username),
-credentials: Arc<Credentials>,
+credentials: &Arc<Credentials>,
 ) -> Option<Arc<Credentials>> {
 // Do not cache empty credentials
 if credentials.is_empty() {
@@ -169,9 +169,9 @@ impl UrlTrie {
 self.states[state].value.as_ref()
 }
 
-fn insert(&mut self, url: Url, value: Arc<Credentials>) {
+fn insert(&mut self, url: &Url, value: Arc<Credentials>) {
 let mut state = 0;
-let realm = Realm::from(&url).to_string();
+let realm = Realm::from(url).to_string();
 for component in [realm.as_str()]
 .into_iter()
 .chain(url.path_segments().unwrap().filter(|item| !item.is_empty()))
@@ -234,19 +234,19 @@ mod tests {
 
 let mut trie = UrlTrie::new();
 trie.insert(
-Url::parse("https://burntsushi.net").unwrap(),
+&Url::parse("https://burntsushi.net").unwrap(),
 credentials1.clone(),
 );
 trie.insert(
-Url::parse("https://astral.sh").unwrap(),
+&Url::parse("https://astral.sh").unwrap(),
 credentials2.clone(),
 );
 trie.insert(
-Url::parse("https://example.com/foo").unwrap(),
+&Url::parse("https://example.com/foo").unwrap(),
 credentials3.clone(),
 );
 trie.insert(
-Url::parse("https://example.com/bar").unwrap(),
+&Url::parse("https://example.com/bar").unwrap(),
 credentials4.clone(),
 );
 
@@ -24,7 +24,7 @@ impl Username {
 /// Create a new username.
 ///
 /// Unlike `reqwest`, empty usernames are be encoded as `None` instead of an empty string.
-pub fn new(value: Option<String>) -> Self {
+pub(crate) fn new(value: Option<String>) -> Self {
 // Ensure empty strings are `None`
 if let Some(value) = value {
 if value.is_empty() {
@@ -37,19 +37,19 @@ impl Username {
 }
 }
 
-pub fn none() -> Self {
+pub(crate) fn none() -> Self {
 Self::new(None)
 }
 
-pub fn is_none(&self) -> bool {
+pub(crate) fn is_none(&self) -> bool {
 self.0.is_none()
 }
 
-pub fn is_some(&self) -> bool {
+pub(crate) fn is_some(&self) -> bool {
 self.0.is_some()
 }
 
-pub fn as_deref(&self) -> Option<&str> {
+pub(crate) fn as_deref(&self) -> Option<&str> {
 self.0.as_deref()
 }
 }
@@ -67,33 +67,33 @@ impl From<Option<String>> for Username {
 }
 
 impl Credentials {
-pub fn new(username: Option<String>, password: Option<String>) -> Self {
+pub(crate) fn new(username: Option<String>, password: Option<String>) -> Self {
 Self {
 username: Username::new(username),
 password,
 }
 }
 
-pub fn username(&self) -> Option<&str> {
+pub(crate) fn username(&self) -> Option<&str> {
 self.username.as_deref()
 }
 
-pub fn to_username(&self) -> Username {
+pub(crate) fn to_username(&self) -> Username {
 self.username.clone()
 }
 
-pub fn password(&self) -> Option<&str> {
+pub(crate) fn password(&self) -> Option<&str> {
 self.password.as_deref()
 }
 
-pub fn is_empty(&self) -> bool {
+pub(crate) fn is_empty(&self) -> bool {
 self.password.is_none() && self.username.is_none()
 }
 
 /// Return [`Credentials`] for a [`Url`] from a [`Netrc`] file, if any.
 ///
 /// If a username is provided, it must match the login in the netrc file or [`None`] is returned.
-pub fn from_netrc(netrc: &Netrc, url: &Url, username: Option<&str>) -> Option<Self> {
+pub(crate) fn from_netrc(netrc: &Netrc, url: &Url, username: Option<&str>) -> Option<Self> {
 let host = url.host_str()?;
 let entry = netrc
 .hosts
@@ -114,7 +114,7 @@ impl Credentials {
 /// Parse [`Credentials`] from a URL, if any.
 ///
 /// Returns [`None`] if both [`Url::username`] and [`Url::password`] are not populated.
-pub fn from_url(url: &Url) -> Option<Self> {
+pub(crate) fn from_url(url: &Url) -> Option<Self> {
 if url.username().is_empty() && url.password().is_none() {
 return None;
 }
@@ -142,7 +142,7 @@ impl Credentials {
 /// Parse [`Credentials`] from an HTTP request, if any.
 ///
 /// Only HTTP Basic Authentication is supported.
-pub fn from_request(request: &Request) -> Option<Self> {
+pub(crate) fn from_request(request: &Request) -> Option<Self> {
 // First, attempt to retrieve the credentials from the URL
 Self::from_url(request.url()).or(
 // Then, attempt to pull the credentials from the headers
@@ -195,7 +195,7 @@ impl Credentials {
 write!(encoder, "{}:", self.username().unwrap_or_default())
 .expect("Write to base64 encoder should succeed");
 if let Some(password) = self.password() {
-write!(encoder, "{}", password).expect("Write to base64 encoder should succeed");
+write!(encoder, "{password}").expect("Write to base64 encoder should succeed");
 }
 }
 let mut header = HeaderValue::from_bytes(&buf).expect("base64 is always valid HeaderValue");
@@ -207,7 +207,7 @@ impl Credentials {
 ///
 /// Any existing credentials will be overridden.
 #[must_use]
-pub fn authenticate(&self, mut request: reqwest::Request) -> reqwest::Request {
+pub(crate) fn authenticate(&self, mut request: reqwest::Request) -> reqwest::Request {
 request
 .headers_mut()
 .insert(reqwest::header::AUTHORIZATION, Self::to_header_value(self));
@@ -15,7 +15,7 @@ pub struct KeyringProvider {
 }

 #[derive(Debug)]
-pub enum KeyringProviderBackend {
+pub(crate) enum KeyringProviderBackend {
     /// Use the `keyring` command to fetch credentials.
     Subprocess,
     #[cfg(test)]

@@ -59,7 +59,7 @@ impl KeyringProvider {
             }
             #[cfg(test)]
             KeyringProviderBackend::Dummy(ref store) => {
-                self.fetch_dummy(store, url.as_str(), username)
+                Self::fetch_dummy(store, url.as_str(), username)
             }
         };
         // And fallback to a check for the host

@@ -74,7 +74,7 @@ impl KeyringProvider {
             KeyringProviderBackend::Subprocess => self.fetch_subprocess(&host, username).await,
             #[cfg(test)]
             KeyringProviderBackend::Dummy(ref store) => {
-                self.fetch_dummy(store, &host, username)
+                Self::fetch_dummy(store, &host, username)
             }
         };
     }

@@ -116,14 +116,13 @@ impl KeyringProvider {

     #[cfg(test)]
     fn fetch_dummy(
-        &self,
         store: &std::collections::HashMap<(String, &'static str), &'static str>,
         service_name: &str,
         username: &str,
     ) -> Option<String> {
         store
             .get(&(service_name.to_string(), username))
-            .map(|password| password.to_string())
+            .map(|password| (*password).to_string())
     }

     /// Create a new provider with [`KeyringProviderBackend::Dummy`].

@@ -131,13 +130,12 @@ impl KeyringProvider {
     pub fn dummy<S: Into<String>, T: IntoIterator<Item = ((S, &'static str), &'static str)>>(
         iter: T,
     ) -> Self {
-        use std::collections::HashMap;
-
         Self {
-            backend: KeyringProviderBackend::Dummy(HashMap::from_iter(
+            backend: KeyringProviderBackend::Dummy(
                 iter.into_iter()
-                    .map(|((service, username), password)| ((service.into(), username), password)),
-            )),
+                    .map(|((service, username), password)| ((service.into(), username), password))
+                    .collect(),
+            ),
         }
     }
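The `fetch_dummy` hunks above drop the unused `&self` receiver and switch the call sites to `Self::fetch_dummy(...)`, the refactor clippy's `unused_self` lint suggests. A self-contained sketch of the same shape, with illustrative names rather than the real uv types:

    use std::collections::HashMap;

    struct Provider {
        store: HashMap<String, &'static str>,
    }

    impl Provider {
        // Before: `fn fetch(&self, ...)` took `&self` but never used it;
        // clippy's `unused_self` suggests making it an associated function.
        fn fetch(store: &HashMap<String, &'static str>, service: &str) -> Option<String> {
            store
                .get(service)
                // `get` yields `&&str`; dereference before converting, which is
                // the `(*password).to_string()` form seen in the diff.
                .map(|password| (*password).to_string())
        }

        fn lookup(&self, service: &str) -> Option<String> {
            // Call sites switch from `self.fetch(...)` to `Self::fetch(...)`.
            Self::fetch(&self.store, service)
        }
    }

    fn main() {
        let provider = Provider {
            store: HashMap::from([("https://example.com".to_string(), "s3cret")]),
        };
        assert_eq!(provider.lookup("https://example.com"), Some("s3cret".to_string()));
    }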
@@ -180,7 +180,7 @@ impl Middleware for AuthMiddleware {
         trace!("Request for {url} is unauthenticated, checking cache");

         // Check the cache for a URL match
-        let credentials = self.cache().get_url(request.url(), Username::none());
+        let credentials = self.cache().get_url(request.url(), &Username::none());
         if let Some(credentials) = credentials.as_ref() {
             request = credentials.authenticate(request);
             if credentials.password().is_some() {

@@ -287,7 +287,7 @@ impl AuthMiddleware {
             .is_ok_and(|response| response.error_for_status_ref().is_ok())
         {
             trace!("Updating cached credentials for {url} to {credentials:?}");
-            self.cache().insert(&url, credentials)
+            self.cache().insert(&url, credentials);
         };

         result

@@ -346,16 +346,15 @@ impl AuthMiddleware {
             // implementation returns different credentials for different URLs in the
             // same realm we will use the wrong credentials.
         } else if let Some(credentials) = match self.keyring {
-            Some(ref keyring) => match credentials.and_then(|credentials| credentials.username()) {
-                Some(username) => {
+            Some(ref keyring) => {
+                if let Some(username) = credentials.and_then(|credentials| credentials.username()) {
                     debug!("Checking keyring for credentials for {username}@{url}");
                     keyring.fetch(url, username).await
-                }
-                None => {
+                } else {
                     debug!("Skipping keyring lookup for {url} with no username");
                     None
                 }
-            },
+            }
             None => None,
         } {
             debug!("Found credentials in keyring for {url}");

@@ -1065,12 +1064,12 @@ mod tests {
     );

     assert_eq!(
-        client.get(format!("{}/foo", url_1)).send().await?.status(),
+        client.get(format!("{url_1}/foo")).send().await?.status(),
         200,
         "Requests can be to different paths in the same realm"
     );
     assert_eq!(
-        client.get(format!("{}/foo", url_2)).send().await?.status(),
+        client.get(format!("{url_2}/foo")).send().await?.status(),
         200,
         "Requests can be to different paths in the same realm"
     );
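The keyring hunk above replaces a nested `match` on an `Option` with an `if let ... else` chain. A standalone sketch of that transformation (a simplified stand-in, not the uv middleware itself):

    fn credentials_for(username: Option<&str>) -> Option<String> {
        // Before: a two-arm `match` on the Option, with one arm per case.
        // After: an `if let ... else` block, mirroring the middleware hunk above.
        if let Some(username) = username {
            println!("checking keyring for {username}");
            Some(format!("token-for-{username}"))
        } else {
            println!("skipping keyring lookup: no username");
            None
        }
    }

    fn main() {
        assert_eq!(
            credentials_for(Some("alice")),
            Some("token-for-alice".to_string())
        );
        assert_eq!(credentials_for(None), None);
    }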
@@ -3,6 +3,9 @@ name = "uv-client"
 version = "0.0.1"
 edition = "2021"

+[lints]
+workspace = true
+
 [dependencies]
 cache-key = { workspace = true }
 distribution-filename = { workspace = true }
@@ -113,7 +113,7 @@ impl<'a> BaseClientBuilder<'a> {
         if let Some(markers) = self.markers {
             let linehaul = LineHaul::new(markers, self.platform);
             if let Ok(output) = serde_json::to_string(&linehaul) {
-                user_agent_string += &format!(" {}", output);
+                user_agent_string += &format!(" {output}");
             }
         }

@@ -267,7 +267,7 @@ impl RetryableStrategy for LoggingRetryableStrategy {
             .join("\n");
         debug!(
             "Transient request failure for {}, retrying: {err}\n{context}",
-            err.url().map(|url| url.as_str()).unwrap_or("unknown URL")
+            err.url().map(reqwest::Url::as_str).unwrap_or("unknown URL")
         );
     }
 }
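Replacing `|url| url.as_str()` with the path `reqwest::Url::as_str` follows clippy's `redundant_closure_for_method_calls`: a closure that only forwards to a single method is spelled as the method path itself. A minimal illustration using only the standard library:

    fn main() {
        let url: Option<String> = Some("https://example.com".to_string());

        // Before: a closure that only calls a single method.
        let before = url.as_deref().map(|s| s.len());

        // After: name the method directly, as the diff does with `reqwest::Url::as_str`.
        let after = url.as_deref().map(str::len);

        assert_eq!(before, after);
    }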
@@ -50,7 +50,7 @@ pub trait Cacheable: Sized {
 /// implement `Cacheable`.
 #[derive(Debug, Deserialize, Serialize)]
 #[serde(transparent)]
-pub struct SerdeCacheable<T> {
+pub(crate) struct SerdeCacheable<T> {
     inner: T,
 }

@@ -228,19 +228,16 @@ impl CachedClient {
         CallbackReturn: Future<Output = Result<Payload, CallBackError>>,
     {
         let fresh_req = req.try_clone().expect("HTTP request must be cloneable");
-        let cached_response = match Self::read_cache(cache_entry).await {
-            Some(cached) => {
-                self.send_cached(req, cache_control, cached)
-                    .boxed_local()
-                    .await?
-            }
-            None => {
-                debug!("No cache entry for: {}", req.url());
-                let (response, cache_policy) = self.fresh_request(req).await?;
-                CachedResponse::ModifiedOrNew {
-                    response,
-                    cache_policy,
-                }
+        let cached_response = if let Some(cached) = Self::read_cache(cache_entry).await {
+            self.send_cached(req, cache_control, cached)
+                .boxed_local()
+                .await?
+        } else {
+            debug!("No cache entry for: {}", req.url());
+            let (response, cache_policy) = self.fresh_request(req).await?;
+            CachedResponse::ModifiedOrNew {
+                response,
+                cache_policy,
             }
         };
         match cached_response {
@@ -929,7 +929,8 @@ mod tests {
     }

     /// Test for AWS Code Artifact
-    /// From https://github.com/astral-sh/uv/issues/1388#issuecomment-1947659088
+    ///
+    /// See: <https://github.com/astral-sh/uv/issues/1388#issuecomment-1947659088>
     #[test]
     fn parse_code_artifact_index_html() {
         let text = r#"
|
||||||
)]
|
)]
|
||||||
#[archive(check_bytes)]
|
#[archive(check_bytes)]
|
||||||
#[archive_attr(derive(Debug))]
|
#[archive_attr(derive(Debug))]
|
||||||
|
#[allow(clippy::struct_excessive_bools)]
|
||||||
pub struct CacheControl {
|
pub struct CacheControl {
|
||||||
// directives for requests and responses
|
// directives for requests and responses
|
||||||
/// * https://www.rfc-editor.org/rfc/rfc9111.html#name-max-age
|
/// * <https://www.rfc-editor.org/rfc/rfc9111.html#name-max-age>
|
||||||
/// * https://www.rfc-editor.org/rfc/rfc9111.html#name-max-age-2
|
/// * <https://www.rfc-editor.org/rfc/rfc9111.html#name-max-age-2>
|
||||||
pub max_age_seconds: Option<u64>,
|
pub max_age_seconds: Option<u64>,
|
||||||
/// * https://www.rfc-editor.org/rfc/rfc9111.html#name-no-cache
|
/// * <https://www.rfc-editor.org/rfc/rfc9111.html#name-no-cache>
|
||||||
/// * https://www.rfc-editor.org/rfc/rfc9111.html#name-no-cache-2
|
/// * <https://www.rfc-editor.org/rfc/rfc9111.html#name-no-cache-2>
|
||||||
pub no_cache: bool,
|
pub no_cache: bool,
|
||||||
/// * https://www.rfc-editor.org/rfc/rfc9111.html#name-no-store
|
/// * <https://www.rfc-editor.org/rfc/rfc9111.html#name-no-store>
|
||||||
/// * https://www.rfc-editor.org/rfc/rfc9111.html#name-no-store-2
|
/// * <https://www.rfc-editor.org/rfc/rfc9111.html#name-no-store-2>
|
||||||
pub no_store: bool,
|
pub no_store: bool,
|
||||||
/// * https://www.rfc-editor.org/rfc/rfc9111.html#name-no-transform
|
/// * <https://www.rfc-editor.org/rfc/rfc9111.html#name-no-transform>
|
||||||
/// * https://www.rfc-editor.org/rfc/rfc9111.html#name-no-transform-2
|
/// * <https://www.rfc-editor.org/rfc/rfc9111.html#name-no-transform-2>
|
||||||
pub no_transform: bool,
|
pub no_transform: bool,
|
||||||
|
|
||||||
// request-only directives
|
// request-only directives
|
||||||
/// https://www.rfc-editor.org/rfc/rfc9111.html#name-max-stale
|
/// <https://www.rfc-editor.org/rfc/rfc9111.html#name-max-stale>
|
||||||
pub max_stale_seconds: Option<u64>,
|
pub max_stale_seconds: Option<u64>,
|
||||||
/// https://www.rfc-editor.org/rfc/rfc9111.html#name-min-fresh
|
/// <https://www.rfc-editor.org/rfc/rfc9111.html#name-min-fresh>
|
||||||
pub min_fresh_seconds: Option<u64>,
|
pub min_fresh_seconds: Option<u64>,
|
||||||
|
|
||||||
// response-only directives
|
// response-only directives
|
||||||
/// https://www.rfc-editor.org/rfc/rfc9111.html#name-only-if-cached
|
/// <https://www.rfc-editor.org/rfc/rfc9111.html#name-only-if-cached>
|
||||||
pub only_if_cached: bool,
|
pub only_if_cached: bool,
|
||||||
/// https://www.rfc-editor.org/rfc/rfc9111.html#name-must-revalidate
|
/// <https://www.rfc-editor.org/rfc/rfc9111.html#name-must-revalidate>
|
||||||
pub must_revalidate: bool,
|
pub must_revalidate: bool,
|
||||||
/// https://www.rfc-editor.org/rfc/rfc9111.html#name-must-understand
|
/// <https://www.rfc-editor.org/rfc/rfc9111.html#name-must-understand>
|
||||||
pub must_understand: bool,
|
pub must_understand: bool,
|
||||||
/// https://www.rfc-editor.org/rfc/rfc9111.html#name-private
|
/// <https://www.rfc-editor.org/rfc/rfc9111.html#name-private>
|
||||||
pub private: bool,
|
pub private: bool,
|
||||||
/// https://www.rfc-editor.org/rfc/rfc9111.html#name-proxy-revalidate
|
/// <https://www.rfc-editor.org/rfc/rfc9111.html#name-proxy-revalidate>
|
||||||
pub proxy_revalidate: bool,
|
pub proxy_revalidate: bool,
|
||||||
/// https://www.rfc-editor.org/rfc/rfc9111.html#name-public
|
/// <https://www.rfc-editor.org/rfc/rfc9111.html#name-public>
|
||||||
pub public: bool,
|
pub public: bool,
|
||||||
/// https://www.rfc-editor.org/rfc/rfc9111.html#name-s-maxage
|
/// <https://www.rfc-editor.org/rfc/rfc9111.html#name-s-maxage>
|
||||||
pub s_maxage_seconds: Option<u64>,
|
pub s_maxage_seconds: Option<u64>,
|
||||||
/// https://httpwg.org/specs/rfc8246.html
|
/// <https://httpwg.org/specs/rfc8246.html>
|
||||||
pub immutable: bool,
|
pub immutable: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
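With `[lints] workspace = true` in every crate, pedantic lints such as `clippy::struct_excessive_bools` now apply here too, so the struct above carries a targeted `#[allow(...)]` rather than a crate-wide exception. A small sketch of a narrowly scoped allow (an illustrative struct, not the real `CacheControl`):

    // The allow is attached to the one item that legitimately needs many flags,
    // instead of disabling the lint for the whole crate or workspace.
    #[allow(clippy::struct_excessive_bools)]
    #[derive(Debug, Default)]
    struct Directives {
        no_cache: bool,
        no_store: bool,
        no_transform: bool,
        only_if_cached: bool,
    }

    fn main() {
        let directives = Directives::default();
        println!("{directives:?}");
    }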
@@ -72,7 +73,7 @@ impl CacheControl {

 impl<'b, B: 'b + ?Sized + AsRef<[u8]>> FromIterator<&'b B> for CacheControl {
     fn from_iter<T: IntoIterator<Item = &'b B>>(it: T) -> Self {
-        Self::from_iter(CacheControlParser::new(it))
+        CacheControlParser::new(it).collect()
     }
 }

@@ -182,7 +183,10 @@ impl<'b, B: 'b + ?Sized + AsRef<[u8]>, I: Iterator<Item = &'b B>> CacheControlPa
     /// given iterator should yield elements that satisfy `AsRef<[u8]>`.
     fn new<II: IntoIterator<IntoIter = I>>(headers: II) -> CacheControlParser<'b, I> {
         let mut directives = headers.into_iter();
-        let cur = directives.next().map(|h| h.as_ref()).unwrap_or(b"");
+        let cur = directives
+            .next()
+            .map(std::convert::AsRef::as_ref)
+            .unwrap_or(b"");
         CacheControlParser {
             cur,
             directives,

@@ -262,7 +266,7 @@ impl<'b, B: 'b + ?Sized + AsRef<[u8]>, I: Iterator<Item = &'b B>> CacheControlPa
             self.cur = &self.cur[1..];
             self.parse_quoted_string()
         } else {
-            self.parse_token().map(|s| s.into_bytes())
+            self.parse_token().map(std::string::String::into_bytes)
         }
     }

@@ -368,7 +372,7 @@ impl<'b, B: 'b + ?Sized + AsRef<[u8]>, I: Iterator<Item = &'b B>> Iterator
     fn next(&mut self) -> Option<CacheControlDirective> {
         loop {
             if self.cur.is_empty() {
-                self.cur = self.directives.next().map(|h| h.as_ref())?;
+                self.cur = self.directives.next().map(std::convert::AsRef::as_ref)?;
             }
             while !self.cur.is_empty() {
                 self.skip_whitespace();
@@ -122,17 +122,17 @@ actually need to make an HTTP request).

 # Additional reading

-* Short introduction to `Cache-Control`: https://csswizardry.com/2019/03/cache-control-for-civilians/
+* Short introduction to `Cache-Control`: <https://csswizardry.com/2019/03/cache-control-for-civilians/>
-* Caching best practcies: https://jakearchibald.com/2016/caching-best-practices/
+* Caching best practcies: <https://jakearchibald.com/2016/caching-best-practices/>
-* Overview of HTTP caching: https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching
+* Overview of HTTP caching: <https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching>
-* MDN docs for `Cache-Control`: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control
+* MDN docs for `Cache-Control`: <https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control>
-* THe 1997 RFC for HTTP 1.1: https://www.rfc-editor.org/rfc/rfc2068#section-13
+* The 1997 RFC for HTTP 1.1: <https://www.rfc-editor.org/rfc/rfc2068#section-13>
-* The 1999 update to HTTP 1.1: https://www.rfc-editor.org/rfc/rfc2616.html#section-13
+* The 1999 update to HTTP 1.1: <https://www.rfc-editor.org/rfc/rfc2616.html#section-13>
-* The "stale content" cache-control extension: https://httpwg.org/specs/rfc5861.html
+* The "stale content" cache-control extension: <https://httpwg.org/specs/rfc5861.html>
-* HTTP 1.1 caching (superseded by RFC 9111): https://httpwg.org/specs/rfc7234.html
+* HTTP 1.1 caching (superseded by RFC 9111): <https://httpwg.org/specs/rfc7234.html>
-* The "immutable" cache-control extension: https://httpwg.org/specs/rfc8246.html
+* The "immutable" cache-control extension: <https://httpwg.org/specs/rfc8246.html>
-* HTTP semantics (If-None-Match, etc.): https://www.rfc-editor.org/rfc/rfc9110#section-8.8.3
+* HTTP semantics (If-None-Match, etc.): <https://www.rfc-editor.org/rfc/rfc9110#section-8.8.3>
-* HTTP caching (obsoletes RFC 7234): https://www.rfc-editor.org/rfc/rfc9111.html
+* HTTP caching (obsoletes RFC 7234): <https://www.rfc-editor.org/rfc/rfc9111.html>
 */

 use std::time::{Duration, SystemTime};

@@ -1193,7 +1193,7 @@ impl<'a> From<&'a http::HeaderMap> for ResponseHeaders {
 #[archive(check_bytes)]
 #[archive_attr(derive(Debug))]
 struct ETag {
-    /// The actual ETag validator value.
+    /// The actual `ETag` validator value.
     ///
     /// This is received in the response, recorded as part of the cache policy
     /// and then sent back in a re-validation request. This is the "best"

@@ -1219,7 +1219,7 @@ struct ETag {
 }

 impl ETag {
-    /// Parses an ETag from a header value.
+    /// Parses an `ETag` from a header value.
     ///
     /// We are a little permissive here and allow arbitrary bytes,
     /// where as [RFC 9110 S8.8.3] is a bit more restrictive.
@@ -68,7 +68,7 @@ impl LineHaul {
             .iter()
             .find_map(|&var_name| env::var(var_name).ok().map(|_| true));

-        let libc = match platform.map(|platform| platform.os()) {
+        let libc = match platform.map(platform_tags::Platform::os) {
             Some(Os::Manylinux { major, minor }) => Some(Libc {
                 lib: Some("glibc".to_string()),
                 version: Some(format!("{major}.{minor}")),

@@ -94,7 +94,7 @@ impl LineHaul {
                 libc,
             })
         } else if cfg!(target_os = "macos") {
-            let version = match platform.map(|platform| platform.os()) {
+            let version = match platform.map(platform_tags::Platform::os) {
                 Some(Os::Macos { major, minor }) => Some(format!("{major}.{minor}")),
                 _ => None,
             };
@@ -13,7 +13,7 @@ pub(crate) struct OfflineError {

 impl OfflineError {
     /// Returns the URL that caused the error.
-    pub fn url(&self) -> &Url {
+    pub(crate) fn url(&self) -> &Url {
         &self.url
     }
 }
@@ -130,15 +130,15 @@ impl<'a> RegistryClientBuilder<'a> {
         let mut builder = BaseClientBuilder::new();

         if let Some(client) = self.client {
-            builder = builder.client(client)
+            builder = builder.client(client);
         }

         if let Some(markers) = self.markers {
-            builder = builder.markers(markers)
+            builder = builder.markers(markers);
         }

         if let Some(platform) = self.platform {
-            builder = builder.platform(platform)
+            builder = builder.platform(platform);
         }

         let client = builder
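The added semicolons on `builder = builder.client(client)` and friends match clippy's `semicolon_if_nothing_returned`: a statement whose value is `()` ends with an explicit `;` rather than sitting as a trailing expression. A tiny sketch with a hypothetical builder (not the uv client builder):

    #[derive(Default)]
    struct Builder {
        retries: u32,
    }

    impl Builder {
        fn retries(mut self, retries: u32) -> Self {
            self.retries = retries;
            self
        }
    }

    fn configure(extra_retries: Option<u32>) -> Builder {
        let mut builder = Builder::default();
        if let Some(retries) = extra_retries {
            // The assignment evaluates to `()`, so it ends with a semicolon,
            // just like `builder = builder.client(client);` in the hunk above.
            builder = builder.retries(retries);
        }
        builder
    }

    fn main() {
        let builder = configure(Some(3));
        assert_eq!(builder.retries, 3);
    }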
@@ -380,7 +380,7 @@ impl RegistryClient {
     ) -> Result<OwnedArchive<SimpleMetadata>, Error> {
         let path = url
             .to_file_path()
-            .map_err(|_| ErrorKind::NonFileUrl(url.clone()))?
+            .map_err(|()| ErrorKind::NonFileUrl(url.clone()))?
             .join("index.html");
         let text = fs_err::tokio::read_to_string(&path)
             .await

@@ -416,7 +416,7 @@ impl RegistryClient {
         if url.scheme() == "file" {
             let path = url
                 .to_file_path()
-                .map_err(|_| ErrorKind::NonFileUrl(url.clone()))?;
+                .map_err(|()| ErrorKind::NonFileUrl(url.clone()))?;
             WheelLocation::Path(path)
         } else {
             WheelLocation::Url(url)

@@ -427,7 +427,7 @@ impl RegistryClient {
         if url.scheme() == "file" {
             let path = url
                 .to_file_path()
-                .map_err(|_| ErrorKind::NonFileUrl(url.clone()))?;
+                .map_err(|()| ErrorKind::NonFileUrl(url.clone()))?;
             WheelLocation::Path(path)
         } else {
             WheelLocation::Url(url)
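The `map_err(|_| ...)` to `map_err(|()| ...)` changes spell out that `Url::to_file_path` fails with the unit type, and the workspace lint set prefers matching `()` explicitly over discarding it with `_`. A standalone sketch using a plain `Result<_, ()>` stand-in rather than the `url` crate:

    use std::path::PathBuf;

    // Stand-in for `Url::to_file_path`, which returns `Result<PathBuf, ()>`.
    fn to_file_path(url: &str) -> Result<PathBuf, ()> {
        url.strip_prefix("file://").map(PathBuf::from).ok_or(())
    }

    #[derive(Debug)]
    enum ErrorKind {
        NonFileUrl(String),
    }

    fn local_index(url: &str) -> Result<PathBuf, ErrorKind> {
        // The closure names the unit error `()` explicitly instead of `_`.
        let path = to_file_path(url).map_err(|()| ErrorKind::NonFileUrl(url.to_string()))?;
        Ok(path.join("index.html"))
    }

    fn main() {
        assert!(local_index("file:///srv/simple").is_ok());
        assert!(local_index("https://example.com/simple").is_err());
    }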
@@ -769,7 +769,7 @@ impl VersionFiles {
         match filename {
             DistFilename::WheelFilename(name) => self.wheels.push(VersionWheel { name, file }),
             DistFilename::SourceDistFilename(name) => {
-                self.source_dists.push(VersionSourceDist { name, file })
+                self.source_dists.push(VersionSourceDist { name, file });
             }
         }
     }

@@ -990,7 +990,8 @@ mod tests {
     }

     /// Test for AWS Code Artifact registry
-    /// Regression coverage of https://github.com/astral-sh/uv/issues/1388
+    ///
+    /// See: <https://github.com/astral-sh/uv/issues/1388>
     #[test]
     fn relative_urls_code_artifact() -> Result<(), JoinRelativeError> {
         let text = r#"

@@ -1021,7 +1022,7 @@ mod tests {
         .iter()
         .map(|file| pypi_types::base_url_join_relative(base.as_url().as_str(), &file.url))
         .collect::<Result<Vec<_>, JoinRelativeError>>()?;
-    let urls = urls.iter().map(|url| url.as_str()).collect::<Vec<_>>();
+    let urls = urls.iter().map(reqwest::Url::as_str).collect::<Vec<_>>();
     insta::assert_debug_snapshot!(urls, @r###"
     [
         "https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/0.1/Flask-0.1.tar.gz#sha256=9da884457e910bf0847d396cb4b778ad9f3c3d17db1c5997cb861937bd284237",
@@ -172,7 +172,10 @@ where
         // archive for SimpleMetadata in the constructor, so we can skip
         // validation here. Since we don't mutate the buffer, this conversion
         // is guaranteed to be correct.
-        unsafe { rkyv::archived_root::<A>(&self.raw) }
+        #[allow(unsafe_code)]
+        unsafe {
+            rkyv::archived_root::<A>(&self.raw)
+        }
     }
 }

@@ -230,6 +233,7 @@ impl<const N: usize> rkyv::ser::Serializer for Serializer<N> {
     }

     #[inline]
+    #[allow(unsafe_code)]
     unsafe fn resolve_aligned<T: Archive + ?Sized>(
         &mut self,
         value: &T,

@@ -241,6 +245,7 @@ impl<const N: usize> rkyv::ser::Serializer for Serializer<N> {
     }

     #[inline]
+    #[allow(unsafe_code)]
     unsafe fn resolve_unsized_aligned<T: ArchiveUnsized + ?Sized>(
         &mut self,
         value: &T,

@@ -255,6 +260,7 @@ impl<const N: usize> rkyv::ser::Serializer for Serializer<N> {

 impl<const N: usize> rkyv::ser::ScratchSpace for Serializer<N> {
     #[inline]
+    #[allow(unsafe_code)]
     unsafe fn push_scratch(
         &mut self,
         layout: std::alloc::Layout,

@@ -265,6 +271,7 @@ impl<const N: usize> rkyv::ser::ScratchSpace for Serializer<N> {
     }

     #[inline]
+    #[allow(unsafe_code)]
     unsafe fn pop_scratch(
         &mut self,
         ptr: std::ptr::NonNull<u8>,

@@ -325,7 +332,7 @@ impl std::error::Error for SerializerError {
 ///
 /// > Regular serializers don’t support the custom error handling needed for
 /// > this type by default. To use this wrapper, a custom serializer with an
-/// > error type satisfying <S as Fallible>::Error: From<AsStringError> must be
+/// > error type satisfying <S as Fallible>`::Error`: From<AsStringError> must be
 /// > provided.
 ///
 /// If we didn't need to use `rkyv::with::AsString` (which we do for
|
||||||
.headers()
|
.headers()
|
||||||
.get(USER_AGENT)
|
.get(USER_AGENT)
|
||||||
.and_then(|v| v.to_str().ok())
|
.and_then(|v| v.to_str().ok())
|
||||||
.map(|s| s.to_string())
|
.map(ToString::to_string)
|
||||||
.unwrap_or_default(); // Empty Default
|
.unwrap_or_default(); // Empty Default
|
||||||
future::ok::<_, hyper::Error>(Response::new(Full::new(Bytes::from(user_agent))))
|
future::ok::<_, hyper::Error>(Response::new(Full::new(Bytes::from(user_agent))))
|
||||||
});
|
});
|
||||||
|
@ -89,7 +89,7 @@ async fn test_user_agent_has_linehaul() -> Result<()> {
|
||||||
.headers()
|
.headers()
|
||||||
.get(USER_AGENT)
|
.get(USER_AGENT)
|
||||||
.and_then(|v| v.to_str().ok())
|
.and_then(|v| v.to_str().ok())
|
||||||
.map(|s| s.to_string())
|
.map(ToString::to_string)
|
||||||
.unwrap_or_default(); // Empty Default
|
.unwrap_or_default(); // Empty Default
|
||||||
future::ok::<_, hyper::Error>(Response::new(Full::new(Bytes::from(user_agent))))
|
future::ok::<_, hyper::Error>(Response::new(Full::new(Bytes::from(user_agent))))
|
||||||
});
|
});
|
||||||
|
|
|
@@ -6,6 +6,9 @@ edition = "2021"
 [lib]
 proc-macro = true

+[lints]
+workspace = true
+
 [dependencies]
 quote = { workspace = true }
 syn = { workspace = true }

@@ -4,6 +4,9 @@ version = "0.0.1"
 edition = "2021"
 description = "Normalization for distribution, package and extra anmes"

+[lints]
+workspace = true
+
 [dependencies]
 rkyv = { workspace = true }
 schemars = { workspace = true, optional = true }

@@ -9,6 +9,9 @@ repository.workspace = true
 authors.workspace = true
 license.workspace = true

+[lints]
+workspace = true
+
 [dependencies]
 cache-key = { workspace = true }
 distribution-filename = { workspace = true }

@@ -39,6 +42,3 @@ thiserror = { workspace = true }
 toml = { workspace = true }
 tracing = { workspace = true }
 url = { workspace = true }
-
-[lints]
-workspace = true
@@ -189,7 +189,7 @@ impl std::fmt::Display for DisplayResolutionGraph<'_> {
                 let deps = edges
                     .iter()
                     .map(|dependency| format!("{}", dependency.name()))
-                    .chain(source.iter().map(std::string::ToString::to_string))
+                    .chain(source.iter().map(ToString::to_string))
                     .collect::<Vec<_>>()
                     .join(", ");
                 let comment = format!("# via {deps}").green().to_string();

@@ -214,7 +214,7 @@ impl std::fmt::Display for DisplayResolutionGraph<'_> {
                 let separator = "\n";
                 let deps = source
                     .iter()
-                    .map(std::string::ToString::to_string)
+                    .map(ToString::to_string)
                     .chain(
                         edges
                             .iter()